Row schema:

  repo                string
  pull_number         int64
  instance_id         string
  issue_numbers       sequence
  base_commit         string
  patch               string
  test_patch          string
  problem_statement   string
  hints_text          string
  created_at          timestamp[ns, tz=UTC]
  version             float64
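For readers who find a typed mirror easier to scan than a dtype listing, the same schema can be sketched as a Java record. The record name and the Java type mappings (Instant for the UTC timestamp, List<String> for the sequence column) are assumptions for illustration, not part of the dataset definition:

import java.time.Instant;
import java.util.List;

// Hypothetical mirror of the row schema above; field names follow the dataset columns.
public record PullRequestTaskRow(
    String repo,                 // e.g. "assertj/assertj"
    long pullNumber,             // int64
    String instanceId,
    List<String> issueNumbers,   // sequence column; strings, judging by the row value below
    String baseCommit,
    String patch,                // unified diff, reproduced further below
    String testPatch,
    String problemStatement,
    String hintsText,
    Instant createdAt,           // timestamp[ns, tz=UTC]
    double version) {            // float64
}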
Row values:

  repo           assertj/assertj
  pull_number    3820
  instance_id    assertj__assertj-3820
  issue_numbers  ["2631"]
  base_commit    8f5c6b7d4a6994a234c21e736057d987679def77
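The remaining field shown in this excerpt is patch, reproduced below. Its central change is to relocate AssertJ's comparison-strategy classes (ComparisonStrategy, ComparatorBasedComparisonStrategy, StandardComparisonStrategy, and the element comparison strategies) from the internal package org.assertj.core.internal to the newly exported org.assertj.core.api.comparisonstrategy package, to add a protected getComparisonStrategy() accessor to AbstractAssert, and to remove the deprecated protected areEqual(Object, Object). A minimal sketch of how a custom assertion could use the now-public API after this change follows; PriceAssert and its messages are illustrative assumptions, not part of the patch:

import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; // public package after this patch

// Illustrative custom assertion; not part of the patch.
public class PriceAssert extends AbstractAssert<PriceAssert, Double> {

  public PriceAssert(Double actual) {
    super(actual, PriceAssert.class);
  }

  public PriceAssert isPricedAs(Double expected) {
    isNotNull();
    // Replaces the removed deprecated areEqual(actual, other): fetch the active
    // strategy via the new protected accessor and compare through it, so a
    // comparator registered with usingComparator(...) is honoured.
    ComparisonStrategy strategy = getComparisonStrategy();
    if (!strategy.areEqual(actual, expected)) {
      failWithMessage("Expected price <%s> but was <%s>", expected, actual);
    }
    return this;
  }
}

A comparator registered through usingComparator(Comparator) flows into the strategy returned by getComparisonStrategy(), which is the behaviour the removed areEqual helper previously exposed.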
diff --git a/assertj-core/src/main/java/module-info.java b/assertj-core/src/main/java/module-info.java --- a/assertj-core/src/main/java/module-info.java +++ b/assertj-core/src/main/java/module-info.java @@ -18,6 +18,7 @@ // AssertJ Core's package API exports org.assertj.core.annotations; exports org.assertj.core.api; + exports org.assertj.core.api.comparisonstrategy; exports org.assertj.core.api.exception; exports org.assertj.core.api.filter; exports org.assertj.core.api.iterable; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractAssert.java @@ -36,6 +36,8 @@ import java.util.function.Predicate; import java.util.function.Supplier; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.api.recursive.assertion.RecursiveAssertionConfiguration; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; import org.assertj.core.configuration.ConfigurationProvider; @@ -44,8 +46,6 @@ import org.assertj.core.error.BasicErrorMessageFactory; import org.assertj.core.error.ErrorMessageFactory; import org.assertj.core.error.MessageFormatter; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; import org.assertj.core.internal.Conditions; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Objects; @@ -117,6 +117,10 @@ public WritableAssertionInfo getWritableAssertionInfo() { return info; } + protected ComparisonStrategy getComparisonStrategy() { + return objects.getComparisonStrategy(); + } + /** * Throw an assertion error based on information in this assertion. Equivalent to: * <pre><code class='java'>throw failure(errorMessage, arguments);</code></pre> @@ -1268,21 +1272,6 @@ protected RecursiveAssertionAssert usingRecursiveAssertion() { return (ASSERT) assertFactory.createAssert(extractedValue).withAssertionState(myself); } - /** - * Returns true if actual and other are equal according to the current comparison strategy. - * - * @param actual the object to compare to other - * @param other the object to compare to actual - * @return true if actual and other are equal according to the underlying comparison strategy. - * @since 3.23.0 - * @deprecated {@link ComparisonStrategy} will become part of the public API in the next major release and this method - * will be removed. - */ - @Deprecated(since = "3", forRemoval = true) - protected boolean areEqual(Object actual, Object other) { - return objects.getComparisonStrategy().areEqual(actual, other); - } - /** * Returns actual (the object currently under test). 
* <p> diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractBigDecimalAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractBigDecimalAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractBigDecimalAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractBigDecimalAssert.java @@ -20,7 +20,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; import org.assertj.core.internal.BigDecimals; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractBigIntegerAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractBigIntegerAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractBigIntegerAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractBigIntegerAssert.java @@ -18,7 +18,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; import org.assertj.core.internal.BigIntegers; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractByteArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractByteArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractByteArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractByteArrayAssert.java @@ -22,7 +22,7 @@ import org.assertj.core.data.Index; import org.assertj.core.internal.ByteArrays; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; public abstract class AbstractByteArrayAssert<SELF extends AbstractByteArrayAssert<SELF>> diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractByteAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractByteAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractByteAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractByteAssert.java @@ -17,7 +17,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; import org.assertj.core.internal.Bytes; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharArrayAssert.java @@ -18,7 +18,7 @@ import org.assertj.core.data.Index; import org.assertj.core.internal.CharArrays; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; public abstract class AbstractCharArrayAssert<SELF extends AbstractCharArrayAssert<SELF>> diff --git 
a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java @@ -44,7 +44,7 @@ import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Strings; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharacterAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharacterAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharacterAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharacterAssert.java @@ -15,7 +15,7 @@ import java.util.Comparator; import org.assertj.core.internal.Characters; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractComparableAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractComparableAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractComparableAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractComparableAssert.java @@ -15,7 +15,7 @@ import java.util.Comparator; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractDateAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractDateAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractDateAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractDateAssert.java @@ -40,7 +40,7 @@ import org.assertj.core.configuration.Configuration; import org.assertj.core.configuration.ConfigurationProvider; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Dates; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleArrayAssert.java @@ -17,7 +17,7 @@ import org.assertj.core.data.Index; import org.assertj.core.data.Offset; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.DoubleArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleAssert.java +++ 
b/assertj-core/src/main/java/org/assertj/core/api/AbstractDoubleAssert.java @@ -21,7 +21,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Doubles; import org.assertj.core.internal.Failures; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatArrayAssert.java @@ -18,7 +18,7 @@ import org.assertj.core.data.Index; import org.assertj.core.data.Offset; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.FloatArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractFloatAssert.java @@ -21,7 +21,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Floats; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractIntArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractIntArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractIntArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractIntArrayAssert.java @@ -16,7 +16,7 @@ import java.util.Comparator; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.IntArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractIntegerAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractIntegerAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractIntegerAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractIntegerAssert.java @@ -16,7 +16,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Integers; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractIterableAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractIterableAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractIterableAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractIterableAssert.java @@ -50,6 +50,9 @@ import java.util.stream.Stream; import 
org.assertj.core.annotations.Beta; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.IterableElementComparisonStrategy; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.iterable.ThrowingExtractor; @@ -60,11 +63,8 @@ import org.assertj.core.groups.FieldsOrPropertiesExtractor; import org.assertj.core.groups.Tuple; import org.assertj.core.internal.CommonErrors; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ExtendedByTypesComparator; -import org.assertj.core.internal.IterableElementComparisonStrategy; import org.assertj.core.internal.Iterables; import org.assertj.core.internal.ObjectArrays; import org.assertj.core.internal.Objects; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractListAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractListAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractListAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractListAssert.java @@ -18,8 +18,8 @@ import org.assertj.core.data.Index; import org.assertj.core.description.Description; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.internal.Lists; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java @@ -32,7 +32,7 @@ import org.assertj.core.data.TemporalUnitOffset; import org.assertj.core.internal.ChronoLocalDateTimeComparator; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Objects; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAdderAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAdderAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAdderAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAdderAssert.java @@ -20,7 +20,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Longs; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java --- 
a/assertj-core/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java @@ -16,7 +16,7 @@ import java.util.Comparator; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.LongArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractLongAssert.java @@ -16,7 +16,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Longs; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java @@ -46,6 +46,8 @@ import java.util.stream.Stream; import org.assertj.core.annotations.Beta; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ObjectArrayElementComparisonStrategy; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.iterable.ThrowingExtractor; @@ -57,11 +59,9 @@ import org.assertj.core.groups.FieldsOrPropertiesExtractor; import org.assertj.core.groups.Tuple; import org.assertj.core.internal.CommonErrors; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ExtendedByTypesComparator; import org.assertj.core.internal.Iterables; -import org.assertj.core.internal.ObjectArrayElementComparisonStrategy; import org.assertj.core.internal.ObjectArrays; import org.assertj.core.internal.Objects; import org.assertj.core.internal.TypeComparators; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractObjectAssert.java @@ -32,7 +32,7 @@ import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; import org.assertj.core.description.Description; import org.assertj.core.groups.Tuple; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Objects; import org.assertj.core.internal.TypeComparators; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java --- 
a/assertj-core/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java @@ -28,7 +28,7 @@ import org.assertj.core.data.TemporalOffset; import org.assertj.core.data.TemporalUnitOffset; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Objects; import org.assertj.core.internal.OffsetDateTimeByInstantComparator; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractOptionalAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractOptionalAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractOptionalAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractOptionalAssert.java @@ -28,10 +28,10 @@ import org.assertj.core.annotations.Beta; import org.assertj.core.api.recursive.assertion.RecursiveAssertionConfiguration; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.internal.Failures; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractShortArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractShortArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractShortArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractShortArrayAssert.java @@ -17,7 +17,7 @@ import java.util.Comparator; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ShortArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractShortAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractShortAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractShortAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractShortAssert.java @@ -16,7 +16,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Shorts; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractStringAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractStringAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractStringAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractStringAssert.java @@ -27,7 +27,7 @@ import java.util.Comparator; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import 
org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractTemporalAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractTemporalAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractTemporalAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractTemporalAssert.java @@ -21,7 +21,7 @@ import org.assertj.core.data.TemporalOffset; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Objects; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractUniversalComparableAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractUniversalComparableAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractUniversalComparableAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractUniversalComparableAssert.java @@ -16,7 +16,7 @@ import java.util.Comparator; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.CheckReturnValue; /** diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java @@ -26,7 +26,7 @@ import org.assertj.core.data.TemporalOffset; import org.assertj.core.internal.ChronoZonedDateTimeByInstantComparator; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Objects; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerArrayAssert.java @@ -18,7 +18,7 @@ import java.util.concurrent.atomic.AtomicIntegerArray; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.IntArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerAssert.java @@ -21,7 +21,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; import org.assertj.core.internal.Comparables; -import 
org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Integers; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AtomicLongArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AtomicLongArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AtomicLongArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AtomicLongArrayAssert.java @@ -18,7 +18,7 @@ import java.util.concurrent.atomic.AtomicLongArray; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.LongArrays; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AtomicLongAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AtomicLongAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AtomicLongAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AtomicLongAssert.java @@ -21,7 +21,7 @@ import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Longs; import org.assertj.core.util.CheckReturnValue; diff --git a/assertj-core/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java @@ -41,6 +41,8 @@ import java.util.function.Function; import java.util.function.Predicate; +import org.assertj.core.api.comparisonstrategy.AtomicReferenceArrayElementComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.iterable.ThrowingExtractor; @@ -50,9 +52,7 @@ import org.assertj.core.description.Description; import org.assertj.core.groups.FieldsOrPropertiesExtractor; import org.assertj.core.groups.Tuple; -import org.assertj.core.internal.AtomicReferenceArrayElementComparisonStrategy; import org.assertj.core.internal.CommonErrors; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ExtendedByTypesComparator; import org.assertj.core.internal.Iterables; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/AbstractComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/AbstractComparisonStrategy.java similarity index 98% rename from assertj-core/src/main/java/org/assertj/core/internal/AbstractComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/AbstractComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/AbstractComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/AbstractComparisonStrategy.java @@ -10,7 +10,7 @@ * * 
Copyright 2012-2025 the original author or authors. */ -package org.assertj.core.internal; +package org.assertj.core.api.comparisonstrategy; import static java.lang.reflect.Array.getLength; import static java.util.Collections.EMPTY_SET; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/AtomicReferenceArrayElementComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/AtomicReferenceArrayElementComparisonStrategy.java similarity index 98% rename from assertj-core/src/main/java/org/assertj/core/internal/AtomicReferenceArrayElementComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/AtomicReferenceArrayElementComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/AtomicReferenceArrayElementComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/AtomicReferenceArrayElementComparisonStrategy.java @@ -10,7 +10,7 @@ * * Copyright 2012-2025 the original author or authors. */ -package org.assertj.core.internal; +package org.assertj.core.api.comparisonstrategy; import static org.assertj.core.configuration.ConfigurationProvider.CONFIGURATION_PROVIDER; import static org.assertj.core.util.Arrays.isArray; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ComparatorBasedComparisonStrategy.java similarity index 98% rename from assertj-core/src/main/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ComparatorBasedComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ComparatorBasedComparisonStrategy.java @@ -10,7 +10,7 @@ * * Copyright 2012-2025 the original author or authors. */ -package org.assertj.core.internal; +package org.assertj.core.api.comparisonstrategy; import static org.assertj.core.configuration.ConfigurationProvider.CONFIGURATION_PROVIDER; import static org.assertj.core.util.IterableUtil.isNullOrEmpty; @@ -21,6 +21,8 @@ import java.util.Set; import java.util.TreeSet; +import org.assertj.core.internal.DescribableComparator; + /** * Implements {@link ComparisonStrategy} contract with a comparison strategy based on a {@link Comparator}. * @@ -28,7 +30,7 @@ */ public class ComparatorBasedComparisonStrategy extends AbstractComparisonStrategy { - static final int NOT_EQUAL = -1; + public static final int NOT_EQUAL = -1; // A raw type is necessary because we can't make assumptions on object to be compared. @SuppressWarnings("rawtypes") diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ComparisonStrategy.java similarity index 99% rename from assertj-core/src/main/java/org/assertj/core/internal/ComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ComparisonStrategy.java @@ -10,7 +10,7 @@ * * Copyright 2012-2025 the original author or authors. 
*/ -package org.assertj.core.internal; +package org.assertj.core.api.comparisonstrategy; /** * Describes the contract to implement a <b>consistent</b> comparison strategy that covers :<br> @@ -157,7 +157,7 @@ public interface ComparisonStrategy { /** * Used in error messages when a custom comparison strategy was used to compare values. * <p> - * For example {@link ComparatorBasedComparisonStrategy} returns: + * For example {@link ComparatorBasedComparisonStrategy} returns: * <pre><code>"when comparing values using " + toString()</code></pre> * * @return the comparison strategy description used in error messages. diff --git a/assertj-core/src/main/java/org/assertj/core/internal/IterableElementComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/IterableElementComparisonStrategy.java similarity index 97% rename from assertj-core/src/main/java/org/assertj/core/internal/IterableElementComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/IterableElementComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/IterableElementComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/IterableElementComparisonStrategy.java @@ -10,7 +10,7 @@ * * Copyright 2012-2025 the original author or authors. */ -package org.assertj.core.internal; +package org.assertj.core.api.comparisonstrategy; import static org.assertj.core.configuration.ConfigurationProvider.CONFIGURATION_PROVIDER; import static org.assertj.core.util.IterableUtil.sizeOf; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ObjectArrayElementComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ObjectArrayElementComparisonStrategy.java similarity index 97% rename from assertj-core/src/main/java/org/assertj/core/internal/ObjectArrayElementComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ObjectArrayElementComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ObjectArrayElementComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/ObjectArrayElementComparisonStrategy.java @@ -10,7 +10,7 @@ * * Copyright 2012-2025 the original author or authors. */ -package org.assertj.core.internal; +package org.assertj.core.api.comparisonstrategy; import static org.assertj.core.configuration.ConfigurationProvider.CONFIGURATION_PROVIDER; import static org.assertj.core.util.Arrays.isArray; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/StandardComparisonStrategy.java b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/StandardComparisonStrategy.java similarity index 98% rename from assertj-core/src/main/java/org/assertj/core/internal/StandardComparisonStrategy.java rename to assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/StandardComparisonStrategy.java --- a/assertj-core/src/main/java/org/assertj/core/internal/StandardComparisonStrategy.java +++ b/assertj-core/src/main/java/org/assertj/core/api/comparisonstrategy/StandardComparisonStrategy.java @@ -10,14 +10,13 @@ * * Copyright 2012-2025 the original author or authors. 
*/ -package org.assertj.core.internal; - -import static org.assertj.core.util.Preconditions.checkArgument; +package org.assertj.core.api.comparisonstrategy; import java.util.Collection; import java.util.Iterator; import java.util.Set; import java.util.TreeSet; + import org.assertj.core.util.Objects; import org.assertj.core.util.Streams; diff --git a/assertj-core/src/main/java/org/assertj/core/error/MessageFormatter.java b/assertj-core/src/main/java/org/assertj/core/error/MessageFormatter.java --- a/assertj-core/src/main/java/org/assertj/core/error/MessageFormatter.java +++ b/assertj-core/src/main/java/org/assertj/core/error/MessageFormatter.java @@ -16,7 +16,7 @@ import static org.assertj.core.util.Strings.formatIfArgs; import org.assertj.core.description.Description; -import org.assertj.core.internal.AbstractComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.AbstractComparisonStrategy; import org.assertj.core.presentation.Representation; /** diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfter.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfter.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfter.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfter.java @@ -16,8 +16,8 @@ import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} is after another one failed. diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that an {@link Object} is after or equal to another one diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterYear.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterYear.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterYear.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeAfterYear.java @@ -14,8 +14,8 @@ import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} is after given year failed. 
diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBefore.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBefore.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBefore.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBefore.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Object} is before another one failed. diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that an {@link Object} is before or equal to another one diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeYear.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeYear.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeYear.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBeforeYear.java @@ -14,8 +14,8 @@ import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} is before given year failed. 
diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBetween.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBetween.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBetween.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeBetween.java @@ -14,8 +14,8 @@ import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is between a start and an end diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeEqual.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeEqual.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeEqual.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeEqual.java @@ -21,10 +21,10 @@ import java.util.Objects; import org.assertj.core.description.Description; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.internal.Failures; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.presentation.Representation; /** diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreater.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreater.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreater.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreater.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is greater than another one failed. 
diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreaterOrEqual.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreaterOrEqual.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreaterOrEqual.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeGreaterOrEqual.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is greater than or equal to another one diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeIn.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeIn.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeIn.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeIn.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is in a group of values (e.g. an array or diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInTheFuture.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInTheFuture.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInTheFuture.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInTheFuture.java @@ -15,8 +15,8 @@ import java.time.temporal.Temporal; import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} or a {@link Temporal} is in the future failed. diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInThePast.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInThePast.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInThePast.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeInThePast.java @@ -15,8 +15,8 @@ import java.time.temporal.Temporal; import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} or a {@link Temporal} is in the past failed. 
diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLess.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLess.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLess.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLess.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is less than another one failed. diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLessOrEqual.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLessOrEqual.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLessOrEqual.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeLessOrEqual.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is less than or equal to another one failed. diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubsetOf.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubsetOf.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubsetOf.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubsetOf.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that an <code>Iterable</code> is a subset of an other set diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubstring.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubstring.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubstring.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeSubstring.java @@ -12,7 +12,7 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; public class ShouldBeSubstring extends BasicErrorMessageFactory { diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeToday.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeToday.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldBeToday.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldBeToday.java @@ -15,8 +15,8 @@ import java.time.LocalDate; import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} is 
today (matching only year, month and diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContain.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContain.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContain.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContain.java @@ -20,8 +20,8 @@ import java.nio.file.Path; import java.util.List; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements contains a given set of values failed. diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAnyOf.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAnyOf.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAnyOf.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAnyOf.java @@ -12,7 +12,7 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; public class ShouldContainAnyOf extends BasicErrorMessageFactory { diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAtIndex.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAtIndex.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAtIndex.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainAtIndex.java @@ -13,8 +13,8 @@ package org.assertj.core.error; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements contains a value at a given index diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequence.java @@ -17,8 +17,8 @@ import java.util.Set; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@code CharSequence} contains another diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import 
org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@code CharSequence} contains another {@code CharSequence} only diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactly.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactly.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactly.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactly.java @@ -17,9 +17,9 @@ import java.util.List; import org.assertj.core.configuration.Configuration; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.internal.IndexedDiff; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements contains exactly a given set diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder.java @@ -16,7 +16,7 @@ import static org.assertj.core.error.ShouldContainExactlyInAnyOrder.ErrorType.NOT_FOUND_ONLY; import static org.assertj.core.util.IterableUtil.isNullOrEmpty; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements contains exactly a given set diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnly.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnly.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnly.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnly.java @@ -17,8 +17,8 @@ import static org.assertj.core.error.ShouldContainOnly.ErrorType.NOT_FOUND_ONLY; import static org.assertj.core.util.IterableUtil.isNullOrEmpty; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements contains only a given set of diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnlyKeys.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnlyKeys.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnlyKeys.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainOnlyKeys.java @@ -14,8 +14,8 @@ import static org.assertj.core.util.IterableUtil.isNullOrEmpty; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies map 
contains only a given set of keys and diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequence.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements contains a sequence of values diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@code CharSequence} contains a Sequence of diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequence.java @@ -17,8 +17,8 @@ import java.lang.reflect.Array; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.util.Arrays; import org.assertj.core.util.IterableUtil; diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence.java @@ -15,8 +15,8 @@ import static java.util.stream.Collectors.joining; import java.util.Map; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@code CharSequence} contains a Subsequence of diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainsOnlyOnce.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainsOnlyOnce.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldContainsOnlyOnce.java +++ 
b/assertj-core/src/main/java/org/assertj/core/error/ShouldContainsOnlyOnce.java @@ -16,8 +16,8 @@ import java.util.Set; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements contains only a given set of diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWith.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWith.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWith.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWith.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements ends with a given value or diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWithIgnoringCase.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWithIgnoringCase.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWithIgnoringCase.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldEndWithIgnoringCase.java @@ -12,7 +12,7 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that {@link CharSequence} ends with a given value diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeBetween.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeBetween.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeBetween.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeBetween.java @@ -14,8 +14,8 @@ import java.util.Date; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@link Date} is not between start - end dates diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeEqual.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeEqual.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeEqual.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeEqual.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that two objects are not equal failed. 
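For context, a minimal sketch of how the two strategies these error factories are parameterized with behave once they come from the new public package; the StrategyDemo class is a made-up illustration, not part of the patch.

import java.util.Comparator;

import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy;
import org.assertj.core.api.comparisonstrategy.ComparisonStrategy;
import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy;

class StrategyDemo {
  public static void main(String[] args) {
    // default strategy: plain equality
    ComparisonStrategy standard = StandardComparisonStrategy.instance();
    // comparator-backed strategy: equality delegated to the given Comparator
    ComparisonStrategy caseInsensitive = new ComparatorBasedComparisonStrategy(String.CASE_INSENSITIVE_ORDER);

    System.out.println(standard.areEqual("Yoda", "yoda"));        // false
    System.out.println(caseInsensitive.areEqual("Yoda", "yoda")); // true
  }
}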
diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeIn.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeIn.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeIn.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotBeIn.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a value is not in a group of values (e.g. an array or diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContain.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContain.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContain.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContain.java @@ -18,8 +18,8 @@ import java.nio.file.Path; import java.util.List; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements does not contain a given set of values diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainAtIndex.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainAtIndex.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainAtIndex.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainAtIndex.java @@ -13,8 +13,8 @@ package org.assertj.core.error; import org.assertj.core.data.Index; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements does not contain a value at a given diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java @@ -17,8 +17,8 @@ import java.util.Set; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a {@code CharSequence} does not contain another diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSequence.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import 
org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements does not contain a diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSubsequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSubsequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSubsequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainSubsequence.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements does not contains a diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWith.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWith.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWith.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWith.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements does not end with a diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase.java @@ -12,7 +12,7 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that {@link CharSequence} does not end with a diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotHaveDuplicates.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotHaveDuplicates.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotHaveDuplicates.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotHaveDuplicates.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies a group of elements is does not have duplicates failed. 
A diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWith.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWith.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWith.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWith.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements does not start with a diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase.java @@ -12,7 +12,7 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies {@link CharSequence} does not start with a diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWith.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWith.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWith.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWith.java @@ -12,8 +12,8 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies that a group of elements starts with a given value or diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWithIgnoringCase.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWithIgnoringCase.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWithIgnoringCase.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldStartWithIgnoringCase.java @@ -12,7 +12,7 @@ */ package org.assertj.core.error; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Creates an error message indicating that an assertion that verifies {@link CharSequence} starts with a given value diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Arrays.java b/assertj-core/src/main/java/org/assertj/core/internal/Arrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Arrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Arrays.java @@ -90,6 +90,9 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.api.Condition; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; import org.assertj.core.util.ArrayWrapperList; diff --git 
a/assertj-core/src/main/java/org/assertj/core/internal/Arrays2D.java b/assertj-core/src/main/java/org/assertj/core/internal/Arrays2D.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Arrays2D.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Arrays2D.java @@ -29,6 +29,7 @@ import java.lang.reflect.Array; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; import org.assertj.core.error.ShouldHaveDimensions; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/BigDecimals.java b/assertj-core/src/main/java/org/assertj/core/internal/BigDecimals.java --- a/assertj-core/src/main/java/org/assertj/core/internal/BigDecimals.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/BigDecimals.java @@ -19,6 +19,8 @@ import java.math.BigDecimal; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * Reusable assertions for <code>{@link BigDecimal}</code>s. diff --git a/assertj-core/src/main/java/org/assertj/core/internal/BigIntegers.java b/assertj-core/src/main/java/org/assertj/core/internal/BigIntegers.java --- a/assertj-core/src/main/java/org/assertj/core/internal/BigIntegers.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/BigIntegers.java @@ -12,6 +12,9 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + import static java.math.BigInteger.ONE; import static java.math.BigInteger.ZERO; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ByteArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/ByteArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ByteArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/ByteArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Bytes.java b/assertj-core/src/main/java/org/assertj/core/internal/Bytes.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Bytes.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Bytes.java @@ -12,6 +12,8 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; + /** * Reusable assertions for <code>{@link Byte}</code>s. 
* diff --git a/assertj-core/src/main/java/org/assertj/core/internal/CharArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/CharArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/CharArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/CharArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Characters.java b/assertj-core/src/main/java/org/assertj/core/internal/Characters.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Characters.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Characters.java @@ -18,6 +18,7 @@ import static org.assertj.core.error.ShouldBeUpperCase.shouldBeUpperCase; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Reusable assertions for <code>{@link Character}</code>s. diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Classes.java b/assertj-core/src/main/java/org/assertj/core/internal/Classes.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Classes.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Classes.java @@ -42,6 +42,8 @@ import java.util.SortedSet; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.util.Arrays; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Comparables.java b/assertj-core/src/main/java/org/assertj/core/internal/Comparables.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Comparables.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Comparables.java @@ -22,6 +22,9 @@ import java.util.Comparator; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.error.ErrorMessageFactory; import org.assertj.core.error.ShouldBeAfter; import org.assertj.core.error.ShouldBeAfterOrEqualTo; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ConfigurableRecursiveFieldByFieldComparator.java b/assertj-core/src/main/java/org/assertj/core/internal/ConfigurableRecursiveFieldByFieldComparator.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ConfigurableRecursiveFieldByFieldComparator.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/ConfigurableRecursiveFieldByFieldComparator.java @@ -13,7 +13,7 @@ package org.assertj.core.internal; import static java.util.Objects.requireNonNull; -import static org.assertj.core.internal.ComparatorBasedComparisonStrategy.NOT_EQUAL; +import static org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy.NOT_EQUAL; import java.util.Comparator; import java.util.Objects; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Dates.java b/assertj-core/src/main/java/org/assertj/core/internal/Dates.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Dates.java +++ 
b/assertj-core/src/main/java/org/assertj/core/internal/Dates.java @@ -55,6 +55,9 @@ import java.util.concurrent.TimeUnit; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.error.ShouldBeEqualWithTimePrecision; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/DoubleArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/DoubleArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/DoubleArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/DoubleArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Doubles.java b/assertj-core/src/main/java/org/assertj/core/internal/Doubles.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Doubles.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Doubles.java @@ -12,6 +12,9 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + import static java.lang.Math.abs; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ExtendedByTypesComparator.java b/assertj-core/src/main/java/org/assertj/core/internal/ExtendedByTypesComparator.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ExtendedByTypesComparator.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/ExtendedByTypesComparator.java @@ -12,7 +12,7 @@ */ package org.assertj.core.internal; -import static org.assertj.core.internal.ComparatorBasedComparisonStrategy.NOT_EQUAL; +import static org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy.NOT_EQUAL; import java.util.Comparator; import java.util.Objects; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/FloatArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/FloatArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/FloatArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/FloatArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Floats.java b/assertj-core/src/main/java/org/assertj/core/internal/Floats.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Floats.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Floats.java @@ -12,6 +12,9 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + import static java.lang.Math.abs; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/IntArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/IntArrays.java --- 
a/assertj-core/src/main/java/org/assertj/core/internal/IntArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/IntArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Integers.java b/assertj-core/src/main/java/org/assertj/core/internal/Integers.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Integers.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Integers.java @@ -12,6 +12,9 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + /** * Reusable assertions for <code>{@link Integer}</code>s. * diff --git a/assertj-core/src/main/java/org/assertj/core/internal/IterableDiff.java b/assertj-core/src/main/java/org/assertj/core/internal/IterableDiff.java --- a/assertj-core/src/main/java/org/assertj/core/internal/IterableDiff.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/IterableDiff.java @@ -12,6 +12,9 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + import static java.util.Collections.unmodifiableList; import static org.assertj.core.util.Lists.newArrayList; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Iterables.java b/assertj-core/src/main/java/org/assertj/core/internal/Iterables.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Iterables.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Iterables.java @@ -105,6 +105,9 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.api.Condition; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.configuration.Configuration; import org.assertj.core.error.UnsatisfiedRequirement; import org.assertj.core.error.ZippedElementsShouldSatisfy.ZipSatisfyError; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Lists.java b/assertj-core/src/main/java/org/assertj/core/internal/Lists.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Lists.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Lists.java @@ -30,6 +30,9 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.api.Condition; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/LongArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/LongArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/LongArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/LongArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import 
org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Longs.java b/assertj-core/src/main/java/org/assertj/core/internal/Longs.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Longs.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Longs.java @@ -12,6 +12,8 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; + /** * Reusable assertions for <code>{@link Long}</code>s. * diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Numbers.java b/assertj-core/src/main/java/org/assertj/core/internal/Numbers.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Numbers.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Numbers.java @@ -24,6 +24,7 @@ import java.util.Objects; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.data.Offset; import org.assertj.core.data.Percentage; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ObjectArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/ObjectArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ObjectArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/ObjectArrays.java @@ -33,6 +33,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; import org.assertj.core.api.Condition; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Objects.java b/assertj-core/src/main/java/org/assertj/core/internal/Objects.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Objects.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Objects.java @@ -67,6 +67,9 @@ import java.util.Set; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.error.GroupTypeDescription; import org.assertj.core.internal.DeepDifference.Difference; import org.assertj.core.util.introspection.FieldSupport; diff --git a/assertj-core/src/main/java/org/assertj/core/internal/RealNumbers.java b/assertj-core/src/main/java/org/assertj/core/internal/RealNumbers.java --- a/assertj-core/src/main/java/org/assertj/core/internal/RealNumbers.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/RealNumbers.java @@ -20,6 +20,7 @@ import java.math.BigDecimal; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; /** * Base class of reusable assertions for real numbers (float and double). 
diff --git a/assertj-core/src/main/java/org/assertj/core/internal/ShortArrays.java b/assertj-core/src/main/java/org/assertj/core/internal/ShortArrays.java --- a/assertj-core/src/main/java/org/assertj/core/internal/ShortArrays.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/ShortArrays.java @@ -16,6 +16,8 @@ import org.assertj.core.api.ArraySortedAssert; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.data.Index; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Shorts.java b/assertj-core/src/main/java/org/assertj/core/internal/Shorts.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Shorts.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Shorts.java @@ -12,6 +12,9 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + import static java.lang.Math.abs; /** diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Strings.java b/assertj-core/src/main/java/org/assertj/core/internal/Strings.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Strings.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Strings.java @@ -94,6 +94,8 @@ import java.util.stream.Collectors; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; /** * @author Alex Ruiz diff --git a/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java b/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java --- a/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java +++ b/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java @@ -67,7 +67,7 @@ import org.assertj.core.configuration.ConfigurationProvider; import org.assertj.core.data.MapEntry; import org.assertj.core.groups.Tuple; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.Closeables; import org.assertj.core.util.diff.ChangeDelta; import org.assertj.core.util.diff.DeleteDelta; diff --git a/assertj-guava/src/main/java/org/assertj/guava/api/OptionalAssert.java b/assertj-guava/src/main/java/org/assertj/guava/api/OptionalAssert.java --- a/assertj-guava/src/main/java/org/assertj/guava/api/OptionalAssert.java +++ b/assertj-guava/src/main/java/org/assertj/guava/api/OptionalAssert.java @@ -60,13 +60,12 @@ protected Optional<T> getActual() { * @throws AssertionError if the actual {@link Optional} is {@code null}. * @throws AssertionError if the actual {@link Optional} contains nothing or does not have the given value. */ - @SuppressWarnings("deprecation") public OptionalAssert<T> contains(final Object value) { isNotNull(); if (!actual.isPresent()) { throw assertionError(shouldBePresentWithValue(value)); } - if (!areEqual(actual.get(), value)) { + if (!getComparisonStrategy().areEqual(actual.get(), value)) { throw assertionError(shouldBePresentWithValue(actual, value)); } return this;
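The OptionalAssert change above shows the migration path for assertion subclasses: equality checks now go through getComparisonStrategy().areEqual(...) rather than the removed deprecated areEqual(Object, Object). A minimal sketch of the same pattern in a user-defined assertion; the Ticket, TicketAssert and hasOwner names are hypothetical, not part of the patch.

import org.assertj.core.api.AbstractAssert;

class Ticket {
  final String owner;
  Ticket(String owner) { this.owner = owner; }
}

class TicketAssert extends AbstractAssert<TicketAssert, Ticket> {

  TicketAssert(Ticket actual) {
    super(actual, TicketAssert.class);
  }

  static TicketAssert assertThat(Ticket actual) {
    return new TicketAssert(actual);
  }

  // delegating to the comparison strategy keeps the check consistent with the rest of AssertJ
  TicketAssert hasOwner(String expectedOwner) {
    isNotNull();
    if (!getComparisonStrategy().areEqual(actual.owner, expectedOwner)) {
      failWithMessage("Expected ticket owner to be <%s> but was <%s>", expectedOwner, actual.owner);
    }
    return this;
  }
}

Because the check goes through the strategy rather than Object.equals, a comparator registered on the assertion via usingComparator(...) is honored by hasOwner as well.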
diff --git a/assertj-core/src/test/java/org/assertj/core/api/AbstractAssert_areEqual_Test.java b/assertj-core/src/test/java/org/assertj/core/api/AbstractAssert_areEqual_Test.java deleted file mode 100644 --- a/assertj-core/src/test/java/org/assertj/core/api/AbstractAssert_areEqual_Test.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - * - * Copyright 2012-2025 the original author or authors. - */ -package org.assertj.core.api; - -import static org.assertj.core.api.BDDAssertions.then; -import static org.mockito.Answers.CALLS_REAL_METHODS; -import static org.mockito.Mockito.verify; - -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; - -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.Objects; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class AbstractAssert_areEqual_Test { - - @Mock(answer = CALLS_REAL_METHODS) - private AbstractAssert<?, Object> underTest; - - @Mock - private ComparisonStrategy comparisonStrategy; - - @Test - @SuppressWarnings("deprecation") - void should_delegate_to_ComparableAssert() { - // GIVEN - underTest.objects = new Objects(comparisonStrategy); - // WHEN - underTest.areEqual(42, 43); - // THEN - verify(comparisonStrategy).areEqual(42, 43); - } - - @Test - void should_be_protected() throws NoSuchMethodException { - // GIVEN - Method areEqual = AbstractAssert.class.getDeclaredMethod("areEqual", Object.class, Object.class); - // WHEN - boolean isProtected = Modifier.isProtected(areEqual.getModifiers()); - // THEN - then(isProtected).isTrue(); - } - -} diff --git a/assertj-core/src/test/java/org/assertj/core/api/AbstractOffsetDateTimeAssertBaseTest.java b/assertj-core/src/test/java/org/assertj/core/api/AbstractOffsetDateTimeAssertBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/api/AbstractOffsetDateTimeAssertBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/api/AbstractOffsetDateTimeAssertBaseTest.java @@ -18,7 +18,7 @@ import java.time.ZoneOffset; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.OffsetDateTimeByInstantComparator; public abstract class AbstractOffsetDateTimeAssertBaseTest extends TemporalAssertBaseTest<OffsetDateTimeAssert, OffsetDateTime> { diff --git a/assertj-core/src/test/java/org/assertj/core/api/AbstractZonedDateTimeAssertBaseTest.java b/assertj-core/src/test/java/org/assertj/core/api/AbstractZonedDateTimeAssertBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/api/AbstractZonedDateTimeAssertBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/api/AbstractZonedDateTimeAssertBaseTest.java @@ -19,7 +19,7 @@ import 
org.assertj.core.internal.ChronoZonedDateTimeByInstantComparator; import org.assertj.core.internal.Comparables; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; public abstract class AbstractZonedDateTimeAssertBaseTest extends TemporalAssertBaseTest<ZonedDateTimeAssert, ZonedDateTime> { diff --git a/assertj-core/src/test/java/org/assertj/core/api/BaseTestTemplate.java b/assertj-core/src/test/java/org/assertj/core/api/BaseTestTemplate.java --- a/assertj-core/src/test/java/org/assertj/core/api/BaseTestTemplate.java +++ b/assertj-core/src/test/java/org/assertj/core/api/BaseTestTemplate.java @@ -21,7 +21,7 @@ import java.util.Comparator; import org.assertj.core.api.abstract_.AbstractAssert_isNull_Test; import org.assertj.core.error.AssertionErrorCreator; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.internal.Conditions; import org.assertj.core.internal.Objects; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingComparatorForType_Test.java b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingComparatorForType_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingComparatorForType_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingComparatorForType_Test.java @@ -23,7 +23,7 @@ import org.assertj.core.api.AtomicReferenceArrayAssert; import org.assertj.core.api.AtomicReferenceArrayAssertBaseTest; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ExtendedByTypesComparator; import org.assertj.core.internal.ObjectArrays; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java @@ -17,9 +17,9 @@ import org.assertj.core.api.AtomicReferenceArrayAssert; import org.assertj.core.api.AtomicReferenceArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.AtomicReferenceArrayElementComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.AtomicReferenceArrayElementComparisonStrategy; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ObjectArrays; import org.assertj.core.testkit.Name; diff --git 
a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java @@ -17,9 +17,9 @@ import org.assertj.core.api.AtomicReferenceArrayAssert; import org.assertj.core.api.AtomicReferenceArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.AtomicReferenceArrayElementComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.AtomicReferenceArrayElementComparisonStrategy; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ObjectArrays; import org.assertj.core.testkit.Name; diff --git a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java @@ -23,9 +23,9 @@ import org.assertj.core.api.AtomicReferenceArrayAssert; import org.assertj.core.api.AtomicReferenceArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.AtomicReferenceArrayElementComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.AtomicReferenceArrayElementComparisonStrategy; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ObjectArrays; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java @@ -16,9 +16,9 @@ import 
org.assertj.core.api.AtomicReferenceArrayAssert; import org.assertj.core.api.AtomicReferenceArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.AtomicReferenceArrayElementComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.AtomicReferenceArrayElementComparisonStrategy; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; import org.assertj.core.internal.ObjectArrays; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsIgnoringWhitespaces_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsIgnoringWhitespaces_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsIgnoringWhitespaces_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsIgnoringWhitespaces_Test.java @@ -25,8 +25,8 @@ import java.util.Comparator; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_Pattern_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_Pattern_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_Pattern_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_Pattern_Test.java @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import org.assertj.core.api.CharSequenceAssert; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_String_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_String_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_String_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_containsPatternSatisfying_String_Test.java @@ -21,7 +21,7 @@ import java.util.function.Consumer; import org.assertj.core.api.CharSequenceAssert; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_Pattern_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_Pattern_Test.java --- 
a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_Pattern_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_Pattern_Test.java @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import org.assertj.core.api.CharSequenceAssert; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_String_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_String_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_String_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_matchesSatisfying_String_Test.java @@ -21,7 +21,7 @@ import java.util.function.Consumer; import org.assertj.core.api.CharSequenceAssert; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingComparatorForType_Test.java b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingComparatorForType_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingComparatorForType_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingComparatorForType_Test.java @@ -24,7 +24,7 @@ import org.assertj.core.api.ConcreteIterableAssert; import org.assertj.core.api.IterableAssertBaseTest; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ExtendedByTypesComparator; import org.assertj.core.internal.Iterables; import org.assertj.core.testkit.Jedi; diff --git a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java @@ -17,10 +17,10 @@ import org.assertj.core.api.ConcreteIterableAssert; import org.assertj.core.api.IterableAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.IterableElementComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.IterableElementComparisonStrategy; import org.assertj.core.internal.Iterables; import org.assertj.core.testkit.Name; import org.assertj.core.testkit.Player; diff --git 
a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java @@ -17,10 +17,10 @@ import org.assertj.core.api.ConcreteIterableAssert; import org.assertj.core.api.IterableAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.IterableElementComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.IterableElementComparisonStrategy; import org.assertj.core.internal.Iterables; import org.assertj.core.testkit.Name; import org.assertj.core.testkit.Player; diff --git a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_Test.java b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_Test.java @@ -25,10 +25,10 @@ import org.assertj.core.api.ConcreteIterableAssert; import org.assertj.core.api.IterableAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.IterableElementComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.IterableElementComparisonStrategy; import org.assertj.core.internal.Iterables; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java @@ -17,10 +17,10 @@ import org.assertj.core.api.ConcreteIterableAssert; import org.assertj.core.api.IterableAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.IterableElementComparisonStrategy; import 
org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.IterableElementComparisonStrategy; import org.assertj.core.internal.Iterables; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingComparatorForType_Test.java b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingComparatorForType_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingComparatorForType_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingComparatorForType_Test.java @@ -22,7 +22,7 @@ import org.assertj.core.api.ObjectArrayAssert; import org.assertj.core.api.ObjectArrayAssertBaseTest; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ExtendedByTypesComparator; import org.assertj.core.internal.ObjectArrays; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorIgnoringFields_Test.java @@ -17,10 +17,10 @@ import org.assertj.core.api.ObjectArrayAssert; import org.assertj.core.api.ObjectArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ObjectArrayElementComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.ObjectArrayElementComparisonStrategy; import org.assertj.core.internal.ObjectArrays; import org.assertj.core.testkit.Name; import org.assertj.core.testkit.Player; diff --git a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparatorOnFields_Test.java @@ -17,10 +17,10 @@ import org.assertj.core.api.ObjectArrayAssert; import org.assertj.core.api.ObjectArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ObjectArrayElementComparisonStrategy; import 
org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.ObjectArrayElementComparisonStrategy; import org.assertj.core.internal.ObjectArrays; import org.assertj.core.testkit.Name; import org.assertj.core.testkit.Player; diff --git a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_Test.java @@ -22,10 +22,10 @@ import org.assertj.core.api.ObjectArrayAssert; import org.assertj.core.api.ObjectArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ObjectArrayElementComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.ObjectArrayElementComparisonStrategy; import org.assertj.core.internal.ObjectArrays; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingRecursiveFieldByFieldElementComparator_with_RecursiveComparisonConfiguration_Test.java @@ -17,10 +17,10 @@ import org.assertj.core.api.ObjectArrayAssert; import org.assertj.core.api.ObjectArrayAssertBaseTest; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ObjectArrayElementComparisonStrategy; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ConfigurableRecursiveFieldByFieldComparator; -import org.assertj.core.internal.ObjectArrayElementComparisonStrategy; import org.assertj.core.internal.ObjectArrays; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java @@ -21,7 +21,7 @@ import 
org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterYear_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterYear_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterYear_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfterYear_create_Test.java @@ -20,7 +20,7 @@ import static org.assertj.core.util.DateUtil.parse; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfter_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfter_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfter_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeAfter_create_Test.java @@ -21,7 +21,7 @@ import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java @@ -20,7 +20,7 @@ import static org.assertj.core.util.DateUtil.parse; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; class ShouldBeBeforeOrEqualTo_create_Test { diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeYear_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeYear_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeYear_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBeforeYear_create_Test.java @@ -20,7 +20,7 @@ import static org.assertj.core.util.DateUtil.parse; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBefore_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBefore_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBefore_create_Test.java 
+++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBefore_create_Test.java @@ -21,7 +21,7 @@ import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBetween_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBetween_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBetween_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeBetween_create_Test.java @@ -20,7 +20,7 @@ import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java @@ -25,8 +25,8 @@ import java.util.Comparator; import org.assertj.core.description.Description; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.internal.TestDescription; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreaterOrEqual_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreaterOrEqual_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreaterOrEqual_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreaterOrEqual_create_Test.java @@ -17,7 +17,7 @@ import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreater_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreater_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreater_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeGreater_create_Test.java @@ -18,7 +18,7 @@ import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import 
org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeIn_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeIn_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeIn_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeIn_create_Test.java @@ -19,7 +19,7 @@ import org.assertj.core.description.Description; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLessOrEqual_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLessOrEqual_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLessOrEqual_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLessOrEqual_create_Test.java @@ -17,7 +17,7 @@ import static org.assertj.core.error.ShouldBeLessOrEqual.shouldBeLessOrEqual; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLess_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLess_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLess_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeLess_create_Test.java @@ -17,7 +17,7 @@ import static org.assertj.core.error.ShouldBeLess.shouldBeLess; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubsetOf_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubsetOf_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubsetOf_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubsetOf_create_Test.java @@ -17,7 +17,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubstringOf_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubstringOf_create_Test.java --- 
a/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubstringOf_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldBeSubstringOf_create_Test.java @@ -17,8 +17,8 @@ import static org.assertj.core.error.ShouldBeSubstring.shouldBeSubstring; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAnyOf_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAnyOf_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAnyOf_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAnyOf_create_Test.java @@ -19,7 +19,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAtIndex_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAtIndex_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAtIndex_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainAtIndex_create_Test.java @@ -19,7 +19,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequenceOnlyOnce_create_Test.java @@ -16,7 +16,7 @@ import static org.assertj.core.error.ShouldContainCharSequenceOnlyOnce.shouldContainOnlyOnce; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.TestDescription; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequence_create_Test.java 
+++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainCharSequence_create_Test.java @@ -23,13 +23,11 @@ import static org.assertj.core.util.Sets.set; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; -import java.util.Set; - /** * @author Alex Ruiz * @author Yvonne Wang diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactlyInAnyOrder_create_Test.java @@ -21,7 +21,7 @@ import java.util.Collections; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactly_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactly_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactly_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainExactly_create_Test.java @@ -28,9 +28,9 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.IndexedDiff; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledOnOs; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainOnly_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainOnly_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainOnly_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainOnly_create_Test.java @@ -28,7 +28,7 @@ import org.assertj.core.data.MapEntry; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.assertj.core.testkit.Jedi; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence_create_Test.java --- 
a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequenceOfCharSequence_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSequence_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequenceOfCharSequence_create_Test.java @@ -22,7 +22,7 @@ import java.util.stream.Stream; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainSubsequence_create_Test.java @@ -21,8 +21,8 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContain_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContain_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContain_create_Test.java +++ 
b/assertj-core/src/test/java/org/assertj/core/error/ShouldContain_create_Test.java @@ -29,7 +29,7 @@ import java.util.Map; import org.assertj.core.data.MapEntry; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.assertj.core.testkit.Jedi; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainsOnlyOnce_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainsOnlyOnce_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldContainsOnlyOnce_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldContainsOnlyOnce_create_Test.java @@ -20,7 +20,7 @@ import static org.assertj.core.util.Sets.newLinkedHashSet; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWithIgnoringCase_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWithIgnoringCase_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWithIgnoringCase_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWithIgnoringCase_create_Test.java @@ -18,8 +18,8 @@ import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.util.OtherStringTestComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWith_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWith_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWith_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldEndWith_create_Test.java @@ -19,7 +19,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeEqual_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeEqual_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeEqual_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeEqual_create_Test.java @@ -15,7 +15,7 @@ import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.error.ShouldNotBeEqual.shouldNotBeEqual; -import 
org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.TestDescription; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeIn_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeIn_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeIn_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotBeIn_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Arrays.array; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainAtIndex_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainAtIndex_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainAtIndex_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainAtIndex_create_Test.java @@ -19,7 +19,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainCharSequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainCharSequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainCharSequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainCharSequence_create_Test.java @@ -21,14 +21,12 @@ import static org.assertj.core.util.Sets.set; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; -import java.util.Set; - /** * @author Alex Ruiz * @author Yvonne Wang diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSequence_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import 
org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSubsequence_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSubsequence_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSubsequence_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainSubsequence_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContain_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContain_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContain_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContain_create_Test.java @@ -25,7 +25,7 @@ import java.nio.file.Path; import java.util.List; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWithIgnoringCase_create_Test.java @@ -17,8 +17,8 @@ import static org.assertj.core.error.ShouldNotEndWithIgnoringCase.shouldNotEndWithIgnoringCase; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWith_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWith_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWith_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotEndWith_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import 
org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotHaveDuplicates_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotHaveDuplicates_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotHaveDuplicates_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotHaveDuplicates_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWithIgnoringCase_create_Test.java @@ -17,8 +17,8 @@ import static org.assertj.core.error.ShouldNotStartWithIgnoringCase.shouldNotStartWithIgnoringCase; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.util.OtherStringTestComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWith_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWith_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWith_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotStartWith_create_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.presentation.StandardRepresentation; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWithIgnoringCase_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWithIgnoringCase_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWithIgnoringCase_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWithIgnoringCase_create_Test.java @@ -18,8 +18,8 @@ import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import 
org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.util.OtherStringTestComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWith_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWith_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWith_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldStartWith_create_Test.java @@ -19,7 +19,7 @@ import static org.assertj.core.util.Lists.list; import org.assertj.core.description.TextDescription; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_ComparatorBasedComparisonStrategy.java b/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_ComparatorBasedComparisonStrategy.java --- a/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_ComparatorBasedComparisonStrategy.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_ComparatorBasedComparisonStrategy.java @@ -14,6 +14,7 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; public class AbstractTest_ComparatorBasedComparisonStrategy { diff --git a/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_StandardComparisonStrategy.java b/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_StandardComparisonStrategy.java --- a/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_StandardComparisonStrategy.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/AbstractTest_StandardComparisonStrategy.java @@ -12,8 +12,10 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; + public class AbstractTest_StandardComparisonStrategy { protected static StandardComparisonStrategy standardComparisonStrategy = StandardComparisonStrategy.instance(); -} \ No newline at end of file +} diff --git a/assertj-core/src/test/java/org/assertj/core/internal/Arrays_containsAnyOf_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/Arrays_containsAnyOf_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/Arrays_containsAnyOf_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/Arrays_containsAnyOf_Test.java @@ -25,6 +25,7 @@ import static org.mockito.Mockito.verify; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; class Arrays_containsAnyOf_Test extends BaseArraysTest { diff --git a/assertj-core/src/test/java/org/assertj/core/internal/BaseArraysTest.java b/assertj-core/src/test/java/org/assertj/core/internal/BaseArraysTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/BaseArraysTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/BaseArraysTest.java @@ -15,6 +15,7 @@ import static org.assertj.core.util.Arrays.array; 
import static org.mockito.Mockito.spy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/BigDecimalsBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/BigDecimalsBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/BigDecimalsBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/BigDecimalsBaseTest.java @@ -12,6 +12,8 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; + import static org.assertj.core.util.BigDecimalComparator.BIG_DECIMAL_COMPARATOR; import java.math.BigDecimal; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/BigIntegersBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/BigIntegersBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/BigIntegersBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/BigIntegersBaseTest.java @@ -12,6 +12,8 @@ */ package org.assertj.core.internal; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; + import static org.assertj.core.util.BigIntegerComparator.BIG_INTEGER_COMPARATOR; import java.math.BigInteger; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/BooleanArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/BooleanArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/BooleanArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/BooleanArraysBaseTest.java @@ -17,6 +17,8 @@ import static org.mockito.Mockito.spy; import org.assertj.core.api.WritableAssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.BeforeEach; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ByteArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/ByteArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ByteArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ByteArraysBaseTest.java @@ -17,6 +17,8 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/BytesBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/BytesBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/BytesBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/BytesBaseTest.java @@ -14,6 +14,7 @@ import static org.mockito.Mockito.spy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/CharArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/CharArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/CharArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/CharArraysBaseTest.java 
@@ -17,6 +17,8 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveCharacterComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/CharactersBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/CharactersBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/CharactersBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/CharactersBaseTest.java @@ -14,6 +14,7 @@ import static org.mockito.Mockito.spy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveCharacterComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparablesBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparablesBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparablesBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparablesBaseTest.java @@ -16,6 +16,8 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_areEqual_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_areEqual_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_areEqual_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_areEqual_Test.java @@ -15,6 +15,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_arrayContains_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_arrayContains_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_arrayContains_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_arrayContains_Test.java @@ -16,6 +16,7 @@ import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.util.Arrays.array; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_duplicatesFrom_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_duplicatesFrom_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_duplicatesFrom_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_duplicatesFrom_Test.java @@ -19,6 +19,7 @@ import java.util.ArrayList; +import 
org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_isGreaterThan_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_isGreaterThan_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_isGreaterThan_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_isGreaterThan_Test.java @@ -18,6 +18,8 @@ import static org.mockito.Mockito.verify; import java.util.Locale; + +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableContains_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableContains_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableContains_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableContains_Test.java @@ -17,6 +17,7 @@ import java.util.List; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableRemove_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableRemove_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableRemove_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_iterableRemove_Test.java @@ -17,6 +17,7 @@ import java.util.List; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringContains_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringContains_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringContains_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringContains_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringEndsWith_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringEndsWith_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringEndsWith_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringEndsWith_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git 
a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringStartsWith_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringStartsWith_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringStartsWith_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_stringStartsWith_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_toString_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_toString_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_toString_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ComparatorBasedComparisonStrategy_toString_Test.java @@ -16,6 +16,7 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/DatesBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/DatesBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/DatesBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/DatesBaseTest.java @@ -19,6 +19,7 @@ import java.util.Date; import org.assertj.core.api.WritableAssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.util.DateUtil; import org.assertj.core.util.YearAndMonthComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/DoubleArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/DoubleArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/DoubleArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/DoubleArraysBaseTest.java @@ -17,6 +17,8 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/DoublesBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/DoublesBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/DoublesBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/DoublesBaseTest.java @@ -16,6 +16,7 @@ import static org.mockito.Mockito.spy; import org.assertj.core.api.WritableAssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/FloatArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/FloatArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/FloatArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/FloatArraysBaseTest.java @@ -17,6 +17,8 @@ import java.util.Comparator; 
+import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/FloatsBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/FloatsBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/FloatsBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/FloatsBaseTest.java @@ -16,6 +16,8 @@ import static org.mockito.Mockito.spy; import org.assertj.core.api.WritableAssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/IntArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/IntArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/IntArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/IntArraysBaseTest.java @@ -17,6 +17,8 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/IntegersBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/IntegersBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/IntegersBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/IntegersBaseTest.java @@ -14,6 +14,8 @@ import static org.mockito.Mockito.spy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/IterableDiff_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/IterableDiff_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/IterableDiff_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/IterableDiff_Test.java @@ -19,6 +19,9 @@ import java.util.List; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/IterablesBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/IterablesBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/IterablesBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/IterablesBaseTest.java @@ -20,6 +20,8 @@ import java.util.List; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.BeforeEach; 
/** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ListsBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/ListsBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ListsBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ListsBaseTest.java @@ -14,6 +14,8 @@ import static org.mockito.Mockito.spy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/LongArraysBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/LongArraysBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/LongArraysBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/LongArraysBaseTest.java @@ -17,6 +17,8 @@ import java.util.Comparator; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/LongsBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/LongsBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/LongsBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/LongsBaseTest.java @@ -16,6 +16,8 @@ import static org.mockito.Mockito.spy; import org.assertj.core.api.WritableAssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/NumbersBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/NumbersBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/NumbersBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/NumbersBaseTest.java @@ -18,6 +18,8 @@ import java.util.Comparator; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; import org.assertj.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/ObjectsBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/ObjectsBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/ObjectsBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/ObjectsBaseTest.java @@ -21,6 +21,8 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.api.Assertions; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_areEqual_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_areEqual_Test.java --- 
a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_areEqual_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_areEqual_Test.java @@ -18,6 +18,7 @@ import java.util.stream.Stream; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_arrayContains_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_arrayContains_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_arrayContains_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_arrayContains_Test.java @@ -16,6 +16,7 @@ import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.util.Arrays.array; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_duplicatesFrom_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_duplicatesFrom_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_duplicatesFrom_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_duplicatesFrom_Test.java @@ -18,6 +18,7 @@ import java.util.List; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableContains_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableContains_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableContains_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableContains_Test.java @@ -17,6 +17,7 @@ import java.util.List; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableRemove_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableRemove_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableRemove_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_iterableRemove_Test.java @@ -17,6 +17,7 @@ import java.util.List; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringContains_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringContains_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringContains_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringContains_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import 
org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringEndsWith_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringEndsWith_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringEndsWith_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringEndsWith_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringStartsWith_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringStartsWith_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringStartsWith_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StandardComparisonStrategy_stringStartsWith_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsExactlyInAnyOrder_Test.java @@ -28,7 +28,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.BooleanArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; class BooleanArrays_assertContainsExactlyInAnyOrder_Test extends BooleanArraysBaseTest { diff --git a/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsSubsequence_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsSubsequence_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsSubsequence_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/booleanarrays/BooleanArrays_assertContainsSubsequence_Test.java @@ -25,7 +25,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.BooleanArrays; import org.assertj.core.internal.BooleanArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_Test.java +++ 
b/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ByteArrays; import org.assertj.core.internal.ByteArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_with_Integer_Arguments_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_with_Integer_Arguments_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_with_Integer_Arguments_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/bytearrays/ByteArrays_assertContainsExactlyInAnyOrder_with_Integer_Arguments_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ByteArrays; import org.assertj.core.internal.ByteArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.IntArrays; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertContainsExactlyInAnyOrder_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.CharArrays; import org.assertj.core.internal.CharArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/doublearrays/DoubleArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/doublearrays/DoubleArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/doublearrays/DoubleArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/doublearrays/DoubleArrays_assertContainsExactlyInAnyOrder_Test.java @@ -26,7 +26,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.DoubleArrays; import org.assertj.core.internal.DoubleArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertContainsExactlyInAnyOrder_Test.java +++ 
b/assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertContainsExactlyInAnyOrder_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.FloatArrays; import org.assertj.core.internal.FloatArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertContainsExactlyInAnyOrder_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.IntArrays; import org.assertj.core.internal.IntArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/integers/Integers_assertGreaterThan_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/integers/Integers_assertGreaterThan_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/integers/Integers_assertGreaterThan_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/integers/Integers_assertGreaterThan_Test.java @@ -23,7 +23,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.Integers; import org.assertj.core.internal.IntegersBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsAnyOf_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsAnyOf_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsAnyOf_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsAnyOf_Test.java @@ -30,7 +30,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.Iterables; import org.assertj.core.internal.IterablesBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.testkit.Name; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsExactlyInAnyOrder_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.Iterables; import org.assertj.core.internal.IterablesBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; 
+import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsSubsequence_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsSubsequence_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsSubsequence_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsSubsequence_Test.java @@ -28,7 +28,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.Iterables; import org.assertj.core.internal.IterablesBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/assertj-core/src/test/java/org/assertj/core/internal/longarrays/LongArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/longarrays/LongArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/longarrays/LongArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/longarrays/LongArrays_assertContainsExactlyInAnyOrder_Test.java @@ -29,7 +29,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.LongArrays; import org.assertj.core.internal.LongArraysBaseTest; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-core/src/test/java/org/assertj/core/internal/objects/Objects_assertEqual_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/objects/Objects_assertEqual_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/objects/Objects_assertEqual_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/objects/Objects_assertEqual_Test.java @@ -21,7 +21,7 @@ import static org.mockito.Mockito.verify; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Objects; import org.assertj.core.internal.ObjectsBaseTest; import org.junit.jupiter.api.Test; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/object/ObjectAssert_doesNotReturn_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/object/ObjectAssert_doesNotReturn_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/object/ObjectAssert_doesNotReturn_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/object/ObjectAssert_doesNotReturn_Test.java @@ -21,7 +21,7 @@ import static org.assertj.core.util.FailureMessages.actualIsNull; import static org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.tests.core.testkit.Jedi; import org.junit.jupiter.api.Test; diff 
--git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_Test.java @@ -42,7 +42,7 @@ import org.assertj.core.api.recursive.comparison.ComparisonDifference; import org.assertj.core.api.recursive.comparison.RecursiveComparisonConfiguration; import org.assertj.core.api.recursive.comparison.RecursiveComparisonDifferenceCalculator; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.util.DoubleComparator; import org.assertj.tests.core.api.recursive.data.AlwaysEqualPerson; import org.assertj.tests.core.api.recursive.data.FriendlyPerson; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/StringsBaseTest.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/StringsBaseTest.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/StringsBaseTest.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/StringsBaseTest.java @@ -19,7 +19,7 @@ import java.util.Set; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Strings; import org.assertj.tests.core.testkit.CaseInsensitiveStringComparator; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArraysBaseTest.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArraysBaseTest.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArraysBaseTest.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArraysBaseTest.java @@ -20,11 +20,11 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.Arrays; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Conditions; import org.assertj.core.internal.Failures; import org.assertj.core.internal.ObjectArrays; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.tests.core.testkit.CaseInsensitiveStringComparator; import org.assertj.tests.core.testkit.TestData; import org.junit.jupiter.api.BeforeEach; diff --git 
a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsExactlyInAnyOrder_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsExactlyInAnyOrder_Test.java @@ -24,7 +24,7 @@ import static org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; import static org.mockito.Mockito.verify; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsSubsequence_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsSubsequence_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsSubsequence_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/objectarrays/ObjectArrays_assertContainsSubsequence_Test.java @@ -26,7 +26,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ObjectArrays; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArraysBaseTest.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArraysBaseTest.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArraysBaseTest.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArraysBaseTest.java @@ -18,10 +18,10 @@ import java.util.Comparator; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.ShortArrays; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.tests.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertContainsExactlyInAnyOrder_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertContainsExactlyInAnyOrder_Test.java --- 
a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertContainsExactlyInAnyOrder_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shortarrays/ShortArrays_assertContainsExactlyInAnyOrder_Test.java @@ -28,7 +28,7 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ShortArrays; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.junit.jupiter.api.Test; /** diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shorts/ShortsBaseTest.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shorts/ShortsBaseTest.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shorts/ShortsBaseTest.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/shorts/ShortsBaseTest.java @@ -15,10 +15,10 @@ import static org.assertj.tests.core.testkit.FieldTestUtils.writeField; import static org.mockito.Mockito.spy; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.Failures; import org.assertj.core.internal.Shorts; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.tests.core.testkit.AbsValueComparator; import org.junit.jupiter.api.BeforeEach; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsAnyOf_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsAnyOf_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsAnyOf_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsAnyOf_Test.java @@ -20,7 +20,7 @@ import static org.assertj.core.util.Arrays.array; import static org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.tests.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsIgnoringNewLines_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsIgnoringNewLines_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsIgnoringNewLines_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertContainsIgnoringNewLines_Test.java @@ -24,7 +24,7 @@ import org.assertj.core.api.AssertionInfo; import 
org.assertj.core.api.WritableAssertionInfo; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.internal.Strings; import org.assertj.tests.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotContain_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotContain_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotContain_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotContain_Test.java @@ -19,7 +19,7 @@ import static org.assertj.tests.core.testkit.TestData.someInfo; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.tests.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java @@ -18,9 +18,9 @@ import static org.assertj.core.util.FailureMessages.actualIsNull; import static org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.internal.Strings; import org.assertj.tests.core.internal.StringsBaseTest; import org.assertj.tests.core.testkit.StringHashCodeTestComparator; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java @@ -18,9 +18,9 @@ import static org.assertj.core.util.FailureMessages.actualIsNull; import static 
org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.internal.Strings; import org.assertj.tests.core.internal.StringsBaseTest; import org.assertj.tests.core.testkit.StringHashCodeTestComparator; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java @@ -19,9 +19,9 @@ import static org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.internal.Strings; import org.assertj.tests.core.internal.StringsBaseTest; import org.assertj.tests.core.testkit.StringHashCodeTestComparator; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertIsSubstringOf_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertIsSubstringOf_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertIsSubstringOf_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertIsSubstringOf_Test.java @@ -18,7 +18,7 @@ import static org.assertj.tests.core.testkit.TestData.someInfo; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.tests.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java 
+++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java @@ -18,9 +18,9 @@ import static org.assertj.tests.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.internal.ComparisonStrategy; -import org.assertj.core.internal.StandardComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparatorBasedComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.ComparisonStrategy; +import org.assertj.core.api.comparisonstrategy.StandardComparisonStrategy; import org.assertj.core.internal.Strings; import org.assertj.tests.core.internal.StringsBaseTest; import org.assertj.tests.core.testkit.StringHashCodeTestComparator; diff --git a/assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/OptionalAssert_contains_Test.java b/assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/OptionalAssert_contains_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/OptionalAssert_contains_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/OptionalAssert_contains_Test.java @@ -18,9 +18,10 @@ import static org.assertj.core.util.FailureMessages.actualIsNull; import static org.assertj.guava.api.Assertions.assertThat; -import com.google.common.base.Optional; import org.junit.jupiter.api.Test; +import com.google.common.base.Optional; + /** * @author Kornel * @author Joel Costigliola
Expose `ComparisonStrategy` for third-party usage

Currently, `ComparisonStrategy` is part of the internal package and there is no easy way to access it. This limits extension capabilities in JPMS and OSGi applications.

Exposing `ComparisonStrategy` in `AbstractAssert` would be handy for the implementation of custom assertions that want to stay in sync with the comparison strategy currently configured.

Originally triggered by https://github.com/assertj/assertj-guava/issues/70#issuecomment-1057532277.
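A minimal sketch of the kind of custom assertion this request enables, assuming the configured `ComparisonStrategy` becomes reachable from `AbstractAssert` through a protected accessor; the accessor name, the `TolkienCharacter` type and the failure message are illustrative assumptions of this sketch, not confirmed API:

```java
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.comparisonstrategy.ComparisonStrategy;

// Hypothetical domain type used only for this sketch.
class TolkienCharacter {
  final String name;

  TolkienCharacter(String name) {
    this.name = name;
  }
}

// Custom assertion that delegates equality checks to the currently configured
// comparison strategy instead of hard-coding Object.equals.
class TolkienCharacterAssert extends AbstractAssert<TolkienCharacterAssert, TolkienCharacter> {

  TolkienCharacterAssert(TolkienCharacter actual) {
    super(actual, TolkienCharacterAssert.class);
  }

  TolkienCharacterAssert hasName(String expectedName) {
    isNotNull();
    // getComparisonStrategy() stands for the accessor this issue asks for; its exact
    // name and visibility are assumptions of this sketch.
    ComparisonStrategy comparisonStrategy = getComparisonStrategy();
    if (!comparisonStrategy.areEqual(actual.name, expectedName)) {
      failWithMessage("Expected name <%s> but was <%s>", expectedName, actual.name);
    }
    return this;
  }
}
```

With such an accessor, a comparator registered on the assertion (e.g. via `usingComparator`) would automatically be honored by `hasName` instead of being silently ignored.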
Targeting 4.x, as this is a breaking change.
2025-04-09T20:27:10Z
4
assertj/assertj
3735
assertj__assertj-3735
[ "3733" ]
e5959f4db0cea3b3be39e6dcdf451da971b9ae49
diff --git a/assertj-core/src/main/java/org/assertj/core/error/BasicErrorMessageFactory.java b/assertj-core/src/main/java/org/assertj/core/error/BasicErrorMessageFactory.java --- a/assertj-core/src/main/java/org/assertj/core/error/BasicErrorMessageFactory.java +++ b/assertj-core/src/main/java/org/assertj/core/error/BasicErrorMessageFactory.java @@ -23,6 +23,7 @@ import java.util.Arrays; import java.util.Objects; + import org.assertj.core.description.Description; import org.assertj.core.presentation.Representation; import org.assertj.core.util.VisibleForTesting; diff --git a/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java b/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java --- a/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java +++ b/assertj-core/src/main/java/org/assertj/core/presentation/StandardRepresentation.java @@ -12,16 +12,18 @@ */ package org.assertj.core.presentation; -import org.assertj.core.configuration.Configuration; -import org.assertj.core.configuration.ConfigurationProvider; -import org.assertj.core.data.MapEntry; -import org.assertj.core.groups.Tuple; -import org.assertj.core.internal.ComparatorBasedComparisonStrategy; -import org.assertj.core.util.Closeables; -import org.assertj.core.util.VisibleForTesting; -import org.assertj.core.util.diff.ChangeDelta; -import org.assertj.core.util.diff.DeleteDelta; -import org.assertj.core.util.diff.InsertDelta; +import static java.lang.Integer.toHexString; +import static java.util.stream.Collectors.toList; +import static org.assertj.core.util.Arrays.isArray; +import static org.assertj.core.util.Arrays.isArrayTypePrimitive; +import static org.assertj.core.util.Arrays.isObjectArray; +import static org.assertj.core.util.Arrays.notAnArrayOfPrimitives; +import static org.assertj.core.util.DateUtil.formatAsDatetime; +import static org.assertj.core.util.DateUtil.formatAsDatetimeWithMs; +import static org.assertj.core.util.Preconditions.checkArgument; +import static org.assertj.core.util.Strings.concat; +import static org.assertj.core.util.Strings.quote; +import static org.assertj.core.util.Throwables.getStackTrace; import java.io.File; import java.io.PrintWriter; @@ -61,18 +63,16 @@ import java.util.concurrent.atomic.LongAdder; import java.util.function.Function; -import static java.lang.Integer.toHexString; -import static java.util.stream.Collectors.toList; -import static org.assertj.core.util.Arrays.isArray; -import static org.assertj.core.util.Arrays.isArrayTypePrimitive; -import static org.assertj.core.util.Arrays.isObjectArray; -import static org.assertj.core.util.Arrays.notAnArrayOfPrimitives; -import static org.assertj.core.util.DateUtil.formatAsDatetime; -import static org.assertj.core.util.DateUtil.formatAsDatetimeWithMs; -import static org.assertj.core.util.Preconditions.checkArgument; -import static org.assertj.core.util.Strings.concat; -import static org.assertj.core.util.Strings.quote; -import static org.assertj.core.util.Throwables.getStackTrace; +import org.assertj.core.configuration.Configuration; +import org.assertj.core.configuration.ConfigurationProvider; +import org.assertj.core.data.MapEntry; +import org.assertj.core.groups.Tuple; +import org.assertj.core.internal.ComparatorBasedComparisonStrategy; +import org.assertj.core.util.Closeables; +import org.assertj.core.util.VisibleForTesting; +import org.assertj.core.util.diff.ChangeDelta; +import org.assertj.core.util.diff.DeleteDelta; +import 
org.assertj.core.util.diff.InsertDelta; /** * Standard java object representation. @@ -234,6 +234,7 @@ public String toStringOf(Object object) { if (object instanceof AtomicReferenceFieldUpdater) return AtomicReferenceFieldUpdater.class.getSimpleName(); if (object instanceof File) return toStringOf((File) object); if (object instanceof Path) return fallbackToStringOf(object); + if (isUnquotedString(object)) return toUnquotedStringOf(object); if (object instanceof String) return toStringOf((String) object); if (object instanceof CharSequence) return toStringOf((CharSequence) object); if (object instanceof Character) return toStringOf((Character) object); @@ -262,6 +263,11 @@ public String toStringOf(Object object) { return fallbackToStringOf(object); } + private static boolean isUnquotedString(Object object) { + String className = object.getClass().getName(); + return className.contains("org.assertj.core") && className.contains("UnquotedString"); + } + private static boolean isInstanceOfNotOverridingToString(Object object, Class<?> type) { return type.isInstance(object) && !hasOverriddenToStringInSubclassOf(object.getClass(), type); } @@ -404,6 +410,10 @@ protected String toStringOf(String s) { return concatWithDoubleQuotes(s); } + protected String toUnquotedStringOf(Object s) { + return s.toString(); + } + protected String toStringOf(CharSequence s) { return concatWithDoubleQuotes(s); }
diff --git a/assertj-core/src/test/java/org/assertj/core/error/BasicErrorMessageFactory_unquotedString_Test.java b/assertj-core/src/test/java/org/assertj/core/error/BasicErrorMessageFactory_unquotedString_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/BasicErrorMessageFactory_unquotedString_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/BasicErrorMessageFactory_unquotedString_Test.java @@ -14,6 +14,7 @@ import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.error.BasicErrorMessageFactory.unquotedString; +import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; import org.junit.jupiter.api.Test; @@ -26,4 +27,15 @@ class BasicErrorMessageFactory_unquotedString_Test { void should_implement_toString() { then(unquotedString("some value")).hasToString("some value"); } + + @Test + void StandardRepresentation_should_not_quote_unquoted_String() { + // GIVEN + CharSequence unquotedString = unquotedString("Hello"); + // WHEN + String unquotedStringStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(unquotedString); + // THEN + then(unquotedStringStandardRepresentation).isEqualTo("Hello"); + } + } diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/presentation/StandardRepresentation_toStringOf_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/presentation/StandardRepresentation_toStringOf_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/presentation/StandardRepresentation_toStringOf_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/presentation/StandardRepresentation_toStringOf_Test.java @@ -109,9 +109,9 @@ void should_quote_CharSequence_implementation() { // GIVEN CharSequence charSequence = new StringBuilder("Hello"); // WHEN - String emptyStringStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(charSequence); + String charSequenceStandardRepresentation = STANDARD_REPRESENTATION.toStringOf(charSequence); // THEN - then(emptyStringStandardRepresentation).isEqualTo("\"Hello\""); + then(charSequenceStandardRepresentation).isEqualTo("\"Hello\""); } @Test
Unquoted string produced by BasicErrorMessageFactory is quoted by StandardRepresentation in 3.27.x

**Describe the bug**

https://github.com/assertj/assertj/pull/3617 has introduced a regression in `StandardRepresentation` when handling an `UnquotedString`. `UnquotedString` implements `CharSequence` so it's now handled as such. This results in it being wrapped in quotes.

* assertj core version: 3.27.x
* java version: Any
* test framework version: N/A
* os (if relevant): N/A

**Test case reproducing the bug**

```java
@Test
void hasNotFailedWhenFailedShouldFail() {
  assertThat(new TestErrorMessageFactory().create()).isEqualTo("alpha \"bravo\"");
}

static class TestErrorMessageFactory extends BasicErrorMessageFactory {

  public TestErrorMessageFactory() {
    super("%s %s", unquotedString("alpha"), "bravo");
  }

}
```

A possible workaround is to implement a custom `UnquotedString` class that doesn't implement `CharSequence` so that `StandardRepresentation` then falls back to calling `toString()`:

```java
static class TestErrorMessageFactory extends BasicErrorMessageFactory {

  public TestErrorMessageFactory() {
    super("%s %s", new UnquotedString("alpha"), "bravo");
  }

  private static class UnquotedString {

    private final String string;

    private UnquotedString(String string) {
      this.string = string;
    }

    @Override
    public String toString() {
      return this.string;
    }

  }

}
```

Is this a reasonable approach? I'm wondering if the loss of the various `CharSequence` methods or the lack of hashCode and equals may cause problems.
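For reference, the regression can also be observed by calling `StandardRepresentation` directly, mirroring the static imports used in the test that accompanies the fix; this is a sketch that assumes `unquotedString` is accessible from the calling test code, and the asserted value shows the regressed 3.27.0 output:

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.error.BasicErrorMessageFactory.unquotedString;
import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION;

import org.junit.jupiter.api.Test;

class UnquotedStringRepresentationTest {

  @Test
  void unquotedString_is_quoted_by_StandardRepresentation_in_3_27_0() {
    // UnquotedString implements CharSequence, so 3.27.0 routes it through the
    // CharSequence branch of toStringOf and wraps it in double quotes.
    String representation = STANDARD_REPRESENTATION.toStringOf(unquotedString("alpha"));
    assertThat(representation).isEqualTo("\"alpha\""); // before 3.27.0 this was: alpha
  }
}
```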
Good catch, thanks @wilkinsona.
2025-01-08T22:44:11Z
3.27
assertj/assertj
3724
assertj__assertj-3724
[ "3722" ]
9cf73ca09df697516a4295ee8db99520afbb116d
diff --git a/assertj-core/src/main/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonDifferenceCalculator.java b/assertj-core/src/main/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonDifferenceCalculator.java --- a/assertj-core/src/main/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonDifferenceCalculator.java +++ b/assertj-core/src/main/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonDifferenceCalculator.java @@ -17,7 +17,6 @@ import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; -import static java.util.stream.Collectors.toMap; import static java.util.stream.StreamSupport.stream; import static org.assertj.core.api.recursive.comparison.ComparisonDifference.rootComparisonDifference; import static org.assertj.core.api.recursive.comparison.DualValue.DEFAULT_ORDERED_COLLECTION_TYPES; @@ -35,6 +34,7 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -46,9 +46,11 @@ import java.util.concurrent.atomic.AtomicLongArray; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReferenceArray; +import java.util.function.Function; import java.util.regex.Pattern; +import java.util.stream.Collector; +import java.util.stream.Collectors; import java.util.stream.Stream; - import org.assertj.core.internal.DeepDifference; /** @@ -575,7 +577,7 @@ private static void compareUnorderedIterables(DualValue dualValue, ComparisonSta // It may be that expectedElement matches an actual element in a different hash bucket, to account for this, we check the // other actual elements for matches. This may result in O(n^2) complexity in the worst case. if (!expectedElementMatched) { - for (Map.Entry<Integer, ? extends List<?>> actualElementsEntry : actualElementsGroupedByHashCode.entrySet()) { + for (Entry<Integer, ? extends List<?>> actualElementsEntry : actualElementsGroupedByHashCode.entrySet()) { // avoid checking the same bucket twice if (actualElementsEntry.getKey().equals(expectedHash)) continue; Iterator<?> actualElementsIterator = actualElementsEntry.getValue().iterator(); @@ -627,6 +629,8 @@ private static <K, V> void compareSortedMap(DualValue dualValue, ComparisonState Map<?, ?> actualMap = filterIgnoredFields((Map<?, ?>) dualValue.actual, dualValue.fieldLocation, comparisonState.recursiveComparisonConfiguration); + + @SuppressWarnings("unchecked") Map<K, V> expectedMap = (Map<K, V>) filterIgnoredFields((Map<?, ?>) dualValue.expected, dualValue.fieldLocation, comparisonState.recursiveComparisonConfiguration); @@ -636,9 +640,9 @@ private static <K, V> void compareSortedMap(DualValue dualValue, ComparisonState // no need to inspect entries, maps are not equal as they don't have the same size return; } - Iterator<Map.Entry<K, V>> expectedMapEntries = expectedMap.entrySet().iterator(); - for (Map.Entry<?, ?> actualEntry : actualMap.entrySet()) { - Map.Entry<?, ?> expectedEntry = expectedMapEntries.next(); + Iterator<Entry<K, V>> expectedMapEntries = expectedMap.entrySet().iterator(); + for (Entry<?, ?> actualEntry : actualMap.entrySet()) { + Entry<?, ?> expectedEntry = expectedMapEntries.next(); // check keys are matched before comparing values as keys represents a field if (!java.util.Objects.equals(actualEntry.getKey(), expectedEntry.getKey())) { // report a missing key/field. 
@@ -683,17 +687,29 @@ private static void compareUnorderedMap(DualValue dualValue, ComparisonState com } private static Map<?, ?> filterIgnoredFields(Map<?, ?> map, FieldLocation fieldLocation, - RecursiveComparisonConfiguration recursiveComparisonConfiguration) { - Set<String> ignoredFields = recursiveComparisonConfiguration.getIgnoredFields(); - List<Pattern> ignoredFieldsRegexes = recursiveComparisonConfiguration.getIgnoredFieldsRegexes(); + RecursiveComparisonConfiguration configuration) { + Set<String> ignoredFields = configuration.getIgnoredFields(); + List<Pattern> ignoredFieldsRegexes = configuration.getIgnoredFieldsRegexes(); if (ignoredFields.isEmpty() && ignoredFieldsRegexes.isEmpty()) { return map; } return map.entrySet().stream() - .filter(e -> !recursiveComparisonConfiguration.matchesAnIgnoredField(fieldLocation.field(e.getKey().toString()))) - .filter(e -> !recursiveComparisonConfiguration.matchesAnIgnoredFieldRegex(fieldLocation.field(e.getKey() - .toString()))) - .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + .filter(e -> !configuration.matchesAnIgnoredField(fieldLocation.field(e.getKey().toString()))) + .filter(e -> !configuration.matchesAnIgnoredFieldRegex(fieldLocation.field(e.getKey().toString()))) + .collect(toMap(Entry::getKey, Entry::getValue)); + } + + // workaround for https://bugs.openjdk.org/browse/JDK-8148463 + private static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper, + Function<? super T, ? extends U> valueMapper) { + @SuppressWarnings("unchecked") + U none = (U) new Object(); + Collector<T, ?, Map<K, U>> downstream = Collectors.toMap(keyMapper, valueMapper.andThen(v -> v == null ? none : v)); + Function<Map<K, U>, Map<K, U>> finisher = map -> { + map.replaceAll((k, v) -> v == none ? null : v); + return map; + }; + return Collectors.collectingAndThen(downstream, finisher); } private static FieldLocation keyFieldLocation(FieldLocation parentFieldLocation, Object key) {
diff --git a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_ignoringFields_Test.java b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_ignoringFields_Test.java --- a/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_ignoringFields_Test.java +++ b/assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/recursive/comparison/RecursiveComparisonAssert_isEqualTo_ignoringFields_Test.java @@ -804,6 +804,22 @@ public void should_honor_ignored_fields_in_nested_map() { .isEqualTo(mapB); } + @Test + void should_honor_ignored_fields_in_map_with_null_value() { + // GIVEN + Map<String, String> actual = new HashMap<>(); + actual.put("foo", "value1"); + actual.put("bar", null); + + Map<String, String> expected = new HashMap<>(); + expected.put("foo", "value2"); + expected.put("bar", null); + // WHEN/THEN + then(actual).usingRecursiveComparison() + .ignoringFields("foo") + .isEqualTo(expected); + } + static class Data { private final InnerData innerData; private final List<InnerData> innerDataList; @@ -839,4 +855,5 @@ public String getField1() { } } } + }
`ignoringFields` on recursive comparison no longer works with maps with null values **Describe the bug** As part of solving https://github.com/assertj/assertj/issues/2988, https://github.com/assertj/assertj/commit/835c1dc3aee2348eb234f3209488fdd078702c2f changed how maps were checked for equality with a new `filterIgnoredFields` function. This function uses Java's `Collectors.toMap` to convert a stream back to a map, which internally uses `uniqKeysMapAccumulator`, which throws a `NullPointerException` if any map entry values are null. This is not ideal for a testing framework, which should be designed to handle all kinds of possible map scenarios, including ones with null entry values. Please replace this `toMap` call ([deeplink](https://github.com/assertj/assertj/commit/835c1dc3aee2348eb234f3209488fdd078702c2f#diff-f64c53f85fc4dab2341ef1f3d02beecf1347b3c9f5df1e7467782c04d6afeee4R693)) with a different function to convert to a map which allows for null values, for example this one suggested in Stack Overflow: https://stackoverflow.com/questions/24630963/nullpointerexception-in-collectors-tomap-with-null-entry-values/32648397#32648397. Thanks! * assertj core version: [3.27.0](https://github.com/assertj/assertj/releases/tag/assertj-build-3.27.0) * java version: 21 **Test case reproducing the bug** ```java @Test public void assertj_ignoringFieldsWithNullValue_shouldNotThrow() { Map<String, String> actualMap = new HashMap<>(); actualMap.put("exampleKey", "exampleValue"); actualMap.put("nullKey", null); Map<String, String> expectedMap = new HashMap<>(); expectedMap.put("exampleKey", "exampleValue"); expectedMap.put("nullKey", null); assertThat(actualMap) .usingRecursiveComparison() .ignoringFields("exampleKey") .isEqualTo(expectedMap); } ``` --- Relates to: * [JDK-8148463](https://bugs.openjdk.org/browse/JDK-8148463)
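The merged fix above works around [JDK-8148463](https://bugs.openjdk.org/browse/JDK-8148463) with a sentinel value; another option, along the lines of the Stack Overflow answer referenced in the report, is to accumulate straight into a `HashMap`, which accepts null values. A minimal sketch under that assumption (the method name is illustrative, not part of AssertJ's API):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collector;

public class NullTolerantCollectors {

  // Accumulates into a HashMap, which (unlike the default accumulator of Collectors.toMap)
  // tolerates null values. Caveat: duplicate keys are silently overwritten here, whereas
  // Collectors.toMap would throw an IllegalStateException.
  public static <T, K, U> Collector<T, ?, Map<K, U>> toMapAllowingNullValues(Function<? super T, ? extends K> keyMapper,
                                                                             Function<? super T, ? extends U> valueMapper) {
    return Collector.of(HashMap::new,
                        (map, element) -> map.put(keyMapper.apply(element), valueMapper.apply(element)),
                        (left, right) -> { left.putAll(right); return left; });
  }
}
```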
Thanks for the reproducer, @grantaveryatgfs! We'll look into it.
2025-01-03T08:46:24Z
3.27
assertj/assertj
3,325
assertj__assertj-3325
[ "3322" ]
1d96ab855fa2c70534dbd5373220c75203e7d8ee
diff --git a/assertj-core/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java b/assertj-core/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java --- a/assertj-core/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java +++ b/assertj-core/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java @@ -37,6 +37,7 @@ import java.util.OptionalDouble; import java.util.OptionalInt; import java.util.OptionalLong; +import java.util.Set; import java.util.Spliterator; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -908,6 +909,32 @@ static <ELEMENT> InstanceOfAssertFactory<List, ListAssert<ELEMENT>> list(Class<E return new InstanceOfAssertFactory<>(List.class, Assertions::<ELEMENT> assertThat); } + /** + * {@link InstanceOfAssertFactory} for a {@link Set}, assuming {@code Object} as element type. + * + * @see #set(Class) + * @since 3.26.0 + */ + @SuppressWarnings("rawtypes") // rawtypes: using Class instance + InstanceOfAssertFactory<Set, AbstractCollectionAssert<?, Collection<?>, Object, ObjectAssert<Object>>> SET = set(Object.class); + + /** + * {@link InstanceOfAssertFactory} for a {@link Set}. + * + * @param <E> the {@code Set} element type. + * @param elementType the element type instance. + * @return the factory instance. + * + * @see #SET + * @since 3.26.0 + */ + @SuppressWarnings({ "rawtypes", "unused", "unchecked", "RedundantSuppression" }) + // rawtypes+unchecked: using Class instance, unused: parameter needed for type inference. + // IntelliJ can warn that this is redundant when it is not. + static <E> InstanceOfAssertFactory<Set, AbstractCollectionAssert<?, Collection<? extends E>, E, ObjectAssert<E>>> set(Class<E> elementType) { + return new InstanceOfAssertFactory<>(Set.class, Assertions::<E> assertThat); + } + /** * {@link InstanceOfAssertFactory} for a {@link Stream}, assuming {@code Object} as element type. *
diff --git a/assertj-core/src/test/java/org/assertj/core/api/Assertions_sync_with_InstanceOfAssertFactories_Test.java b/assertj-core/src/test/java/org/assertj/core/api/Assertions_sync_with_InstanceOfAssertFactories_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/Assertions_sync_with_InstanceOfAssertFactories_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/Assertions_sync_with_InstanceOfAssertFactories_Test.java @@ -13,6 +13,7 @@ package org.assertj.core.api; import static java.util.stream.Collectors.toMap; +import static org.assertj.core.api.Assertions.as; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.from; import static org.assertj.core.api.BDDAssertions.then; @@ -27,6 +28,7 @@ import java.lang.reflect.TypeVariable; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.function.Predicate; import java.util.stream.Stream; @@ -34,19 +36,24 @@ class Assertions_sync_with_InstanceOfAssertFactories_Test extends BaseAssertionsTest { - private static final Class<?>[] FIELD_FACTORIES_IGNORED_TYPES = { - // There can be no Comparable field factory with a base type. + private static final Class<?>[] IGNORED_INPUT_TYPES = { + // There is no dedicated `assertThat`. + Set.class + }; + + private static final Class<?>[] IGNORED_ASSERT_TYPES_FOR_FIELD_FACTORIES = { + // There cannot be a `Comparable` field factory with a base type. AbstractComparableAssert.class, - // The comparison of the input GenericArrayTypes will always fail, since it verifies the inner TypeVariable which - // returns the defining Method as result of TypeVariable#getGenericDeclaration(). + // The comparison of the input `GenericArrayTypes` will always fail, since it verifies the inner `TypeVariable` + // which returns the defining `Method` as a result of `TypeVariable#getGenericDeclaration()`. ObjectArrayAssert.class, Object2DArrayAssert.class, // A field factory for an object is pointless. ObjectAssert.class, }; - private static final Class<?>[] METHOD_FACTORIES_IGNORED_TYPES = { - // The comparison of the input GenericArrayTypes will always fail, since it verifies the inner TypeVariable which - // returns the defining Method as result of TypeVariable#getGenericDeclaration(). + private static final Class<?>[] IGNORED_ASSERT_TYPES_FOR_METHOD_FACTORIES = { + // The comparison of the input `GenericArrayTypes` will always fail, since it verifies the inner `TypeVariable` + // which returns the defining `Method` as a result of `TypeVariable#getGenericDeclaration()`. 
ObjectArrayAssert.class, Object2DArrayAssert.class, }; @@ -73,7 +80,8 @@ void each_standard_assertion_with_type_parameters_should_have_an_instance_of_ass } private Map<Type, Type> findAssertThatParameterAndReturnTypes() { - return Stream.of(findMethodsWithName(Assertions.class, "assertThat", ignoredReturnTypes(FIELD_FACTORIES_IGNORED_TYPES))) + return Stream.of(findMethodsWithName(Assertions.class, "assertThat", + ignoredReturnTypes(IGNORED_ASSERT_TYPES_FOR_FIELD_FACTORIES))) .map(this::toParameterAndReturnTypeEntry) .filter(not(this::isPrimitiveTypeKey)) .collect(toMap(Entry::getKey, Entry::getValue)); @@ -87,7 +95,8 @@ private <K, V> boolean isPrimitiveTypeKey(Entry<K, V> entry) { } private Map<Type, Type> findTypedAssertThatParameterAndReturnTypes() { - return Stream.of(findMethodsWithName(Assertions.class, "assertThat", ignoredReturnTypes(METHOD_FACTORIES_IGNORED_TYPES))) + return Stream.of(findMethodsWithName(Assertions.class, "assertThat", + ignoredReturnTypes(IGNORED_ASSERT_TYPES_FOR_METHOD_FACTORIES))) .filter(this::hasTypeParameters) .map(this::toParameterAndReturnTypeEntry) .collect(toMap(Entry::getKey, Entry::getValue)); @@ -118,35 +127,40 @@ private Map<Type, Type> findFieldFactoryTypes() { .filter(not(Field::isSynthetic)) // Exclude $jacocoData - see #590 and jacoco/jacoco#168 .map(Field::getGenericType) .map(this::extractTypeParameters) + .filter(not(this::isIgnoredInputType)) .filter(not(this::isIgnoredFieldFactory)) .collect(toMap(Entry::getKey, Entry::getValue)); } private boolean isIgnoredFieldFactory(Entry<Type, Type> e) { - return isIgnoredFactory(e, FIELD_FACTORIES_IGNORED_TYPES); + return isIgnoredFactory(e, IGNORED_ASSERT_TYPES_FOR_FIELD_FACTORIES); } private Map<Type, Type> findMethodFactoryTypes() { return Stream.of(InstanceOfAssertFactories.class.getMethods()) .map(Method::getGenericReturnType) .map(this::extractTypeParameters) + .filter(not(this::isIgnoredInputType)) .filter(not(this::isIgnoredMethodFactory)) .collect(toMap(Entry::getKey, Entry::getValue)); } private boolean isIgnoredMethodFactory(Entry<Type, Type> e) { - return isIgnoredFactory(e, METHOD_FACTORIES_IGNORED_TYPES); + return isIgnoredFactory(e, IGNORED_ASSERT_TYPES_FOR_METHOD_FACTORIES); } - private boolean isIgnoredFactory(Entry<Type, Type> e, Class<?>... 
ignoredTypes) { + private boolean isIgnoredFactory(Entry<Type, Type> e, Class<?>[] ignoredTypes) { return Stream.of(ignoredTypes).anyMatch(type -> e.getValue().equals(type)); } + private boolean isIgnoredInputType(Entry<Type, Type> e) { + return Stream.of(IGNORED_INPUT_TYPES).anyMatch(type -> e.getKey().equals(type)); + } + private Entry<Type, Type> extractTypeParameters(Type type) { assertThat(type).asInstanceOf(type(ParameterizedType.class)) .returns(InstanceOfAssertFactory.class, from(ParameterizedType::getRawType)) - .extracting(ParameterizedType::getActualTypeArguments) - .asInstanceOf(ARRAY) + .extracting(ParameterizedType::getActualTypeArguments, as(ARRAY)) .hasSize(2); Type[] typeArguments = ((ParameterizedType) type).getActualTypeArguments(); return entry(normalize(typeArguments[0]), normalize(typeArguments[1])); diff --git a/assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java b/assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java --- a/assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java +++ b/assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java @@ -87,6 +87,7 @@ import static org.assertj.core.api.InstanceOfAssertFactories.PATH; import static org.assertj.core.api.InstanceOfAssertFactories.PERIOD; import static org.assertj.core.api.InstanceOfAssertFactories.PREDICATE; +import static org.assertj.core.api.InstanceOfAssertFactories.SET; import static org.assertj.core.api.InstanceOfAssertFactories.SHORT; import static org.assertj.core.api.InstanceOfAssertFactories.SHORT_2D_ARRAY; import static org.assertj.core.api.InstanceOfAssertFactories.SHORT_ARRAY; @@ -119,6 +120,7 @@ import static org.assertj.core.api.InstanceOfAssertFactories.map; import static org.assertj.core.api.InstanceOfAssertFactories.optional; import static org.assertj.core.api.InstanceOfAssertFactories.predicate; +import static org.assertj.core.api.InstanceOfAssertFactories.set; import static org.assertj.core.api.InstanceOfAssertFactories.stream; import static org.assertj.core.api.InstanceOfAssertFactories.throwable; import static org.assertj.core.api.InstanceOfAssertFactories.type; @@ -176,6 +178,7 @@ import java.util.stream.Stream; import org.assertj.core.util.Lists; +import org.assertj.core.util.Sets; import org.assertj.core.util.Strings; import org.junit.jupiter.api.Test; @@ -1135,6 +1138,26 @@ void collection_typed_factory_should_allow_collection_typed_assertions() { result.contains("Bart", "Lisa"); } + @Test + void set_factory_should_allow_collection_assertions() { + // GIVEN + Object value = Sets.set("Homer", "Marge", "Bart", "Lisa", "Maggie"); + // WHEN + AbstractCollectionAssert<?, Collection<?>, Object, ObjectAssert<Object>> result = assertThat(value).asInstanceOf(SET); + // THEN + result.contains("Bart", "Lisa"); + } + + @Test + void set_typed_factory_should_allow_collection_typed_assertions() { + // GIVEN + Object value = Sets.set("Homer", "Marge", "Bart", "Lisa", "Maggie"); + // WHEN + AbstractCollectionAssert<?, Collection<? 
extends String>, String, ObjectAssert<String>> result = assertThat(value).asInstanceOf(set(String.class)); + // THEN + result.contains("Bart", "Lisa"); + } + @Test void list_factory_should_allow_list_assertions() { // GIVEN diff --git a/assertj-core/src/test/java/org/assertj/core/api/SoftAssertions_combined_with_asInstanceOf_Test.java b/assertj-core/src/test/java/org/assertj/core/api/SoftAssertions_combined_with_asInstanceOf_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/SoftAssertions_combined_with_asInstanceOf_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/SoftAssertions_combined_with_asInstanceOf_Test.java @@ -85,6 +85,7 @@ import static org.assertj.core.api.InstanceOfAssertFactories.PATH; import static org.assertj.core.api.InstanceOfAssertFactories.PERIOD; import static org.assertj.core.api.InstanceOfAssertFactories.PREDICATE; +import static org.assertj.core.api.InstanceOfAssertFactories.SET; import static org.assertj.core.api.InstanceOfAssertFactories.SHORT; import static org.assertj.core.api.InstanceOfAssertFactories.SHORT_2D_ARRAY; import static org.assertj.core.api.InstanceOfAssertFactories.SHORT_ARRAY; @@ -99,6 +100,7 @@ import static org.assertj.core.util.Lists.list; import static org.assertj.core.util.Maps.newHashMap; import static org.assertj.core.util.Sets.newHashSet; +import static org.assertj.core.util.Sets.set; import static org.junit.jupiter.params.provider.Arguments.arguments; import java.io.ByteArrayInputStream; @@ -261,6 +263,7 @@ public static Stream<Arguments> should_work_with_any_InstanceOfFactory_source() arguments(OptionalLong.empty(), OPTIONAL_LONG), arguments(Paths.get("."), PATH), arguments((Predicate<String>) String::isEmpty, PREDICATE), + arguments(set("foo"), SET), arguments(Short.MIN_VALUE, SHORT), arguments(new short[0], SHORT_ARRAY), arguments(new short[0][0], SHORT_2D_ARRAY),
Improve interoperability of `asInstanceOf` with `Set` instances Right now, there is no nice way to call `asInstanceOf` when a `Set` instance is expected. Current solutions: ```java Object actual = Set.of("first", "second"); assertThat(actual) .asInstanceOf(InstanceOfAssertFactories.type(Set.class)) .satisfies(set -> assertThat(set).containsExactly("first", "second")); // unchecked warning assertThat(actual) .asInstanceOf(InstanceOfAssertFactories.COLLECTION) // no implicit `isInstanceOf(Set.class)` .containsExactly("first", "second"); assertThat(actual) .asInstanceOf(new InstanceOfAssertFactory<>(Set.class, Assertions::<String> assertThat)) // verbose .containsExactly("first", "second"); ``` By design, we didn't introduce any `InstanceOfAssertFactory` for types that don't have more specific assertions, i.e., there is no `AbstractSetAssert` today because it wouldn't have any added value compared to [`AbstractCollectionAssert`](https://www.javadoc.io/doc/org.assertj/assertj-core/latest/org/assertj/core/api/AbstractCollectionAssert.html). We should investigate how to improve such use cases. Triggered by: https://github.com/spring-projects/spring-framework/blob/2784f6008e97c7283b2e57e98d18a89fe8f530ae/spring-beans/src/test/java/org/springframework/beans/factory/config/YamlProcessorTests.java#L144-L153
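With the `SET` constant and the `set(Class)` factory added by the patch above, the use case becomes direct. A small usage sketch, assuming a JUnit 5 test:

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.InstanceOfAssertFactories.SET;
import static org.assertj.core.api.InstanceOfAssertFactories.set;

import java.util.Set;

import org.junit.jupiter.api.Test;

class SetFactoryUsageTest {

  @Test
  void asInstanceOf_with_set_factories() {
    Object actual = Set.of("first", "second");

    // untyped variant: implicit isInstanceOf(Set.class), elements typed as Object
    assertThat(actual).asInstanceOf(SET)
                      .containsExactlyInAnyOrder("first", "second");

    // typed variant: keeps String-typed element assertions, no unchecked warning
    assertThat(actual).asInstanceOf(set(String.class))
                      .containsExactlyInAnyOrder("first", "second");
  }
}
```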
2024-01-07T13:19:44Z
3.25
assertj/assertj
3,318
assertj__assertj-3318
[ "3314" ]
887f97b65da068e03308a6373a0c8a76912377aa
diff --git a/assertj-core/src/main/java/org/assertj/core/api/Assertions.java b/assertj-core/src/main/java/org/assertj/core/api/Assertions.java --- a/assertj-core/src/main/java/org/assertj/core/api/Assertions.java +++ b/assertj-core/src/main/java/org/assertj/core/api/Assertions.java @@ -12,7 +12,6 @@ */ package org.assertj.core.api; -import static java.lang.String.format; import static org.assertj.core.configuration.ConfigurationProvider.CONFIGURATION_PROVIDER; import static org.assertj.core.data.Percentage.withPercentage; @@ -76,7 +75,6 @@ import java.util.stream.Stream; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.assertj.core.api.ThrowableAssert.ThrowingCallableWithValue; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.filter.InFilter; @@ -98,7 +96,6 @@ import org.assertj.core.description.Description; import org.assertj.core.groups.Properties; import org.assertj.core.groups.Tuple; -import org.assertj.core.internal.Failures; import org.assertj.core.presentation.BinaryRepresentation; import org.assertj.core.presentation.HexadecimalRepresentation; import org.assertj.core.presentation.Representation; @@ -1212,26 +1209,6 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> assertThat(T a return assertThat(catchThrowable(shouldRaiseThrowable)).hasBeenThrown(); } - /** - * Similar to {@link #assertThatThrownBy(ThrowingCallable)}, but when the called code returns a value instead of - * throwing, the assertion error shows the returned value to help understand what went wrong. - * - * @param shouldRaiseThrowable The {@link ThrowingCallableWithValue} or lambda with the code that should raise the throwable. - * @return the created {@link ThrowableAssert}. - * @since 3.25.0 - */ - @CanIgnoreReturnValue - public static AbstractThrowableAssert<?, ? extends Throwable> assertThatThrownBy( - ThrowingCallableWithValue shouldRaiseThrowable) { - Object value; - try { - value = shouldRaiseThrowable.call(); - } catch (Throwable throwable) { - return assertThat(throwable); - } - throw Failures.instance().failure(format("Expecting code to raise a throwable, but it returned [%s] instead", value)); - } - /** * Allows to capture and then assert on a {@link Throwable} like {@code assertThatThrownBy(ThrowingCallable)} but this method * let you set the assertion description the same way you do with {@link AbstractAssert#as(String, Object...) as(String, Object...)}. @@ -1270,26 +1247,6 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> assertThat(T a return assertThat(catchThrowable(shouldRaiseThrowable)).as(description, args).hasBeenThrown(); } - /** - * Similar to {@link #assertThatThrownBy(ThrowingCallable, String, Object...)}, but when the called code returns a value - * instead of throwing, the assertion error shows the returned value to help understand what went wrong. - * - * @param shouldRaiseThrowable The {@link ThrowingCallableWithValue} or lambda with the code that should raise the throwable. - * @return the created {@link ThrowableAssert}. - * @since 3.25.0 - */ - @CanIgnoreReturnValue - public static AbstractThrowableAssert<?, ? extends Throwable> assertThatThrownBy(ThrowingCallableWithValue shouldRaiseThrowable, - String description, Object... 
args) { - Object value; - try { - value = shouldRaiseThrowable.call(); - } catch (Throwable throwable) { - return assertThat(throwable).as(description, args); - } - throw Failures.instance().failure(format("Expecting code to raise a throwable, but it returned [%s] instead", value)); - } - /** * Allows to capture and then assert on a {@link Throwable} (easier done with lambdas). * <p> diff --git a/assertj-core/src/main/java/org/assertj/core/api/BDDAssertions.java b/assertj-core/src/main/java/org/assertj/core/api/BDDAssertions.java --- a/assertj-core/src/main/java/org/assertj/core/api/BDDAssertions.java +++ b/assertj-core/src/main/java/org/assertj/core/api/BDDAssertions.java @@ -12,8 +12,6 @@ */ package org.assertj.core.api; -import static java.lang.String.format; - import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -74,7 +72,6 @@ import java.util.stream.Stream; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.assertj.core.api.ThrowableAssert.ThrowingCallableWithValue; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.InFilter; import org.assertj.core.api.filter.NotFilter; @@ -92,7 +89,6 @@ import org.assertj.core.description.Description; import org.assertj.core.groups.Properties; import org.assertj.core.groups.Tuple; -import org.assertj.core.internal.Failures; import org.assertj.core.presentation.BinaryRepresentation; import org.assertj.core.presentation.HexadecimalRepresentation; import org.assertj.core.presentation.Representation; @@ -1334,25 +1330,6 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> then(T actual) return assertThat(catchThrowable(shouldRaiseThrowable)).hasBeenThrown(); } - /** - * Similar to {@link #thenThrownBy(ThrowingCallable)}, but when the called code returns a value instead of - * throwing, the assertion error shows the returned value to help understand what went wrong. - * - * @param shouldRaiseThrowable The {@link ThrowingCallableWithValue} or lambda with the code that should raise the throwable. - * @return the created {@link ThrowableAssert}. - * @since 3.25.0 - */ - @CanIgnoreReturnValue - public static AbstractThrowableAssert<?, ? extends Throwable> thenThrownBy(ThrowingCallableWithValue shouldRaiseThrowable) { - Object value; - try { - value = shouldRaiseThrowable.call(); - } catch (Throwable throwable) { - return assertThat(throwable); - } - throw Failures.instance().failure(format("Expecting code to raise a throwable, but it returned [%s] instead", value)); - } - /** * Allows to capture and then assert on a {@link Throwable} like {@code thenThrownBy(ThrowingCallable)} but this method * let you set the assertion description the same way you do with {@link AbstractAssert#as(String, Object...) as(String, Object...)}. @@ -1390,26 +1367,6 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> then(T actual) return assertThat(catchThrowable(shouldRaiseThrowable)).as(description, args).hasBeenThrown(); } - /** - * Similar to {@link #thenThrownBy(ThrowingCallable, String, Object...)}, but when the called code returns a value instead of - * throwing, the assertion error shows the returned value to help understand what went wrong. - * - * @param shouldRaiseThrowable The {@link ThrowingCallableWithValue} or lambda with the code that should raise the throwable. - * @return the created {@link ThrowableAssert}. - * @since 3.25.0 - */ - @CanIgnoreReturnValue - public static AbstractThrowableAssert<?, ? 
extends Throwable> thenThrownBy(ThrowingCallableWithValue shouldRaiseThrowable, - String description, Object... args) { - Object value; - try { - value = shouldRaiseThrowable.call(); - } catch (Throwable throwable) { - return assertThat(throwable).as(description, args); - } - throw Failures.instance().failure(format("Expecting code to raise a throwable, but it returned [%s] instead", value)); - } - /** * Allows to capture and then assert on a {@link Throwable} more easily when used with Java 8 lambdas. * diff --git a/assertj-core/src/main/java/org/assertj/core/api/ThrowableAssert.java b/assertj-core/src/main/java/org/assertj/core/api/ThrowableAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/ThrowableAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/ThrowableAssert.java @@ -33,10 +33,6 @@ public interface ThrowingCallable { void call() throws Throwable; } - public interface ThrowingCallableWithValue { - Object call() throws Throwable; - } - public ThrowableAssert(ACTUAL actual) { super(actual, ThrowableAssert.class); }
diff --git a/assertj-core/src/test/java/org/assertj/core/api/Assertions_assertThatThrownBy_Test.java b/assertj-core/src/test/java/org/assertj/core/api/Assertions_assertThatThrownBy_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/Assertions_assertThatThrownBy_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/Assertions_assertThatThrownBy_Test.java @@ -14,9 +14,8 @@ import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.assertj.core.api.Assertions_catchThrowableOfType_Test.raisingException; -import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldHaveMessage.shouldHaveMessage; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; @@ -25,47 +24,63 @@ class Assertions_assertThatThrownBy_Test { @Test - void should_build_ThrowableAssert_with_runtime_exception_thrown() { + void should_work_with_runtime_exception_thrown() { + // WHEN/THEN assertThatThrownBy(codeThrowing(new IllegalArgumentException("boom"))).isInstanceOf(IllegalArgumentException.class) .hasMessage("boom"); } @Test - void should_build_ThrowableAssert_with_throwable_thrown() { + void should_work_with_throwable_thrown() { + // WHEN/THEN assertThatThrownBy(codeThrowing(new Throwable("boom"))).isInstanceOf(Throwable.class) .hasMessage("boom"); } @Test - void should_be_able_to_pass_a_description_to_assertThatThrownBy() { + void should_work_with_method_reference_having_vararg_parameter() { + // WHEN/THEN + assertThatThrownBy(Assertions_assertThatThrownBy_Test::methodThrowing).isInstanceOf(Exception.class) + .hasMessage("boom"); + } + + @Test + void should_support_description() { // GIVEN - // make assertThatThrownBy fail to verify the description afterwards - ThrowingCallable code = () -> assertThatThrownBy(raisingException("boom"), "Test %s", "code").hasMessage("bam"); + Throwable throwable = new Exception("boom"); // WHEN - AssertionError assertionError = expectAssertionError(code); + AssertionError assertionError = expectAssertionError(() -> assertThatThrownBy(codeThrowing(throwable), "Test %s", + "code").hasMessage("bam")); // THEN then(assertionError).hasMessageContaining("[Test code]"); } @Test void should_fail_if_no_throwable_was_thrown() { - // GIVEN - ThrowingCallable code = () -> {}; // WHEN - AssertionError assertionError = expectAssertionError(() -> assertThatThrownBy(code).hasMessage("boom ?")); + AssertionError assertionError = expectAssertionError(() -> assertThatThrownBy(() -> {}).hasMessage("boom ?")); // THEN then(assertionError).hasMessage(format("%nExpecting code to raise a throwable.")); } @Test - void should_fail_with_good_message_when_assertion_is_failing() { + void should_fail_with_proper_message_when_assertion_is_failing() { + // GIVEN + Throwable throwable = new Exception("boom"); // WHEN - AssertionError assertionError = expectAssertionError(() -> assertThatThrownBy(raisingException("boom")).hasMessage("bam")); + AssertionError assertionError = expectAssertionError(() -> assertThatThrownBy(codeThrowing(throwable)).hasMessage("bam")); // THEN - then(assertionError).hasMessageContainingAll("Expecting message to be:", - "\"bam\"", - "but was:", - "\"boom\""); + then(assertionError).hasMessage(shouldHaveMessage(throwable, "bam").create()); + } + + private static ThrowingCallable codeThrowing(Throwable t) { + return () 
-> { + throw t; + }; + } + + private static void methodThrowing(Object... parameters) throws Exception { + throw new Exception("boom"); } } diff --git a/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_from_ThrowingCallable_Test.java b/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_from_ThrowingCallable_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_from_ThrowingCallable_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_from_ThrowingCallable_Test.java @@ -12,10 +12,8 @@ */ package org.assertj.core.api.throwable; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.assertj.core.util.AssertionsUtil.assertThatAssertionErrorIsThrownBy; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; import org.junit.jupiter.api.Test; @@ -52,14 +50,7 @@ public void call() { // no exception } }); - }).withMessage(format("%nExpecting code to raise a throwable.")); + }).withMessage(String.format("%nExpecting code to raise a throwable.")); } - @Test - void should_fail_and_show_value_returned_by_callable_code() { - // GIVEN - ThrowingCallable code = () -> assertThatThrownBy(() -> 42); - // WHEN/THEN - assertThatAssertionErrorIsThrownBy(code).withMessage("Expecting code to raise a throwable, but it returned [42] instead"); - } } diff --git a/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_with_then_method_Test.java b/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_with_then_method_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_with_then_method_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/throwable/ThrowableAssert_built_with_then_method_Test.java @@ -14,11 +14,11 @@ import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.BDDAssertions.thenThrownBy; -import static org.assertj.core.util.AssertionsUtil.assertThatAssertionErrorIsThrownBy; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; import org.junit.jupiter.api.Test; +// TODO build two throwable assert with then and assertThat and compare them. class ThrowableAssert_built_with_then_method_Test { @Test @@ -54,11 +54,4 @@ public void call() { }).withMessage(String.format("%nExpecting code to raise a throwable.")); } - @Test - void should_fail_if_value_is_returned_by_callable_code() { - // GIVEN - ThrowingCallable code = () -> thenThrownBy(() -> 42); - // WHEN/THEN - assertThatAssertionErrorIsThrownBy(code).withMessage("Expecting code to raise a throwable, but it returned [42] instead"); - } }
Compile error due to new assertThatThrownBy **Describe the bug** Compile error in Eclipse 2023-12: The method assertThatThrownBy(ThrowableAssert.ThrowingCallable) is ambiguous for the type AmbigiousCallTest. The only change is the upgrade from 3.24.2 to 3.25. In 3.24.2 the same code was fine. * assertj core version: 3.25 **Test case reproducing the bug** Don't *run* the below code, but just *compile* it. At least ECJ doesn't like it. ```java public class AmbigiousCallTest { @Test void ambigiousCall() throws Exception { assertThatThrownBy(AmbigiousCallTest::verify) .hasMessageContaining("No objects are given"); } public static void verify(final Object... objects) { // foo } } ``` Probably related to #3043. Workaround: Extract the method reference to a local variable. The type information of that local variable is sufficient to choose one of the two methods.
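After the revert above, the `ThrowingCallableWithValue` overload is gone and the original code compiles again. On 3.25.0 itself, the ambiguity can be worked around by giving the compiler an explicit target type, as noted at the end of the report. A sketch of that workaround (like the original snippet, it is meant to show compilation, not a passing test, since `verify` never throws):

```java
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import org.assertj.core.api.ThrowableAssert.ThrowingCallable;
import org.junit.jupiter.api.Test;

public class AmbiguousCallWorkaroundTest {

  @Test
  void noLongerAmbiguous() {
    // The local variable fixes the target type, so only the
    // assertThatThrownBy(ThrowingCallable) overload applies.
    ThrowingCallable code = AmbiguousCallWorkaroundTest::verify;

    assertThatThrownBy(code).hasMessageContaining("No objects are given");
  }

  public static void verify(final Object... objects) {
    // foo
  }
}
```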
Thanks for reporting it, @Bananeweizen. We're in discussion with the team if we should revert cf06398587da23ac530b03a941e0efe6cf29aef7 as it introduced a breaking change we didn't spot before. My code also doesn't compile after upgrading to AssertJ 3.25.0, BUT @scordio I would like to mention that this is not a big deal for me. I just need to replace method reference (which is not really convenient to read): ```java assertThatThrownBy(Selenide::confirm) .isInstanceOf(AlertNotFoundError.class) ``` to a "standard" lambda (which is better readable to me): ```java assertThatThrownBy(() -> Selenide.confirm()) .isInstanceOf(AlertNotFoundError.class) ``` We got hit by this in Spring Framework as well. https://github.com/spring-projects/spring-framework/commit/ffddbb586e60ce1b66a3b962612636db9c61ed81#diff-58b8a6574dcffc60614d0727c28b36039f5bc28a04143486722d908a24ff55c7 FWIW, if you do intend to keep `ThrowingCallableWithValue`, note that most people typically call that a `ThrowingSupplier`. 😉 **Team decision**: cf06398587da23ac530b03a941e0efe6cf29aef7 should be reverted and #3043 should be evaluated again together with #1652 / #2519.
2024-01-02T21:28:12Z
3.25
assertj/assertj
3,691
assertj__assertj-3691
[ "1355" ]
62273fc8c0989c465d11814b450ac5836ee00a94
diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java --- a/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldNotContainCharSequence.java @@ -69,7 +69,7 @@ public static ErrorMessageFactory shouldNotContain(Throwable actual, CharSequenc " %s%n" + "not to contain:%n" + " %s%n" + - "but found:%n" + + "but did:%n" + "%n" + "Throwable that failed the check:%n" + "%n" + escapePercent(getStackTrace(actual)); // to avoid AssertJ default String formatting
diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainThrowable_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainThrowable_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainThrowable_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldNotContainThrowable_create_Test.java @@ -33,16 +33,16 @@ class ShouldNotContainThrowable_create_Test { @Test void should_create_error_message_with_escaping_percent() { // GIVEN - RuntimeException actual = new RuntimeException("You know nothing %"); + RuntimeException actual = new RuntimeException("You know nothing % Jon Snow"); // WHEN - String errorMessage = shouldNotContain(actual, "You know nothing % Jon Snow").create(new TestDescription("TEST")); + String errorMessage = shouldNotContain(actual, "You know nothing %").create(new TestDescription("TEST")); // THEN then(errorMessage).isEqualTo("[TEST] %n" + "Expecting throwable message:%n" + - " \"You know nothing %%\"%n" + - "not to contain:%n" + " \"You know nothing %% Jon Snow\"%n" + - "but found:%n" + + "not to contain:%n" + + " \"You know nothing %%\"%n" + + "but did:%n" + "%n" + "Throwable that failed the check:%n" + "%n%s", getStackTrace(actual)); @@ -51,19 +51,19 @@ void should_create_error_message_with_escaping_percent() { @Test void should_create_error_message_with_several_values_not_found() { // GIVEN - RuntimeException actual = new RuntimeException("You know nothing"); - String[] sequence = array("You", "know", "nothing", "Jon", "Snow"); - Set<String> notFound = newSet("Jon", "Snow"); + RuntimeException actual = new RuntimeException("You know nothing Jon Snow"); + String[] sequence = array("You", "know", "nothing"); + Set<String> found = newSet("You", "know", "nothing"); // WHEN - String errorMessage = shouldNotContain(actual, sequence, notFound).create(new TestDescription("TEST")); + String errorMessage = shouldNotContain(actual, sequence, found).create(new TestDescription("TEST")); // THEN then(errorMessage).isEqualTo("[TEST] %n" + "Expecting throwable message:%n" + - " \"You know nothing\"%n" + + " \"You know nothing Jon Snow\"%n" + "not to contain:%n" + - " [\"You\", \"know\", \"nothing\", \"Jon\", \"Snow\"]%n" + + " [\"You\", \"know\", \"nothing\"]%n" + "but found:%n" + - " [\"Jon\", \"Snow\"]%n" + + " [\"You\", \"know\", \"nothing\"]%n" + "%n" + "Throwable that failed the check:%n" + "%n%s", getStackTrace(actual));
Describe actual Throwables when ThrowableAssert fails ### Summary When some `Throwable`-related assertion fails the actual `Throwable` that failed the check is not described beyond what actually caused assertion failure (e.g. message mismatch). In non-trivial code when unexpected `Exception`s are thrown this makes debugging inconvenient. Dumping more comprehensive (class, message, stack, perhaps even recursive info about causes) info about the actual `Throwable` could ease assertion failure investigation. Relevant message factories (e.g. `ShouldHaveMessage`) already have access to actual `Throwable`. It may be tempting to use `util.Throwables.ERROR_DESCRIPTION_EXTRACTOR` to describe throwables, but a more powerful mechanism would be appreciated. Providing precise information in case of assertion failure doesn't seem to be overkill (unlike as in #864) ### Error message to improve - [x] ShouldHaveCause #2872 - [ ] ShouldHaveCauseExactlyInstance - [ ] ShouldHaveCauseInstance - [ ] ShouldHaveCauseReference - @weiyilei - [ ] ShouldHaveNoCause - [ ] ShouldHaveNoSuppressedExceptions - [x] ShouldHaveRootCauseExactlyInstance #2910 - [x] ShouldHaveRootCauseInstance #2910 - [ ] ShouldHaveSuppressedException - [ ] ShouldNotContainCharSequence - [ ] ShouldStartWith - [ ] ShouldEndWith If you are interested in contributing to this issue, select one error message, it will be assigned to you.
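The fixes done so far, including the patch above, share one pattern: the failure message ends with the stack trace of the throwable that failed the check, obtained via `org.assertj.core.util.Throwables.getStackTrace`. A rough sketch of the idea; the helper below is illustrative only and not AssertJ API:

```java
import static org.assertj.core.util.Throwables.getStackTrace;

public class ThrowableDescriptionSketch {

  // Builds a failure description ending with the actual throwable's stack trace,
  // so the real cause of the unexpected exception is visible without re-running the test.
  static String describeFailure(Throwable actual, String expectedMessageFragment) {
    return String.format("%nExpecting throwable message:%n  \"%s\"%nto contain:%n  \"%s\"%n%n"
                         + "Throwable that failed the check:%n%n%s",
                         actual.getMessage(), expectedMessageFragment, getStackTrace(actual));
  }
}
```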
do you mean printing the stack trace at the end of the error message ? can you give an example of what you would ideally have ? Example of my ideal message: ```java Expecting Throwable message: <"expected exception message"> but was: <"some other unexpected exception"> Throwable that failed the check: Exception in thread "main" java.lang.RuntimeException: some other unexpected exception at Test.functionThatIsSupposedToThrowExpectedException(Test.java:119) at Test.main(Test.java:69) Caused by: java.lang.RuntimeException: unexpected exception in recurseToGenerateSomeStack at Test.recurseToGenerateSomeStack(Test.java:133) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.recurseToGenerateSomeStack(Test.java:138) at Test.functionThatIsSupposedToThrowExpectedException(Test.java:115) ... 1 more Caused by: java.lang.RuntimeException: the root cause of test failure at Test.f(Test.java:169) at Test.e(Test.java:164) at Test.d(Test.java:159) at Test.c(Test.java:154) at Test.b(Test.java:149) at Test.a(Test.java:144) at Test.recurseToGenerateSomeStack(Test.java:129) ... 13 more ``` Such message allows to immediately learn the real cause of test failure. Fair enough, @jakubzytka would you like to contribute it? I have done it for the `hasMessage` assertion, it should also done for other throwable assertions. Submitted #1630 to do the same for `hasMessageMatching` and `hasMessageFindingMatch` assertions. Hello, is this issue still open ? I did try the various method asserting the messages in `AbstractThrowableAssert` but didn't find any case where the stacktrace is not displayed. Or can you point the specific methods to fix ? `Throwables.assertHasMessageContaining` doesn't describe the actual throwable (neither do some other functions that care about the message) Hello, I did a first pull request to fix the hasMessageContaining and hasMessageContainingAll assertions, let me know if this is correct. Next steps is to investigate and fix which methods can still be improved, here is a list to be refined of potential classes to be adapted: ``` // done in upper commits ShouldHaveMessage ShouldHaveMessageFindingMatchRegex ShouldHaveMessageMatchingRegex ShouldHaveRootCause // still need to adapt the stackTrace case ShouldContainCharSequence.shouldContain // to be adapted (maybe doesn't apply) ShouldEndWith ShouldHaveCause ShouldHaveCauseExactlyInstance ShouldHaveCauseInstance ShouldHaveCauseReference ShouldHaveNoCause ShouldHaveNoSuppressedExceptions ShouldHaveRootCauseExactlyInstance ShouldHaveRootCauseInstance ShouldHaveSuppressedException ShouldNotContainCharSequence ShouldStartWith ``` Hi, I've created a PR fixing the `hasCauseInstanceOf` and `hasCauseExactlyInstanceOf`. Please let me know if there is any problem:-). Is anybody working on this? If not, can I? thanks @weiyilei, I have assigned you `ShouldHaveCauseReference`, you can look at `ShouldHaveCauseInstance` for reference. > thanks @weiyilei, I have assigned you `ShouldHaveCauseReference`, you can look at `ShouldHaveCauseInstance` for reference. 
thank u bro, but I am really confused about what kind of reference will be required in this issue, can u give me an example plz > thanks @weiyilei, I have assigned you `ShouldHaveCauseReference`, you can look at `ShouldHaveCauseInstance` for reference. Hi bro, I still have no idea about what does the cause reference mean, as I know A.getcause() can get the throwable which lead to A, so what's its reference. :( Besides, I want to know whether this issue is still open and remains any job to be finished, thanks. Add more basic assertions for Local(Date|Time) #2541 > thanks @weiyilei, I have assigned you `ShouldHaveCauseReference`, you can look at `ShouldHaveCauseInstance` for reference. should I use getStackTrace() or getSuppressed(), or import org.openjdk.jol.vm.VM and use VM.current().addressof() ShouldHaveCauseInstance is the example > ShouldHaveCauseInstance is the example Hi, I am wondering if the content of ShouldHaveCauseReference_create_Test.java should be modified, or I should just fill the methods of ShouldHaveCauseReference according to the test content I'm not sure I understand the question, if you modify ShouldHaveCauseReference you have to write the tests that proves your changes are working and thus you would need to add test cases in ShouldHaveCauseReference_create_Test @weiyilei if you still don't understand what I'm saying it means you are not yet ready to contribute and you need a bit more experience before doing so. The AssertJ team is working on its spare time maintaining this project, it's not that we don't want to help junior devs to contribute, it's just that we don't have the time to do so. > I'm not sure I understand the question, if you modify ShouldHaveCauseReference you have to write the tests that proves your changes are working and thus you would need to add test cases in ShouldHaveCauseReference_create_Test Hi bro, I have tried my best to understand ur requirements and finished my codes following the example of CauseInstance, I have a pull request and let me know if there is something I need to improve. thanks @joel-costigliola Hi, I'm interested in this issue. Is there any available task for me to do? Hi @joel-costigliola I created a PR to update the ShouldHaveCause with Stacktrace. Please let me know if there is a problem. Hi @joel-costigliola, Thanks for reviewing my earlier PR. I have created a new one for ShouldHaveRootCauseInstance and ShouldHaveRootCauseExactlyInstance. Please let me know if there is any problem with the same.
2024-12-07T16:52:07Z
3.26
assertj/assertj
3,120
assertj__assertj-3120
[ "3113" ]
2ee04f4a24562a3091af70f815fb07da3ae149ac
diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java @@ -1622,7 +1622,7 @@ public SELF isNotEqualToIgnoringWhitespace(CharSequence expected) { * To be exact, the following rules are applied: * <ul> * <li>all leading and trailing whitespace of both actual and expected strings are ignored</li> - * <li>any remaining whitespace, appearing within either string, is collapsed to a single space before comparison</li> + * <li>any remaining whitespace (including non-breaking spaces), appearing within either string, is collapsed to a single space before comparison</li> * </ul> * <p> * Example: @@ -1634,6 +1634,7 @@ public SELF isNotEqualToIgnoringWhitespace(CharSequence expected) { * .isEqualToNormalizingWhitespace(" Game of Thrones ") * .isEqualToNormalizingWhitespace("Game of\tThrones") * .isEqualToNormalizingWhitespace("Game of Thrones"); + * .isEqualToNormalizingWhitespace("Game\u00A0of Thrones"); * * // assertions will fail * assertThat("Game of Thrones").isEqualToNormalizingWhitespace("Game ofThrones"); @@ -1657,7 +1658,7 @@ public SELF isEqualToNormalizingWhitespace(CharSequence expected) { * To be exact, the following rules are applied: * <ul> * <li>all leading and trailing whitespace of both actual and expected strings are ignored</li> - * <li>any remaining whitespace, appearing within either string, is collapsed to a single space before comparison</li> + * <li>any remaining whitespace (including non-breaking spaces), appearing within either string, is collapsed to a single space before comparison</li> * </ul> * <p> * Example: @@ -1686,11 +1687,11 @@ public SELF isNotEqualToNormalizingWhitespace(CharSequence expected) { /** * Verifies that the actual {@code CharSequence} is equal to the given one, after the punctuation - * of both strings have been normalized. + * of both strings has been normalized. 
* <p> * To be exact, the following rules are applied: * <ul> - * <li>All punctuation of actual and expected strings are ignored and whitespaces are normalized</li> + * <li>All punctuation of actual and expected strings are ignored and whitespaces (including non-breaking spaces) are normalized</li> * <li>Punctuation is any of the following character <b>{@code !"#$%&'()*+,-./:;<=>?@[\]^_`{|}~}</b></li> * </ul> * <p> diff --git a/assertj-core/src/main/java/org/assertj/core/internal/Strings.java b/assertj-core/src/main/java/org/assertj/core/internal/Strings.java --- a/assertj-core/src/main/java/org/assertj/core/internal/Strings.java +++ b/assertj-core/src/main/java/org/assertj/core/internal/Strings.java @@ -84,8 +84,10 @@ import java.io.StringReader; import java.text.Normalizer; import java.util.Base64; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; @@ -106,6 +108,16 @@ */ public class Strings { + private static final Set<Character> NON_BREAKING_SPACES; + + static { + Set<Character> nonBreakingSpaces = new HashSet<>(); + nonBreakingSpaces.add('\u00A0'); + nonBreakingSpaces.add('\u2007'); + nonBreakingSpaces.add('\u202F'); + NON_BREAKING_SPACES = Collections.unmodifiableSet(nonBreakingSpaces); + } + private static final String EMPTY_STRING = ""; private static final Strings INSTANCE = new Strings(); private static final String PUNCTUATION_REGEX = "\\p{Punct}"; @@ -385,7 +397,7 @@ private static String normalizeWhitespace(CharSequence toNormalize) { boolean lastWasSpace = true; for (int i = 0; i < toNormalize.length(); i++) { char c = toNormalize.charAt(i); - if (isWhitespace(c)) { + if (isWhitespace(c) || NON_BREAKING_SPACES.contains(c)) { if (!lastWasSpace) result.append(' '); lastWasSpace = true; } else {
diff --git a/assertj-core/src/test/java/org/assertj/core/api/CharSequenceAssertBaseTest.java b/assertj-core/src/test/java/org/assertj/core/api/CharSequenceAssertBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/api/CharSequenceAssertBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/api/CharSequenceAssertBaseTest.java @@ -16,6 +16,10 @@ import org.assertj.core.internal.Strings; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + /** * Base class for {@link CharSequenceAssert} tests. * @@ -25,6 +29,17 @@ public abstract class CharSequenceAssertBaseTest extends BaseTestTemplate<CharSequenceAssert, CharSequence> { protected Strings strings; + protected static final Set<Character> NON_BREAKING_SPACES; + + static { + Set<Character> nonBreakingSpaces = new HashSet<>(); + nonBreakingSpaces.add('\u00A0'); + nonBreakingSpaces.add('\u2007'); + nonBreakingSpaces.add('\u202F'); + + NON_BREAKING_SPACES = Collections.unmodifiableSet(nonBreakingSpaces); + } + @Override protected CharSequenceAssert create_assertions() { return new CharSequenceAssert("Yoda"); diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isEqualToNormalizingPunctuationAndWhitespace_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isEqualToNormalizingPunctuationAndWhitespace_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isEqualToNormalizingPunctuationAndWhitespace_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isEqualToNormalizingPunctuationAndWhitespace_Test.java @@ -12,10 +12,17 @@ */ package org.assertj.core.api.charsequence; -import static org.mockito.Mockito.verify; - import org.assertj.core.api.CharSequenceAssert; import org.assertj.core.api.CharSequenceAssertBaseTest; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.params.provider.Arguments.arguments; +import static org.mockito.Mockito.verify; /** * Created by harisha talanki on 2/29/20 @@ -32,4 +39,16 @@ protected void verify_internal_effects() { verify(strings).assertEqualsNormalizingPunctuationAndWhitespace(getInfo(assertions), getActual(assertions), "Game of Thrones"); } + + @ParameterizedTest + @MethodSource("notEqualToNormalizingWhiteSpaceGenerator") + void should_pass_if_actual_is_equal_normalizing_breaking_spaces(String actual, String expected) { + assertThat(actual).isEqualToNormalizingPunctuationAndWhitespace(expected); + } + + public static Stream<Arguments> notEqualToNormalizingWhiteSpaceGenerator() { + return NON_BREAKING_SPACES.stream() + .map(nonBreakingSpace -> arguments("my" + nonBreakingSpace + + "foo bar", "my foo bar")); + } } diff --git a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isNotEqualToNormalizingWhitespace_Test.java b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isNotEqualToNormalizingWhitespace_Test.java --- a/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isNotEqualToNormalizingWhitespace_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isNotEqualToNormalizingWhitespace_Test.java @@ -12,10 +12,19 @@ */ package 
org.assertj.core.api.charsequence; -import static org.mockito.Mockito.verify; - import org.assertj.core.api.CharSequenceAssert; import org.assertj.core.api.CharSequenceAssertBaseTest; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.assertj.core.error.ShouldNotBeEqualNormalizingWhitespace.shouldNotBeEqualNormalizingWhitespace; +import static org.junit.jupiter.params.provider.Arguments.arguments; +import static org.mockito.Mockito.verify; /** * Tests for <code>{@link org.assertj.core.api.CharSequenceAssert#isNotEqualToNormalizingWhitespace(CharSequence)}</code>. @@ -33,4 +42,19 @@ protected CharSequenceAssert invoke_api_method() { protected void verify_internal_effects() { verify(strings).assertNotEqualsNormalizingWhitespace(getInfo(assertions), getActual(assertions), " my foo bar "); } + + @ParameterizedTest + @MethodSource("notEqualToNormalizingWhiteSpaceGenerator") + void should_fail_if_actual_is_equal_normalizing_breaking_spaces(String actual, String expected) { + assertThatExceptionOfType(AssertionError.class) + .isThrownBy(() -> assertThat(actual).isNotEqualToNormalizingWhitespace(expected)) + .withMessage(shouldNotBeEqualNormalizingWhitespace(actual, + expected).create()); + } + + public static Stream<Arguments> notEqualToNormalizingWhiteSpaceGenerator() { + return NON_BREAKING_SPACES.stream() + .map(nonBreakingSpace -> arguments("my" + nonBreakingSpace + + "foo bar", "my foo bar")); + } } diff --git a/assertj-core/src/test/java/org/assertj/core/internal/StringsBaseTest.java b/assertj-core/src/test/java/org/assertj/core/internal/StringsBaseTest.java --- a/assertj-core/src/test/java/org/assertj/core/internal/StringsBaseTest.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/StringsBaseTest.java @@ -15,6 +15,10 @@ import static org.assertj.core.test.TestData.someInfo; import static org.mockito.Mockito.spy; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + import org.assertj.core.api.AssertionInfo; import org.assertj.core.test.CaseInsensitiveStringComparator; import org.junit.jupiter.api.BeforeEach; @@ -38,6 +42,17 @@ public class StringsBaseTest { protected ComparatorBasedComparisonStrategy comparisonStrategy; protected Strings stringsWithCaseInsensitiveComparisonStrategy; + protected static final Set<Character> NON_BREAKING_SPACES; + + static { + Set<Character> nonBreakingSpaces = new HashSet<>(); + nonBreakingSpaces.add('\u00A0'); + nonBreakingSpaces.add('\u2007'); + nonBreakingSpaces.add('\u202F'); + + NON_BREAKING_SPACES = Collections.unmodifiableSet(nonBreakingSpaces); + } + @BeforeEach public void setUp() { failures = spy(new Failures()); diff --git a/assertj-core/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsNormalizingWhitespace_Test.java b/assertj-core/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsNormalizingWhitespace_Test.java --- a/assertj-core/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsNormalizingWhitespace_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsNormalizingWhitespace_Test.java @@ -13,12 +13,14 @@ package org.assertj.core.internal.strings; import static java.lang.String.format; +import static 
java.util.stream.Stream.concat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatNullPointerException; import static org.assertj.core.error.ShouldBeEqualNormalizingWhitespace.shouldBeEqualNormalizingWhitespace; import static org.assertj.core.internal.ErrorMessages.charSequenceToLookForIsNull; import static org.assertj.core.test.CharArrays.arrayOf; import static org.assertj.core.test.TestData.someInfo; +import static org.junit.jupiter.params.provider.Arguments.arguments; import java.util.stream.Stream; @@ -29,8 +31,6 @@ import org.junit.jupiter.params.provider.MethodSource; /** - * Tests for <code>{@link org.assertj.core.internal.Strings#assertEqualsNormalizingWhitespace(org.assertj.core.api.AssertionInfo, CharSequence, CharSequence)} </code>. - * * @author Alex Ruiz * @author Joel Costigliola * @author Alexander Bischof @@ -69,17 +69,26 @@ void should_pass_if_both_Strings_are_equal_after_whitespace_is_normalized(String strings.assertEqualsNormalizingWhitespace(someInfo(), actual, expected); } - public static Stream<Arguments> equalNormalizingWhitespaceGenerator() { - return Stream.of(Arguments.of("my foo bar", "my foo bar"), - Arguments.of(" my foo bar ", "my foo bar"), - Arguments.of(" my\tfoo bar ", " my foo bar"), - Arguments.of(" my foo bar ", "my foo bar"), - Arguments.of(" my foo bar ", " my foo bar "), - Arguments.of(" ", " "), - Arguments.of(" my\tfoo bar ", new String(arrayOf(' ', 'm', 'y', ' ', 'f', 'o', 'o', ' ', 'b', 'a', 'r'))), - Arguments.of(" my\tfoo bar ", " my\tfoo bar "), // same - Arguments.of(null, null), // null - Arguments.of(" \t \t", " "), - Arguments.of(" abc", "abc ")); + static Stream<Arguments> equalNormalizingWhitespaceGenerator() { + Stream<Arguments> regularWhiteSpaces = Stream.of(arguments("my foo bar", "my foo bar"), + arguments(" my foo bar ", "my foo bar"), + arguments(" my\tfoo bar ", " my foo bar"), + arguments(" my foo bar ", "my foo bar"), + arguments(" my foo bar ", " my foo bar "), + arguments(" ", " "), + arguments(" my\tfoo bar ", + new String(arrayOf(' ', 'm', 'y', ' ', 'f', 'o', 'o', ' ', 'b', + 'a', 'r'))), + arguments(" my\tfoo bar ", " my\tfoo bar "), // same + arguments(null, null), // null + arguments(" \t \t", " "), + arguments(" abc", "abc ")); + + Stream<Arguments> nonBreakingSpaces = NON_BREAKING_SPACES.stream() + .map(nonBreakingSpace -> arguments("my" + nonBreakingSpace + + "foo bar", "my foo bar")); + + return concat(regularWhiteSpaces, nonBreakingSpaces); } + }
isEqualToNormalizingWhitespace doesn't normalise non-breaking space characters

**Describe the bug**

isEqualToNormalizingWhitespace doesn't normalise non-breaking space characters

* assertj core version: 3.21
* java version: 17
* Kotlin version: 1.9.0
* test framework version: JUnit 5.9.1
* os (if relevant):

**Compare 2 strings with one having a [NBSP](https://kotlinlang.org/api/latest/jvm/stdlib/kotlin.text/-typography/nbsp.html) character instead of a normal space**

```Kotlin
val result = "10,00 € to 100NBSP000,00 € per year"
val expected = "10,00 € to 100 000,00 € per year"
assertThat(result).isEqualToNormalizingWhitespace(expected)
```
That's a bit weird since the [implementation](https://github.com/assertj/assertj/blob/776b46c514c4336c58a643fb08bf61a1ac6cb9b3/assertj-core/src/main/java/org/assertj/core/internal/Strings.java#L455) detects whitespace characters with the JDK `Character` method [isWhitespace](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Character.html#isWhitespace(char)), which includes non-breaking space characters.

Could you attach a test case in a PR? The reason is that a test in a GitHub comment may or may not honor the non-breaking space characters when we copy it to our codebase. Thanks!

@joel-costigliola the documentation of the `isWhitespace` method seems to indicate that the non-breaking spaces are not considered regular whitespace. I've added this test case locally, which seems to confirm that: `should_pass_if_both_Strings_are_equal_after_whitespace_is_normalized`

![image](https://github.com/assertj/assertj/assets/27780296/fd328e59-6e16-4b9b-b625-938321c71193)

I've also noticed that the `isNotBlank` assertion is `false` if a string is filled with non-breaking spaces because it uses the `isWhitespace` method as well (I think that's the expected behavior because some unit tests have been specifically added). I think that the non-breaking spaces (`'\u00A0'`, `'\u2007'`, `'\u202F'`) should be replaced in the `normalizeWhitespace` method.

Is it ok if I work on this issue? I think this might be a good first issue 🙂
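Since the non-breaking characters are easy to lose in copy-paste, a copy-paste-safe reproduction plus a sketch of the suggested normalization may help. This is a minimal sketch only, assuming the fix folds U+00A0, U+2007 and U+202F the same way as regular whitespace; the class and method names are made up for illustration and this is not the actual AssertJ implementation:

```java
import static org.assertj.core.api.Assertions.assertThat;

// class and method names are hypothetical, for illustration only
class NonBreakingSpaceSketch {

  // the failing case from the issue, with the NBSP written as a unicode escape so it survives copy-paste
  void reproducesTheIssue() {
    String result = "10,00 € to 100\u00A0000,00 € per year";
    String expected = "10,00 € to 100 000,00 € per year";
    assertThat(result).isEqualToNormalizingWhitespace(expected); // fails on 3.21
  }

  // one possible normalization that also folds the non-breaking spaces (U+00A0, U+2007, U+202F);
  // collapses every whitespace run to a single space and trims the ends
  static String normalizeWhitespace(CharSequence toNormalize) {
    StringBuilder normalized = new StringBuilder(toNormalize.length());
    boolean lastWasSpace = true;
    for (int i = 0; i < toNormalize.length(); i++) {
      char c = toNormalize.charAt(i);
      boolean nonBreakingSpace = c == '\u00A0' || c == '\u2007' || c == '\u202F';
      if (Character.isWhitespace(c) || nonBreakingSpace) {
        if (!lastWasSpace) normalized.append(' ');
        lastWasSpace = true;
      } else {
        normalized.append(c);
        lastWasSpace = false;
      }
    }
    return normalized.toString().trim();
  }
}
```

With such a normalization, both sides of the example collapse to `"10,00 € to 100 000,00 € per year"` and the assertion passes.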
2023-07-24T09:02:08Z
3.24
assertj/assertj
3,056
assertj__assertj-3056
[ "3048" ]
9902a7ac2f3d1002dafd70fa1e79ffa1416f571f
diff --git a/assertj-core/src/main/java/org/assertj/core/api/AbstractClassAssert.java b/assertj-core/src/main/java/org/assertj/core/api/AbstractClassAssert.java --- a/assertj-core/src/main/java/org/assertj/core/api/AbstractClassAssert.java +++ b/assertj-core/src/main/java/org/assertj/core/api/AbstractClassAssert.java @@ -28,6 +28,7 @@ import static org.assertj.core.error.ShouldBeInterface.shouldNotBeInterface; import static org.assertj.core.error.ShouldBeRecord.shouldBeRecord; import static org.assertj.core.error.ShouldBeRecord.shouldNotBeRecord; +import static org.assertj.core.error.ShouldHaveNoPackage.shouldHaveNoPackage; import static org.assertj.core.error.ShouldHaveNoSuperclass.shouldHaveNoSuperclass; import static org.assertj.core.error.ShouldHavePackage.shouldHavePackage; import static org.assertj.core.error.ShouldHaveRecordComponents.shouldHaveRecordComponents; @@ -655,8 +656,8 @@ public SELF hasAnnotation(Class<? extends Annotation> annotation) { /** * Verifies that the actual {@code Class} has the given class as direct superclass (as in {@link Class#getSuperclass()}). * <p> - * The {@code superclass} should always be not {@code null}, use {@link #hasNoSuperclass()} to verify the absence of - * the superclass. + * The expected {@code superclass} should always be not {@code null}. To verify the absence of the superclass, use + * {@link #hasNoSuperclass()}. * <p> * Example: * <pre><code class='java'> // this assertion succeeds: @@ -967,7 +968,9 @@ public SELF hasPublicMethods(String... methodNames) { /** * Verifies that the actual {@code Class} has the given package name (as in {@link Class#getPackage()}). - * + * <p> + * The expected package name should always be not {@code null}. To verify the absence of the package, use + * {@link #hasNoPackage()}. * <p> * Example: * <pre><code class='java'> package one.two; @@ -1005,7 +1008,9 @@ private void assertHasPackage(String packageName) { /** * Verifies that the actual {@code Class} has the given package (as in {@link Class#getPackage()}). - * + * <p> + * The expected package should always be not {@code null}. To verify the absence of the package, use + * {@link #hasNoPackage()}. * <p> * Example: * <pre><code class='java'> package one.two; @@ -1027,6 +1032,8 @@ private void assertHasPackage(String packageName) { * @throws AssertionError if the actual {@code Class} does not have the given package. * * @since 3.18.0 + * @see #hasPackage(String) + * @see #hasNoPackage() */ public SELF hasPackage(Package expected) { isNotNull(); @@ -1039,4 +1046,39 @@ private void assertHasPackage(Package expected) { if (!expected.equals(actual.getPackage())) throw assertionError(shouldHavePackage(actual, expected)); } + /** + * Verifies that the actual {@code Class} has no package (as in {@link Class#getPackage()}, when {@code null} + * is returned). + * <p> + * Example: + * <pre><code class='java'> // this assertion succeeds as arrays have no package: + * assertThat(int[].class).hasNoPackage(); + * + * // this assertion succeeds as primitive types have no package: + * assertThat(Integer.TYPE).hasNoPackage(); + * + * // this assertion succeeds as void type has no package: + * assertThat(Void.TYPE).hasNoPackage(); + * + * // this assertion fails as Object has java.lang as package: + * assertThat(Object.class).hasNoPackage();</code></pre> + * + * @return {@code this} assertions object + * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if the actual {@code Class} has a package. 
+ * + * @since 3.25.0 + * @see #hasPackage(Package) + * @see #hasPackage(String) + */ + public SELF hasNoPackage() { + isNotNull(); + assertHasNoPackage(); + return myself; + } + + private void assertHasNoPackage() { + if (actual.getPackage() != null) throw assertionError(shouldHaveNoPackage(actual)); + } + } diff --git a/assertj-core/src/main/java/org/assertj/core/error/ShouldHaveNoPackage.java b/assertj-core/src/main/java/org/assertj/core/error/ShouldHaveNoPackage.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/main/java/org/assertj/core/error/ShouldHaveNoPackage.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2023 the original author or authors. + */ +package org.assertj.core.error; + +import java.util.StringJoiner; + +/** + * Creates an error message indicating that an assertion that verifies that a class has no package failed. + */ +public class ShouldHaveNoPackage extends BasicErrorMessageFactory { + + private static final String SHOULD_HAVE_NO_PACKAGE = new StringJoiner("%n", "%n", "").add("Expecting") + .add(" %s") + .add("to have no package, but had:") + .add(" %s") + .toString(); + + /** + * Creates a new <code>{@link ShouldHaveNoPackage}</code>. + * + * @param actual the actual value in the failed assertion. + * @return the created {@code ErrorMessageFactory}. + */ + public static ErrorMessageFactory shouldHaveNoPackage(Class<?> actual) { + return new ShouldHaveNoPackage(actual); + } + + private ShouldHaveNoPackage(Class<?> actual) { + super(SHOULD_HAVE_NO_PACKAGE, actual, actual.getSuperclass()); + } + +}
diff --git a/assertj-core/src/test/java/org/assertj/core/api/classes/ClassAssert_hasNoPackage_Test.java b/assertj-core/src/test/java/org/assertj/core/api/classes/ClassAssert_hasNoPackage_Test.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/test/java/org/assertj/core/api/classes/ClassAssert_hasNoPackage_Test.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2023 the original author or authors. + */ +package org.assertj.core.api.classes; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldHaveNoPackage.shouldHaveNoPackage; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.assertj.core.util.FailureMessages.actualIsNull; + +import java.util.stream.Stream; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +class ClassAssert_hasNoPackage_Test { + + @Test + void should_fail_if_actual_is_null() { + // GIVEN + Class<?> actual = null; + // WHEN + AssertionError assertionError = expectAssertionError(() -> assertThat(actual).hasNoPackage()); + // THEN + then(assertionError).hasMessage(actualIsNull()); + } + + @Test + void should_fail_if_actual_has_a_package() { + // GIVEN + Class<?> actual = Object.class; + // WHEN + AssertionError assertionError = expectAssertionError(() -> assertThat(actual).hasNoPackage()); + // THEN + then(assertionError).hasMessage(shouldHaveNoPackage(actual).create()); + } + + @ParameterizedTest + @MethodSource("nullPackageTypes") + void should_pass_if_actual_has_no_package(Class<?> actual) { + // WHEN/THEN + assertThat(actual).hasNoPackage(); + } + + private static Stream<Class<?>> nullPackageTypes() { + return Stream.of(int[].class, // any array + Boolean.TYPE, + Byte.TYPE, + Character.TYPE, + Double.TYPE, + Float.TYPE, + Integer.TYPE, + Long.TYPE, + Short.TYPE, + Void.TYPE); + } + +} diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoPackage_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoPackage_create_Test.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoPackage_create_Test.java @@ -0,0 +1,37 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2023 the original author or authors. 
+ */ +package org.assertj.core.error; + +import static java.lang.String.format; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldHaveNoPackage.shouldHaveNoPackage; +import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; + +import org.assertj.core.internal.TestDescription; +import org.junit.jupiter.api.Test; + +class ShouldHaveNoPackage_create_Test { + + @Test + void should_create_error_message() { + // WHEN + String message = shouldHaveNoPackage(String.class).create(new TestDescription("TEST"), STANDARD_REPRESENTATION); + // THEN + then(message).isEqualTo(format("[TEST] %n" + + "Expecting%n" + + " java.lang.String%n" + + "to have no package, but had:%n" + + " java.lang.Object")); + } + +} diff --git a/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoSuperclass_create_Test.java b/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoSuperclass_create_Test.java --- a/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoSuperclass_create_Test.java +++ b/assertj-core/src/test/java/org/assertj/core/error/ShouldHaveNoSuperclass_create_Test.java @@ -17,18 +17,9 @@ import static org.assertj.core.error.ShouldHaveNoSuperclass.shouldHaveNoSuperclass; import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; -import org.assertj.core.description.Description; import org.assertj.core.internal.TestDescription; -import org.assertj.core.presentation.Representation; -import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -/** - * Tests for <code>{@link ShouldHaveNoSuperclass#create(Description, Representation)}</code>. - * - * @author Stefano Cordio - */ -@DisplayName("ShouldHaveNoSuperclass create") class ShouldHaveNoSuperclass_create_Test { @Test
`hasNoPackage()` assertion for arrays and primitive types

**Describe the bug**

[`Class::getPackage`](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/lang/Class.html#getPackage()) mentions:

> If this class represents an array type, a primitive type or void, this method returns `null`.

However, [`hasPackage(Package)`](https://www.javadoc.io/doc/org.assertj/assertj-core/latest/org/assertj/core/api/AbstractClassAssert.html#hasPackage(java.lang.Package)) does not allow a `null` parameter.

**Test case reproducing the bug**

```java
assertThat(int[].class.getPackage()).isNull();   // succeeds
assertThat(Integer.TYPE.getPackage()).isNull();  // succeeds
assertThat(Void.TYPE.getPackage()).isNull();     // succeeds

Package expected = null;
assertThat(int[].class).hasPackage(expected);    // fails with NPE
assertThat(Integer.TYPE).hasPackage(expected);   // fails with NPE
assertThat(Void.TYPE).hasPackage(expected);      // fails with NPE
```
Rather than changing the behavior of `hasPackage(Package)` and potentially `hasPackage(String)`, we should probably add a new `hasNoPackage()` assertion, in the style of [`hasNoSuperclass()`](https://www.javadoc.io/doc/org.assertj/assertj-core/latest/org/assertj/core/api/AbstractClassAssert.html#hasNoSuperclass()).
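For completeness, a short usage sketch of that suggestion, relying on the `hasNoPackage()` assertion introduced in the patch above; the test class and method names here are made up for illustration:

```java
import static org.assertj.core.api.Assertions.assertThat;

import org.junit.jupiter.api.Test;

// hypothetical test class showing how the new assertion reads
class HasNoPackageUsageSketch {

  @Test
  void array_primitive_and_void_types_have_no_package() {
    // Class::getPackage returns null for these types, so hasNoPackage() passes
    assertThat(int[].class).hasNoPackage();
    assertThat(Integer.TYPE).hasNoPackage();
    assertThat(Void.TYPE).hasNoPackage();

    // a regular class keeps its package, so hasPackage(String) remains the right call
    assertThat(Object.class).hasPackage("java.lang");
  }
}
```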
2023-05-21T09:55:41Z
3.24
assertj/assertj
2,726
assertj__assertj-2726
[ "2692" ]
01e395a70ca4166ecec562efe0992b7282e45194
diff --git a/assertj-core/src/main/java/org/assertj/core/condition/NestableCondition.java b/assertj-core/src/main/java/org/assertj/core/condition/NestableCondition.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/main/java/org/assertj/core/condition/NestableCondition.java @@ -0,0 +1,179 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2022 the original author or authors. + */ +package org.assertj.core.condition; + +import static java.util.Arrays.stream; +import static java.util.stream.Collectors.toList; + +import java.util.List; +import java.util.function.Function; +import java.util.stream.Stream; + +import org.assertj.core.api.Condition; +import org.assertj.core.description.Description; + +/** + * Building block to define a precise soft assertion about a complex object. + * It allows to create readable assertions and produces beautiful assertion error messages. + * <p> + * Example: + * <pre><code class='java'> class Customer { + * final String name; + * final Address address; + * + * Customer(String name, Address address) { + * this.name = name; + * this.address = address; + * } + * } + * + * class Address { + * final String firstLine; + * final String postcode; + * + * Address(String firstLine, String postcode) { + * this.firstLine = firstLine; + * this.postcode = postcode; + * } + * } + * + * static Condition&lt;Customer&gt; name(String expected) { + * return new Condition&lt;&gt;( + * it -> expected.equals(it.name), + * "name: " + expected + * ); + * } + * + * static Condition&lt;Customer&gt; customer(Condition&lt;Customer&gt;... conditions) { + * return nestable("person", conditions); + * } + * + * static Condition&lt;Address&gt; firstLine(String expected) { + * return new Condition&lt;&gt;( + * it -> expected.equals(it.firstLine), + * "first line: " + expected + * ); + * } + * + * static Condition&lt;Address&gt; postcode(String expected) { + * return new Condition&lt;&gt;( + * it -> expected.equals(it.postcode), + * "postcode: " + expected + * ); + * } + * + * static Condition&lt;Customer&gt; address(Condition&lt;Address&gt;... conditions) { + * return nestable( + * "address", + * customer -> customer.address, + * conditions + * ); + * }</code></pre> + * + * And assertions can be written like: + * <pre><code class='java'> assertThat(customer).is( + * customer( + * name("John"), + * address( + * firstLine("3"), + * postcode("KM3 8SP") + * ) + * ) + * ); </code></pre> + * which leads to an easy-to-read assertion error: + * <pre><code class='text'> Expecting actual: + * org.assertj.core.condition.Customer@27ff5d15 + * to be: + * [✗] person:[ + * [✓] name: John, + * [✗] address:[ + * [✗] first line: 3, + * [✓] postcode: KM3 8SP + * ] + * ]</code></pre> + * For an even better assertion error, see <code>{@link VerboseCondition}</code>. 
+ * + * @param <ACTUAL> the type of object this condition accepts ({@literal Customer} in the example) + * @param <NESTED> the type of object nested into {@literal ACTUAL} ({@literal Address} in the example) + * + * @author Alessandro Ciccimarra + */ +public class NestableCondition<ACTUAL, NESTED> extends Join<ACTUAL> { + private final String descriptionPrefix; + + /** + * Creates a new <code>{@link NestableCondition}</code> + * @param descriptionPrefix the prefix to use to build the description + * @param extractor a function to extract the nested object of type {@literal T} from an object fo type {@literal K} + * @param conditions conditions to be checked + * @return the nestable condition + * @param <ACTUAL> the type of object the resulting condition accepts + * @param <NESTED> the type of object nested into {@literal K} + */ + @SafeVarargs + public static <ACTUAL, NESTED> Condition<ACTUAL> nestable(String descriptionPrefix, Function<ACTUAL, NESTED> extractor, + Condition<NESTED>... conditions) { + return new NestableCondition<>(descriptionPrefix, stream(conditions), extractor); + } + + /** + * Creates a new <code>{@link NestableCondition}</code> + * @param descriptionPrefix the prefix to use to build the description + * @param conditions conditions to be checked + * @return the nestable condition + * @param <ACTUAL> the type of object the resulting condition accepts + */ + @SafeVarargs + public static <ACTUAL> Condition<ACTUAL> nestable(String descriptionPrefix, Condition<ACTUAL>... conditions) { + return new NestableCondition<>(descriptionPrefix, stream(conditions)); + } + + private NestableCondition(String descriptionPrefix, Stream<Condition<NESTED>> conditions, Function<ACTUAL, NESTED> extractor) { + super(compose(conditions, extractor)); + this.descriptionPrefix = descriptionPrefix; + } + + private NestableCondition(String descriptionPrefix, Stream<Condition<ACTUAL>> conditions) { + super(conditions.collect(toList())); + this.descriptionPrefix = descriptionPrefix; + } + + @Override + public boolean matches(ACTUAL value) { + return conditions.stream().allMatch(condition -> condition.matches(value)); + } + + @Override + public String descriptionPrefix() { + return descriptionPrefix; + } + + private static <ACTUAL, NESTED> List<Condition<ACTUAL>> compose(Stream<Condition<NESTED>> conditions, + Function<ACTUAL, NESTED> extractor) { + return conditions.map(condition -> compose(condition, extractor)).collect(toList()); + } + + private static <ACTUAL, NESTED> Condition<ACTUAL> compose(Condition<NESTED> condition, Function<ACTUAL, NESTED> extractor) { + return new Condition<ACTUAL>() { + @Override + public boolean matches(ACTUAL value) { + return condition.matches(extractor.apply(value)); + } + + @Override + public Description conditionDescriptionWithStatus(ACTUAL actual) { + return condition.conditionDescriptionWithStatus(extractor.apply(actual)); + } + }; + } +}
diff --git a/assertj-core/src/test/java/org/assertj/core/condition/NestableConditionFixtures.java b/assertj-core/src/test/java/org/assertj/core/condition/NestableConditionFixtures.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/test/java/org/assertj/core/condition/NestableConditionFixtures.java @@ -0,0 +1,110 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2022 the original author or authors. + */ +package org.assertj.core.condition; + +import static org.assertj.core.condition.NestableCondition.nestable; +import static org.assertj.core.condition.VerboseCondition.verboseCondition; + +import org.assertj.core.api.Condition; + +class NestableConditionFixtures { + static Condition<Name> first(String expected) { + return verboseCondition(name -> expected.equals(name.first), + "first: " + expected, + name -> " but was " + name.first); + } + + static Condition<Name> last(String expected) { + return verboseCondition(name -> expected.equals(name.last), + "last: " + expected, + name -> " but was " + name.last); + } + + static Condition<Address> firstLine(String expected) { + return verboseCondition(address -> expected.equals(address.firstLine), + "first line: " + expected, + address -> " but was " + address.firstLine); + } + + static Condition<Address> postcode(String expected) { + return verboseCondition(address -> expected.equals(address.postcode), + "postcode: " + expected, + address -> " but was " + address.postcode); + } + + static Condition<Country> name(String expected) { + return verboseCondition(country -> expected.equals(country.name), + "name: " + expected, + country -> " but was " + country.name); + } + + @SafeVarargs + static Condition<Customer> address(Condition<Address>... conditions) { + return nestable("address", customer -> customer.address, conditions); + } + + @SafeVarargs + static Condition<Customer> name(Condition<Name>... conditions) { + return nestable("name", it -> it.name, conditions); + } + + @SafeVarargs + static Condition<Customer> customer(Condition<Customer>... conditions) { + return nestable("customer", conditions); + } + + @SafeVarargs + static Condition<Address> country(Condition<Country>... 
conditions) { + return nestable("country", address -> address.country, conditions); + } +} + +class Customer { + final Name name; + final Address address; + + Customer(Name name, Address address) { + this.name = name; + this.address = address; + } +} + +class Name { + final String first; + final String last; + + Name(String first, String last) { + this.first = first; + this.last = last; + } +} + +class Address { + final String firstLine; + final String postcode; + final Country country; + + Address(String firstLine, String postcode, Country country) { + this.firstLine = firstLine; + this.postcode = postcode; + this.country = country; + } +} + +class Country { + final String name; + + Country(String name) { + this.name = name; + } +} diff --git a/assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_assertionMessage_Test.java b/assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_assertionMessage_Test.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_assertionMessage_Test.java @@ -0,0 +1,88 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2022 the original author or authors. + */ +package org.assertj.core.condition; + +import static java.lang.String.format; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.condition.NestableConditionFixtures.address; +import static org.assertj.core.condition.NestableConditionFixtures.country; +import static org.assertj.core.condition.NestableConditionFixtures.customer; +import static org.assertj.core.condition.NestableConditionFixtures.first; +import static org.assertj.core.condition.NestableConditionFixtures.firstLine; +import static org.assertj.core.condition.NestableConditionFixtures.last; +import static org.assertj.core.condition.NestableConditionFixtures.name; +import static org.assertj.core.condition.NestableConditionFixtures.postcode; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; + +import org.assertj.core.api.Condition; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link NestableCondition#toString()}</code>. 
+ * + * @author Alessandro Ciccimarra + */ +class NestableCondition_assertionMessage_Test { + private final Customer boris = new Customer(new Name("Boris", "Johnson"), + new Address("10, Downing Street", + "SW1A 2AA", + new Country("United Kingdom"))); + + @Test + void should_show_correct_error_message_with_two_nested_objects() { + // GIVEN + Condition<Customer> condition = customer( + name( + first("Boris"), + last("Johnson")), + address( + firstLine("10, Downing Street"), + postcode("SW2A 2AA"))); + // WHEN + AssertionError assertionError = expectAssertionError(() -> assertThat(boris).is(condition)); + + // THEN + then(assertionError).hasMessageContaining(format("[✗] customer:[%n" + + " [✓] name:[%n" + + " [✓] first: Boris,%n" + + " [✓] last: Johnson%n" + + " ],%n" + + " [✗] address:[%n" + + " [✓] first line: 10, Downing Street,%n" + + " [✗] postcode: SW2A 2AA but was SW1A 2AA%n" + + " ]%n" + + "]")); + } + + @Test + void should_show_correct_error_message_with_two_levels_of_nesting() { + // GIVEN + Condition<Customer> condition = customer( + address( + firstLine("10, Downing Street"), + country(name("Gibraltar")))); + // WHEN + AssertionError assertionError = expectAssertionError(() -> assertThat(boris).is(condition)); + + // THEN + then(assertionError).hasMessageContaining(format("[✗] customer:[%n" + + " [✗] address:[%n" + + " [✓] first line: 10, Downing Street,%n" + + " [✗] country:[%n" + + " [✗] name: Gibraltar but was United Kingdom%n" + + " ]%n" + + " ]%n" + + "]")); + } +} diff --git a/assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_matches_Test.java b/assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_matches_Test.java new file mode 100644 --- /dev/null +++ b/assertj-core/src/test/java/org/assertj/core/condition/NestableCondition_matches_Test.java @@ -0,0 +1,78 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2022 the original author or authors. + */ +package org.assertj.core.condition; + +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.condition.NestableConditionFixtures.address; +import static org.assertj.core.condition.NestableConditionFixtures.customer; +import static org.assertj.core.condition.NestableConditionFixtures.first; +import static org.assertj.core.condition.NestableConditionFixtures.firstLine; +import static org.assertj.core.condition.NestableConditionFixtures.name; +import static org.assertj.core.condition.NestableConditionFixtures.postcode; + +import org.assertj.core.api.Condition; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link NestableCondition#matches(Object)}</code>. 
+ * + * @author Alessandro Ciccimarra + */ +class NestableCondition_matches_Test { + private final Customer boris = new Customer(new Name("Boris", "Johnson"), + new Address("10, Downing Street", + "SW1A 2AA", + new Country("United Kingdom"))); + + @Test + void should_match_if_all_conditions_match() { + // GIVEN + Condition<Customer> condition = customer( + name( + first("Boris")), + address( + firstLine("10, Downing Street"), + postcode("SW1A 2AA") + )); + // THEN + then(condition.matches(boris)).isTrue(); + } + + @Test + void should_not_match_if_any_condition_at_top_level_does_not_match() { + // GIVEN + Condition<Customer> condition = customer( + name( + first("Matt")), + address( + firstLine("10, Downing Street"), + postcode("SW1A 2AA"))); + + // THEN + then(condition.matches(boris)).isFalse(); + } + + @Test + void should_not_match_if_any_condition_in_nested_level_does_not_match() { + // GIVEN + Condition<Customer> condition = customer( + name( + first("Boris")), + address( + firstLine("11, Downing Street"), + postcode("SW1A 2AA"))); + + // THEN + then(condition.matches(boris)).isFalse(); + } +}
Add building block to create custom precise assertions

#### Check List:

* Unit tests : YES
* Javadoc with a code example (on API only) : YES
* PR meets the [contributing guidelines](https://github.com/assertj/assertj-core/blob/main/CONTRIBUTING.md)

The goal is to be able to make precise soft assertions on complex objects (i.e. where we don't need to assert the equality of the full object) and to have easy-to-read test code and assertion error messages.

Example:

```java
assertThat(customer).is(
  customer(
    name(
      first("Boris"),
      last("Johnson")),
    address(
      firstLine("10, Downing Street"),
      postcode("SW2A 2AA")
    )
  )
)
```

and on failure get an error message like:

```
[✗] customer:[
   [✗] name:[
      [✓] first: Boris,
      [✗] last: Johnson but was Johnstone
   ],
   [✗] address:[
      [✓] first line: 10, Downing Street,
      [✗] postcode: SW2A 2AA but was SW1A 2AA
   ]
```

I tried using custom soft assertions, but I found the error message to be less readable (and the implementation more complex). Maybe there is already another way to achieve this?

Also `NestableCondition` is not a great name :relaxed:.

Looking forward to getting your feedback. Thanks!
2022-08-01T10:38:38Z
3.23
assertj/assertj
2,685
assertj__assertj-2685
[ "2666" ]
d5180a6d7c1af6ac7272268ee87dad54ae01ee7c
diff --git a/src/main/java/org/assertj/core/internal/Strings.java b/src/main/java/org/assertj/core/internal/Strings.java --- a/src/main/java/org/assertj/core/internal/Strings.java +++ b/src/main/java/org/assertj/core/internal/Strings.java @@ -528,8 +528,7 @@ private boolean stringContains(CharSequence actual, CharSequence sequence) { public void assertContainsIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence sequence) { checkCharSequenceIsNotNull(sequence); assertNotNull(info, actual); - if (!actual.toString().toLowerCase().contains(sequence.toString().toLowerCase())) - throw failures.failure(info, shouldContainIgnoringCase(actual, sequence)); + if (!containsIgnoreCase(actual, sequence)) throw failures.failure(info, shouldContainIgnoringCase(actual, sequence)); } // CS427 Issue link: https://github.com/assertj/assertj-core/issues/2060 @@ -593,8 +592,7 @@ public void assertContainsIgnoringWhitespaces(AssertionInfo info, CharSequence a public void assertDoesNotContainIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence... values) { doCommonCheckForCharSequence(info, actual, values); - String actualLowerCase = actual.toString().toLowerCase(); - Set<CharSequence> foundValues = stream(values).filter(value -> actualLowerCase.contains(value.toString().toLowerCase())) + Set<CharSequence> foundValues = stream(values).filter(value -> containsIgnoreCase(actual, value)) .collect(toCollection(LinkedHashSet::new)); if (foundValues.isEmpty()) return; if (foundValues.size() == 1 && values.length == 1) { @@ -900,7 +898,7 @@ public void assertStartsWith(AssertionInfo info, CharSequence actual, CharSequen public void assertStartsWithIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence prefix) { failIfPrefixIsNull(prefix); assertNotNull(info, actual); - if (!comparisonStrategy.stringStartsWith(actual.toString().toLowerCase(), prefix.toString().toLowerCase())) + if (!startsWith(actual, prefix, true)) throw failures.failure(info, shouldStartWithIgnoringCase(actual, prefix, comparisonStrategy)); } @@ -935,7 +933,7 @@ public void assertDoesNotStartWith(AssertionInfo info, CharSequence actual, Char public void assertDoesNotStartWithIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence prefix) { failIfPrefixIsNull(prefix); assertNotNull(info, actual); - if (comparisonStrategy.stringStartsWith(actual.toString().toLowerCase(), prefix.toString().toLowerCase())) + if (startsWith(actual, prefix, true)) throw failures.failure(info, shouldNotStartWithIgnoringCase(actual, prefix, comparisonStrategy)); } @@ -974,7 +972,7 @@ public void assertEndsWith(AssertionInfo info, CharSequence actual, CharSequence public void assertEndsWithIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence suffix) { failIfSuffixIsNull(suffix); assertNotNull(info, actual); - if (!comparisonStrategy.stringEndsWith(actual.toString().toLowerCase(), suffix.toString().toLowerCase())) + if (!endsWith(actual, suffix, true)) throw failures.failure(info, shouldEndWithIgnoringCase(actual, suffix, comparisonStrategy)); } @@ -1009,7 +1007,7 @@ public void assertDoesNotEndWith(AssertionInfo info, CharSequence actual, CharSe public void assertDoesNotEndWithIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence suffix) { failIfSuffixIsNull(suffix); assertNotNull(info, actual); - if (comparisonStrategy.stringEndsWith(actual.toString().toLowerCase(), suffix.toString().toLowerCase())) + if (endsWith(actual, suffix, true)) throw failures.failure(info, 
shouldNotEndWithIgnoringCase(actual, suffix, comparisonStrategy)); } @@ -1358,4 +1356,92 @@ private void doCommonCheckForCharSequence(AssertionInfo info, CharSequence actua checkIsNotEmpty(sequence); checkCharSequenceArrayDoesNotHaveNullElements(sequence); } + + // Source: org.apache.commons.lang3.StringUtils + private static boolean containsIgnoreCase(CharSequence str, CharSequence searchStr) { + if (str == null || searchStr == null) { + return false; + } + final int len = searchStr.length(); + final int max = str.length() - len; + for (int i = 0; i <= max; i++) { + if (regionMatches(str, true, i, searchStr, 0, len)) { + return true; + } + } + return false; + } + + // Source: org.apache.commons.lang3.StringUtils + private static boolean endsWith(CharSequence str, CharSequence suffix, boolean ignoreCase) { + if (str == null || suffix == null) { + return str == suffix; + } + if (suffix.length() > str.length()) { + return false; + } + final int strOffset = str.length() - suffix.length(); + return regionMatches(str, ignoreCase, strOffset, suffix, 0, suffix.length()); + } + + // Source: org.apache.commons.lang3.StringUtils + private static boolean startsWith(CharSequence str, CharSequence prefix, boolean ignoreCase) { + if (str == null || prefix == null) { + return str == prefix; + } + // Get length once instead of twice in the unlikely case that it changes. + final int preLen = prefix.length(); + if (prefix.length() > str.length()) { + return false; + } + return regionMatches(str, ignoreCase, 0, prefix, 0, preLen); + } + + // Source: org.apache.commons.lang3.CharSequenceUtils + private static boolean regionMatches(CharSequence cs, boolean ignoreCase, int thisStart, + CharSequence substring, int start, int length) { + if (cs instanceof String && substring instanceof String) { + return ((String) cs).regionMatches(ignoreCase, thisStart, (String) substring, start, length); + } + int index1 = thisStart; + int index2 = start; + int tmpLen = length; + + // Extract these first so we detect NPEs the same as the java.lang.String version + final int srcLen = cs.length() - thisStart; + final int otherLen = substring.length() - start; + + // Check for invalid parameters + if (thisStart < 0 || start < 0 || length < 0) { + return false; + } + + // Check that the regions are long enough + if (srcLen < length || otherLen < length) { + return false; + } + + while (tmpLen-- > 0) { + final char c1 = cs.charAt(index1++); + final char c2 = substring.charAt(index2++); + + if (c1 == c2) { + continue; + } + + if (!ignoreCase) { + return false; + } + + // The real same check as in String.regionMatches(): + final char u1 = Character.toUpperCase(c1); + final char u2 = Character.toUpperCase(c2); + if (u1 != u2 && Character.toLowerCase(u1) != Character.toLowerCase(u2)) { + return false; + } + } + + return true; + } + }
diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertContainsIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertContainsIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertContainsIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertContainsIgnoringCase_Test.java @@ -12,22 +12,19 @@ */ package org.assertj.core.internal.strings; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; -import static org.assertj.core.api.Assertions.assertThatNullPointerException; +import static org.assertj.core.api.Assertions.catchNullPointerException; +import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.error.ShouldContainCharSequence.shouldContainIgnoringCase; import static org.assertj.core.internal.ErrorMessages.charSequenceToLookForIsNull; import static org.assertj.core.test.TestData.someInfo; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.api.AssertionInfo; -import org.assertj.core.internal.Strings; import org.assertj.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; - +import org.junitpioneer.jupiter.DefaultLocale; /** - * Tests for <code>{@link Strings#assertContainsIgnoringCase(AssertionInfo, CharSequence, CharSequence)}</code>. - * * @author Alex Ruiz * @author Joel Costigliola */ @@ -35,59 +32,88 @@ class Strings_assertContainsIgnoringCase_Test extends StringsBaseTest { @Test void should_fail_if_actual_does_not_contain_sequence() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> strings.assertContainsIgnoringCase(someInfo(), "Yoda", "Luke")) - .withMessage(shouldContainIgnoringCase("Yoda", "Luke").create()); + // WHEN + AssertionError assertionError = expectAssertionError(() -> strings.assertContainsIgnoringCase(someInfo(), "Yoda", "Luke")); + // THEN + then(assertionError).hasMessage(shouldContainIgnoringCase("Yoda", "Luke").create()); } @Test void should_throw_error_if_sequence_is_null() { - assertThatNullPointerException().isThrownBy(() -> strings.assertContainsIgnoringCase(someInfo(), "Yoda", null)) - .withMessage(charSequenceToLookForIsNull()); + // WHEN + NullPointerException exception = catchNullPointerException(() -> strings.assertContainsIgnoringCase(someInfo(), "Yoda", + null)); + // THEN + then(exception).hasMessage(charSequenceToLookForIsNull()); } @Test void should_fail_if_actual_is_null() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> strings.assertContainsIgnoringCase(someInfo(), null, "Yoda")) - .withMessage(actualIsNull()); + // WHEN + AssertionError assertionError = expectAssertionError(() -> strings.assertContainsIgnoringCase(someInfo(), null, "Yoda")); + // THEN + then(assertionError).hasMessage(actualIsNull()); } @Test void should_pass_if_actual_contains_sequence() { + // WHEN/THEN strings.assertContainsIgnoringCase(someInfo(), "Yoda", "Yo"); } @Test void should_pass_if_actual_contains_sequence_in_different_case() { + // WHEN/THEN strings.assertContainsIgnoringCase(someInfo(), "Yoda", "yo"); } @Test void should_fail_if_actual_does_not_contain_sequence_whatever_custom_comparison_strategy_is() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), "Yoda", "Luke")) - .withMessage(shouldContainIgnoringCase("Yoda", 
"Luke").create()); + // WHEN + AssertionError assertionError = expectAssertionError(() -> stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), + "Yoda", + "Luke")); + // THEN + then(assertionError).hasMessage(shouldContainIgnoringCase("Yoda", "Luke").create()); } @Test void should_throw_error_if_sequence_is_null_whatever_custom_comparison_strategy_is() { - assertThatNullPointerException().isThrownBy(() -> stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), - "Yoda", - null)) - .withMessage(charSequenceToLookForIsNull()); + // WHEN + NullPointerException exception = catchNullPointerException(() -> stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), + "Yoda", + null)); + // THEN + then(exception).hasMessage(charSequenceToLookForIsNull()); } @Test void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), null, "Yoda")) - .withMessage(actualIsNull()); + // WHEN + AssertionError assertionError = expectAssertionError(() -> stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), + null, + "Yoda")); + // THEN + then(assertionError).hasMessage(actualIsNull()); } @Test void should_pass_if_actual_contains_sequence_whatever_custom_comparison_strategy_is() { + // WHEN/THEN stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), "Yoda", "Yo"); } @Test void should_pass_if_actual_contains_sequence_in_different_case_whatever_custom_comparison_strategy_is() { + // WHEN/THEN stringsWithCaseInsensitiveComparisonStrategy.assertContainsIgnoringCase(someInfo(), "Yoda", "yo"); } + + @Test + @DefaultLocale("tr-TR") + void should_pass_with_Turkish_default_locale() { + // WHEN/THEN + strings.assertContainsIgnoringCase(someInfo(), "Leia", "IA"); + } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotContainIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotContainIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotContainIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotContainIgnoringCase_Test.java @@ -12,39 +12,37 @@ */ package org.assertj.core.internal.strings; -import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.Assertions.catchNullPointerException; import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.api.BDDAssertions.thenIllegalArgumentException; import static org.assertj.core.error.ShouldNotContainCharSequence.shouldNotContainIgnoringCase; import static org.assertj.core.internal.ErrorMessages.arrayOfValuesToLookForIsEmpty; import static org.assertj.core.internal.ErrorMessages.valuesToLookForIsNull; import static org.assertj.core.test.TestData.someInfo; +import static org.assertj.core.util.Arrays.array; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import static org.mockito.internal.util.collections.Sets.newSet; +import static org.assertj.core.util.Sets.set; -import org.assertj.core.api.AssertionInfo; -import org.assertj.core.internal.Strings; import org.assertj.core.internal.StringsBaseTest; -import org.junit.jupiter.api.DisplayName; import 
org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DefaultLocale; /** - * Tests for <code>{@link Strings#assertDoesNotContainIgnoringCase(AssertionInfo, CharSequence, CharSequence...)}</code>. - * * @author Brummolix */ -@DisplayName("Strings assertDoesNotContainIgnoringCase") class Strings_assertDoesNotContainIgnoringCase_Test extends StringsBaseTest { @Test void should_pass_if_actual_does_not_contain_value_ignoring_case() { - assertDoesNotContainIgnoringCase("Yoda", "no"); + // WHEN/THEN + strings.assertDoesNotContainIgnoringCase(someInfo(), "Yoda", "no"); } @Test void should_pass_if_actual_does_not_contain_values_ignoring_case() { - assertDoesNotContainIgnoringCase("Yoda", "no", "also no"); + // WHEN/THEN + strings.assertDoesNotContainIgnoringCase(someInfo(), "Yoda", "no", "also no"); } @Test @@ -52,7 +50,8 @@ void should_fail_if_actual_contains_value() { // GIVEN String actual = "Yoda"; // WHEN - AssertionError assertionError = expectAssertionError(() -> assertDoesNotContainIgnoringCase(actual, "od")); + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), actual, + "od")); // THEN then(assertionError).hasMessage(shouldNotContainIgnoringCase(actual, "od").create()); } @@ -62,7 +61,8 @@ void should_fail_if_actual_contains_value_with_different_case() { // GIVEN String actual = "Yoda"; // WHEN - AssertionError assertionError = expectAssertionError(() -> assertDoesNotContainIgnoringCase(actual, "OD")); + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), actual, + "OD")); // THEN then(assertionError).hasMessage(shouldNotContainIgnoringCase(actual, "OD").create()); } @@ -72,10 +72,10 @@ void should_fail_if_actual_contains_one_of_several_values() { // GIVEN String actual = "Yoda"; // WHEN - AssertionError assertionError = expectAssertionError(() -> assertDoesNotContainIgnoringCase(actual, "od", "Yo", "Luke")); + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), actual, "od", + "Yo", "Luke")); // THEN - String message = shouldNotContainIgnoringCase(actual, new CharSequence[] { "od", "Yo", "Luke" }, newSet("od", "Yo")).create(); - then(assertionError).hasMessage(message); + then(assertionError).hasMessage(shouldNotContainIgnoringCase(actual, array("od", "Yo", "Luke"), set("od", "Yo")).create()); } @Test @@ -83,10 +83,10 @@ void should_fail_if_actual_contains_one_of_several_values_with_different_case() // GIVEN String actual = "Yoda"; // WHEN - AssertionError assertionError = expectAssertionError(() -> assertDoesNotContainIgnoringCase(actual, "OD", "yo", "Luke")); + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), actual, "OD", + "yo", "Luke")); // THEN - String message = shouldNotContainIgnoringCase(actual, new CharSequence[] { "OD", "yo", "Luke" }, newSet("OD", "yo")).create(); - then(assertionError).hasMessage(message); + then(assertionError).hasMessage(shouldNotContainIgnoringCase(actual, array("OD", "yo", "Luke"), set("OD", "yo")).create()); } @Test @@ -94,10 +94,10 @@ void should_fail_if_values_are_null() { // GIVEN CharSequence[] values = null; // WHEN - Throwable npe = catchThrowable(() -> assertDoesNotContainIgnoringCase("Yoda", values)); + NullPointerException exception = catchNullPointerException(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), "Yoda", + values)); // THEN - then(npe).isInstanceOf(NullPointerException.class) 
- .hasMessage(valuesToLookForIsNull()); + then(exception).hasMessage(valuesToLookForIsNull()); } @Test @@ -105,14 +105,15 @@ void should_fail_if_actual_is_null() { // GIVEN String actual = null; // WHEN - AssertionError assertionError = expectAssertionError(() -> assertDoesNotContainIgnoringCase(actual, "Yoda")); + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), actual, + "Yoda")); // THEN then(assertionError).hasMessage(actualIsNull()); } @Test void should_throw_error_if_values_are_empty() { - thenIllegalArgumentException().isThrownBy(() -> assertDoesNotContainIgnoringCase("Yoda")) + thenIllegalArgumentException().isThrownBy(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), "Yoda")) .withMessage(arrayOfValuesToLookForIsEmpty()); } @@ -121,13 +122,19 @@ void should_throw_error_if_values_contains_null() { // GIVEN CharSequence[] values = new CharSequence[] { "1", null }; // WHEN - Throwable npe = catchThrowable(() -> assertDoesNotContainIgnoringCase("Yoda", values)); + NullPointerException exception = catchNullPointerException(() -> strings.assertDoesNotContainIgnoringCase(someInfo(), "Yoda", + values)); // THEN - then(npe).isInstanceOf(NullPointerException.class) - .hasMessage("Expecting CharSequence elements not to be null but found one at index 1"); + then(exception).hasMessage("Expecting CharSequence elements not to be null but found one at index 1"); } - private void assertDoesNotContainIgnoringCase(CharSequence actual, CharSequence... values) { - strings.assertDoesNotContainIgnoringCase(someInfo(), actual, values); + @Test + @DefaultLocale("tr-TR") + void should_fail_with_Turkish_default_locale() { + // WHEN + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotContainIgnoringCase(INFO, "Leia", "EI")); + // THEN + then(assertionError).hasMessage(shouldNotContainIgnoringCase("Leia", "EI").create()); } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotEndWithIgnoringCase_Test.java @@ -18,7 +18,6 @@ import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ComparisonStrategy; import org.assertj.core.internal.StandardComparisonStrategy; @@ -26,11 +25,9 @@ import org.assertj.core.internal.StringsBaseTest; import org.assertj.core.util.StringHashCodeTestComparator; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DefaultLocale; -/** - * Tests for <code>{@link Strings#assertDoesNotEndWithIgnoringCase(AssertionInfo, CharSequence, CharSequence)}</code>. 
- */ -class Strings_assertDoesNotEndWithIgnoringCaseIgnoringCase_Test extends StringsBaseTest { +class Strings_assertDoesNotEndWithIgnoringCase_Test extends StringsBaseTest { @Test void should_pass_if_actual_does_not_end_with_suffix() { @@ -78,4 +75,14 @@ void should_fail_if_actual_ends_with_suffix_according_to_custom_comparison_strat // THEN then(assertionError).hasMessage(shouldNotEndWithIgnoringCase("Yoda", "A", hashCodeComparisonStrategy).create()); } + + @Test + @DefaultLocale("tr-TR") + void should_fail_with_Turkish_default_locale() { + // WHEN + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotEndWithIgnoringCase(INFO, "Leia", "IA")); + // THEN + then(assertionError).hasMessage(shouldNotEndWithIgnoringCase("Leia", "IA", StandardComparisonStrategy.instance()).create()); + } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertDoesNotStartWithIgnoringCase_Test.java @@ -18,7 +18,6 @@ import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ComparisonStrategy; import org.assertj.core.internal.StandardComparisonStrategy; @@ -26,10 +25,8 @@ import org.assertj.core.internal.StringsBaseTest; import org.assertj.core.util.StringHashCodeTestComparator; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DefaultLocale; -/** - * Tests for <code>{@link Strings#assertDoesNotStartWithIgnoringCase(AssertionInfo, CharSequence, CharSequence)}</code>. 
- */ class Strings_assertDoesNotStartWithIgnoringCase_Test extends StringsBaseTest { @Test @@ -42,7 +39,7 @@ void should_pass_if_actual_does_not_start_with_prefix() { void should_fail_if_actual_starts_with_prefix() { // WHEN AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotStartWithIgnoringCase(INFO, "Yoda", "yo")); - //THEN + // THEN then(assertionError).hasMessage(shouldNotStartWithIgnoringCase("Yoda", "yo", StandardComparisonStrategy.instance()).create()); } @@ -76,6 +73,17 @@ void should_fail_if_actual_starts_with_prefix_according_to_custom_comparison_str // WHEN AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotStartWithIgnoringCase(INFO, "Yoda", "yODA")); // THEN - then(assertionError).hasMessageContainingAll(shouldNotStartWithIgnoringCase("Yoda", "yODA", hashCodeComparisonStrategy).create()); + then(assertionError).hasMessageContainingAll(shouldNotStartWithIgnoringCase("Yoda", "yODA", + hashCodeComparisonStrategy).create()); + } + + @Test + @DefaultLocale("tr-TR") + void should_fail_with_Turkish_default_locale() { + // WHEN + AssertionError assertionError = expectAssertionError(() -> strings.assertDoesNotStartWithIgnoringCase(INFO, "Leia", "LEI")); + // THEN + then(assertionError).hasMessage(shouldNotStartWithIgnoringCase("Leia", "LEI", StandardComparisonStrategy.instance()).create()); } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertEndsWithIgnoringCase_Test.java @@ -15,10 +15,10 @@ import static org.assertj.core.api.Assertions.assertThatNullPointerException; import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.error.ShouldEndWithIgnoringCase.shouldEndWithIgnoringCase; +import static org.assertj.core.test.TestData.someInfo; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ComparisonStrategy; import org.assertj.core.internal.StandardComparisonStrategy; @@ -26,10 +26,8 @@ import org.assertj.core.internal.StringsBaseTest; import org.assertj.core.util.StringHashCodeTestComparator; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DefaultLocale; -/** - * Tests for <code>{@link Strings#assertEndsWithIgnoringCase(AssertionInfo, CharSequence, CharSequence)}</code>. 
- */ class Strings_assertEndsWithIgnoringCase_Test extends StringsBaseTest { @Test @@ -84,4 +82,12 @@ void should_fail_if_actual_does_not_end_with_suffix_according_to_custom_comparis // THEN then(assertionError).hasMessage(shouldEndWithIgnoringCase("Yoda", "Luke", hashCodeComparisonStrategy).create()); } + + @Test + @DefaultLocale("tr-TR") + void should_pass_with_Turkish_default_locale() { + // WHEN/THEN + strings.assertEndsWithIgnoringCase(someInfo(), "Leia", "IA"); + } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringCase_Test.java @@ -17,15 +17,11 @@ import static org.assertj.core.test.CharArrays.arrayOf; import static org.assertj.core.test.TestData.someInfo; -import org.assertj.core.api.AssertionInfo; -import org.assertj.core.internal.Strings; import org.assertj.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; - +import org.junitpioneer.jupiter.DefaultLocale; /** - * Tests for <code>{@link Strings#assertEqualsIgnoringCase(AssertionInfo, CharSequence, CharSequence)}</code>. - * * @author Alex Ruiz * @author Joel Costigliola */ @@ -103,4 +99,12 @@ void should_pass_if_both_Strings_are_equal_but_not_same_whatever_custom_comparis void should_pass_if_both_Strings_are_equal_ignoring_case_whatever_custom_comparison_strategy_is() { stringsWithCaseInsensitiveComparisonStrategy.assertEqualsIgnoringCase(someInfo(), "Yoda", "YODA"); } + + @Test + @DefaultLocale("tr-TR") + void should_pass_with_Turkish_default_locale() { + // WHEN/THEN + strings.assertEqualsIgnoringCase(someInfo(), "Leia", "LEIA"); + } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertNotEqualsIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertNotEqualsIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertNotEqualsIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertNotEqualsIgnoringCase_Test.java @@ -15,20 +15,19 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.error.ShouldNotBeEqualIgnoringCase.shouldNotBeEqualIgnoringCase; import static org.assertj.core.test.CharArrays.arrayOf; import static org.assertj.core.test.TestData.someInfo; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.mockito.Mockito.verify; import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.StringsBaseTest; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DefaultLocale; /** - * Tests for - * <code>{@link org.assertj.core.internal.Strings#assertNotEqualsIgnoringCase(org.assertj.core.api.AssertionInfo, CharSequence, CharSequence)}</code> - * . 
- * * @author Alexander Bischof */ class Strings_assertNotEqualsIgnoringCase_Test extends StringsBaseTest { @@ -127,4 +126,14 @@ void should_fail_if_both_Strings_are_equal_ignoring_case_whatever_custom_compari assertThat(error).isInstanceOf(AssertionError.class); verifyFailureThrownWhenStringsAreNotEqual(info, "Yoda", "YODA"); } + + @Test + @DefaultLocale("tr-TR") + void should_fail_with_Turkish_default_locale() { + // WHEN + AssertionError assertionError = expectAssertionError(() -> strings.assertNotEqualsIgnoringCase(INFO, "Leia", "LEIA")); + // THEN + then(assertionError).hasMessage(shouldNotBeEqualIgnoringCase("Leia", "LEIA").create()); + } + } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertStartsWithIgnoringCase_Test.java @@ -18,7 +18,6 @@ import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; -import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ComparatorBasedComparisonStrategy; import org.assertj.core.internal.ComparisonStrategy; import org.assertj.core.internal.StandardComparisonStrategy; @@ -26,10 +25,8 @@ import org.assertj.core.internal.StringsBaseTest; import org.assertj.core.util.StringHashCodeTestComparator; import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.DefaultLocale; -/** - * Tests for <code>{@link Strings#assertStartsWithIgnoringCase(AssertionInfo, CharSequence, CharSequence)}</code>. - */ class Strings_assertStartsWithIgnoringCase_Test extends StringsBaseTest { @Test @@ -82,4 +79,12 @@ void should_fail_if_actual_does_not_start_with_prefix_according_to_custom_compar // THEN then(assertionError).hasMessage(shouldStartWithIgnoringCase("Yoda", "Luke", hashCodeComparisonStrategy).create()); } + + @Test + @DefaultLocale("tr-TR") + void should_pass_with_Turkish_default_locale() { + // WHEN/THEN + strings.assertStartsWithIgnoringCase(INFO, "Leia", "LEI"); + } + } diff --git a/src/test/java/org/assertj/core/test/CaseInsensitiveStringComparatorTest.java b/src/test/java/org/assertj/core/test/CaseInsensitiveStringComparatorTest.java --- a/src/test/java/org/assertj/core/test/CaseInsensitiveStringComparatorTest.java +++ b/src/test/java/org/assertj/core/test/CaseInsensitiveStringComparatorTest.java @@ -23,7 +23,7 @@ class CaseInsensitiveStringComparatorTest { @Test @DefaultLocale("tr-TR") - void should_work_with_turkish_locale() { + void should_work_with_Turkish_default_locale() { // WHEN int result = underTest.compare("i", "I"); // THEN
assertContainsIgnoringCase fails to compare i and I in the tr_TR locale.

See `org.assertj.core.internal.Strings#assertContainsIgnoringCase`:
https://github.com/assertj/assertj-core/blob/9051a958e6ab0a750bb243060aef57001ab97e6e/src/main/java/org/assertj/core/internal/Strings.java#L528-L531

I would suggest adding https://github.com/policeman-tools/forbidden-apis verification to ban `toLowerCase()`, `toUpperCase()`, and other locale-sensitive methods: https://github.com/assertj/assertj-core/issues/2664
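For context, the sketch below (standalone, not AssertJ code; the class and variable names are illustrative) shows why default-locale case conversion breaks an ignoring-case suffix check under tr-TR, and shows two locale-independent alternatives:

```java
import java.util.Locale;

// Minimal demo of the Turkish-locale pitfall: under tr-TR, 'I' lower-cases to the
// dotless 'ı' (U+0131) instead of 'i', so a default-locale toLowerCase() comparison fails.
public class TurkishLocaleCaseDemo {
  public static void main(String[] args) {
    Locale.setDefault(Locale.forLanguageTag("tr-TR"));

    String actual = "Leia";
    String suffix = "IA";

    // Locale-sensitive: "IA".toLowerCase() -> "ıa" under tr-TR, so this is false.
    boolean localeSensitive = actual.toLowerCase().endsWith(suffix.toLowerCase());

    // Locale-independent alternatives: an explicit root locale, or regionMatches with
    // ignoreCase = true, which compares characters without a locale-sensitive conversion.
    boolean withRootLocale = actual.toLowerCase(Locale.ROOT).endsWith(suffix.toLowerCase(Locale.ROOT));
    boolean withRegionMatches = actual.regionMatches(true, actual.length() - suffix.length(),
                                                     suffix, 0, suffix.length());

    System.out.printf("localeSensitive=%s, rootLocale=%s, regionMatches=%s%n",
                      localeSensitive, withRootLocale, withRegionMatches); // false, true, true
  }
}
```

The forbidden-apis verification suggested in the issue would flag exactly these default-locale `toLowerCase()`/`toUpperCase()` calls.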
2022-06-25T14:10:08Z
3.23
assertj/assertj
2549
assertj__assertj-2549
[ "2547" ]
92548d8916425787c8a60e6dea638b285825ca12
diff --git a/src/main/java/org/assertj/core/internal/Maps.java b/src/main/java/org/assertj/core/internal/Maps.java --- a/src/main/java/org/assertj/core/internal/Maps.java +++ b/src/main/java/org/assertj/core/internal/Maps.java @@ -419,6 +419,8 @@ private static <K> Set<K> getNotExpectedKeys(Map<K, ?> actual, K[] expectedKeys) @SuppressWarnings("unchecked") private static <K, V> Map<K, V> clone(Map<K, V> map) throws NoSuchMethodException { + if (isMultiValueMapAdapterInstance(map)) throw new IllegalArgumentException("Cannot clone MultiValueMapAdapter"); + try { if (map instanceof Cloneable) { return (Map<K, V>) map.getClass().getMethod("clone").invoke(map); @@ -438,6 +440,19 @@ private static <K, V> Map<K, V> clone(Map<K, V> map) throws NoSuchMethodExceptio } } + private static boolean isMultiValueMapAdapterInstance(Map<?, ?> map) { + return isInstanceOf(map, "org.springframework.util.MultiValueMapAdapter"); + } + + private static boolean isInstanceOf(Object object, String className) { + try { + Class<?> type = Class.forName(className); + return type.isInstance(object); + } catch (ClassNotFoundException e) { + return false; + } + } + public <K, V> void assertContainsValue(AssertionInfo info, Map<K, V> actual, V value) { assertNotNull(info, actual); if (!containsValue(actual, value)) throw failures.failure(info, shouldContainValue(actual, value));
diff --git a/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnlyKeys_Test.java b/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnlyKeys_Test.java --- a/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnlyKeys_Test.java +++ b/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnlyKeys_Test.java @@ -29,6 +29,7 @@ import static org.assertj.core.util.Arrays.array; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; +import static org.assertj.core.util.Lists.list; import static org.assertj.core.util.Sets.set; import static org.junit.jupiter.params.provider.Arguments.arguments; @@ -45,6 +46,7 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.util.MultiValueMapAdapter; import com.google.common.collect.ImmutableMap; @@ -90,9 +92,13 @@ void should_fail_if_given_keys_array_is_empty() { "caseInsensitiveMapsSuccessfulTestCases", }) void should_pass(Map<String, String> actual, String[] expected) { + // GIVEN + int initialSize = actual.size(); // WHEN/THEN assertThatNoException().as(actual.getClass().getName()) .isThrownBy(() -> maps.assertContainsOnlyKeys(info, actual, expected)); + + then(actual).hasSize(initialSize); } private static Stream<Arguments> unmodifiableMapsSuccessfulTestCases() { @@ -127,6 +133,18 @@ private static Stream<Arguments> caseInsensitiveMapsSuccessfulTestCases() { array("Job", "Name")))); } + @Test + void should_pass_with_MultiValueMapAdapter() { + // GIVEN + MultiValueMapAdapter<String, String> actual = new MultiValueMapAdapter<>(mapOf(entry("name", list("Yoda")))); + String[] expected = array("name"); + int initialSize = actual.size(); + // WHEN + maps.assertContainsOnlyKeys(info, actual, expected); + // THEN + then(actual).hasSize(initialSize); + } + @ParameterizedTest @MethodSource({ "unmodifiableMapsFailureTestCases", @@ -135,12 +153,16 @@ private static Stream<Arguments> caseInsensitiveMapsSuccessfulTestCases() { "commonsCollectionsCaseInsensitiveMapFailureTestCases", }) void should_fail(Map<String, String> actual, String[] expected, Set<String> notFound, Set<String> notExpected) { + // GIVEN + int initialSize = actual.size(); // WHEN assertThatExceptionOfType(AssertionError.class).as(actual.getClass().getName()) .isThrownBy(() -> maps.assertContainsOnlyKeys(info, actual, expected)) // THEN .withMessage(shouldContainOnlyKeys(actual, expected, notFound, notExpected).create()); + + then(actual).hasSize(initialSize); } private static Stream<Arguments> unmodifiableMapsFailureTestCases() { @@ -213,4 +235,20 @@ private static Stream<Arguments> commonsCollectionsCaseInsensitiveMapFailureTest set("job"))); // internal keys are always lowercase } + @Test + void should_fail_with_MultiValueMapAdapter() { + // GIVEN + MultiValueMapAdapter<String, String> actual = new MultiValueMapAdapter<>(mapOf(entry("name", list("Yoda")), + entry("job", list("Jedi")))); + String[] expected = array("name", "color"); + Set<String> notFound = set("color"); + Set<String> notExpected = set("job"); + int initialSize = actual.size(); + // WHEN + AssertionError error = expectAssertionError(() -> maps.assertContainsOnlyKeys(info, actual, expected)); + // THEN + then(error).hasMessage(shouldContainOnlyKeys(actual, expected, notFound, notExpected).create()); + then(actual).hasSize(initialSize); + } + } diff --git 
a/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnly_Test.java b/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnly_Test.java --- a/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnly_Test.java +++ b/src/test/java/org/assertj/core/internal/maps/Maps_assertContainsOnly_Test.java @@ -29,10 +29,12 @@ import static org.assertj.core.util.Arrays.array; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; import static org.assertj.core.util.FailureMessages.actualIsNull; +import static org.assertj.core.util.Lists.list; import static org.assertj.core.util.Sets.set; import static org.junit.jupiter.params.provider.Arguments.arguments; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; @@ -41,11 +43,13 @@ import org.apache.commons.collections4.map.CaseInsensitiveMap; import org.apache.commons.collections4.map.SingletonMap; import org.apache.commons.lang3.ArrayUtils; +import org.assertj.core.data.MapEntry; import org.assertj.core.internal.MapsBaseTest; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.util.MultiValueMapAdapter; import com.google.common.collect.ImmutableMap; @@ -84,6 +88,15 @@ void should_fail_if_given_entries_array_is_empty() { then(error).hasMessage(shouldBeEmpty(actual).create()); } + @Test + void should_pass_if_value_type_is_array() { + // GIVEN + Map<String, byte[]> actual = mapOf(entry("key1", new byte[] { 1, 2 }), entry("key2", new byte[] { 3, 4, 5 })); + Entry<String, byte[]>[] expected = array(entry("key2", new byte[] { 3, 4, 5 }), entry("key1", new byte[] { 1, 2 })); + // WHEN/THEN + assertThatNoException().isThrownBy(() -> maps.assertContainsOnly(info, actual, expected)); + } + @ParameterizedTest @MethodSource({ "unmodifiableMapsSuccessfulTestCases", @@ -91,9 +104,13 @@ void should_fail_if_given_entries_array_is_empty() { "caseInsensitiveMapsSuccessfulTestCases", }) void should_pass(Map<String, String> actual, Entry<String, String>[] expected) { + // GIVEN + int initialSize = actual.size(); // WHEN/THEN assertThatNoException().as(actual.getClass().getName()) .isThrownBy(() -> maps.assertContainsOnly(info, actual, expected)); + + then(actual).hasSize(initialSize); } private static Stream<Arguments> unmodifiableMapsSuccessfulTestCases() { @@ -132,6 +149,18 @@ private static Stream<Arguments> caseInsensitiveMapsSuccessfulTestCases() { array(entry("Job", "Jedi"), entry("Name", "Yoda"))))); } + @Test + void should_pass_with_MultiValueMapAdapter() { + // GIVEN + MultiValueMapAdapter<String, String> actual = new MultiValueMapAdapter<>(mapOf(entry("name", list("Yoda")))); + Entry<String, List<String>>[] expected = array(entry("name", list("Yoda"))); + int initialSize = actual.size(); + // WHEN + maps.assertContainsOnly(info, actual, expected); + // THEN + then(actual).hasSize(initialSize); + } + @ParameterizedTest @MethodSource({ "unmodifiableMapsFailureTestCases", @@ -142,12 +171,16 @@ private static Stream<Arguments> caseInsensitiveMapsSuccessfulTestCases() { }) void should_fail(Map<String, String> actual, Entry<String, String>[] expected, Set<Entry<String, String>> notFound, Set<Entry<String, String>> notExpected) { + // GIVEN + int initialSize = actual.size(); // WHEN assertThatExceptionOfType(AssertionError.class).as(actual.getClass().getName()) .isThrownBy(() -> 
maps.assertContainsOnly(info, actual, expected)) // THEN .withMessage(shouldContainOnly(actual, expected, notFound, notExpected).create()); + + then(actual).hasSize(initialSize); } private static Stream<Arguments> unmodifiableMapsFailureTestCases() { @@ -219,14 +252,20 @@ private static Stream<Arguments> orderDependentFailureTestCases() { set(entry("name", "Yoda"), entry("job", "Jedi")))); } - @SuppressWarnings("unchecked") @Test - void should_pass_if_value_type_is_array() { + void should_fail_with_MultiValueMapAdapter() { // GIVEN - Map<String, byte[]> actual = mapOf(entry("key1", new byte[] { 1, 2 }), entry("key2", new byte[] { 3, 4, 5 })); - Entry<String, byte[]>[] expected = new Entry[] { entry("key2", new byte[] { 3, 4, 5 }), entry("key1", new byte[] { 1, 2 }) }; - // WHEN/THEN - assertThatNoException().isThrownBy(() -> maps.assertContainsOnly(info, actual, expected)); + MultiValueMapAdapter<String, String> actual = new MultiValueMapAdapter<>(mapOf(entry("name", list("Yoda")), + entry("job", list("Jedi")))); + MapEntry<String, List<String>>[] expected = array(entry("name", list("Yoda")), entry("color", list("Green"))); + Set<MapEntry<String, List<String>>> notFound = set(entry("color", list("Green"))); + Set<MapEntry<String, List<String>>> notExpected = set(entry("job", list("Jedi"))); + int initialSize = actual.size(); + // WHEN + AssertionError error = expectAssertionError(() -> maps.assertContainsOnly(info, actual, expected)); + // THEN + then(error).hasMessage(shouldContainOnly(actual, expected, notFound, notExpected).create()); + then(actual).hasSize(initialSize); } }
Regression in `AbstractMapAssert#containsOnlyKeys` with Spring's `MultiValueMapAdapter`

#### Summary

We saw this when updating from AssertJ 3.19.0 to 3.20.0. It appears that `AbstractMapAssert#containsOnlyKeys` is mutating the map that we're asserting on, which leads to test failures in our case. This is happening on an instance of `org.springframework.util.MultiValueMapAdapter`.

#### Example

```java
var underlyingMap = new HashMap<String, List<String>>();
underlyingMap.put("Hello", List.of("World"));
var multiValueMap = CollectionUtils.toMultiValueMap(underlyingMap);

// This assertion passes
assertThat(multiValueMap).containsOnlyKeys("Hello");

// This assertion fails, as `multiValueMap` and `underlyingMap` are now empty
assertThat(multiValueMap).containsOnlyKeys("Hello");
```

The issue seems to have been introduced in #2167, and is caused by [this](https://github.com/assertj/assertj-core/pull/2167/files#diff-abf21eb7e9d518345dcc39f1097da30169084ee0a538e0b174fba36807cf7386R397) use of `Map#remove` on a "clone" of the `Map` being asserted on. In our case that `Map` is a Spring `MultiValueMapAdapter`, which delegates operations to the underlying `Map` that it was constructed from. The `remove` call on the clone delegates to `multiValueMap#remove`, which in turn delegates to `underlyingMap#remove`.
With a quick look, the only option seems to be performing a check of the fully-qualified class name: if it's `org.springframework.util.MultiValueMapAdapter`, we don't try to clone the map but copy it directly.
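The hint above describes the approach in prose; the following is a minimal sketch of what that fully-qualified-class-name check could look like. The class name `MapCloning`, the method names, and the `LinkedHashMap` copy fallback are illustrative assumptions, not AssertJ's code; the merged patch above detects the adapter the same way but simply refuses to clone it (throwing `IllegalArgumentException`).

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Sketch: detect Spring's MultiValueMapAdapter by its fully-qualified class name (so no
// compile-time Spring dependency is needed) and copy it instead of cloning it reflectively,
// because the adapter's clone()/remove() delegate to -- and would mutate -- the wrapped map.
final class MapCloning {

  static <K, V> Map<K, V> copyForAssertion(Map<K, V> map) {
    if (isInstanceOf(map, "org.springframework.util.MultiValueMapAdapter")) {
      // plain entry copy: reading the adapter does not mutate the underlying map
      return new LinkedHashMap<>(map);
    }
    // otherwise the existing clone strategies (Cloneable, copy constructor, ...) could be tried
    return new LinkedHashMap<>(map);
  }

  private static boolean isInstanceOf(Object object, String className) {
    try {
      return Class.forName(className).isInstance(object);
    } catch (ClassNotFoundException e) {
      return false; // Spring is not on the classpath, so it cannot be an adapter instance
    }
  }
}
```

Working on a copy like this keeps assertions such as `containsOnlyKeys` from emptying the map under test.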
2022-03-30T20:25:27Z
3.22
assertj/assertj
2410
assertj__assertj-2410
[ "2397" ]
471e8a80347dc6e759e58a5ead7f700532e00dc6
diff --git a/src/main/java/org/assertj/core/api/Assertions.java b/src/main/java/org/assertj/core/api/Assertions.java --- a/src/main/java/org/assertj/core/api/Assertions.java +++ b/src/main/java/org/assertj/core/api/Assertions.java @@ -1160,7 +1160,7 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> assertThat(T a * * If the provided {@link ThrowingCallable} does not raise an exception, an error is immediately thrown, * in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br> - * To use a test description, use {@link #catchThrowable(ThrowableAssert.ThrowingCallable)} as shown below: + * To use a test description, use {@link #catchThrowable(ThrowingCallable)} as shown below: * <pre><code class='java'> // assertion will fail but "display me" won't appear in the error * assertThatThrownBy(() -&gt; {}).as("display me") * .isInstanceOf(Exception.class); @@ -1316,7 +1316,7 @@ public static <T> ObjectAssert<T> assertWith(T actual, Consumer<T>... requiremen * <p> * This caught {@link Throwable} can then be asserted. * <p> - * If you need to assert on the real type of Throwable caught (e.g. IOException), use {@link #catchThrowableOfType(ThrowableAssert.ThrowingCallable, Class)}. + * If you need to assert on the real type of Throwable caught (e.g. IOException), use {@link #catchThrowableOfType(ThrowingCallable, Class)}. * <p> * Example: * <pre><code class='java'>{@literal @}Test @@ -1331,7 +1331,7 @@ public static <T> ObjectAssert<T> assertWith(T actual, Consumer<T>... requiremen * * @param shouldRaiseThrowable The lambda with the code that should raise the exception. * @return The captured exception or <code>null</code> if none was raised by the callable. - * @see #catchThrowableOfType(ThrowableAssert.ThrowingCallable, Class) + * @see #catchThrowableOfType(ThrowingCallable, Class) */ public static Throwable catchThrowable(ThrowingCallable shouldRaiseThrowable) { return AssertionsForClassTypes.catchThrowable(shouldRaiseThrowable); @@ -1372,7 +1372,7 @@ public static Throwable catchThrowable(ThrowingCallable shouldRaiseThrowable) { * @param shouldRaiseThrowable The lambda with the code that should raise the exception. * @param type The type of exception that the code is expected to raise. * @return The captured exception or <code>null</code> if none was raised by the callable. - * @see #catchThrowable(ThrowableAssert.ThrowingCallable) + * @see #catchThrowable(ThrowingCallable) * @since 3.9.0 */ public static <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(ThrowingCallable shouldRaiseThrowable, @@ -1380,6 +1380,222 @@ public static <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(Throw return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseThrowable, type); } + /** + * Allows catching an instance of {@link Exception}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link Exception} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * Exception exception = catchException(() -&gt; {throw new Exception("boom!");}); + * // assertions succeed + * assertThat(exception).hasMessage("boom!"); + * + * // succeeds as catchException returns null when the code does not throw any exceptions + * assertThat(catchException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an Exception + * catchException(() -&gt; {throw new Throwable("boom!");});</code></pre> + * + * @param shouldRaiseException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static Exception catchException(ThrowingCallable shouldRaiseException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseException, Exception.class); + } + + /** + * Allows catching an instance of {@link RuntimeException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link RuntimeException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * RuntimeException runtimeException = catchRuntimeException(() -&gt; {throw new RuntimeException("boom!");}); + * // assertions succeed + * assertThat(runtimeException).hasMessage("boom!"); + * + * // succeeds as catchRuntimeException returns null when the code does not throw any exceptions + * assertThat(catchRuntimeException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not a RuntimeException + * catchRuntimeException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseRuntimeException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static RuntimeException catchRuntimeException(ThrowingCallable shouldRaiseRuntimeException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseRuntimeException, RuntimeException.class); + } + + /** + * Allows catching an instance of {@link NullPointerException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link RuntimeException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * NullPointerException nullPointerException = catchNullPointerException(() -&gt; {throw new NullPointerException("boom!");}); + * // assertions succeed + * assertThat(nullPointerException).hasMessage("boom!"); + * + * // succeeds as catchNullPointerException returns null when the code does not throw any exceptions + * assertThat(catchNullPointerException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not a NullPointerException + * catchNullPointerException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseNullPointerException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. 
+ * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static NullPointerException catchNullPointerException(ThrowingCallable shouldRaiseNullPointerException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseNullPointerException, NullPointerException.class); + } + + /** + * Allows catching an instance of {@link IllegalArgumentException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IllegalArgumentException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * IllegalArgumentException illegalArgumentException = catchIllegalArgumentException(() -&gt; {throw new IllegalArgumentException("boom!");}); + * // assertions succeed + * assertThat(illegalArgumentException).hasMessage("boom!"); + * + * // succeeds as catchNullPointerException returns null when the code does not throw any exceptions + * assertThat(catchIllegalArgumentException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IllegalArgumentException + * catchIllegalArgumentException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIllegalArgumentException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IllegalArgumentException catchIllegalArgumentException(ThrowingCallable shouldRaiseIllegalArgumentException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIllegalArgumentException, IllegalArgumentException.class); + } + + /** + * Allows catching an instance of {@link IOException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IOException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * IOException iOException = catchIOException(() -&gt; {throw new IOException("boom!");}); + * // assertions succeed + * assertThat(iOException).hasMessage("boom!"); + * + * // succeeds as catchIOException returns null when the code does not throw any exceptions + * assertThat(catchIOException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIOException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIOException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IOException catchIOException(ThrowingCallable shouldRaiseIOException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIOException, IOException.class); + } + + /** + * Allows catching an instance of {@link ReflectiveOperationException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link ReflectiveOperationException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * ReflectiveOperationException reflectiveOperationException = catchReflectiveOperationException(() -&gt; {throw new ReflectiveOperationException("boom!");}); + * // assertions succeed + * assertThat(reflectiveOperationException).hasMessage("boom!"); + * + * // succeeds as catchReflectiveOperationException returns null when the code does not throw any exceptions + * assertThat(catchReflectiveOperationException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchReflectiveOperationException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseReflectiveOperationException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static ReflectiveOperationException catchReflectiveOperationException(ThrowingCallable shouldRaiseReflectiveOperationException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseReflectiveOperationException, ReflectiveOperationException.class); + } + + /** + * Allows catching an instance of {@link IllegalStateException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IllegalStateException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * IllegalStateException illegalStateException = catchIllegalStateException(() -&gt; {throw new IllegalStateException("boom!");}); + * // assertions succeed + * assertThat(illegalStateException).hasMessage("boom!"); + * + * // succeeds as catchReflectiveOperationException returns null when the code does not throw any exceptions + * assertThat(catchIllegalStateException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIllegalStateException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIllegalStateException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IllegalStateException catchIllegalStateException(ThrowingCallable shouldRaiseIllegalStateException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIllegalStateException, IllegalStateException.class); + } + + /** + * Allows catching an instance of {@link IndexOutOfBoundsException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IndexOutOfBoundsException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * IndexOutOfBoundsException indexOutOfBoundsException = catchIndexOutOfBoundsException(() -&gt; {throw new IndexOutOfBoundsException("boom!");}); + * // assertions succeed + * assertThat(indexOutOfBoundsException).hasMessage("boom!"); + * + * // succeeds as catchIndexOutOfBoundsException returns null when the code does not throw any exceptions + * assertThat(catchIndexOutOfBoundsException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIndexOutOfBoundsException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIndexOutOfBoundException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IndexOutOfBoundsException catchIndexOutOfBoundsException(ThrowingCallable shouldRaiseIndexOutOfBoundException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIndexOutOfBoundException, IndexOutOfBoundsException.class); + } + /** * Entry point to check that an exception of type T is thrown by a given {@code throwingCallable} * which allows to chain assertions on the thrown exception. @@ -1389,7 +1605,7 @@ public static <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(Throw * .isThrownBy(() -&gt; { throw new IOException("boom!"); }) * .withMessage("boom!"); </code></pre> * - * This method is more or less the same of {@link #assertThatThrownBy(ThrowableAssert.ThrowingCallable)} but in a more natural way. + * This method is more or less the same of {@link #assertThatThrownBy(ThrowingCallable)} but in a more natural way. * * @param <T> the exception type. * @param exceptionType the exception type class. diff --git a/src/main/java/org/assertj/core/api/BDDAssertions.java b/src/main/java/org/assertj/core/api/BDDAssertions.java --- a/src/main/java/org/assertj/core/api/BDDAssertions.java +++ b/src/main/java/org/assertj/core/api/BDDAssertions.java @@ -1178,7 +1178,7 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> then(T actual) * * If the provided {@link ThrowingCallable} does not raise an exception, an error is immediately thrown, * in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br> - * To use a test description, use {@link #catchThrowable(ThrowableAssert.ThrowingCallable)} as shown below: + * To use a test description, use {@link #catchThrowable(ThrowingCallable)} as shown below: * <pre><code class='java'> // assertion will fail but "display me" won't appear in the error * thenThrownBy(() -&gt; {}).as("display me") * .isInstanceOf(Exception.class); @@ -1256,7 +1256,7 @@ public static <T extends Throwable> AbstractThrowableAssert<?, T> then(T actual) * * If the provided {@link ThrowingCallable} does not validate against next assertions, an error is immediately raised, * in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br> - * To use a test description, use {@link #catchThrowable(ThrowableAssert.ThrowingCallable)} as shown below. + * To use a test description, use {@link #catchThrowable(ThrowingCallable)} as shown below. 
* * <pre><code class='java'> ThrowingCallable doNothing = () -&gt; { * // do nothing @@ -1680,7 +1680,7 @@ public static <ELEMENT> SpliteratorAssert<ELEMENT> then(Spliterator<ELEMENT> act * <p> * This caught {@link Throwable} can then be asserted. * <p> - * If you need to assert on the real type of Throwable caught (e.g. IOException), use {@link #catchThrowableOfType(ThrowableAssert.ThrowingCallable, Class)}. + * If you need to assert on the real type of Throwable caught (e.g. IOException), use {@link #catchThrowableOfType(ThrowingCallable, Class)}. * <p> * Example: * <pre><code class='java'>{@literal @}Test @@ -1695,7 +1695,7 @@ public static <ELEMENT> SpliteratorAssert<ELEMENT> then(Spliterator<ELEMENT> act * * @param shouldRaiseThrowable The lambda with the code that should raise the exception. * @return The captured exception or <code>null</code> if none was raised by the callable. - * @see #catchThrowableOfType(ThrowableAssert.ThrowingCallable, Class) + * @see #catchThrowableOfType(ThrowingCallable, Class) * * @since 3.20.0 */ @@ -1738,7 +1738,7 @@ public static Throwable catchThrowable(ThrowingCallable shouldRaiseThrowable) { * @param shouldRaiseThrowable The lambda with the code that should raise the exception. * @param type The type of exception that the code is expected to raise. * @return The captured exception or <code>null</code> if none was raised by the callable. - * @see #catchThrowable(ThrowableAssert.ThrowingCallable) + * @see #catchThrowable(ThrowingCallable) * * @since 3.20.0 */ @@ -1747,6 +1747,222 @@ public static <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(Throw return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseThrowable, type); } + /** + * Allows catching an instance of {@link Exception}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link Exception} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * Exception exception = catchException(() -&gt; {throw new Exception("boom!");}); + * // assertions succeed + * assertThat(exception).hasMessage("boom!"); + * + * // succeeds as catchException returns null when the code does not throw any exceptions + * assertThat(catchException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an Exception + * catchException(() -&gt; {throw new Throwable("boom!");});</code></pre> + * + * @param shouldRaiseException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static Exception catchException(ThrowingCallable shouldRaiseException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseException, Exception.class); + } + + /** + * Allows catching an instance of {@link RuntimeException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link RuntimeException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * RuntimeException runtimeException = catchRuntimeException(() -&gt; {throw new RuntimeException("boom!");}); + * // assertions succeed + * assertThat(runtimeException).hasMessage("boom!"); + * + * // succeeds as catchRuntimeException returns null when the code does not throw any exceptions + * assertThat(catchRuntimeException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not a RuntimeException + * catchRuntimeException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseRuntimeException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static RuntimeException catchRuntimeException(ThrowingCallable shouldRaiseRuntimeException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseRuntimeException, RuntimeException.class); + } + + /** + * Allows catching an instance of {@link NullPointerException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link RuntimeException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * NullPointerException nullPointerException = catchNullPointerException(() -&gt; {throw new NullPointerException("boom!");}); + * // assertions succeed + * assertThat(nullPointerException).hasMessage("boom!"); + * + * // succeeds as catchNullPointerException returns null when the code does not throw any exceptions + * assertThat(catchNullPointerException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not a NullPointerException + * catchNullPointerException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseNullPointerException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static NullPointerException catchNullPointerException(ThrowingCallable shouldRaiseNullPointerException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseNullPointerException, NullPointerException.class); + } + + /** + * Allows catching an instance of {@link IllegalArgumentException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IllegalArgumentException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * IllegalArgumentException illegalArgumentException = catchIllegalArgumentException(() -&gt; {throw new IllegalArgumentException("boom!");}); + * // assertions succeed + * assertThat(illegalArgumentException).hasMessage("boom!"); + * + * // succeeds as catchNullPointerException returns null when the code does not throw any exceptions + * assertThat(catchIllegalArgumentException(() -&gt; {})).isNull();</code></pre> + * + * // fails as the thrown instance is not an IllegalArgumentException + * catchIllegalArgumentException(() -&gt; {throw new Exception("boom!");}); + * + * @param shouldRaiseIllegalArgumentException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IllegalArgumentException catchIllegalArgumentException(ThrowingCallable shouldRaiseIllegalArgumentException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIllegalArgumentException, IllegalArgumentException.class); + } + + /** + * Allows catching an instance of {@link IOException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IOException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * IOException iOException = catchIOException(() -&gt; {throw new IOException("boom!");}); + * // assertions succeed + * assertThat(iOException).hasMessage("boom!"); + * + * // succeeds as catchIOException returns null when the code does not throw any exceptions + * assertThat(catchIOException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIOException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIOException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IOException catchIOException(ThrowingCallable shouldRaiseIOException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIOException, IOException.class); + } + + /** + * Allows catching an instance of {@link ReflectiveOperationException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link ReflectiveOperationException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * ReflectiveOperationException reflectiveOperationException = catchReflectiveOperationException(() -&gt; {throw new ReflectiveOperationException("boom!");}); + * // assertions succeed + * assertThat(reflectiveOperationException).hasMessage("boom!"); + * + * // succeeds as catchReflectiveOperationException returns null when the code does not throw any exceptions + * assertThat(catchReflectiveOperationException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchReflectiveOperationException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseReflectiveOperationException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static ReflectiveOperationException catchReflectiveOperationException(ThrowingCallable shouldRaiseReflectiveOperationException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseReflectiveOperationException, ReflectiveOperationException.class); + } + + /** + * Allows catching an instance of {@link IllegalStateException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IllegalStateException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * IllegalStateException illegalStateException = catchIllegalStateException(() -&gt; {throw new IllegalStateException("boom!");}); + * // assertions succeed + * assertThat(illegalStateException).hasMessage("boom!"); + * + * // succeeds as catchReflectiveOperationException returns null when the code does not throw any exceptions + * assertThat(catchIllegalStateException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIllegalStateException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIllegalStateException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IllegalStateException catchIllegalStateException(ThrowingCallable shouldRaiseIllegalStateException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIllegalStateException, IllegalStateException.class); + } + + /** + * Allows catching an instance of {@link IndexOutOfBoundsException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IndexOutOfBoundsException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * IndexOutOfBoundsException indexOutOfBoundsException = catchIndexOutOfBoundsException(() -&gt; {throw new IndexOutOfBoundsException("boom!");}); + * // assertions succeed + * assertThat(indexOutOfBoundsException).hasMessage("boom!"); + * + * // succeeds as catchIndexOutOfBoundsException returns null when the code does not throw any exceptions + * assertThat(catchIndexOutOfBoundsException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIndexOutOfBoundsException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIndexOutOfBoundException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + public static IndexOutOfBoundsException catchIndexOutOfBoundsException(ThrowingCallable shouldRaiseIndexOutOfBoundException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIndexOutOfBoundException, IndexOutOfBoundsException.class); + } + /** * Entry point to check that an exception of type T is thrown by a given {@code throwingCallable} * which allows to chain assertions on the thrown exception. @@ -1756,7 +1972,7 @@ public static <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(Throw * .isThrownBy(() -&gt; { throw new IOException("boom!"); }) * .withMessage("boom!"); </code></pre> * - * This method is more or less the same of {@link #thenThrownBy(ThrowableAssert.ThrowingCallable)} but in a more natural way. + * This method is more or less the same of {@link #thenThrownBy(ThrowingCallable)} but in a more natural way. * * @param <T> the exception type. * @param exceptionType the exception type class. diff --git a/src/main/java/org/assertj/core/api/WithAssertions.java b/src/main/java/org/assertj/core/api/WithAssertions.java --- a/src/main/java/org/assertj/core/api/WithAssertions.java +++ b/src/main/java/org/assertj/core/api/WithAssertions.java @@ -2458,7 +2458,7 @@ default AbstractOffsetDateTimeAssert<?> assertThat(final OffsetDateTime offsetDa * * If the provided {@link ThrowingCallable} does not raise an exception, an error is immediately thrown, * in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br> - * To use a test description, use {@link #catchThrowable(ThrowableAssert.ThrowingCallable)} as shown below: + * To use a test description, use {@link #catchThrowable(ThrowingCallable)} as shown below: * <pre><code class='java'> // assertion will fail but "display me" won't appear in the error * assertThatThrownBy(() -&gt; {}).as("display me") * .isInstanceOf(Exception.class); @@ -2537,7 +2537,7 @@ default AbstractOffsetDateTimeAssert<?> assertThat(final OffsetDateTime offsetDa * * If the provided {@link ThrowingCallable} does not validate against next assertions, an error is immediately raised, * in that case the test description provided with {@link AbstractAssert#as(String, Object...) as(String, Object...)} is not honored.<br> - * To use a test description, use {@link #catchThrowable(ThrowableAssert.ThrowingCallable)} as shown below. + * To use a test description, use {@link #catchThrowable(ThrowingCallable)} as shown below. 
* * <pre><code class='java'> ThrowingCallable doNothing = () -&gt; { * // do nothing @@ -2636,7 +2636,7 @@ default <T> ObjectAssert<T> assertWith(T actual, @SuppressWarnings("unchecked") * * @param shouldRaiseThrowable The lambda with the code that should raise the exception. * @return The captured exception or <code>null</code> if none was raised by the callable. - * @see #catchThrowableOfType(ThrowableAssert.ThrowingCallable, Class) + * @see #catchThrowableOfType(ThrowingCallable, Class) */ default Throwable catchThrowable(final ThrowingCallable shouldRaiseThrowable) { return Assertions.catchThrowable(shouldRaiseThrowable); @@ -2676,7 +2676,7 @@ default Throwable catchThrowable(final ThrowingCallable shouldRaiseThrowable) { * @param shouldRaiseThrowable The lambda with the code that should raise the exception. * @param type The type of exception that the code is expected to raise. * @return The captured exception or <code>null</code> if none was raised by the callable. - * @see #catchThrowable(ThrowableAssert.ThrowingCallable) + * @see #catchThrowable(ThrowingCallable) * @since 3.9.0 */ default <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(final ThrowingCallable shouldRaiseThrowable, @@ -2684,6 +2684,222 @@ default <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(final Throw return Assertions.catchThrowableOfType(shouldRaiseThrowable, type); } + /** + * Allows catching an instance of {@link Exception}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link Exception} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * Exception exception = catchException(() -&gt; {throw new Exception("boom!");}); + * // assertions succeed + * assertThat(exception).hasMessage("boom!"); + * + * // succeeds as catchException returns null when the code does not throw any exceptions + * assertThat(catchException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an Exception + * catchException(() -&gt; {throw new Throwable("boom!");});</code></pre> + * + * @param shouldRaiseException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + default Exception catchException(ThrowingCallable shouldRaiseException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseException, Exception.class); + } + + /** + * Allows catching an instance of {@link RuntimeException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link RuntimeException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * RuntimeException runtimeException = catchRuntimeException(() -&gt; {throw new RuntimeException("boom!");}); + * // assertions succeed + * assertThat(runtimeException).hasMessage("boom!"); + * + * // succeeds as catchRuntimeException returns null when the code does not throw any exceptions + * assertThat(catchRuntimeException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not a RuntimeException + * catchRuntimeException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseRuntimeException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + default RuntimeException catchRuntimeException(ThrowingCallable shouldRaiseRuntimeException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseRuntimeException, RuntimeException.class); + } + + /** + * Allows catching an instance of {@link NullPointerException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link RuntimeException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * NullPointerException nullPointerException = catchNullPointerException(() -&gt; {throw new NullPointerException("boom!");}); + * // assertions succeed + * assertThat(nullPointerException).hasMessage("boom!"); + * + * // succeeds as catchNullPointerException returns null when the code does not throw any exceptions + * assertThat(catchNullPointerException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not a NullPointerException + * catchNullPointerException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseNullPointerException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + default NullPointerException catchNullPointerException(ThrowingCallable shouldRaiseNullPointerException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseNullPointerException, NullPointerException.class); + } + + /** + * Allows catching an instance of {@link IllegalArgumentException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IllegalArgumentException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p> + * Example: + * <pre><code class='java'> + * IllegalArgumentException illegalArgumentException = catchIllegalArgumentException(() -&gt; {throw new IllegalArgumentException("boom!");}); + * // assertions succeed + * assertThat(illegalArgumentException).hasMessage("boom!"); + * + * // succeeds as catchNullPointerException returns null when the code does not throw any exceptions + * assertThat(catchIllegalArgumentException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IllegalArgumentException + * catchIllegalArgumentException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIllegalArgumentException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + default IllegalArgumentException catchIllegalArgumentException(ThrowingCallable shouldRaiseIllegalArgumentException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIllegalArgumentException, IllegalArgumentException.class); + } + + /** + * Allows catching an instance of {@link IOException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link IOException} and casts it making it convenient to perform subtype-specific assertions on it. + * <p> + * Example: + * <pre><code class='java'> + * IOException iOException = catchIOException(() -&gt; {throw new IOException("boom!");}); + * // assertions succeed + * assertThat(iOException).hasMessage("boom!"); + * + * // succeeds as catchIOException returns null when the code does not throw any exceptions + * assertThat(catchIOException(() -&gt; {})).isNull(); + * + * // fails as the thrown instance is not an IOException + * catchIOException(() -&gt; {throw new Exception("boom!");});</code></pre> + * + * @param shouldRaiseIOException The lambda with the code that should raise the exception. + * @return The captured exception or <code>null</code> if none was raised by the callable. + * @see #catchThrowable(ThrowingCallable) + * @since 3.22.0 + */ + default IOException catchIOException(ThrowingCallable shouldRaiseIOException) { + return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIOException, IOException.class); + } + + /** + * Allows catching an instance of {@link ReflectiveOperationException}. + * <p> + * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null + * otherwise it checks that the caught {@link Throwable} is of type {@link ReflectiveOperationException} and casts it making it convenient to perform subtype-specific assertions on it. 
+ * <p>
+ * Example:
+ * <pre><code class='java'>
+ * ReflectiveOperationException reflectiveOperationException = catchReflectiveOperationException(() -&gt; {throw new ReflectiveOperationException("boom!");});
+ * // assertions succeed
+ * assertThat(reflectiveOperationException).hasMessage("boom!");
+ *
+ * // succeeds as catchReflectiveOperationException returns null when the code does not throw any exceptions
+ * assertThat(catchReflectiveOperationException(() -&gt; {})).isNull();
+ *
+ * // fails as the thrown instance is not a ReflectiveOperationException
+ * catchReflectiveOperationException(() -&gt; {throw new Exception("boom!");});</code></pre>
+ *
+ * @param shouldRaiseReflectiveOperationException The lambda with the code that should raise the exception.
+ * @return The captured exception or <code>null</code> if none was raised by the callable.
+ * @see #catchThrowable(ThrowingCallable)
+ * @since 3.22.0
+ */
+ default ReflectiveOperationException catchReflectiveOperationException(ThrowingCallable shouldRaiseReflectiveOperationException) {
+ return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseReflectiveOperationException, ReflectiveOperationException.class);
+ }
+
+ /**
+ * Allows catching an instance of {@link IllegalStateException}.
+ * <p>
+ * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null
+ * otherwise it checks that the caught {@link Throwable} is of type {@link IllegalStateException} and casts it making it convenient to perform subtype-specific assertions on it.
+ * <p>
+ * Example:
+ * <pre><code class='java'>
+ * IllegalStateException illegalStateException = catchIllegalStateException(() -&gt; {throw new IllegalStateException("boom!");});
+ * // assertions succeed
+ * assertThat(illegalStateException).hasMessage("boom!");
+ *
+ * // succeeds as catchIllegalStateException returns null when the code does not throw any exceptions
+ * assertThat(catchIllegalStateException(() -&gt; {})).isNull();
+ *
+ * // fails as the thrown instance is not an IllegalStateException
+ * catchIllegalStateException(() -&gt; {throw new Exception("boom!");});</code></pre>
+ *
+ * @param shouldRaiseIllegalStateException The lambda with the code that should raise the exception.
+ * @return The captured exception or <code>null</code> if none was raised by the callable.
+ * @see #catchThrowable(ThrowingCallable)
+ * @since 3.22.0
+ */
+ default IllegalStateException catchIllegalStateException(ThrowingCallable shouldRaiseIllegalStateException) {
+ return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIllegalStateException, IllegalStateException.class);
+ }
+
+ /**
+ * Allows catching an instance of {@link IndexOutOfBoundsException}.
+ * <p>
+ * A call is made to {@code catchThrowable(ThrowingCallable)}, if no exception is thrown it returns null
+ * otherwise it checks that the caught {@link Throwable} is of type {@link IndexOutOfBoundsException} and casts it making it convenient to perform subtype-specific assertions on it.
+ * <p>
+ * Example:
+ * <pre><code class='java'>
+ * IndexOutOfBoundsException indexOutOfBoundsException = catchIndexOutOfBoundsException(() -&gt; {throw new IndexOutOfBoundsException("boom!");});
+ * // assertions succeed
+ * assertThat(indexOutOfBoundsException).hasMessage("boom!");
+ *
+ * // succeeds as catchIndexOutOfBoundsException returns null when the code does not throw any exceptions
+ * assertThat(catchIndexOutOfBoundsException(() -&gt; {})).isNull();
+ *
+ * // fails as the thrown instance is not an IndexOutOfBoundsException
+ * catchIndexOutOfBoundsException(() -&gt; {throw new Exception("boom!");});</code></pre>
+ *
+ * @param shouldRaiseIndexOutOfBoundException The lambda with the code that should raise the exception.
+ * @return The captured exception or <code>null</code> if none was raised by the callable.
+ * @see #catchThrowable(ThrowingCallable)
+ * @since 3.22.0
+ */
+ default IndexOutOfBoundsException catchIndexOutOfBoundsException(ThrowingCallable shouldRaiseIndexOutOfBoundException) {
+ return AssertionsForClassTypes.catchThrowableOfType(shouldRaiseIndexOutOfBoundException, IndexOutOfBoundsException.class);
+ }
+
 /**
  * Entry point to check that an exception of type T is thrown by a given {@code throwingCallable}
  * which allows to chain assertions on the thrown exception.
@@ -2693,7 +2909,7 @@ default <THROWABLE extends Throwable> THROWABLE catchThrowableOfType(final Throw
  * .isThrownBy(() -&gt; { throw new IOException("boom!"); })
  * .withMessage("boom!"); </code></pre>
  *
- * This method is more or less the same of {@link #assertThatThrownBy(ThrowableAssert.ThrowingCallable)} but in a more natural way.
+ * This method is more or less the same of {@link #assertThatThrownBy(ThrowingCallable)} but in a more natural way.
 *
 * @param <T> the exception type.
 * @param exceptionType the exception type class.
diff --git a/src/test/java/org/assertj/core/api/Assertions_catchException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchException_Test.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +class Assertions_catchException_Test { + + @Test + void catchException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchException(raisingThrowable("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(Exception.class.getName(), Throwable.class.getName()); + } + + @Test + void catchException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final Exception expected = new Exception("boom!!"); + // WHEN + Exception actual = catchException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + Exception actual = catchException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchException_should_catch_mocked_throwable() { + // GIVEN + Exception exception = mock(Exception.class); + // WHEN + Throwable actual = catchException(codeThrowing(exception)); + // THEN + then(actual).isSameAs(exception); + } + + static ThrowingCallable raisingThrowable(final String reason) { + return codeThrowing(new Throwable(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchIOException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchIOException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchIOException_Test.java @@ -0,0 +1,71 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +import java.io.IOException; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchIOException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +class Assertions_catchIOException_Test { + + @Test + void catchIOException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchIOException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(Exception.class.getName(), IOException.class.getName()); + } + + @Test + void catchIOException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final IOException expected = new IOException("boom!!"); + // WHEN + IOException actual = catchIOException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchIOException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + IOException actual = catchIOException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchIOException_should_catch_mocked_throwable() { + // GIVEN + IOException exception = mock(IOException.class); + // WHEN + Throwable actual = catchIOException(codeThrowing(exception)); + // THEN + then(actual).isSameAs(exception); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchIllegalArgumentException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchIllegalArgumentException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchIllegalArgumentException_Test.java @@ -0,0 +1,71 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +import java.io.IOException; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchIllegalArgumentException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +class Assertions_catchIllegalArgumentException_Test { + + @Test + void catchIllegalArgumentException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchIllegalArgumentException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(Exception.class.getName(), IllegalArgumentException.class.getName()); + } + + @Test + void catchIllegalArgumentException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final IllegalArgumentException expected = new IllegalArgumentException("boom!!"); + // WHEN + IllegalArgumentException actual = catchIllegalArgumentException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchIllegalArgumentException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + IllegalArgumentException actual = catchIllegalArgumentException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchIllegalArgumentException_should_catch_mocked_throwable() { + // GIVEN + IllegalArgumentException exception = mock(IllegalArgumentException.class); + // WHEN + Throwable actual = catchIllegalArgumentException(codeThrowing(exception)); + // THEN + then(actual).isSameAs(exception); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchIllegalStateException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchIllegalStateException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchIllegalStateException_Test.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchIllegalStateException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +class Assertions_catchIllegalStateException_Test { + + @Test + void catchIllegalStateException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchIllegalStateException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(IllegalStateException.class.getName(), Exception.class.getName()); + } + + @Test + void catchIllegalStateException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final IllegalStateException expected = new IllegalStateException("boom!!"); + // WHEN + IllegalStateException actual = catchIllegalStateException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchIllegalStateException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + IllegalStateException actual = catchIllegalStateException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchIllegalStateException_should_catch_mocked_throwable() { + // GIVEN + IllegalStateException illegalStateException = mock(IllegalStateException.class); + // WHEN + Throwable actual = catchIllegalStateException(codeThrowing(illegalStateException)); + // THEN + then(actual).isSameAs(illegalStateException); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchIndexOutOfBoundsException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchIndexOutOfBoundsException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchIndexOutOfBoundsException_Test.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchIndexOutOfBoundsException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +class Assertions_catchIndexOutOfBoundsException_Test { + + @Test + void catchIndexOutOfBoundsException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchIndexOutOfBoundsException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(IndexOutOfBoundsException.class.getName(), Exception.class.getName()); + } + + @Test + void catchIndexOutOfBoundsException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final IndexOutOfBoundsException expected = new IndexOutOfBoundsException("boom!!"); + // WHEN + IndexOutOfBoundsException actual = catchIndexOutOfBoundsException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchIndexOutOfBoundsException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + IndexOutOfBoundsException actual = catchIndexOutOfBoundsException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchIndexOutOfBoundsException_should_catch_mocked_throwable() { + // GIVEN + IndexOutOfBoundsException indexOutOfBoundsException = mock(IndexOutOfBoundsException.class); + // WHEN + Throwable actual = catchIndexOutOfBoundsException(codeThrowing(indexOutOfBoundsException)); + // THEN + then(actual).isSameAs(indexOutOfBoundsException); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchNullPointerException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchNullPointerException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchNullPointerException_Test.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchNullPointerException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +class Assertions_catchNullPointerException_Test { + + @Test + void catchNullPointerException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchNullPointerException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(Exception.class.getName(), NullPointerException.class.getName()); + } + + @Test + void catchNullPointerException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final NullPointerException expected = new NullPointerException("boom!!"); + // WHEN + NullPointerException actual = catchNullPointerException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchNullPointerException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + NullPointerException actual = catchNullPointerException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchNullPointerException_should_catch_mocked_throwable() { + // GIVEN + NullPointerException exception = mock(NullPointerException.class); + // WHEN + Throwable actual = catchNullPointerException(codeThrowing(exception)); + // THEN + then(actual).isSameAs(exception); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchReflectiveOperationException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchReflectiveOperationException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchReflectiveOperationException_Test.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchReflectiveOperationException; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +class Assertions_catchReflectiveOperationException_Test { + + @Test + void catchReflectiveOperationException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchReflectiveOperationException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(ReflectiveOperationException.class.getName(), Exception.class.getName()); + } + + @Test + void catchReflectiveOperationException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final ReflectiveOperationException expected = new ReflectiveOperationException("boom!!"); + // WHEN + ReflectiveOperationException actual = catchReflectiveOperationException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchReflectiveOperationException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + ReflectiveOperationException actual = catchReflectiveOperationException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchReflectiveOperationException_should_catch_mocked_throwable() { + // GIVEN + ReflectiveOperationException exception = mock(ReflectiveOperationException.class); + // WHEN + Throwable actual = catchReflectiveOperationException(codeThrowing(exception)); + // THEN + then(actual).isSameAs(exception); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/Assertions_catchRuntimeException_Test.java b/src/test/java/org/assertj/core/api/Assertions_catchRuntimeException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_catchRuntimeException_Test.java @@ -0,0 +1,67 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assertions_catchThrowable_Test.codeThrowing; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.mockito.Mockito.mock; +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.api.Test; + +class Assertions_catchRuntimeException_Test { + + @Test + void catchRuntimeException_should_fail_with_good_message_if_wrong_type() { + // GIVEN + ThrowingCallable code = () -> catchRuntimeException(raisingException("boom!!")); + // WHEN + AssertionError assertionError = expectAssertionError(code); + // THEN + assertThat(assertionError).hasMessageContainingAll(RuntimeException.class.getName(), Exception.class.getName()); + } + + @Test + void catchRuntimeException_should_succeed_and_return_actual_instance_with_correct_class() { + // GIVEN + final RuntimeException expected = new RuntimeException("boom!!"); + // WHEN + RuntimeException actual = catchRuntimeException(codeThrowing(expected)); + // THEN + then(actual).isSameAs(expected); + } + + @Test + void catchRuntimeException_should_succeed_and_return_null_if_no_exception_thrown() { + // WHEN + RuntimeException actual = catchRuntimeException(() -> {}); + // THEN + then(actual).isNull(); + } + + @Test + void catchRuntimeException_should_catch_mocked_throwable() { + // GIVEN + RuntimeException runtimeException = mock(RuntimeException.class); + // WHEN + Throwable actual = catchRuntimeException(codeThrowing(runtimeException)); + // THEN + then(actual).isSameAs(runtimeException); + } + + static ThrowingCallable raisingException(final String reason) { + return codeThrowing(new Exception(reason)); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchException_Test extends EntryPointAssertionsBaseTest { + + private static final Exception EXCEPTION = new Exception(); + + @ParameterizedTest + @MethodSource("catchExceptions") + void should_catch_Exception(Function<ThrowingCallable, Exception> catchException) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw EXCEPTION; + }; + // WHEN + Exception throwable = catchException.apply(throwingCallable); + // THEN + then(throwable).isSameAs(EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, Exception>> catchExceptions() { + return Stream.of(Assertions::catchException, BDDAssertions::catchException, withAssertions::catchException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIOException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIOException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIOException_Test.java @@ -0,0 +1,46 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchIOException_Test extends EntryPointAssertionsBaseTest { + + private static final IOException IO_EXCEPTION = new IOException(); + + @ParameterizedTest + @MethodSource("catchIOExceptions") + void should_catch_IOException(Function<ThrowingCallable, IOException> catchIOException) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw IO_EXCEPTION; + }; + // WHEN + IOException throwable = catchIOException.apply(throwingCallable); + // THEN + then(throwable).isSameAs(IO_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, IOException>> catchIOExceptions() { + return Stream.of(Assertions::catchIOException, BDDAssertions::catchIOException, withAssertions::catchIOException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIllegalArgumentException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIllegalArgumentException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIllegalArgumentException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchIllegalArgumentException_Test extends EntryPointAssertionsBaseTest { + + private static final IllegalArgumentException ILLEGAL_ARGUMENT_EXCEPTION = new IllegalArgumentException(); + + @ParameterizedTest + @MethodSource("catchIllegalArgumentExceptions") + void should_catch_IllegalArgumentException(Function<ThrowingCallable, IllegalArgumentException> catchIllegalArgumentExceptions) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw ILLEGAL_ARGUMENT_EXCEPTION; + }; + // WHEN + IllegalArgumentException throwable = catchIllegalArgumentExceptions.apply(throwingCallable); + // THEN + then(throwable).isSameAs(ILLEGAL_ARGUMENT_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, IllegalArgumentException>> catchIllegalArgumentExceptions() { + return Stream.of(Assertions::catchIllegalArgumentException, BDDAssertions::catchIllegalArgumentException, withAssertions::catchIllegalArgumentException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIllegalStateException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIllegalStateException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIllegalStateException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchIllegalStateException_Test extends EntryPointAssertionsBaseTest { + + private static final IllegalStateException ILLEGAL_STATE_EXCEPTION = new IllegalStateException(); + + @ParameterizedTest + @MethodSource("catchIllegalStateExceptions") + void should_catch_IllegalStateException(Function<ThrowingCallable, IllegalStateException> catchIllegalStateException) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw ILLEGAL_STATE_EXCEPTION; + }; + // WHEN + IllegalStateException throwable = catchIllegalStateException.apply(throwingCallable); + // THEN + then(throwable).isSameAs(ILLEGAL_STATE_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, IllegalStateException>> catchIllegalStateExceptions() { + return Stream.of(Assertions::catchIllegalStateException, BDDAssertions::catchIllegalStateException, withAssertions::catchIllegalStateException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIndexOutOfBoundsException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIndexOutOfBoundsException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchIndexOutOfBoundsException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchIndexOutOfBoundsException_Test extends EntryPointAssertionsBaseTest { + + private static final IndexOutOfBoundsException INDEX_OUT_OF_BOUNDS_EXCEPTION = new IndexOutOfBoundsException(); + + @ParameterizedTest + @MethodSource("catchIndexOutOfBoundsExceptions") + void should_catch_IndexOutOfBoundsException(Function<ThrowingCallable, IndexOutOfBoundsException> catchIndexOutOfBoundsException) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw INDEX_OUT_OF_BOUNDS_EXCEPTION; + }; + // WHEN + IndexOutOfBoundsException throwable = catchIndexOutOfBoundsException.apply(throwingCallable); + // THEN + then(throwable).isSameAs(INDEX_OUT_OF_BOUNDS_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, IndexOutOfBoundsException>> catchIndexOutOfBoundsExceptions() { + return Stream.of(Assertions::catchIndexOutOfBoundsException, BDDAssertions::catchIndexOutOfBoundsException, withAssertions::catchIndexOutOfBoundsException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchNullPointerException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchNullPointerException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchNullPointerException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchNullPointerException_Test extends EntryPointAssertionsBaseTest { + + private static final NullPointerException NULL_POINTER_EXCEPTION = new NullPointerException(); + + @ParameterizedTest + @MethodSource("catchNullPointerExceptions") + void should_catch_NullPointerException(Function<ThrowingCallable, NullPointerException> catchNullPointerException) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw NULL_POINTER_EXCEPTION; + }; + // WHEN + NullPointerException throwable = catchNullPointerException.apply(throwingCallable); + // THEN + then(throwable).isSameAs(NULL_POINTER_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, NullPointerException>> catchNullPointerExceptions() { + return Stream.of(Assertions::catchNullPointerException, BDDAssertions::catchNullPointerException, withAssertions::catchNullPointerException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchReflectiveOperationException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchReflectiveOperationException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchReflectiveOperationException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. 
+ */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchReflectiveOperationException_Test extends EntryPointAssertionsBaseTest { + + private static final ReflectiveOperationException REFLECTIVE_OPERATION_EXCEPTION = new ReflectiveOperationException(); + + @ParameterizedTest + @MethodSource("catchReflectiveOperationExceptions") + void should_catch_ReflectiveOperationException(Function<ThrowingCallable, ReflectiveOperationException> catchReflectiveOperationExceptions) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw REFLECTIVE_OPERATION_EXCEPTION; + }; + // WHEN + ReflectiveOperationException throwable = catchReflectiveOperationExceptions.apply(throwingCallable); + // THEN + then(throwable).isSameAs(REFLECTIVE_OPERATION_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, ReflectiveOperationException>> catchReflectiveOperationExceptions() { + return Stream.of(Assertions::catchReflectiveOperationException, BDDAssertions::catchReflectiveOperationException, withAssertions::catchReflectiveOperationException); + } + +} diff --git a/src/test/java/org/assertj/core/api/EntryPointAssertions_catchRuntimeException_Test.java b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchRuntimeException_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/EntryPointAssertions_catchRuntimeException_Test.java @@ -0,0 +1,45 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.api; + +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.assertj.core.api.BDDAssertions.then; + +class EntryPointAssertions_catchRuntimeException_Test extends EntryPointAssertionsBaseTest { + + private static final RuntimeException RUNTIME_EXCEPTION = new RuntimeException(); + + @ParameterizedTest + @MethodSource("catchRuntimeExceptions") + void should_catch_RuntimeException(Function<ThrowingCallable, RuntimeException> catchRuntimeException) { + // GIVEN + ThrowingCallable throwingCallable = () -> { + throw RUNTIME_EXCEPTION; + }; + // WHEN + RuntimeException throwable = catchRuntimeException.apply(throwingCallable); + // THEN + then(throwable).isSameAs(RUNTIME_EXCEPTION); + } + + private static Stream<Function<ThrowingCallable, RuntimeException>> catchRuntimeExceptions() { + return Stream.of(Assertions::catchRuntimeException, BDDAssertions::catchRuntimeException, withAssertions::catchRuntimeException); + } + +}
`catchThrowableOfType` enriched alternatives #### Summary Similarly to the enriched alternatives of [`assertThatExceptionOfType`](https://assertj.github.io/doc/#assertj-core-exception-assertions-assertThatExceptionOfType), we could introduce enriched versions of `catchThrowableOfType`. #### Example ```java NullPointerException npe = catchNullPointerException( () -> ... ); IllegalArgumentException iae = catchIllegalArgumentException( () -> ... ); IllegalStateException ise = catchIllegalStateException( () -> ... ); IOException ioe = catchIOException( () -> ... ); ```
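For illustration, a minimal sketch contrasting the existing generic `catchThrowableOfType` with the enriched alternatives added by the patch above. The test class name is hypothetical and JUnit 5 is assumed; the `catchNullPointerException` and `catchThrowableOfType` entry points are the ones shown in the patch and existing `Assertions` API.

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchNullPointerException;
import static org.assertj.core.api.Assertions.catchThrowableOfType;

import org.junit.jupiter.api.Test;

class EnrichedCatchAlternativesExample_Test {

  @Test
  void enriched_alternative_returns_an_already_typed_exception() {
    // generic form: the expected exception type must be passed explicitly
    NullPointerException caughtWithGenericForm =
        catchThrowableOfType(() -> { throw new NullPointerException("boom!"); }, NullPointerException.class);

    // enriched form: the type is implied by the method name, no class literal needed
    NullPointerException caughtWithEnrichedForm =
        catchNullPointerException(() -> { throw new NullPointerException("boom!"); });

    // both results are typed, so subtype-specific assertions can be chained directly
    assertThat(caughtWithGenericForm).hasMessage("boom!");
    assertThat(caughtWithEnrichedForm).hasMessage("boom!");
  }
}
```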
I would like to work on this one as my first contribution

Sure, thanks @Spacca!
2021-11-14T09:30:05Z
3.21
assertj/assertj
2,297
assertj__assertj-2297
[ "731" ]
fe1c4630da8a38e378a6baa703191d9860e3df2d
diff --git a/src/main/java/org/assertj/core/api/AbstractAssert.java b/src/main/java/org/assertj/core/api/AbstractAssert.java
--- a/src/main/java/org/assertj/core/api/AbstractAssert.java
+++ b/src/main/java/org/assertj/core/api/AbstractAssert.java
@@ -817,6 +817,37 @@ public SELF matches(Predicate<? super ACTUAL> predicate, String predicateDescrip
 * @throws NullPointerException if given Consumer is null
 */
 public SELF satisfies(Consumer<ACTUAL> requirements) {
+ return internalSatisfies(requirements);
+ }
+
+ /**
+ * Verifies that the actual object satisfies the given requirements expressed as a {@link ThrowingConsumer}.
+ * <p>
+ * This is the same assertion as {@link #satisfies(java.util.function.Consumer)} except that a {@link ThrowingConsumer} rethrows checked exceptions as {@link RuntimeException}.
+ * More precisely, {@link RuntimeException} and {@link AssertionError} are rethrown as they are while any other {@link Throwable} is rethrown as {@link RuntimeException}.
+ * <p>
+ * Example:
+ * <pre><code class='java'> // read() throws IOException
+ * ThrowingConsumer&lt;Reader&gt; hasReachedEOF = reader -&gt; assertThat(reader.read()).isEqualTo(-1);
+ *
+ * // assertion succeeds as the file is empty (note that if hasReachedEOF was declared as Consumer&lt;Reader&gt; the following line would not compile):
+ * assertThat(new FileReader("empty.txt")).satisfies(hasReachedEOF);
+ *
+ * // assertion fails as the file is not empty:
+ * assertThat(new FileReader("nonEmpty.txt")).satisfies(hasReachedEOF);</code></pre>
+ *
+ * @param throwingConsumer requirements to assert on the actual object - must not be null.
+ * @return this assertion object.
+ *
+ * @throws NullPointerException if given {@link ThrowingConsumer} is null
+ * @throws RuntimeException rethrown as is by given {@link ThrowingConsumer} or wrapping any {@link Throwable}.
+ * @throws AssertionError rethrown as is by given {@link ThrowingConsumer}
+ */
+ public SELF satisfies(ThrowingConsumer<ACTUAL> throwingConsumer) {
+ return internalSatisfies(throwingConsumer);
+ }
+
+ private SELF internalSatisfies(Consumer<ACTUAL> requirements) {
 requireNonNull(requirements, "The Consumer<T> expressing the assertions requirements must not be null");
 requirements.accept(actual);
 return myself;
diff --git a/src/main/java/org/assertj/core/api/ThrowingConsumer.java b/src/main/java/org/assertj/core/api/ThrowingConsumer.java
new file mode 100644
--- /dev/null
+++ b/src/main/java/org/assertj/core/api/ThrowingConsumer.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ *
+ * Copyright 2012-2021 the original author or authors.
+ */
+package org.assertj.core.api;
+
+import java.util.function.Consumer;
+
+/**
+ * {@link Consumer} that deals with checked exceptions by rethrowing them as {@link RuntimeException}.
+ * <p>
+ * More precisely, {@link RuntimeException} and {@link AssertionError} are rethrown as they are while any other {@link Throwable} is rethrown as {@link RuntimeException}.
+ * + * @param <T> consumed type + */ +@FunctionalInterface +public interface ThrowingConsumer<T> extends Consumer<T> { + + @Override + default void accept(final T input) { + try { + acceptThrows(input); + } catch (final RuntimeException | AssertionError e) { + throw e; + } catch (Throwable e) { + throw new RuntimeException(e); + } + } + + void acceptThrows(T input) throws Throwable; +}
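To make the wrapping behaviour of the default `accept` method above concrete, here is a small hedged sketch (hypothetical test class name, JUnit 5 assumed) showing a checked `IOException` thrown from `acceptThrows` surfacing as a `RuntimeException`:

```java
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

import java.io.IOException;

import org.assertj.core.api.ThrowingConsumer;
import org.junit.jupiter.api.Test;

class ThrowingConsumerWrappingExample_Test {

  @Test
  void checked_exceptions_are_rethrown_as_RuntimeException() {
    // the lambda targets acceptThrows(T), which is allowed to throw any Throwable
    ThrowingConsumer<String> failingConsumer = value -> { throw new IOException("boom!"); };

    // calling the inherited Consumer#accept goes through the default method above,
    // which wraps the checked IOException into a RuntimeException
    assertThatExceptionOfType(RuntimeException.class).isThrownBy(() -> failingConsumer.accept("any value"))
                                                     .withCauseInstanceOf(IOException.class);
  }
}
```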
diff --git a/src/test/java/org/assertj/core/api/ThrowingConsumer_accept_Test.java b/src/test/java/org/assertj/core/api/ThrowingConsumer_accept_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/ThrowingConsumer_accept_Test.java @@ -0,0 +1,53 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.api; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; + +class ThrowingConsumer_accept_Test { + + @Test + void should_rethrow_checked_exception_as_runtime_exception() { + // GIVEN + IOException ioException = new IOException("boom!"); + ThrowingConsumer<?> throwingConsumer = throwingConsumer(ioException); + // WHEN + Throwable throwable = catchThrowable(() -> throwingConsumer.accept(null)); + // THEN + then(throwable).isExactlyInstanceOf(RuntimeException.class) + .hasCause(ioException); + } + + @Test + void should_rethrow_runtime_exception_as_is() { + // GIVEN + RuntimeException runtimeException = new RuntimeException("boom!"); + ThrowingConsumer<?> throwingConsumer = throwingConsumer(runtimeException); + // WHEN + Throwable throwable = catchThrowable(() -> throwingConsumer.accept(null)); + // THEN + then(throwable).isSameAs(runtimeException); + } + + private static ThrowingConsumer<?> throwingConsumer(Throwable throwable) { + return value -> { + throw throwable; + }; + } + +} diff --git a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_Consumer_Test.java b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_Consumer_Test.java --- a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_Consumer_Test.java +++ b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_Consumer_Test.java @@ -27,13 +27,11 @@ class AbstractAssert_satisfies_with_Consumer_Test { private Jedi yoda; - private Jedi luke; private Consumer<Jedi> jediRequirements; @BeforeEach void setup() { yoda = new Jedi("Yoda", "Green"); - luke = new Jedi("Luke Skywalker", "Green"); jediRequirements = jedi -> { assertThat(jedi.lightSaberColor).as("check light saber").isEqualTo("Green"); assertThat(jedi.getName()).as("check name").doesNotContain("Dark"); @@ -42,13 +40,15 @@ void setup() { @Test void should_satisfy_single_requirement() { - assertThat(yoda).satisfies(jedi -> assertThat(jedi.lightSaberColor).isEqualTo("Green")); + // GIVEN + Consumer<Jedi> jediRequirement = jedi -> assertThat(jedi.lightSaberColor).isEqualTo("Green"); + // WHEN/THEN + then(yoda).satisfies(jediRequirement); } @Test void should_satisfy_multiple_requirements() { assertThat(yoda).satisfies(jediRequirements); - assertThat(luke).satisfies(jediRequirements); } @Test @@ -63,7 +63,9 @@ void should_fail_according_to_requirements() { @Test void should_fail_if_consumer_is_null() { + // GIVEN Consumer<Jedi> nullRequirements = null; + 
// WHEN/THEN assertThatNullPointerException().isThrownBy(() -> assertThat(yoda).satisfies(nullRequirements)) .withMessage("The Consumer<T> expressing the assertions requirements must not be null"); } diff --git a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_ThrowingConsumer_Test.java b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_ThrowingConsumer_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_satisfies_with_ThrowingConsumer_Test.java @@ -0,0 +1,115 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.api.abstract_; + +import static java.nio.file.Files.isReadable; +import static java.nio.file.Files.readAllLines; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.api.BDDAssertions.thenNullPointerException; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; + +import java.nio.file.Path; +import java.nio.file.Paths; + +import org.assertj.core.api.ThrowingConsumer; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class AbstractAssert_satisfies_with_ThrowingConsumer_Test { + + private ThrowingConsumer<Path> isEOF; + + @BeforeEach + void setup() { + isEOF = path -> { + assertThat(isReadable(path)).isTrue(); + // this would not compile if isEOF was declared as Consumer<Path> since it can throw an IOException + assertThat(readAllLines(path)).isEmpty(); + }; + } + + @Test + void should_satisfy_single_requirement() { + // GIVEN + Path emptyFile = Paths.get("src/test/resources/empty.txt"); + ThrowingConsumer<Path> isEmpty = path -> assertThat(readAllLines(path)).isEmpty(); + // WHEN/THEN + then(emptyFile).satisfies(isEmpty); + } + + @Test + void should_satisfy_multiple_requirements() { + // GIVEN + Path emptyFile = Paths.get("src/test/resources/empty.txt"); + // WHEN/THEN + then(emptyFile).satisfies(isEOF); + } + + @Test + void should_fail_according_to_requirements() { + // GIVEN + Path asciiFile = Paths.get("src/test/resources/ascii.txt"); + // WHEN + AssertionError assertionError = expectAssertionError(() -> assertThat(asciiFile).satisfies(isEOF)); + // THEN + then(assertionError).hasMessageContaining("Expecting empty but was: [\"abc\"]"); + } + + @Test + void should_rethrow_throwables_as_runtime_exceptions() { + // GIVEN + Throwable exception = new Throwable("boom!"); + // WHEN + Throwable throwable = catchThrowable(() -> assertThat("foo").satisfies(throwingConsumer(exception))); + // THEN + then(throwable).isInstanceOf(RuntimeException.class) + .hasCauseReference(exception); + } + + @Test + void should_propagate_RuntimeException_as_is() { + // GIVEN + RuntimeException runtimeException = new RuntimeException("boom!"); + // WHEN + Throwable throwable = catchThrowable(() -> 
assertThat("foo").satisfies(throwingConsumer(runtimeException))); + // THEN + then(throwable).isSameAs(runtimeException); + } + + @Test + void should_propagate_AssertionError_as_is() { + // GIVEN + AssertionError assertionError = new AssertionError("boom!"); + // WHEN + Throwable throwable = catchThrowable(() -> assertThat("foo").satisfies(throwingConsumer(assertionError))); + // THEN + then(throwable).isSameAs(assertionError); + } + + @Test + void should_fail_if_throwing_consumer_is_null() { + // GIVEN + ThrowingConsumer<String> nullRequirements = null; + // WHEN/THEN + thenNullPointerException().isThrownBy(() -> assertThat("foo").satisfies(nullRequirements)) + .withMessage("The Consumer<T> expressing the assertions requirements must not be null"); + } + + private static ThrowingConsumer<String> throwingConsumer(Throwable throwable) { + return value -> { + throw throwable; + }; + } +} diff --git a/src/test/resources/empty.txt b/src/test/resources/empty.txt new file mode 100644
Add satisfies accepting a consumer that throws checked exceptions.

Hello, currently it is not possible to write code like this:

```java
assertThat(item).satisfies(i -> {
  assertThat(i.getContent()).isEqualTo("data"); // i.getContent() throws IOException
});
```

Would it be possible to add a satisfies method that can handle a consumer throwing checked exceptions? If an exception is thrown, the requirement would be considered failed.

Thanks, Michał
I believe it is not possible at the AssertJ level; you would need to use something like `LambdaExceptionUtil` as described in http://stackoverflow.com/questions/27644361/how-can-i-throw-checked-exceptions-from-inside-java-8-streams.

The first answer in the link you posted suggests that this is in fact a `java.util.function` API design error. But AssertJ defines its own API, which doesn't extend the Java SDK API, so you could define an interface like this:

```java
import java.util.function.Consumer;

@FunctionalInterface
public interface CheckedConsumer<T> {

  void accept(T value) throws Exception;

  static <T> Consumer<T> toConsumer(CheckedConsumer<T> checkedConsumer) {
    return value -> {
      try {
        checkedConsumer.accept(value);
      } catch (Exception ex) {
        throw new RuntimeException(ex);
      }
    };
  }
}
```

This interface would be used as the satisfies method parameter.

@joel-costigliola I guess a solution would be similar to the one for `extracting` contributed in https://github.com/joel-costigliola/assertj-core/pull/963

Good point @PascalSchumacher, that should be possible indeed.
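For reference, the solution that was eventually merged (see the `ThrowingConsumer_accept_Test` above) follows the same idea: the lambda is allowed to throw, checked exceptions are rethrown wrapped in a `RuntimeException`, and unchecked ones propagate as-is. Below is a minimal, self-contained sketch of that pattern outside AssertJ; the names `ThrowingRequirement`, `acceptThrows` and `asConsumer` are made up for illustration and are not part of the AssertJ API.

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Consumer;

// Illustrative sketch only, not AssertJ's actual ThrowingConsumer implementation.
@FunctionalInterface
interface ThrowingRequirement<T> {

  // the lambda body may throw checked exceptions, e.g. IOException
  void acceptThrows(T value) throws Exception;

  // adapt to a plain Consumer: unchecked exceptions propagate as-is,
  // checked exceptions are wrapped in a RuntimeException
  default Consumer<T> asConsumer() {
    return value -> {
      try {
        acceptThrows(value);
      } catch (RuntimeException e) {
        throw e;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    };
  }
}

class ThrowingRequirementExample {
  public static void main(String[] args) {
    // Files.readAllLines declares IOException, which a plain Consumer<Path> could not call directly
    ThrowingRequirement<Path> isEmptyFile = path -> {
      if (!Files.readAllLines(path).isEmpty()) throw new AssertionError("expected an empty file: " + path);
    };
    isEmptyFile.asConsumer().accept(Path.of("src/test/resources/empty.txt"));
  }
}
```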
2021-07-26T10:30:08Z
3.2
assertj/assertj
2,247
assertj__assertj-2247
[ "2236" ]
e8e221dafb69c88b64bddb8accd497e26c1635ee
diff --git a/src/main/java/org/assertj/core/api/AbstractFileAssert.java b/src/main/java/org/assertj/core/api/AbstractFileAssert.java --- a/src/main/java/org/assertj/core/api/AbstractFileAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractFileAssert.java @@ -19,6 +19,7 @@ import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.file.FileSystem; +import java.nio.file.Path; import java.security.MessageDigest; import java.util.function.Predicate; @@ -685,12 +686,43 @@ public SELF hasExtension(String expected) { * @throws AssertionError if the actual {@code File} does not have the expected name. * * @see java.io.File#getName() name definition. + * @see #hasFileName(String) */ public SELF hasName(String expected) { files.assertHasName(info, actual, expected); return myself; } + /** + * Verifies that the actual {@code File} has given name (alias of {@link #hasName(String)}). + * + * <p> + * Example: + * <pre><code class='java'> File xFile = new File(&quot;somewhere/xFile.java&quot;); + * File xDirectory = new File(&quot;somewhere/xDirectory&quot;); + * + * // assertion will pass + * assertThat(xFile).hasFileName(&quot;xFile.java&quot;); + * assertThat(xDirectory).hasFileName(&quot;xDirectory&quot;); + * + * // assertion will fail + * assertThat(xFile).hasFileName(&quot;xFile&quot;); + * assertThat(xDirectory).hasFileName(&quot;somewhere&quot;);</code></pre> + * + * @param expected the expected {@code File} name. + * @return {@code this} assertion object. + * @throws NullPointerException if the expected name is {@code null}. + * @throws AssertionError if the actual {@code File} is {@code null}. + * @throws AssertionError if the actual {@code File} does not have the expected name. + * + * @see java.io.File#getName() name definition. + * @see #hasName(String) + * @since 3.21.0 + */ + public SELF hasFileName(String expected) { + return hasName(expected); + } + /** * Verifies that the actual {@code File} does not have a parent. *
diff --git a/src/test/java/org/assertj/core/api/file/FileAssert_hasFileName_Test.java b/src/test/java/org/assertj/core/api/file/FileAssert_hasFileName_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/file/FileAssert_hasFileName_Test.java @@ -0,0 +1,33 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.api.file; + +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.FileAssert; +import org.assertj.core.api.FileAssertBaseTest; + +class FileAssert_hasFileName_Test extends FileAssertBaseTest { + + private final String expected = "expected.name"; + + @Override + protected FileAssert invoke_api_method() { + return assertions.hasFileName(expected); + } + + @Override + protected void verify_internal_effects() { + verify(files).assertHasName(getInfo(assertions), getActual(assertions), expected); + } +}
Add `hasFileName` as an alias of `hasName` for `File` assertions

#### Example

```java
assertThat(new File("file.txt")).hasFileName("file.txt");
```

Related to #2205.
Hey, can I pick this up?

Sure @ShivakumarSwamy, go for it!
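For context, a short usage sketch of the new alias next to the existing `hasName` assertion, mirroring the Javadoc examples from the patch above; the file paths are illustrative, and neither assertion should require the file to exist since both only check `File#getName()`.

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.io.File;

class HasFileNameExample {
  public static void main(String[] args) {
    File xFile = new File("somewhere/xFile.java");
    File xDirectory = new File("somewhere/xDirectory");

    // hasFileName is a plain alias of hasName, so these are equivalent
    assertThat(xFile).hasName("xFile.java");
    assertThat(xFile).hasFileName("xFile.java");
    assertThat(xDirectory).hasFileName("xDirectory");
  }
}
```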
2021-06-05T17:57:43Z
3.2
assertj/assertj
2,200
assertj__assertj-2200
[ "2199" ]
81ed66f4710138d30e4a4b5ab83ef2989e7f39dd
diff --git a/src/main/java/org/assertj/core/api/Descriptable.java b/src/main/java/org/assertj/core/api/Descriptable.java --- a/src/main/java/org/assertj/core/api/Descriptable.java +++ b/src/main/java/org/assertj/core/api/Descriptable.java @@ -46,7 +46,7 @@ public interface Descriptable<SELF> { * } catch (AssertionError e) { * assertThat(e).hasMessage(&quot;[check Frodo's age]\n * expected: 33\n - * but was : 50&quot;); + * but was: 50&quot;); * }</code></pre> * * @param description the new description to set. @@ -82,7 +82,7 @@ default SELF as(String description, Object... args) { * { * assertThat(e).hasMessage(&quot;[check Frodo's age]\n * expected: 33\n - * but was : 50&quot;); + * but was: 50&quot;); * }</code></pre> * * @param descriptionSupplier the description {@link Supplier}. diff --git a/src/main/java/org/assertj/core/error/ShouldBeEqual.java b/src/main/java/org/assertj/core/error/ShouldBeEqual.java --- a/src/main/java/org/assertj/core/error/ShouldBeEqual.java +++ b/src/main/java/org/assertj/core/error/ShouldBeEqual.java @@ -41,7 +41,7 @@ */ public class ShouldBeEqual implements AssertionErrorFactory { - private static final String EXPECTED_BUT_WAS_MESSAGE = "%nexpected: %s%nbut was : %s"; + private static final String EXPECTED_BUT_WAS_MESSAGE = "%nexpected: %s%n but was: %s"; private static final String EXPECTED_BUT_WAS_MESSAGE_USING_COMPARATOR = EXPECTED_BUT_WAS_MESSAGE + "%n%s"; private static final Class<?>[] MSG_ARG_TYPES = array(String.class, String.class, String.class); private static final Class<?>[] MSG_ARG_TYPES_FOR_ASSERTION_FAILED_ERROR = array(String.class, Object.class, diff --git a/src/main/java/org/assertj/core/error/ShouldBeEqualIgnoringCase.java b/src/main/java/org/assertj/core/error/ShouldBeEqualIgnoringCase.java --- a/src/main/java/org/assertj/core/error/ShouldBeEqualIgnoringCase.java +++ b/src/main/java/org/assertj/core/error/ShouldBeEqualIgnoringCase.java @@ -31,6 +31,6 @@ public static ErrorMessageFactory shouldBeEqual(CharSequence actual, CharSequenc } private ShouldBeEqualIgnoringCase(CharSequence actual, CharSequence expected) { - super("%nexpected: %s%nbut was : %s%nignoring case considerations", expected, actual); + super("%nexpected: %s%n but was: %s%nignoring case considerations", expected, actual); } }
diff --git a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_isInstanceOfSatisfying_Test.java b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_isInstanceOfSatisfying_Test.java --- a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_isInstanceOfSatisfying_Test.java +++ b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_isInstanceOfSatisfying_Test.java @@ -78,7 +78,7 @@ void should_fail_according_to_requirements() { // THEN then(assertionError).hasMessage(format("[check light saber] %n" + "expected: \"Green\"%n" + - "but was : \"Red\"")); + " but was: \"Red\"")); } @Test diff --git a/src/test/java/org/assertj/core/api/atomic/reference/AtomicReferenceAssert_hasValueSatisfying_Test.java b/src/test/java/org/assertj/core/api/atomic/reference/AtomicReferenceAssert_hasValueSatisfying_Test.java --- a/src/test/java/org/assertj/core/api/atomic/reference/AtomicReferenceAssert_hasValueSatisfying_Test.java +++ b/src/test/java/org/assertj/core/api/atomic/reference/AtomicReferenceAssert_hasValueSatisfying_Test.java @@ -44,7 +44,7 @@ void should_fail_when_actual_has_value_which_does_not_satisfy_given_requirements AssertionError error = expectAssertionError(() -> assertThat(actual).hasValueSatisfying(value -> assertThat(value).isEqualToIgnoringCase(expectedValue))); // THEN then(error).hasMessageContainingAll("expected: \"bar\"", - "but was : \"foo\""); + " but was: \"foo\""); } @Test diff --git a/src/test/java/org/assertj/core/error/ShouldBeEqual_Test.java b/src/test/java/org/assertj/core/error/ShouldBeEqual_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeEqual_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeEqual_Test.java @@ -39,7 +39,7 @@ void should_display_comparison_strategy_in_error_message() { then(error.getExpected().getValue()).isEqualTo(STANDARD_REPRESENTATION.toStringOf(expected)); then(error).hasMessage(format("[Jedi] %n" + "expected: \"Yoda\"%n" + - "but was : \"Luke\"%n" + + " but was: \"Luke\"%n" + "when comparing values using CaseInsensitiveStringComparator")); } @@ -56,7 +56,7 @@ void should_use_actual_and_expected_representation_in_AssertionFailedError_actua then(error.getExpected().getValue()).isEqualTo("[1, 2, 4]"); then(error).hasMessage(format("[numbers] %n" + "expected: [1, 2, 4]%n" + - "but was : [1, 2, 3]")); + " but was: [1, 2, 3]")); } } diff --git a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_Test.java b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_Test.java @@ -59,7 +59,7 @@ void should_create_AssertionFailedError_if_JUnit5_is_present_and_trim_spaces_in_ then(error).isInstanceOf(AssertionFailedError.class) .hasMessage(format("[Jedi] %n" + "expected: \"Yoda\"%n" + - "but was : \"Luke\"")); + " but was: \"Luke\"")); } public static Stream<String> parameters() { diff --git a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_differentiating_expected_and_actual_Test.java @@ -61,7 +61,7 @@ void 
should_create_AssertionError_with_message_differentiating_expected_double_a then(error).isInstanceOf(AssertionFailedError.class) .hasMessage(format("[my test] %n" + "expected: 42.0%n" + - "but was : 42.0f")); + " but was: 42.0f")); } @Test @@ -78,7 +78,7 @@ void should_create_AssertionError_with_message_differentiating_expected_and_actu then(error).isInstanceOf(AssertionFailedError.class) .hasMessage("[my test] %n" + "expected: \"Person[name=Jake] (Person@%s)\"%n" + - "but was : \"Person[name=Jake] (Person@%s)\"", + " but was: \"Person[name=Jake] (Person@%s)\"", toHexString(expected.hashCode()), toHexString(actual.hashCode())); } @@ -97,7 +97,7 @@ void should_create_AssertionError_with_message_differentiating_expected_and_actu then(error).isInstanceOf(AssertionFailedError.class) .hasMessage("[my test] %n" + "expected: \"Person[name=Jake] (Person@%s)\"%n" + - "but was : \"Person[name=Jake] (Person@%s)\"%n" + + " but was: \"Person[name=Jake] (Person@%s)\"%n" + "when comparing values using PersonComparator", toHexString(expected.hashCode()), toHexString(actual.hashCode())); } @@ -116,7 +116,7 @@ void should_create_AssertionError_with_message_differentiating_null_and_object_w then(error).isInstanceOf(AssertionFailedError.class) .hasMessage("[my test] %n" + "expected: \"null (ToStringIsNull@%s)\"%n" + - "but was : null", + " but was: null", toHexString(expected.hashCode())); } @@ -134,7 +134,7 @@ void should_create_AssertionError_with_message_differentiating_object_with_null_ then(error).isInstanceOf(AssertionFailedError.class) .hasMessage("[my test] %n" + "expected: null%n" + - "but was : \"null (ToStringIsNull@%s)\"", + " but was: \"null (ToStringIsNull@%s)\"", toHexString(actual.hashCode())); } diff --git a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_Test.java b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_Test.java @@ -75,7 +75,7 @@ private void check(AssertionError error) throws Exception { new Class<?>[] { String.class, Object.class, Object.class }, format("[Jedi] %n" + "expected: \"Yoda\"%n" + - "but was : \"Luke\""), + " but was: \"Luke\""), STANDARD_REPRESENTATION.toStringOf("Yoda"), STANDARD_REPRESENTATION.toStringOf("Luke")); assertThat(error).isNotInstanceOf(ComparisonFailure.class) @@ -85,7 +85,7 @@ private void check(AssertionError error) throws Exception { assertThat(assertionFailedError.getExpected().getValue()).isEqualTo(STANDARD_REPRESENTATION.toStringOf("Yoda")); assertThat(error).hasMessage(format("[Jedi] %n" + "expected: \"Yoda\"%n" + - "but was : \"Luke\"")); + " but was: \"Luke\"")); } private static Object createComparisonFailure(ConstructorInvoker invoker) throws Exception { diff --git a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_and_OTA4J_Test.java b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_and_OTA4J_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_and_OTA4J_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeEqual_newAssertionError_without_JUnit_and_OTA4J_Test.java @@ -79,7 +79,7 @@ private void check(AssertionError error) throws Exception { array(String.class, Object.class, Object.class), format("[Jedi] %n" + "expected: \"Yoda\"%n" + - "but was : \"Luke\""), 
+ " but was: \"Luke\""), STANDARD_REPRESENTATION.toStringOf("Yoda"), STANDARD_REPRESENTATION.toStringOf("Luke")); verify(constructorInvoker).newInstance(ComparisonFailure.class.getName(), @@ -90,6 +90,6 @@ private void check(AssertionError error) throws Exception { assertThat(error).isNotInstanceOfAny(ComparisonFailure.class, AssertionFailedError.class) .hasMessage(format("[Jedi] %n" + "expected: \"Yoda\"%n" + - "but was : \"Luke\"")); + " but was: \"Luke\"")); } } diff --git a/src/test/java/org/assertj/core/matcher/AssertionMatcher_matches_Test.java b/src/test/java/org/assertj/core/matcher/AssertionMatcher_matches_Test.java --- a/src/test/java/org/assertj/core/matcher/AssertionMatcher_matches_Test.java +++ b/src/test/java/org/assertj/core/matcher/AssertionMatcher_matches_Test.java @@ -94,7 +94,7 @@ void matcher_should_fill_description_when_assertion_fails() { verify(description).appendText(argThat(new ArgumentMatcher<String>() { @Override public boolean matches(String s) { - return s.contains(format("%nexpected: 0%nbut was : 1")) + return s.contains(format("%nexpected: 0%n but was: 1")) && s.contains("at org.assertj.core.matcher.AssertionMatcher_matches_Test$1.assertion(AssertionMatcher_matches_Test.java:") && s.contains("at org.assertj.core.matcher.AssertionMatcher.matches(AssertionMatcher.java:") && s.contains("at org.assertj.core.matcher.AssertionMatcher_matches_Test.matcher_should_fill_description_when_assertion_fails(AssertionMatcher_matches_Test.java:"); diff --git a/src/test/java/org/assertj/core/test/ErrorMessagesForTest.java b/src/test/java/org/assertj/core/test/ErrorMessagesForTest.java --- a/src/test/java/org/assertj/core/test/ErrorMessagesForTest.java +++ b/src/test/java/org/assertj/core/test/ErrorMessagesForTest.java @@ -17,11 +17,11 @@ public class ErrorMessagesForTest { public static String shouldBeEqualMessage(String actual, String expected) { - return format("%nexpected: " + expected + "%nbut was : " + actual); + return format("%nexpected: " + expected + "%n but was: " + actual); } public static String shouldBeEqualMessage(String description, String actual, String expected) { - return format("[" + description + "] %nexpected: " + expected + "%nbut was : " + actual); + return format("[" + description + "] %nexpected: " + expected + "%n but was: " + actual); } }
Remove space in default error message for ShouldBeEqual to help IntelliJ diff detection

Referencing this StackOverflow ticket: https://stackoverflow.com/questions/10934743/formatting-output-so-that-intellij-idea-shows-diffs-for-two-texts

It seems that IntelliJ expects assertion errors to be in this format in order to render a nice diff: `expected: xxx but was: yyy`

But AssertJ formats the message with an extra space after the `was`: `expected: xxx but was : yyy`

This makes the message fail IntelliJ's pattern matching, so IntelliJ falls back to just printing the raw error string. I tried patching my AssertJ `ShouldBeEqual` locally without that extra space, and IntelliJ was then able to detect it properly.

Would it be possible to remove the extraneous space after the `was` so that the test output can automatically be detected as a diff? Thanks!

Link to message: https://github.com/assertj/assertj-core/blob/main/src/main/java/org/assertj/core/error/ShouldBeEqual.java#L44
We did that to get the values aligned, but I guess we can remove that space for IntelliJ IDEA:

```
expected:
but was :
```

@rpeng could we do that instead?

```
expected:
 but was:
```

@scordio did you have that issue? Can you try that second proposal to see if it renders properly?

Just tried it out with the second proposal (leading space) and it works in IntelliJ! For reference, the detectable patterns are here: https://github.com/17712484466/intellij-community/blob/master/plugins/testng/src/com/theoryinpractice/testng/model/TestProxy.java#L48

Thanks @rpeng! Let's do that then. Do you want to contribute it?
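The fix itself is just the template change in `ShouldBeEqual` shown in the patch above. A small standalone illustration of the before/after rendering; the constants below simply mirror the two templates from the diff.

```java
public class ExpectedButWasTemplates {

  // old template: the extra space after "was" breaks IntelliJ's "expected: ... but was: ..." detection
  static final String BEFORE = "%nexpected: %s%nbut was : %s";
  // new template: a leading space keeps the values aligned while matching the detectable pattern
  static final String AFTER = "%nexpected: %s%n but was: %s";

  public static void main(String[] args) {
    System.out.println("before:" + String.format(BEFORE, "\"Yoda\"", "\"Luke\""));
    System.out.println("after:" + String.format(AFTER, "\"Yoda\"", "\"Luke\""));
  }
}
```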
2021-05-06T02:12:01Z
3.19
assertj/assertj
2,193
assertj__assertj-2193
[ "2103" ]
5dc550b80937ab3ff41645de7036e97fa0097f38
diff --git a/src/main/java/org/assertj/core/condition/MappedCondition.java b/src/main/java/org/assertj/core/condition/MappedCondition.java new file mode 100644 --- /dev/null +++ b/src/main/java/org/assertj/core/condition/MappedCondition.java @@ -0,0 +1,125 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.condition; + +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; + +import java.util.function.Function; + +import org.assertj.core.annotations.Beta; +import org.assertj.core.api.Condition; + +/** + * Container-<code>{@link Condition}</code> that does a mapping and then uses nested + * <code>{@link Condition}</code> to test the mapped actual value. + * + * <pre><code class='java'> Condition&lt;String&gt; hasLineSeparator = new Condition&lt;&gt;(t -&gt; t.contains(System.lineSeparator()), "has lineSeparator"); + * + * Condition&lt;Optional&lt;String&gt;&gt; optionalHasLineSeparator = MappedCondition.mappedCondition(Optional::get, hasLineSeparator, "optional value has lineSeparator"); + * + * // returns true + * optionalHasLineSeparator.matches(Optional.of("a" + System.lineSeparator())); + * + * // returns false + * optionalHasLineSeparator.matches(Optional.of("a"));</code></pre> + * + * @param <FROM> the type of object this condition accepts. + * @param <TO> the type of object the nested condition accepts. + * + * @author Stefan Bischof + */ +@Beta +public class MappedCondition<FROM, TO> extends Condition<FROM> { + + private Condition<TO> condition; + private Function<FROM, TO> mapping; + private String mappingDescription; + + /** + * Creates a new <code>{@link MappedCondition}</code>. + * <p> + * Note that the mappingDescription argument follows {@link String#format(String, Object...)} syntax. + * + * @param <FROM> the type of object the given condition accept. + * @param <TO> the type of object the nested condition accept. + * @param mapping the Function that maps the value to test to the a value for the nested condition. + * @param condition the nested condition to evaluate. + * @param mappingDescription describes the mapping, follows {@link String#format(String, Object...)} syntax. + * @param args for describing the mapping as in {@link String#format(String, Object...)} syntax. + * @return the created {@code MappedCondition}. + * @throws NullPointerException if the given condition is {@code null}. + * @throws NullPointerException if the given mapping is {@code null}. + */ + public static <FROM, TO> MappedCondition<FROM, TO> mappedCondition(Function<FROM, TO> mapping, Condition<TO> condition, + String mappingDescription, Object... args) { + requireNonNull(mappingDescription, "The given mappingDescription should not be null"); + return new MappedCondition<>(mapping, condition, format(mappingDescription, args)); + } + + /** + * Creates a new <code>{@link MappedCondition}</code> + * + * @param <FROM> the type of object the given condition accept. 
+ * @param <TO> the type of object the nested condition accept. + * @param mapping the Function that maps the value to test to the a value for the nested condition. + * @param condition the nested condition to evaluate. + * @return the created {@code MappedCondition}. + * @throws NullPointerException if the given condition is {@code null}. + * @throws NullPointerException if the given mapping is {@code null}. + */ + public static <FROM, TO> MappedCondition<FROM, TO> mappedCondition(Function<FROM, TO> mapping, Condition<TO> condition) { + return mappedCondition(mapping, condition, ""); + } + + private MappedCondition(Function<FROM, TO> mapping, Condition<TO> condition, String mappingDescription) { + requireNonNull(condition, "The given condition should not be null"); + requireNonNull(mapping, "The given mapping function should not be null"); + this.mapping = mapping; + this.mappingDescription = mappingDescription; + this.condition = condition; + } + + /** + * Maps the value with the given function and verifies that it satisfies the nested <code>{@link Condition}</code>. + * + * @param value the value to map + * @return {@code true} if the given mapped value satisfies the nested condition; {@code false} otherwise. + */ + @Override + public boolean matches(FROM value) { + TO mappedObject = mapping.apply(value); + String desc = buildMappingDescription(value, mappedObject); + describedAs(desc); + return condition.matches(mappedObject); + } + + /** + * Build the mapped condition description when applied with the FROM and TO values. + * + * @param from the value to map + * @param to the mapped value + * @return the mapped condition description . + */ + protected String buildMappingDescription(FROM from, TO to) { + StringBuilder sb = new StringBuilder(); + sb.append("mapped"); + if (!mappingDescription.isEmpty()) sb.append(format("%n using: %s", mappingDescription)); + sb.append(format("%n from: <%s> %s%n", from.getClass().getSimpleName(), from)); + sb.append(format(" to: <%s> %s%n", to.getClass().getSimpleName(), from, to)); + sb.append(" then checked:"); + sb.append(format("%n %-10s", condition)); + return sb.toString(); + } + +}
diff --git a/src/test/java/org/assertj/core/condition/MappedConditionTest.java b/src/test/java/org/assertj/core/condition/MappedConditionTest.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/condition/MappedConditionTest.java @@ -0,0 +1,126 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2021 the original author or authors. + */ +package org.assertj.core.condition; + +import static java.lang.String.format; +import static java.lang.System.lineSeparator; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.api.BDDAssertions.thenNullPointerException; +import static org.assertj.core.condition.MappedCondition.mappedCondition; + +import java.util.Optional; + +import org.assertj.core.api.Condition; +import org.junit.jupiter.api.Test; + +class MappedConditionTest { + + private static final String INNER_CONDITION_DESCRIPTION = "isString and BAR"; + + private static final String BAR = "bar"; + + private static final String FOO = "foo"; + + private final static Condition<String> isBarString = new Condition<>(s -> BAR.equals(s), INNER_CONDITION_DESCRIPTION); + + private final static String BAR_CONDITION_DESCRIPTION = format("mapped%n" + + " using: ::toString%n" + + " from: <StringBuilder> " + BAR + "%n" + + " to: <String> " + BAR + "%n" + + " then checked:%n" + + " " + INNER_CONDITION_DESCRIPTION); + + private final static String BAR_CONDITION_DESCRIPTION_PLAIN = format("mapped%n" + + " from: <StringBuilder> " + BAR + "%n" + + " to: <String> " + BAR + "%n" + + " then checked:%n" + + " " + INNER_CONDITION_DESCRIPTION); + + private final static String FOO_CONDITION_DESCRIPTION = format("mapped%n" + + " using: ::toString%n" + + " from: <StringBuilder> " + FOO + "%n" + + " to: <String> " + FOO + "%n" + + " then checked:%n" + + " " + INNER_CONDITION_DESCRIPTION); + + @Test + void mappedCondition_withDescription_works() { + // WHEN + Condition<StringBuilder> mappedCondition = mappedCondition(StringBuilder::toString, isBarString, "%stoString", "::"); + // THEN + then(mappedCondition.matches(new StringBuilder(BAR))).isTrue(); + then(mappedCondition).hasToString(BAR_CONDITION_DESCRIPTION); + then(mappedCondition.matches(new StringBuilder(FOO))).isFalse(); + then(mappedCondition).hasToString(FOO_CONDITION_DESCRIPTION); + } + + @Test + void mappedCondition_withoutDescription_works() { + // WHEN + Condition<StringBuilder> mappedCondition = mappedCondition(StringBuilder::toString, isBarString); + // THEN + then(mappedCondition.matches(new StringBuilder(BAR))).isTrue(); + then(mappedCondition).hasToString(BAR_CONDITION_DESCRIPTION_PLAIN); + } + + @Test + void mappedCondition_with_description_and_null_condition_should_throw_NPE() { + // GIVEN + Condition<String> nullCondition = null; + // WHEN/THEN + thenNullPointerException().isThrownBy(() -> mappedCondition(StringBuilder::toString, nullCondition, "::toString")) + .withMessage("The given condition should not be null"); + } + + @Test + void 
mappedCondition_with_description_and_null_mapping_function_should_throw_NPE() { + thenNullPointerException().isThrownBy(() -> mappedCondition(null, isBarString, "::toString")) + .withMessage("The given mapping function should not be null"); + } + + @Test + void mappedCondition_without_description_and_null_condition_should_throw_NPE() { + // GIVEN + Condition<String> nullCondition = null; + // WHEN/THEN + thenNullPointerException().isThrownBy(() -> mappedCondition(StringBuilder::toString, nullCondition)) + .withMessage("The given condition should not be null"); + } + + @Test + void mappedCondition_without_description_and_null_mapping_function_should_throw_NPE() { + thenNullPointerException().isThrownBy(() -> mappedCondition(null, isBarString)) + .withMessage("The given mapping function should not be null"); + } + + @Test + void mappedCondition_with_null_description_and_should_throw_NPE() { + // GIVEN + String nullDescription = null; + // WHEN/THEN + thenNullPointerException().isThrownBy(() -> mappedCondition(StringBuilder::toString, isBarString, nullDescription)) + .withMessage("The given mappingDescription should not be null"); + } + + @Test + void example() { + // GIVEN + Condition<String> hasLineSeparator = new Condition<>(text -> text.contains(lineSeparator()), "has lineSeparator"); + Optional<String> optionalString = Optional.of("a" + lineSeparator()); + // WHEN + Condition<Optional<String>> mappedCondition = mappedCondition(Optional<String>::get, hasLineSeparator); + boolean matches = mappedCondition.matches(optionalString); + // THEN + then(matches).isTrue(); + } +}
Mapped-Condition **Mapped**: a Condition that applies a mapping to the actual value and then tests the mapped value with a nested Condition.
Is there anything I can do to get this PR into the next release?

I'll try to have a look; it is not so clear what problem you are trying to solve, an example would be welcome.

Thank you, the main goal is to have a container Condition that does a mapping and then uses nested Conditions to test the mapped actual value. Example: https://github.com/osgi/osgi-test/blob/5598f37b2aac45f6e6eb744310587198d39f445b/org.osgi.test.assertj.framework/src/main/java/org/osgi/test/assertj/conditions/Conditions.java#L937-L966

There are several more in the file.

Thanks, I'll put it back into the next release then.
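For readers skimming the patch, here is a compact usage sketch adapted from the Javadoc example above; the `Optional` value and the description string are illustrative.

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.condition.MappedCondition.mappedCondition;

import java.util.Optional;

import org.assertj.core.api.Condition;

class MappedConditionExample {
  public static void main(String[] args) {
    // nested condition evaluated against the mapped (String) value
    Condition<String> hasLineSeparator =
        new Condition<>(text -> text.contains(System.lineSeparator()), "has lineSeparator");

    // map Optional<String> -> String with Optional::get, then check the nested condition
    Condition<Optional<String>> optionalHasLineSeparator =
        mappedCondition(Optional<String>::get, hasLineSeparator, "Optional::get");

    assertThat(Optional.of("a" + System.lineSeparator())).is(optionalHasLineSeparator);
  }
}
```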
2021-04-30T11:29:02Z
3.19
assertj/assertj
2,042
assertj__assertj-2042
[ "1948" ]
9bf0d577a30e2bd7f1abe6ba1259b9f7404443a8
diff --git a/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java b/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java --- a/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractLongArrayAssert.java @@ -12,6 +12,10 @@ */ package org.assertj.core.api; +import static java.util.Objects.requireNonNull; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; + +import java.util.Arrays; import java.util.Comparator; import org.assertj.core.data.Index; @@ -21,7 +25,7 @@ import org.assertj.core.util.VisibleForTesting; public abstract class AbstractLongArrayAssert<SELF extends AbstractLongArrayAssert<SELF>> - extends AbstractArrayAssert<SELF, long[], Long> { + extends AbstractArrayAssert<SELF, long[], Long> { @VisibleForTesting protected LongArrays arrays = LongArrays.instance(); @@ -198,6 +202,33 @@ public SELF contains(long... values) { return myself; } + /** + * Verifies that the actual array contains the values of the given array, in any order. + * <p> + * Example: + * <pre><code class='java'> // assertions will pass + * assertThat(new long[] { 1L, 2L, 3L }).contains(new Long[] { 1L, 2L }); + * assertThat(new long[] { 1L, 2L, 3L }).contains(new Long[] { 3L, 1L }); + * assertThat(new long[] { 1L, 2L, 3L }).contains(new Long[] { 1L, 3L, 2L }); + * + * // assertions will fail + * assertThat(new long[] { 1L, 2L, 3L }).contains(new Long[] { 1L, 4L }); + * assertThat(new long[] { 1L, 2L, 3L }).contains(new Long[] { 4L, 7L });</code></pre> + * + * @param values the given {@code Long} array of values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty array. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the actual array does not contain the given values. + * @since 3.19.0 + */ + public SELF contains(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertContains(info, actual, toPrimitiveLongArray(values)); + return myself; + } + /** * Verifies that the actual array contains only the given values and nothing else, in any order. * <p> @@ -224,6 +255,34 @@ public SELF containsOnly(long... values) { return myself; } + /** + * Verifies that the actual array contains only the values of the given array and nothing else, in any order. + * <p> + * Example: + * <pre><code class='java'> // assertions will pass + * assertThat(new long[] { 1L, 2L, 3L }).containsOnly(new Long[] { 1L, 2L, 3L }); + * assertThat(new long[] { 1L, 2L, 3L }).containsOnly(new Long[] { 2L, 3L, 1L }); + * assertThat(new long[] { 1L, 1L, 2L }).containsOnly(new Long[] { 1L, 2L }); + * + * // assertions will fail + * assertThat(new long[] { 1L, 2L, 3L }).containsOnly(new Long[] { 1L, 2L, 3L, 4L }); + * assertThat(new long[] { 1L, 2L, 3L }).containsOnly(new Long[] { 4L, 7L });</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty array. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the actual array does not contain the given values, i.e. the actual array contains some + * or none of the given values, or the actual array contains more values than the given ones. 
+ * @since 3.19.0 + */ + public SELF containsOnly(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertContainsOnly(info, actual, toPrimitiveLongArray(values)); + return myself; + } + /** * Verifies that the actual array contains the given values only once. * <p> @@ -249,6 +308,33 @@ public SELF containsOnlyOnce(long... values) { return myself; } + /** + * Verifies that the actual array contains the values of the given array only once. + * <p> + * Examples : + * <pre><code class='java'> // assertion will pass + * assertThat(new long[] { 1, 2, 3 }).containsOnlyOnce(new Long[] { 1, 2 }); + * + * // assertions will fail + * assertThat(new long[] { 1, 2, 1 }).containsOnlyOnce(new Long[] { 1 }); + * assertThat(new long[] { 1, 2, 3 }).containsOnlyOnce(new Long[] { 4 }); + * assertThat(new long[] { 1, 2, 3, 3 }).containsOnlyOnce(new Long[] { 0, 1, 2, 3, 4, 5 });</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty array. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the actual group does not contain the given values, i.e. the actual group contains some + * or none of the given values, or the actual group contains more than once these values. + * @since 3.19.0 + */ + public SELF containsOnlyOnce(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertContainsOnlyOnce(info, actual, toPrimitiveLongArray(values)); + return myself; + } + /** * Verifies that the actual array contains the given sequence, without any other values between them. * <p> @@ -271,6 +357,30 @@ public SELF containsSequence(long... sequence) { return myself; } + /** + * Verifies that the actual array contains the given sequence, without any other values between them. + * <p> + * Example: + * <pre><code class='java'> // assertion will pass + * assertThat(new long[] { 1, 2, 3 }).containsSequence(new Long[] { 1, 2 }); + * + * // assertion will fail + * assertThat(new long[] { 1, 2, 3 }).containsSequence(new Long[] { 1, 3 }); + * assertThat(new long[] { 1, 2, 3 }).containsSequence(new Long[] { 2, 1 });</code></pre> + * + * @param sequence the sequence of values to look for. + * @return myself assertion object. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the given array is {@code null}. + * @throws AssertionError if the actual array does not contain the given sequence. + * @since 3.19.0 + */ + public SELF containsSequence(Long[] sequence) { + requireNonNullParameter(sequence, "sequence"); + arrays.assertContainsSequence(info, actual, toPrimitiveLongArray(sequence)); + return myself; + } + /** * Verifies that the actual array contains the given subsequence (possibly with other values between them). * <p> @@ -293,6 +403,30 @@ public SELF containsSubsequence(long... subsequence) { return myself; } + /** + * Verifies that the actual array contains the given subsequence (possibly with other values between them). 
+ * <p> + * Example: + * <pre><code class='java'> // assertion will pass + * assertThat(new long[] { 1, 2, 3 }).containsSubsequence(new Long[] { 1, 2 }); + * assertThat(new long[] { 1, 2, 3 }).containsSubsequence(new Long[] { 1, 3 }); + * + * // assertion will fail + * assertThat(new long[] { 1, 2, 3 }).containsSubsequence(new Long[] { 2, 1 });</code></pre> + * + * @param subsequence the subsequence of values to look for. + * @return myself assertion object. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the given array is {@code null}. + * @throws AssertionError if the actual array does not contain the given subsequence. + * @since 3.19.0 + */ + public SELF containsSubsequence(Long[] subsequence) { + requireNonNullParameter(subsequence, "subsequence"); + arrays.assertContainsSubsequence(info, actual, toPrimitiveLongArray(subsequence)); + return myself; + } + /** * Verifies that the actual array contains the given value at the given index. * <p> @@ -341,6 +475,30 @@ public SELF doesNotContain(long... values) { return myself; } + /** + * Verifies that the actual array does not contain the values of the given array. + * <p> + * Example: + * <pre><code class='java'> // assertion will pass + * assertThat(new long[] { 1L, 2L, 3L }).doesNotContain(new Long[] { 4L }); + * + * // assertion will fail + * assertThat(new long[] { 1L, 2L, 3L }).doesNotContain(new Long[] { 2L });</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty array. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the actual array contains any of the given values. + * @since 3.19.0 + */ + public SELF doesNotContain(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertDoesNotContain(info, actual, toPrimitiveLongArray(values)); + return myself; + } + /** * Verifies that the actual array does not contain the given value at the given index. * <p> @@ -408,6 +566,32 @@ public SELF startsWith(long... sequence) { return myself; } + /** + * Verifies that the actual array starts with the given sequence of values, without any other values between them. + * Similar to <code>{@link #containsSequence(Long[])}</code>, but it also verifies that the first element in the + * sequence is also first element of the actual array. + * <p> + * Example: + * <pre><code class='java'> // assertion will pass + * assertThat(new long[] { 1L, 2L, 3L }).startsWith(new Long[] { 1L, 2L }); + * + * // assertion will fail + * assertThat(new long[] { 1L, 2L, 3L }).startsWith(new Long[] { 2L, 3L });</code></pre> + * + * @param sequence the sequence of values to look for. + * @return myself assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty array. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the actual array does not start with the given sequence. + * @since 3.19.0 + */ + public SELF startsWith(Long[] sequence) { + requireNonNullParameter(sequence, "sequence"); + arrays.assertStartsWith(info, actual, toPrimitiveLongArray(sequence)); + return myself; + } + /** * Verifies that the actual array ends with the given sequence of values, without any other values between them. 
* Similar to <code>{@link #containsSequence(long...)}</code>, but it also verifies that the last element in the @@ -432,6 +616,32 @@ public SELF endsWith(long... sequence) { return myself; } + /** + * Verifies that the actual array ends with the given sequence of values, without any other values between them. + * Similar to <code>{@link #containsSequence(Long[])}</code>, but it also verifies that the last element in the + * sequence is also last element of the actual array. + * <p> + * Example: + * <pre><code class='java'> // assertion will pass + * assertThat(new long[] { 1L, 2L, 3L }).endsWith(new Long[] { 2L, 3L }); + * + * // assertion will fail + * assertThat(new long[] { 1L, 2L, 3L }).endsWith(new Long[] { 3L, 4L });</code></pre> + * + * @param sequence the sequence of values to look for. + * @return myself assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty array. + * @throws AssertionError if the actual array is {@code null}. + * @throws AssertionError if the actual array does not end with the given sequence. + * @since 3.19.0 + */ + public SELF endsWith(Long[] sequence) { + requireNonNullParameter(sequence, "sequence"); + arrays.assertEndsWith(info, actual, toPrimitiveLongArray(sequence)); + return myself; + } + /** {@inheritDoc} */ @Override public SELF isSorted() { @@ -487,6 +697,33 @@ public SELF containsExactly(long... values) { return myself; } + /** + * Verifies that the actual group contains only the values of the given array and nothing else, <b>in order</b>. + * <p> + * Example : + * <pre><code class='java'> long[] longs = { 1, 2, 3 }; + * + * // assertion will pass + * assertThat(longs).containsExactly(new Long[] { 1, 2, 3 }); + * + * // assertion will fail as actual and expected order differ + * assertThat(longs).containsExactly(new Long[] { 2, 1, 3 });</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + * @since 3.19.0 + */ + public SELF containsExactly(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertContainsExactly(info, actual, toPrimitiveLongArray(values)); + return myself; + } + /** * Verifies that the actual group contains exactly the given values and nothing else, <b>in any order</b>.<br> * <p> @@ -513,6 +750,33 @@ public SELF containsExactlyInAnyOrder(long... 
values) { return myself; } + /** + * Verifies that the actual group contains exactly the values of the given array and nothing else, <b>in any order</b>.<br> + * <p> + * Example : + * <pre><code class='java'> // assertions will pass + * assertThat(new long[] { 1L, 2L }).containsExactlyInAnyOrder(new Long[] { 1L, 2L }); + * assertThat(new long[] { 1L, 2L, 1L }).containsExactlyInAnyOrder(new Long[] { 1L, 1L, 2L }); + * + * // assertions will fail + * assertThat(new long[] { 1L, 2L }).containsExactlyInAnyOrder(new Long[] { 1L }); + * assertThat(new long[] { 1L }).containsExactlyInAnyOrder(new Long[] { 1L, 2L }); + * assertThat(new long[] { 1L, 2L, 1L }).containsExactlyInAnyOrder(new Long[] { 1L, 2L });</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones. + * @since 3.19.0 + */ + public SELF containsExactlyInAnyOrder(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertContainsExactlyInAnyOrder(info, actual, toPrimitiveLongArray(values)); + return myself; + } + /** * Verifies that the actual array contains at least one of the given values. * <p> @@ -543,4 +807,43 @@ public SELF containsAnyOf(long... values) { return myself; } + /** + * Verifies that the actual array contains at least one value of the given array. + * <p> + * Example : + * <pre><code class='java'> long[] oneTwoThree = { 1L, 2L, 3L }; + * + * // assertions will pass + * assertThat(oneTwoThree).containsAnyOf(new Long[] { 2L }) + * .containsAnyOf(new Long[] { 2L, 3L }) + * .containsAnyOf(new Long[] { 1L, 2L, 3L }) + * .containsAnyOf(new Long[] { 1L, 2L, 3L, 4L }) + * .containsAnyOf(new Long[] { 5L, 6L, 7L, 2L }); + * + * // assertions will fail + * assertThat(oneTwoThree).containsAnyOf(new Long[] { 4L }); + * assertThat(oneTwoThree).containsAnyOf(new Long[] { 4L, 5L, 6L, 7L });</code></pre> + * + * @param values the array of values whose at least one which is expected to be in the array under test. + * @return {@code this} assertion object. + * @throws NullPointerException if the array of values is {@code null}. + * @throws IllegalArgumentException if the array of values is empty and the array under test is not empty. + * @throws AssertionError if the array under test is {@code null}. + * @throws AssertionError if the array under test does not contain any of the given {@code values}. + * @since 3.19.0 + */ + public SELF containsAnyOf(Long[] values) { + requireNonNullParameter(values, "values"); + arrays.assertContainsAnyOf(info, actual, toPrimitiveLongArray(values)); + return myself; + } + + private static void requireNonNullParameter(Object parameter, String parameterName) { + requireNonNull(parameter, shouldNotBeNull(parameterName).create()); + } + + private static long[] toPrimitiveLongArray(Long[] values) { + return Arrays.stream(values).mapToLong(Long::longValue).toArray(); + } + }
diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsAnyOf_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsAnyOf_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsAnyOf_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsAnyOf(Long[])}</code>. + * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsAnyOf(Long[])") +class LongArrayAssert_containsAnyOf_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsAnyOf(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsAnyOf(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsAnyOf(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactlyInAnyOrder_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactlyInAnyOrder_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactlyInAnyOrder_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. 
+ */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsExactlyInAnyOrder(Long[])}</code>. + * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsExactlyInAnyOrder(Long[])") +class LongArrayAssert_containsExactlyInAnyOrder_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsExactlyInAnyOrder(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsExactlyInAnyOrder(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsExactlyInAnyOrder(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactly_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactly_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactly_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsExactly(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsExactly(Long[])") +class LongArrayAssert_containsExactly_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsExactly(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsExactly(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsExactly(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnlyOnce_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnlyOnce_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnlyOnce_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsOnlyOnce(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsOnlyOnce(Long[])") +class LongArrayAssert_containsOnlyOnce_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsOnlyOnce(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsOnlyOnce(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsOnlyOnce(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnly_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnly_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsOnly_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsOnly(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsOnly(Long[])") +class LongArrayAssert_containsOnly_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsOnly(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsOnly(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsOnly(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSequence_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSequence_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSequence_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsSequence(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsSequence(Long[])") +class LongArrayAssert_containsSequence_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] sequence = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsSequence(sequence)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("sequence").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsSequence(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsSequence(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSubsequence_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSubsequence_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsSubsequence_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#containsSubsequence(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert containsSubsequence(Long[])") +class LongArrayAssert_containsSubsequence_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] subsequence = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.containsSubsequence(subsequence)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("subsequence").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsSubsequence(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsSubsequence(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_contains_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_contains_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_contains_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#contains(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert contains(Long[])") +class LongArrayAssert_contains_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.contains(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.contains(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContains(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_doesNotContain_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_doesNotContain_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_doesNotContain_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#doesNotContain(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert doesNotContain(Long[])") +class LongArrayAssert_doesNotContain_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] values = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.doesNotContain(values)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("values").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.doesNotContain(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertDoesNotContain(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_endsWith_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_endsWith_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_endsWith_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#endsWith(Long[])}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert endsWith(Long[])") +class LongArrayAssert_endsWith_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] sequence = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.endsWith(sequence)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("sequence").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.endsWith(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertEndsWith(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_startsWith_with_Long_array_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_startsWith_with_Long_array_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_startsWith_with_Long_array_Test.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.longarray; + +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldNotBeNull.shouldNotBeNull; +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link LongArrayAssert#startsWith(Long[])}</code>. + * + * @author Stefano Cordio + */ +@DisplayName("LongArrayAssert startsWith(Long[])") +class LongArrayAssert_startsWith_with_Long_array_Test extends LongArrayAssertBaseTest { + + @Test + void should_fail_if_values_is_null() { + // GIVEN + Long[] sequence = null; + // WHEN + Throwable thrown = catchThrowable(() -> assertions.startsWith(sequence)); + // THEN + then(thrown).isInstanceOf(NullPointerException.class) + .hasMessage(shouldNotBeNull("sequence").create()); + } + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.startsWith(new Long[] { 6L, 8L }); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertStartsWith(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L)); + } + +}
Comparing boxed vs. unboxed arrays is broken

#### Summary

Before PR #1767 (specifically commit [76762a3664b3af2101d580355a05c6f0b3aa46cc](../commit/76762a3664b3af2101d580355a05c6f0b3aa46cc)) it was possible to compare a boxed with an unboxed array. This worked because `org.assertj.core.util.areEqualArrays` compared the arrays entry by entry, which took care of automatic boxing/unboxing. This functionality is now broken and I'm not sure whether that was intentional. It is causing considerable refactoring effort on our side to upgrade, and I would argue that the old behaviour was quite useful.

#### Example

```java
// fails with assertj 3.16.1; succeeds with 3.13.2; probably broken since 3.15.1
@Test
void testCompareArrays() {
  long[] array1 = new long[] { 2L, 2L };
  Long[] array2 = new Long[] { 2L, 2L };
  BDDAssertions.assertThat(array1).isEqualTo(array2);
}
```
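For context, here is a minimal, self-contained sketch (not AssertJ's actual code; the class and method names are made up for illustration) of the difference the report describes: an element-by-element comparison done reflectively boxes primitive elements as it reads them, so a `long[]` and a `Long[]` holding the same values compare equal, while a type-aware check such as `Objects.deepEquals` does not treat them as equal.

```java
import java.lang.reflect.Array;
import java.util.Objects;

// Illustrative sketch only, not AssertJ internals.
class ArrayEqualitySketch {

  // Reads both arrays reflectively, so primitive elements are boxed on access
  // and 2L (long) compares equal to Long.valueOf(2L).
  static boolean elementWiseEqual(Object a, Object b) {
    if (a == null || b == null) return a == b;
    if (!a.getClass().isArray() || !b.getClass().isArray()) return false;
    int length = Array.getLength(a);
    if (length != Array.getLength(b)) return false;
    for (int i = 0; i < length; i++) {
      if (!Objects.equals(Array.get(a, i), Array.get(b, i))) return false;
    }
    return true;
  }

  public static void main(String[] args) {
    long[] primitives = { 2L, 2L };
    Long[] boxed = { 2L, 2L };
    System.out.println(elementWiseEqual(primitives, boxed));   // true
    System.out.println(Objects.deepEquals(primitives, boxed)); // false: component types differ
  }
}
```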
My initial feeling is that the old behavior was violating the language design, which [supports boxing/unboxing for primitive types](https://docs.oracle.com/javase/specs/jls/se8/html/jls-5.html#jls-5.1.7) but not for the corresponding arrays. Also, IntelliJ fires an [`assertEquals()` between objects of inconvertible types](https://www.jetbrains.com/help/idea/list-of-java-inspections.html#junit) warning with the example above. Indeed, `org.junit.jupiter.api.Assertions.assertEquals` fails similarly.

However, I understand the convenience of the previous behavior. Instead of having it behind an `equals` check, my proposal would be to make it explicit by adding overloaded assertions in the `contains` family which would support boxed arrays. E.g., in `AbstractLongArrayAssert` we would add:

```java
public SELF containsExactly(Long[] values) {
  arrays.assertContainsExactly(info, actual, values);
  return myself;
}
```

which would allow:

```java
@Test
void testCompareArrays() {
  long[] array1 = new long[] { 2L, 2L };
  Long[] array2 = new Long[] { 2L, 2L };
  assertThat(array1).containsExactly(array2);
}
```

@nioertel would this approach fit your needs? @joel-costigliola what's your opinion about it? As a bonus topic, is it something to consider for the recursive comparison API improvements?

Valid point. Adding the Long[] version to AbstractLongArrayAssert could be handy, however it should then also be added to the other Abstract(Primitive)ArrayAsserts for consistency of the API. Also the other way round is not possible as ObjectArrayAssert can't really deal with this situation. In any case we refactored our affected test code, so I'm okay with the breaking change now.

> Adding the Long[] version to AbstractLongArrayAssert could be handy, however it should then also be added to the other Abstract(Primitive)ArrayAsserts for consistency of the API.

Yes, definitely.

> Also the other way round is not possible as ObjectArrayAssert can't really deal with this situation.

Right, there is no way to have a nice API on `AbstractObjectArrayAssert`. From the user's point of view, the "easiest" workaround I found to fulfill this use case is the following:

```java
@Test
void testCompareArrays() {
  long[] array1 = new long[] { 2L, 2L };
  Long[] array2 = new Long[] { 2L, 2L };
  assertThat(array2).containsExactly(Arrays.stream(array1).boxed().toArray(Long[]::new));
}
```

We could add additional `AbstractArrayAssert` children to cover the wrapper arrays if the community starts asking for this, but at the moment it seems overkill.

> In any case we refactored our affected test code, so I'm okay with the breaking change now.

Thanks for the feedback! We might put this change on the roadmap anyway.

I agree with @scordio's approach; it's going to be a bit of work to do all the primitive types, but that's fine. I think we can consider allowing comparing `long[]` to `Long[]` in the recursive comparison, as the philosophy of this comparison is to focus on the data (types are less important unless you enable strict type checking).

We won't be able to do much on this one: adding the `Long` version of `contains`, i.e. `contains(Long... values)`, leads to a compilation error (reference to contains is ambiguous) when using it like:

```java
assertThat(longs).contains(6L, 8L);
```

Java does not know if it should use `contains(Long... values)` or `contains(long... values)`.
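To make the overload ambiguity mentioned at the end of this discussion concrete, here is a small hypothetical sketch (the class below is not AssertJ's; it only mimics the two signatures being discussed) showing why the boxed overload was proposed with a plain `Long[]` parameter rather than `Long...` varargs:

```java
// Hypothetical class illustrating the two API shapes discussed above.
class LongArrayAssertSketch {

  // existing primitive varargs assertion
  LongArrayAssertSketch contains(long... values) {
    return this;
  }

  // boxed overload declared with a plain Long[] parameter, as in the proposal,
  // which avoids clashing with the primitive varargs method
  LongArrayAssertSketch contains(Long[] values) {
    return this;
  }

  void demo() {
    contains(6L, 8L);                // resolves to contains(long...)
    contains(new Long[] { 6L, 8L }); // resolves to contains(Long[])
    // Had the boxed overload been declared as contains(Long... values) instead,
    // the first call would not compile: "reference to contains is ambiguous".
  }
}
```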
2020-11-22T23:34:41Z
3.18
assertj/assertj
1,983
assertj__assertj-1983
[ "1961" ]
8a9712c105031456f046471c81029bbf5baeee25
diff --git a/src/main/java/org/assertj/core/api/Assumptions.java b/src/main/java/org/assertj/core/api/Assumptions.java --- a/src/main/java/org/assertj/core/api/Assumptions.java +++ b/src/main/java/org/assertj/core/api/Assumptions.java @@ -1293,16 +1293,16 @@ protected static <ASSERTION> Class<? extends ASSERTION> generateAssumptionClass( } private static RuntimeException assumptionNotMet(AssertionError assertionError) throws ReflectiveOperationException { - Class<?> assumptionClass = getAssumptionClass("org.junit.AssumptionViolatedException"); + Class<?> assumptionClass = getAssumptionClass("org.opentest4j.TestAbortedException"); if (assumptionClass != null) return assumptionNotMet(assumptionClass, assertionError); - assumptionClass = getAssumptionClass("org.opentest4j.TestAbortedException"); + assumptionClass = getAssumptionClass("org.testng.SkipException"); if (assumptionClass != null) return assumptionNotMet(assumptionClass, assertionError); - assumptionClass = getAssumptionClass("org.testng.SkipException"); + assumptionClass = getAssumptionClass("org.junit.AssumptionViolatedException"); if (assumptionClass != null) return assumptionNotMet(assumptionClass, assertionError); - throw new IllegalStateException("Assumptions require JUnit, opentest4j or TestNG on the classpath"); + throw new IllegalStateException("Assumptions require opentest4j, TestNG or JUnit on the classpath"); } private static Class<?> getAssumptionClass(String className) {
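The patch above only changes the probing order; the mechanism itself looks roughly like the following sketch (illustrative only, with made-up names, not AssertJ's actual implementation): each candidate exception class is looked up reflectively and the first one found on the classpath wins, now preferring opentest4j, then TestNG, then JUnit 4.

```java
// Illustrative sketch of classpath-based selection of the "assumption not met"
// exception type; names and details are simplified, not AssertJ internals.
class AssumptionExceptionProbe {

  static RuntimeException assumptionNotMet(AssertionError assertionError) throws ReflectiveOperationException {
    // precedence after the patch: opentest4j, then TestNG, then JUnit 4
    String[] candidates = {
        "org.opentest4j.TestAbortedException",
        "org.testng.SkipException",
        "org.junit.AssumptionViolatedException"
    };
    for (String className : candidates) {
      Class<?> exceptionClass = tryToLoad(className);
      if (exceptionClass != null) {
        // all three candidate types expose a public (String) constructor
        return (RuntimeException) exceptionClass.getConstructor(String.class)
                                                .newInstance("assumption was not met due to: " + assertionError.getMessage());
      }
    }
    throw new IllegalStateException("Assumptions require opentest4j, TestNG or JUnit on the classpath");
  }

  private static Class<?> tryToLoad(String className) {
    try {
      return Class.forName(className);
    } catch (ClassNotFoundException e) {
      return null; // not on the classpath, try the next candidate
    }
  }
}
```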
diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Atomics_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Atomics_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Atomics_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Atomics_Test.java @@ -13,9 +13,9 @@ package org.assertj.core.api.assumptions; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.util.Arrays.array; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -31,7 +31,6 @@ import java.util.concurrent.atomic.AtomicStampedReference; import java.util.stream.Stream; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; @@ -41,154 +40,154 @@ class Assumptions_assumeThat_Atomics_Test { static Stream<AssumptionRunner<?>> provideAssumptionsRunners() { return Stream.of( - new AssumptionRunner<AtomicBoolean>(new AtomicBoolean(true)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isFalse(); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).isTrue(); - } - }, - new AssumptionRunner<AtomicInteger>(new AtomicInteger(42)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasNegativeValue(); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasPositiveValue(); - } - }, - new AssumptionRunner<AtomicIntegerArray>(new AtomicIntegerArray(new int[] { 2, 5, 7 })) { - @Override - public void runFailingAssumption() { - assumeThat(actual).contains(20); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).contains(7); - } - }, - new AssumptionRunner<AtomicIntegerFieldUpdater<VolatileFieldsHolder>>(AtomicIntegerFieldUpdater.newUpdater(VolatileFieldsHolder.class, - "intValue")) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasValue(10, VOLATILE_FIELDS_HOLDER); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasValue(0, VOLATILE_FIELDS_HOLDER); - } - }, - new AssumptionRunner<AtomicLong>(new AtomicLong(42)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasNegativeValue(); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasPositiveValue(); - } - }, - new AssumptionRunner<AtomicLongArray>(new AtomicLongArray(new long[] { 2, 5, 7 })) { - @Override - public void runFailingAssumption() { - assumeThat(actual).contains(20); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).contains(7); - } - }, - new AssumptionRunner<AtomicLongFieldUpdater<VolatileFieldsHolder>>(AtomicLongFieldUpdater.newUpdater(VolatileFieldsHolder.class, - "longValue")) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasValue(10L, VOLATILE_FIELDS_HOLDER); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasValue(0L, VOLATILE_FIELDS_HOLDER); - } - }, - new AssumptionRunner<AtomicReference<String>>(new AtomicReference<>("test")) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasValue("other"); - } - - 
@Override - public void runPassingAssumption() { - assumeThat(actual).hasValue("test"); - } - }, - new AssumptionRunner<AtomicReferenceArray<String>>(new AtomicReferenceArray<>(array("2", "5", "7"))) { - @Override - public void runFailingAssumption() { - assumeThat(actual).contains("20"); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).contains("7"); - } - }, - new AssumptionRunner<AtomicReferenceFieldUpdater<VolatileFieldsHolder, String>>(AtomicReferenceFieldUpdater.newUpdater(VolatileFieldsHolder.class, - String.class, - "stringValue")) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasValue("other", VOLATILE_FIELDS_HOLDER); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasValue("test", VOLATILE_FIELDS_HOLDER); - } - }, - new AssumptionRunner<AtomicMarkableReference<String>>(new AtomicMarkableReference<>("test", true)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasReference("other"); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasReference("test"); - } - }, - new AssumptionRunner<AtomicStampedReference<String>>(new AtomicStampedReference<>("test", 1)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasStamp(0); - } - - @Override - public void runPassingAssumption() { - assumeThat(actual).hasStamp(1); - } - }); + new AssumptionRunner<AtomicBoolean>(new AtomicBoolean(true)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isFalse(); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).isTrue(); + } + }, + new AssumptionRunner<AtomicInteger>(new AtomicInteger(42)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasNegativeValue(); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasPositiveValue(); + } + }, + new AssumptionRunner<AtomicIntegerArray>(new AtomicIntegerArray(new int[] { 2, 5, 7 })) { + @Override + public void runFailingAssumption() { + assumeThat(actual).contains(20); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).contains(7); + } + }, + new AssumptionRunner<AtomicIntegerFieldUpdater<VolatileFieldsHolder>>(AtomicIntegerFieldUpdater.newUpdater(VolatileFieldsHolder.class, + "intValue")) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasValue(10, VOLATILE_FIELDS_HOLDER); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasValue(0, VOLATILE_FIELDS_HOLDER); + } + }, + new AssumptionRunner<AtomicLong>(new AtomicLong(42)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasNegativeValue(); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasPositiveValue(); + } + }, + new AssumptionRunner<AtomicLongArray>(new AtomicLongArray(new long[] { 2, 5, 7 })) { + @Override + public void runFailingAssumption() { + assumeThat(actual).contains(20); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).contains(7); + } + }, + new AssumptionRunner<AtomicLongFieldUpdater<VolatileFieldsHolder>>(AtomicLongFieldUpdater.newUpdater(VolatileFieldsHolder.class, + "longValue")) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasValue(10L, VOLATILE_FIELDS_HOLDER); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasValue(0L, VOLATILE_FIELDS_HOLDER); + } + }, + new AssumptionRunner<AtomicReference<String>>(new 
AtomicReference<>("test")) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasValue("other"); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasValue("test"); + } + }, + new AssumptionRunner<AtomicReferenceArray<String>>(new AtomicReferenceArray<>(array("2", "5", "7"))) { + @Override + public void runFailingAssumption() { + assumeThat(actual).contains("20"); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).contains("7"); + } + }, + new AssumptionRunner<AtomicReferenceFieldUpdater<VolatileFieldsHolder, String>>(AtomicReferenceFieldUpdater.newUpdater(VolatileFieldsHolder.class, + String.class, + "stringValue")) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasValue("other", VOLATILE_FIELDS_HOLDER); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasValue("test", VOLATILE_FIELDS_HOLDER); + } + }, + new AssumptionRunner<AtomicMarkableReference<String>>(new AtomicMarkableReference<>("test", true)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasReference("other"); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasReference("test"); + } + }, + new AssumptionRunner<AtomicStampedReference<String>>(new AtomicStampedReference<>("test", 1)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasStamp(0); + } + + @Override + public void runPassingAssumption() { + assumeThat(actual).hasStamp(1); + } + }); } @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_ignore_test_when_assumption_fails(AssumptionRunner<?> assumptionRunner) { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumptionRunner.runFailingAssumption()); + expectAssumptionNotMetException(assumptionRunner::runFailingAssumption); } @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_run_test_when_assumption_passes(AssumptionRunner<?> assumptionRunner) { - assertThatCode(() -> assumptionRunner.runPassingAssumption()).doesNotThrowAnyException(); + assertThatCode(assumptionRunner::runPassingAssumption).doesNotThrowAnyException(); } @SuppressWarnings("unused") diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Numbers_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Numbers_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Numbers_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Numbers_Test.java @@ -15,14 +15,13 @@ import static java.math.BigDecimal.ZERO; import static java.math.BigInteger.ONE; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import java.math.BigDecimal; import java.math.BigInteger; import java.util.stream.Stream; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; @@ -30,238 +29,238 @@ class Assumptions_assumeThat_Numbers_Test { static Stream<AssumptionRunner<?>> provideAssumptionsRunners() { return Stream.of( - new AssumptionRunner<Byte>() { - @Override - public void runFailingAssumption() { - assumeThat((byte) 4).isLessThan((byte) 2); - } + new 
AssumptionRunner<Byte>() { + @Override + public void runFailingAssumption() { + assumeThat((byte) 4).isLessThan((byte) 2); + } - @Override - public void runPassingAssumption() { - assumeThat((byte) 4).isGreaterThan((byte) 2); - } - }, - new AssumptionRunner<Byte>((byte) 4) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan((byte) 2); - } + @Override + public void runPassingAssumption() { + assumeThat((byte) 4).isGreaterThan((byte) 2); + } + }, + new AssumptionRunner<Byte>((byte) 4) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan((byte) 2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan((byte) 2); - } - }, - new AssumptionRunner<byte[]>(new byte[] { 2, 4, 2 }) { - @Override - public void runFailingAssumption() { - assumeThat(actual).containsOnlyOnce(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan((byte) 2); + } + }, + new AssumptionRunner<byte[]>(new byte[] { 2, 4, 2 }) { + @Override + public void runFailingAssumption() { + assumeThat(actual).containsOnlyOnce(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).containsOnlyOnce(4); - } - }, - new AssumptionRunner<Short>() { - @Override - public void runFailingAssumption() { - assumeThat((short) 4).isLessThan((short) 2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).containsOnlyOnce(4); + } + }, + new AssumptionRunner<Short>() { + @Override + public void runFailingAssumption() { + assumeThat((short) 4).isLessThan((short) 2); + } - @Override - public void runPassingAssumption() { - assumeThat((short) 4).isGreaterThan((short) 2); - } - }, - new AssumptionRunner<Short>((short) 4) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan((short) 2); - } + @Override + public void runPassingAssumption() { + assumeThat((short) 4).isGreaterThan((short) 2); + } + }, + new AssumptionRunner<Short>((short) 4) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan((short) 2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan((short) 2); - } - }, - new AssumptionRunner<short[]>(new short[] { 2, 4, 2 }) { - @Override - public void runFailingAssumption() { - assumeThat(actual).containsOnlyOnce((short) 2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan((short) 2); + } + }, + new AssumptionRunner<short[]>(new short[] { 2, 4, 2 }) { + @Override + public void runFailingAssumption() { + assumeThat(actual).containsOnlyOnce((short) 2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).containsOnlyOnce((short) 4); - } - }, - new AssumptionRunner<Integer>() { - @Override - public void runFailingAssumption() { - assumeThat(4).isLessThan(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).containsOnlyOnce((short) 4); + } + }, + new AssumptionRunner<Integer>() { + @Override + public void runFailingAssumption() { + assumeThat(4).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(4).isGreaterThan(2); - } - }, - new AssumptionRunner<Integer>(4) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan(2); - } + @Override + public void runPassingAssumption() { + assumeThat(4).isGreaterThan(2); + } + }, + new AssumptionRunner<Integer>(4) { + @Override + public void runFailingAssumption() { + 
assumeThat(actual).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan(2); - } - }, - new AssumptionRunner<int[]>(new int[] { 2, 4, 2 }) { - @Override - public void runFailingAssumption() { - assumeThat(actual).containsOnlyOnce(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan(2); + } + }, + new AssumptionRunner<int[]>(new int[] { 2, 4, 2 }) { + @Override + public void runFailingAssumption() { + assumeThat(actual).containsOnlyOnce(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).containsOnlyOnce(4); - } - }, - new AssumptionRunner<Long>() { - @Override - public void runFailingAssumption() { - assumeThat(4L).isLessThan(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).containsOnlyOnce(4); + } + }, + new AssumptionRunner<Long>() { + @Override + public void runFailingAssumption() { + assumeThat(4L).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(4L).isGreaterThan(2); - } - }, - new AssumptionRunner<Long>(4L) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan(2); - } + @Override + public void runPassingAssumption() { + assumeThat(4L).isGreaterThan(2); + } + }, + new AssumptionRunner<Long>(4L) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan(2); - } - }, - new AssumptionRunner<long[]>(new long[] { 2, 4, 2 }) { - @Override - public void runFailingAssumption() { - assumeThat(actual).containsOnlyOnce(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan(2); + } + }, + new AssumptionRunner<long[]>(new long[] { 2, 4, 2 }) { + @Override + public void runFailingAssumption() { + assumeThat(actual).containsOnlyOnce(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).containsOnlyOnce(4); - } - }, + @Override + public void runPassingAssumption() { + assumeThat(actual).containsOnlyOnce(4); + } + }, - new AssumptionRunner<Float>() { - @Override - public void runFailingAssumption() { - assumeThat(4.0f).isLessThan(2); - } + new AssumptionRunner<Float>() { + @Override + public void runFailingAssumption() { + assumeThat(4.0f).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(4.0f).isGreaterThan((byte) 2); - } - }, - new AssumptionRunner<Float>(4.0f) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan(2); - } + @Override + public void runPassingAssumption() { + assumeThat(4.0f).isGreaterThan((byte) 2); + } + }, + new AssumptionRunner<Float>(4.0f) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan(2); - } - }, - new AssumptionRunner<float[]>(new float[] { 2, 4, 2 }) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasSize(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan(2); + } + }, + new AssumptionRunner<float[]>(new float[] { 2, 4, 2 }) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasSize(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).hasSize(3); - } - }, - new AssumptionRunner<Double>() { - @Override - public void runFailingAssumption() { - assumeThat(4.0).isLessThan(2); - 
} + @Override + public void runPassingAssumption() { + assumeThat(actual).hasSize(3); + } + }, + new AssumptionRunner<Double>() { + @Override + public void runFailingAssumption() { + assumeThat(4.0).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(4.0).isGreaterThan((byte) 2); - } - }, - new AssumptionRunner<Double>(4.0) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan(2); - } + @Override + public void runPassingAssumption() { + assumeThat(4.0).isGreaterThan((byte) 2); + } + }, + new AssumptionRunner<Double>(4.0) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan(2); - } - }, - new AssumptionRunner<double[]>(new double[] { 2, 4, 2 }) { - @Override - public void runFailingAssumption() { - assumeThat(actual).hasSize(2); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan(2); + } + }, + new AssumptionRunner<double[]>(new double[] { 2, 4, 2 }) { + @Override + public void runFailingAssumption() { + assumeThat(actual).hasSize(2); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).hasSize(3); - } - }, - new AssumptionRunner<BigDecimal>(new BigDecimal(4)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan(ZERO); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).hasSize(3); + } + }, + new AssumptionRunner<BigDecimal>(new BigDecimal(4)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan(ZERO); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan(ZERO); - } - }, - new AssumptionRunner<BigInteger>(BigInteger.valueOf(4)) { - @Override - public void runFailingAssumption() { - assumeThat(actual).isLessThan(ONE); - } + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan(ZERO); + } + }, + new AssumptionRunner<BigInteger>(BigInteger.valueOf(4)) { + @Override + public void runFailingAssumption() { + assumeThat(actual).isLessThan(ONE); + } - @Override - public void runPassingAssumption() { - assumeThat(actual).isGreaterThan(ONE); - } - }); + @Override + public void runPassingAssumption() { + assumeThat(actual).isGreaterThan(ONE); + } + }); } @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_ignore_test_when_assumption_fails(AssumptionRunner<?> assumptionRunner) { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumptionRunner.runFailingAssumption()); + expectAssumptionNotMetException(assumptionRunner::runFailingAssumption); } @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_run_test_when_assumption_passes(AssumptionRunner<?> assumptionRunner) { - assertThatCode(() -> assumptionRunner.runPassingAssumption()).doesNotThrowAnyException(); + assertThatCode(assumptionRunner::runPassingAssumption).doesNotThrowAnyException(); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Object_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Object_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Object_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Object_Test.java @@ -14,10 +14,9 @@ import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThatCode; 
-import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.Test; class Assumptions_assumeThat_Object_Test { @@ -42,23 +41,23 @@ void should_run_test_when_assumption_for_internally_created_list_passes() { @Test void should_ignore_test_when_assumption_fails() { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(STRING_OBJECT).isNotNull() - .isEqualTo("other")); + expectAssumptionNotMetException(() -> assumeThat(STRING_OBJECT).isNotNull() + .isEqualTo("other")); } @Test void should_ignore_test_when_assumption_for_internally_created_string_assertion_fails() { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(STRING_OBJECT).isNotNull() - .asString() - .isEqualTo("other")); + expectAssumptionNotMetException(() -> assumeThat(STRING_OBJECT).isNotNull() + .asString() + .isEqualTo("other")); } @Test void should_ignore_test_when_assumption_for_internally_created_list_assertion_fails() { Object listObject = asList(1, 2, 3); - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(listObject).isNotNull() - .asList() - .contains(4, - 5)); + expectAssumptionNotMetException(() -> assumeThat(listObject).isNotNull() + .asList() + .contains(4, + 5)); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_Test.java @@ -14,12 +14,11 @@ import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.presentation.UnicodeRepresentation.UNICODE_REPRESENTATION; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import org.assertj.core.util.CaseInsensitiveStringComparator; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.Test; class Assumptions_assumeThat_Test { @@ -27,7 +26,7 @@ class Assumptions_assumeThat_Test { @Test void should_ignore_test_when_one_of_the_assumption_fails() { assumeThat("foo").isNotEmpty(); - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat("bar").isEmpty()); + expectAssumptionNotMetException(() -> assumeThat("bar").isEmpty()); } @Test diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_involving_iterable_navigation_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_involving_iterable_navigation_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_involving_iterable_navigation_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_involving_iterable_navigation_Test.java @@ -16,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.api.InstanceOfAssertFactories.type; -import static 
org.assertj.core.util.AssertionsUtil.expectAssumptionViolatedException; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.Lists.list; import static org.assertj.core.util.Lists.newArrayList; import static org.assertj.core.util.Sets.newLinkedHashSet; @@ -75,65 +75,65 @@ void should_run_test_when_assumption_after_navigating_to_elements_passes() { @Test void should_ignore_test_when_assumption_on_size_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).size() - .as("check size") - .isGreaterThan(3)); + expectAssumptionNotMetException(() -> assumeThat(jedis).size() + .as("check size") + .isGreaterThan(3)); } @Test void should_ignore_test_when_assumption_after_navigating_to_first_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).first() - .as("check first element") - .isEqualTo(luke)); + expectAssumptionNotMetException(() -> assumeThat(jedis).first() + .as("check first element") + .isEqualTo(luke)); } @Test void should_ignore_test_when_assumption_after_navigating_to_first_with_InstanceOfAssertFactory_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).first(as(type(Jedi.class))) - .as("check first element") - .isEqualTo(luke)); + expectAssumptionNotMetException(() -> assumeThat(jedis).first(as(type(Jedi.class))) + .as("check first element") + .isEqualTo(luke)); } @Test void should_ignore_test_when_assumption_after_navigating_to_last_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).last() - .as("check last element") - .isEqualTo(yoda)); + expectAssumptionNotMetException(() -> assumeThat(jedis).last() + .as("check last element") + .isEqualTo(yoda)); } @Test void should_ignore_test_when_assumption_after_navigating_to_last_with_InstanceOfAssertFactory_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).last(as(type(Jedi.class))) - .as("check last element") - .isEqualTo(yoda)); + expectAssumptionNotMetException(() -> assumeThat(jedis).last(as(type(Jedi.class))) + .as("check last element") + .isEqualTo(yoda)); } @Test void should_ignore_test_when_assumption_after_navigating_to_element_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).element(1) - .as("check element at index 1") - .isEqualTo(yoda)); + expectAssumptionNotMetException(() -> assumeThat(jedis).element(1) + .as("check element at index 1") + .isEqualTo(yoda)); } @Test void should_ignore_test_when_assumption_after_navigating_to_element_with_InstanceOfAssertFactory_fails() { - expectAssumptionViolatedException(() -> assumeThat(jedis).element(1, as(type(Jedi.class))) - .as("check element at index 1") - .isEqualTo(yoda)); + expectAssumptionNotMetException(() -> assumeThat(jedis).element(1, as(type(Jedi.class))) + .as("check element at index 1") + .isEqualTo(yoda)); } @Test void should_ignore_test_when_assumption_after_navigating_to_singleElement_fails() { - expectAssumptionViolatedException(() -> assumeThat(list(yoda)).singleElement() - .as("check single element") - .isEqualTo(luke)); + expectAssumptionNotMetException(() -> assumeThat(list(yoda)).singleElement() + .as("check single element") + .isEqualTo(luke)); } @Test void should_ignore_test_when_assumption_after_navigating_to_singleElement_with_InstanceOfAssertFactory_fails() { - expectAssumptionViolatedException(() -> assumeThat(list(yoda)).singleElement(as(type(Jedi.class))) - .as("check single element") - .isEqualTo(luke)); + expectAssumptionNotMetException(() -> 
assumeThat(list(yoda)).singleElement(as(type(Jedi.class))) + .as("check single element") + .isEqualTo(luke)); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_Stream_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_Stream_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_Stream_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_Stream_Test.java @@ -12,14 +12,13 @@ */ package org.assertj.core.api.assumptions; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.Lists.newArrayList; import java.util.List; import java.util.stream.Stream; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.Test; class Assumptions_assumeThat_with_Stream_Test { @@ -27,14 +26,14 @@ class Assumptions_assumeThat_with_Stream_Test { @Test void stream_test() { Stream<String> stream = Stream.of("test"); - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(stream).containsAnyOf("other", - "foo")); + expectAssumptionNotMetException(() -> assumeThat(stream).containsAnyOf("other", + "foo")); } @Test void list_test() { List<String> list = newArrayList("test"); - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(list).contains("other", - "foo")); + expectAssumptionNotMetException(() -> assumeThat(list).contains("other", + "foo")); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_asInstanceOf_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_asInstanceOf_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_asInstanceOf_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_asInstanceOf_Test.java @@ -13,12 +13,11 @@ package org.assertj.core.api.assumptions; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.api.InstanceOfAssertFactories.INTEGER; import static org.assertj.core.api.InstanceOfAssertFactories.STRING; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -38,7 +37,7 @@ void should_run_test_when_assumption_with_as_instance_of_passes() { @Test void should_ignore_test_when_assumption_with_as_instance_of_fails() { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(value).asInstanceOf(INTEGER) - .isZero()); + expectAssumptionNotMetException(() -> assumeThat(value).asInstanceOf(INTEGER) + .isZero()); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_extracting_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_extracting_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_extracting_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_extracting_Test.java @@ -14,10 +14,10 @@ import static 
com.google.common.collect.Sets.newHashSet; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.entry; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.test.Maps.mapOf; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.Lists.newArrayList; import java.util.Map; @@ -25,7 +25,6 @@ import org.assertj.core.test.CartoonCharacter; import org.assertj.core.test.Jedi; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -90,7 +89,7 @@ void should_allow_assumptions_with_flatExtracting() { @Test void should_ignore_test_when_assumption_using_extracting_fails() { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(jedis).extracting("name") - .contains("Vader")); + expectAssumptionNotMetException(() -> assumeThat(jedis).extracting("name") + .contains("Vader")); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_filteredOn_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_filteredOn_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_filteredOn_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_filteredOn_Test.java @@ -14,13 +14,12 @@ import static com.google.common.collect.Sets.newHashSet; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import java.util.Set; import org.assertj.core.test.Jedi; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -44,6 +43,7 @@ void should_run_test_when_assumption_with_filtered_elements_passes() { @Test void should_ignore_test_when_assumption_with_filtered_elements_fails() { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(jedis).filteredOn("name", "Luke").contains(yoda)); + expectAssumptionNotMetException(() -> assumeThat(jedis).filteredOn("name", "Luke") + .contains(yoda)); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_succeedsWithin_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_succeedsWithin_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_succeedsWithin_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_succeedsWithin_Test.java @@ -18,7 +18,7 @@ import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.api.InstanceOfAssertFactories.STRING; -import static org.assertj.core.util.AssertionsUtil.expectAssumptionViolatedException; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import java.time.Duration; import java.util.concurrent.CompletableFuture; @@ -50,8 +50,8 @@ void should_ignore_test_when_assumption_after_succeedsWithin_fails() { String value = "ook!"; CompletableFuture<String> future = completedFuture(value); // WHEN - 
expectAssumptionViolatedException(() -> assumeThat(future).succeedsWithin(1, MILLISECONDS) - .isEqualTo("eeek!")); + expectAssumptionNotMetException(() -> assumeThat(future).succeedsWithin(1, MILLISECONDS) + .isEqualTo("eeek!")); } @Test @@ -72,8 +72,8 @@ void should_ignore_test_when_assumption_after_succeedsWithin_asString_fails() { String value = "ook!"; CompletableFuture<String> future = completedFuture(value); // WHEN - expectAssumptionViolatedException(() -> assumeThat(future).succeedsWithin(1, MILLISECONDS, as(STRING)) - .startsWith("eek")); + expectAssumptionNotMetException(() -> assumeThat(future).succeedsWithin(1, MILLISECONDS, as(STRING)) + .startsWith("eek")); } @Test @@ -94,8 +94,8 @@ void should_ignore_test_when_assumption_after_succeedsWithin_with_Duration_fails String value = "ook!"; CompletableFuture<String> future = completedFuture(value); // WHEN - expectAssumptionViolatedException(() -> assumeThat(future).succeedsWithin(ONE_MILLIS) - .isEqualTo("eeek!")); + expectAssumptionNotMetException(() -> assumeThat(future).succeedsWithin(ONE_MILLIS) + .isEqualTo("eeek!")); } @Test @@ -116,7 +116,7 @@ void should_ignore_test_when_assumption_after_succeedsWithin_with_Duration_asStr String value = "ook!"; CompletableFuture<String> future = completedFuture(value); // WHEN - expectAssumptionViolatedException(() -> assumeThat(future).succeedsWithin(ONE_MILLIS, as(STRING)) - .startsWith("eek")); + expectAssumptionNotMetException(() -> assumeThat(future).succeedsWithin(ONE_MILLIS, as(STRING)) + .startsWith("eek")); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_java_8_types_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_java_8_types_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_java_8_types_Test.java +++ b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_java_8_types_Test.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.api.Assumptions.assumeThatCode; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import java.time.Duration; import java.time.Instant; @@ -41,9 +42,9 @@ import java.util.stream.Stream; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; +import org.opentest4j.TestAbortedException; class Assumptions_assumeThat_with_various_java_8_types_Test { @@ -296,7 +297,7 @@ public void runPassingAssumption() { @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_ignore_test_when_assumption_fails(AssumptionRunner<?> assumptionRunner) { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(assumptionRunner::runFailingAssumption); + expectAssumptionNotMetException(assumptionRunner::runFailingAssumption); } @ParameterizedTest diff --git a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_types_Test.java b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_types_Test.java --- a/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_types_Test.java +++ 
b/src/test/java/org/assertj/core/api/assumptions/Assumptions_assumeThat_with_various_types_Test.java @@ -16,12 +16,12 @@ import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatObject; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.api.Assumptions.assumeThatObject; import static org.assertj.core.api.Assumptions.assumeThatThrownBy; import static org.assertj.core.util.Arrays.array; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.Lists.list; import static org.assertj.core.util.Maps.newHashMap; import static org.mockito.Mockito.mock; @@ -45,7 +45,6 @@ import org.assertj.core.api.Condition; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; import org.assertj.core.api.test.ComparableExample; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; @@ -221,7 +220,7 @@ public void runPassingAssumption() { }, new AssumptionRunner<ThrowingCallable>(new ThrowingCallable() { @Override - public void call() throws Throwable { + public void call() { throw new IllegalArgumentException(); } }) { @@ -368,7 +367,7 @@ public void runPassingAssumption() { new Condition<>(list -> list.getFirst().equals("abc"), "First element is 'abc'"))); } }, - new AssumptionRunner<Spliterator>(Stream.of(1, 2).spliterator()) { + new AssumptionRunner<Spliterator<Integer>>(Stream.of(1, 2).spliterator()) { @Override public void runFailingAssumption() { assumeThat(actual).hasCharacteristics(Spliterator.DISTINCT); @@ -393,12 +392,12 @@ private static URL createUrl() { @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_ignore_test_when_assumption_fails(AssumptionRunner<?> assumptionRunner) { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumptionRunner.runFailingAssumption()); + expectAssumptionNotMetException(assumptionRunner::runFailingAssumption); } @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_run_test_when_assumption_passes(AssumptionRunner<?> assumptionRunner) { - assertThatCode(() -> assumptionRunner.runPassingAssumption()).doesNotThrowAnyException(); + assertThatCode(assumptionRunner::runPassingAssumption).doesNotThrowAnyException(); } } diff --git a/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java b/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java --- a/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java +++ b/src/test/java/org/assertj/core/api/assumptions/BDDAssumptionsTest.java @@ -16,7 +16,7 @@ import static org.assertj.core.api.BDDAssumptions.given; import static org.assertj.core.api.BDDAssumptions.givenCode; import static org.assertj.core.api.BDDAssumptions.givenObject; -import static org.assertj.core.util.AssertionsUtil.expectAssumptionViolatedException; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.mockito.BDDMockito.willReturn; import static org.mockito.Mockito.mock; @@ -92,7 +92,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isFalse()); + 
expectAssumptionNotMetException(() -> given(actual).isFalse()); } } @@ -107,7 +107,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isFalse()); + expectAssumptionNotMetException(() -> given(actual).isFalse()); } } @@ -122,7 +122,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains(false)); + expectAssumptionNotMetException(() -> given(actual).contains(false)); } } @@ -137,7 +137,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -152,7 +152,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -167,7 +167,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains((byte) 0)); + expectAssumptionNotMetException(() -> given(actual).contains((byte) 0)); } } @@ -182,7 +182,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -197,7 +197,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -212,7 +212,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains((short) 0)); + expectAssumptionNotMetException(() -> given(actual).contains((short) 0)); } } @@ -227,7 +227,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -242,7 +242,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -257,7 +257,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains(0)); + expectAssumptionNotMetException(() -> given(actual).contains(0)); } } @@ -272,7 +272,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -287,7 +287,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -302,7 +302,7 @@ void 
should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -317,7 +317,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains(0L)); + expectAssumptionNotMetException(() -> given(actual).contains(0L)); } } @@ -332,7 +332,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -347,7 +347,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -362,7 +362,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains(0.0f)); + expectAssumptionNotMetException(() -> given(actual).contains(0.0f)); } } @@ -377,7 +377,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -392,7 +392,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -407,7 +407,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains(0.0f)); + expectAssumptionNotMetException(() -> given(actual).contains(0.0f)); } } @@ -422,7 +422,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isZero()); + expectAssumptionNotMetException(() -> given(actual).isZero()); } } @@ -437,7 +437,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isLowerCase()); + expectAssumptionNotMetException(() -> given(actual).isLowerCase()); } } @@ -452,7 +452,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isLowerCase()); + expectAssumptionNotMetException(() -> given(actual).isLowerCase()); } } @@ -467,7 +467,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).contains('C')); + expectAssumptionNotMetException(() -> given(actual).contains('C')); } } @@ -482,7 +482,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNullOrEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNullOrEmpty()); } } @@ -497,7 +497,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { 
- expectAssumptionViolatedException(() -> given(actual).isNullOrEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNullOrEmpty()); } } @@ -512,7 +512,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNullOrEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNullOrEmpty()); } } @@ -527,7 +527,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNullOrEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNullOrEmpty()); } } @@ -542,7 +542,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isInterface()); + expectAssumptionNotMetException(() -> given(actual).isInterface()); } } @@ -568,7 +568,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasAllNullFieldsOrProperties()); + expectAssumptionNotMetException(() -> given(actual).hasAllNullFieldsOrProperties()); } } @@ -583,7 +583,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNullOrEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNullOrEmpty()); } } @@ -598,7 +598,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> givenObject(actual).hasAllNullFieldsOrProperties()); + expectAssumptionNotMetException(() -> givenObject(actual).hasAllNullFieldsOrProperties()); } } } @@ -621,7 +621,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEqualByComparingTo(new Yoda())); + expectAssumptionNotMetException(() -> given(actual).isNotEqualByComparingTo(new Yoda())); } } @@ -636,7 +636,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasMessage("")); + expectAssumptionNotMetException(() -> given(actual).hasMessage("")); } } @@ -651,7 +651,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> givenCode(() -> { /* some code */ }).hasMessage("Yoda time")); + expectAssumptionNotMetException(() -> givenCode(() -> { /* some code */ }).hasMessage("Yoda time")); } } @@ -666,7 +666,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> givenCode(() -> { + expectAssumptionNotMetException(() -> givenCode(() -> { throw new Exception("Yoda time"); }).doesNotThrowAnyException()); } @@ -684,7 +684,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).containsOnly(2)); + expectAssumptionNotMetException(() -> given(actual).containsOnly(2)); } } @@ -699,7 +699,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> 
given(actual).isExhausted()); + expectAssumptionNotMetException(() -> given(actual).isExhausted()); } } @@ -714,7 +714,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).containsOnly(2)); + expectAssumptionNotMetException(() -> given(actual).containsOnly(2)); } } @@ -729,7 +729,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).containsEntry(2, 1)); + expectAssumptionNotMetException(() -> given(actual).containsEntry(2, 1)); } } @@ -744,7 +744,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).accepts(-2, -1)); + expectAssumptionNotMetException(() -> given(actual).accepts(-2, -1)); } } @@ -759,7 +759,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).accepts(-2, -1)); + expectAssumptionNotMetException(() -> given(actual).accepts(-2, -1)); } } @@ -774,7 +774,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).accepts(-2, -1)); + expectAssumptionNotMetException(() -> given(actual).accepts(-2, -1)); } } @@ -789,7 +789,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).accepts(-2.0, -1.0)); + expectAssumptionNotMetException(() -> given(actual).accepts(-2.0, -1.0)); } } @@ -804,7 +804,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -819,7 +819,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -834,7 +834,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -849,7 +849,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -864,7 +864,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).containsOnly(2)); + expectAssumptionNotMetException(() -> given(actual).containsOnly(2)); } } @@ -879,7 +879,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).containsOnly(2)); + expectAssumptionNotMetException(() -> given(actual).containsOnly(2)); } } @@ -894,7 +894,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> 
given(actual).containsOnly(2L)); + expectAssumptionNotMetException(() -> given(actual).containsOnly(2L)); } } @@ -909,7 +909,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).containsOnly(2.0)); + expectAssumptionNotMetException(() -> given(actual).containsOnly(2.0)); } } @@ -924,7 +924,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasCharacteristics(Spliterator.DISTINCT)); + expectAssumptionNotMetException(() -> given(actual).hasCharacteristics(Spliterator.DISTINCT)); } } @@ -944,7 +944,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotCancelled()); + expectAssumptionNotMetException(() -> given(actual).isNotCancelled()); } } @@ -959,7 +959,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotDone()); + expectAssumptionNotMetException(() -> given(actual).isNotDone()); } } @@ -974,7 +974,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotDone()); + expectAssumptionNotMetException(() -> given(actual).isNotDone()); } } @@ -989,7 +989,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isFalse()); + expectAssumptionNotMetException(() -> given(actual).isFalse()); } } @@ -1004,7 +1004,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasNegativeValue()); + expectAssumptionNotMetException(() -> given(actual).hasNegativeValue()); } } @@ -1019,7 +1019,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -1045,7 +1045,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasValue(2, value)); + expectAssumptionNotMetException(() -> given(actual).hasValue(2, value)); } } @@ -1060,7 +1060,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasNegativeValue()); + expectAssumptionNotMetException(() -> given(actual).hasNegativeValue()); } } @@ -1075,7 +1075,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -1101,7 +1101,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasValue(2L, value)); + expectAssumptionNotMetException(() -> given(actual).hasValue(2L, value)); } } @@ -1116,7 +1116,7 @@ void should_run_test_when_assumption_passes() { @Test void 
should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).doesNotHaveValue("Yoda")); + expectAssumptionNotMetException(() -> given(actual).doesNotHaveValue("Yoda")); } } @@ -1131,7 +1131,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotEmpty()); + expectAssumptionNotMetException(() -> given(actual).isNotEmpty()); } } @@ -1159,7 +1159,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasValue("", value)); + expectAssumptionNotMetException(() -> given(actual).hasValue("", value)); } } @@ -1174,7 +1174,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isNotMarked()); + expectAssumptionNotMetException(() -> given(actual).isNotMarked()); } } @@ -1189,7 +1189,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasStamp(0)); + expectAssumptionNotMetException(() -> given(actual).hasStamp(0)); } } @@ -1204,7 +1204,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter("2016-12-03T10:15:30Z")); + expectAssumptionNotMetException(() -> given(actual).isAfter("2016-12-03T10:15:30Z")); } } @@ -1219,7 +1219,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter(LocalDate.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(LocalDate.now())); } } @@ -1234,7 +1234,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter(LocalTime.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(LocalTime.now())); } } @@ -1249,7 +1249,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter(OffsetTime.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(OffsetTime.now())); } } @@ -1264,7 +1264,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter(LocalDateTime.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(LocalDateTime.now())); } } @@ -1279,7 +1279,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter(Instant.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(Instant.now())); } } @@ -1294,7 +1294,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAfter(OffsetDateTime.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(OffsetDateTime.now())); } } @@ -1309,7 +1309,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - 
expectAssumptionViolatedException(() -> given(actual).isAfter(ZonedDateTime.now())); + expectAssumptionNotMetException(() -> given(actual).isAfter(ZonedDateTime.now())); } } @@ -1324,7 +1324,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).hasContent("B")); + expectAssumptionNotMetException(() -> given(actual).hasContent("B")); } } @@ -1339,7 +1339,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAbsolute()); + expectAssumptionNotMetException(() -> given(actual).isAbsolute()); } } @@ -1354,7 +1354,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(actual).isAbsolute()); + expectAssumptionNotMetException(() -> given(actual).isAbsolute()); } } @@ -1367,7 +1367,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(new URI("http://assertj.org")).hasPort(80)); + expectAssumptionNotMetException(() -> given(new URI("http://assertj.org")).hasPort(80)); } } @@ -1380,7 +1380,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(new URL("http://assertj.org")).hasPort(80)); + expectAssumptionNotMetException(() -> given(new URL("http://assertj.org")).hasPort(80)); } } @@ -1393,7 +1393,7 @@ void should_run_test_when_assumption_passes() { @Test void should_ignore_test_when_assumption_fails() { - expectAssumptionViolatedException(() -> given(Duration.ofHours(1)).isNotNull().isNegative()); + expectAssumptionNotMetException(() -> given(Duration.ofHours(1)).isNotNull().isNegative()); } } } diff --git a/src/test/java/org/assertj/core/api/assumptions/BaseAssumptionsRunnerTest.java b/src/test/java/org/assertj/core/api/assumptions/BaseAssumptionsRunnerTest.java --- a/src/test/java/org/assertj/core/api/assumptions/BaseAssumptionsRunnerTest.java +++ b/src/test/java/org/assertj/core/api/assumptions/BaseAssumptionsRunnerTest.java @@ -13,7 +13,7 @@ package org.assertj.core.api.assumptions; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import java.util.Collection; import java.util.function.Function; @@ -23,7 +23,6 @@ import org.assertj.core.data.TolkienCharacter; import org.assertj.core.data.TolkienCharacter.Race; import org.assertj.core.test.CartoonCharacter; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; @@ -81,7 +80,7 @@ private static void setupData() { @ParameterizedTest @MethodSource("provideAssumptionsRunners") void should_ignore_test_when_assumption_fails(AssumptionRunner<?> assumptionRunner) { - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(assumptionRunner::runFailingAssumption); + expectAssumptionNotMetException(assumptionRunner::runFailingAssumption); } @ParameterizedTest diff --git a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asHexString_Test.java b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asHexString_Test.java --- 
a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asHexString_Test.java +++ b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asHexString_Test.java @@ -16,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; -import static org.assertj.core.util.AssertionsUtil.expectAssumptionViolatedException; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.FailureMessages.actualIsNull; import org.assertj.core.api.SoftAssertions; @@ -101,7 +101,7 @@ void should_fail_with_soft_assertions_capturing_all_errors() { @Test void should_ignore_test_when_assumption_for_internally_created_hex_string_assertion_fails() { - expectAssumptionViolatedException(() -> assumeThat(BYTES).asHexString().isEqualTo("other")); + expectAssumptionNotMetException(() -> assumeThat(BYTES).asHexString().isEqualTo("other")); } @Test diff --git a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_Test.java b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_Test.java --- a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_Test.java +++ b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_Test.java @@ -16,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; -import static org.assertj.core.util.AssertionsUtil.expectAssumptionViolatedException; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.FailureMessages.actualIsNull; import org.assertj.core.api.SoftAssertions; @@ -101,7 +101,7 @@ void should_ignore_test_when_assumption_for_internally_created_hex_string_assert // GIVEN String foo = "foo"; // WHEN/THEN - expectAssumptionViolatedException(() -> assumeThat(foo.getBytes()).asString().isEqualTo("bar")); + expectAssumptionNotMetException(() -> assumeThat(foo.getBytes()).asString().isEqualTo("bar")); } @Test diff --git a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_with_charset_Test.java b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_with_charset_Test.java --- a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_with_charset_Test.java +++ b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_asString_with_charset_Test.java @@ -16,7 +16,7 @@ import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.util.AssertionsUtil.expectAssertionError; -import static org.assertj.core.util.AssertionsUtil.expectAssumptionViolatedException; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.nio.charset.Charset; @@ -110,7 +110,7 @@ void should_ignore_test_when_assumption_for_internally_created_hex_string_assert String real = "Gerçek"; byte[] bytes = real.getBytes(TURKISH_CHARSET); // WHEN/THEN - expectAssumptionViolatedException(() -> assumeThat(bytes).asString(TURKISH_CHARSET).isEqualTo("bar")); + expectAssumptionNotMetException(() -> assumeThat(bytes).asString(TURKISH_CHARSET).isEqualTo("bar")); } @Test diff --git 
a/src/test/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonAssert_assumptions_Test.java b/src/test/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonAssert_assumptions_Test.java --- a/src/test/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonAssert_assumptions_Test.java +++ b/src/test/java/org/assertj/core/api/recursive/comparison/RecursiveComparisonAssert_assumptions_Test.java @@ -13,13 +13,12 @@ package org.assertj.core.api.recursive.comparison; import static org.assertj.core.api.Assertions.assertThatCode; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assumptions.assumeThat; import static org.assertj.core.presentation.UnicodeRepresentation.UNICODE_REPRESENTATION; +import static org.assertj.core.util.AssertionsUtil.expectAssumptionNotMetException; import org.assertj.core.api.RecursiveComparisonAssert_isEqualTo_BaseTest; import org.assertj.core.internal.objects.data.Person; -import org.junit.AssumptionViolatedException; import org.junit.jupiter.api.Test; class RecursiveComparisonAssert_assumptions_Test extends RecursiveComparisonAssert_isEqualTo_BaseTest { @@ -35,8 +34,8 @@ void should_ignore_test_when_one_of_the_assumption_fails() { unexpected.home.address.number = 2; // THEN assumeThat(actual).usingRecursiveComparison().isEqualTo(expected); - assertThatExceptionOfType(AssumptionViolatedException.class).isThrownBy(() -> assumeThat(actual).usingRecursiveComparison() - .isEqualTo(unexpected)); + expectAssumptionNotMetException(() -> assumeThat(actual).usingRecursiveComparison() + .isEqualTo(unexpected)); } @Test diff --git a/src/test/java/org/assertj/core/util/AssertionsUtil.java b/src/test/java/org/assertj/core/util/AssertionsUtil.java --- a/src/test/java/org/assertj/core/util/AssertionsUtil.java +++ b/src/test/java/org/assertj/core/util/AssertionsUtil.java @@ -24,7 +24,7 @@ import org.assertj.core.api.ThrowableAssert.ThrowingCallable; import org.assertj.core.api.ThrowableAssertAlternative; -import org.junit.AssumptionViolatedException; +import org.opentest4j.TestAbortedException; public class AssertionsUtil { @@ -40,8 +40,8 @@ public static ThrowableAssertAlternative<AssertionError> assertThatAssertionErro return assertThatExceptionOfType(AssertionError.class).isThrownBy(shouldRaiseAssertionError); } - public static void expectAssumptionViolatedException(ThrowingCallable shouldRaiseError) { - assertThatThrownBy(shouldRaiseError).isInstanceOf(AssumptionViolatedException.class); + public static void expectAssumptionNotMetException(ThrowingCallable shouldRaiseError) { + assertThatThrownBy(shouldRaiseError).isInstanceOf(TestAbortedException.class); } public static Charset getDifferentCharsetFrom(Charset charset) {
Assumptions.assumeThat incompatible with TestNG 7.1.1+

See https://github.com/cbeust/testng/issues/2352
Thanks for reporting the issue @C-Otto. We could increase the precedence of `org.testng.SkipException` over `org.junit.AssumptionViolatedException`, but I would first wait for feedback on the TestNG issue you opened.

cbeust/testng#2358 should fix this issue.
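For context, the question behind this instance is which exception type a failed assumption should raise so that the running test framework (TestNG, JUnit 4, JUnit 5) reports a skipped test rather than a failure; the test helper updated in the patch above now expects opentest4j's `TestAbortedException`. The snippet below is a hypothetical, standalone sketch of the classpath-detection idea behind such precedence rules. It is not AssertJ's actual implementation, and the candidate order is only an assumption for illustration; a pure classpath check like this is exactly what makes precedence tricky when several frameworks are present at once.

```java
import java.util.Arrays;
import java.util.List;

// Hypothetical sketch (not AssertJ code): build the assumption exception the active test framework understands.
public class AssumptionExceptionPicker {

  // Candidate exception types, checked in an assumed precedence order.
  private static final List<String> CANDIDATES = Arrays.asList(
      "org.testng.SkipException",               // TestNG skips the test on this
      "org.opentest4j.TestAbortedException",    // JUnit 5 / opentest4j aborts the test on this
      "org.junit.AssumptionViolatedException"); // JUnit 4 skips the test on this

  public static RuntimeException assumptionNotMet(String message) {
    for (String className : CANDIDATES) {
      try {
        Class<?> type = Class.forName(className);
        // all three candidates have a (String) constructor and extend RuntimeException
        return (RuntimeException) type.getConstructor(String.class).newInstance(message);
      } catch (ReflectiveOperationException e) {
        // candidate not on the classpath (or not instantiable this way): try the next one
      }
    }
    throw new IllegalStateException("No known test framework assumption exception found");
  }

  public static void main(String[] args) {
    // prints the chosen exception type, depending on what is on the classpath
    System.out.println(assumptionNotMet("assumption not met").getClass().getName());
  }
}
```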
2020-08-27T19:24:36Z
3.17
assertj/assertj
1,890
assertj__assertj-1890
[ "1887" ]
50de3245ac017de6544f225777ee3bf058fce9a6
diff --git a/src/main/java/org/assertj/core/api/AbstractUrlAssert.java b/src/main/java/org/assertj/core/api/AbstractUrlAssert.java --- a/src/main/java/org/assertj/core/api/AbstractUrlAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractUrlAssert.java @@ -419,7 +419,7 @@ public SELF hasNoParameter(String name, String value) { } /** - * Verifies that the actual {@link URL} is equivalent to the given one after <b>their parameters are sorted</b>. + * Verifies that the actual {@code URL} is equivalent to the given one after <b>their parameters are sorted</b>. * <p> * Example: * <pre><code class='java'> URL url = new URL("http://example.com?a=b&amp;c=d"); @@ -434,7 +434,7 @@ public SELF hasNoParameter(String name, String value) { * //... and this one fails as domains are different. * assertThat(url).isEqualToWithSortedQueryParameters(new URL("http://example2.com?amp;a=b&amp;c=d")); </code></pre> * - * @param expected the expected {@link URL} to compare actual to. + * @param expected the expected {@code URL} to compare actual to. * @return {@code this} assertion object. * @throws NullPointerException if the given URL is {@code null}. * @throws AssertionError if the actual {@code URL} is {@code null}.
diff --git a/src/test/java/org/assertj/core/api/uri/UriAssert_hasNoPath_Test.java b/src/test/java/org/assertj/core/api/uri/UriAssert_hasNoPath_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/uri/UriAssert_hasNoPath_Test.java @@ -0,0 +1,31 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.uri; + +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.UriAssert; +import org.assertj.core.api.UriAssertBaseTest; + +public class UriAssert_hasNoPath_Test extends UriAssertBaseTest { + + @Override + protected UriAssert invoke_api_method() { + return assertions.hasNoPath(); + } + + @Override + protected void verify_internal_effects() { + verify(uris).assertHasPath(getInfo(assertions), getActual(assertions), null); + } +} diff --git a/src/test/java/org/assertj/core/api/url/UrlAssert_hasNoPath_Test.java b/src/test/java/org/assertj/core/api/url/UrlAssert_hasNoPath_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/url/UrlAssert_hasNoPath_Test.java @@ -0,0 +1,31 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2020 the original author or authors. + */ +package org.assertj.core.api.url; + +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.UrlAssert; +import org.assertj.core.api.UrlAssertBaseTest; + +public class UrlAssert_hasNoPath_Test extends UrlAssertBaseTest { + + @Override + protected UrlAssert invoke_api_method() { + return assertions.hasNoPath(); + } + + @Override + protected void verify_internal_effects() { + verify(urls).assertHasPath(getInfo(assertions), getActual(assertions), ""); + } +}
Some APIs have no test

#### Summary

When I run the coverage analysis, I find:

- in `AbstractUrlAssert`: `hasNoPath()` and `hasNoParameters()` have no test.
- in `AbstractUriAssert`: `hasNoPath()` has no test.

Actually, IntelliJ IDEA tells me that they are never used. I am happy to raise this as a PR if this issue is considered valid.
Yes please, improving test coverage is always good! This gap is also mentioned in the Sonar report: https://sonarcloud.io/component_measures?id=joel-costigliola_assertj-core&metric=new_lines_to_cover&view=list

OK, thanks! I think I may have to wait for my last PR to be merged.

When I write tests for `hasNoPath()`, I am very confused. According to the javadoc, this assertion succeeds:

```java
assertThat(new URL("http://www.helloworld.org")).hasNoPath();
```

But this assertion fails:

```java
assertThat(new URI("http://helloworld.org")).hasNoPath(); // empty path
```

They are not symmetrical.
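The asymmetry follows from how the JDK itself reports an absent path, which the new tests in this instance encode: `UriAssert.hasNoPath()` delegates to `assertHasPath(..., null)` while `UrlAssert.hasNoPath()` delegates to `assertHasPath(..., "")`. The standalone snippet below is not an AssertJ test; it only checks the underlying `java.net` behaviour (as far as I can tell from the JDK javadoc):

```java
import java.net.URI;
import java.net.URL;

// Standalone illustration of the JDK behaviour behind the URL/URI asymmetry (not an AssertJ test).
public class PathAsymmetryDemo {

  public static void main(String[] args) throws Exception {
    // A URL without a path reports an empty string.
    System.out.println("URL path:        '" + new URL("http://www.helloworld.org").getPath() + "'"); // ''

    // A hierarchical URI without a path also reports an empty string, not null.
    System.out.println("URI path:        '" + new URI("http://helloworld.org").getPath() + "'");     // ''

    // Only an opaque URI (e.g. mailto:) has a null path.
    System.out.println("Opaque URI path: " + new URI("mailto:someone@example.com").getPath());       // null
  }
}
```

So `assumeThat`-style or plain assertions on a pathless http URI fail `hasNoPath()` only because the URI variant compares against `null` rather than an empty path, which is the inconsistency worth fixing or at least documenting.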
2020-05-29T07:34:58Z
3.16
assertj/assertj
1,769
assertj__assertj-1769
[ "1768" ]
089bbe43e39a0933818d98a698eea5d49aeeaedf
diff --git a/src/main/java/org/assertj/core/error/ClassModifierShouldBe.java b/src/main/java/org/assertj/core/error/ClassModifierShouldBe.java --- a/src/main/java/org/assertj/core/error/ClassModifierShouldBe.java +++ b/src/main/java/org/assertj/core/error/ClassModifierShouldBe.java @@ -12,12 +12,13 @@ */ package org.assertj.core.error; -import static java.lang.String.format; import static java.lang.reflect.Modifier.isPrivate; import static java.lang.reflect.Modifier.isProtected; import static java.lang.reflect.Modifier.isPublic; +import static org.assertj.core.util.Strings.isNullOrEmpty; import java.lang.reflect.Modifier; +import java.util.StringJoiner; /** * Error message factory for an assertion which checks that a class has (or has not) a specific modifier. @@ -87,7 +88,16 @@ private static String modifiers(Class<?> actual) { int modifiers = actual.getModifiers(); boolean isPackagePrivate = !isPublic(modifiers) && !isProtected(modifiers) && !isPrivate(modifiers); String modifiersDescription = Modifier.toString(modifiers); - return isPackagePrivate ? format("%s %s", PACKAGE_PRIVATE, modifiersDescription) : modifiersDescription; + StringJoiner sj = new StringJoiner(" "); + + if (isPackagePrivate) { + sj.add(PACKAGE_PRIVATE); + } + if (!modifiersDescription.isEmpty()) { + sj.add(modifiersDescription); + } + + return sj.toString(); } }
diff --git a/src/test/java/org/assertj/core/error/ClassModifierShouldBe_create_Test.java b/src/test/java/org/assertj/core/error/ClassModifierShouldBe_create_Test.java --- a/src/test/java/org/assertj/core/error/ClassModifierShouldBe_create_Test.java +++ b/src/test/java/org/assertj/core/error/ClassModifierShouldBe_create_Test.java @@ -28,7 +28,7 @@ class ClassModifierShouldBe_create_Test { @Test void should_create_error_message_for_is_final() { // GIVEN - Class<Object> nonFinalClass = Object.class; + Class<?> nonFinalClass = Object.class; // WHEN String error = shouldBeFinal(nonFinalClass).create(new TestDescription("TEST")); // THEN @@ -41,7 +41,7 @@ void should_create_error_message_for_is_final() { @Test void should_create_error_message_for_is_not_final() { // GIVEN - Class<String> finalClass = String.class; + Class<?> finalClass = String.class; // WHEN String error = shouldNotBeFinal(finalClass).create(new TestDescription("TEST")); // THEN @@ -52,20 +52,35 @@ void should_create_error_message_for_is_not_final() { } @Test - void should_create_clear_error_message_when_actual_is_package_private() { + void should_create_clear_error_message_when_actual_is_package_private_enum() { // GIVEN - Class<PackagePrivate> packagePrivateClass = PackagePrivate.class; + Class<?> packagePrivateEnum = PackagePrivateEnum.class; // WHEN - String error = shouldBePublic(packagePrivateClass).create(new TestDescription("TEST")); + String error = shouldBePublic(packagePrivateEnum).create(new TestDescription("TEST")); // THEN then(error).isEqualTo(format("[TEST] %n" + "Expecting:%n" + - " <org.assertj.core.error.ClassModifierShouldBe_create_Test.PackagePrivate>%n" + + " <org.assertj.core.error.ClassModifierShouldBe_create_Test.PackagePrivateEnum>%n" + "to be a \"public\" class but was \"package-private static final\".")); } - enum PackagePrivate { - MONITOR + @Test + void should_create_clear_error_message_when_actual_is_only_package_private() { + // GIVEN + Class<?> packagePrivateClass = PackagePrivateClass.class; + // WHEN + String error = shouldBePublic(packagePrivateClass).create(new TestDescription("TEST")); + // THEN + then(error).isEqualTo(format("[TEST] %n" + + "Expecting:%n" + + " <org.assertj.core.error.ClassModifierShouldBe_create_Test.PackagePrivateClass>%n" + + "to be a \"public\" class but was \"package-private\".")); + } + + enum PackagePrivateEnum { + } + + class PackagePrivateClass { } }
Trailing space in the modifiers list for ClassModifierShouldBe

#### Summary

When a class is only `package-private` and a `ClassModifierShouldBe` message is created, the list of modifiers in the failure message contains a trailing space right after the `package-private` value.

#### Example

Given:

```java
class PackagePrivate { }
```

`assertThat(PackagePrivate.class).isPublic()` yields:

```
java.lang.AssertionError:
Expecting:
 <org.example.PackagePrivate>
to be a "public" class but was "package-private ".
```
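The root cause is that `Modifier.toString(int)` returns an empty string when no modifier bit is set, which is the case for a plain top-level package-private class, so the old `format("%s %s", PACKAGE_PRIVATE, modifiersDescription)` always appended a space. Below is a minimal standalone reproduction that also mirrors the `StringJoiner`-based fix from the patch above:

```java
import java.lang.reflect.Modifier;
import java.util.StringJoiner;

// Minimal standalone reproduction of the trailing-space issue and of the StringJoiner-based fix.
public class TrailingSpaceDemo {

  public static void main(String[] args) {
    // A top-level package-private class has no modifier bits set, so this is the empty string.
    String modifiersDescription = Modifier.toString(0);

    // Old behaviour: the second %s is empty, leaving a trailing space.
    String before = String.format("%s %s", "package-private", modifiersDescription);
    System.out.println("[" + before + "]"); // [package-private ]

    // Fixed behaviour: only add non-empty parts, so no stray delimiter is emitted.
    StringJoiner joiner = new StringJoiner(" ");
    joiner.add("package-private");
    if (!modifiersDescription.isEmpty()) {
      joiner.add(modifiersDescription);
    }
    System.out.println("[" + joiner + "]"); // [package-private]
  }
}
```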
2020-01-30T00:50:43Z
3.15
assertj/assertj
1,743
assertj__assertj-1743
[ "1512" ]
da67a1395c10f02bd80ccd5b642239fad6ae089e
diff --git a/src/main/java/org/assertj/core/api/AbstractClassAssert.java b/src/main/java/org/assertj/core/api/AbstractClassAssert.java --- a/src/main/java/org/assertj/core/api/AbstractClassAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractClassAssert.java @@ -349,6 +349,45 @@ public SELF hasAnnotation(Class<? extends Annotation> annotation) { return myself; } + /** + * Verifies that the actual {@code Class} has the given class as direct superclass (as in {@link Class#getSuperclass()}). + * <p> + * Example: + * <pre><code class='java'> // this assertion succeeds as Object has no superclass: + * assertThat(Object.class).hasSuperclass(null); + * + * // this assertion succeeds as interfaces have no superclass: + * assertThat(Cloneable.class).hasSuperclass(null); + * + * // this assertion succeeds as primitive types have no superclass: + * assertThat(Integer.TYPE).hasSuperclass(null); + * + * // this assertion succeeds as void type has no superclass: + * assertThat(Void.TYPE).hasSuperclass(null); + * + * // this assertion succeeds: + * assertThat(Integer.class).hasSuperclass(Number.class); + * + * // this assertion succeeds as superclass for array classes is Object: + * assertThat(Integer[].class).hasSuperclass(Object.class); + * + * // this assertion fails as only direct superclass matches: + * assertThat(Integer.class).hasSuperclass(Object.class); + * + * // this assertion fails as interfaces are not superclasses: + * assertThat(Integer.class).hasSuperclass(Comparable.class);</code></pre> + * + * @param superclass the class which must be the direct superclass of actual. + * @return {@code this} assertions object + * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if the actual {@code Class} doesn't have the given class as direct superclass. + * @since 3.15.0 + */ + public SELF hasSuperclass(Class<?> superclass) { + classes.assertHasSuperclass(info, actual, superclass); + return myself; + } + /** * @deprecated use {@link #hasPublicFields(String...)} instead. * @param fields the fields who must be in the class. diff --git a/src/main/java/org/assertj/core/error/ShouldHaveAnnotations.java b/src/main/java/org/assertj/core/error/ShouldHaveAnnotations.java --- a/src/main/java/org/assertj/core/error/ShouldHaveAnnotations.java +++ b/src/main/java/org/assertj/core/error/ShouldHaveAnnotations.java @@ -16,7 +16,7 @@ import java.util.Collection; /** - * Creates an error message indicating that an assertion that verifies that a class have an annotation. + * Creates an error message indicating that an assertion that verifies that a class has annotations failed. * * @author William Delanoue * @author Joel Costigliola @@ -24,7 +24,7 @@ public class ShouldHaveAnnotations extends BasicErrorMessageFactory { /** - * Creates a new <code>{@link org.assertj.core.error.ShouldHaveAnnotations}</code>. + * Creates a new <code>{@link ShouldHaveAnnotations}</code>. * * @param actual the actual value in the failed assertion. * @param expected expected annotations for this class diff --git a/src/main/java/org/assertj/core/error/ShouldHaveSuperclass.java b/src/main/java/org/assertj/core/error/ShouldHaveSuperclass.java new file mode 100644 --- /dev/null +++ b/src/main/java/org/assertj/core/error/ShouldHaveSuperclass.java @@ -0,0 +1,47 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2019 the original author or authors. + */ +package org.assertj.core.error; + +import java.util.StringJoiner; + +/** + * Creates an error message indicating that an assertion that verifies that a class has a given superclass failed. + * + * @author Stefano Cordio + */ +public class ShouldHaveSuperclass extends BasicErrorMessageFactory { + + private static final String SHOULD_HAVE_SUPERCLASS = new StringJoiner("%n", "%n", "").add("Expecting") + .add(" <%s>") + .add("to have superclass:") + .add(" <%s>") + .add("but had:") + .add(" <%s>") + .toString(); + + /** + * Creates a new <code>{@link ShouldHaveSuperclass}</code>. + * + * @param actual the actual value in the failed assertion. + * @param superclass expected superclass for this class. + * @return the created {@code ErrorMessageFactory}. + */ + public static ErrorMessageFactory shouldHaveSuperclass(Class<?> actual, Class<?> superclass) { + return new ShouldHaveSuperclass(actual, superclass); + } + + private ShouldHaveSuperclass(Class<?> actual, Class<?> superclass) { + super(SHOULD_HAVE_SUPERCLASS, actual, superclass, actual.getSuperclass()); + } + +} diff --git a/src/main/java/org/assertj/core/internal/Classes.java b/src/main/java/org/assertj/core/internal/Classes.java --- a/src/main/java/org/assertj/core/internal/Classes.java +++ b/src/main/java/org/assertj/core/internal/Classes.java @@ -31,6 +31,7 @@ import static org.assertj.core.error.ShouldHaveMethods.shouldNotHaveMethods; import static org.assertj.core.error.ShouldHaveNoFields.shouldHaveNoDeclaredFields; import static org.assertj.core.error.ShouldHaveNoFields.shouldHaveNoPublicFields; +import static org.assertj.core.error.ShouldHaveSuperclass.shouldHaveSuperclass; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveDeclaredFields; import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveFields; import static org.assertj.core.util.Lists.newArrayList; @@ -258,6 +259,23 @@ public void assertContainsAnnotations(AssertionInfo info, Class<?> actual, if (!missing.isEmpty()) throw failures.failure(info, shouldHaveAnnotations(actual, expected, missing)); } + /** + * Verifies that the actual {@code Class} has the given class as direct {@code superclass}. + * + * @param info contains information about the assertion. + * @param actual the "actual" {@code Class}. + * @param superclass the direct superclass, which can be null according to {@link Class#getSuperclass()}. + * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if the actual {@code Class} superclass does not have the expected superclass. + */ + public void assertHasSuperclass(AssertionInfo info, Class<?> actual, Class<?> superclass) { + assertNotNull(info, actual); + Class<?> actualSuperclass = actual.getSuperclass(); + if ((actualSuperclass == null && superclass != null) || (actualSuperclass != null && !actualSuperclass.equals(superclass))) { + throw failures.failure(info, shouldHaveSuperclass(actual, superclass)); + } + } + /** * Verifies that the actual {@code Class} has the {@code fields}. *
diff --git a/src/test/java/org/assertj/core/api/classes/ClassAssert_hasSuperclass_Test.java b/src/test/java/org/assertj/core/api/classes/ClassAssert_hasSuperclass_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/classes/ClassAssert_hasSuperclass_Test.java @@ -0,0 +1,39 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2019 the original author or authors. + */ +package org.assertj.core.api.classes; + +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.ClassAssert; +import org.assertj.core.api.ClassAssertBaseTest; +import org.junit.jupiter.api.DisplayName; + +/** + * Tests for <code>{@link ClassAssert#hasSuperclass(Class)}</code>. + * + * @author Stefano Cordio + */ +@DisplayName("ClassAssert hasSuperclass") +class ClassAssert_hasSuperclass_Test extends ClassAssertBaseTest { + + @Override + protected ClassAssert invoke_api_method() { + return assertions.hasSuperclass(Object.class); + } + + @Override + protected void verify_internal_effects() { + verify(classes).assertHasSuperclass(getInfo(assertions), getActual(assertions), Object.class); + } + +} diff --git a/src/test/java/org/assertj/core/error/ShouldHaveSuperclass_create_Test.java b/src/test/java/org/assertj/core/error/ShouldHaveSuperclass_create_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/error/ShouldHaveSuperclass_create_Test.java @@ -0,0 +1,49 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2019 the original author or authors. + */ +package org.assertj.core.error; + +import static java.lang.String.format; +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldHaveSuperclass.shouldHaveSuperclass; +import static org.assertj.core.presentation.StandardRepresentation.STANDARD_REPRESENTATION; + +import org.assertj.core.description.Description; +import org.assertj.core.internal.TestDescription; +import org.assertj.core.presentation.Representation; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +/** + * Tests for <code>{@link ShouldHaveSuperclass#create(Description, Representation)}</code>. 
+ * + * @author Stefano Cordio + */ +@DisplayName("ShouldHaveSuperclass create") +class ShouldHaveSuperclass_create_Test { + + @Test + void should_create_error_message() { + // WHEN + String message = shouldHaveSuperclass(String.class, Integer.class).create(new TestDescription("TEST"), + STANDARD_REPRESENTATION); + // THEN + then(message).isEqualTo(format("[TEST] %n" + + "Expecting%n" + + " <java.lang.String>%n" + + "to have superclass:%n" + + " <java.lang.Integer>%n" + + "but had:%n" + + " <java.lang.Object>")); + } + +} diff --git a/src/test/java/org/assertj/core/internal/classes/Classes_assertHasSuperclass_Test.java b/src/test/java/org/assertj/core/internal/classes/Classes_assertHasSuperclass_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/internal/classes/Classes_assertHasSuperclass_Test.java @@ -0,0 +1,94 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2019 the original author or authors. + */ +package org.assertj.core.internal.classes; + +import static org.assertj.core.api.BDDAssertions.then; +import static org.assertj.core.error.ShouldHaveSuperclass.shouldHaveSuperclass; +import static org.assertj.core.test.TestData.someInfo; +import static org.assertj.core.util.AssertionsUtil.expectAssertionError; +import static org.assertj.core.util.FailureMessages.actualIsNull; + +import java.util.stream.Stream; + +import org.assertj.core.internal.ClassesBaseTest; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.NullSource; +import org.junit.jupiter.params.provider.ValueSource; + +@DisplayName("Classes assertHasSuperclass") +class Classes_assertHasSuperclass_Test extends ClassesBaseTest { + + @Test + void should_fail_if_actual_is_null() { + // GIVEN + Class<?> actual = null; + // WHEN + AssertionError assertionError = expectAssertionError(() -> classes.assertHasSuperclass(someInfo(), actual, Object.class)); + // THEN + then(assertionError).hasMessage(actualIsNull()); + } + + @Test + void should_pass_if_actual_has_given_class_as_direct_superclass() { + // GIVEN + Class<?> actual = Integer.class; + Class<?> superclass = Number.class; + // WHEN/THEN + classes.assertHasSuperclass(someInfo(), actual, superclass); + } + + @ParameterizedTest + @NullSource + @ValueSource(classes = { Object.class, Comparable.class, String.class }) + void should_fail_if_actual_has_not_given_class_as_direct_superclass(Class<?> superclass) { + // GIVEN + Class<?> actual = Integer.class; + // WHEN + AssertionError assertionError = expectAssertionError(() -> classes.assertHasSuperclass(someInfo(), actual, superclass)); + // THEN + then(assertionError).hasMessage(shouldHaveSuperclass(actual, superclass).create()); + } + + @Test + void should_pass_if_actual_is_an_array_class_and_object_class_is_given() { + // GIVEN + Class<?> actual = Integer[].class; + // WHEN/THEN + 
classes.assertHasSuperclass(someInfo(), actual, Object.class); + } + + @ParameterizedTest + @MethodSource("nullSuperclassTypes") + void should_pass_if_actual_has_no_superclass_and_null_is_given(Class<?> actual) { + // WHEN/THEN + classes.assertHasSuperclass(someInfo(), actual, null); + } + + private static Stream<Class<?>> nullSuperclassTypes() { + return Stream.of(Object.class, + Cloneable.class, // any interface + Boolean.TYPE, + Byte.TYPE, + Character.TYPE, + Double.TYPE, + Float.TYPE, + Integer.TYPE, + Long.TYPE, + Short.TYPE, + Void.TYPE); + } + +} diff --git a/src/test/java/org/assertj/core/internal/classes/Classes_assertIsPackagePrivate_Test.java b/src/test/java/org/assertj/core/internal/classes/Classes_assertIsPackagePrivate_Test.java --- a/src/test/java/org/assertj/core/internal/classes/Classes_assertIsPackagePrivate_Test.java +++ b/src/test/java/org/assertj/core/internal/classes/Classes_assertIsPackagePrivate_Test.java @@ -50,7 +50,7 @@ void should_fail_if_actual_is_not_a_package_private_class() { // WHEN AssertionError assertionError = expectAssertionError(() -> classes.assertIsPackagePrivate(someInfo(), clazz)); // THEN - then(assertionError).hasMessage(shouldBePackagePrivate(Object.class).create()); + then(assertionError).hasMessage(shouldBePackagePrivate(clazz).create()); } static class PackagePrivateClass {
Add hasSuperclass(Class<?>) to AbstractClassAssert #### Summary Sometimes I feel the lack of `isNotAssignableFrom()` in `AbstractClassAssert`. Recently I needed a defensive test to enforce an exception class staying "checked" (i.e., not inheriting from `RuntimeException`). To do that, I created a custom condition: ```java private static Condition<Class<?>> runtimeException = new Condition<>(RuntimeException.class::isAssignableFrom, RuntimeException.class.getSimpleName()); ``` used in this way: ```java assertThat(MyException.class).isNot(runtimeException); ``` #### Example Following the behavior of the existing `isAssignableFrom(Class<?>...)`, a new API could check that `actual` is not assignable from any of the `others`: ```java public SELF isNotAssignableFrom(Class<?>... others) ``` With it, my example would not require a custom condition: ```java assertThat(RuntimeException.class).isNotAssignableFrom(MyException.class); ``` Would this new API make sense? #### Additional Thoughts Would it make sense to have some assertions for `Throwable` classes, for example to verify whether the class inherits from `Error`/`Exception`/`RuntimeException`? It would address the ugly backwards readability of `isAssignableFrom` :-) Maybe a new `AbstractThrowableClassAssert` inheriting from `AbstractClassAssert` could do the job.
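As a hedged illustration of the check the proposed `isNotAssignableFrom(Class<?>...)` would perform (not AssertJ's actual implementation), here is a minimal plain-JDK sketch; `assertNotAssignableFrom` and `MyException` are made-up names used only for this example:

```java
// Hypothetical sketch: the reflection check behind the proposed isNotAssignableFrom(Class<?>... others).
class NotAssignableFromSketch {

  static void assertNotAssignableFrom(Class<?> actual, Class<?>... others) {
    for (Class<?> other : others) {
      // Class#isAssignableFrom reads "backwards": actual plays the supertype role in the check.
      if (actual.isAssignableFrom(other)) {
        throw new AssertionError(actual.getName() + " should not be assignable from " + other.getName());
      }
    }
  }

  static class MyException extends Exception {} // a hypothetical "checked" exception

  public static void main(String[] args) {
    // Passes: MyException does not extend RuntimeException, so the check above does not trip.
    assertNotAssignableFrom(RuntimeException.class, MyException.class);
  }
}
```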
I'm ok with `isNotAssignableFrom` as the opposite of `isAssignableFrom`. Having said that, I have always found the (JDK) `isAssignableFrom` method to be highly unclear; I often find myself wondering which class should inherit from the other one. It would be great to find a better name for the `isAssignableFrom` assertion (and `isNotAssignableFrom`) that would not be ambiguous, maybe `isA/isAn` and `isNotA/isNotAn`? ```java assertThat(MyException.class).isAn(Error.class); assertThat(MyException.class).isNotA(RuntimeException.class); ``` I don't see adding `AbstractThrowableClassAssert` to be valuable enough at this point, were you thinking of having assertions like `isRuntimeException()`? Yes, I was thinking about something like `isRuntimeException()` just because of the JDK `isAssignableFrom` unclearness! But you're right, there would be no need for an `AbstractThrowableClassAssert` if we had a better name for `isAssignableFrom`. I would avoid the `isA`/`isAn` duplication; the options that came to my mind: 1. `isInheritingFrom()` and potentially an `isBeingInheritedFrom()` for the superclass use case 2. `hasSuperclass()` / `hasSubclass()`, but it's probably too close to [Class#getSuperclass()](https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html#getSuperclass--) and it could create confusion since `getSuperclass()` has less flexibility 3. `isChildOf()`/`isParentOf()` or `hasParent()`/`hasChild()` @joel-costigliola let's try again on this topic, but this time simpler :-) What about having `isAssignableTo(Class<?>)`? @joel-costigliola one more try 🙂 What about `hasSuperclass(Class<?>)` which is backed by [`getSuperclass()`](https://docs.oracle.com/javase/8/docs/api/java/lang/Class.html#getSuperclass--) from the JDK? Not as flexible as the previous one, as it doesn't match implemented interfaces, but it's good enough for my purpose and doesn't deviate from the JDK API. Works for me! :)
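A minimal usage sketch of the assertion the thread converges on, mirroring the test patch above and contrasting it with the existing `isAssignableFrom`; the classes used are plain JDK types:

```java
import static org.assertj.core.api.Assertions.assertThat;

// Sketch: hasSuperclass mirrors Class#getSuperclass, so only the direct superclass matches.
class HasSuperclassSketch {

  public static void main(String[] args) {
    assertThat(Integer.class).hasSuperclass(Number.class);    // direct superclass: passes
    assertThat(Number.class).isAssignableFrom(Integer.class); // existing assertion, reads "backwards"
    // assertThat(Integer.class).hasSuperclass(Comparable.class) would fail: interfaces are not superclasses
  }
}
```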
2020-01-02T08:43:16Z
3.14
assertj/assertj
1,629
assertj__assertj-1629
[ "1628" ]
9f7ab5b7f2ee2b015b5268a323b4ac69bca5240a
diff --git a/src/main/java/org/assertj/core/api/SoftProxies.java b/src/main/java/org/assertj/core/api/SoftProxies.java --- a/src/main/java/org/assertj/core/api/SoftProxies.java +++ b/src/main/java/org/assertj/core/api/SoftProxies.java @@ -54,6 +54,7 @@ class SoftProxies { .or(named("extractingByKey")) .or(named("extractingByKeys")) .or(named("extractingFromEntries")) + .or(named("get")) .or(named("asInstanceOf")); private static final Junction<MethodDescription> METHODS_NOT_TO_PROXY = methodsNamed("as").or(named("clone"))
diff --git a/src/test/java/org/assertj/core/api/SoftAssertionsTest.java b/src/test/java/org/assertj/core/api/SoftAssertionsTest.java --- a/src/test/java/org/assertj/core/api/SoftAssertionsTest.java +++ b/src/test/java/org/assertj/core/api/SoftAssertionsTest.java @@ -823,24 +823,18 @@ public void should_work_with_predicate() { } @Test - public void should_work_with_optional() { - // GIVEN - Optional<String> optional = Optional.of("Gandalf"); - // WHEN - softly.assertThat(optional).contains("Gandalf"); - // THEN - softly.assertAll(); - } - - @Test - public void should_work_with_optional_chained_with_map() { + void should_work_with_optional() { // GIVEN Optional<String> optional = Optional.of("Gandalf"); // WHEN softly.assertThat(optional) - .contains("Gandalf") + .contains("Gandalf"); + softly.assertThat(optional) .map(String::length) .contains(7); + softly.assertThat(optional) + .get() + .isEqualTo("Gandalf"); // THEN softly.assertAll(); } @@ -1810,14 +1804,21 @@ public void optional_soft_assertions_should_report_errors_on_methods_that_switch .map(String::length) .hasValue(4) .hasValue(888); // fail + softly.assertThat(optional) + .as("get()") + .overridingErrorMessage("error message") + .get() + .isEqualTo("Yoda") + .isEqualTo("Luke"); // fail // THEN List<Throwable> errorsCollected = softly.errorsCollected(); - assertThat(errorsCollected).hasSize(3); + assertThat(errorsCollected).hasSize(4); assertThat(errorsCollected.get(0)).hasMessage("[map(String::length)] error message"); assertThat(errorsCollected.get(1)).hasMessageContaining("flatMap(upperCaseOptional)") .hasMessageContaining("yoda"); assertThat(errorsCollected.get(2)).hasMessageContaining("map(String::length) after flatMap(upperCaseOptional)") .hasMessageContaining("888"); + assertThat(errorsCollected.get(3)).hasMessage("[get()] error message"); } @Test
AbstractOptionalAssert#get() is not correctly proxied with SoftAssertions #### Summary AbstractOptionalAssert#get() is not listed under `METHODS_CHANGING_THE_OBJECT_UNDER_TEST` of `SoftProxies`. #### Example ```java SoftAssertions softly = new SoftAssertions(); Optional<String> optional = Optional.of("Yoda"); softly.assertThat(optional) .get() .isEqualTo("Luke"); // fails here assertThat(softly.errorsCollected()).hasSize(1); ```
Good catch!
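A hedged sketch of the behavior once `get()` is proxied like the other navigation methods: the failing `isEqualTo` from the issue above is collected and only reported when the soft assertions are verified (here via `assertSoftly`, which calls `assertAll()` at the end):

```java
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

import java.util.Optional;

import org.assertj.core.api.SoftAssertions;

// Sketch: with get() proxied, the failure is recorded instead of thrown immediately,
// and surfaces as a single AssertionError when the soft assertions are checked.
class SoftOptionalGetSketch {

  public static void main(String[] args) {
    assertThatExceptionOfType(AssertionError.class)
      .isThrownBy(() -> SoftAssertions.assertSoftly(softly -> softly.assertThat(Optional.of("Yoda"))
                                                                    .get()
                                                                    .isEqualTo("Luke")));
  }
}
```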
2019-10-06T08:57:07Z
3.13
assertj/assertj
1,332
assertj__assertj-1332
[ "1331" ]
1747c1e5df371cef2750dc4321eaf391670e294c
diff --git a/src/main/java/org/assertj/core/internal/Strings.java b/src/main/java/org/assertj/core/internal/Strings.java --- a/src/main/java/org/assertj/core/internal/Strings.java +++ b/src/main/java/org/assertj/core/internal/Strings.java @@ -550,7 +550,7 @@ private void checkCharSequenceIsNotNull(CharSequence sequence) { * @throws AssertionError if the given {@code CharSequence}s are not equal. */ public void assertEqualsIgnoringCase(AssertionInfo info, CharSequence actual, CharSequence expected) { - if (!areEqualIgnoringCase(actual, expected)) throw failures.failure(info, shouldBeEqual(actual, expected)); + if (!areEqualIgnoringCase(actual, expected)) throw failures.failure(info, shouldBeEqual(actual, expected), actual, expected); } /** @@ -584,7 +584,7 @@ public void assertIsEqualToNormalizingNewlines(AssertionInfo info, CharSequence String actualNormalized = normalizeNewlines(actual); String expectedNormalized = normalizeNewlines(expected); if (!actualNormalized.equals(expectedNormalized)) - throw failures.failure(info, shouldBeEqualIgnoringNewLineDifferences(actual, expected)); + throw failures.failure(info, shouldBeEqualIgnoringNewLineDifferences(actual, expected), actual, expected); } private static String normalizeNewlines(CharSequence actual) { @@ -601,7 +601,7 @@ private static String normalizeNewlines(CharSequence actual) { */ public void assertEqualsIgnoringWhitespace(AssertionInfo info, CharSequence actual, CharSequence expected) { if (!areEqualIgnoringWhitespace(actual, expected)) - throw failures.failure(info, shouldBeEqualIgnoringWhitespace(actual, expected)); + throw failures.failure(info, shouldBeEqualIgnoringWhitespace(actual, expected), actual, expected); } /** @@ -647,7 +647,7 @@ private String removeAllWhitespaces(CharSequence toBeStripped) { */ public void assertEqualsNormalizingWhitespace(AssertionInfo info, CharSequence actual, CharSequence expected) { if (!areEqualNormalizingWhitespace(actual, expected)) - throw failures.failure(info, shouldBeEqualNormalizingWhitespace(actual, expected)); + throw failures.failure(info, shouldBeEqualNormalizingWhitespace(actual, expected), actual, expected); } /** @@ -1084,7 +1084,7 @@ public void assertIsEqualToIgnoringNewLines(AssertionInfo info, CharSequence act String actualWithoutNewLines = removeNewLines(actual); String expectedWithoutNewLines = removeNewLines(expected); if (!actualWithoutNewLines.equals(expectedWithoutNewLines)) - throw failures.failure(info, shouldBeEqualIgnoringNewLines(actual, expected)); + throw failures.failure(info, shouldBeEqualIgnoringNewLines(actual, expected), actual, expected); } public void assertLowerCase(AssertionInfo info, CharSequence actual) {
diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringWhitespace_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringWhitespace_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringWhitespace_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertEqualsIgnoringWhitespace_Test.java @@ -94,6 +94,6 @@ public static Stream<Arguments> equalIgnoringWhitespaceGenerator() { private void verifyFailureThrownWhenStringsAreNotEqualIgnoringWhitespace(AssertionInfo info, String actual, String expected) { - verify(failures).failure(info, shouldBeEqualIgnoringWhitespace(actual, expected)); + verify(failures).failure(info, shouldBeEqualIgnoringWhitespace(actual, expected), actual, expected); } } diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualToNormalizingNewlines_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualToNormalizingNewlines_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualToNormalizingNewlines_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualToNormalizingNewlines_Test.java @@ -51,7 +51,7 @@ public void should_fail_if_newlines_are_different_in_both_strings() { try { strings.assertIsEqualToNormalizingNewlines(someInfo(), actual, expected); } catch (AssertionError e) { - verify(failures).failure(someInfo(), shouldBeEqualIgnoringNewLineDifferences(actual, expected)); + verify(failures).failure(someInfo(), shouldBeEqualIgnoringNewLineDifferences(actual, expected), actual, expected); return; } failBecauseExpectedAssertionErrorWasNotThrown(); diff --git a/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualsToIgnoringNewLines_Test.java b/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualsToIgnoringNewLines_Test.java --- a/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualsToIgnoringNewLines_Test.java +++ b/src/test/java/org/assertj/core/internal/strings/Strings_assertIsEqualsToIgnoringNewLines_Test.java @@ -54,7 +54,7 @@ private void verifyThatAssertationErrorWasThrown(String actual, String expected) try { strings.assertIsEqualToIgnoringNewLines(someInfo(), actual, expected); } catch (AssertionError e) { - verify(failures).failure(someInfo(), shouldBeEqualIgnoringNewLines(actual, expected)); + verify(failures).failure(someInfo(), shouldBeEqualIgnoringNewLines(actual, expected), actual, expected); return; } failBecauseExpectedAssertionErrorWasNotThrown();
Throw AssertionFailedError instead of AssertionError in non-strict string assertions #### Summary Throwing AssertionFailedError from opentest4j facilitates test failure analysis in IDEs by providing a handy "diff" view. AssertJ already uses AssertionFailedError (if available on the classpath) but not in non-strict string assertions (from AbstractCharSequenceAssert), specifically: * isEqualToIgnoringNewLines * isEqualToIgnoringWhitespace * isEqualToNormalizingNewlines Given that two of them mention new lines, it is rather common to use them to compare long multi-line strings, and the IDE's "diff" feature would be very helpful for spotting a single-letter difference in big texts (possibly coming from files). #### Example ```java String generatedText = sut.generateText(); assertThat(generatedText).isEqualToNormalizingNewlines(loadText("expected.txt")); ``` ![image](https://user-images.githubusercontent.com/2374036/46114945-98d85080-c1fd-11e8-87a2-9cc52e0eed3e.png)
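A hedged sketch of what the requested behavior looks like from a test, assuming opentest4j is on the classpath and an AssertJ version that includes the patch above; the literal strings are made up for the example:

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;

import org.opentest4j.AssertionFailedError;

// Sketch: once the non-strict string assertions pass actual/expected to the failure,
// the thrown AssertionFailedError carries both values and IDEs can open their diff view.
class NonStrictStringFailureSketch {

  public static void main(String[] args) {
    Throwable thrown = catchThrowable(() -> assertThat("some text").isEqualToIgnoringWhitespace("other text"));
    assertThat(thrown).isInstanceOf(AssertionFailedError.class);
    AssertionFailedError failure = (AssertionFailedError) thrown;
    assertThat(failure.getActual().getValue()).isEqualTo("some text");
    assertThat(failure.getExpected().getValue()).isEqualTo("other text");
  }
}
```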
2018-09-26T23:58:31Z
3.11
assertj/assertj
1,568
assertj__assertj-1568
[ "1567" ]
b770d0c9c26606e84acbe3836320209805db2186
diff --git a/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java b/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java --- a/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java +++ b/src/main/java/org/assertj/core/api/InstanceOfAssertFactories.java @@ -204,15 +204,25 @@ static <VALUE> InstanceOfAssertFactory<Optional, OptionalAssert<VALUE>> optional /** * {@link InstanceOfAssertFactory} for a {@link URI}. + * <p> + * Note: The {@code TYPE} suffix prevents the shadowing of {@code java.net.URI} when the factory is statically imported. + * </p> + * + * @since 3.13.2 */ - InstanceOfAssertFactory<URI, AbstractUriAssert<?>> URI = new InstanceOfAssertFactory<>(URI.class, - Assertions::assertThat); + InstanceOfAssertFactory<URI, AbstractUriAssert<?>> URI_TYPE = new InstanceOfAssertFactory<>(URI.class, + Assertions::assertThat); /** * {@link InstanceOfAssertFactory} for a {@link URL}. + * <p> + * Note: The {@code TYPE} suffix prevents the shadowing of {@code java.net.URL} when the factory is statically imported. + * </p> + * + * @since 3.13.2 */ - InstanceOfAssertFactory<URL, AbstractUrlAssert<?>> URL = new InstanceOfAssertFactory<>(URL.class, - Assertions::assertThat); + InstanceOfAssertFactory<URL, AbstractUrlAssert<?>> URL_TYPE = new InstanceOfAssertFactory<>(URL.class, + Assertions::assertThat); /** * {@link InstanceOfAssertFactory} for a {@code boolean} or its corresponding boxed type {@link Boolean}.
diff --git a/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java b/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java --- a/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java +++ b/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java @@ -82,8 +82,8 @@ import static org.assertj.core.api.InstanceOfAssertFactories.STRING_BUFFER; import static org.assertj.core.api.InstanceOfAssertFactories.STRING_BUILDER; import static org.assertj.core.api.InstanceOfAssertFactories.THROWABLE; -import static org.assertj.core.api.InstanceOfAssertFactories.URI; -import static org.assertj.core.api.InstanceOfAssertFactories.URL; +import static org.assertj.core.api.InstanceOfAssertFactories.URI_TYPE; +import static org.assertj.core.api.InstanceOfAssertFactories.URL_TYPE; import static org.assertj.core.api.InstanceOfAssertFactories.ZONED_DATE_TIME; import static org.assertj.core.api.InstanceOfAssertFactories.array; import static org.assertj.core.api.InstanceOfAssertFactories.atomicIntegerFieldUpdater; @@ -113,6 +113,8 @@ import java.math.BigDecimal; import java.math.BigInteger; import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; import java.nio.file.Paths; import java.time.Instant; import java.time.LocalDate; @@ -319,21 +321,21 @@ void big_integer_factory_should_allow_big_integer_assertions() { } @Test - void uri_factory_should_allow_uri_assertions() { + void uri_type_factory_should_allow_uri_assertions() { // GIVEN - Object value = java.net.URI.create("http://localhost"); + Object value = URI.create("http://localhost"); // WHEN - AbstractUriAssert<?> result = assertThat(value).asInstanceOf(URI); + AbstractUriAssert<?> result = assertThat(value).asInstanceOf(URI_TYPE); // THEN result.hasHost("localhost"); } @Test - void url_factory_should_allow_url_assertions() throws MalformedURLException { + void url_type_factory_should_allow_url_assertions() throws MalformedURLException { // GIVEN - Object value = new java.net.URL("http://localhost"); + Object value = new URL("http://localhost"); // WHEN - AbstractUrlAssert<?> result = assertThat(value).asInstanceOf(URL); + AbstractUrlAssert<?> result = assertThat(value).asInstanceOf(URL_TYPE); // THEN result.hasHost("localhost"); }
import static org.assertj.core.api.Assertions.* clashes with URI (java.net.URI) #### Summary Statically importing AssertJ assertions imports fields from `InstanceOfAssertFactories`. The `URI` field shadows `java.net.URI` and requires fully qualified naming when using `java.net.URI`. There's a similar issue for `URL`, as these two type names exactly match constant names in `InstanceOfAssertFactories`. #### Example ```java import static org.assertj.core.api.Assertions.*; URI.create("http://foo") ``` Compilation failure: ``` PagedResourcesAssemblerUnitTests.java:[272,79] cannot find symbol symbol: method create(java.lang.String) location: variable URI of type org.assertj.core.api.InstanceOfAssertFactory<java.net.URI,org.assertj.core.api.AbstractUriAssert<?>> ```
Thanks for reporting this @mp911de. Thinking out loud, we might simply rename the factories to disambiguate this. That would help. URI and URL are the only two types where factory constants are clashing. All other types use some sort of different letter casing. Maybe this should have been part of the 3.13.1 release as well @joel-costigliola, as it seems to break existing code and the field names would need to be changed and removed. I have a similar impression. Getting this clash out of the way before wide adoption of 3.13.x would be helpful. Once a significant number of projects update to 3.13 and rely on `InstanceOfAssertFactories.URL/URI`, the rename will break their code. > Thanks for reporting this @mp911de. > > Thinking out loud, we might simply rename the factories to disambiguate this. @joel-costigliola That would be an option, e.g., having ~`URI_INSTANCE`~ `URI_TYPE` instead of `URI`. Or another option would be to have `Assertions` not implement `InstanceOfAssertFactories`. That was introduced to simplify the discovery of the factories, but [we realized that for Eclipse it was not enough anyway](https://github.com/joel-costigliola/assertj-core/pull/1498#issuecomment-496692889) and IntelliJ still seems able to discover them using smart completion. Renaming `URI` to `URI_TYPE` and `URL` to `URL_TYPE` seems the best option. @scordio do you want to tackle this one? We will release 3.13.2 at the end of the week just to get a bit more feedback. @joel-costigliola yes, I'll take care of it.
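A minimal usage sketch of the renamed factory agreed on above (available as `URI_TYPE` from 3.13.2 per the patch), mirroring the updated test: with the rename, `java.net.URI` is no longer shadowed by the static import:

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.InstanceOfAssertFactories.URI_TYPE;

import java.net.URI; // no longer shadowed, so URI.create(...) resolves to java.net.URI

class UriTypeFactorySketch {

  public static void main(String[] args) {
    Object value = URI.create("http://localhost");
    assertThat(value).asInstanceOf(URI_TYPE).hasHost("localhost");
  }
}
```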
2019-07-29T22:53:20Z
3.13
assertj/assertj
1,511
assertj__assertj-1511
[ "1506" ]
ce439bde862f94cb3bfd327406f3c943f375c63e
diff --git a/src/main/java/org/assertj/core/api/AbstractDateAssert.java b/src/main/java/org/assertj/core/api/AbstractDateAssert.java --- a/src/main/java/org/assertj/core/api/AbstractDateAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractDateAssert.java @@ -711,7 +711,7 @@ public SELF isBeforeOrEqualsTo(Date other) { } /** - * Verifies that the actual {@code Date} is before or equals to the given one. + * Verifies that the actual {@code Date} is before or equal to the given one. * <p> * Example: * <pre><code class='java'> SimpleDateFormat dateFormat = new SimpleDateFormat(&quot;yyyy-MM-dd&quot;); @@ -914,7 +914,7 @@ public SELF isAfterOrEqualsTo(Date other) { } /** - * Verifies that the actual {@code Date} is after or equals to the given one. + * Verifies that the actual {@code Date} is after or equal to the given one. * <p> * Example: * <pre><code class='java'> SimpleDateFormat dateFormat = new SimpleDateFormat(&quot;yyyy-MM-dd&quot;); diff --git a/src/main/java/org/assertj/core/api/AbstractInstantAssert.java b/src/main/java/org/assertj/core/api/AbstractInstantAssert.java --- a/src/main/java/org/assertj/core/api/AbstractInstantAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractInstantAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.Preconditions.checkArgument; import java.time.Instant; @@ -108,7 +108,7 @@ public SELF isBeforeOrEqualTo(Instant other) { assertNotNull(info, actual); assertInstantParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -156,7 +156,7 @@ public SELF isAfterOrEqualTo(Instant other) { assertNotNull(info, actual); assertInstantParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/api/AbstractLocalDateAssert.java b/src/main/java/org/assertj/core/api/AbstractLocalDateAssert.java --- a/src/main/java/org/assertj/core/api/AbstractLocalDateAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractLocalDateAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeToday.shouldBeToday; import static org.assertj.core.util.Preconditions.checkArgument; @@ -104,7 +104,7 @@ public SELF 
isBeforeOrEqualTo(LocalDate other) { Objects.instance().assertNotNull(info, actual); assertLocalDateParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -149,7 +149,7 @@ public SELF isAfterOrEqualTo(LocalDate other) { Objects.instance().assertNotNull(info, actual); assertLocalDateParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java b/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java --- a/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractLocalDateTimeAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeEqualIgnoringHours.shouldBeEqualIgnoringHours; import static org.assertj.core.error.ShouldBeEqualIgnoringMinutes.shouldBeEqualIgnoringMinutes; import static org.assertj.core.error.ShouldBeEqualIgnoringNanos.shouldBeEqualIgnoringNanos; @@ -113,7 +113,7 @@ public SELF isBeforeOrEqualTo(LocalDateTime other) { Objects.instance().assertNotNull(info, actual); assertLocalDateTimeParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -158,7 +158,7 @@ public SELF isAfterOrEqualTo(LocalDateTime other) { Objects.instance().assertNotNull(info, actual); assertLocalDateTimeParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/api/AbstractLocalTimeAssert.java b/src/main/java/org/assertj/core/api/AbstractLocalTimeAssert.java --- a/src/main/java/org/assertj/core/api/AbstractLocalTimeAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractLocalTimeAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeEqualIgnoringNanos.shouldBeEqualIgnoringNanos; import static 
org.assertj.core.error.ShouldBeEqualIgnoringSeconds.shouldBeEqualIgnoringSeconds; import static org.assertj.core.error.ShouldHaveSameHourAs.shouldHaveSameHourAs; @@ -106,7 +106,7 @@ public SELF isBeforeOrEqualTo(LocalTime other) { Objects.instance().assertNotNull(info, actual); assertLocalTimeParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -151,7 +151,7 @@ public SELF isAfterOrEqualTo(LocalTime other) { Objects.instance().assertNotNull(info, actual); assertLocalTimeParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java b/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java --- a/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractOffsetDateTimeAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeEqualIgnoringHours.shouldBeEqualIgnoringHours; import static org.assertj.core.error.ShouldBeEqualIgnoringMinutes.shouldBeEqualIgnoringMinutes; import static org.assertj.core.error.ShouldBeEqualIgnoringNanos.shouldBeEqualIgnoringNanos; @@ -116,7 +116,7 @@ public SELF isBeforeOrEqualTo(OffsetDateTime other) { Objects.instance().assertNotNull(info, actual); assertOffsetDateTimeParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -163,7 +163,7 @@ public SELF isAfterOrEqualTo(OffsetDateTime other) { Objects.instance().assertNotNull(info, actual); assertOffsetDateTimeParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/api/AbstractOffsetTimeAssert.java b/src/main/java/org/assertj/core/api/AbstractOffsetTimeAssert.java --- a/src/main/java/org/assertj/core/api/AbstractOffsetTimeAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractOffsetTimeAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static 
org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeEqualIgnoringNanos.shouldBeEqualIgnoringNanos; import static org.assertj.core.error.ShouldBeEqualIgnoringSeconds.shouldBeEqualIgnoringSeconds; import static org.assertj.core.error.ShouldBeEqualIgnoringTimezone.shouldBeEqualIgnoringTimezone; @@ -110,7 +110,7 @@ public SELF isBeforeOrEqualTo(OffsetTime other) { Objects.instance().assertNotNull(info, actual); assertOffsetTimeParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -156,7 +156,7 @@ public SELF isAfterOrEqualTo(OffsetTime other) { Objects.instance().assertNotNull(info, actual); assertOffsetTimeParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java b/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java --- a/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractZonedDateTimeAssert.java @@ -13,9 +13,9 @@ package org.assertj.core.api; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeEqualIgnoringHours.shouldBeEqualIgnoringHours; import static org.assertj.core.error.ShouldBeEqualIgnoringMinutes.shouldBeEqualIgnoringMinutes; import static org.assertj.core.error.ShouldBeEqualIgnoringNanos.shouldBeEqualIgnoringNanos; @@ -112,7 +112,7 @@ public SELF isBeforeOrEqualTo(ZonedDateTime other) { Objects.instance().assertNotNull(info, actual); assertDateTimeParameterIsNotNull(other); if (actual.isAfter(other)) { - throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeBeforeOrEqualTo(actual, other)); } return myself; } @@ -162,7 +162,7 @@ public SELF isAfterOrEqualTo(ZonedDateTime other) { Objects.instance().assertNotNull(info, actual); assertDateTimeParameterIsNotNull(other); if (actual.isBefore(other)) { - throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other)); + throw Failures.instance().failure(info, shouldBeAfterOrEqualTo(actual, other)); } return myself; } diff --git a/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualsTo.java b/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java similarity index 61% rename from src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualsTo.java rename to src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java --- a/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualsTo.java +++ b/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualTo.java @@ -15,37 +15,38 @@ import org.assertj.core.internal.ComparisonStrategy; import 
org.assertj.core.internal.StandardComparisonStrategy; - /** - * Creates an error message indicating that an assertion that verifies that a {@link Object} is before or equals to another one + * Creates an error message indicating that an assertion that verifies that an {@link Object} is after or equal to another one * failed. * * @author Joel Costigliola */ -public class ShouldBeBeforeOrEqualsTo extends BasicErrorMessageFactory { +public class ShouldBeAfterOrEqualTo extends BasicErrorMessageFactory { /** - * Creates a new <code>{@link ShouldBeBeforeOrEqualsTo}</code>. + * Creates a new <code>{@link ShouldBeAfterOrEqualTo}</code>. + * * @param actual the actual value in the failed assertion. * @param other the value used in the failed assertion to compare the actual value to. * @param comparisonStrategy the {@link ComparisonStrategy} used to evaluate assertion. * @return the created {@code ErrorMessageFactory}. */ - public static ErrorMessageFactory shouldBeBeforeOrEqualsTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { - return new ShouldBeBeforeOrEqualsTo(actual, other, comparisonStrategy); + public static ErrorMessageFactory shouldBeAfterOrEqualTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { + return new ShouldBeAfterOrEqualTo(actual, other, comparisonStrategy); } /** - * Creates a new <code>{@link ShouldBeBeforeOrEqualsTo}</code>. + * Creates a new <code>{@link ShouldBeAfterOrEqualTo}</code>. + * * @param actual the actual value in the failed assertion. * @param other the value used in the failed assertion to compare the actual value to. * @return the created {@code ErrorMessageFactory}. */ - public static ErrorMessageFactory shouldBeBeforeOrEqualsTo(Object actual, Object other) { - return new ShouldBeBeforeOrEqualsTo(actual, other, StandardComparisonStrategy.instance()); + public static ErrorMessageFactory shouldBeAfterOrEqualTo(Object actual, Object other) { + return new ShouldBeAfterOrEqualTo(actual, other, StandardComparisonStrategy.instance()); } - private ShouldBeBeforeOrEqualsTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { - super("%nExpecting:%n <%s>%nto be before or equals to:%n <%s>%s", actual, other, comparisonStrategy); + private ShouldBeAfterOrEqualTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { + super("%nExpecting:%n <%s>%nto be after or equal to:%n <%s>%s", actual, other, comparisonStrategy); } } diff --git a/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualsTo.java b/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java similarity index 62% rename from src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualsTo.java rename to src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java --- a/src/main/java/org/assertj/core/error/ShouldBeAfterOrEqualsTo.java +++ b/src/main/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo.java @@ -12,44 +12,41 @@ */ package org.assertj.core.error; -import java.util.Date; - -import org.assertj.core.internal.*; +import org.assertj.core.internal.ComparisonStrategy; +import org.assertj.core.internal.StandardComparisonStrategy; /** - * Creates an error message indicating that an assertion that verifies that a {@link Date} is after or equals to another - * one + * Creates an error message indicating that an assertion that verifies that an {@link Object} is before or equal to another one * failed. 
* * @author Joel Costigliola */ -public class ShouldBeAfterOrEqualsTo extends BasicErrorMessageFactory { +public class ShouldBeBeforeOrEqualTo extends BasicErrorMessageFactory { /** - * Creates a new <code>{@link ShouldBeAfterOrEqualsTo}</code>. + * Creates a new <code>{@link ShouldBeBeforeOrEqualTo}</code>. * * @param actual the actual value in the failed assertion. * @param other the value used in the failed assertion to compare the actual value to. * @param comparisonStrategy the {@link ComparisonStrategy} used to evaluate assertion. * @return the created {@code ErrorMessageFactory}. */ - public static ErrorMessageFactory shouldBeAfterOrEqualsTo(Object actual, Object other, - ComparisonStrategy comparisonStrategy) { - return new ShouldBeAfterOrEqualsTo(actual, other, comparisonStrategy); + public static ErrorMessageFactory shouldBeBeforeOrEqualTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { + return new ShouldBeBeforeOrEqualTo(actual, other, comparisonStrategy); } /** - * Creates a new <code>{@link ShouldBeAfterOrEqualsTo}</code>. + * Creates a new <code>{@link ShouldBeBeforeOrEqualTo}</code>. * * @param actual the actual value in the failed assertion. * @param other the value used in the failed assertion to compare the actual value to. * @return the created {@code ErrorMessageFactory}. */ - public static ErrorMessageFactory shouldBeAfterOrEqualsTo(Object actual, Object other) { - return new ShouldBeAfterOrEqualsTo(actual, other, StandardComparisonStrategy.instance()); + public static ErrorMessageFactory shouldBeBeforeOrEqualTo(Object actual, Object other) { + return new ShouldBeBeforeOrEqualTo(actual, other, StandardComparisonStrategy.instance()); } - private ShouldBeAfterOrEqualsTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { - super("%nExpecting:%n <%s>%nto be after or equals to:%n <%s>%s", actual, other, comparisonStrategy); + private ShouldBeBeforeOrEqualTo(Object actual, Object other, ComparisonStrategy comparisonStrategy) { + super("%nExpecting:%n <%s>%nto be before or equal to:%n <%s>%s", actual, other, comparisonStrategy); } } diff --git a/src/main/java/org/assertj/core/internal/Dates.java b/src/main/java/org/assertj/core/internal/Dates.java --- a/src/main/java/org/assertj/core/internal/Dates.java +++ b/src/main/java/org/assertj/core/internal/Dates.java @@ -13,10 +13,10 @@ package org.assertj.core.internal; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.error.ShouldBeAfterYear.shouldBeAfterYear; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.error.ShouldBeBeforeYear.shouldBeBeforeYear; import static org.assertj.core.error.ShouldBeBetween.shouldBeBetween; import static org.assertj.core.error.ShouldBeCloseTo.shouldBeCloseTo; @@ -128,7 +128,7 @@ public void assertIsBeforeOrEqualTo(AssertionInfo info, Date actual, Date other) assertNotNull(info, actual); dateParameterIsNotNull(other); if (!isBeforeOrEqualTo(actual, other)) - throw failures.failure(info, shouldBeBeforeOrEqualsTo(actual, other, comparisonStrategy)); + throw failures.failure(info, shouldBeBeforeOrEqualTo(actual, other, 
comparisonStrategy)); } /** @@ -160,7 +160,7 @@ public void assertIsAfterOrEqualTo(AssertionInfo info, Date actual, Date other) assertNotNull(info, actual); dateParameterIsNotNull(other); if (!isAfterOrEqualTo(actual, other)) - throw failures.failure(info, shouldBeAfterOrEqualsTo(actual, other, comparisonStrategy)); + throw failures.failure(info, shouldBeAfterOrEqualTo(actual, other, comparisonStrategy)); } /**
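As a hedged sketch of the user-visible effect of this rename, assuming the patched message templates above: failure messages now read "equal to" instead of "equals to", which can be checked the same way the updated tests do:

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;

import java.time.LocalDate;

// Sketch: the corrected wording in the failure message after the rename.
class BeforeOrEqualMessageSketch {

  public static void main(String[] args) {
    assertThatExceptionOfType(AssertionError.class)
      .isThrownBy(() -> assertThat(LocalDate.of(2000, 1, 5)).isBeforeOrEqualTo(LocalDate.of(1998, 1, 1)))
      .withMessageContaining("to be before or equal to");
  }
}
```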
diff --git a/src/test/java/org/assertj/core/api/date/DateAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/date/DateAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/date/DateAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/date/DateAssert_isAfterOrEqualTo_Test.java @@ -18,7 +18,6 @@ import org.assertj.core.api.DateAssert; - /** * Tests for <code>{@link DateAssert#isAfterOrEqualTo(Date)}</code>. * diff --git a/src/test/java/org/assertj/core/api/instant/InstantAssert_IsBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/instant/InstantAssert_IsBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/instant/InstantAssert_IsBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/instant/InstantAssert_IsBeforeOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.instant; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.Instant; @@ -36,10 +36,8 @@ public void test_isBeforeOrEqual_assertion() { @Test public void test_isBeforeOrEqual_assertion_error_message() { - Instant instantReference = Instant.parse("2007-12-03T10:15:30.00Z"); - Instant instantAfter = Instant.parse("2007-12-03T10:15:35.00Z"); - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(instantAfter).isBeforeOrEqualTo(instantReference)) - .withMessage(format("%nExpecting:%n <2007-12-03T10:15:35Z>%nto be before or equals to:%n <2007-12-03T10:15:30Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test @@ -69,4 +67,3 @@ private static void verify_that_isBeforeOrEqual_assertion_fails_and_throws_Asser } } - diff --git a/src/test/java/org/assertj/core/api/instant/InstantAssert_isAfterOrEqual_Test.java b/src/test/java/org/assertj/core/api/instant/InstantAssert_isAfterOrEqual_Test.java --- a/src/test/java/org/assertj/core/api/instant/InstantAssert_isAfterOrEqual_Test.java +++ b/src/test/java/org/assertj/core/api/instant/InstantAssert_isAfterOrEqual_Test.java @@ -12,12 +12,11 @@ */ package org.assertj.core.api.instant; - -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.Instant; @@ -40,11 +39,7 @@ public void test_isAfterOrEqual_assertion_error_message() { Instant instantReference = Instant.parse("2007-12-03T10:15:30.00Z"); Instant instantAfter = Instant.parse("2007-12-03T10:15:35.00Z"); assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(instantReference).isAfterOrEqualTo(instantAfter)) - .withMessage(format("%n" + - "Expecting:%n" + - " <2007-12-03T10:15:30Z>%n" + - "to 
be after or equals to:%n" + - " <2007-12-03T10:15:35Z>")); + .withMessage(shouldBeAfterOrEqualTo(instantReference, instantAfter).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isAfterOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.localdate; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.LocalDate; @@ -32,23 +32,17 @@ public class LocalDateAssert_isAfterOrEqualTo_Test extends LocalDateAssertBaseTe @Test public void test_isAfterOrEqual_assertion() { - // WHEN - assertThat(AFTER).isAfterOrEqualTo(REFERENCE); - assertThat(REFERENCE).isAfterOrEqualTo(REFERENCE); - // THEN - verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(BEFORE, REFERENCE); + // WHEN + assertThat(AFTER).isAfterOrEqualTo(REFERENCE); + assertThat(REFERENCE).isAfterOrEqualTo(REFERENCE); + // THEN + verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(BEFORE, REFERENCE); } @Test public void test_isAfterOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> - assertThat(LocalDate.of(2000, 1, 5)).isAfterOrEqualTo(LocalDate.of(2012, 1, 1))).withMessage(format("%n" + - "Expecting:%n" + - " <2000-01-05>%n" - + - "to be after or equals to:%n" - + - " <2012-01-01>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isAfterOrEqualTo(AFTER)) + .withMessage(shouldBeAfterOrEqualTo(REFERENCE, AFTER).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/localdate/LocalDateAssert_isBeforeOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.localdate; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.LocalDate; @@ -41,8 +41,8 @@ public void test_isBeforeOrEqual_assertion() { @Test public void test_isBeforeOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(LocalDate.of(2000, 1, 5)).isBeforeOrEqualTo(LocalDate.of(1998, 1, 1))) - .withMessage(format("%nExpecting:%n <2000-01-05>%nto be before or equals to:%n <1998-01-01>")); + 
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isAfterOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.localdatetime; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.LocalDateTime; @@ -32,27 +32,17 @@ public class LocalDateTimeAssert_isAfterOrEqualTo_Test extends LocalDateTimeAsse @Test public void test_isAfterOrEqual_assertion() { - // WHEN - assertThat(AFTER).isAfterOrEqualTo(REFERENCE); - assertThat(REFERENCE).isAfterOrEqualTo(REFERENCE); - // THEN - verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(BEFORE, REFERENCE); + // WHEN + assertThat(AFTER).isAfterOrEqualTo(REFERENCE); + assertThat(REFERENCE).isAfterOrEqualTo(REFERENCE); + // THEN + verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(BEFORE, REFERENCE); } @Test public void test_isAfterOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(LocalDateTime.of(2000, 1, 5, 3, 0, - 5)).isAfterOrEqualTo(LocalDateTime.of(2012, - 1, - 1, - 3, - 3, - 3))) - .withMessage(format("%n" + - "Expecting:%n" + - " <2000-01-05T03:00:05>%n" + - "to be after or equals to:%n" + - " <2012-01-01T03:03:03>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isAfterOrEqualTo(AFTER)) + .withMessage(shouldBeAfterOrEqualTo(REFERENCE, AFTER).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isBeforeOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.localdatetime; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.LocalDateTime; @@ -41,8 +41,8 @@ public void test_isBeforeOrEqual_assertion() { @Test public void test_isBeforeOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(LocalDateTime.of(2000, 
1, 5, 3, 0, 5)).isBeforeOrEqualTo(LocalDateTime.of(1998, 1, 1, 3, 3, 3))) - .withMessage(format("%nExpecting:%n <2000-01-05T03:00:05>%nto be before or equals to:%n <1998-01-01T03:03:03>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isAfterOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.localtime; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.LocalTime; @@ -32,24 +32,17 @@ public class LocalTimeAssert_isAfterOrEqualTo_Test extends LocalTimeAssertBaseTe @Test public void test_isAfterOrEqual_assertion() { - // WHEN - assertThat(AFTER).isAfterOrEqualTo(REFERENCE); - assertThat(REFERENCE).isAfterOrEqualTo(REFERENCE); - // THEN - verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(BEFORE, REFERENCE); + // WHEN + assertThat(AFTER).isAfterOrEqualTo(REFERENCE); + assertThat(REFERENCE).isAfterOrEqualTo(REFERENCE); + // THEN + verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(BEFORE, REFERENCE); } @Test public void test_isAfterOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(LocalTime.of(3, 0, - 5)).isAfterOrEqualTo(LocalTime.of(3, - 3, - 3))) - .withMessage(format("%n" + - "Expecting:%n" + - " <03:00:05>%n" + - "to be after or equals to:%n" + - " <03:03:03>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isAfterOrEqualTo(AFTER)) + .withMessage(shouldBeAfterOrEqualTo(REFERENCE, AFTER).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/localtime/LocalTimeAssert_isBeforeOrEqualTo_Test.java @@ -12,11 +12,11 @@ */ package org.assertj.core.api.localtime; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.LocalTime; @@ -32,24 +32,17 @@ public class LocalTimeAssert_isBeforeOrEqualTo_Test extends LocalTimeAssertBaseT @Test public void test_isBeforeOrEqual_assertion() { - // WHEN - 
assertThat(BEFORE).isBeforeOrEqualTo(REFERENCE); - assertThat(REFERENCE).isBeforeOrEqualTo(REFERENCE); - // THEN - verify_that_isBeforeOrEqual_assertion_fails_and_throws_AssertionError(AFTER, REFERENCE); + // WHEN + assertThat(BEFORE).isBeforeOrEqualTo(REFERENCE); + assertThat(REFERENCE).isBeforeOrEqualTo(REFERENCE); + // THEN + verify_that_isBeforeOrEqual_assertion_fails_and_throws_AssertionError(AFTER, REFERENCE); } @Test public void test_isBeforeOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(LocalTime.of(3, 0, - 5)).isBeforeOrEqualTo(LocalTime.of(3, - 0, - 4))) - .withMessage(format("%n" + - "Expecting:%n" + - " <03:00:05>%n" + - "to be before or equals to:%n" + - " <03:00:04>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isAfterOrEqualTo_Test.java @@ -12,13 +12,11 @@ */ package org.assertj.core.api.offsetdatetime; -import static java.lang.String.format; -import static java.time.OffsetDateTime.of; -import static java.time.ZoneOffset.UTC; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.OffsetDateTime; @@ -45,15 +43,8 @@ public void test_isAfterOrEqual_assertion() { @Test public void test_isAfterOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(of(2000, 1, 5, 3, 0, 5, 0, - UTC)).isAfterOrEqualTo(of(2012, 1, 1, - 3, 3, 3, 0, - UTC))) - .withMessage(format("%n" + - "Expecting:%n" + - " <2000-01-05T03:00:05Z>%n" + - "to be after or equals to:%n" + - " <2012-01-01T03:03:03Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isAfterOrEqualTo(AFTER)) + .withMessage(shouldBeAfterOrEqualTo(REFERENCE, AFTER).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/offsetdatetime/OffsetDateTimeAssert_isBeforeOrEqualTo_Test.java @@ -12,13 +12,11 @@ */ package org.assertj.core.api.offsetdatetime; -import static java.lang.String.format; -import static java.time.OffsetDateTime.of; -import static java.time.ZoneOffset.UTC; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static 
org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.OffsetDateTime; @@ -45,15 +43,8 @@ public void test_isBeforeOrEqual_assertion() { @Test public void test_isBeforeOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(of(2000, 1, 5, 3, 0, 5, 0, - UTC)).isBeforeOrEqualTo(of(1998, 1, - 1, 3, 3, - 3, 0, - UTC))) - .withMessage(format("%nExpecting:%n" + - " <2000-01-05T03:00:05Z>%n" + - "to be before or equals to:%n" + - " <1998-01-01T03:03:03Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isAfterOrEqualTo_Test.java @@ -12,15 +12,14 @@ */ package org.assertj.core.api.offsettime; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.OffsetTime; -import java.time.ZoneOffset; import org.junit.jupiter.api.Test; @@ -44,17 +43,8 @@ public void test_isAfterOrEqual_assertion() { @Test public void test_isAfterOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(OffsetTime.of(3, 0, 5, 0, - ZoneOffset.UTC)).isAfterOrEqualTo(OffsetTime.of(3, - 3, - 3, - 0, - ZoneOffset.UTC))) - .withMessage(format("%n" + - "Expecting:%n" + - " <03:00:05Z>%n" + - "to be after or equals to:%n" + - " <03:03:03Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isAfterOrEqualTo(AFTER)) + .withMessage(shouldBeAfterOrEqualTo(REFERENCE, AFTER).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/offsettime/OffsetTimeAssert_isBeforeOrEqualTo_Test.java @@ -12,15 +12,14 @@ */ package org.assertj.core.api.offsettime; -import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.OffsetTime; -import java.time.ZoneOffset; import org.junit.jupiter.api.Test; @@ -44,17 +43,8 @@ public void 
test_isBeforeOrEqual_assertion() { @Test public void test_isBeforeOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(OffsetTime.of(3, 0, 5, 0, - ZoneOffset.UTC)).isBeforeOrEqualTo(OffsetTime.of(3, - 0, - 4, - 0, - ZoneOffset.UTC))) - .withMessage(format("%n" + - "Expecting:%n" + - " <03:00:05Z>%n" + - "to be before or equals to:%n" + - " <03:00:04Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test diff --git a/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isAfterOrEqualTo_Test.java @@ -12,12 +12,12 @@ */ package org.assertj.core.api.zoneddatetime; -import static java.lang.String.format; import static java.time.ZoneOffset.UTC; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.ZoneId; @@ -57,8 +57,8 @@ public void isAfterOrEqualTo_should_compare_datetimes_in_actual_timezone() { @Test public void test_isAfterOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(ZonedDateTime.of(2000, 1, 5, 3, 0, 5, 0, UTC)).isAfterOrEqualTo(ZonedDateTime.of(2012, 1, 1, 3, 3, 3, 0, UTC))) - .withMessage(format("%nExpecting:%n <2000-01-05T03:00:05Z>%nto be after or equals to:%n <2012-01-01T03:03:03Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isAfterOrEqualTo(AFTER)) + .withMessage(shouldBeAfterOrEqualTo(REFERENCE, AFTER).create()); } @Test @@ -66,7 +66,7 @@ public void should_fail_if_actual_is_null() { assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> { ZonedDateTime actual = null; assertThat(actual).isAfterOrEqualTo(ZonedDateTime.now()); - }).withMessage( actualIsNull()); + }).withMessage(actualIsNull()); } @Test @@ -82,7 +82,7 @@ public void should_fail_if_dateTime_as_string_parameter_is_null() { } private static void verify_that_isAfterOrEqual_assertion_fails_and_throws_AssertionError(ZonedDateTime dateToCheck, - ZonedDateTime reference) { + ZonedDateTime reference) { try { assertThat(dateToCheck).isAfterOrEqualTo(reference); } catch (AssertionError e) { diff --git a/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/api/zoneddatetime/ZonedDateTimeAssert_isBeforeOrEqualTo_Test.java @@ -12,12 +12,11 @@ */ package org.assertj.core.api.zoneddatetime; -import static java.lang.String.format; -import static java.time.ZoneOffset.UTC; import static org.assertj.core.api.Assertions.assertThat; import static 
org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.FailureMessages.actualIsNull; import java.time.ZoneId; @@ -35,43 +34,31 @@ public class ZonedDateTimeAssert_isBeforeOrEqualTo_Test extends ZonedDateTimeAss @Test public void test_isBeforeOrEqual_assertion() { - // WHEN - assertThat(BEFORE).isBeforeOrEqualTo(REFERENCE); - assertThat(BEFORE).isBeforeOrEqualTo(REFERENCE.toString()); - assertThat(REFERENCE).isBeforeOrEqualTo(REFERENCE); - assertThat(REFERENCE).isBeforeOrEqualTo(REFERENCE.toString()); - // THEN - verify_that_isBeforeOrEqual_assertion_fails_and_throws_AssertionError(AFTER, REFERENCE); + // WHEN + assertThat(BEFORE).isBeforeOrEqualTo(REFERENCE); + assertThat(BEFORE).isBeforeOrEqualTo(REFERENCE.toString()); + assertThat(REFERENCE).isBeforeOrEqualTo(REFERENCE); + assertThat(REFERENCE).isBeforeOrEqualTo(REFERENCE.toString()); + // THEN + verify_that_isBeforeOrEqual_assertion_fails_and_throws_AssertionError(AFTER, REFERENCE); } @Test public void isBeforeOrEqualTo_should_compare_datetimes_in_actual_timezone() { - ZonedDateTime utcDateTime = ZonedDateTime.of(2013, 6, 10, 0, 0, 0, 0, ZoneOffset.UTC); - ZoneId cestTimeZone = ZoneId.of("Europe/Berlin"); - ZonedDateTime cestDateTime1 = ZonedDateTime.of(2013, 6, 10, 2, 0, 0, 0, cestTimeZone); - ZonedDateTime cestDateTime2 = ZonedDateTime.of(2013, 6, 10, 3, 0, 0, 0, cestTimeZone); - // utcDateTime = cestDateTime1 - assertThat(utcDateTime).as("in UTC time zone").isBeforeOrEqualTo(cestDateTime1); - // utcDateTime < cestDateTime2 - assertThat(utcDateTime).as("in UTC time zone").isBeforeOrEqualTo(cestDateTime2); + ZonedDateTime utcDateTime = ZonedDateTime.of(2013, 6, 10, 0, 0, 0, 0, ZoneOffset.UTC); + ZoneId cestTimeZone = ZoneId.of("Europe/Berlin"); + ZonedDateTime cestDateTime1 = ZonedDateTime.of(2013, 6, 10, 2, 0, 0, 0, cestTimeZone); + ZonedDateTime cestDateTime2 = ZonedDateTime.of(2013, 6, 10, 3, 0, 0, 0, cestTimeZone); + // utcDateTime = cestDateTime1 + assertThat(utcDateTime).as("in UTC time zone").isBeforeOrEqualTo(cestDateTime1); + // utcDateTime < cestDateTime2 + assertThat(utcDateTime).as("in UTC time zone").isBeforeOrEqualTo(cestDateTime2); } @Test public void test_isBeforeOrEqual_assertion_error_message() { - assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(ZonedDateTime.of(2000, 1, 5, 3, 0, 5, 0, - UTC)).isBeforeOrEqualTo(ZonedDateTime.of(1998, - 1, - 1, - 3, - 3, - 3, - 0, - UTC))) - .withMessage(format("%n" + - "Expecting:%n" + - " <2000-01-05T03:00:05Z>%n" + - "to be before or equals to:%n" + - " <1998-01-01T03:03:03Z>")); + assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(REFERENCE).isBeforeOrEqualTo(BEFORE)) + .withMessage(shouldBeBeforeOrEqualTo(REFERENCE, BEFORE).create()); } @Test @@ -95,19 +82,19 @@ public void should_fail_if_dateTime_as_string_parameter_is_null() { } private static void verify_that_isBeforeOrEqual_assertion_fails_and_throws_AssertionError(ZonedDateTime dateToCheck, - ZonedDateTime reference) { - try { - assertThat(dateToCheck).isBeforeOrEqualTo(reference); - } catch (AssertionError e) { - // AssertionError was expected, test same assertion with String based parameter - try { - assertThat(dateToCheck).isBeforeOrEqualTo(reference.toString()); - } catch (AssertionError e2) { - // AssertionError 
was expected (again) - return; - } - } - fail("Should have thrown AssertionError"); + ZonedDateTime reference) { + try { + assertThat(dateToCheck).isBeforeOrEqualTo(reference); + } catch (AssertionError e) { + // AssertionError was expected, test same assertion with String based parameter + try { + assertThat(dateToCheck).isBeforeOrEqualTo(reference.toString()); + } catch (AssertionError e2) { + // AssertionError was expected (again) + return; + } + } + fail("Should have thrown AssertionError"); } } diff --git a/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualsTo_create_Test.java b/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java similarity index 79% rename from src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualsTo_create_Test.java rename to src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualsTo_create_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualTo_create_Test.java @@ -14,7 +14,7 @@ import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.util.DateUtil.parse; import org.assertj.core.description.Description; @@ -25,17 +25,17 @@ /** * Tests for - * <code>{@link ShouldBeBeforeOrEqualsTo#create(Description, org.assertj.core.presentation.Representation)}</code>. + * <code>{@link ShouldBeAfterOrEqualTo#create(Description, org.assertj.core.presentation.Representation)}</code>. * * @author Joel Costigliola */ -public class ShouldBeBeforeOrEqualsTo_create_Test { +public class ShouldBeAfterOrEqualTo_create_Test { private ErrorMessageFactory factory; @BeforeEach public void setUp() { - factory = shouldBeBeforeOrEqualsTo(parse("2011-01-01"), parse("2012-01-01")); + factory = shouldBeAfterOrEqualTo(parse("2011-01-01"), parse("2012-01-01")); } @Test @@ -44,7 +44,7 @@ public void should_create_error_message() { assertThat(message).isEqualTo(format("[Test] %n" + "Expecting:%n" + " <2011-01-01T00:00:00.000>%n" + - "to be before or equals to:%n" + + "to be after or equal to:%n" + " <2012-01-01T00:00:00.000>")); } } diff --git a/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualsTo_create_Test.java b/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java similarity index 83% rename from src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualsTo_create_Test.java rename to src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeAfterOrEqualsTo_create_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeBeforeOrEqualTo_create_Test.java @@ -14,7 +14,7 @@ import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.util.DateUtil.parse; import org.assertj.core.description.Description; @@ -25,17 +25,17 @@ /** * Tests for - * <code>{@link ShouldBeAfterOrEqualsTo#create(Description, org.assertj.core.presentation.Representation)}</code>. + * <code>{@link ShouldBeBeforeOrEqualTo#create(Description, org.assertj.core.presentation.Representation)}</code>. 
* * @author Joel Costigliola */ -public class ShouldBeAfterOrEqualsTo_create_Test { +public class ShouldBeBeforeOrEqualTo_create_Test { private ErrorMessageFactory factory; @BeforeEach public void setUp() { - factory = shouldBeAfterOrEqualsTo(parse("2011-01-01"), parse("2012-01-01")); + factory = shouldBeBeforeOrEqualTo(parse("2011-01-01"), parse("2012-01-01")); } @Test @@ -44,7 +44,7 @@ public void should_create_error_message() { assertThat(message).isEqualTo(format("[Test] %n" + "Expecting:%n" + " <2011-01-01T00:00:00.000>%n" + - "to be after or equals to:%n" + + "to be before or equal to:%n" + " <2012-01-01T00:00:00.000>")); } } diff --git a/src/test/java/org/assertj/core/internal/dates/Dates_assertIsAfterOrEqualTo_Test.java b/src/test/java/org/assertj/core/internal/dates/Dates_assertIsAfterOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/internal/dates/Dates_assertIsAfterOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/internal/dates/Dates_assertIsAfterOrEqualTo_Test.java @@ -14,7 +14,7 @@ import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatNullPointerException; -import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; +import static org.assertj.core.error.ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo; import static org.assertj.core.internal.ErrorMessages.dateToCompareActualWithIsNull; import static org.assertj.core.test.TestData.someInfo; import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; @@ -24,6 +24,7 @@ import java.util.Date; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.error.ShouldBeAfterOrEqualTo; import org.assertj.core.internal.Dates; import org.assertj.core.internal.DatesBaseTest; import org.junit.jupiter.api.Test; @@ -43,7 +44,7 @@ public void should_fail_if_actual_is_not_strictly_after_given_date() { try { dates.assertIsAfterOrEqualTo(info, actual, other); } catch (AssertionError e) { - verify(failures).failure(info, shouldBeAfterOrEqualsTo(actual, other)); + verify(failures).failure(info, shouldBeAfterOrEqualTo(actual, other)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); @@ -78,7 +79,7 @@ public void should_fail_if_actual_is_not_strictly_after_given_date_according_to_ try { datesWithCustomComparisonStrategy.assertIsAfterOrEqualTo(info, actual, other); } catch (AssertionError e) { - verify(failures).failure(info, shouldBeAfterOrEqualsTo(actual, other, yearAndMonthComparisonStrategy)); + verify(failures).failure(info, ShouldBeAfterOrEqualTo.shouldBeAfterOrEqualTo(actual, other, yearAndMonthComparisonStrategy)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); diff --git a/src/test/java/org/assertj/core/internal/dates/Dates_assertIsBeforeOrEqualTo_Test.java b/src/test/java/org/assertj/core/internal/dates/Dates_assertIsBeforeOrEqualTo_Test.java --- a/src/test/java/org/assertj/core/internal/dates/Dates_assertIsBeforeOrEqualTo_Test.java +++ b/src/test/java/org/assertj/core/internal/dates/Dates_assertIsBeforeOrEqualTo_Test.java @@ -14,7 +14,7 @@ import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.assertj.core.api.Assertions.assertThatNullPointerException; -import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; +import static org.assertj.core.error.ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo; import static org.assertj.core.internal.ErrorMessages.dateToCompareActualWithIsNull; import static 
org.assertj.core.test.TestData.someInfo; import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; @@ -24,6 +24,7 @@ import java.util.Date; import org.assertj.core.api.AssertionInfo; +import org.assertj.core.error.ShouldBeBeforeOrEqualTo; import org.assertj.core.internal.Dates; import org.assertj.core.internal.DatesBaseTest; import org.junit.jupiter.api.Test; @@ -43,7 +44,7 @@ public void should_fail_if_actual_is_not_strictly_before_given_date() { try { dates.assertIsBeforeOrEqualTo(info, actual, other); } catch (AssertionError e) { - verify(failures).failure(info, shouldBeBeforeOrEqualsTo(actual, other)); + verify(failures).failure(info, shouldBeBeforeOrEqualTo(actual, other)); return; } failBecauseExpectedAssertionErrorWasNotThrown(); @@ -78,7 +79,7 @@ public void should_fail_if_actual_is_not_strictly_before_given_date_according_to try { datesWithCustomComparisonStrategy.assertIsBeforeOrEqualTo(info, actual, other); } catch (AssertionError e) { - verify(failures).failure(info, shouldBeBeforeOrEqualsTo(actual, other, yearAndMonthComparisonStrategy)); + verify(failures).failure(info, ShouldBeBeforeOrEqualTo.shouldBeBeforeOrEqualTo(actual, other, yearAndMonthComparisonStrategy)); return; } failBecauseExpectedAssertionErrorWasNotThrown();
Typo in `AbstractDateAssert#isBeforeOrEqualsTo` method name?

#### Summary

`AbstractDateAssert#isBeforeOrEqualsTo` has a trailing 's' in 'Equals', which looks like a typo compared to the Java 8 time assertions (same for `AbstractDateAssert#isAfterOrEqualsTo`). It is just a style issue, and fixing it would be a backwards-incompatible change. `org.assertj.core.error.ShouldBeBeforeOrEqualsTo` and `org.assertj.core.error.ShouldBeAfterOrEqualsTo` also have the same flaw.

#### Example

```java
assertThat(new Date()).isBeforeOrEqualsTo(anotherDate);
assertThat(Instant.now()).isBeforeOrEqualTo(anotherInstant);
```
Yeah, good catch :man_facepalming:. The way to go is to deprecate `isBeforeOrEqualsTo` / `isAfterOrEqualsTo` and add `isBeforeOrEqualTo` / `isAfterOrEqualTo`. For the other typos a rename refactoring will do.
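A minimal sketch of the deprecate-and-delegate approach suggested above, assuming the old method simply forwards to the new one (the `DateAssertSketch` class below is hypothetical; the real `AbstractDateAssert` signatures and internals may differ):

```java
import java.util.Date;

// Hypothetical excerpt: the misspelled method is kept but deprecated,
// and simply forwards to the correctly named one.
abstract class DateAssertSketch<SELF extends DateAssertSketch<SELF>> {

  /** Old, misspelled name, kept for backwards compatibility. */
  @Deprecated
  public SELF isBeforeOrEqualsTo(Date other) {
    return isBeforeOrEqualTo(other); // delegate to the correctly spelled method
  }

  /** New, correctly spelled assertion that performs the actual check. */
  public abstract SELF isBeforeOrEqualTo(Date other);
}
```

The error-message factories (`ShouldBeBeforeOrEqualsTo` / `ShouldBeAfterOrEqualsTo`) can then simply be renamed, which is what the test diff above already reflects.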
2019-05-22T05:05:31Z
3.12
assertj/assertj
1,243
assertj__assertj-1243
[ "1232" ]
17d405970d0c67b02e4d2cf27761b7351f8d7af7
diff --git a/src/main/java/org/assertj/core/api/Assertions.java b/src/main/java/org/assertj/core/api/Assertions.java --- a/src/main/java/org/assertj/core/api/Assertions.java +++ b/src/main/java/org/assertj/core/api/Assertions.java @@ -2565,6 +2565,29 @@ public static <T> T assertThat(final AssertProvider<T> component) { return AssertionsForInterfaceTypes.assertThat(actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuilder actual) { + return AssertionsForClassTypes.assertThat(actual); + } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuffer actual) { + return AssertionsForClassTypes.assertThat(actual); + } + /** * Creates a new instance of <code>{@link CharSequenceAssert}from a {@link String}</code>. * diff --git a/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java b/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java --- a/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java +++ b/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java @@ -487,6 +487,30 @@ public static AbstractShortArrayAssert<?> assertThat(short[] actual) { return new ShortArrayAssert(actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuilder actual) { + return new CharSequenceAssert(actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuffer actual) { + return new CharSequenceAssert(actual); + } + /** * Creates a new instance of <code>{@link StringAssert}</code>. * diff --git a/src/main/java/org/assertj/core/api/Assumptions.java b/src/main/java/org/assertj/core/api/Assumptions.java --- a/src/main/java/org/assertj/core/api/Assumptions.java +++ b/src/main/java/org/assertj/core/api/Assumptions.java @@ -308,6 +308,31 @@ public static AbstractCharArrayAssert<?> assumeThat(char[] actual) { return asAssumption(CharSequenceAssert.class, CharSequence.class, actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> assumption from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assumption for assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assumeThat(StringBuilder actual) { + return asAssumption(CharSequenceAssert.class, CharSequence.class, actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> assumption from a {@link StringBuffer}. + * + * @param actual the actual value. 
+ * @return the created assumption for assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assumeThat(StringBuffer actual) { + return asAssumption(CharSequenceAssert.class, CharSequence.class, actual); + } + + /** * Creates a new instance of <code>{@link ShortAssert}</code> assumption. * diff --git a/src/main/java/org/assertj/core/api/BDDAssertions.java b/src/main/java/org/assertj/core/api/BDDAssertions.java --- a/src/main/java/org/assertj/core/api/BDDAssertions.java +++ b/src/main/java/org/assertj/core/api/BDDAssertions.java @@ -800,6 +800,30 @@ public static AbstractShortArrayAssert<?> then(short[] actual) { return assertThat(actual); } + /** + * Creates a new instance of <code>{@link org.assertj.core.api.CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> then(StringBuilder actual) { + return assertThat(actual); + } + + /** + * Creates a new instance of <code>{@link org.assertj.core.api.CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> then(StringBuffer actual) { + return assertThat(actual); + } + /** * Creates a new instance of <code>{@link org.assertj.core.api.StringAssert}</code>. * diff --git a/src/main/java/org/assertj/core/api/Java6AbstractBDDSoftAssertions.java b/src/main/java/org/assertj/core/api/Java6AbstractBDDSoftAssertions.java --- a/src/main/java/org/assertj/core/api/Java6AbstractBDDSoftAssertions.java +++ b/src/main/java/org/assertj/core/api/Java6AbstractBDDSoftAssertions.java @@ -487,6 +487,30 @@ public CharSequenceAssert then(CharSequence actual) { return proxy(CharSequenceAssert.class, CharSequence.class, actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public CharSequenceAssert then(StringBuilder actual) { + return proxy(CharSequenceAssert.class, CharSequence.class, actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public CharSequenceAssert then(StringBuffer actual) { + return proxy(CharSequenceAssert.class, CharSequence.class, actual); + } + /** * Creates a new instance of <code>{@link StringAssert}</code>. * diff --git a/src/main/java/org/assertj/core/api/Java6AbstractStandardSoftAssertions.java b/src/main/java/org/assertj/core/api/Java6AbstractStandardSoftAssertions.java --- a/src/main/java/org/assertj/core/api/Java6AbstractStandardSoftAssertions.java +++ b/src/main/java/org/assertj/core/api/Java6AbstractStandardSoftAssertions.java @@ -489,6 +489,30 @@ public CharSequenceAssert assertThat(CharSequence actual) { return proxy(CharSequenceAssert.class, CharSequence.class, actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. 
+ * @since 3.11.0 + */ + @CheckReturnValue + public CharSequenceAssert assertThat(StringBuilder actual) { + return proxy(CharSequenceAssert.class, CharSequence.class, actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public CharSequenceAssert assertThat(StringBuffer actual) { + return proxy(CharSequenceAssert.class, CharSequence.class, actual); + } + /** * Creates a new instance of <code>{@link StringAssert}</code>. * diff --git a/src/main/java/org/assertj/core/api/Java6Assertions.java b/src/main/java/org/assertj/core/api/Java6Assertions.java --- a/src/main/java/org/assertj/core/api/Java6Assertions.java +++ b/src/main/java/org/assertj/core/api/Java6Assertions.java @@ -924,6 +924,30 @@ public static AbstractShortArrayAssert<?> assertThat(short[] actual) { return new CharSequenceAssert(actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuilder actual) { + return new CharSequenceAssert(actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(StringBuffer actual) { + return new CharSequenceAssert(actual); + } + /** * Creates a new instance of <code>{@link StringAssert}</code>. * diff --git a/src/main/java/org/assertj/core/api/Java6BDDAssertions.java b/src/main/java/org/assertj/core/api/Java6BDDAssertions.java --- a/src/main/java/org/assertj/core/api/Java6BDDAssertions.java +++ b/src/main/java/org/assertj/core/api/Java6BDDAssertions.java @@ -795,6 +795,30 @@ public static AbstractShortArrayAssert<?> then(short[] actual) { return assertThat(actual); } + /** + * Creates a new instance of <code>{@link org.assertj.core.api.CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> then(StringBuilder actual) { + return assertThat(actual); + } + + /** + * Creates a new instance of <code>{@link org.assertj.core.api.CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + public static AbstractCharSequenceAssert<?, ? extends CharSequence> then(StringBuffer actual) { + return assertThat(actual); + } + /** * Creates a new instance of <code>{@link org.assertj.core.api.StringAssert}</code>. 
* diff --git a/src/main/java/org/assertj/core/api/WithAssertions.java b/src/main/java/org/assertj/core/api/WithAssertions.java --- a/src/main/java/org/assertj/core/api/WithAssertions.java +++ b/src/main/java/org/assertj/core/api/WithAssertions.java @@ -564,6 +564,30 @@ default <VALUE> AtomicStampedReferenceAssert<VALUE> assertThat(AtomicStampedRefe return Assertions.assertThat(actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + default AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(final StringBuilder actual) { + return Assertions.assertThat(actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + default AbstractCharSequenceAssert<?, ? extends CharSequence> assertThat(final StringBuffer actual) { + return Assertions.assertThat(actual); + } + /** * Creates a new instance of <code>{@link ShortArrayAssert}</code>. * diff --git a/src/main/java/org/assertj/core/api/WithAssumptions.java b/src/main/java/org/assertj/core/api/WithAssumptions.java --- a/src/main/java/org/assertj/core/api/WithAssumptions.java +++ b/src/main/java/org/assertj/core/api/WithAssumptions.java @@ -371,6 +371,29 @@ default <VALUE> AtomicStampedReferenceAssert<VALUE> assumeThat(AtomicStampedRefe return Assumptions.assumeThat(actual); } + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> assumption from a {@link StringBuilder}. + * + * @param actual the actual value. + * @return the created assumption for assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + default AbstractCharSequenceAssert<?, ? extends CharSequence> assumeThat(final StringBuilder actual) { + return Assumptions.assumeThat(actual); + } + + /** + * Creates a new instance of <code>{@link CharSequenceAssert}</code> assumption from a {@link StringBuffer}. + * + * @param actual the actual value. + * @return the created assumption for assertion object. + * @since 3.11.0 + */ + @CheckReturnValue + default AbstractCharSequenceAssert<?, ? extends CharSequence> assumeThat(final StringBuffer actual) { + return Assumptions.assumeThat(actual); + } /** * Creates a new instance of <code>{@link ShortArrayAssert}</code> assumption. *
diff --git a/src/test/java/org/assertj/core/api/Assertions_assertThat_with_StringBuffer_Test.java b/src/test/java/org/assertj/core/api/Assertions_assertThat_with_StringBuffer_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_assertThat_with_StringBuffer_Test.java @@ -0,0 +1,36 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2018 the original author or authors. + */ +package org.assertj.core.api; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Tests for <code>{@link Assertions#assertThat(StringBuffer)}</code>. + */ +public class Assertions_assertThat_with_StringBuffer_Test { + + @Test + public void should_create_Assert() { + AbstractCharSequenceAssert<?, ?> assertions = Assertions.assertThat(new StringBuffer("Yoda")); + assertThat(assertions).isNotNull(); + } + + @Test + public void should_pass_actual() { + StringBuffer actual = new StringBuffer("Yoda"); + AbstractCharSequenceAssert<?, ?> assertions = Assertions.assertThat(actual); + assertThat(assertions.actual).isSameAs(actual); + } +} diff --git a/src/test/java/org/assertj/core/api/Assertions_assertThat_with_StringBuilder_Test.java b/src/test/java/org/assertj/core/api/Assertions_assertThat_with_StringBuilder_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/Assertions_assertThat_with_StringBuilder_Test.java @@ -0,0 +1,36 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2018 the original author or authors. + */ +package org.assertj.core.api; + +import org.junit.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +/** + * Tests for <code>{@link Assertions#assertThat(StringBuilder)}</code>. + */ +public class Assertions_assertThat_with_StringBuilder_Test { + + @Test + public void should_create_Assert() { + AbstractCharSequenceAssert<?, ?> assertions = Assertions.assertThat(new StringBuilder("Yoda")); + assertThat(assertions).isNotNull(); + } + + @Test + public void should_pass_actual() { + StringBuilder actual = new StringBuilder("Yoda"); + AbstractCharSequenceAssert<?, ?> assertions = Assertions.assertThat(actual); + assertThat(assertions.actual).isSameAs(actual); + } +}
Java 11 EA Build: Compile Errors: ambiguous reference both <T>assertThat(T) and method assertThat(java.lang.CharSequence) match Just for your information. When running `mvn clean test` using: ``` java version "11-ea" 2018-09-25 Java(TM) SE Runtime Environment 18.9 (build 11-ea+10) Java HotSpot(TM) 64-Bit Server VM 18.9 (build 11-ea+10, mixed mode) ``` I get these compile errors: ``` [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/api/AutoCloseableBDDSoftAssertionsTest.java:[62,13] reference to then is ambiguous both method <T>then(T) in org.assertj.core.api.Java6AbstractBDDSoftAssertions and method then(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractBDDSoftAssertions match [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/api/SoftAssertionsTest.java:[219,13] reference to assertThat is ambiguous both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/api/BDDSoftAssertionsTest.java:[216,13] reference to then is ambiguous both method <T>then(T) in org.assertj.core.api.Java6AbstractBDDSoftAssertions and method then(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractBDDSoftAssertions match [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/perf/SoftAssertionsPerfTest.java:[192,13] reference to assertThat is ambiguous both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/api/AutoCloseableSoftAssertionsTest.java:[71,13] reference to assertThat is ambiguous both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/api/Assertions_assertThat_with_CharSequence_Test.java:[35,61] reference to assertThat is ambiguous both method assertThat(java.lang.CharSequence) in org.assertj.core.api.Assertions and method <T>assertThat(T) in org.assertj.core.api.Assertions match [ERROR] /C:/Users/Redhawk/dev/assertj-core/src/test/java/org/assertj/core/api/Assertions_assertThat_with_CharSequence_Test.java:[41,61] reference to assertThat is ambiguous both method assertThat(java.lang.CharSequence) in org.assertj.core.api.Assertions and method <T>assertThat(T) in org.assertj.core.api.Assertions match ```
Any idea what the underlying cause is ? For info, same error in my environment. **Java version** ``` ~/prog/assertj/assertj-core-3.x ∙ java -version openjdk version "11-ea" 2018-09-25 OpenJDK Runtime Environment 18.9 (build 11-ea+13) OpenJDK 64-Bit Server VM 18.9 (build 11-ea+13, mixed mode) ``` **Maven version** ``` ~/prog/assertj/assertj-core-3.x ∙ mvn -version Apache Maven 3.5.3 (3383c37e1f9e9b3bc3df5050c29c8aff9f295297; 2018-02-25T08:49:05+13:00) Maven home: /home/joel/prog/apache-maven-3.5.3 Java version: 11-ea, vendor: Oracle Corporation Java home: /home/joel/prog/java/jdk-11 Default locale: en_NZ, platform encoding: UTF-8 OS name: "linux", version: "4.4.0-124-generic", arch: "amd64", family: "unix" ``` Output of `mvn clean test`: ``` [INFO] Scanning for projects... [INFO] Inspecting build with total of 1 modules... [INFO] Installing Nexus Staging features: [INFO] ... total of 1 executions of maven-deploy-plugin replaced with nexus-staging-maven-plugin [INFO] [INFO] ----------------------< org.assertj:assertj-core >---------------------- [INFO] Building AssertJ fluent assertions 3.11.0-SNAPSHOT [INFO] -------------------------------[ bundle ]------------------------------- [INFO] [INFO] --- maven-clean-plugin:3.1.0:clean (default-clean) @ assertj-core --- [INFO] Deleting /home/joel/prog/assertj/assertj-core-3.x/target [INFO] [INFO] --- license-maven-plugin:3.0:format (default) @ assertj-core --- [INFO] Updating license headers... [INFO] [INFO] --- jacoco-maven-plugin:0.8.1:prepare-agent (prepare-agent) @ assertj-core --- [INFO] argLine set to -javaagent:/home/joel/.m2/repository/org/jacoco/org.jacoco.agent/0.8.1/org.jacoco.agent-0.8.1-runtime.jar=destfile=/home/joel/prog/assertj/assertj-core-3.x/target/jacoco.exec,excludes=**/*hamcrest*/** --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.math=ALL-UNNAMED [INFO] [INFO] --- maven-resources-plugin:3.1.0:resources (default-resources) @ assertj-core --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] skip non existing resourceDirectory /home/joel/prog/assertj/assertj-core-3.x/src/main/resources [INFO] [INFO] --- maven-compiler-plugin:3.7.0:compile (default-compile) @ assertj-core --- [INFO] Changes detected - recompiling the module! [INFO] Compiling 579 source files to /home/joel/prog/assertj/assertj-core-3.x/target/classes [INFO] /home/joel/prog/assertj/assertj-core-3.x/src/main/java/org/assertj/core/api/AbstractDoubleAssert.java: Some input files use or override a deprecated API. [INFO] /home/joel/prog/assertj/assertj-core-3.x/src/main/java/org/assertj/core/api/AbstractDoubleAssert.java: Recompile with -Xlint:deprecation for details. [INFO] /home/joel/prog/assertj/assertj-core-3.x/src/main/java/org/assertj/core/api/AbstractIterableAssert.java: Some input files use unchecked or unsafe operations. [INFO] /home/joel/prog/assertj/assertj-core-3.x/src/main/java/org/assertj/core/api/AbstractIterableAssert.java: Recompile with -Xlint:unchecked for details. [INFO] [INFO] --- maven-resources-plugin:3.1.0:testResources (default-testResources) @ assertj-core --- [INFO] Using 'UTF-8' encoding to copy filtered resources. [INFO] Copying 17 resources [INFO] [INFO] --- maven-compiler-plugin:3.7.0:testCompile (default-testCompile) @ assertj-core --- [INFO] Changes detected - recompiling the module! 
[INFO] Compiling 2730 source files to /home/joel/prog/assertj/assertj-core-3.x/target/test-classes [INFO] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/FloatAssertBaseTest.java: Some input files use or override a deprecated API. [INFO] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/FloatAssertBaseTest.java: Recompile with -Xlint:deprecation for details. [INFO] ------------------------------------------------------------- [ERROR] COMPILATION ERROR : [INFO] ------------------------------------------------------------- [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/AutoCloseableBDDSoftAssertionsTest.java:[62,13] reference to then is ambiguous both method <T>then(T) in org.assertj.core.api.Java6AbstractBDDSoftAssertions and method then(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractBDDSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/perf/SoftAssertionsPerfTest.java:[214,13] reference to assertThat is ambiguous both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/SoftAssertionsTest.java:[219,13] reference to assertThat is ambiguous both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/Assertions_assertThat_with_CharSequence_Test.java:[35,61] reference to assertThat is ambiguous both method assertThat(java.lang.CharSequence) in org.assertj.core.api.Assertions and method <T>assertThat(T) in org.assertj.core.api.Assertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/Assertions_assertThat_with_CharSequence_Test.java:[41,61] reference to assertThat is ambiguous both method assertThat(java.lang.CharSequence) in org.assertj.core.api.Assertions and method <T>assertThat(T) in org.assertj.core.api.Assertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/BDDSoftAssertionsTest.java:[216,13] reference to then is ambiguous both method <T>then(T) in org.assertj.core.api.Java6AbstractBDDSoftAssertions and method then(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractBDDSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/AutoCloseableSoftAssertionsTest.java:[71,13] reference to assertThat is ambiguous both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [INFO] 7 errors [INFO] ------------------------------------------------------------- [INFO] ------------------------------------------------------------------------ [INFO] BUILD FAILURE [INFO] ------------------------------------------------------------------------ [INFO] Total time: 48.899 s [INFO] Finished at: 2018-05-19T11:24:59+12:00 [INFO] ------------------------------------------------------------------------ [ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.7.0:testCompile (default-testCompile) on project 
assertj-core: Compilation failure: Compilation failure: [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/AutoCloseableBDDSoftAssertionsTest.java:[62,13] reference to then is ambiguous [ERROR] both method <T>then(T) in org.assertj.core.api.Java6AbstractBDDSoftAssertions and method then(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractBDDSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/perf/SoftAssertionsPerfTest.java:[214,13] reference to assertThat is ambiguous [ERROR] both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/SoftAssertionsTest.java:[219,13] reference to assertThat is ambiguous [ERROR] both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/Assertions_assertThat_with_CharSequence_Test.java:[35,61] reference to assertThat is ambiguous [ERROR] both method assertThat(java.lang.CharSequence) in org.assertj.core.api.Assertions and method <T>assertThat(T) in org.assertj.core.api.Assertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/Assertions_assertThat_with_CharSequence_Test.java:[41,61] reference to assertThat is ambiguous [ERROR] both method assertThat(java.lang.CharSequence) in org.assertj.core.api.Assertions and method <T>assertThat(T) in org.assertj.core.api.Assertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/BDDSoftAssertionsTest.java:[216,13] reference to then is ambiguous [ERROR] both method <T>then(T) in org.assertj.core.api.Java6AbstractBDDSoftAssertions and method then(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractBDDSoftAssertions match [ERROR] /home/joel/prog/assertj/assertj-core-3.x/src/test/java/org/assertj/core/api/AutoCloseableSoftAssertionsTest.java:[71,13] reference to assertThat is ambiguous [ERROR] both method <T>assertThat(T) in org.assertj.core.api.Java6AbstractStandardSoftAssertions and method assertThat(java.lang.CharSequence) in org.assertj.core.api.Java6AbstractStandardSoftAssertions match [ERROR] -> [Help 1] [ERROR] [ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch. [ERROR] Re-run Maven using the -X switch to enable full debug logging. [ERROR] [ERROR] For more information about the errors and possible solutions, please read the following articles: [ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException ``` It looks like the cause for this problem is that `StringBuilder` is implementing `Comparable` now in Java 11 (see [StringBuilder doc Java 11](https://download.java.net/java/early_access/jdk11/docs/api/java.base/java/lang/StringBuilder.html) and [StringBuilder doc Java 10](https://docs.oracle.com/javase/10/docs/api/java/lang/StringBuilder.html)). The same also applies for `StringBuffer`. We have two options to fix this: - add a cast to `CharSequence` in the usage (e.g. 
in our tests) - add `assertThat(StringBuilder)` and `assertThat(StringBuffer)`. The first solution would also force AssertJ users to do the cast once they migrate to Java 11. The second one would be smoother for the user but of course means some extra code on our side. I'd go for the second solution. @joel-costigliola, @PascalSchumacher what do you think? @epeee thanks for the investigation, the second option is definitely better (user experience first!). Thanks, I will provide a PR later today.
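For illustration, here is how the two options look from the caller's side under Java 11, where `StringBuilder` also implements `Comparable` (a sketch only; the `startsWith` assertion is an arbitrary example, and the second call assumes the dedicated overload from the patch above is available):

```java
import static org.assertj.core.api.Assertions.assertThat;

public class StringBuilderAssertionExample {

  public static void main(String[] args) {
    StringBuilder actual = new StringBuilder("Yoda");

    // Option 1: disambiguate at the call site by casting to CharSequence,
    // so that only assertThat(CharSequence) applies. Every caller has to do this.
    assertThat((CharSequence) actual).startsWith("Yo");

    // Option 2 (the one chosen here): with a dedicated assertThat(StringBuilder)
    // overload, the call is no longer ambiguous and no cast is needed.
    assertThat(actual).startsWith("Yo");
  }
}
```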
2018-05-21T15:08:28Z
3.1
assertj/assertj
1,204
assertj__assertj-1204
[ "1145" ]
99b8065754ee41dfe29e626b4f66fd1d1ba8aec3
diff --git a/src/main/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively.java b/src/main/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively.java --- a/src/main/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively.java +++ b/src/main/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively.java @@ -64,8 +64,8 @@ private static String describeDifference(Difference difference, Representation r "- expected: <%s>%n" + "- actual : <%s>", join(difference.getPath()).with("."), - otherFieldValueRepresentation.replace("%", "%%"), - actualFieldValueRepresentation.replace("%", "%%")); + otherFieldValueRepresentation == null ? null : otherFieldValueRepresentation.replace("%", "%%"), + actualFieldValueRepresentation == null ? null : actualFieldValueRepresentation.replace("%", "%%")); } }
diff --git a/src/test/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively_create_Test.java b/src/test/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively_create_Test.java --- a/src/test/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively_create_Test.java +++ b/src/test/java/org/assertj/core/error/ShouldBeEqualByComparingFieldByFieldRecursively_create_Test.java @@ -24,11 +24,13 @@ import java.util.TreeMap; import java.util.TreeSet; +import org.assertj.core.api.ThrowableAssert; import org.assertj.core.description.TextDescription; import org.assertj.core.internal.DeepDifference; import org.assertj.core.internal.DeepDifference.Difference; import org.assertj.core.internal.objects.Objects_assertIsEqualToComparingFieldByFieldRecursive_Test.WithCollection; import org.assertj.core.internal.objects.Objects_assertIsEqualToComparingFieldByFieldRecursive_Test.WithMap; +import org.assertj.core.test.Name; import org.junit.Test; public class ShouldBeEqualByComparingFieldByFieldRecursively_create_Test { @@ -97,4 +99,20 @@ public void should_use_unambiguous_fields_description_when_standard_description_ toHexString(withLinkedHashMap.map.hashCode()))); } + @Test + public void should_not_fall_with_npe_if_field_of_one_of_compared_objects_is_null() { + final Name actualName = new Name("Andy"); + final Name nullName = new Name(null); + + Throwable error = ThrowableAssert.catchThrowable(new ThrowableAssert.ThrowingCallable() { + @Override + public void call() throws Throwable { + assertThat(actualName).isEqualToComparingFieldByFieldRecursively(nullName); + } + }); + + assertThat(error).isNotExactlyInstanceOf(NullPointerException.class); + + } + }
`isEqualToComparingFieldByFieldRecursively` throws NullPointerException while comparing null values

#### Summary

If a nested field is null while comparing two objects for equality with `isEqualToComparingFieldByFieldRecursively`, a NullPointerException is thrown.

#### Stacktrace

```
Exception in thread "main" java.lang.NullPointerException
    at org.assertj.core.error.ShouldBeEqualByComparingFieldByFieldRecursively.describeDifference(ShouldBeEqualByComparingFieldByFieldRecursively.java:67)
    at org.assertj.core.error.ShouldBeEqualByComparingFieldByFieldRecursively.shouldBeEqualByComparingFieldByFieldRecursive(ShouldBeEqualByComparingFieldByFieldRecursively.java:32)
    at org.assertj.core.internal.Objects.assertIsEqualToComparingFieldByFieldRecursively(Objects.java:728)
    at org.assertj.core.api.AbstractObjectAssert.isEqualToComparingFieldByFieldRecursively(AbstractObjectAssert.java:653)
    ...
```

#### Example

```java
import org.assertj.core.api.Assertions;

class Main {

  static class Value {
    private final String v;

    public Value(String v) {
      this.v = v;
    }
  }

  public static void main(String[] args) {
    Value actual = new Value(null); // swapping actual and expected leads to the same error
    Value expected = new Value("something");
    Assertions.assertThat(expected).isEqualToComparingFieldByFieldRecursively(actual);
  }
}
```

#### Java 8 specific?

* YES

Tested with version 3.9.0
Thanks for reporting this! This is a regression in version `2.9.0/3.9.0` introduced by https://github.com/joel-costigliola/assertj-core/commit/fb9f9522f7abe2d863af6aa760ec7e8162009f49 @hwielenberg As a workaround you can use `3.8.0`. I could write the bugfix if you want. Should be straightforward. Contributions welcome! 😃 I have a fix for this, will create a pull request shortly. Backport to 2.x (fixed in 3.9.1).
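For context, the NPE boils down to calling `replace` on a null field representation. The sketch below is not the real AssertJ class, just a simplified illustration of the failure mode and of the null guard added in the patch above:

```java
// Simplified stand-in for the percent escaping done while building the error message.
class EscapePercentSketch {

  // Buggy variant: throws NullPointerException when the compared field is null,
  // because its string representation is null here.
  static String escapeBuggy(String representation) {
    return representation.replace("%", "%%");
  }

  // Fixed variant: mirrors the null guard added in the patch above.
  static String escapeFixed(String representation) {
    return representation == null ? null : representation.replace("%", "%%");
  }

  public static void main(String[] args) {
    System.out.println(escapeFixed(null));  // prints "null" instead of throwing
    System.out.println(escapeFixed("50%")); // prints "50%%"
    // escapeBuggy(null) would throw a NullPointerException
  }
}
```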
2018-03-13T19:54:11Z
2.9
assertj/assertj
1,184
assertj__assertj-1184
[ "1175" ]
48cbf8e2fbb7185492aba6e4fa2dfdc659139a45
diff --git a/src/main/java/org/assertj/core/api/AbstractFileAssert.java b/src/main/java/org/assertj/core/api/AbstractFileAssert.java --- a/src/main/java/org/assertj/core/api/AbstractFileAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractFileAssert.java @@ -16,8 +16,8 @@ import static org.assertj.core.util.Preconditions.checkNotNull; import java.io.File; +import java.io.UncheckedIOException; import java.nio.charset.Charset; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.Files; import org.assertj.core.util.CheckReturnValue; import org.assertj.core.util.VisibleForTesting; @@ -228,7 +228,7 @@ public SELF isRelative() { * @throws IllegalArgumentException if the given {@code File} is not an existing file. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the content of the given one. * * @deprecated use {@link #hasSameContentAs(File)} instead @@ -266,7 +266,7 @@ public SELF hasContentEqualTo(File expected) { * @throws IllegalArgumentException if the given {@code File} is not an existing file. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the content of the given one. */ public SELF hasSameContentAs(File expected) { @@ -298,7 +298,7 @@ public SELF hasSameContentAs(File expected) { * @throws IllegalArgumentException if the given {@code File} is not an existing file. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the content of the given one. */ public SELF hasSameContentAs(File expected, Charset expectedCharset) { @@ -325,7 +325,7 @@ public SELF hasSameContentAs(File expected, Charset expectedCharset) { * @throws NullPointerException if the given content is {@code null}. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the given binary content. */ public SELF hasBinaryContent(byte[] expected) { @@ -391,7 +391,7 @@ public SELF usingCharset(Charset charset) { * @throws NullPointerException if the given content is {@code null}. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the given content. 
*/ public SELF hasContent(String expected) { @@ -469,7 +469,7 @@ public SELF canRead() { * @param expected the expected parent {@code File}. * @return {@code this} assertion object. * @throws NullPointerException if the expected parent {@code File} is {@code null}. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} parent is not equal to the expected one. * diff --git a/src/main/java/org/assertj/core/api/AbstractPathAssert.java b/src/main/java/org/assertj/core/api/AbstractPathAssert.java --- a/src/main/java/org/assertj/core/api/AbstractPathAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractPathAssert.java @@ -15,6 +15,7 @@ import static org.assertj.core.util.Preconditions.checkArgument; import static org.assertj.core.util.Preconditions.checkNotNull; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.file.ClosedFileSystemException; import java.nio.file.FileSystem; @@ -25,7 +26,6 @@ import java.nio.file.spi.FileSystemProvider; import org.assertj.core.api.exception.PathsException; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.Paths; import org.assertj.core.util.CheckReturnValue; import org.assertj.core.util.VisibleForTesting; @@ -181,7 +181,7 @@ public SELF hasSameContentAs(Path expected, Charset expectedCharset) { * @throws NullPointerException if the given content is {@code null}. * @throws AssertionError if the actual {@code File} is {@code null}. * @throws AssertionError if the actual {@code File} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the content of the actual {@code File} is not equal to the given binary content. */ public SELF hasBinaryContent(byte[] expected) { @@ -261,7 +261,7 @@ public SELF usingCharset(Charset charset) { * @param expected the expected text content to compare the actual {@code File}'s content to. * @return {@code this} assertion object. * @throws NullPointerException if the given content is {@code null}. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the actual {@code Path} is {@code null}. * @throws AssertionError if the actual {@code Path} is not a {@link Files#isReadable(Path) readable} file. * @throws AssertionError if the content of the actual {@code File} is not equal to the given content. diff --git a/src/main/java/org/assertj/core/api/Assertions.java b/src/main/java/org/assertj/core/api/Assertions.java --- a/src/main/java/org/assertj/core/api/Assertions.java +++ b/src/main/java/org/assertj/core/api/Assertions.java @@ -18,6 +18,7 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.io.UncheckedIOException; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URI; @@ -68,7 +69,6 @@ import java.util.stream.Stream; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.filter.InFilter; @@ -2206,7 +2206,7 @@ public static NotFilter not(Object valueNotToMatch) { * @param charset the character set to use. 
* @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, Charset charset) { return Files.contentOf(file, charset); @@ -2223,7 +2223,7 @@ public static String contentOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, String charsetName) { return Files.contentOf(file, charsetName); @@ -2239,7 +2239,7 @@ public static String contentOf(File file, String charsetName) { * * @param file the file. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file) { return Files.contentOf(file, Charset.defaultCharset()); @@ -2253,7 +2253,7 @@ public static String contentOf(File file) { * @param file the file. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file) { return Files.linesOf(file, Charset.defaultCharset()); @@ -2267,7 +2267,7 @@ public static List<String> linesOf(File file) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, Charset charset) { return Files.linesOf(file, charset); @@ -2281,7 +2281,7 @@ public static List<String> linesOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, String charsetName) { return Files.linesOf(file, charsetName); @@ -2301,7 +2301,7 @@ public static List<String> linesOf(File file, String charsetName) { * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, Charset charset) { return URLs.contentOf(url, charset); @@ -2317,7 +2317,7 @@ public static String contentOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the URL. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. 
*/ public static String contentOf(URL url, String charsetName) { return URLs.contentOf(url, charsetName); @@ -2332,7 +2332,7 @@ public static String contentOf(URL url, String charsetName) { * * @param url the URL. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url) { return URLs.contentOf(url, Charset.defaultCharset()); @@ -2346,7 +2346,7 @@ public static String contentOf(URL url) { * @param url the URL. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url) { return URLs.linesOf(url, Charset.defaultCharset()); @@ -2360,7 +2360,7 @@ public static List<String> linesOf(URL url) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, Charset charset) { return URLs.linesOf(url, charset); @@ -2374,7 +2374,7 @@ public static List<String> linesOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, String charsetName) { return URLs.linesOf(url, charsetName); diff --git a/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java b/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java --- a/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java +++ b/src/main/java/org/assertj/core/api/AssertionsForClassTypes.java @@ -16,6 +16,7 @@ import java.io.File; import java.io.InputStream; +import java.io.UncheckedIOException; import java.math.BigDecimal; import java.net.URI; import java.net.URL; @@ -38,7 +39,6 @@ import java.util.concurrent.CompletableFuture; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.filter.InFilter; @@ -1444,7 +1444,7 @@ public static NotFilter not(Object valueNotToMatch) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, Charset charset) { return Files.contentOf(file, charset); @@ -1461,7 +1461,7 @@ public static String contentOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. 
*/ public static String contentOf(File file, String charsetName) { return Files.contentOf(file, charsetName); @@ -1477,7 +1477,7 @@ public static String contentOf(File file, String charsetName) { * * @param file the file. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file) { return Files.contentOf(file, Charset.defaultCharset()); @@ -1491,7 +1491,7 @@ public static String contentOf(File file) { * @param file the file. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file) { return Files.linesOf(file, Charset.defaultCharset()); @@ -1505,7 +1505,7 @@ public static List<String> linesOf(File file) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, Charset charset) { return Files.linesOf(file, charset); @@ -1519,7 +1519,7 @@ public static List<String> linesOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, String charsetName) { return Files.linesOf(file, charsetName); @@ -1539,7 +1539,7 @@ public static List<String> linesOf(File file, String charsetName) { * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, Charset charset) { return URLs.contentOf(url, charset); @@ -1555,7 +1555,7 @@ public static String contentOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the URL. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, String charsetName) { return URLs.contentOf(url, charsetName); @@ -1570,7 +1570,7 @@ public static String contentOf(URL url, String charsetName) { * * @param url the URL. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url) { return URLs.contentOf(url, Charset.defaultCharset()); @@ -1584,7 +1584,7 @@ public static String contentOf(URL url) { * @param url the URL. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. 
*/ public static List<String> linesOf(URL url) { return URLs.linesOf(url, Charset.defaultCharset()); @@ -1598,7 +1598,7 @@ public static List<String> linesOf(URL url) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, Charset charset) { return URLs.linesOf(url, charset); @@ -1612,7 +1612,7 @@ public static List<String> linesOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, String charsetName) { return URLs.linesOf(url, charsetName); diff --git a/src/main/java/org/assertj/core/api/Java6Assertions.java b/src/main/java/org/assertj/core/api/Java6Assertions.java --- a/src/main/java/org/assertj/core/api/Java6Assertions.java +++ b/src/main/java/org/assertj/core/api/Java6Assertions.java @@ -16,6 +16,7 @@ import java.io.File; import java.io.InputStream; +import java.io.UncheckedIOException; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URI; @@ -42,7 +43,6 @@ import java.util.concurrent.atomic.AtomicStampedReference; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.filter.InFilter; @@ -1972,7 +1972,7 @@ public static NotFilter not(Object valueNotToMatch) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, Charset charset) { return Files.contentOf(file, charset); @@ -1989,7 +1989,7 @@ public static String contentOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, String charsetName) { return Files.contentOf(file, charsetName); @@ -2005,7 +2005,7 @@ public static String contentOf(File file, String charsetName) { * * @param file the file. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file) { return Files.contentOf(file, Charset.defaultCharset()); @@ -2019,7 +2019,7 @@ public static String contentOf(File file) { * @param file the file. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. 
*/ public static List<String> linesOf(File file) { return Files.linesOf(file, Charset.defaultCharset()); @@ -2033,7 +2033,7 @@ public static List<String> linesOf(File file) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, Charset charset) { return Files.linesOf(file, charset); @@ -2047,7 +2047,7 @@ public static List<String> linesOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, String charsetName) { return Files.linesOf(file, charsetName); @@ -2067,7 +2067,7 @@ public static List<String> linesOf(File file, String charsetName) { * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, Charset charset) { return URLs.contentOf(url, charset); @@ -2083,7 +2083,7 @@ public static String contentOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the URL. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, String charsetName) { return URLs.contentOf(url, charsetName); @@ -2098,7 +2098,7 @@ public static String contentOf(URL url, String charsetName) { * * @param url the URL. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url) { return URLs.contentOf(url, Charset.defaultCharset()); @@ -2112,7 +2112,7 @@ public static String contentOf(URL url) { * @param url the URL. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url) { return URLs.linesOf(url, Charset.defaultCharset()); @@ -2126,7 +2126,7 @@ public static List<String> linesOf(URL url) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, Charset charset) { return URLs.linesOf(url, charset); @@ -2140,7 +2140,7 @@ public static List<String> linesOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. 
+ * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, String charsetName) { return URLs.linesOf(url, charsetName); diff --git a/src/main/java/org/assertj/core/api/WithAssertions.java b/src/main/java/org/assertj/core/api/WithAssertions.java --- a/src/main/java/org/assertj/core/api/WithAssertions.java +++ b/src/main/java/org/assertj/core/api/WithAssertions.java @@ -15,6 +15,7 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.io.UncheckedIOException; import java.math.BigDecimal; import java.math.BigInteger; import java.net.URI; @@ -65,7 +66,6 @@ import java.util.stream.Stream; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.api.filter.FilterOperator; import org.assertj.core.api.filter.Filters; import org.assertj.core.api.filter.InFilter; @@ -1638,7 +1638,7 @@ default <T> DoesNotHave<T> doesNotHave(final Condition<? super T> condition) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ default String contentOf(final File file, final String charsetName) { return Assertions.contentOf(file, charsetName); @@ -1654,7 +1654,7 @@ default String contentOf(final File file, final String charsetName) { * * @param file the file. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ default String contentOf(final File file) { return Assertions.contentOf(file); @@ -1671,7 +1671,7 @@ default String contentOf(final File file) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ default String contentOf(final File file, final Charset charset) { return Assertions.contentOf(file, charset); @@ -1685,7 +1685,7 @@ default String contentOf(final File file, final Charset charset) { * @param file the file. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ default List<String> linesOf(final File file) { return Assertions.linesOf(file); @@ -1699,7 +1699,7 @@ default List<String> linesOf(final File file) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ default List<String> linesOf(final File file, final String charsetName) { return Assertions.linesOf(file, charsetName); @@ -1713,7 +1713,7 @@ default List<String> linesOf(final File file, final String charsetName) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. 
*/ default List<String> linesOf(final File file, final Charset charset) { return Assertions.linesOf(file, charset); @@ -2596,7 +2596,7 @@ default NotFilter not(Object valueNotToMatch) { * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. * @since 3.9.0 */ default String contentOf(URL url, Charset charset) { @@ -2613,7 +2613,7 @@ default String contentOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the URL. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. * @since 3.9.0 */ default String contentOf(URL url, String charsetName) { @@ -2629,7 +2629,7 @@ default String contentOf(URL url, String charsetName) { * * @param url the URL. * @return the content of the file. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. * @since 3.9.0 */ default String contentOf(URL url) { @@ -2644,7 +2644,7 @@ default String contentOf(URL url) { * @param url the URL. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. * @since 3.9.0 */ default List<String> linesOf(URL url) { @@ -2659,7 +2659,7 @@ default List<String> linesOf(URL url) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. * @since 3.9.0 */ default List<String> linesOf(URL url, Charset charset) { @@ -2674,7 +2674,7 @@ default List<String> linesOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. * @since 3.9.0 */ default List<String> linesOf(URL url, String charsetName) { diff --git a/src/main/java/org/assertj/core/api/exception/RuntimeIOException.java b/src/main/java/org/assertj/core/api/exception/RuntimeIOException.java deleted file mode 100644 --- a/src/main/java/org/assertj/core/api/exception/RuntimeIOException.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - * - * Copyright 2012-2018 the original author or authors. 
- */ -package org.assertj.core.api.exception; - -import org.assertj.core.util.Files; -import org.assertj.core.util.URLs; - -/** - * Exception thrown by <code>{@link Files}</code> and <code>{@link URLs}</code>. - * - * @author Yvonne Wang - */ -public final class RuntimeIOException extends RuntimeException { - - private static final long serialVersionUID = -8328554403430790831L; - - /** - * Creates a new {@link RuntimeIOException}. - * - * @param message the detail message. - */ - public RuntimeIOException(String message) { - super(message); - } - - /** - * Creates a new {@link RuntimeIOException}. - * - * @param message the detail message. - * @param cause the cause of the error. - */ - public RuntimeIOException(String message, Throwable cause) { - super(message, cause); - } -} diff --git a/src/main/java/org/assertj/core/internal/Files.java b/src/main/java/org/assertj/core/internal/Files.java --- a/src/main/java/org/assertj/core/internal/Files.java +++ b/src/main/java/org/assertj/core/internal/Files.java @@ -35,12 +35,12 @@ import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.charset.MalformedInputException; import java.util.List; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.util.VisibleForTesting; import org.assertj.core.util.diff.Delta; @@ -88,7 +88,7 @@ public static Files instance() { * @throws IllegalArgumentException if {@code expected} is not an existing file. * @throws AssertionError if {@code actual} is {@code null}. * @throws AssertionError if {@code actual} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the given files do not have same content. */ public void assertSameContentAs(AssertionInfo info, File actual, Charset actualCharset, File expected, @@ -105,15 +105,15 @@ public void assertSameContentAs(AssertionInfo info, File actual, Charset actualC // compute a binary diff, if there is a binary diff, it it shows the offset of the malformed input BinaryDiffResult binaryDiffResult = binaryDiff.diff(actual, readAllBytes(expected.toPath())); if (binaryDiffResult.hasNoDiff()) { - // fall back to the RuntimeIOException : not throwing an error is wrong as there was one in the first place. + // fall back to the UncheckedIOException : not throwing an error is wrong as there was one in the first place. throw e; } throw failures.failure(info, shouldHaveBinaryContent(actual, binaryDiffResult)); } catch (IOException ioe) { - throw new RuntimeIOException(format(UNABLE_TO_COMPARE_FILE_CONTENTS, actual, expected), ioe); + throw new UncheckedIOException(format(UNABLE_TO_COMPARE_FILE_CONTENTS, actual, expected), ioe); } } catch (IOException e) { - throw new RuntimeIOException(format(UNABLE_TO_COMPARE_FILE_CONTENTS, actual, expected), e); + throw new UncheckedIOException(format(UNABLE_TO_COMPARE_FILE_CONTENTS, actual, expected), e); } } @@ -125,7 +125,7 @@ public void assertSameContentAs(AssertionInfo info, File actual, Charset actualC * @throws NullPointerException if {@code expected} is {@code null}. * @throws AssertionError if {@code actual} is {@code null}. * @throws AssertionError if {@code actual} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the file does not have the binary content. 
*/ public void assertHasBinaryContent(AssertionInfo info, File actual, byte[] expected) { @@ -137,7 +137,7 @@ public void assertHasBinaryContent(AssertionInfo info, File actual, byte[] expec throw failures.failure(info, shouldHaveBinaryContent(actual, result)); } catch (IOException e) { String msg = String.format("Unable to verify binary contents of file:<%s>", actual); - throw new RuntimeIOException(msg, e); + throw new UncheckedIOException(msg, e); } } @@ -150,7 +150,7 @@ public void assertHasBinaryContent(AssertionInfo info, File actual, byte[] expec * @throws NullPointerException if {@code expected} is {@code null}. * @throws AssertionError if {@code actual} is {@code null}. * @throws AssertionError if {@code actual} is not an existing file. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the file does not have the text content. */ public void assertHasContent(AssertionInfo info, File actual, String expected, Charset charset) { @@ -162,7 +162,7 @@ public void assertHasContent(AssertionInfo info, File actual, String expected, C throw failures.failure(info, shouldHaveContent(actual, charset, diffs)); } catch (IOException e) { String msg = String.format("Unable to verify text contents of file:<%s>", actual); - throw new RuntimeIOException(msg, e); + throw new UncheckedIOException(msg, e); } } @@ -285,7 +285,7 @@ public void assertCanRead(AssertionInfo info, File actual) { * @param actual the given file. * @param expected the expected parent {@code File}. * @throws NullPointerException if the expected parent {@code File} is {@code null}. - * @throws RuntimeIOException if an I/O error occurs. + * @throws UncheckedIOException if an I/O error occurs. * @throws AssertionError if the given {@code File} is {@code null}. * @throws AssertionError if the given {@code File} does not have a parent. * @throws AssertionError if the given {@code File} parent is not equal to the expected one. 
@@ -298,7 +298,7 @@ public void assertHasParent(AssertionInfo info, File actual, File expected) { && areEqual(expected.getCanonicalFile(), actual.getParentFile().getCanonicalFile())) return; } catch (IOException e) { - throw new RuntimeIOException(String.format("Unable to get canonical form of [%s] or [%s].", actual, expected), e); + throw new UncheckedIOException(String.format("Unable to get canonical form of [%s] or [%s].", actual, expected), e); } throw failures.failure(info, shouldHaveParent(actual, expected)); } diff --git a/src/main/java/org/assertj/core/internal/Paths.java b/src/main/java/org/assertj/core/internal/Paths.java --- a/src/main/java/org/assertj/core/internal/Paths.java +++ b/src/main/java/org/assertj/core/internal/Paths.java @@ -38,6 +38,7 @@ import static org.assertj.core.util.Preconditions.checkNotNull; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.file.LinkOption; import java.nio.file.Path; @@ -45,7 +46,6 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.api.exception.PathsException; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.util.VisibleForTesting; import org.assertj.core.util.diff.Delta; @@ -280,7 +280,7 @@ public void assertHasContent(final AssertionInfo info, Path actual, String expec if (diffs.isEmpty()) return; throw failures.failure(info, shouldHaveContent(actual, charset, diffs)); } catch (IOException e) { - throw new RuntimeIOException(format("Unable to verify text contents of path:<%s>", actual), e); + throw new UncheckedIOException(format("Unable to verify text contents of path:<%s>", actual), e); } } @@ -292,7 +292,7 @@ public void assertHasBinaryContent(AssertionInfo info, Path actual, byte[] expec if (diffResult.hasNoDiff()) return; throw failures.failure(info, shouldHaveBinaryContent(actual, diffResult)); } catch (IOException e) { - throw new RuntimeIOException(format("Unable to verify binary contents of path:<%s>", actual), e); + throw new UncheckedIOException(format("Unable to verify binary contents of path:<%s>", actual), e); } } @@ -305,7 +305,7 @@ public void assertHasSameContentAs(AssertionInfo info, Path actual, Charset actu if (diffs.isEmpty()) return; throw failures.failure(info, shouldHaveSameContent(actual, expected, diffs)); } catch (IOException e) { - throw new RuntimeIOException(format("Unable to compare contents of paths:<%s> and:<%s>", actual, expected), e); + throw new UncheckedIOException(format("Unable to compare contents of paths:<%s> and:<%s>", actual, expected), e); } } diff --git a/src/main/java/org/assertj/core/util/Files.java b/src/main/java/org/assertj/core/util/Files.java --- a/src/main/java/org/assertj/core/util/Files.java +++ b/src/main/java/org/assertj/core/util/Files.java @@ -26,13 +26,12 @@ import java.io.IOException; import java.io.InputStreamReader; import java.io.StringWriter; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import java.util.UUID; -import org.assertj.core.api.exception.RuntimeIOException; - /** * Utility methods related to files. * @@ -86,12 +85,12 @@ private static List<String> fileNamesIn(File dir, boolean recurse) { * Returns the system's temporary directory. * * @return the system's temporary directory. - * @throws RuntimeIOException if this method cannot find or create the system's temporary directory. 
+ * @throws RuntimeException if this method cannot find or create the system's temporary directory. */ public static File temporaryFolder() { File temp = new File(temporaryFolderPath()); if (!temp.isDirectory()) { - throw new RuntimeIOException("Unable to find temporary directory"); + throw new RuntimeException("Unable to find temporary directory"); } return temp; } @@ -133,9 +132,9 @@ public static File newTemporaryFolder() { * * @param path the path of the new file. * @return the new created file. - * @throws RuntimeIOException if the path belongs to an existing non-empty directory. - * @throws RuntimeIOException if the path belongs to an existing file. - * @throws RuntimeIOException if any I/O error is thrown when creating the new file. + * @throws RuntimeException if the path belongs to an existing non-empty directory. + * @throws RuntimeException if the path belongs to an existing file. + * @throws UncheckedIOException if any I/O error is thrown when creating the new file. */ public static File newFile(String path) { File file = createFileIfPathIsNotANonEmptyDirectory(path); @@ -154,9 +153,9 @@ public static File newFile(String path) { * * @param path the path of the new directory. * @return the new created directory. - * @throws RuntimeIOException if the path belongs to an existing non-empty directory. - * @throws RuntimeIOException if the path belongs to an existing file. - * @throws RuntimeIOException if any I/O error is thrown when creating the new directory. + * @throws RuntimeException if the path belongs to an existing non-empty directory. + * @throws RuntimeException if the path belongs to an existing file. + * @throws RuntimeException if any I/O error is thrown when creating the new directory. */ public static File newFolder(String path) { File file = createFileIfPathIsNotANonEmptyDirectory(path); @@ -178,36 +177,39 @@ private static File createFileIfPathIsNotANonEmptyDirectory(String path) { return file; } - private static RuntimeIOException cannotCreateNewFile(String path, String reason) { + private static UncheckedIOException cannotCreateNewFile(String path, String reason) { throw cannotCreateNewFile(path, reason, null); } - private static RuntimeIOException cannotCreateNewFile(String path, Exception cause) { + private static UncheckedIOException cannotCreateNewFile(String path, Exception cause) { throw cannotCreateNewFile(path, null, cause); } - private static RuntimeIOException cannotCreateNewFile(String path, String reason, Exception cause) { + private static UncheckedIOException cannotCreateNewFile(String path, String reason, Exception cause) { String message = String.format("Unable to create the new file %s", quote(path)); if (!Strings.isNullOrEmpty(reason)) { message = concat(message, ": ", reason); } - if (cause != null) { - throw new RuntimeIOException(message, cause); + if (cause == null) { + throw new RuntimeException(message); + } + if (cause instanceof IOException) { + throw new UncheckedIOException(message, (IOException) cause); } - throw new RuntimeIOException(message); + throw new RuntimeException(message, cause); } /** * Returns the current directory. * * @return the current directory. - * @throws RuntimeIOException if the current directory cannot be obtained. + * @throws UncheckedIOException if the current directory cannot be obtained. 
*/ public static File currentFolder() { try { return new File(".").getCanonicalFile(); } catch (IOException e) { - throw new RuntimeIOException("Unable to get current directory", e); + throw new UncheckedIOException("Unable to get current directory", e); } } @@ -240,7 +242,7 @@ public static void delete(File file) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, String charsetName) { checkArgumentCharsetIsSupported(charsetName); @@ -254,14 +256,14 @@ public static String contentOf(File file, String charsetName) { * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(File file, Charset charset) { checkNotNull(charset, "The charset should not be null"); try { return loadContents(file, charset); } catch (IOException e) { - throw new RuntimeIOException("Unable to read " + file.getAbsolutePath(), e); + throw new UncheckedIOException("Unable to read " + file.getAbsolutePath(), e); } } @@ -284,14 +286,14 @@ private static String loadContents(File file, Charset charset) throws IOExceptio * @param charset the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, Charset charset) { checkNotNull(charset, "The charset should not be null"); try { return loadLines(file, charset); } catch (IOException e) { - throw new RuntimeIOException("Unable to read " + file.getAbsolutePath(), e); + throw new UncheckedIOException("Unable to read " + file.getAbsolutePath(), e); } } @@ -303,7 +305,7 @@ public static List<String> linesOf(File file, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(File file, String charsetName) { checkArgumentCharsetIsSupported(charsetName); diff --git a/src/main/java/org/assertj/core/util/URLs.java b/src/main/java/org/assertj/core/util/URLs.java --- a/src/main/java/org/assertj/core/util/URLs.java +++ b/src/main/java/org/assertj/core/util/URLs.java @@ -20,8 +20,6 @@ import java.nio.charset.Charset; import java.util.List; -import org.assertj.core.api.exception.RuntimeIOException; - /** * Utility methods related to URLs. * @@ -40,7 +38,7 @@ private URLs() { * @param charsetName the name of the character set to use. * @return the content of the file. * @throws IllegalArgumentException if the given character set is not supported on this platform. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. 
*/ public static String contentOf(URL url, String charsetName) { checkArgumentCharsetIsSupported(charsetName); @@ -54,14 +52,14 @@ public static String contentOf(URL url, String charsetName) { * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static String contentOf(URL url, Charset charset) { checkNotNull(charset, "The charset should not be null"); try { return loadContents(url.openStream(), charset); } catch (IOException e) { - throw new RuntimeIOException("Unable to read " + url, e); + throw new UncheckedIOException("Unable to read " + url, e); } } @@ -73,14 +71,14 @@ public static String contentOf(URL url, Charset charset) { * @param charset the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, Charset charset) { checkNotNull(charset, "The charset should not be null"); try { return loadLines(url.openStream(), charset); } catch (IOException e) { - throw new RuntimeIOException("Unable to read " + url, e); + throw new UncheckedIOException("Unable to read " + url, e); } } @@ -92,7 +90,7 @@ public static List<String> linesOf(URL url, Charset charset) { * @param charsetName the name of the character set to use. * @return the content of the URL. * @throws NullPointerException if the given charset is {@code null}. - * @throws RuntimeIOException if an I/O exception occurs. + * @throws UncheckedIOException if an I/O exception occurs. */ public static List<String> linesOf(URL url, String charsetName) { checkArgumentCharsetIsSupported(charsetName);
diff --git a/src/test/java/org/assertj/core/api/WithAssertions_delegation_Test.java b/src/test/java/org/assertj/core/api/WithAssertions_delegation_Test.java --- a/src/test/java/org/assertj/core/api/WithAssertions_delegation_Test.java +++ b/src/test/java/org/assertj/core/api/WithAssertions_delegation_Test.java @@ -17,6 +17,7 @@ import java.io.BufferedInputStream; import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.math.BigDecimal; import java.net.MalformedURLException; import java.net.URI; @@ -43,7 +44,6 @@ import java.util.function.Predicate; import java.util.stream.Stream; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.condition.AnyOf; import org.assertj.core.data.MapEntry; import org.junit.Test; @@ -589,7 +589,7 @@ public void withAssertions_doesNotHave_condition_Test() { /** * Test that the delegate method is called. */ - @Test(expected = RuntimeIOException.class) + @Test(expected = UncheckedIOException.class) public void withAssertions_contentOf_Test() { contentOf(new File("/non-existent file")).contains("a"); } @@ -597,7 +597,7 @@ public void withAssertions_contentOf_Test() { /** * Test that the delegate method is called. */ - @Test(expected = RuntimeIOException.class) + @Test(expected = UncheckedIOException.class) public void withAssertions_contentOf_with_charset_Test() { contentOf(new File("/non-existent file", "UTF-8")).contains("a"); } @@ -605,7 +605,7 @@ public void withAssertions_contentOf_with_charset_Test() { /** * Test that the delegate method is called. */ - @Test(expected = RuntimeIOException.class) + @Test(expected = UncheckedIOException.class) public void withAssertions_linesOf_Test() { linesOf(new File("/non-existent file")).contains("a"); } @@ -613,7 +613,7 @@ public void withAssertions_linesOf_Test() { /** * Test that the delegate method is called. 
*/ - @Test(expected = RuntimeIOException.class) + @Test(expected = UncheckedIOException.class) public void withAssertions_linesOf_with_charsetTest() { linesOf(new File("/non-existent file", "UTF-8")).contains("a"); } diff --git a/src/test/java/org/assertj/core/internal/files/Files_assertHasBinaryContent_Test.java b/src/test/java/org/assertj/core/internal/files/Files_assertHasBinaryContent_Test.java --- a/src/test/java/org/assertj/core/internal/files/Files_assertHasBinaryContent_Test.java +++ b/src/test/java/org/assertj/core/internal/files/Files_assertHasBinaryContent_Test.java @@ -22,9 +22,9 @@ import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.BinaryDiffResult; import org.assertj.core.internal.Files; import org.assertj.core.internal.FilesBaseTest; @@ -86,7 +86,7 @@ public void should_throw_error_wrapping_catched_IOException() throws IOException IOException cause = new IOException(); when(binaryDiff.diff(actual, expected)).thenThrow(cause); - thrown.expectWithCause(RuntimeIOException.class, cause); + thrown.expectWithCause(UncheckedIOException.class, cause); files.assertHasBinaryContent(someInfo(), actual, expected); } diff --git a/src/test/java/org/assertj/core/internal/files/Files_assertHasContent_Test.java b/src/test/java/org/assertj/core/internal/files/Files_assertHasContent_Test.java --- a/src/test/java/org/assertj/core/internal/files/Files_assertHasContent_Test.java +++ b/src/test/java/org/assertj/core/internal/files/Files_assertHasContent_Test.java @@ -22,12 +22,12 @@ import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.Files; import org.assertj.core.internal.FilesBaseTest; import org.assertj.core.util.Lists; @@ -92,7 +92,7 @@ public void should_throw_error_wrapping_catched_IOException() throws IOException IOException cause = new IOException(); when(diff.diff(actual, expected, charset)).thenThrow(cause); - thrown.expectWithCause(RuntimeIOException.class, cause); + thrown.expectWithCause(UncheckedIOException.class, cause); files.assertHasContent(someInfo(), actual, expected, charset); } diff --git a/src/test/java/org/assertj/core/internal/files/Files_assertHasParent_Test.java b/src/test/java/org/assertj/core/internal/files/Files_assertHasParent_Test.java --- a/src/test/java/org/assertj/core/internal/files/Files_assertHasParent_Test.java +++ b/src/test/java/org/assertj/core/internal/files/Files_assertHasParent_Test.java @@ -13,12 +13,12 @@ package org.assertj.core.internal.files; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.FilesBaseTest; import org.junit.Test; import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import static org.assertj.core.error.ShouldHaveParent.shouldHaveParent; import static org.assertj.core.test.TestData.someInfo; @@ -95,7 +95,7 @@ public void should_pass_if_actual_has_expected_parent_when_actual_form_is_canoni @Test public void should_throw_exception_when_canonical_form_representation_fail() throws Exception { - thrown.expect(RuntimeIOException.class); + thrown.expect(UncheckedIOException.class); File actual = 
mock(File.class); File expectedParent = mock(File.class); @@ -108,7 +108,7 @@ public void should_throw_exception_when_canonical_form_representation_fail() thr @Test public void should_throw_exception_when_canonical_form_representation_fail_for_expected_parent() throws Exception { - thrown.expect(RuntimeIOException.class); + thrown.expect(UncheckedIOException.class); File expectedParent = mock(File.class); when(expectedParent.getCanonicalFile()).thenThrow(new IOException()); diff --git a/src/test/java/org/assertj/core/internal/files/Files_assertSameContentAs_Test.java b/src/test/java/org/assertj/core/internal/files/Files_assertSameContentAs_Test.java --- a/src/test/java/org/assertj/core/internal/files/Files_assertSameContentAs_Test.java +++ b/src/test/java/org/assertj/core/internal/files/Files_assertSameContentAs_Test.java @@ -25,12 +25,12 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.io.UncheckedIOException; import java.math.BigInteger; import java.nio.charset.StandardCharsets; import java.util.List; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.BinaryDiffResult; import org.assertj.core.internal.FilesBaseTest; import org.assertj.core.util.Lists; @@ -41,7 +41,7 @@ import org.junit.rules.TemporaryFolder; /** - * Tests for <code>{@link org.assertj.core.internal.Files#assertSameContentAs(org.assertj.core.api.AssertionInfo, java.io.File, java.io.File)}</code>. + * Tests for <code>{@link org.assertj.core.internal.Files#assertSameContentAs(org.assertj.core.api.AssertionInfo, java.io.File, java.nio.charset.Charset, java.io.File, java.nio.charset.Charset)}</code>. * * @author Yvonne Wang * @author Joel Costigliola @@ -103,7 +103,7 @@ public void should_throw_error_wrapping_catched_IOException() throws IOException IOException cause = new IOException(); when(diff.diff(actual, defaultCharset(), expected, defaultCharset())).thenThrow(cause); - thrown.expectWithCause(RuntimeIOException.class, cause); + thrown.expectWithCause(UncheckedIOException.class, cause); files.assertSameContentAs(someInfo(), actual, defaultCharset(), expected, defaultCharset()); } @@ -125,7 +125,7 @@ public void should_fail_if_files_do_not_have_equal_content() throws IOException @Test public void should_throw_an_error_if_files_cant_be_compared_with_the_given_charsets_even_if_binary_identical() throws IOException { - thrown.expectWithMessageStartingWith(RuntimeIOException.class, "Unable to compare contents of files"); + thrown.expectWithMessageStartingWith(UncheckedIOException.class, "Unable to compare contents of files"); unMockedFiles.assertSameContentAs(someInfo(), createFileWithNonUTF8Character(), StandardCharsets.UTF_8, createFileWithNonUTF8Character(), StandardCharsets.UTF_8); diff --git a/src/test/java/org/assertj/core/internal/paths/Paths_assertHasBinaryContent_Test.java b/src/test/java/org/assertj/core/internal/paths/Paths_assertHasBinaryContent_Test.java --- a/src/test/java/org/assertj/core/internal/paths/Paths_assertHasBinaryContent_Test.java +++ b/src/test/java/org/assertj/core/internal/paths/Paths_assertHasBinaryContent_Test.java @@ -25,10 +25,10 @@ import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.file.Path; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.BinaryDiffResult; import org.assertj.core.internal.Paths; import 
org.assertj.core.internal.PathsBaseTest; @@ -111,7 +111,7 @@ public void should_throw_error_wrapping_catched_IOException() throws IOException when(nioFilesWrapper.exists(path)).thenReturn(true); when(nioFilesWrapper.isReadable(path)).thenReturn(true); - thrown.expectWithCause(RuntimeIOException.class, cause); + thrown.expectWithCause(UncheckedIOException.class, cause); paths.assertHasBinaryContent(someInfo(), path, expected); } diff --git a/src/test/java/org/assertj/core/internal/paths/Paths_assertHasContent_Test.java b/src/test/java/org/assertj/core/internal/paths/Paths_assertHasContent_Test.java --- a/src/test/java/org/assertj/core/internal/paths/Paths_assertHasContent_Test.java +++ b/src/test/java/org/assertj/core/internal/paths/Paths_assertHasContent_Test.java @@ -25,13 +25,13 @@ import java.io.File; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.Paths; import org.assertj.core.internal.PathsBaseTest; import org.assertj.core.util.diff.Delta; @@ -119,7 +119,7 @@ public void should_throw_error_wrapping_catched_IOException() throws IOException when(nioFilesWrapper.exists(path)).thenReturn(true); when(nioFilesWrapper.isReadable(path)).thenReturn(true); - thrown.expectWithCause(RuntimeIOException.class, cause); + thrown.expectWithCause(UncheckedIOException.class, cause); paths.assertHasContent(someInfo(), path, expected, charset); } diff --git a/src/test/java/org/assertj/core/internal/paths/Paths_assertHasSameContentAs_Test.java b/src/test/java/org/assertj/core/internal/paths/Paths_assertHasSameContentAs_Test.java --- a/src/test/java/org/assertj/core/internal/paths/Paths_assertHasSameContentAs_Test.java +++ b/src/test/java/org/assertj/core/internal/paths/Paths_assertHasSameContentAs_Test.java @@ -26,18 +26,19 @@ import static org.mockito.Mockito.when; import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; import org.assertj.core.api.AssertionInfo; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.internal.Paths; import org.assertj.core.util.diff.Delta; import org.junit.Test; /** - * Tests for <code>{@link Paths#assertHasSameContentAs(AssertionInfo, Path, Path)}</code>. + * Tests for <code>{@link Paths#assertHasSameContentAs(AssertionInfo, Path, Charset, Path, Charset)}</code>. 
*/ public class Paths_assertHasSameContentAs_Test extends MockPathsBaseTest { @@ -109,7 +110,7 @@ public void should_throw_error_wrapping_catched_IOException() throws IOException when(nioFilesWrapper.isReadable(actual)).thenReturn(true); when(nioFilesWrapper.isReadable(other)).thenReturn(true); - thrown.expectWithCause(RuntimeIOException.class, cause); + thrown.expectWithCause(UncheckedIOException.class, cause); paths.assertHasSameContentAs(someInfo(), actual, defaultCharset(), other, defaultCharset()); } diff --git a/src/test/java/org/assertj/core/util/Files_contentOf_Test.java b/src/test/java/org/assertj/core/util/Files_contentOf_Test.java --- a/src/test/java/org/assertj/core/util/Files_contentOf_Test.java +++ b/src/test/java/org/assertj/core/util/Files_contentOf_Test.java @@ -16,10 +16,10 @@ import static org.assertj.core.test.ExpectedException.none; import java.io.File; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import org.assertj.core.api.exception.RuntimeIOException; import org.junit.Rule; import org.junit.Test; import org.assertj.core.test.ExpectedException; @@ -54,7 +54,7 @@ public void should_throw_exception_if_file_not_found() { File missingFile = new File("missing.txt"); assertThat(missingFile.exists()).isFalse(); - thrown.expect(RuntimeIOException.class); + thrown.expect(UncheckedIOException.class); Files.contentOf(missingFile, Charset.defaultCharset()); } diff --git a/src/test/java/org/assertj/core/util/Files_linesOf_Test.java b/src/test/java/org/assertj/core/util/Files_linesOf_Test.java --- a/src/test/java/org/assertj/core/util/Files_linesOf_Test.java +++ b/src/test/java/org/assertj/core/util/Files_linesOf_Test.java @@ -12,12 +12,12 @@ */ package org.assertj.core.util; -import org.assertj.core.api.exception.RuntimeIOException; import org.assertj.core.test.ExpectedException; import org.junit.Rule; import org.junit.Test; import java.io.File; +import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.List; @@ -62,7 +62,7 @@ public void should_throw_exception_if_file_not_found() { File missingFile = new File("missing.txt"); assertThat(missingFile).doesNotExist(); - thrown.expect(RuntimeIOException.class); + thrown.expect(UncheckedIOException.class); linesOf(missingFile, Charset.defaultCharset()); } diff --git a/src/test/java/org/assertj/core/util/Files_newFile_Test.java b/src/test/java/org/assertj/core/util/Files_newFile_Test.java --- a/src/test/java/org/assertj/core/util/Files_newFile_Test.java +++ b/src/test/java/org/assertj/core/util/Files_newFile_Test.java @@ -19,7 +19,6 @@ import java.io.File; -import org.assertj.core.api.exception.RuntimeIOException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -37,14 +36,14 @@ public class Files_newFile_Test extends Files_TestCase { @Test public void should_throw_error_if_file_path_belongs_to_directory_that_is_not_empty() { - thrown.expect(RuntimeIOException.class); + thrown.expect(RuntimeException.class); Files.newFile("root"); } @Test public void should_throw_error_if_file_path_belongs_to_an_existing_file() { String path = join("root", "dir_1", "file_1_1").with(separator); - thrown.expect(RuntimeIOException.class); + thrown.expect(RuntimeException.class); Files.newFile(path); } diff --git a/src/test/java/org/assertj/core/util/URLs_contentOf_Test.java b/src/test/java/org/assertj/core/util/URLs_contentOf_Test.java --- 
a/src/test/java/org/assertj/core/util/URLs_contentOf_Test.java +++ b/src/test/java/org/assertj/core/util/URLs_contentOf_Test.java @@ -13,12 +13,12 @@ package org.assertj.core.util; -import org.assertj.core.api.exception.RuntimeIOException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.io.File; +import java.io.UncheckedIOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.Charset; @@ -46,7 +46,7 @@ public void should_throw_exception_if_url_not_found() throws MalformedURLExcepti File missingFile = new File("missing.txt"); assertThat(missingFile.exists()).isFalse(); - thrown.expect(RuntimeIOException.class); + thrown.expect(UncheckedIOException.class); URLs.contentOf(missingFile.toURI().toURL(), Charset.defaultCharset()); } diff --git a/src/test/java/org/assertj/core/util/URLs_linesOf_Test.java b/src/test/java/org/assertj/core/util/URLs_linesOf_Test.java --- a/src/test/java/org/assertj/core/util/URLs_linesOf_Test.java +++ b/src/test/java/org/assertj/core/util/URLs_linesOf_Test.java @@ -12,12 +12,12 @@ */ package org.assertj.core.util; -import org.assertj.core.api.exception.RuntimeIOException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.io.File; +import java.io.UncheckedIOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.Charset; @@ -48,7 +48,7 @@ public void should_throw_exception_if_url_not_found() throws MalformedURLExcepti File missingFile = new File("missing.txt"); assertThat(missingFile).doesNotExist(); - thrown.expect(RuntimeIOException.class); + thrown.expect(UncheckedIOException.class); URLs.linesOf(missingFile.toURI().toURL(), Charset.defaultCharset()); }
Replace RuntimeIOException with UncheckedIOException? Should we replace the AssertJ specific `RuntimeIOException` with `UncheckedIOException` which was added in Java 8?
I can work on this 👍 @joel-costigliola what should we do with the constructors missing in UncheckedIOException, e.g. `RuntimeIOException(String message)`? UncheckedIOException constructors: `UncheckedIOException(IOException cause)`, `UncheckedIOException(String message, IOException cause)`. Can't we use `UncheckedIOException(String message, IOException cause)`? If not, it means it wasn't an IOException in the first place, and then a simple `RuntimeException` should do the job. Yes, you are right. Will fix wrong usage of RuntimeIOException with RuntimeException. Thanks!
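A minimal illustrative sketch of the two wrapping patterns discussed above (the class and method names are hypothetical; only the `UncheckedIOException` constructors are from the JDK): wrap a real `IOException` with `UncheckedIOException(String, IOException)`, and fall back to a plain `RuntimeException` when there is no `IOException` to wrap.

```java
import java.io.IOException;
import java.io.UncheckedIOException;

class WrappingSketch {

  // UncheckedIOException(String, IOException) exists, so a caught IOException can keep its message
  static RuntimeException wrapIo(String message, IOException cause) {
    return new UncheckedIOException(message, cause);
  }

  // no IOException involved: UncheckedIOException has no (String)-only constructor, a plain RuntimeException does the job
  static RuntimeException wrapPlain(String message) {
    return new RuntimeException(message);
  }
}
```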
2018-02-18T12:17:19Z
3.9
assertj/assertj
1,134
assertj__assertj-1134
[ "1128" ]
bd359b62a681cbf77827d906644093c6c8824d5a
diff --git a/src/main/java/org/assertj/core/api/AbstractAssert.java b/src/main/java/org/assertj/core/api/AbstractAssert.java --- a/src/main/java/org/assertj/core/api/AbstractAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractAssert.java @@ -50,6 +50,9 @@ */ public abstract class AbstractAssert<SELF extends AbstractAssert<SELF, ACTUAL>, ACTUAL> implements Assert<SELF, ACTUAL> { + // https://github.com/joel-costigliola/assertj-core/issues/1128 + public static boolean throwUnsupportedExceptionOnEquals = true; + private static final String ORG_ASSERTJ = "org.assert"; @VisibleForTesting @@ -540,7 +543,10 @@ public SELF withRepresentation(Representation representation) { @Override @Deprecated public boolean equals(Object obj) { - throw new UnsupportedOperationException("'equals' is not supported...maybe you intended to call 'isEqualTo'"); + if (throwUnsupportedExceptionOnEquals) { + throw new UnsupportedOperationException("'equals' is not supported...maybe you intended to call 'isEqualTo'"); + } + return super.equals(obj); } /**
diff --git a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_equal_hashCode_Test.java b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_equal_hashCode_Test.java --- a/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_equal_hashCode_Test.java +++ b/src/test/java/org/assertj/core/api/abstract_/AbstractAssert_equal_hashCode_Test.java @@ -41,9 +41,17 @@ public void should_fail_because_not_supported_operation() { assertions.equals("anotherString"); } + @Test + @SuppressWarnings("deprecation") + public void should_not_fail_when_equals_exceptions_is_deactivated() { + AbstractAssert.throwUnsupportedExceptionOnEquals = false; + assertions.equals("anotherString"); + } + + @Test public void shouldReturnOneAsHashCode() { assertThat(assertions.hashCode()).isEqualTo(1); } -} \ No newline at end of file +}
Must AbstractAssert#equals throw UnsupportedOperationException #### Summary @joel-costigliola Is it necessary for the `AbstractAssert#equals` to throw the `UnsupportedOperationException`? This is against the contract of `Object#equals` and breaks other libraries which are not handling this. I know it should be up to the libraries to handle this error, but this situation is absolutely not obvious. Is it possible to change this? In fact, currently it breaks the coroutines of Kotlin. #### Example ```java org.assertj.core.api.AbstractAssert /** * {@inheritDoc} * * @deprecated use {@link #isEqualTo} instead * * @throws UnsupportedOperationException if this method is called. */ @Override @Deprecated public boolean equals(Object obj) { throw new UnsupportedOperationException("'equals' is not supported...maybe you intended to call 'isEqualTo'"); } ``` #### Java 8 specific ? * YES : create Pull Request from the `master` branch * NO : create Pull Request from the `2.x` branch
This is done to avoid misusing the API like that: ```java // BAD assertThat(actual).equals(expected); // GOOD assertThat(actual).isEqualTo(expected); ``` `*Assert` instances are not meant to be compared; what is the concrete use case for that (I don't know what coroutines are)? Is it Kotlin specific? Ok, I understand the reason why you added the exception to the method. From this point of view it is useful to prevent the user from using the `equals` method unintentionally. The problem itself is in a Kotlin library. When a lambda is used like the following ``` doSomething { assertThat(foo).isNull() } ``` the `isNull` itself returns the `...AbstractAsserter` again. When the lambda can handle return values, this `....AbstractAsserter` is returned to the caller. In this case, the `coroutines` library does some magic under the hood. One step in that magic is to compare the result with the last result (`thisone.equals(lastone)`), and here the problem happens, because `equals` throws an exception which the library doesn't handle. I know that this is the library's problem, but as I said, IMO breaking the contract of `equals` isn't the best way to go.
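The patch above introduces a public static switch for this; here is a hedged sketch of how a test could temporarily opt out of the exception, mirroring the added `should_not_fail_when_equals_exceptions_is_deactivated` test (the surrounding class and the try/finally reset are illustrative additions):

```java
import static org.assertj.core.api.Assertions.assertThat;

import org.assertj.core.api.AbstractAssert;

class EqualsOptOutSketch {

  @SuppressWarnings("deprecation")
  void equalsNoLongerThrows() {
    AbstractAssert.throwUnsupportedExceptionOnEquals = false; // flag added by this PR
    try {
      // falls back to Object#equals (reference equality) instead of throwing UnsupportedOperationException
      boolean same = assertThat("Yoda").equals(assertThat("Yoda"));
      System.out.println(same); // false: two distinct assert instances
    } finally {
      AbstractAssert.throwUnsupportedExceptionOnEquals = true; // restore the default behavior
    }
  }
}
```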
2017-12-19T06:53:29Z
3.8
assertj/assertj
1,014
assertj__assertj-1014
[ "1009", "1009" ]
92c396e52844a283a07b8ecf81ca71be757d3631
diff --git a/src/main/java/org/assertj/core/api/AbstractIterableAssert.java b/src/main/java/org/assertj/core/api/AbstractIterableAssert.java --- a/src/main/java/org/assertj/core/api/AbstractIterableAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractIterableAssert.java @@ -222,6 +222,14 @@ public SELF containsExactlyInAnyOrder(@SuppressWarnings("unchecked") ELEMENT... return myself; } + /** + * {@inheritDoc} + */ + @Override + public SELF containsExactlyInAnyOrderElementsOf(Iterable<? extends ELEMENT> values) { + return containsExactlyInAnyOrder(toArray(values)); + } + /** * {@inheritDoc} */ diff --git a/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java b/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java --- a/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractObjectArrayAssert.java @@ -358,6 +358,13 @@ public SELF containsExactlyInAnyOrder(@SuppressWarnings("unchecked") ELEMENT... arrays.assertContainsExactlyInAnyOrder(info, actual, values); return myself; } + /** + * {@inheritDoc + */ + @Override + public SELF containsExactlyInAnyOrderElementsOf(Iterable<? extends ELEMENT> values) { + return containsExactlyInAnyOrder(toArray(values)); + } /** * Same as {@link #containsExactly(Object...)} but handles the {@link Iterable} to array conversion : verifies that diff --git a/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java b/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java --- a/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java +++ b/src/main/java/org/assertj/core/api/AtomicReferenceArrayAssert.java @@ -439,6 +439,35 @@ public AtomicReferenceArrayAssert<T> containsExactlyInAnyOrder(@SuppressWarnings return myself; } + /** + * Verifies that the actual AtomicReferenceArray contains exactly the given values and nothing else, <b>in any order</b>.<br> + * + * <p> + * Example : + * <pre><code class='java'> + * AtomicReferenceArray&lt;Ring&gt; elvesRings = new AtomicReferenceArray(new Ring[]{vilya, nenya, narya, vilya}); + * AtomicReferenceArray&lt;Ring&gt; elvesRingsSomeMissing = new AtomicReferenceArray(new Ring[]{vilya, nenya, narya}); + * AtomicReferenceArray&lt;Ring&gt; elvesRingsDifferentOrder = new AtomicReferenceArray(new Ring[]{nenya, narya, vilya, vilya}); + * + * // assertion will pass + * assertThat(elvesRings).containsExactlyInAnyOrder(elvesRingsDifferentOrder); + * + * // assertion will fail as vilya is contained twice in elvesRings. + * assertThat(elvesRings).containsExactlyInAnyOrder(elvesRingsSomeMissing);</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones. + * @since 2.9.0 / 3.9.0 + */ + @Override + public AtomicReferenceArrayAssert<T> containsExactlyInAnyOrderElementsOf(Iterable<? extends T> values) { + return containsExactlyInAnyOrder(toArray(values)); + } + /** * Same as {@link #containsExactly(Object...)} but handles the {@link Iterable} to array conversion : verifies that * actual contains all elements of the given {@code Iterable} and nothing else <b>in the same order</b>. 
diff --git a/src/main/java/org/assertj/core/api/ObjectEnumerableAssert.java b/src/main/java/org/assertj/core/api/ObjectEnumerableAssert.java --- a/src/main/java/org/assertj/core/api/ObjectEnumerableAssert.java +++ b/src/main/java/org/assertj/core/api/ObjectEnumerableAssert.java @@ -149,6 +149,32 @@ public interface ObjectEnumerableAssert<SELF extends ObjectEnumerableAssert<SELF */ SELF containsExactlyInAnyOrder(@SuppressWarnings("unchecked") ELEMENT... values); + /** + * Verifies that the actual group contains exactly the given values and nothing else, <b>in any order</b>.<br> + * + * <p> + * Example : + * <pre><code class='java'> // an Iterable is used in the example but it would also work with an array + * Iterable&lt;Ring&gt; elvesRings = newArrayList(vilya, nenya, narya, vilya); + * Iterable&lt;Ring&gt; elvesRingsSomeMissing = newArrayList(vilya, nenya, narya); + * Iterable&lt;Ring&gt; elvesRingsDifferentOrder = newArrayList(nenya, narya, vilya, vilya); + * + * // assertion will pass + * assertThat(elvesRings).containsExactlyInAnyOrder(elvesRingsDifferentOrder); + * + * // assertion will fail as vilya is contained twice in elvesRings. + * assertThat(elvesRings).containsExactlyInAnyOrder(elvesRingsSomeMissing);</code></pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones. + * @since 2.9.0 / 3.9.0 + */ + SELF containsExactlyInAnyOrderElementsOf(Iterable<? extends ELEMENT> values); + /** * Verifies that the actual group contains the given sequence in the correct order and <b>without extra values between the sequence values</b>. * <p>
diff --git a/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java b/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java @@ -0,0 +1,37 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2017 the original author or authors. + */ +package org.assertj.core.api.atomic.referencearray; + +import static org.assertj.core.util.Lists.newArrayList; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.AtomicReferenceArrayAssert; +import org.assertj.core.api.AtomicReferenceArrayAssertBaseTest; + +/** + * Test for {@link AtomicReferenceArrayAssert#containsExactlyInAnyOrderElementsOf(Iterable)} + * @author Filip Hrisafov + */ +public class AtomicReferenceArrayAssert_containsExactlyInAnyOrderElementsOf_Test + extends AtomicReferenceArrayAssertBaseTest { + + @Override + protected AtomicReferenceArrayAssert<Object> invoke_api_method() { + return assertions.containsExactlyInAnyOrderElementsOf(newArrayList("Yoda", "Luke")); + } + + @Override + protected void verify_internal_effects() { + verify(arrays).assertContainsExactlyInAnyOrder(info(), internalArray(), new String[] { "Yoda", "Luke" }); + } +} diff --git a/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingFieldByFieldElementComparator_Test.java b/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingFieldByFieldElementComparator_Test.java --- a/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingFieldByFieldElementComparator_Test.java +++ b/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_usingFieldByFieldElementComparator_Test.java @@ -17,6 +17,7 @@ import static org.assertj.core.test.AlwaysEqualComparator.ALWAY_EQUALS_STRING; import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; import static org.assertj.core.util.Arrays.array; +import static org.assertj.core.util.Lists.newArrayList; import java.util.Comparator; import java.util.Objects; @@ -106,6 +107,14 @@ public void successful_containsExactlyInAnyOrder_assertion_using_field_by_field_ new Snake(15)); } + @Test + public void successful_containsExactlyInAnyOrderElementsOf_assertion_using_field_by_field_element_comparator_with_heterogeneous_array() { + Snake snake = new Snake(15); + AtomicReferenceArray<Animal> array1 = new AtomicReferenceArray<>(array(new Bird("White"), snake, snake)); + assertThat(array1).usingFieldByFieldElementComparator().containsExactlyInAnyOrderElementsOf( + newArrayList(new Snake(15), new Bird("White"), new Snake(15))); + } + @Test public void 
successful_containsOnly_assertion_using_field_by_field_element_comparator_with_unordered_array() { Person goodObiwan = new Person("Obi-Wan", "Kenobi", "good man"); diff --git a/src/test/java/org/assertj/core/api/iterable/IterableAssert_containsExactlyInAnyOrderElementsOf_Test.java b/src/test/java/org/assertj/core/api/iterable/IterableAssert_containsExactlyInAnyOrderElementsOf_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/iterable/IterableAssert_containsExactlyInAnyOrderElementsOf_Test.java @@ -0,0 +1,39 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright 2012-2017 the original author or authors. + */ +package org.assertj.core.api.iterable; + +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.AbstractIterableAssert; +import org.assertj.core.api.ConcreteIterableAssert; +import org.assertj.core.api.IterableAssertBaseTest; +import org.assertj.core.util.Lists; + +/** + * Tests for {@link AbstractIterableAssert#containsExactlyInAnyOrderElementsOf(Iterable)}. + * + * @author Filip Hrisafov + */ +public class IterableAssert_containsExactlyInAnyOrderElementsOf_Test extends IterableAssertBaseTest { + + @Override + protected ConcreteIterableAssert<Object> invoke_api_method() { + return assertions.containsExactlyInAnyOrderElementsOf(Lists.newArrayList("Yoda", "Luke", "Yoda")); + } + + @Override + protected void verify_internal_effects() { + Object[] values = { "Yoda", "Luke", "Yoda" }; + verify(iterables).assertContainsExactlyInAnyOrder(getInfo(assertions), getActual(assertions), values); + } +} diff --git a/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingFieldByFieldElementComparator_Test.java b/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingFieldByFieldElementComparator_Test.java --- a/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingFieldByFieldElementComparator_Test.java +++ b/src/test/java/org/assertj/core/api/iterable/IterableAssert_usingFieldByFieldElementComparator_Test.java @@ -105,6 +105,14 @@ public void successful_containsExactlyInAnyOrder_assertion_using_field_by_field_ new Snake(15)); } + @Test + public void successful_containsExactlyInAnyOrderElementsOf_assertion_using_field_by_field_element_comparator_with_heterogeneous_list() { + Snake snake = new Snake(15); + List<Animal> list1 = newArrayList(new Bird("White"), snake, snake); + assertThat(list1).usingFieldByFieldElementComparator().containsExactlyInAnyOrderElementsOf( + newArrayList(new Snake(15), new Bird("White"), new Snake(15))); + } + @Test public void successful_containsOnly_assertion_using_field_by_field_element_comparator_with_unordered_list() { Person goodObiwan = new Person("Obi-Wan", "Kenobi", "good man"); diff --git a/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java b/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java new file mode 100644 --- /dev/null +++ 
b/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java @@ -0,0 +1,38 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * <p> + * http://www.apache.org/licenses/LICENSE-2.0 + * <p> + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * <p> + * Copyright 2012-2017 the original author or authors. + */ +package org.assertj.core.api.objectarray; + +import static org.assertj.core.util.Lists.newArrayList; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.ObjectArrayAssert; +import org.assertj.core.api.ObjectArrayAssertBaseTest; + +/** + * Tests for {@link ObjectArrayAssert#containsExactlyInAnyOrderElementsOf(Iterable)}. + * + * @author Filip Hrisafov + */ +public class ObjectArrayAssert_containsExactlyInAnyOrderElementsOf_Test extends ObjectArrayAssertBaseTest { + + @Override + protected ObjectArrayAssert<Object> invoke_api_method() { + return assertions.containsExactlyInAnyOrderElementsOf(newArrayList("Yoda", "Luke")); + } + + @Override + protected void verify_internal_effects() { + verify(arrays) + .assertContainsExactlyInAnyOrder(getInfo(assertions), getActual(assertions), new String[] { "Yoda", "Luke" }); + } +} diff --git a/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingFieldByFieldElementComparator_Test.java b/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingFieldByFieldElementComparator_Test.java --- a/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingFieldByFieldElementComparator_Test.java +++ b/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingFieldByFieldElementComparator_Test.java @@ -15,6 +15,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.test.AlwaysEqualComparator.ALWAY_EQUALS_STRING; import static org.assertj.core.util.Arrays.array; +import static org.assertj.core.util.Lists.newArrayList; import java.util.Comparator; import java.util.Objects; @@ -102,6 +103,14 @@ public void successful_containsExactlyInAnyOrder_assertion_using_field_by_field_ new Snake(15)); } + @Test + public void successful_containsExactlyInAnyOrderElementsOf_assertion_using_field_by_field_element_comparator_with_heterogeneous_array() { + Snake snake = new Snake(15); + Animal[] array1 = array(new Bird("White"), snake, snake); + assertThat(array1).usingFieldByFieldElementComparator().containsExactlyInAnyOrderElementsOf( + newArrayList(new Snake(15), new Bird("White"), new Snake(15))); + } + @Test public void successful_containsOnly_assertion_using_field_by_field_element_comparator_with_unordered_array() { Person goodObiwan = new Person("Obi-Wan", "Kenobi", "good man");
containsExactlyInAnyOrderElementsOf #### Summary Variation of `containsExactlyInAnyOrder` with an `Iterable` parameter. The assertion is named `containsExactlyInAnyOrderElementsOf` for consistency with the API (ex `containsExactlyElementsOf`). #### Example ```java // an Iterable is used in the example but it would also work with an array Iterable<Ring> elvesRings = newArrayList(vilya, nenya, narya, vilya); Iterable<Ring> elvesRingsSomeMissing = newArrayList(vilya, nenya, narya); Iterable<Ring> elvesRingsDifferentOrder = newArrayList(nenya, narya, vilya, vilya); // assertion will pass assertThat(elvesRings).containsExactlyInAnyOrderElementsOf(elvesRingsDifferentOrder); // assertion will fail as vilya is contained twice in elvesRings. assertThat(elvesRings).containsExactlyInAnyOrderElementsOf(elvesRingsSomeMissing); ``` #### Java 8 specific ? * NO : create Pull Request from the `2.x` branch
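Since the patch adds the same assertion to `ObjectArrayAssert` and `AtomicReferenceArrayAssert` as well, here is a hedged sketch of the array flavour with plain strings standing in for the `Ring` fixtures used above:

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.util.Lists.newArrayList;

class ArrayFlavourSketch {

  void arrayUsage() {
    String[] elvesRings = { "vilya", "nenya", "narya", "vilya" };
    // passes: same elements in any order, duplicates included
    assertThat(elvesRings).containsExactlyInAnyOrderElementsOf(newArrayList("nenya", "narya", "vilya", "vilya"));
    // would fail: "vilya" is contained twice in elvesRings but expected only once
    // assertThat(elvesRings).containsExactlyInAnyOrderElementsOf(newArrayList("vilya", "nenya", "narya"));
  }
}
```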
2017-06-20T17:33:19Z
2.8
assertj/assertj
813
assertj__assertj-813
[ "812" ]
d517f7761f4f7a13e86aa04c3f1aa8584fef881a
diff --git a/src/main/java/org/assertj/core/error/OptionalShouldContainInstanceOf.java b/src/main/java/org/assertj/core/error/OptionalShouldContainInstanceOf.java --- a/src/main/java/org/assertj/core/error/OptionalShouldContainInstanceOf.java +++ b/src/main/java/org/assertj/core/error/OptionalShouldContainInstanceOf.java @@ -12,6 +12,8 @@ */ package org.assertj.core.error; +import java.util.Optional; + import static java.lang.String.format; /** @@ -21,18 +23,25 @@ */ public class OptionalShouldContainInstanceOf extends BasicErrorMessageFactory { - private OptionalShouldContainInstanceOf(Object optional, Class<?> clazz) { - super(format("%nExpecting %s to contain a value of type %s.", optional.getClass().getSimpleName(), clazz.getName())); + private OptionalShouldContainInstanceOf(String message) { + super(message); } /** * Indicates that a value should be present in an empty {@link java.util.Optional}. * - * @param optional Optional to be checked. + * @param value Optional to be checked. * @return an error message factory. * @throws java.lang.NullPointerException if optional is null. */ - public static OptionalShouldContainInstanceOf shouldContainInstanceOf(Object optional, Class<?> clazz) { - return new OptionalShouldContainInstanceOf(optional, clazz); + public static OptionalShouldContainInstanceOf shouldContainInstanceOf(Object value, Class<?> clazz) { + Optional optional = (Optional) value; + if (optional.isPresent()) { + return new OptionalShouldContainInstanceOf(format("%nExpecting:%n <%s>%nto contain a value that is an instance of:%n <%s>%nbut did contain an instance of:%n <%s>", + optional.getClass().getSimpleName(), clazz.getName(), optional.get().getClass().getName())); + } else { + return new OptionalShouldContainInstanceOf(format("%nExpecting:%n <%s>%nto contain a value that is an instance of:%n <%s>%nbut was empty", + optional.getClass().getSimpleName(), clazz.getName())); + } } }
diff --git a/src/test/java/org/assertj/core/error/OptionalShouldContainInstanceOf_create_Test.java b/src/test/java/org/assertj/core/error/OptionalShouldContainInstanceOf_create_Test.java --- a/src/test/java/org/assertj/core/error/OptionalShouldContainInstanceOf_create_Test.java +++ b/src/test/java/org/assertj/core/error/OptionalShouldContainInstanceOf_create_Test.java @@ -23,8 +23,18 @@ public class OptionalShouldContainInstanceOf_create_Test { @Test - public void should_create_error_message_with_expected_type() { + public void should_create_error_message_with_empty() { String errorMessage = shouldContainInstanceOf(Optional.empty(), Object.class).create(); - assertThat(errorMessage).isEqualTo(format("%nExpecting Optional to contain a value of type java.lang.Object.")); + assertThat(errorMessage).isEqualTo(format("%nExpecting:%n <Optional>%n" + + "to contain a value that is an instance of:%n <java.lang.Object>%n" + + "but was empty")); + } + + @Test + public void should_create_error_message_with_expected_type() { + String errorMessage = shouldContainInstanceOf(Optional.of(Integer.MIN_VALUE), String.class).create(); + assertThat(errorMessage).isEqualTo(format("%nExpecting:%n <Optional>%n" + + "to contain a value that is an instance of:%n <java.lang.String>%n" + + "but did contain an instance of:%n <java.lang.Integer>")); } -} \ No newline at end of file +}
Improve error message for "optional.containsInstanceOf(clazz)" #### Summary The error message for OptionalAssert.containsInstanceOf() is less descriptive than the similar message for AbstractAssert.isInstanceOf. Pull request is in the works. #### Example Before change: ```java Expecting Optional to contain a value of type Number. at ... ``` After change: ```java Expecting: <Optional> to contain a value that is an instance of: <class java.lang.Number> but did contain instance of: <class java.lang.String> at ... ``` Existing isInstanceOf-message: ```java Expecting: <"StringValue"> to be an instance of: <java.lang.Number> but was instance of: <java.lang.String> ``` #### Java 8 specific ? * YES : create Pull Request from the `master` branch * NO : create Pull Request from the `2.x` branch
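For context, a hedged sketch of the call that produces a message like the one shown above (the contained value and the expected type are illustrative):

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.util.Optional;

class ContainsInstanceOfSketch {

  void failingAssertion() {
    Optional<String> actual = Optional.of("StringValue");
    // fails: the Optional contains a String, not a Number; the improved message now reports both types
    assertThat(actual).containsInstanceOf(Number.class);
  }
}
```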
2016-11-28T11:27:01Z
3.6
assertj/assertj
656
assertj__assertj-656
[ "655" ]
319d0ce078adfd83575c15b21b7916470fe46d10
diff --git a/src/main/java/org/assertj/core/api/IterableAssert.java b/src/main/java/org/assertj/core/api/IterableAssert.java --- a/src/main/java/org/assertj/core/api/IterableAssert.java +++ b/src/main/java/org/assertj/core/api/IterableAssert.java @@ -120,6 +120,24 @@ public IterableAssert<T> isNotExactlyInstanceOf(Class<?> type) { } return super.isNotExactlyInstanceOf(type); } + + @Override + public IterableAssert<T> isSameAs(Object expected) { + if (actual instanceof LazyIterable) { + objects.assertSame(info, asLazyIterable().iterator, expected); + return myself; + } + return super.isSameAs(expected); + } + + @Override + public IterableAssert<T> isNotSameAs(Object expected) { + if (actual instanceof LazyIterable) { + objects.assertNotSame(info, asLazyIterable().iterator, expected); + return myself; + } + return super.isNotSameAs(expected); + } @SuppressWarnings("rawtypes") private LazyIterable asLazyIterable() {
diff --git a/src/test/java/org/assertj/core/api/Assertions_assertThat_with_Iterator_Test.java b/src/test/java/org/assertj/core/api/Assertions_assertThat_with_Iterator_Test.java --- a/src/test/java/org/assertj/core/api/Assertions_assertThat_with_Iterator_Test.java +++ b/src/test/java/org/assertj/core/api/Assertions_assertThat_with_Iterator_Test.java @@ -14,6 +14,7 @@ import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; import static org.assertj.core.util.Sets.newLinkedHashSet; import static org.mockito.Mockito.mock; @@ -128,6 +129,25 @@ public void isNotOfAnyClassIn_should_check_the_original_iterator() { assertThat(stringIterator).isNotOfAnyClassIn(LazyIterable.class, String.class); } + @Test + public void isSameAs_should_check_the_original_iterator_without_consuming_it() { + Iterator<?> iterator = mock(Iterator.class); + assertThat(iterator).isSameAs(iterator); + verifyZeroInteractions(iterator); + } + + @Test + public void isNotSameAs_should_check_the_original_iterator_without_consuming_it() { + Iterator<?> iterator = mock(Iterator.class); + try{ + assertThat(iterator).isNotSameAs(iterator); + } catch(AssertionError e){ + verifyZeroInteractions(iterator); + return; + } + Assertions.fail("Expected assertionError, because assert notSame on same iterator."); + } + @Test public void iterator_can_be_asserted_twice_even_though_it_can_be_iterated_only_once() { Iterator<String> names = asList("Luke", "Leia").iterator();
IterableAssert should override isSameAs and isNotSameAs to unroll the actual instance. #### Summary IterableAssert does not override the methods to unroll the actual instance. #### Real life use case or example ``` @Test public void testIterator() { Iterator<String> iterator = new ArrayList<String>().iterator(); assertThat(iterator).isSameAs(iterator); } ``` leads to: java.lang.AssertionError: Expecting: <java.util.ArrayList$Itr@a4300e> and actual: <[]> to refer to the same object at TestIterator.testIterator(TestIterator.java:xx) #### Java 8 specific ? NO The PR addressing this issue, if any, must be based on the `master` branch if the issue is Java 8 specific; otherwise use the `2.x` branch.
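Based on the tests added above, a hedged sketch of the behaviour once the override is in place: the identity check is performed against the original iterator wrapped by the internal `LazyIterable`, so the assertion passes and the iterator is not consumed.

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.util.ArrayList;
import java.util.Iterator;

class IteratorSameAsSketch {

  void passesAfterFix() {
    Iterator<String> iterator = new ArrayList<String>().iterator();
    // with the fix, isSameAs compares against the underlying iterator, not the wrapping Iterable
    assertThat(iterator).isSameAs(iterator);
  }
}
```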
2016-05-02T13:04:13Z
3.4
assertj/assertj
54
assertj__assertj-54
[ "26" ]
0c554a37b677ec6fe2744d683c4f235d090dbb29
diff --git a/src/main/java/org/assertj/core/api/AbstractIterableAssert.java b/src/main/java/org/assertj/core/api/AbstractIterableAssert.java --- a/src/main/java/org/assertj/core/api/AbstractIterableAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractIterableAssert.java @@ -126,12 +126,37 @@ public S containsSequence(T... sequence) { return myself; } - /** {@inheritDoc} */ public S doesNotContain(T... values) { iterables.assertDoesNotContain(info, actual, values); return myself; } + /** + * Verifies that the actual group does not contain the given values. + * + * <pre> + * // this assertion succeed : + * List&lt;String&gt; actual = new ArrayList(&quot;GIT&quot;, &quot;CSV&quot;, &quot;SOURCESAFE&quot;); + * List&lt;String&gt; values = new ArrayList(&quot;git&quot;, &quot;csv&quot;, &quot;subversion&quot;); + * assertThat(actual).doesNotContainAnyElementsOf(values); + * + * // This one failed : + * List&lt;String&gt; actual = new ArrayList(&quot;GIT&quot;, &quot;csv&quot;, &quot;SOURCESAFE&quot;); + * List&lt;String&gt; values = new ArrayList(&quot;git&quot;, &quot;csv&quot;, &quot;subversion&quot;); + * assertThat(actual).doesNotContainAnyElementsOf(values); + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws IllegalArgumentException if the given argument is an empty iterable. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group contains any of the given values. + */ + public S doesNotContainAnyElementsOf(Iterable<? extends T> iterable) { + iterables.assertDoesNotContainAnyElementsOf(info, actual, iterable); + return myself; + } + /** {@inheritDoc} */ public S doesNotHaveDuplicates() { iterables.assertDoesNotHaveDuplicates(info, actual); diff --git a/src/main/java/org/assertj/core/internal/CommonErrors.java b/src/main/java/org/assertj/core/internal/CommonErrors.java --- a/src/main/java/org/assertj/core/internal/CommonErrors.java +++ b/src/main/java/org/assertj/core/internal/CommonErrors.java @@ -1,14 +1,14 @@ /* * Created on Nov 24, 2010 * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the - * License. You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" - * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language - * governing permissions and limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. * * Copyright @2010-2011 the original author or authors. */ @@ -16,7 +16,8 @@ /** * @author Alex Ruiz - * TODO : merge/use with ErrorMessages ! + * + * TODO : merge/use with ErrorMessages ! 
*/ public final class CommonErrors { @@ -28,9 +29,18 @@ static NullPointerException iterableToLookForIsNull() { return new NullPointerException("The iterable to look for should not be null"); } + static NullPointerException iterableOfValuesForIsNull() { + return new NullPointerException("The iterable of values to look for should not be null"); + } + static IllegalArgumentException arrayOfValuesToLookForIsEmpty() { return new IllegalArgumentException("The array of values to look for should not be empty"); } - private CommonErrors() {} + static IllegalArgumentException iterableOfValuesToLookForIsEmpty() { + return new IllegalArgumentException("The iterable of values to look for should not be empty"); + } + + private CommonErrors() { + } } diff --git a/src/main/java/org/assertj/core/internal/CommonValidations.java b/src/main/java/org/assertj/core/internal/CommonValidations.java --- a/src/main/java/org/assertj/core/internal/CommonValidations.java +++ b/src/main/java/org/assertj/core/internal/CommonValidations.java @@ -16,6 +16,8 @@ import static org.assertj.core.internal.CommonErrors.arrayOfValuesToLookForIsEmpty; import static org.assertj.core.internal.CommonErrors.arrayOfValuesToLookForIsNull; +import static org.assertj.core.internal.CommonErrors.iterableOfValuesForIsNull; +import static org.assertj.core.internal.CommonErrors.iterableOfValuesToLookForIsEmpty; import org.assertj.core.data.Index; import org.assertj.core.data.Offset; @@ -49,15 +51,30 @@ static void checkIsNotEmpty(Object[] values) { } } + static void checkIsNotEmpty(Iterable<?> iterable) { + if (!iterable.iterator().hasNext()) { + throw iterableOfValuesToLookForIsEmpty(); + } + } + static void checkIsNotNull(Object[] values) { if (values == null) throw arrayOfValuesToLookForIsNull(); } + static void checkIsNotNull(Iterable<?> iterable) { + if (iterable == null) throw iterableOfValuesForIsNull(); + } + static void checkIsNotNullAndNotEmpty(Object[] values) { checkIsNotNull(values); checkIsNotEmpty(values); } + static void checkIsNotNullAndNotEmpty(Iterable<?> iterable) { + checkIsNotNull(iterable); + checkIsNotEmpty(iterable); + } + static void failIfEmptySinceActualIsNotEmpty(Object[] values) { if (values.length == 0) { throw new AssertionError("actual is not empty"); diff --git a/src/main/java/org/assertj/core/internal/Iterables.java b/src/main/java/org/assertj/core/internal/Iterables.java --- a/src/main/java/org/assertj/core/internal/Iterables.java +++ b/src/main/java/org/assertj/core/internal/Iterables.java @@ -460,6 +460,23 @@ public void assertDoesNotContain(AssertionInfo info, Iterable<?> actual, Object[ throw failures.failure(info, shouldNotContain(actual, values, found, comparisonStrategy)); } + /** + * Asserts that the given {@code Iterable} does not contain the given values. + * + * @param info contains information about the assertion. + * @param actual the given {@code Iterable}. + * @param values the values that are expected not to be in the given {@code Iterable}. + * @throws NullPointerException if the array of values is {@code null}. + * @throws IllegalArgumentException if the array of values is empty. + * @throws AssertionError if the given {@code Iterable} is {@code null}. + * @throws AssertionError if the given {@code Iterable} contains any of given values. + */ + public <T> void assertDoesNotContainAnyElementsOf(AssertionInfo info, Iterable<T> actual, Iterable<? 
extends T> iterable) { + checkIsNotNullAndNotEmpty(iterable); + List<T> values = newArrayList(iterable); + assertDoesNotContain(info, actual, values.toArray()); + } + /** * Asserts that the given {@code Iterable} does not have duplicate values. *
diff --git a/src/test/java/org/assertj/core/api/iterable/IterableAssert_doesNotContainAnyElementsOf_Test.java b/src/test/java/org/assertj/core/api/iterable/IterableAssert_doesNotContainAnyElementsOf_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/iterable/IterableAssert_doesNotContainAnyElementsOf_Test.java @@ -0,0 +1,42 @@ +/* + * Created on May 7, 2013 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. + */ +package org.assertj.core.api.iterable; + +import org.assertj.core.api.AbstractIterableAssert; +import org.assertj.core.api.ConcreteIterableAssert; +import org.assertj.core.api.IterableAssertBaseTest; + +import static org.mockito.Mockito.verify; +import static org.assertj.core.util.Lists.newArrayList; + +/** + * Tests for <code>{@link AbstractIterableAssert#doesNotContainAnyElementsOf(Iterable)}</code>. + * + * @author William Delanoue + */ +public class IterableAssert_doesNotContainAnyElementsOf_Test extends IterableAssertBaseTest { + + private final Iterable<String> values = newArrayList("Yoda", "Luke"); + + @Override + protected ConcreteIterableAssert<Object> invoke_api_method() { + return assertions.doesNotContainAnyElementsOf(values); + } + + @Override + protected void verify_internal_effects() { + verify(iterables).assertDoesNotContainAnyElementsOf(getInfo(assertions), getActual(assertions), values); + } +} diff --git a/src/test/java/org/assertj/core/internal/iterables/Iterables_assertDoesNotContainAnyElementsOf_Test.java b/src/test/java/org/assertj/core/internal/iterables/Iterables_assertDoesNotContainAnyElementsOf_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/internal/iterables/Iterables_assertDoesNotContainAnyElementsOf_Test.java @@ -0,0 +1,118 @@ +/* + * Created on Oct 12, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. 
+ */ +package org.assertj.core.internal.iterables; + +import static java.util.Collections.emptyList; +import static org.assertj.core.error.ShouldNotContain.shouldNotContain; +import static org.assertj.core.test.ErrorMessages.*; +import static org.assertj.core.test.TestData.someInfo; +import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; +import static org.assertj.core.util.FailureMessages.actualIsNull; +import static org.assertj.core.util.Lists.newArrayList; +import static org.assertj.core.util.Sets.newLinkedHashSet; + +import static org.mockito.Mockito.verify; + +import java.util.Collections; +import java.util.List; + +import org.assertj.core.api.AssertionInfo; +import org.assertj.core.internal.Iterables; +import org.assertj.core.internal.IterablesBaseTest; +import org.junit.Test; + +/** + * Tests for <code>{@link Iterables#assertDoesNotContainAnyElementsOf(AssertionInfo, Iterable, Iterable)}</code>. + * + * @author Alex Ruiz + * @author Joel Costigliola + * @author William Delanoue + */ +public class Iterables_assertDoesNotContainAnyElementsOf_Test extends IterablesBaseTest { + + private static List<String> actual = newArrayList("Luke", "Yoda", "Leia");; + + @Test + public void should_pass_if_actual_does_not_contain_given_values() { + iterables.assertDoesNotContainAnyElementsOf(someInfo(), actual, newArrayList("Han")); + } + + @Test + public void should_pass_if_actual_does_not_contain_given_values_even_if_duplicated() { + iterables.assertDoesNotContainAnyElementsOf(someInfo(), actual, newArrayList("Han", "Han", "Anakin")); + } + + @Test + public void should_throw_error_if_array_of_values_to_look_for_is_empty() { + thrown.expectIllegalArgumentException(iterableValuesToLookForIsEmpty()); + iterables.assertDoesNotContainAnyElementsOf(someInfo(), actual, Collections.<String>emptyList()); + } + + @Test + public void should_throw_error_if_array_of_values_to_look_for_is_null() { + thrown.expectNullPointerException(iterableValuesToLookForIsNull()); + iterables.assertDoesNotContainAnyElementsOf(someInfo(), emptyList(), null); + } + + @Test + public void should_fail_if_actual_is_null() { + thrown.expectAssertionError(actualIsNull()); + iterables.assertDoesNotContainAnyElementsOf(someInfo(), null, newArrayList("Yoda")); + } + + @Test + public void should_fail_if_actual_contains_given_values() { + AssertionInfo info = someInfo(); + List<String> expected = newArrayList("Luke", "Yoda", "Han"); + try { + iterables.assertDoesNotContainAnyElementsOf(info, actual, expected); + } catch (AssertionError e) { + verify(failures).failure(info, shouldNotContain(actual, expected.toArray(), newLinkedHashSet("Luke", "Yoda"))); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + + // ------------------------------------------------------------------------------------------------------------------ + // tests using a custom comparison strategy + // ------------------------------------------------------------------------------------------------------------------ + + @Test + public void should_pass_if_actual_does_not_contain_given_values_according_to_custom_comparison_strategy() { + iterablesWithCaseInsensitiveComparisonStrategy.assertDoesNotContainAnyElementsOf(someInfo(), actual, + newArrayList("Han")); + } + + @Test + public void should_pass_if_actual_does_not_contain_given_values_even_if_duplicated_according_to_custom_comparison_strategy() { + iterablesWithCaseInsensitiveComparisonStrategy.assertDoesNotContainAnyElementsOf(someInfo(), actual, + 
newArrayList("Han", "Han", "Anakin")); + } + + @Test + public void should_fail_if_actual_contains_given_values_according_to_custom_comparison_strategy() { + AssertionInfo info = someInfo(); + List<String> expected = newArrayList("LuKe", "YODA", "Han"); + try { + iterablesWithCaseInsensitiveComparisonStrategy.assertDoesNotContainAnyElementsOf(info, actual, expected); + } catch (AssertionError e) { + verify(failures).failure(info, + shouldNotContain(actual, expected.toArray(), newLinkedHashSet("LuKe", "YODA"), comparisonStrategy)); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + +} diff --git a/src/test/java/org/assertj/core/test/ErrorMessages.java b/src/test/java/org/assertj/core/test/ErrorMessages.java --- a/src/test/java/org/assertj/core/test/ErrorMessages.java +++ b/src/test/java/org/assertj/core/test/ErrorMessages.java @@ -84,6 +84,14 @@ public static String valuesToLookForIsNull() { return "The array of values to look for should not be null"; } + public static String iterableValuesToLookForIsEmpty() { + return "The iterable of values to look for should not be empty"; + } + + public static String iterableValuesToLookForIsNull() { + return "The iterable of values to look for should not be null"; + } + public static String dateToCompareActualWithIsNull() { return "The date to compare actual with should not be null"; }
New Iterable assertion : doesNotContainAnyElementsOf(Iterable) Exact signature : ``` java public S doesNotContainAnyElementsOf(Iterable<? extends T> iterable) ```
What is the difference with this function: ``` java /** * Verifies that the actual group does not contain the given values. * * @param values the given values. * @return {@code this} assertion object. * @throws NullPointerException if the given argument is {@code null}. * @throws IllegalArgumentException if the given argument is an empty array. * @throws AssertionError if the actual group is {@code null}. * @throws AssertionError if the actual group contains any of the given values. */ S doesNotContain(T... values); ``` (why another name if the function does the same thing?) The assertion is almost the same, it is just handy for users not to have to convert the Iterable to an array. I think this is valuable though because it avoids users having to write boilerplate code. Yes, I understand the need, but why change the name? Well, it can be confusing: when you call `doesNotContain(Iterable)`, does it mean you assert that the given Iterable is not present, or that its elements are not? With `doesNotContainAnyElementsOf` I think there is no ambiguity.
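A hedged usage sketch adapted from the Javadoc example in the patch above (default case-sensitive comparison, hence the lower-case values do not match the upper-case ones):

```java
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.util.Lists.newArrayList;

import java.util.List;

class DoesNotContainAnyElementsOfSketch {

  void usage() {
    List<String> actual = newArrayList("GIT", "CSV", "SOURCESAFE");
    // passes: none of the given lower-case values is contained in actual
    assertThat(actual).doesNotContainAnyElementsOf(newArrayList("git", "csv", "subversion"));
    // would fail: "csv" is contained in the actual list below
    // assertThat(newArrayList("GIT", "csv", "SOURCESAFE")).doesNotContainAnyElementsOf(newArrayList("git", "csv", "subversion"));
  }
}
```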
2013-05-06T23:25:52Z
1.1
assertj/assertj
73
assertj__assertj-73
[ "18" ]
57bd41463aaa76a86e47090a91ca684e9abb90bc
diff --git a/src/main/java/org/assertj/core/api/BooleanArrayAssert.java b/src/main/java/org/assertj/core/api/BooleanArrayAssert.java --- a/src/main/java/org/assertj/core/api/BooleanArrayAssert.java +++ b/src/main/java/org/assertj/core/api/BooleanArrayAssert.java @@ -239,4 +239,32 @@ public final BooleanArrayAssert usingElementComparator(Comparator<? super Boolea public final BooleanArrayAssert usingDefaultElementComparator() { throw new UnsupportedOperationException("custom element Comparator is not supported for Boolean array comparison"); } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * boolean[] booleans = { true, false }; + * + * // assertion will pass + * assertThat(booleans).containsExactly(true, false); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(booleans).containsExactly(false, true); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public BooleanArrayAssert containsExactly(boolean... values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/ByteArrayAssert.java b/src/main/java/org/assertj/core/api/ByteArrayAssert.java --- a/src/main/java/org/assertj/core/api/ByteArrayAssert.java +++ b/src/main/java/org/assertj/core/api/ByteArrayAssert.java @@ -229,4 +229,32 @@ public ByteArrayAssert usingDefaultElementComparator() { this.arrays = ByteArrays.instance(); return myself; } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * byte[] bytes = { 1, 2, 3 }; + * + * // assertion will pass + * assertThat(bytes).containsExactly((byte) 1, (byte) 2, (byte) 3); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(bytes).containsExactly((byte) 2, (byte) 1, (byte) 3); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public ByteArrayAssert containsExactly(byte... values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/CharArrayAssert.java b/src/main/java/org/assertj/core/api/CharArrayAssert.java --- a/src/main/java/org/assertj/core/api/CharArrayAssert.java +++ b/src/main/java/org/assertj/core/api/CharArrayAssert.java @@ -229,4 +229,32 @@ public CharArrayAssert usingDefaultElementComparator() { this.arrays = CharArrays.instance(); return myself; } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. 
+ * <p> + * Example : + * + * <pre> + * char[] chars = { 'a', 'b', 'c' }; + * + * // assertion will pass + * assertThat(chars).containsExactly('a', 'b', 'c'); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(chars).containsExactly('b', 'a', 'c'); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public CharArrayAssert containsExactly(char... values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/DoubleArrayAssert.java b/src/main/java/org/assertj/core/api/DoubleArrayAssert.java --- a/src/main/java/org/assertj/core/api/DoubleArrayAssert.java +++ b/src/main/java/org/assertj/core/api/DoubleArrayAssert.java @@ -230,4 +230,31 @@ public DoubleArrayAssert usingDefaultElementComparator() { return myself; } + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * double[] doubles = { 1.0, 2.0, 3.0 }; + * + * // assertion will pass + * assertThat(doubles).containsExactly(1.0, 2.0, 3.0); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(doubles).containsExactly(2.0, 1.0, 3.0); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public DoubleArrayAssert containsExactly(double... values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/FloatArrayAssert.java b/src/main/java/org/assertj/core/api/FloatArrayAssert.java --- a/src/main/java/org/assertj/core/api/FloatArrayAssert.java +++ b/src/main/java/org/assertj/core/api/FloatArrayAssert.java @@ -229,4 +229,32 @@ public FloatArrayAssert usingDefaultElementComparator() { this.arrays = FloatArrays.instance(); return myself; } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * float[] floats = { 1.0f, 2.0f, 3.0f }; + * + * // assertion will pass + * assertThat(floats).containsExactly(1.0f, 2.0f, 3.0f); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(floats).containsExactly(2.0f, 1.0f, 3.0f); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. 
the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public FloatArrayAssert containsExactly(float... values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/IntArrayAssert.java b/src/main/java/org/assertj/core/api/IntArrayAssert.java --- a/src/main/java/org/assertj/core/api/IntArrayAssert.java +++ b/src/main/java/org/assertj/core/api/IntArrayAssert.java @@ -229,4 +229,32 @@ public IntArrayAssert usingDefaultElementComparator() { this.arrays = IntArrays.instance(); return myself; } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * int[] ints = { 1, 2, 3 }; + * + * // assertion will pass + * assertThat(ints).containsExactly(1, 2, 3); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(ints).containsExactly(2, 1, 3); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public IntArrayAssert containsExactly(int... values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/LongArrayAssert.java b/src/main/java/org/assertj/core/api/LongArrayAssert.java --- a/src/main/java/org/assertj/core/api/LongArrayAssert.java +++ b/src/main/java/org/assertj/core/api/LongArrayAssert.java @@ -229,4 +229,32 @@ public LongArrayAssert usingDefaultElementComparator() { this.arrays = LongArrays.instance(); return myself; } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * long[] longs = { 1L, 2L, 3L }; + * + * // assertion will pass + * assertThat(longs).containsExactly(1L, 2L, 3L); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(longs).containsExactly(2L, 1L, 3L); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public LongArrayAssert containsExactly(long... 
values) { + objects.assertEqual(info, actual, values); + return this; + } } diff --git a/src/main/java/org/assertj/core/api/ShortArrayAssert.java b/src/main/java/org/assertj/core/api/ShortArrayAssert.java --- a/src/main/java/org/assertj/core/api/ShortArrayAssert.java +++ b/src/main/java/org/assertj/core/api/ShortArrayAssert.java @@ -229,4 +229,32 @@ public ShortArrayAssert usingDefaultElementComparator() { this.arrays = ShortArrays.instance(); return myself; } + + /** + * Verifies that the actual group contains only the given values and nothing else, <b>in order</b>. + * <p> + * Example : + * + * <pre> + * short[] shorts = { 1, 2, 3 }; + * + * // assertion will pass + * assertThat(shorts).containsExactly((short) 1, (short) 2, (short) 3); + * + * // assertion will fail as actual and expected orders differ. + * assertThat(shorts).containsExactly((short) 2, (short) 1, (short) 3); + * </pre> + * + * @param values the given values. + * @return {@code this} assertion object. + * @throws NullPointerException if the given argument is {@code null}. + * @throws AssertionError if the actual group is {@code null}. + * @throws AssertionError if the actual group does not contain the given values with same order, i.e. the actual group + * contains some or none of the given values, or the actual group contains more values than the given ones + * or values are the same but the order is not. + */ + public ShortArrayAssert containsExactly(short... values) { + objects.assertEqual(info, actual, values); + return this; + } }
diff --git a/src/test/java/org/assertj/core/api/booleanarray/BooleanArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/booleanarray/BooleanArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/booleanarray/BooleanArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.booleanarray; + +import static org.assertj.core.test.BooleanArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.BooleanArrayAssert; +import org.assertj.core.api.BooleanArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.BooleanArrayAssert#containsExactly(boolean...)}</code>. + * + * @author Jean-Christophe Gay + */ +public class BooleanArrayAssert_containsExactly_Test extends BooleanArrayAssertBaseTest { + + @Override + protected BooleanArrayAssert invoke_api_method() { + return assertions.containsExactly(true, false); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(true, false)); + } +} diff --git a/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.bytearray; + +import static org.assertj.core.test.ByteArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.ByteArrayAssert; +import org.assertj.core.api.ByteArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.ByteArrayAssert#containsExactly(byte...)}</code>. + * + * @author Jean-Christophe Gay + */ +public class ByteArrayAssert_containsExactly_Test extends ByteArrayAssertBaseTest { + + @Override + protected ByteArrayAssert invoke_api_method() { + return assertions.containsExactly((byte) 1, (byte) 2); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(1, 2)); + } +} diff --git a/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/chararray/CharArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.chararray; + +import static org.assertj.core.test.CharArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.CharArrayAssert; +import org.assertj.core.api.CharArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.CharArrayAssert#containsExactly(char...)}</code>. 
+ * + * @author Jean-Christophe Gay + */ +public class CharArrayAssert_containsExactly_Test extends CharArrayAssertBaseTest { + + @Override + protected CharArrayAssert invoke_api_method() { + return assertions.containsExactly('a', 'b'); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf('a', 'b')); + } +} diff --git a/src/test/java/org/assertj/core/api/doublearray/DoubleArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/doublearray/DoubleArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/doublearray/DoubleArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.doublearray; + +import static org.assertj.core.test.DoubleArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.DoubleArrayAssert; +import org.assertj.core.api.DoubleArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.DoubleArrayAssert#containsExactly(double...)}</code>. + * + * @author Jean-Christophe Gay + */ +public class DoubleArrayAssert_containsExactly_Test extends DoubleArrayAssertBaseTest { + + @Override + protected DoubleArrayAssert invoke_api_method() { + return assertions.containsExactly(1d, 2d); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(1d, 2d)); + } +} diff --git a/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.floatarray; + +import static org.assertj.core.test.FloatArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.FloatArrayAssert; +import org.assertj.core.api.FloatArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.FloatArrayAssert#containsExactly(float...)}</code>. + * + * @author Jean-Christophe Gay + */ +public class FloatArrayAssert_containsExactly_Test extends FloatArrayAssertBaseTest { + + @Override + protected FloatArrayAssert invoke_api_method() { + return assertions.containsExactly(1.0f, 2.0f); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(1.0f, 2.0f)); + } +} diff --git a/src/test/java/org/assertj/core/api/intarray/IntArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/intarray/IntArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/intarray/IntArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.intarray; + +import static org.assertj.core.test.IntArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.IntArrayAssert; +import org.assertj.core.api.IntArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.IntArrayAssert#containsExactly(int...)}</code>. 
+ * + * @author Jean-Christophe Gay + */ +public class IntArrayAssert_containsExactly_Test extends IntArrayAssertBaseTest { + + @Override + protected IntArrayAssert invoke_api_method() { + return assertions.containsExactly(1, 2); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(1, 2)); + } +} diff --git a/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/longarray/LongArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.longarray; + +import static org.assertj.core.test.LongArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.LongArrayAssert; +import org.assertj.core.api.LongArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.LongArrayAssert#containsExactly(long...)}</code>. + * + * @author Jean-Christophe Gay + */ +public class LongArrayAssert_containsExactly_Test extends LongArrayAssertBaseTest { + + @Override + protected LongArrayAssert invoke_api_method() { + return assertions.containsExactly(1L, 2L); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(1L, 2L)); + } +} diff --git a/src/test/java/org/assertj/core/api/shortarray/ShortArrayAssert_containsExactly_Test.java b/src/test/java/org/assertj/core/api/shortarray/ShortArrayAssert_containsExactly_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/shortarray/ShortArrayAssert_containsExactly_Test.java @@ -0,0 +1,25 @@ +package org.assertj.core.api.shortarray; + +import static org.assertj.core.test.ShortArrays.arrayOf; +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.ShortArrayAssert; +import org.assertj.core.api.ShortArrayAssertBaseTest; + +/** + * Tests for <code>{@link org.assertj.core.api.ShortArrayAssert#containsExactly(short...)}</code>. + * + * @author Jean-Christophe Gay + */ +public class ShortArrayAssert_containsExactly_Test extends ShortArrayAssertBaseTest { + + @Override + protected ShortArrayAssert invoke_api_method() { + return assertions.containsExactly((short) 1, (short) 2); + } + + @Override + protected void verify_internal_effects() { + verify(objects).assertEqual(getInfo(assertions), getActual(assertions), arrayOf(1, 2)); + } +}
XXXArrayAssert : add containsExactly(XXX... elements) It would be handy to have ``` java int[] myArray = ... assertThat(myArray).containsExactly(1, 4, 7); ```
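A minimal, self-contained sketch of the requested API in use (the array contents here are illustrative; the semantics follow the `containsExactly` javadoc in the patch above — exactly these values, in this order):

``` java
import static org.assertj.core.api.Assertions.assertThat;

public class ContainsExactlyExample {
  public static void main(String[] args) {
    int[] myArray = { 1, 4, 7 };

    // passes: exactly these values, in this order
    assertThat(myArray).containsExactly(1, 4, 7);

    // would fail with an AssertionError: same values but different order
    // assertThat(myArray).containsExactly(4, 1, 7);
  }
}
```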
2013-05-19T21:29:51Z
1.2
assertj/assertj
169
assertj__assertj-169
[ "151", "151" ]
eee3ad92ce3ba261cd99435254b0c8574711f5b8
diff --git a/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java b/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java --- a/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractCharSequenceAssert.java @@ -143,6 +143,31 @@ public S hasSize(int expected) { return myself; } + /** + * Verifies that the actual {@code CharSequence} has a length that's the same as the length of the given + * {@code CharSequence}. + * <p> + * Examples : + * + * <pre> + * // assertion will pass + * assertThat(&quot;C-3PO&quot;).hasSameSizeAs(&quot;R2-D2&quot;); + * + * // assertion will fail as actual and expected sizes differ + * assertThat(&quot;C-3PO&quot;).hasSameSizeAs(&quot;B1 battle droid&quot;); + * </pre> + * + * @param other the given {@code CharSequence} to be used for size comparison. + * @return {@code this} assertion object. + * @throws AssertionError if the actual {@code CharSequence} has a length that's different from the length of the + * given {@code CharSequence}. + * @throws NullPointerException if the given {@code CharSequence} is {@code null}. + */ + public S hasSameSizeAs(CharSequence other) { + strings.assertHasSize(info, actual, other.length()); + return myself; + } + /** * Verifies that the actual {@code CharSequence} has a length that's the same as the number of elements in the given * array. diff --git a/src/main/java/org/assertj/core/api/EnumerableAssert.java b/src/main/java/org/assertj/core/api/EnumerableAssert.java --- a/src/main/java/org/assertj/core/api/EnumerableAssert.java +++ b/src/main/java/org/assertj/core/api/EnumerableAssert.java @@ -33,7 +33,7 @@ public interface EnumerableAssert<S extends EnumerableAssert<S, E>, E> { * Verifies that the actual group of values is {@code null} or empty. * @throws AssertionError if the actual group of values is not {@code null} or not empty. */ - void isNullOrEmpty() throws AssertionError; + void isNullOrEmpty(); /** * Verifies that the actual group of values is empty. 
diff --git a/src/main/java/org/assertj/core/description/TextDescription.java b/src/main/java/org/assertj/core/description/TextDescription.java --- a/src/main/java/org/assertj/core/description/TextDescription.java +++ b/src/main/java/org/assertj/core/description/TextDescription.java @@ -14,7 +14,6 @@ */ package org.assertj.core.description; -import java.util.regex.Pattern; import org.assertj.core.util.Arrays; import static org.assertj.core.util.Objects.HASH_CODE_PRIME; import static org.assertj.core.util.Objects.areEqual; @@ -57,7 +56,7 @@ public String value() { @Override public int hashCode() { - return HASH_CODE_PRIME * 1 + hashCodeFor(value) + hashCodeFor(args); + return HASH_CODE_PRIME + hashCodeFor(value) + hashCodeFor(args); } @Override diff --git a/src/main/java/org/assertj/core/internal/Dates.java b/src/main/java/org/assertj/core/internal/Dates.java --- a/src/main/java/org/assertj/core/internal/Dates.java +++ b/src/main/java/org/assertj/core/internal/Dates.java @@ -1,6 +1,5 @@ package org.assertj.core.internal; -import java.util.concurrent.TimeUnit; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; @@ -20,7 +19,6 @@ import static org.assertj.core.error.ShouldBeInTheFuture.shouldBeInTheFuture; import static org.assertj.core.error.ShouldBeInThePast.shouldBeInThePast; import static org.assertj.core.error.ShouldBeToday.shouldBeToday; -import static org.assertj.core.error.ShouldBeEqual.shouldBeEqual; import static org.assertj.core.error.ShouldBeWithin.shouldBeWithin; import static org.assertj.core.error.ShouldHaveTime.shouldHaveTime; import static org.assertj.core.error.ShouldNotBeBetween.shouldNotBeBetween; @@ -214,8 +212,7 @@ private boolean actualIsBetweenGivenPeriod(AssertionInfo info, Date actual, Date boolean checkLowerBoundaryPeriod = inclusiveStart ? isAfterOrEqualTo(actual, start) : isAfter(actual, start); boolean checkUpperBoundaryPeriod = inclusiveEnd ? isBeforeOrEqualTo(actual, end) : isBefore(actual, end); boolean isBetweenGivenPeriod = checkLowerBoundaryPeriod && checkUpperBoundaryPeriod; - if (isBetweenGivenPeriod) return true; - return false; + return isBetweenGivenPeriod; } /** diff --git a/src/main/java/org/assertj/core/internal/Iterables.java b/src/main/java/org/assertj/core/internal/Iterables.java --- a/src/main/java/org/assertj/core/internal/Iterables.java +++ b/src/main/java/org/assertj/core/internal/Iterables.java @@ -773,10 +773,7 @@ public <E> void assertAreAtLeast(AssertionInfo info, Iterable<? extends E> actua private <E> boolean conditionIsSatisfiedAtLeastNTimes(Iterable<? extends E> actual, int n, Condition<? super E> condition) { List<E> satisfiesCondition = satisfiesCondition(actual, condition); - if (satisfiesCondition.size() >= n) { - return true; - } - return false; + return satisfiesCondition.size() >= n; } /** @@ -837,10 +834,7 @@ public <E> void assertAreExactly(AssertionInfo info, Iterable<? extends E> actua private <E> boolean conditionIsSatisfiedNTimes(Iterable<? extends E> actual, Condition<? super E> condition, int times) { List<E> satisfiesCondition = satisfiesCondition(actual, condition); - if (satisfiesCondition.size() == times) { - return true; - } - return false; + return satisfiesCondition.size() == times; } /**
diff --git a/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_hasSameSizeAs_with_CharSequence_Test.java b/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_hasSameSizeAs_with_CharSequence_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_hasSameSizeAs_with_CharSequence_Test.java @@ -0,0 +1,43 @@ +/* + * Created on Feb 2, 2014 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * Copyright @2010-2014 the original author or authors. + */ +package org.assertj.core.api.charsequence; + +import static org.mockito.Mockito.verify; + +import org.assertj.core.api.CharSequenceAssert; +import org.assertj.core.api.CharSequenceAssertBaseTest; +import org.junit.BeforeClass; + +/** + * Tests for <code>{@link org.assertj.core.api.CharSequenceAssert#hasSameSizeAs(CharSequence)}</code>. + */ +public class CharSequenceAssert_hasSameSizeAs_with_CharSequence_Test extends CharSequenceAssertBaseTest { + private static CharSequence other; + + @BeforeClass + public static void setUpOnce() { + other = "Luke"; + } + + @Override + protected CharSequenceAssert invoke_api_method() { + return assertions.hasSameSizeAs(other); + } + + @Override + protected void verify_internal_effects() { + verify(strings).assertHasSize(getInfo(assertions), getActual(assertions), other.length()); + } +}
String .hasSameSizeAs(...) should accept other String We should extend this API to Strings for code like this ``` Assertions.assertThat("sss").hasSameSizeAs("dd"); ```
I would like to contribute, can I? Yes that would be great ! Follow the [contributing guide](https://github.com/joel-costigliola/assertj-core/blob/master/CONTRIBUTING.md) and don't hesitate if you have questions ... Thanks ! Probably this API should accept `CharSequence` interface to allow to pass not only strings but also `StringBuilder`, `StringBuffer` and other types. Good point ! The assertion must be added to `AbstractCharSequenceAssert`
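A hedged usage sketch of the assertion discussed above, assuming the `hasSameSizeAs(CharSequence)` overload added in the patch; because the parameter is a `CharSequence`, a `StringBuilder` or `StringBuffer` works as well:

``` java
import static org.assertj.core.api.Assertions.assertThat;

public class HasSameSizeAsExample {
  public static void main(String[] args) {
    // passes: both strings have length 5
    assertThat("C-3PO").hasSameSizeAs("R2-D2");

    // passes: any CharSequence of the same length is accepted
    assertThat("sss").hasSameSizeAs(new StringBuilder("abc"));

    // would fail with an AssertionError: lengths differ (3 vs 2)
    // assertThat("sss").hasSameSizeAs("dd");
  }
}
```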
2014-02-02T15:44:15Z
1.5
assertj/assertj
225
assertj__assertj-225
[ "217" ]
b7c1bc2dc62e4b419f1dd6d896395c7b8d204232
diff --git a/src/main/java/org/assertj/core/internal/Diff.java b/src/main/java/org/assertj/core/internal/Diff.java --- a/src/main/java/org/assertj/core/internal/Diff.java +++ b/src/main/java/org/assertj/core/internal/Diff.java @@ -109,7 +109,7 @@ private BufferedReader readerFor(String string) { private List<String> diff(BufferedReader actual, BufferedReader expected) throws IOException { List<String> diffs = new ArrayList<String>(); - int lineNumber = 0; + int lineNumber = 1; while (true) { String actualLine = actual.readLine(); String expectedLine = expected.readLine();
diff --git a/src/test/java/org/assertj/core/api/SoftAssertionsTest.java b/src/test/java/org/assertj/core/api/SoftAssertionsTest.java --- a/src/test/java/org/assertj/core/api/SoftAssertionsTest.java +++ b/src/test/java/org/assertj/core/api/SoftAssertionsTest.java @@ -168,7 +168,7 @@ public String toString() { assertThat(errors.get(20)).isEqualTo("\nInputStreams do not have equal content:" + System.getProperty("line.separator") - + "line:<0>, expected:<B> but was:<A>"); + + "line:<1>, expected:<B> but was:<A>"); assertThat(errors.get(21)).isEqualTo("expected:<2[1]> but was:<2[0]>"); assertThat(errors.get(22)).isEqualTo("expected:<2[3]> but was:<2[2]>"); diff --git a/src/test/java/org/assertj/core/internal/files/Diff_diff_File_String_Test.java b/src/test/java/org/assertj/core/internal/files/Diff_diff_File_String_Test.java --- a/src/test/java/org/assertj/core/internal/files/Diff_diff_File_String_Test.java +++ b/src/test/java/org/assertj/core/internal/files/Diff_diff_File_String_Test.java @@ -73,7 +73,7 @@ public void should_return_diffs_if_file_and_string_do_not_have_equal_content() t String expected = "Touché"; List<String> diffs = diff.diff(actual, expected, ISO_8859_1); assertEquals(1, diffs.size()); - assertEquals("line:<0>, expected:<Touché> but was:<Touché>", diffs.get(0)); + assertEquals("line:<1>, expected:<Touché> but was:<Touché>", diffs.get(0)); } @Test @@ -83,7 +83,7 @@ public void should_return_diffs_if_content_of_actual_is_shorter_than_content_of_ List<String> diffs = diff.diff(actual, expected, Charset.defaultCharset()); System.out.println(diffs); assertEquals(1, diffs.size()); - assertEquals("line:<1>, expected:<line_1> but was:<EOF>", diffs.get(0)); + assertEquals("line:<2>, expected:<line_1> but was:<EOF>", diffs.get(0)); } @Test @@ -92,6 +92,6 @@ public void should_return_diffs_if_content_of_actual_is_longer_than_content_of_e String expected = "line_0"; List<String> diffs = diff.diff(actual, expected, Charset.defaultCharset()); assertEquals(1, diffs.size()); - assertEquals("line:<1>, expected:<EOF> but was:<line_1>", diffs.get(0)); + assertEquals("line:<2>, expected:<EOF> but was:<line_1>", diffs.get(0)); } } diff --git a/src/test/java/org/assertj/core/internal/files/Diff_diff_File_Test.java b/src/test/java/org/assertj/core/internal/files/Diff_diff_File_Test.java --- a/src/test/java/org/assertj/core/internal/files/Diff_diff_File_Test.java +++ b/src/test/java/org/assertj/core/internal/files/Diff_diff_File_Test.java @@ -74,8 +74,8 @@ public void should_return_diffs_if_files_do_not_have_equal_content() throws IOEx writer.write(expected, "line0", "line1"); List<String> diffs = diff.diff(actual, expected); assertEquals(2, diffs.size()); - assertEquals("line:<0>, expected:<line0> but was:<line_0>", diffs.get(0)); - assertEquals("line:<1>, expected:<line1> but was:<line_1>", diffs.get(1)); + assertEquals("line:<1>, expected:<line0> but was:<line_0>", diffs.get(0)); + assertEquals("line:<2>, expected:<line1> but was:<line_1>", diffs.get(1)); } @Test @@ -84,7 +84,7 @@ public void should_return_diffs_if_content_of_actual_is_shorter_than_content_of_ writer.write(expected, "line_0", "line_1"); List<String> diffs = diff.diff(actual, expected); assertEquals(1, diffs.size()); - assertEquals("line:<1>, expected:<line_1> but was:<EOF>", diffs.get(0)); + assertEquals("line:<2>, expected:<line_1> but was:<EOF>", diffs.get(0)); } @Test @@ -93,6 +93,6 @@ public void should_return_diffs_if_content_of_actual_is_longer_than_content_of_e writer.write(expected, "line_0"); List<String> diffs = 
diff.diff(actual, expected); assertEquals(1, diffs.size()); - assertEquals("line:<1>, expected:<EOF> but was:<line_1>", diffs.get(0)); + assertEquals("line:<2>, expected:<EOF> but was:<line_1>", diffs.get(0)); } } diff --git a/src/test/java/org/assertj/core/internal/inputstreams/Diff_diff_InputStream_Test.java b/src/test/java/org/assertj/core/internal/inputstreams/Diff_diff_InputStream_Test.java --- a/src/test/java/org/assertj/core/internal/inputstreams/Diff_diff_InputStream_Test.java +++ b/src/test/java/org/assertj/core/internal/inputstreams/Diff_diff_InputStream_Test.java @@ -67,8 +67,8 @@ public void should_return_diffs_if_inputstreams_do_not_have_equal_content() thro expected = stream("base", "line0", "line1"); List<String> diffs = diff.diff(actual, expected); assertEquals(2, diffs.size()); - assertEquals("line:<1>, expected:<line0> but was:<line_0>", diffs.get(0)); - assertEquals("line:<2>, expected:<line1> but was:<line_1>", diffs.get(1)); + assertEquals("line:<2>, expected:<line0> but was:<line_0>", diffs.get(0)); + assertEquals("line:<3>, expected:<line1> but was:<line_1>", diffs.get(1)); } @Test @@ -77,7 +77,7 @@ public void should_return_diffs_if_content_of_actual_is_shorter_than_content_of_ expected = stream("base", "line_0", "line_1"); List<String> diffs = diff.diff(actual, expected); assertEquals(1, diffs.size()); - assertEquals("line:<2>, expected:<line_1> but was:<EOF>", diffs.get(0)); + assertEquals("line:<3>, expected:<line_1> but was:<EOF>", diffs.get(0)); } @Test @@ -86,6 +86,6 @@ public void should_return_diffs_if_content_of_actual_is_longer_than_content_of_e expected = stream("base", "line_0"); List<String> diffs = diff.diff(actual, expected); assertEquals(1, diffs.size()); - assertEquals("line:<2>, expected:<EOF> but was:<line_1>", diffs.get(0)); + assertEquals("line:<3>, expected:<EOF> but was:<line_1>", diffs.get(0)); } }
Line count should start at 1 instead of 0 when comparing file content In the example below, the reported line number is not correct: the error should mention line 1 instead of line 0. ``` Caused by: java.lang.AssertionError: File: </home/joel/prog/assertj/assertj-assertions-generator/target/org/assertj/assertions/generator/data/OuterClassStaticNestedPersonAssert.java> read with charset <UTF-8> does not have the expected content: line:<0>, expected:<package org.assertj.assertions.generator.datas;> but was:<package org.assertj.assertions.generator.data;> ```
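A simplified sketch of the 1-based counting the fix introduces (this is not the actual `org.assertj.core.internal.Diff` implementation, only an illustration of the loop shown in the patch, with the counter starting at 1):

``` java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public class OneBasedDiffExample {

  // Reports mismatching lines using 1-based line numbers, as users expect from their editor.
  static List<String> diff(String actualText, String expectedText) throws IOException {
    List<String> diffs = new ArrayList<>();
    try (BufferedReader actual = new BufferedReader(new StringReader(actualText));
         BufferedReader expected = new BufferedReader(new StringReader(expectedText))) {
      int lineNumber = 1; // previously started at 0, which produced "line:<0>" for the first line
      while (true) {
        String actualLine = actual.readLine();
        String expectedLine = expected.readLine();
        if (actualLine == null && expectedLine == null) return diffs;
        if (!Objects.equals(actualLine, expectedLine)) {
          diffs.add(String.format("line:<%d>, expected:<%s> but was:<%s>", lineNumber, expectedLine, actualLine));
        }
        lineNumber++;
      }
    }
  }

  public static void main(String[] args) throws IOException {
    // prints [line:<1>, expected:<B> but was:<A>]
    System.out.println(diff("A", "B"));
  }
}
```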
2014-08-08T22:09:38Z
1.6
assertj/assertj
101
assertj__assertj-101
[ "100" ]
6fea2219f1917fa5ce6cb121bb7791bab56c3ab9
diff --git a/src/main/java/org/assertj/core/api/AbstractAssert.java b/src/main/java/org/assertj/core/api/AbstractAssert.java --- a/src/main/java/org/assertj/core/api/AbstractAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractAssert.java @@ -80,7 +80,7 @@ protected WritableAssertionInfo getWritableAssertionInfo() { * Utility method to ease write write custom assertions classes, you can use format specifiers in error message, they * will be replaced by the given arguments. * <p> - * Moreover, this method honors any description ({@link #as(String)} or overridden error message defined by the user ( + * Moreover, this method honors any description ({@link #as(String, Object...)} or overridden error message defined by the user ( * {@link #overridingErrorMessage(String, Object...)}. * <p> * Example : @@ -109,8 +109,8 @@ protected void failWithMessage(String errorMessage, Object... arguments) { /** {@inheritDoc} */ @Override - public S as(String description) { - return describedAs(description); + public S as(String description, Object... args) { + return describedAs(description, args); } /** {@inheritDoc} */ @@ -121,8 +121,8 @@ public S as(Description description) { /** {@inheritDoc} */ @Override - public S describedAs(String description) { - info.description(description); + public S describedAs(String description, Object... args) { + info.description(description, args); return myself; } @@ -301,7 +301,7 @@ public S isNotOfAnyClassIn(Class<?>... types) { } /** - * The description of this assertion set with {@link #describedAs(String)} or {@link #describedAs(Description)}. + * The description of this assertion set with {@link #describedAs(String, Object...)} or {@link #describedAs(Description)}. * * @return the description String representation of this assertion. */ diff --git a/src/main/java/org/assertj/core/api/Condition.java b/src/main/java/org/assertj/core/api/Condition.java --- a/src/main/java/org/assertj/core/api/Condition.java +++ b/src/main/java/org/assertj/core/api/Condition.java @@ -60,14 +60,14 @@ public Condition(Description description) { /** {@inheritDoc} */ @Override - public Condition<T> describedAs(String newDescription) { - return as(newDescription); + public Condition<T> describedAs(String newDescription, Object... args) { + return as(newDescription, args); } /** {@inheritDoc} */ @Override - public Condition<T> as(String newDescription) { - description = checkIsNotNull(newDescription); + public Condition<T> as(String newDescription, Object... args) { + description = checkIsNotNull(newDescription, args); return this; } diff --git a/src/main/java/org/assertj/core/api/Descriptable.java b/src/main/java/org/assertj/core/api/Descriptable.java --- a/src/main/java/org/assertj/core/api/Descriptable.java +++ b/src/main/java/org/assertj/core/api/Descriptable.java @@ -30,11 +30,12 @@ public interface Descriptable<S extends Descriptable<S>> { /** * Sets the description of this object. * @param description the new description to set. + * @param args optional parameter if description is a format String. * @return {@code this} object. * @throws NullPointerException if the description is {@code null}. - * @see #describedAs(String) + * @see #describedAs(String, Object...) */ - S as(String description); + S as(String description, Object... args); /** * Sets the description of this object. 
To remove or clear the description, pass a <code>{@link EmptyTextDescription}</code> as @@ -52,16 +53,16 @@ public interface Descriptable<S extends Descriptable<S>> { S as(Description description); /** - * Alias for <code>{@link #as(String)}</code> since "as" is a keyword in <a href="http://groovy.codehaus.org/" + * Alias for <code>{@link #as(String, Object...)}</code> since "as" is a keyword in <a href="http://groovy.codehaus.org/" * target="_blank">Groovy</a>. * @param description the new description to set. * @return {@code this} object. * @throws NullPointerException if the description is {@code null}. */ - S describedAs(String description); + S describedAs(String description, Object... args); /** - * Alias for <code>{@link #as(String)}</code> since "as" is a keyword in <a href="http://groovy.codehaus.org/" + * Alias for <code>{@link #as(String, Object...)}</code> since "as" is a keyword in <a href="http://groovy.codehaus.org/" * target="_blank">Groovy</a>. To remove or clear the description, pass a <code>{@link EmptyTextDescription}</code> as argument. * <p> * This overloaded version of "describedAs" offers more flexibility than the one taking a {@code String} by allowing users to diff --git a/src/main/java/org/assertj/core/api/DescriptionValidations.java b/src/main/java/org/assertj/core/api/DescriptionValidations.java --- a/src/main/java/org/assertj/core/api/DescriptionValidations.java +++ b/src/main/java/org/assertj/core/api/DescriptionValidations.java @@ -22,9 +22,9 @@ */ final class DescriptionValidations { - static Description checkIsNotNull(String d) { + static Description checkIsNotNull(String d, Object... args) { if (d == null) throw bomb(); - return new TextDescription(d); + return new TextDescription(d, args); } static Description checkIsNotNull(Description d) { diff --git a/src/main/java/org/assertj/core/api/WritableAssertionInfo.java b/src/main/java/org/assertj/core/api/WritableAssertionInfo.java --- a/src/main/java/org/assertj/core/api/WritableAssertionInfo.java +++ b/src/main/java/org/assertj/core/api/WritableAssertionInfo.java @@ -65,11 +65,12 @@ public String descriptionText() { /** * Sets the description of an assertion. * @param newDescription the new description. + * @param args if {@code newDescription} is a format String, {@code args} is argument of {@link String#format(String, Object...)} * @throws NullPointerException if the given description is {@code null}. * @see #description(Description) */ - public void description(String newDescription) { - description = checkIsNotNull(newDescription); + public void description(String newDescription, Object... args) { + description = checkIsNotNull(newDescription, args); } /** diff --git a/src/main/java/org/assertj/core/description/TextDescription.java b/src/main/java/org/assertj/core/description/TextDescription.java --- a/src/main/java/org/assertj/core/description/TextDescription.java +++ b/src/main/java/org/assertj/core/description/TextDescription.java @@ -14,7 +14,11 @@ */ package org.assertj.core.description; -import static org.assertj.core.util.Objects.*; +import java.util.regex.Pattern; +import org.assertj.core.util.Arrays; +import static org.assertj.core.util.Objects.HASH_CODE_PRIME; +import static org.assertj.core.util.Objects.areEqual; +import static org.assertj.core.util.Objects.hashCodeFor; import static org.assertj.core.util.Preconditions.checkNotNull; import org.assertj.core.util.VisibleForTesting; @@ -22,33 +26,40 @@ /** * A text-based description. 
- * + * * @author Yvonne Wang * @author Alex Ruiz + * @author William Delanoue */ public class TextDescription extends Description { @VisibleForTesting final String value; + final Object[] args; + /** * Creates a new </code>{@link TextDescription}</code>. - * + * * @param value the value of this description. * @throws NullPointerException if the given value is {@code null}. */ - public TextDescription(String value) { + public TextDescription(String value, Object... args) { checkNotNull(value); this.value = value; + this.args = Arrays.isNullOrEmpty(args) ? null : args.clone(); } @Override public String value() { + if(args != null) { + return org.assertj.core.util.Strings.formatIfArgs(value.replaceAll("%", "%%").replaceAll(Pattern.quote("{}"), "%s"), args); + } return value; } @Override public int hashCode() { - return HASH_CODE_PRIME * 1 + hashCodeFor(value); + return HASH_CODE_PRIME * 1 + hashCodeFor(value) + hashCodeFor(args); } @Override @@ -63,6 +74,6 @@ public boolean equals(Object obj) { return false; } TextDescription other = (TextDescription) obj; - return areEqual(value, other.value); + return areEqual(value, other.value) && areEqual(args, other.args); } }
diff --git a/src/test/java/org/assertj/core/description/TextDescription_toString_Test.java b/src/test/java/org/assertj/core/description/TextDescription_toString_Test.java --- a/src/test/java/org/assertj/core/description/TextDescription_toString_Test.java +++ b/src/test/java/org/assertj/core/description/TextDescription_toString_Test.java @@ -32,4 +32,10 @@ public void should_return_value() { TextDescription description = new TextDescription("Flash"); assertEquals(description.value, description.toString()); } + + @Test + public void should_return_formatted_value() { + TextDescription description = new TextDescription("Flash %s {}", "MacQueen"); + assertEquals("Flash %s MacQueen", description.toString()); + } } diff --git a/src/test/java/org/assertj/core/description/TextDescription_value_Test.java b/src/test/java/org/assertj/core/description/TextDescription_value_Test.java --- a/src/test/java/org/assertj/core/description/TextDescription_value_Test.java +++ b/src/test/java/org/assertj/core/description/TextDescription_value_Test.java @@ -32,4 +32,10 @@ public void should_return_value() { TextDescription description = new TextDescription("Robin"); assertEquals(description.value, description.value()); } + + @Test + public void should_return_formatted_value() { + TextDescription description = new TextDescription("Robin {}", "Hood"); + assertEquals("Robin Hood", description.value()); + } }
Allow String.format for test description Instead of: ``` java assertThat(itemCount).as("Check " + item + " count").isEqualTo(5); ``` We would write: ``` java assertThat(itemCount).as("Check %s count", item).isEqualTo(5); ```
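A hedged, self-contained sketch of the requested usage, relying on the `as(String, Object...)` overload added in the patch above (which formats the description with the given arguments):

``` java
import static org.assertj.core.api.Assertions.assertThat;

public class FormattedDescriptionExample {
  public static void main(String[] args) {
    String item = "apple";
    int itemCount = 5;

    // the description is built from a format string, avoiding manual concatenation;
    // if the assertion failed, the error message would be prefixed with "[Check apple count]"
    assertThat(itemCount).as("Check %s count", item).isEqualTo(5);
  }
}
```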
2013-08-20T23:19:06Z
1.3
assertj/assertj
120
assertj__assertj-120
[ "118" ]
8e140203418fdb7b01cacd12bd2a76cf33047171
diff --git a/src/main/java/org/assertj/core/api/AbstractDateAssert.java b/src/main/java/org/assertj/core/api/AbstractDateAssert.java --- a/src/main/java/org/assertj/core/api/AbstractDateAssert.java +++ b/src/main/java/org/assertj/core/api/AbstractDateAssert.java @@ -16,7 +16,7 @@ /** * Base class for all implementations of assertions for {@link Date}s. - * <p> + * <p/> * Note that assertions with date parameter comes with two flavor, one is obviously a {@link Date} and the other is a * {@link String} representing a Date.<br> * For the latter, the default format follows ISO 8901 : "yyyy-MM-dd", user can override it with a custom format by @@ -24,14 +24,14 @@ * The user custom format will then be used for all next Date assertions (i.e not limited to the current assertion) in * the test suite.<br> * To turn back to default format, simply call {@link #withIsoDateFormat()}. - * + * * @param <S> the "self" type of this assertion class. Please read &quot;<a href="http://bit.ly/anMa4g" - * target="_blank">Emulating 'self types' using Java Generics to simplify fluent API implementation</a>&quot; - * for more details. - * + * target="_blank">Emulating 'self types' using Java Generics to simplify fluent API implementation</a>&quot; + * for more details. * @author Tomasz Nurkiewicz (thanks for giving assertions idea) * @author Joel Costigliola * @author Mikhail Mazursky + * @author William Delanoue */ public abstract class AbstractDateAssert<S extends AbstractDateAssert<S>> extends AbstractAssert<S, Date> { @@ -54,20 +54,20 @@ protected AbstractDateAssert(Date actual, Class<?> selfType) { * Same assertion as {@link AbstractAssert#isEqualTo(Object) isEqualTo(Date date)} but given Date is represented as * String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method * {@link #withDateFormat(DateFormat)}). - * + * <p/> * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isEqualTo(&quot;2002-12-18&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isEqualTo(&quot;2002-12-19&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws AssertionError if actual and given Date represented as String are not equal. @@ -77,24 +77,262 @@ public S isEqualTo(String dateAsString) { return isEqualTo(parse(dateAsString)); } + /** + * Same assertion as {@link AbstractDateAssert#isEqualToIgnoringHours(Date)} but given Date is represented as + * String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method + * {@link #withDateFormat(DateFormat)}). + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * <p/> + * withDateFormat("yyyy-MM-dd'T'HH:mm:ss"); + * // OK : all dates fields are the same up to minutes excluded + * assertThat("2003-04-26T13:01:35").isEqualToIgnoringHours("2003-04-26T14:02:35"); + * <p/> + * // KO : fail as day fields differ + * assertThat("2003-04-26T14:01:35").isEqualToIgnoringHours("2003-04-27T13:02:35"); + * </pre> + * <p/> + * </p> + * + * @param dateAsString the given Date represented as String in default or custom date format. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring hours, minutes, seconds and milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. 
+ */ + public S isEqualToIgnoringHours(String dateAsString) { + return isEqualToIgnoringHours(parse(dateAsString)); + } + + /** + * Same assertion as {@link AbstractAssert#isEqualTo(Object) isEqualTo(Date date)} but given Date is represented as + * String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method + * {@link #withDateFormat(DateFormat)}). + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * Date date1 = parseDatetime("2003-04-26T13:01:35"); + * Date date2 = parseDatetime("2003-04-26T14:01:00"); + * Date date3 = parseDatetime("2003-04-27T13:01:35"); + * <p/> + * // OK : all dates fields are the same up to hours excluded + * assertThat(date1).isEqualToIgnoringHours(date2); + * <p/> + * // KO : fail as day fields differ + * assertThat(date1).isEqualToIgnoringHours(date3); + * </pre> + * <p/> + * </p> + * + * @param date the given Date. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring hours, minutes, seconds and milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. + */ + public S isEqualToIgnoringHours(Date date) { + dates.assertIsEqualWithPrecision(info, actual, date, TimeUnit.HOURS); + return myself; + } + + /** + * Same assertion as {@link AbstractDateAssert#isEqualToIgnoringMinutes(Date)} but given Date is represented as + * String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method + * {@link #withDateFormat(DateFormat)}). + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * <p/> + * withDateFormat("yyyy-MM-dd'T'HH:mm:ss"); + * // OK : all dates fields are the same up to minutes excluded + * assertThat("2003-04-26T13:01:35").isEqualToIgnoringMinutes("2003-04-26T13:02:35"); + * <p/> + * // KO : fail as hour fields differ + * assertThat("2003-04-26T14:01:35").isEqualToIgnoringMinutes("2003-04-26T13:02:35"); + * </pre> + * <p/> + * </p> + * + * @param dateAsString the given Date represented as String in default or custom date format. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring minutes, seconds and milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. + */ + public S isEqualToIgnoringMinutes(String dateAsString) { + return isEqualToIgnoringMinutes(parse(dateAsString)); + } + + /** + * Same assertion as {@link AbstractAssert#isEqualTo(Object)}}} but given Date should not take care of minutes, seconds and milliseconds precision. + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * Date date1 = parseDatetime("2003-04-26T13:01:35"); + * Date date2 = parseDatetime("2003-04-26T13:02:00"); + * Date date3 = parseDatetime("2003-04-26T14:02:00"); + * <p/> + * // OK : all dates fields are the same up to minutes excluded + * assertThat(date1).isEqualToIgnoringMinutes(date2); + * <p/> + * // KO : fail as hour fields differ + * assertThat(date1).isEqualToIgnoringMinutes(date3); + * </pre> + * <p/> + * </p> + * + * @param date the given Date. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring minutes, seconds and milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. 
+ */ + public S isEqualToIgnoringMinutes(Date date) { + dates.assertIsEqualWithPrecision(info, actual, date, TimeUnit.MINUTES); + return myself; + } + + /** + * Same assertion as {@link AbstractDateAssert#isEqualToIgnoringSeconds(Date)} but given Date is represented as + * String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method + * {@link #withDateFormat(DateFormat)}). + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * <p/> + * withDateFormat("yyyy-MM-dd'T'HH:mm:ss"); + * // OK : all dates fields are the same up to seconds excluded + * assertThat("2003-04-26T13:01:35").isEqualToIgnoringSeconds("2003-04-26T13:01:57"); + * <p/> + * // KO : fail as minute fields differ + * assertThat("2003-04-26T13:01:35").isEqualToIgnoringMinutes("2003-04-26T13:02:35"); + * </pre> + * <p/> + * </p> + * + * @param dateAsString the given Date represented as String in default or custom date format. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring seconds and milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. + */ + public S isEqualToIgnoringSeconds(String dateAsString) { + return isEqualToIgnoringSeconds(parse(dateAsString)); + } + + /** + * Same assertion as {@link AbstractAssert#isEqualTo(Object)}}} but given Date should not take care of seconds and milliseconds precision. + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * Date date1 = parseDatetime("2003-04-26T13:01:35"); + * Date date2 = parseDatetime("2003-04-26T13:01:36"); + * Date date3 = parseDatetime("2003-04-26T14:02:00"); + * <p/> + * // OK : all dates fields are the same up to seconds excluded + * assertThat(date1).isEqualToIgnoringSeconds(date2); + * <p/> + * // KO : fail as minute fields differ + * assertThat(date1).isEqualToIgnoringSeconds(date3); + * </pre> + * <p/> + * </p> + * + * @param date the given Date represented as String in default or custom date format. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring seconds and milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. + */ + public S isEqualToIgnoringSeconds(Date date) { + dates.assertIsEqualWithPrecision(info, actual, date, TimeUnit.SECONDS); + return myself; + } + + /** + * Same assertion as {@link AbstractDateAssert#isEqualToIgnoringMillis(Date)} but given Date is represented as + * String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method + * {@link #withDateFormat(DateFormat)}). + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * <p/> + * withDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"); + * // OK : all dates fields are the same up to minutes excluded + * assertThat("2003-04-26T13:01:35.998").isEqualToIgnoringSeconds("2003-04-26T13:01:35.997"); + * <p/> + * // KO : fail as seconds fields differ + * assertThat("2003-04-26T13:01:35.998").isEqualToIgnoringMinutes("2003-04-26T13:01:36.995"); + * </pre> + * <p/> + * </p> + * + * @param dateAsString the given Date represented as String in default or custom date format. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. 
+ */ + public S isEqualToIgnoringMillis(String dateAsString) { + return isEqualToIgnoringMillis(parse(dateAsString)); + } + + /** + * Same assertion as {@link AbstractAssert#isEqualTo(Object)}}} but given Date should not take care of milliseconds precision. + * <p/> + * <p> + * Example: + * <p/> + * <pre> + * Date date1 = parseDatetimeAndMs("2003-04-26T13:01:35.001"); + * Date date2 = parseDatetimeAndMs("2003-04-26T13:01:35.002"); + * Date date3 = parseDatetimeAndMs("2003-04-26T14:01:36.001"); + * <p/> + * // OK : all dates fields are the same up to milliseconds excluded + * assertThat(date1).isEqualToIgnoringMillis(date2); + * <p/> + * // KO : fail as second fields differ + * assertThat(date1).isEqualToIgnoringMillis(date3); + * </pre> + * <p/> + * </p> + * + * @param date the given Date represented as String in default or custom date format. + * @return this assertion object. + * @throws AssertionError if actual and given Date represented as String are not equal ignoring milliseconds. + * @throws AssertionError if the given date as String could not be converted to a Date. + */ + public S isEqualToIgnoringMillis(Date date) { + dates.assertIsEqualWithPrecision(info, actual, date, TimeUnit.MILLISECONDS); + return myself; + } + /** * Same assertion as {@link AbstractAssert#isNotEqualTo(Object) isNotEqualTo(Date date)} but given Date is represented * as String either with ISO date format (yyyy-MM-dd) or user custom date format (set with method * {@link #withDateFormat(DateFormat)}). - * + * <p/> * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isNotEqualTo(&quot;2002-12-19&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isNotEqualTo(&quot;2002-12-18&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws AssertionError if actual and given Date represented as String are equal. @@ -109,17 +347,17 @@ public S isNotEqualTo(String dateAsString) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isIn(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;, &quot;2002-12-19&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isIn(&quot;2002-12-17&quot;, &quot;2002-12-19&quot;, &quot;2002-12-20&quot;); * </pre> - * + * <p/> * </p> - * + * * @param datesAsString the given Dates represented as String in default or custom date format. * @return this assertion object. * @throws AssertionError if actual is not in given Dates represented as String. @@ -140,19 +378,19 @@ public S isIn(String... datesAsString) { * <code>isIn(Collection&lt;Date&gt;)</code> since java collection type are erased at runtime. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isInWithStringDateCollection( * Arrays.asList(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;, &quot;2002-12-19&quot;)); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isInWithStringDateCollection( * Arrays.asList(&quot;2002-12-17&quot;, &quot;2002-12-19&quot;, &quot;2002-12-20&quot;)); * </pre> - * + * <p/> * </p> - * + * * @param datesAsString the given Dates represented as String in default or custom date format. * @return this assertion object. 
* @throws AssertionError if actual is not in given Dates represented as String. @@ -171,17 +409,17 @@ public S isInWithStringDateCollection(Collection<String> datesAsString) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isNotIn(&quot;2002-12-17&quot;, &quot;2002-12-19&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isNotIn(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;); * </pre> - * + * <p/> * </p> - * + * * @param datesAsString the given Dates represented as String in default or custom date format. * @return this assertion object. * @throws AssertionError if actual is in given Dates represented as String. @@ -202,17 +440,17 @@ public S isNotIn(String... datesAsString) { * <code>isNotIn(Collection&lt;Date&gt;)</code> since java collection type are erased at runtime. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isNotInWithStringDateCollection(Arrays.asList(&quot;2002-12-17&quot;, &quot;2002-12-19&quot;)); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isNotInWithStringDateCollection(Arrays.asList(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;)); * </pre> - * + * <p/> * </p> - * + * * @param datesAsString the given Dates represented as String in default or custom date format. * @return this assertion object. * @throws AssertionError if actual is in given Dates represented as String. @@ -230,22 +468,22 @@ public S isNotInWithStringDateCollection(Collection<String> datesAsString) { * Verifies that the actual {@code Date} is <b>strictly</b> before the given one. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBefore(theReturnOfTheKing.getReleaseDate()); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isBefore(theFellowshipOfTheRing.getReleaseDate()); * </pre> - * + * <p/> * </p> - * + * * @param other the given Date. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if other {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not strictly before the given one. + * @throws AssertionError if the actual {@code Date} is not strictly before the given one. */ public S isBefore(Date other) { dates.assertIsBefore(info, actual, other); @@ -257,24 +495,24 @@ public S isBefore(Date other) { * (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBefore(&quot;2002-12-19&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isBefore(&quot;2002-12-17&quot;); * assertThat(theTwoTowers.getReleaseDate()).isBefore(&quot;2002-12-18&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if given date as String is {@code null}. 
- * @throws AssertionError if the actual {@code Date} is not strictly before the given Date represented as String. - * @throws AssertionError if the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not strictly before the given Date represented as String. + * @throws AssertionError if the given date as String could not be converted to a Date. */ public S isBefore(String dateAsString) { return isBefore(parse(dateAsString)); @@ -285,9 +523,9 @@ public S isBefore(String dateAsString) { * * @param other the given Date. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if other {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not before or equals to the given one. + * @throws AssertionError if the actual {@code Date} is not before or equals to the given one. */ public S isBeforeOrEqualsTo(Date other) { dates.assertIsBeforeOrEqualsTo(info, actual, other); @@ -299,24 +537,24 @@ public S isBeforeOrEqualsTo(Date other) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBeforeOrEqualsTo(&quot;2002-12-19&quot;); * assertThat(theTwoTowers.getReleaseDate()).isBeforeOrEqualsTo(&quot;2002-12-18&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isBeforeOrEqualsTo(&quot;2002-12-17&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if given date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is not before or equals to the given Date represented as String. - * @throws AssertionError if the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not before or equals to the given Date represented as String. + * @throws AssertionError if the given date as String could not be converted to a Date. */ public S isBeforeOrEqualsTo(String dateAsString) { return isBeforeOrEqualsTo(parse(dateAsString)); @@ -326,22 +564,22 @@ public S isBeforeOrEqualsTo(String dateAsString) { * Verifies that the actual {@code Date} is <b>strictly</b> after the given one. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isAfter(theFellowshipOfTheRing.getReleaseDate()); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isAfter(theReturnOfTheKing.getReleaseDate()); * </pre> - * + * <p/> * </p> - * + * * @param other the given Date. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if other {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not strictly after the given one. + * @throws AssertionError if the actual {@code Date} is not strictly after the given one. 
*/ public S isAfter(Date other) { dates.assertIsAfter(info, actual, other); @@ -353,24 +591,24 @@ public S isAfter(Date other) { * (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isAfter(&quot;2002-12-17&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isAfter(&quot;2002-12-18&quot;); * assertThat(theTwoTowers.getReleaseDate()).isAfter(&quot;2002-12-19&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if given date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is not strictly after the given Date represented as String. - * @throws AssertionError if the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not strictly after the given Date represented as String. + * @throws AssertionError if the given date as String could not be converted to a Date. */ public S isAfter(String dateAsString) { return isAfter(parse(dateAsString)); @@ -378,12 +616,12 @@ public S isAfter(String dateAsString) { /** * Verifies that the actual {@code Date} is after or equals to the given one. - * + * * @param other the given Date. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if other {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not after or equals to the given one. + * @throws AssertionError if the actual {@code Date} is not after or equals to the given one. */ public S isAfterOrEqualsTo(Date other) { dates.assertIsAfterOrEqualsTo(info, actual, other); @@ -395,24 +633,24 @@ public S isAfterOrEqualsTo(Date other) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isAfterOrEqualsTo(&quot;2002-12-17&quot;); * assertThat(theTwoTowers.getReleaseDate()).isAfterOrEqualsTo(&quot;2002-12-18&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isAfterOrEqualsTo(&quot;2002-12-19&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if given date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is not after or equals to the given Date represented as String. - * @throws AssertionError if the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not after or equals to the given Date represented as String. + * @throws AssertionError if the given date as String could not be converted to a Date. 
*/ public S isAfterOrEqualsTo(String dateAsString) { return isAfterOrEqualsTo(parse(dateAsString)); @@ -422,26 +660,26 @@ public S isAfterOrEqualsTo(String dateAsString) { * Verifies that the actual {@code Date} is in [start, end[ period (start included, end excluded). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBetween(theFellowshipOfTheRing.getReleaseDate(), * theReturnOfTheKing.getReleaseDate()); - * + * * // assertion will fail * assertThat(theFellowshipOfTheRing.getReleaseDate()).isBetween(theTwoTowers.getReleaseDate(), * theReturnOfTheKing.getReleaseDate()); * </pre> - * + * <p/> * </p> - * + * * @param start the period start (inclusive), expected not to be null. - * @param end the period end (exclusive), expected not to be null. + * @param end the period end (exclusive), expected not to be null. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if start {@code Date} is {@code null}. * @throws NullPointerException if end {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not in [start, end[ period. + * @throws AssertionError if the actual {@code Date} is not in [start, end[ period. */ public S isBetween(Date start, Date end) { return isBetween(start, end, true, false); @@ -452,25 +690,25 @@ public S isBetween(Date start, Date end) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBetween(&quot;2002-12-17&quot;, &quot;2002-12-19&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isBetween(&quot;2002-12-15&quot;, &quot;2002-12-17&quot;); * </pre> - * + * <p/> * </p> - * + * * @param start the period start (inclusive), expected not to be null. - * @param end the period end (exclusive), expected not to be null. + * @param end the period end (exclusive), expected not to be null. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if start Date as String is {@code null}. * @throws NullPointerException if end Date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is not in [start, end[ period. - * @throws AssertionError if one of the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not in [start, end[ period. + * @throws AssertionError if one of the given date as String could not be converted to a Date. */ public S isBetween(String start, String end) { return isBetween(parse(start), parse(end)); @@ -480,16 +718,16 @@ public S isBetween(String start, String end) { * Verifies that the actual {@code Date} is in the given period defined by start and end dates.<br> * To include start in the period set inclusiveStart parameter to <code>true</code>.<br> * To include end in the period set inclusiveEnd parameter to <code>true</code>.<br> - * - * @param start the period start, expected not to be null. - * @param end the period end, expected not to be null. + * + * @param start the period start, expected not to be null. + * @param end the period end, expected not to be null. 
* @param inclusiveStart whether to include start date in period. - * @param inclusiveEnd whether to include end date in period. + * @param inclusiveEnd whether to include end date in period. * @return this assertion object. - * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if {@code actual} is {@code null}. * @throws NullPointerException if start {@code Date} is {@code null}. * @throws NullPointerException if end {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not in (start, end) period. + * @throws AssertionError if the actual {@code Date} is not in (start, end) period. */ public S isBetween(Date start, Date end, boolean inclusiveStart, boolean inclusiveEnd) { dates.assertIsBetween(info, actual, start, end, inclusiveStart, inclusiveEnd); @@ -501,28 +739,28 @@ public S isBetween(Date start, Date end, boolean inclusiveStart, boolean inclusi * with ISO date format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBetween(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;, false, true); * assertThat(theTwoTowers.getReleaseDate()).isBetween(&quot;2002-12-18&quot;, &quot;2002-12-19&quot;, true, false); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isBetween(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;, false, false); * </pre> - * + * <p/> * </p> - * - * @param start the period start, expected not to be null. - * @param end the period end, expected not to be null. + * + * @param start the period start, expected not to be null. + * @param end the period end, expected not to be null. * @param inclusiveStart whether to include start date in period. - * @param inclusiveEnd whether to include end date in period. + * @param inclusiveEnd whether to include end date in period. * @return this assertion object. - * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if {@code actual} is {@code null}. * @throws NullPointerException if start Date as String is {@code null}. * @throws NullPointerException if end Date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is not in (start, end) period. - * @throws AssertionError if one of the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not in (start, end) period. + * @throws AssertionError if one of the given date as String could not be converted to a Date. */ public S isBetween(String start, String end, boolean inclusiveStart, boolean inclusiveEnd) { dates.assertIsBetween(info, actual, parse(start), parse(end), inclusiveStart, inclusiveEnd); @@ -533,16 +771,16 @@ public S isBetween(String start, String end, boolean inclusiveStart, boolean inc * Verifies that the actual {@code Date} is not in the given period defined by start and end dates.<br> * To include start in the period set inclusiveStart parameter to <code>true</code>.<br> * To include end in the period set inclusiveEnd parameter to <code>true</code>.<br> - * - * @param start the period start (inclusive), expected not to be null. - * @param end the period end (exclusive), expected not to be null. + * + * @param start the period start (inclusive), expected not to be null. + * @param end the period end (exclusive), expected not to be null. * @param inclusiveStart whether to include start date in period. 
- * @param inclusiveEnd whether to include end date in period. + * @param inclusiveEnd whether to include end date in period. * @return this assertion object. - * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if {@code actual} is {@code null}. * @throws NullPointerException if start {@code Date} is {@code null}. * @throws NullPointerException if end {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is not in (start, end) period. + * @throws AssertionError if the actual {@code Date} is not in (start, end) period. */ public S isNotBetween(Date start, Date end, boolean inclusiveStart, boolean inclusiveEnd) { dates.assertIsNotBetween(info, actual, start, end, inclusiveStart, inclusiveEnd); @@ -555,28 +793,28 @@ public S isNotBetween(Date start, Date end, boolean inclusiveStart, boolean incl * {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isNotBetween(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;, false, false); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isNotBetween(&quot;2002-12-17&quot;, &quot;2002-12-18&quot;, false, true); * assertThat(theTwoTowers.getReleaseDate()).isNotBetween(&quot;2002-12-18&quot;, &quot;2002-12-19&quot;, true, false); * </pre> - * + * <p/> * </p> - * - * @param start the period start (inclusive), expected not to be null. - * @param end the period end (exclusive), expected not to be null. + * + * @param start the period start (inclusive), expected not to be null. + * @param end the period end (exclusive), expected not to be null. * @param inclusiveStart whether to include start date in period. - * @param inclusiveEnd whether to include end date in period. + * @param inclusiveEnd whether to include end date in period. * @return this assertion object. - * @throws AssertionError if {@code actual} is {@code null}. + * @throws AssertionError if {@code actual} is {@code null}. * @throws NullPointerException if start Date as String is {@code null}. * @throws NullPointerException if end Date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is not in (start, end) period. - * @throws AssertionError if one of the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is not in (start, end) period. + * @throws AssertionError if one of the given date as String could not be converted to a Date. */ public S isNotBetween(String start, String end, boolean inclusiveStart, boolean inclusiveEnd) { return isNotBetween(parse(start), parse(end), inclusiveStart, inclusiveEnd); @@ -586,13 +824,13 @@ public S isNotBetween(String start, String end, boolean inclusiveStart, boolean * Verifies that the actual {@code Date} is not in [start, end[ period * * @param start the period start (inclusive), expected not to be null. - * @param end the period end (exclusive), expected not to be null. + * @param end the period end (exclusive), expected not to be null. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if start {@code Date} is {@code null}. * @throws NullPointerException if end {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} is in [start, end[ period. 
- * @throws AssertionError if one of the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is in [start, end[ period. + * @throws AssertionError if one of the given date as String could not be converted to a Date. */ public S isNotBetween(Date start, Date end) { return isNotBetween(start, end, true, false); @@ -603,25 +841,25 @@ public S isNotBetween(Date start, Date end) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theFellowshipOfTheRing.getReleaseDate()).isNotBetween(&quot;2002-12-01&quot;, &quot;2002-12-10&quot;); - * + * * // assertion will fail * assertThat(theFellowshipOfTheRing.getReleaseDate()).isNotBetween(&quot;2002-12-01&quot;, &quot;2002-12-19&quot;); * </pre> - * + * <p/> * </p> - * + * * @param start the period start (inclusive), expected not to be null. - * @param end the period end (exclusive), expected not to be null. + * @param end the period end (exclusive), expected not to be null. * @return this assertion object. - * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws NullPointerException if start Date as String is {@code null}. * @throws NullPointerException if end Date as String is {@code null}. - * @throws AssertionError if the actual {@code Date} is in [start, end[ period. - * @throws AssertionError if one of the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is in [start, end[ period. + * @throws AssertionError if one of the given date as String could not be converted to a Date. */ public S isNotBetween(String start, String end) { return isNotBetween(parse(start), parse(end), true, false); @@ -631,14 +869,14 @@ public S isNotBetween(String start, String end) { * Verifies that the actual {@code Date} is strictly in the past. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isInThePast(); * </pre> - * + * <p/> * </p> - * + * * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws AssertionError if the actual {@code Date} is not in the past. @@ -653,17 +891,17 @@ public S isInThePast() { * minute, second, milliseconds). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(new Date()).isToday(); - * + * * // assertion will fail * assertThat(theFellowshipOfTheRing.getReleaseDate()).isToday(); * </pre> - * + * <p/> * </p> - * + * * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws AssertionError if the actual {@code Date} is not today. @@ -677,14 +915,14 @@ public S isToday() { * Verifies that the actual {@code Date} is strictly in the future. * <p> * Example: - * + * <p/> * <pre> * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isInTheFuture(); * </pre> - * + * <p/> * </p> - * + * * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws AssertionError if the actual {@code Date} is not in the future. @@ -698,18 +936,18 @@ public S isInTheFuture() { * Verifies that the actual {@code Date} is <b>strictly</b> before the given year. 
* <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isBeforeYear(2004); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isBeforeYear(2002); * assertThat(theTwoTowers.getReleaseDate()).isBeforeYear(2000); * </pre> - * + * <p/> * </p> - * + * * @param year the year to compare actual year to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -724,18 +962,18 @@ public S isBeforeYear(int year) { * Verifies that the actual {@code Date} is <b>strictly</b> after the given year. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isAfterYear(2001); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isAfterYear(2002); * assertThat(theTwoTowers.getReleaseDate()).isAfterYear(2004); * </pre> - * + * <p/> * </p> - * + * * @param year the year to compare actual year to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -752,17 +990,17 @@ public S isAfterYear(int year) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isWithinYear(2002); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isWithinYear(2004); * </pre> - * + * <p/> * </p> - * + * * @param year the year to compare actual year to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -780,17 +1018,17 @@ public S isWithinYear(int year) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isWithinMonth(12); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isWithinMonth(10); * </pre> - * + * <p/> * </p> - * + * * @param month the month to compare actual month to, <b>month value starting at 1</b> (January=1, February=2, ...). * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -807,7 +1045,7 @@ public S isWithinMonth(int month) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isWithinDayOfMonth(18); @@ -815,8 +1053,9 @@ public S isWithinMonth(int month) { * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isWithinDayOfMonth(20); * </pre> - * + * <p/> * </p> + * * @param dayOfMonth the day of month to compare actual day of month to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -834,19 +1073,19 @@ public S isWithinDayOfMonth(int dayOfMonth) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. 
* <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinDayOfWeek(Calendar.SATURDAY); - * + * * // assertion will fail * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinDayOfWeek(Calendar.MONDAY); * </pre> - * + * <p/> * </p> - * + * * @param dayOfWeek the day of week to compare actual day of week to, see {@link Calendar#DAY_OF_WEEK} for valid - * values + * values * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. * @throws AssertionError if the actual {@code Date} week is not equal to the given day of week. @@ -862,17 +1101,17 @@ public S isWithinDayOfWeek(int dayOfWeek) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinHourOfDay(13); - * + * * // assertion will fail * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinHourOfDay(22); * </pre> - * + * <p/> * </p> - * + * * @param hourOfDay the hour of day to compare actual hour of day to (24-hour clock) * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -889,17 +1128,17 @@ public S isWithinHourOfDay(int hourOfDay) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinMinute(20); - * + * * // assertion will fail * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinMinute(17); * </pre> - * + * <p/> * </p> - * + * * @param minute the minute to compare actual minute to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -916,17 +1155,17 @@ public S isWithinMinute(int minute) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinSecond(35); - * + * * // assertion will fail * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime()).isWithinSecond(11); * </pre> - * + * <p/> * </p> - * + * * @param second the second to compare actual second to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -943,17 +1182,17 @@ public S isWithinSecond(int second) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime() + 17).isWithinMillisecond(17); - * + * * // assertion will fail * assertThat(new Date(parseDatetime("2003-04-26T13:20:35").getTime() + 17).isWithinMillisecond(25); * </pre> - * + * <p/> * </p> - * + * * @param millisecond the millisecond to compare actual millisecond to * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -966,14 +1205,14 @@ public S isWithinMillisecond(int millisecond) { /** * Verifies that actual and given {@code Date} are in the same year. 
- * <p> + * <p/> * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. - * + * * @param other the given {@code Date} to compare actual {@code Date} to. * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same year. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same year. */ public S isInSameYearAs(Date other) { dates.assertIsInSameYearAs(info, actual, other); @@ -985,23 +1224,23 @@ public S isInSameYearAs(Date other) { * (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isInSameYearAs(&quot;2002-06-20&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isInSameYearAs(&quot;2001-12-18&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given Date represented as String are not in the same year. - * @throws AssertionError if the given date as String could not be converted to a Date. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given Date represented as String are not in the same year. + * @throws AssertionError if the given date as String could not be converted to a Date. */ public S isInSameYearAs(String dateAsString) { return isInSameYearAs(parse(dateAsString)); @@ -1009,18 +1248,18 @@ public S isInSameYearAs(String dateAsString) { /** * Verifies that actual and given {@code Date} are chronologically in the same month (and thus in the same year). - * <p> + * <p/> * If you want to compare month only (without year), use : * <code>assertThat(myDate).isWithinMonth(monthOf(otherDate))</code><br> * See {@link org.assertj.core.util.Dates#monthOf(Date)} to get the month of a given Date. - * <p> + * <p/> * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. - * + * * @param other the given {@code Date} to compare actual {@code Date} to. * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same month. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same month. */ public S isInSameMonthAs(Date other) { dates.assertIsInSameMonthAs(info, actual, other); @@ -1032,22 +1271,22 @@ public S isInSameMonthAs(Date other) { * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). 
* <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isInSameMonthAs(&quot;2000-12-20&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isInSameMonthAs(&quot;2002-06-18&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same month. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same month. */ public S isInSameMonthAs(String dateAsString) { return isInSameMonthAs(parse(dateAsString)); @@ -1056,18 +1295,18 @@ public S isInSameMonthAs(String dateAsString) { /** * Verifies that actual and given {@code Date} are chronologically in the same day of month (and thus in the same * month and year). - * <p> + * <p/> * If you want to compare day of month only (without month and year), you could write : * <code>assertThat(myDate).isWithinDayOfMonth(dayOfMonthOf(otherDate))</code><br> * see {@link org.assertj.core.util.Dates#dayOfMonthOf(Date)} to get the day of month of a given Date. - * <p> + * <p/> * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. - * + * * @param other the given {@code Date} to compare actual {@code Date} to. * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same day of month. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same day of month. */ public S isInSameDayAs(Date other) { dates.assertIsInSameDayAs(info, actual, other); @@ -1079,22 +1318,22 @@ public S isInSameDayAs(Date other) { * (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). * <p> * Example: - * + * <p/> * <pre> * // assertion will pass * assertThat(theTwoTowers.getReleaseDate()).isInSameDayAs(&quot;2000-06-18&quot;); - * + * * // assertion will fail * assertThat(theTwoTowers.getReleaseDate()).isInSameDayAs(&quot;2002-12-20&quot;); * </pre> - * + * <p/> * </p> - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same day of month. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same day of month. */ public S isInSameDayAs(String dateAsString) { return isInSameDayAs(parse(dateAsString)); @@ -1103,18 +1342,18 @@ public S isInSameDayAs(String dateAsString) { /** * Verifies that actual and given {@code Date} are chronologically in the same hour (and thus in the same day, month * and year). 
- * <p> + * <p/> * If you want to compare hour only (without day, month and year), you could write : * <code>assertThat(myDate).isWithinHour(hourOfDayOf(otherDate))</code><br> * see {@link org.assertj.core.util.Dates#hourOfDay(Date)} to get the hour of a given Date. - * <p> + * <p/> * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. - * + * * @param other the given {@code Date} to compare actual {@code Date} to. * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same hour. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same hour. */ public S isInSameHourAs(Date other) { dates.assertIsInSameHourAs(info, actual, other); @@ -1124,12 +1363,12 @@ public S isInSameHourAs(Date other) { /** * Same assertion as {@link #isInSameHourAs(Date)} but given Date is represented as String either with ISO date format * (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same hour. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same hour. */ public S isInSameHourAs(String dateAsString) { return isInSameHourAs(parse(dateAsString)); @@ -1138,18 +1377,18 @@ public S isInSameHourAs(String dateAsString) { /** * Verifies that actual and given {@code Date} are chronologically in the same minute (and thus in the same hour, day, * month and year). - * <p> + * <p/> * If you want to compare minute only (without hour, day, month and year), you could write : * <code>assertThat(myDate).isWithinMinute(minuteOf(otherDate))</code><br> * see {@link org.assertj.core.util.Dates#minuteOf(Date)} to get the minute of a given Date. - * <p> + * <p/> * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. - * + * * @param other the given {@code Date} to compare actual {@code Date} to. * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same minute. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same minute. */ public S isInSameMinuteAs(Date other) { dates.assertIsInSameMinuteAs(info, actual, other); @@ -1159,12 +1398,12 @@ public S isInSameMinuteAs(Date other) { /** * Same assertion as {@link #isInSameMinuteAs(Date)} but given Date is represented as String either with ISO date * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. 
* @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same minute. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same minute. */ public S isInSameMinuteAs(String dateAsString) { return isInSameMinuteAs(parse(dateAsString)); @@ -1173,18 +1412,18 @@ public S isInSameMinuteAs(String dateAsString) { /** * Verifies that actual and given {@code Date} are chronologically in the same second (and thus in the same minute, * hour, day, month and year). - * <p> + * <p/> * If you want to compare second only (without minute, hour, day, month and year), you could write : * <code>assertThat(myDate).isWithinSecond(secondOf(otherDate))</code><br> * see {@link org.assertj.core.util.Dates#secondOf(Date)} to get the second of a given Date. - * <p> + * <p/> * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. - * + * * @param other the given {@code Date} to compare actual {@code Date} to. * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same second. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same second. */ public S isInSameSecondAs(Date other) { dates.assertIsInSameSecondAs(info, actual, other); @@ -1194,12 +1433,12 @@ public S isInSameSecondAs(Date other) { /** * Same assertion as {@link #isInSameSecondAs(Date)} but given Date is represented as String either with ISO date * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). - * + * * @param dateAsString the given Date represented as String in default or custom date format. * @return this assertion object. * @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if actual and given {@code Date} are not in the same second. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if actual and given {@code Date} are not in the same second. */ public S isInSameSecondAs(String dateAsString) { return isInSameSecondAs(parse(dateAsString)); @@ -1215,12 +1454,12 @@ public S isInSameSecondAs(String dateAsString) { * Note that using a custom comparator has no effect on this assertion (see {@link #usingComparator(Comparator)}. * <p> * Example: - * + * <p/> * <pre> - * + * * Date date1 = new Date(); * Date date2 = new Date(date1.getTime() + 100); - * + * * // assertion will pass * assertThat(date1).isCloseTo(date2, 80); * assertThat(date1).isCloseTo(date2, 100); @@ -1228,15 +1467,15 @@ public S isInSameSecondAs(String dateAsString) { * // assertion will fail * assertThat(date1).isCloseTo(date2, 101); * </pre> - * + * <p/> * </p> - * - * @param other the date to compare actual to + * + * @param other the date to compare actual to * @param deltaInMilliseconds the delta used for date comparison, expressed in milliseconds * @return this assertion object. * @throws NullPointerException if {@code Date} parameter is {@code null}. 
- * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} week is not close to the given date by less than delta. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} week is not close to the given date by less than delta. */ public S isCloseTo(Date other, long deltaInMilliseconds) { dates.assertIsCloseTo(info, actual, other, deltaInMilliseconds); @@ -1246,13 +1485,13 @@ public S isCloseTo(Date other, long deltaInMilliseconds) { /** * Same assertion as {@link #isCloseTo(Date, long)} but given Date is represented as String either with ISO date * format (yyyy-MM-dd) or user custom date format (set with method {@link #withDateFormat(DateFormat)}). - * - * @param dateAsString the given Date represented as String in default or custom date format. + * + * @param dateAsString the given Date represented as String in default or custom date format. * @param deltaInMilliseconds the delta used for date comparison, expressed in milliseconds * @return this assertion object. * @throws NullPointerException if dateAsString parameter is {@code null}. - * @throws AssertionError if the actual {@code Date} is {@code null}. - * @throws AssertionError if the actual {@code Date} week is not close to the given date by less than delta. + * @throws AssertionError if the actual {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} week is not close to the given date by less than delta. */ public S isCloseTo(String dateAsString, long deltaInMilliseconds) { return isCloseTo(parse(dateAsString), deltaInMilliseconds); @@ -1260,9 +1499,9 @@ public S isCloseTo(String dateAsString, long deltaInMilliseconds) { /** * Verifies that the actual {@code Date} has the same time as the given timestamp. - * <p> + * <p/> * Both time or timestamp express a number of milliseconds since January 1, 1970, 00:00:00 GMT. - * + * * @param timestamp the timestamp to compare actual time to. * @return this assertion object. * @throws AssertionError if the actual {@code Date} is {@code null}. @@ -1277,13 +1516,13 @@ public S hasTime(long timestamp) { /** * For String based Date assertions like {@link #isBefore(String)}, given String is expected to follow the default * Date format, that is ISO 8601 format : "yyyy-MM-dd". - * <p> + * <p/> * With this method, user can specify its own date format, replacing the current date format for all future Date * assertions in the test suite (i.e. not only the current assertions) since custom DateFormat is stored in a static * field. - * <p> + * <p/> * To revert to default format simply call {@link #withIsoDateFormat()}. - * + * * @param userCustomDateFormat the new Date format used for String based Date assertions. * @return this assertion object. */ @@ -1295,13 +1534,13 @@ public S withDateFormat(DateFormat userCustomDateFormat) { /** * For String based Date assertions like {@link #isBefore(String)}, given String is expected to follow the default * Date format, that is ISO 8601 format : "yyyy-MM-dd". - * <p> + * <p/> * With this method, user can specify its own date format, replacing the current date format for all future Date * assertions in the test suite (i.e. not only the current assertions) since custom DateFormat is stored in a static * field. - * <p> + * <p/> * To revert to default format simply call {@link #useIsoDateFormat()} (static method) or {@link #withIsoDateFormat()}. 
- * + * * @param userCustomDateFormat the new Date format used for String based Date assertions. */ public static void useDateFormat(DateFormat userCustomDateFormat) { @@ -1311,7 +1550,7 @@ public static void useDateFormat(DateFormat userCustomDateFormat) { /** * Use ISO 8601 date format ("yyyy-MM-dd") for String based Date assertions. - * + * * @return this assertion object. */ public S withIsoDateFormat() { @@ -1329,13 +1568,15 @@ public static void useIsoDateFormat() { /** * Utility method to parse a Date with {@link #dateFormat}, note that it is thread safe.<br> * Returns <code>null</code> if dateAsString parameter is <code>null</code>. - * + * * @param dateAsString the string to parse as a Date with {@link #dateFormat} * @return the corresponding Date, null if dateAsString parameter is null. * @throws AssertionError if the string can't be parsed as a Date */ private static Date parse(String dateAsString) { - if (dateAsString == null) { return null; } + if (dateAsString == null) { + return null; + } try { // synchronized is used because SimpleDateFormat which is not thread safe (sigh). synchronized (dateFormat) { diff --git a/src/main/java/org/assertj/core/error/ShouldBeEqualWithTimePrecision.java b/src/main/java/org/assertj/core/error/ShouldBeEqualWithTimePrecision.java new file mode 100644 --- /dev/null +++ b/src/main/java/org/assertj/core/error/ShouldBeEqualWithTimePrecision.java @@ -0,0 +1,45 @@ +/* + * Created on Aug 5, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. + */ +package org.assertj.core.error; + +import java.util.Date; +import java.util.concurrent.TimeUnit; + +/** + * Creates an <code>{@link AssertionError}</code> indicating that an assertion that verifies that two dates (with max precision) are equals failed. + * + * @author William Delanoue + */ +public class ShouldBeEqualWithTimePrecision extends BasicErrorMessageFactory { + + private static final String EXPECTED_BUT_WAS_MESSAGE = "\nExpecting:\n <%s>\nto be equal to:\n <%s>\nbut was not using max precision of <%s>."; + + /** + * Creates a new <code>{@link org.assertj.core.error.ShouldBeEqualWithTimePrecision}</code>. + * + * @param actual the actual value in the failed assertion. + * @param expected the expected value in the failed assertion. + * @param precision the {@link TimeUnit} used to compare actual with expected. + * @return the created {@code AssertionErrorFactory}. 
+ */ + public static ErrorMessageFactory shouldBeEqual(Date actual, Date expected, TimeUnit precision) { + return new ShouldBeEqualWithTimePrecision(actual, expected, precision); + } + + private ShouldBeEqualWithTimePrecision(Date actual, Date expected, TimeUnit precision) { + super(EXPECTED_BUT_WAS_MESSAGE, actual, expected, precision.name()); + } + +} diff --git a/src/main/java/org/assertj/core/internal/Dates.java b/src/main/java/org/assertj/core/internal/Dates.java --- a/src/main/java/org/assertj/core/internal/Dates.java +++ b/src/main/java/org/assertj/core/internal/Dates.java @@ -1,11 +1,13 @@ package org.assertj.core.internal; +import java.util.concurrent.TimeUnit; import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter; import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo; import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore; import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo; import static org.assertj.core.error.ShouldBeBetween.shouldBeBetween; import static org.assertj.core.error.ShouldBeCloseTo.shouldBeCloseTo; +import org.assertj.core.error.ShouldBeEqualWithTimePrecision; import static org.assertj.core.error.ShouldBeInSameDay.shouldBeInSameDay; import static org.assertj.core.error.ShouldBeInSameHour.shouldBeInSameHour; import static org.assertj.core.error.ShouldBeInSameMinute.shouldBeInSameMinute; @@ -15,6 +17,7 @@ import static org.assertj.core.error.ShouldBeInTheFuture.shouldBeInTheFuture; import static org.assertj.core.error.ShouldBeInThePast.shouldBeInThePast; import static org.assertj.core.error.ShouldBeToday.shouldBeToday; +import static org.assertj.core.error.ShouldBeEqual.shouldBeEqual; import static org.assertj.core.error.ShouldBeWithin.shouldBeWithin; import static org.assertj.core.error.ShouldHaveTime.shouldHaveTime; import static org.assertj.core.error.ShouldNotBeBetween.shouldNotBeBetween; @@ -41,6 +44,7 @@ * Reusable assertions for <code>{@link Date}</code>s. * * @author Joel Costigliola + * @author William Delanoue */ public class Dates { @@ -139,6 +143,45 @@ public void assertIsAfterOrEqualsTo(AssertionInfo info, Date actual, Date other) throw failures.failure(info, shouldBeAfterOrEqualsTo(actual, other, comparisonStrategy)); } + /** + * Verifies that the actual {@code Date} is equal to the given one with precision. + * @param info contains information about the assertion. + * @param actual the "actual" {@code Date}. + * @param other the given Date. + * @param precision maximum precision for the comparison. + * @throws AssertionError if {@code actual} is {@code null}. + * @throws NullPointerException if other {@code Date} is {@code null}. + * @throws AssertionError if the actual {@code Date} is not equal to the given one. 
+ */ + public void assertIsEqualWithPrecision(AssertionInfo info, Date actual, Date other, TimeUnit precision) { + assertNotNull(info, actual); + Calendar calendarActual = Calendar.getInstance(); + calendarActual.setTime(actual); + Calendar calendarOther = Calendar.getInstance(); + calendarOther.setTime(other); + switch (precision) { + case DAYS: + calendarActual.set(Calendar.DAY_OF_WEEK, 0); + calendarOther.set(Calendar.DAY_OF_WEEK, 0); + case HOURS: + calendarActual.set(Calendar.HOUR, 0); + calendarOther.set(Calendar.HOUR, 0); + case MINUTES: + calendarActual.set(Calendar.MINUTE, 0); + calendarOther.set(Calendar.MINUTE, 0); + case SECONDS: + calendarActual.set(Calendar.SECOND, 0); + calendarOther.set(Calendar.SECOND, 0); + case MILLISECONDS: + calendarActual.set(Calendar.MILLISECOND, 0); + calendarOther.set(Calendar.MILLISECOND, 0); + case MICROSECONDS: + break; + } + if(calendarActual.compareTo(calendarOther) != 0) + throw failures.failure(info, ShouldBeEqualWithTimePrecision.shouldBeEqual(actual, other, precision)); + } + /** * Verifies that the actual {@code Date} is in <i>start:end</i> period.<br> * start date belongs to the period if inclusiveStart is true.<br>
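The `assertIsEqualWithPrecision` hunk above works by resetting, in both dates, the `Calendar` fields at and below the requested precision, using deliberate switch fall-through, and then comparing the truncated calendars. A simplified standalone sketch of that truncation idea, with field choices normalized for readability (e.g. `HOUR_OF_DAY` and `DAY_OF_MONTH`) rather than copied from the record:

``` java
import java.util.Calendar;
import java.util.Date;
import java.util.concurrent.TimeUnit;

final class DateTruncation {

  // Returns a copy of the given date with the requested unit and all finer units reset to their
  // minimum, mirroring the fall-through switch used by assertIsEqualWithPrecision.
  static Date ignoring(Date date, TimeUnit ignoredPrecision) {
    Calendar cal = Calendar.getInstance();
    cal.setTime(date);
    switch (ignoredPrecision) {
      case DAYS:
        cal.set(Calendar.DAY_OF_MONTH, 1);   // fall through: hours, minutes, ... are reset too
      case HOURS:
        cal.set(Calendar.HOUR_OF_DAY, 0);    // fall through
      case MINUTES:
        cal.set(Calendar.MINUTE, 0);         // fall through
      case SECONDS:
        cal.set(Calendar.SECOND, 0);         // fall through
      case MILLISECONDS:
        cal.set(Calendar.MILLISECOND, 0);
        break;
      default:
        break;                               // units finer than milliseconds: nothing to reset
    }
    return cal.getTime();
  }
}
```

Two dates are then considered equal at the requested precision exactly when their truncated copies compare equal.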
diff --git a/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringHours.java b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringHours.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringHours.java @@ -0,0 +1,45 @@ +/* + * Created on Dec 21, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. + */ +package org.assertj.core.api.date; + +import java.util.Date; +import java.util.concurrent.TimeUnit; +import org.assertj.core.api.DateAssert; +import static org.mockito.Mockito.verify; + + +/** + * Tests for <code>{@link org.assertj.core.api.DateAssert#isEqualToIgnoringHours(java.util.Date)}</code>. + * + * @author William Delanoue + */ +public class DateAssert_isEqualToIgnoringHours extends AbstractDateAssertWithDateArg_Test { + + @Override + protected DateAssert assertionInvocationWithDateArg() { + return assertions.isEqualToIgnoringHours(otherDate); + } + + @Override + protected DateAssert assertionInvocationWithStringArg(String date) { + return assertions.isEqualToIgnoringHours(date); + } + + @Override + protected void verifyAssertionInvocation(Date date) { + verify(dates).assertIsEqualWithPrecision(getInfo(assertions), getActual(assertions), date, TimeUnit.HOURS); + } + +} diff --git a/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringMillis.java b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringMillis.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringMillis.java @@ -0,0 +1,45 @@ +/* + * Created on Dec 21, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. + */ +package org.assertj.core.api.date; + +import java.util.Date; +import java.util.concurrent.TimeUnit; +import org.assertj.core.api.DateAssert; +import static org.mockito.Mockito.verify; + + +/** + * Tests for <code>{@link org.assertj.core.api.DateAssert#isEqualToIgnoringMillis(java.util.Date)}</code>. 
+ * + * @author William Delanoue + */ +public class DateAssert_isEqualToIgnoringMillis extends AbstractDateAssertWithDateArg_Test { + + @Override + protected DateAssert assertionInvocationWithDateArg() { + return assertions.isEqualToIgnoringMillis(otherDate); + } + + @Override + protected DateAssert assertionInvocationWithStringArg(String date) { + return assertions.isEqualToIgnoringMillis(date); + } + + @Override + protected void verifyAssertionInvocation(Date date) { + verify(dates).assertIsEqualWithPrecision(getInfo(assertions), getActual(assertions), date, TimeUnit.MILLISECONDS); + } + +} diff --git a/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringMinutes.java b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringMinutes.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringMinutes.java @@ -0,0 +1,45 @@ +/* + * Created on Dec 21, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. + */ +package org.assertj.core.api.date; + +import java.util.Date; +import java.util.concurrent.TimeUnit; +import org.assertj.core.api.DateAssert; +import static org.mockito.Mockito.verify; + + +/** + * Tests for <code>{@link org.assertj.core.api.DateAssert#isEqualToIgnoringMinutes(java.util.Date)}</code>. + * + * @author William Delanoue + */ +public class DateAssert_isEqualToIgnoringMinutes extends AbstractDateAssertWithDateArg_Test { + + @Override + protected DateAssert assertionInvocationWithDateArg() { + return assertions.isEqualToIgnoringMinutes(otherDate); + } + + @Override + protected DateAssert assertionInvocationWithStringArg(String date) { + return assertions.isEqualToIgnoringMinutes(date); + } + + @Override + protected void verifyAssertionInvocation(Date date) { + verify(dates).assertIsEqualWithPrecision(getInfo(assertions), getActual(assertions), date, TimeUnit.MINUTES); + } + +} diff --git a/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringSeconds.java b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringSeconds.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/api/date/DateAssert_isEqualToIgnoringSeconds.java @@ -0,0 +1,45 @@ +/* + * Created on Dec 21, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. 
+ */ +package org.assertj.core.api.date; + +import java.util.Date; +import java.util.concurrent.TimeUnit; +import org.assertj.core.api.DateAssert; +import static org.mockito.Mockito.verify; + + +/** + * Tests for <code>{@link org.assertj.core.api.DateAssert#isEqualToIgnoringSeconds(java.util.Date)}</code>. + * + * @author William Delanoue + */ +public class DateAssert_isEqualToIgnoringSeconds extends AbstractDateAssertWithDateArg_Test { + + @Override + protected DateAssert assertionInvocationWithDateArg() { + return assertions.isEqualToIgnoringSeconds(otherDate); + } + + @Override + protected DateAssert assertionInvocationWithStringArg(String date) { + return assertions.isEqualToIgnoringSeconds(date); + } + + @Override + protected void verifyAssertionInvocation(Date date) { + verify(dates).assertIsEqualWithPrecision(getInfo(assertions), getActual(assertions), date, TimeUnit.SECONDS); + } + +} diff --git a/src/test/java/org/assertj/core/internal/DatesBaseTest.java b/src/test/java/org/assertj/core/internal/DatesBaseTest.java --- a/src/test/java/org/assertj/core/internal/DatesBaseTest.java +++ b/src/test/java/org/assertj/core/internal/DatesBaseTest.java @@ -68,6 +68,15 @@ protected static Date parseDatetime(String dateAsString) { return org.assertj.core.util.Dates.parseDatetime(dateAsString); } + /** + * Simply delegate to {@link org.assertj.core.util.Dates#parseDatetimeWithMs(String)}} + * @param dateAsString see {@link org.assertj.core.util.Dates#parseDatetimeWithMs(String)} } + * @return see {@link org.assertj.core.util.Dates#parseDatetimeWithMs(String)}} + */ + protected static Date parseDatetimeWithMs(String dateAsString) { + return org.assertj.core.util.Dates.parseDatetimeWithMs(dateAsString); + } + protected Comparator<?> comparatorForCustomComparisonStrategy() { return yearAndMonthComparator; } diff --git a/src/test/java/org/assertj/core/internal/dates/Dates_assertIsEqualWithPrecision_Test.java b/src/test/java/org/assertj/core/internal/dates/Dates_assertIsEqualWithPrecision_Test.java new file mode 100644 --- /dev/null +++ b/src/test/java/org/assertj/core/internal/dates/Dates_assertIsEqualWithPrecision_Test.java @@ -0,0 +1,142 @@ +/* + * Created on Dec 24, 2010 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the + * License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" + * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + * + * Copyright @2010-2011 the original author or authors. + */ +package org.assertj.core.internal.dates; + +import static org.assertj.core.error.ShouldBeEqualWithTimePrecision.shouldBeEqual; +import static org.assertj.core.test.TestData.someInfo; +import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown; +import static org.mockito.Mockito.verify; + +import java.util.Date; +import java.util.concurrent.TimeUnit; + +import org.assertj.core.api.AssertionInfo; +import org.assertj.core.internal.DatesBaseTest; +import org.junit.Test; + + +/** + * Tests for <code>{@link org.assertj.core.internal.Dates#assertIsEqualWithPrecision(org.assertj.core.api.AssertionInfo, java.util.Date, java.util.Date, java.util.concurrent.TimeUnit)}</code>. 
+ * + * @author William Delanoue + */ +public class Dates_assertIsEqualWithPrecision_Test extends DatesBaseTest { + + @Override + protected void initActualDate() { + actual = parseDatetimeWithMs("2011-09-27T12:23:35.999"); + } + + @Test + public void should_pass_if_ms_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T12:23:35.998"); + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.MILLISECONDS); + } + + @Test + public void should_pass_if_seconds_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T12:23:36.999"); + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.SECONDS); + } + + @Test + public void should_pass_if_minutes_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T12:24:35.999"); + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.MINUTES); + } + + @Test + public void should_pass_if_hour_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T13:23:35.999"); + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.HOURS); + } + + @Test + public void should_pass_if_day_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-28T12:23:35.999"); + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.DAYS); + } + + @Test + public void should_fail_if_ms_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T12:23:35.998"); + try { + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.MICROSECONDS); + } catch (AssertionError e) { + verify(failures).failure(info, shouldBeEqual(actual, other, TimeUnit.MICROSECONDS)); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + + @Test + public void should_fail_if_seconds_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T12:23:36.999"); + try { + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.MILLISECONDS); + } catch (AssertionError e) { + verify(failures).failure(info, shouldBeEqual(actual, other, TimeUnit.MILLISECONDS)); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + + @Test + public void should_fail_if_minutes_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T12:24:35.999"); + try { + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.SECONDS); + } catch (AssertionError e) { + verify(failures).failure(info, shouldBeEqual(actual, other, TimeUnit.SECONDS)); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + + @Test + public void should_fail_if_hour_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-27T13:23:35.999"); + try { + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.MINUTES); + } catch (AssertionError e) { + verify(failures).failure(info, shouldBeEqual(actual, other, TimeUnit.MINUTES)); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + + @Test + public void should_fail_if_day_not_equal() { + AssertionInfo info = someInfo(); + Date other = parseDatetimeWithMs("2011-09-28T12:23:35.999"); + try { + dates.assertIsEqualWithPrecision(info, actual, other, TimeUnit.HOURS); + } catch (AssertionError e) { + verify(failures).failure(info, shouldBeEqual(actual, other, TimeUnit.HOURS)); + return; + } + failBecauseExpectedAssertionErrorWasNotThrown(); + } + +}
New date assertions comparing dates at a given precision level. For example, one can compare two dates up to minutes precision: ``` java Date date1 = parseDatetime("2003-04-26T13:01:35"); Date date2 = parseDatetime("2003-04-26T13:02:00"); // OK : all date fields are the same up to minutes excluded assertThat(date1).isEqualToIgnoringMinutes(date2); // KO : fails as minute fields differ assertThat(date1).isEqualToIgnoringSeconds(date2); ```
2013-10-07T13:38:43Z
1.4
assertj/assertj
613
assertj__assertj-613
[ "611" ]
3118bf043099fba418bc908388afc0fbd25cf772
diff --git a/src/main/java/org/assertj/core/internal/Longs.java b/src/main/java/org/assertj/core/internal/Longs.java --- a/src/main/java/org/assertj/core/internal/Longs.java +++ b/src/main/java/org/assertj/core/internal/Longs.java @@ -73,7 +73,7 @@ public void assertIsCloseToPercentage(AssertionInfo info, Long actual, Long othe checkPercentageIsNotNull(percentage); checkNumberIsNotNull(other); - Offset<Double> calculatedOffset = offset(percentage.value * other / 100d); + Offset<Double> calculatedOffset = offset(abs(percentage.value * other / 100d)); Long absDiff = abs(other - actual); if (absDiff > calculatedOffset.value) diff --git a/src/main/java/org/assertj/core/internal/Numbers.java b/src/main/java/org/assertj/core/internal/Numbers.java --- a/src/main/java/org/assertj/core/internal/Numbers.java +++ b/src/main/java/org/assertj/core/internal/Numbers.java @@ -184,7 +184,7 @@ protected double absDiff(NUMBER actual, NUMBER expected) { } private Offset<Double> computeOffset(NUMBER referenceValue, Percentage percentage) { - return offset(percentage.value * referenceValue.doubleValue() / 100d); + return offset(abs(percentage.value * referenceValue.doubleValue() / 100d)); } } \ No newline at end of file diff --git a/src/main/java/org/assertj/core/internal/Shorts.java b/src/main/java/org/assertj/core/internal/Shorts.java --- a/src/main/java/org/assertj/core/internal/Shorts.java +++ b/src/main/java/org/assertj/core/internal/Shorts.java @@ -73,7 +73,7 @@ public void assertIsCloseToPercentage(AssertionInfo info, Short actual, Short ot assertNotNull(info, actual); checkPercentageIsNotNull(percentage); checkNumberIsNotNull(other); - Offset<Double> calculatedOffset = offset(percentage.value * other / 100d); + Offset<Double> calculatedOffset = offset(abs(percentage.value * other / 100d)); short absDiff = (short) abs(other - actual); if (absDiff > calculatedOffset.value) throw failures.failure(info, shouldBeEqualWithinPercentage(actual, other, percentage, absDiff));
diff --git a/src/test/java/org/assertj/core/internal/bigdecimals/BigDecimals_assertIsCloseToPercentage_Test.java b/src/test/java/org/assertj/core/internal/bigdecimals/BigDecimals_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/bigdecimals/BigDecimals_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/bigdecimals/BigDecimals_assertIsCloseToPercentage_Test.java @@ -28,10 +28,13 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.BigDecimalsBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; -public class BigDecimals_assertIsCloseToPercentage_Test extends BigDecimalsBaseTest { +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; - private static final BigDecimal TWO = new BigDecimal(2); +@RunWith(DataProviderRunner.class) +public class BigDecimals_assertIsCloseToPercentage_Test extends BigDecimalsBaseTest { @Test public void should_fail_if_actual_is_null() { @@ -59,16 +62,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { bigDecimals.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage(101)); } + // @format:off @Test - public void should_pass_if_difference_is_less_than_given_percentage() { - bigDecimals.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(1)); - bigDecimals.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(100)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(BigDecimal actual, BigDecimal other, Integer percentage) { + bigDecimals.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - bigDecimals.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(0)); - bigDecimals.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(50)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(BigDecimal actual, BigDecimal other, Integer percentage) { + bigDecimals.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test diff --git a/src/test/java/org/assertj/core/internal/bytes/Bytes_assertIsCloseToPercentage_Test.java b/src/test/java/org/assertj/core/internal/bytes/Bytes_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/bytes/Bytes_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/bytes/Bytes_assertIsCloseToPercentage_Test.java @@ -23,12 +23,16 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.BytesBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; + +@RunWith(DataProviderRunner.class) public class Bytes_assertIsCloseToPercentage_Test extends BytesBaseTest { private static final Byte ZERO = 0; private static final Byte ONE = 1; - private static final Byte TWO = 2; private static final Byte TEN = 10; @Test @@ -57,16 +61,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { bytes.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage(101)); } + // @format:off @Test - 
public void should_pass_if_difference_is_less_than_given_percentage() { - bytes.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(1)); - bytes.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(100)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(Byte actual, Byte other, Byte percentage) { + bytes.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - bytes.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(0)); - bytes.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(50)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(Byte actual, Byte other, Byte percentage) { + bytes.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test diff --git a/src/test/java/org/assertj/core/internal/doubles/Doubles_assertIsCloseToPercentage_Test.java b/src/test/java/org/assertj/core/internal/doubles/Doubles_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/doubles/Doubles_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/doubles/Doubles_assertIsCloseToPercentage_Test.java @@ -23,12 +23,16 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.DoublesBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; + +@RunWith(DataProviderRunner.class) public class Doubles_assertIsCloseToPercentage_Test extends DoublesBaseTest { private static final Double ZERO = 0d; private static final Double ONE = 1d; - private static final Double TWO = 2d; private static final Double TEN = 10d; @Test @@ -57,16 +61,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { doubles.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage(101.0)); } + // @format:off @Test - public void should_pass_if_difference_is_less_than_given_percentage() { - doubles.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(0.1)); - doubles.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(100.0)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(Double actual, Double other, Double percentage) { + doubles.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - doubles.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(ZERO)); - doubles.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(50.0)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(Double actual, Double other, Double percentage) { + doubles.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test diff --git a/src/test/java/org/assertj/core/internal/floats/Floats_assertIsCloseToPercentage_Test.java 
b/src/test/java/org/assertj/core/internal/floats/Floats_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/floats/Floats_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/floats/Floats_assertIsCloseToPercentage_Test.java @@ -23,12 +23,16 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.FloatsBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; + +@RunWith(DataProviderRunner.class) public class Floats_assertIsCloseToPercentage_Test extends FloatsBaseTest { private static final Float ZERO = 0f; private static final Float ONE = 1f; - private static final Float TWO = 2f; private static final Float TEN = 10f; @Test @@ -57,16 +61,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { floats.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage(101.0f)); } + // @format:off @Test - public void should_pass_if_difference_is_less_than_given_percentage() { - floats.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(0.1f)); - floats.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(100.0f)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(Float actual, Float other, Float percentage) { + floats.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - floats.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(ZERO)); - floats.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(50.0f)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(Float actual, Float other, Float percentage) { + floats.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test diff --git a/src/test/java/org/assertj/core/internal/integers/Integers_assertIsCloseToPercentage_Test.java b/src/test/java/org/assertj/core/internal/integers/Integers_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/integers/Integers_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/integers/Integers_assertIsCloseToPercentage_Test.java @@ -23,12 +23,16 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.IntegersBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; + +@RunWith(DataProviderRunner.class) public class Integers_assertIsCloseToPercentage_Test extends IntegersBaseTest { private static final Integer ZERO = 0; private static final Integer ONE = 1; - private static final Integer TWO = 2; private static final Integer TEN = 10; @Test @@ -57,16 +61,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { integers.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage(101)); } + // @format:off @Test - public void should_pass_if_difference_is_less_than_given_percentage() { - integers.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(1)); - 
integers.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(100)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(Integer actual, Integer other, Integer percentage) { + integers.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - integers.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(ZERO)); - integers.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(50)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(Integer actual, Integer other, Integer percentage) { + integers.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test diff --git a/src/test/java/org/assertj/core/internal/longs/Longs_assertIsCloseToPercentage_Test.java b/src/test/java/org/assertj/core/internal/longs/Longs_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/longs/Longs_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/longs/Longs_assertIsCloseToPercentage_Test.java @@ -15,6 +15,10 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.LongsBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; + +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; import static org.assertj.core.api.Assertions.withinPercentage; import static org.assertj.core.data.Percentage.withPercentage; @@ -24,11 +28,11 @@ import static org.assertj.core.util.FailureMessages.actualIsNull; import static org.mockito.Mockito.verify; +@RunWith(DataProviderRunner.class) public class Longs_assertIsCloseToPercentage_Test extends LongsBaseTest { private static final Long ZERO = 0L; private static final Long ONE = 1L; - private static final Long TWO = 2L; private static final Long TEN = 10L; @Test @@ -57,16 +61,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { longs.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage(101L)); } + // @format:off @Test - public void should_pass_if_difference_is_less_than_given_percentage() { - longs.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(1L)); - longs.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(100L)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(Long actual, Long other, Long percentage) { + longs.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - longs.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(ZERO)); - longs.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage(50L)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(Long actual, Long other, Long percentage) { + longs.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test diff --git 
a/src/test/java/org/assertj/core/internal/shorts/Shorts_assertIsCloseToPercentage_Test.java b/src/test/java/org/assertj/core/internal/shorts/Shorts_assertIsCloseToPercentage_Test.java --- a/src/test/java/org/assertj/core/internal/shorts/Shorts_assertIsCloseToPercentage_Test.java +++ b/src/test/java/org/assertj/core/internal/shorts/Shorts_assertIsCloseToPercentage_Test.java @@ -23,12 +23,16 @@ import org.assertj.core.api.AssertionInfo; import org.assertj.core.internal.ShortsBaseTest; import org.junit.Test; +import org.junit.runner.RunWith; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; + +@RunWith(DataProviderRunner.class) public class Shorts_assertIsCloseToPercentage_Test extends ShortsBaseTest { private static final Short ZERO = (short) 0; private static final Short ONE = (short) 1; - private static final Short TWO = (short) 2; private static final Short TEN = (short) 10; @Test @@ -57,16 +61,32 @@ public void should_fail_if_percentage_is_greater_than_one_hundred() { shorts.assertIsCloseToPercentage(someInfo(), ONE, ZERO, withPercentage((short) 101)); } + // @format:off @Test - public void should_pass_if_difference_is_less_than_given_percentage() { - shorts.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage((short) 1)); - shorts.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage((short) 100)); + @DataProvider({ + "1, 1, 1", + "1, 2, 100", + "-1, -1, 1", + "-1, -2, 100" + }) + // @format:on + public void should_pass_if_difference_is_less_than_given_percentage(Short actual, Short other, Short percentage) { + shorts.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } + // @format:off @Test - public void should_pass_if_difference_is_equal_to_given_percentage() { - shorts.assertIsCloseToPercentage(someInfo(), ONE, ONE, withPercentage(ZERO)); - shorts.assertIsCloseToPercentage(someInfo(), ONE, TWO, withPercentage((short) 50)); + @DataProvider({ + "1, 1, 0", + "2, 1, 100", + "1, 2, 50", + "-1, -1, 0", + "-2, -1, 100", + "-1, -2, 50" + }) + // @format:on + public void should_pass_if_difference_is_equal_to_given_percentage(Short actual, Short other, Short percentage) { + shorts.assertIsCloseToPercentage(someInfo(), actual, other, withPercentage(percentage)); } @Test
Assert "isCloseTo(double expected, Percentage p)" doesn't work for negative expected values I have two tests whether an actual value is within a given percentage of the expected value. The test for a positive expected value works without problems. The test for a negative expected value throws `IllegalArgumentException` My test methods are: ``` @Test public void checkPositiveNumber() { assertThat(11d).isCloseTo(10d, withinPercentage(10)); } @Test public void checkNegativeNumber() { assertThat(-11d).isCloseTo(-10d, withinPercentage(10)); } ``` I've tested only for doubles but I suspect the same problem exists for all numeric types.
True :crying_cat_face: We screwed up: the computed offset is negative, we should have used absolute values. Thanks for reporting this. Will be fixed in 2.4.0/3.4.0
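To make the root cause concrete, here is a worked example for the failing `checkNegativeNumber` test above, using the same computation as the patched `computeOffset` (before and after the `abs(...)` change):

```
without abs:  percentage.value * expected / 100  =  10 * (-10) / 100  =  -1    // negative offset -> the reported IllegalArgumentException
with abs:    |percentage.value * expected / 100| = |10 * (-10) / 100| =   1    // valid offset
check:        |expected - actual| = |-10 - (-11)| = 1,  and 1 <= 1             // so isCloseTo(-10d, withinPercentage(10)) passes for actual = -11d
```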
2016-02-06T11:44:56Z
2.3
nodejs/undici
4,178
nodejs__undici-4178
[ "4173" ]
0daba937990d92f670b858216e4715ec9f92da44
diff --git a/lib/interceptor/cache.js b/lib/interceptor/cache.js --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -20,7 +20,12 @@ const { AbortError } = require('../core/errors.js') */ function needsRevalidation (result, cacheControlDirectives) { if (cacheControlDirectives?.['no-cache']) { - // Always revalidate requests with the no-cache directive + // Always revalidate requests with the no-cache request directive + return true + } + + if (result.cacheControlDirectives?.['no-cache'] && !Array.isArray(result.cacheControlDirectives['no-cache'])) { + // Always revalidate requests with unqualified no-cache response directive return true }
diff --git a/test/interceptors/cache.js b/test/interceptors/cache.js --- a/test/interceptors/cache.js +++ b/test/interceptors/cache.js @@ -128,6 +128,45 @@ describe('Cache Interceptor', () => { } }) + test('revalidates reponses with no-cache directive, regardless of cacheByDefault', async () => { + let requestCount = 0 + const server = createServer({ joinDuplicateHeaders: true }, (req, res) => { + ++requestCount + res.setHeader('Vary', 'Accept-Encoding') + res.setHeader('cache-control', 'no-cache') + res.end(`Request count: ${requestCount}`) + }).listen(0) + + after(async () => { + server.close() + + await once(server, 'close') + }) + + await once(server, 'listening') + + const client = new Client(`http://localhost:${server.address().port}`) + .compose(interceptors.cache({ + cacheByDefault: 1000 + })) + + const request = { + origin: 'localhost', + method: 'GET', + path: '/' + } + + const res1 = await client.request(request) + const body1 = await res1.body.text() + strictEqual(body1, 'Request count: 1') + strictEqual(requestCount, 1) + + const res2 = await client.request(request) + const body2 = await res2.body.text() + strictEqual(body2, 'Request count: 2') + strictEqual(requestCount, 2) + }) + test('stale responses are revalidated before deleteAt (if-modified-since)', async () => { const clock = FakeTimers.install({ shouldClearNativeTimers: true
Cache prioritises cacheByDefault/max-age over `no-cache` response directive ## Bug Description When `cacheByDefault` is set, no revalidation occurs for a response which had the `no-cache` directive. ## Reproducible By ```js const http = require('node:http'); const server = http.createServer((req, res) => { console.log('request received'); req.on('data', () => {}); req.on('end', () => { res.setHeader('cache-control', 'no-cache'); res.end('OK'); console.log('Closing server...'); server.close(() => console.log('Server closed.')); }); }); const port = 1234; server.listen(port, async () => { console.log('server listening on port', port); const undici = require('undici'); const dispatcher = new undici.Agent().compose(undici.interceptors.cache({ cacheByDefault: 1000, })); const res1 = await undici.fetch(`http://localhost:${port}/`, { dispatcher }); console.log('res1.status:', res1.status); await new Promise(resolve => setTimeout(resolve, 200)); const res2 = await undici.fetch(`http://localhost:${port}/`, { dispatcher }); console.log('res2.status:', res2.status); }); ``` ## Expected Behavior https://datatracker.ietf.org/doc/html/rfc9111#name-no-cache-2: > The no-cache response directive, in its unqualified form (without an argument), indicates that the response **MUST NOT** be used to satisfy any other request without forwarding it for validation and receiving a successful response ## Logs & Screenshots ``` server listening on port 1234 request received Closing server... Server closed. res1.status: 200 res2.status: 200 ``` ## Environment node: v22.12.0 undici: 7.8.0 ### Additional context <!-- Add any other context about the problem here. -->
If this is indeed a bug, something like this should fix it: ```js --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -20,7 +20,12 @@ const { AbortError } = require('../core/errors.js') */ function needsRevalidation (result, cacheControlDirectives) { if (cacheControlDirectives?.['no-cache']) { - // Always revalidate requests with the no-cache directive + // Always revalidate requests with the no-cache request directive + return true + } + + if (result.cacheControlDirectives?.['no-cache']) { + // Always revalidate requests with the no-cache response directive return true } ```
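For reference, the merged fix (see the patch above) is slightly narrower than this suggestion: it only forces revalidation for the *unqualified* `no-cache` response directive, since RFC 9111 only mandates unconditional revalidation for that form. A minimal sketch of the check (parameter names simplified from the actual `needsRevalidation` helper), assuming — as the patch implies — that the parser stores a qualified `no-cache="field"` directive as an array of field names:

```js
function needsRevalidation (result, requestCacheControl) {
  // no-cache request directive: always revalidate
  if (requestCacheControl?.['no-cache']) {
    return true
  }

  const responseNoCache = result.cacheControlDirectives?.['no-cache']
  // unqualified no-cache response directive: always revalidate;
  // a qualified no-cache (stored as an array of header names) does not force it
  if (responseNoCache && !Array.isArray(responseNoCache)) {
    return true
  }

  return false
}
```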
2025-04-23T05:33:43Z
7.8
nodejs/undici
4,131
nodejs__undici-4131
[ "3904" ]
67adf4237ea78c00e9db931c6638366d661119a1
diff --git a/lib/interceptor/cache.js b/lib/interceptor/cache.js --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -233,7 +233,7 @@ function handleResult ( } let headers = { - ...normaliseHeaders(opts), + ...opts.headers, 'if-modified-since': new Date(result.cachedAt).toUTCString() } @@ -319,6 +319,11 @@ module.exports = (opts = {}) => { return dispatch(opts, handler) } + opts = { + ...opts, + headers: normaliseHeaders(opts) + } + const reqCacheControl = opts.headers?.['cache-control'] ? parseCacheControlHeader(opts.headers['cache-control']) : undefined diff --git a/lib/util/cache.js b/lib/util/cache.js --- a/lib/util/cache.js +++ b/lib/util/cache.js @@ -12,13 +12,11 @@ function makeCacheKey (opts) { throw new Error('opts.origin is undefined') } - const headers = normaliseHeaders(opts) - return { origin: opts.origin.toString(), method: opts.method, path: opts.path, - headers + headers: opts.headers } }
diff --git a/test/issue-3904.js b/test/issue-3904.js new file mode 100644 --- /dev/null +++ b/test/issue-3904.js @@ -0,0 +1,58 @@ +const { describe, test, after } = require('node:test') +const assert = require('node:assert') +const { createServer } = require('node:http') +const { once } = require('node:events') +const MemoryCacheStore = require('../lib/cache/memory-cache-store.js') +const { Agent, interceptors, request, setGlobalDispatcher } = require('..') + +describe('Cache with cache-control: no-store request header', () => { + [ + 'CACHE-CONTROL', + 'cache-control', + 'Cache-Control' + ].forEach(headerName => { + test(`should not cache response for request with header: "${headerName}: no-store`, async () => { + const store = new MemoryCacheStore() + let requestCount = 0 + const server = createServer({ joinDuplicateHeaders: true }, (req, res) => { + ++requestCount + res.setHeader('Vary', 'Accept-Encoding') + res.setHeader('Cache-Control', 'max-age=60') + res.end(`Request count: ${requestCount}`) + }) + + after(async () => { + server.close() + + await once(server, 'close') + }) + + await new Promise(resolve => server.listen(0, resolve)) + const { port } = server.address() + const url = `http://localhost:${port}` + + const agent = new Agent() + setGlobalDispatcher( + agent.compose( + interceptors.cache({ + store, + cacheByDefault: 1000, + methods: ['GET'] + }) + ) + ) + + const res1 = await request(url, { headers: { [headerName]: 'no-store' } }) + const body1 = await res1.body.text() + assert.strictEqual(body1, 'Request count: 1') + assert.strictEqual(requestCount, 1) + + const res2 = await request(url) + const body2 = await res2.body.text() + assert.strictEqual(body2, 'Request count: 2') + assert.strictEqual(requestCount, 2) + + await new Promise(resolve => server.close(resolve)) + }) + }) +})
Cache interceptor does not process capitalized `Cache-Control` header in request ## Bug Description The cache interceptor does not process the `Cache-Control: no-store` request header if the header name is capitalized. In the code we can see that you look for the lowercased header name only. You should convert header names to lowercase before searching for `cache-control`. ## Reproducible By Please see above. ## Expected Behavior Please see above. ## Logs & Screenshots <!-- If applicable, add screenshots to help explain your problem, or alternatively add your console logs here. --> ## Environment <!-- This is just your OS and environment information [e.g. Ubuntu 18.04 LTS, Node v14.14.0] --> undici v7 ### Additional context <!-- Add any other context about the problem here. -->
Can you add a reproduction? It's working correctly here. ```javascript import { Agent, fetch, interceptors } from "undici"; const dispatcher = new Agent().compose( interceptors.cache() ); for ( let n = 0; n < 2; n++ ) { const res = await fetch( "https://httpbingo.org/cache/60", { dispatcher, "headers": { "X-Data": new Date().toISOString(), "Cache-Control": "no-store", }, } ); console.log( ( await res.json() ).headers[ "X-Data" ] ); } ``` 1. You can see that the data has not changed - this means the cached response was used. 2. If you change the `Cache-Control` header to lower case, the cache is bypassed. Ah right, thanks! @mcollina this is related to that "normalizeHeaders" function I was working on that would always normalize the user-passed headers. I think we should probably have a "pre-interceptor" that always does this to make things easier down the chain. Can't this be done on `DispatcherBase#dispatch` directly?
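A minimal sketch of the lower-casing discussed above (illustrative only — the actual fix, visible in the patch above, uses the `normaliseHeaders` helper from `lib/util/cache.js` and also handles array-form headers):

```js
// Illustrative only: lower-case all header names before the cache
// interceptor looks anything up, so 'Cache-Control' and 'cache-control'
// are treated the same. Handles object-form headers only.
function lowerCaseHeaderNames (headers = {}) {
  const normalised = {}
  for (const [name, value] of Object.entries(headers)) {
    normalised[name.toLowerCase()] = value
  }
  return normalised
}

// e.g. lowerCaseHeaderNames({ 'Cache-Control': 'no-store' })
//      -> { 'cache-control': 'no-store' }
```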
2025-04-01T15:29:49Z
7.6
nodejs/undici
4,112
nodejs__undici-4112
[ "4103" ]
c584721dbc5fa6abdf36433b22c478635e1ccc99
diff --git a/lib/interceptor/cache.js b/lib/interceptor/cache.js --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -6,7 +6,7 @@ const util = require('../core/util') const CacheHandler = require('../handler/cache-handler') const MemoryCacheStore = require('../cache/memory-cache-store') const CacheRevalidationHandler = require('../handler/cache-revalidation-handler') -const { assertCacheStore, assertCacheMethods, makeCacheKey, parseCacheControlHeader } = require('../util/cache.js') +const { assertCacheStore, assertCacheMethods, makeCacheKey, normaliseHeaders, parseCacheControlHeader } = require('../util/cache.js') const { AbortError } = require('../core/errors.js') /** @@ -233,7 +233,7 @@ function handleResult ( } let headers = { - ...opts.headers, + ...normaliseHeaders(opts), 'if-modified-since': new Date(result.cachedAt).toUTCString() } diff --git a/lib/util/cache.js b/lib/util/cache.js --- a/lib/util/cache.js +++ b/lib/util/cache.js @@ -12,7 +12,21 @@ function makeCacheKey (opts) { throw new Error('opts.origin is undefined') } - /** @type {Record<string, string[] | string>} */ + const headers = normaliseHeaders(opts) + + return { + origin: opts.origin.toString(), + method: opts.method, + path: opts.path, + headers + } +} + +/** + * @param {Record<string, string[] | string>} + * @return {Record<string, string[] | string>} + */ +function normaliseHeaders (opts) { let headers if (opts.headers == null) { headers = {} @@ -38,12 +52,7 @@ function makeCacheKey (opts) { throw new Error('opts.headers is not an object') } - return { - origin: opts.origin.toString(), - method: opts.method, - path: opts.path, - headers - } + return headers } /** @@ -350,6 +359,7 @@ function assertCacheMethods (methods, name = 'CacheMethods') { module.exports = { makeCacheKey, + normaliseHeaders, assertCacheKey, assertCacheValue, parseCacheControlHeader,
diff --git a/test/interceptors/cache.js b/test/interceptors/cache.js --- a/test/interceptors/cache.js +++ b/test/interceptors/cache.js @@ -135,22 +135,30 @@ describe('Cache Interceptor', () => { let requestsToOrigin = 0 let revalidationRequests = 0 + let serverError const server = createServer({ joinDuplicateHeaders: true }, (req, res) => { res.setHeader('date', 0) res.setHeader('cache-control', 's-maxage=1, stale-while-revalidate=10') - if (req.headers['if-modified-since']) { - revalidationRequests++ + try { + if (req.headers['if-modified-since']) { + equal(req.headers['if-modified-since'].length, 29) + + revalidationRequests++ - if (revalidationRequests === 2) { - res.end('updated') + if (revalidationRequests === 3) { + res.end('updated') + } else { + res.statusCode = 304 + res.end() + } } else { - res.statusCode = 304 - res.end() + requestsToOrigin++ + res.end('asd') } - } else { - requestsToOrigin++ - res.end('asd') + } catch (err) { + serverError = err + res.end() } }).listen(0) @@ -188,6 +196,10 @@ describe('Cache Interceptor', () => { // Send initial request. This should reach the origin { const res = await client.request(request) + if (serverError) { + throw serverError + } + equal(requestsToOrigin, 1) equal(revalidationRequests, 0) strictEqual(await res.body.text(), 'asd') @@ -198,16 +210,42 @@ describe('Cache Interceptor', () => { // Response is now stale, the origin should get a revalidation request { const res = await client.request(request) + if (serverError) { + throw serverError + } + equal(requestsToOrigin, 1) equal(revalidationRequests, 1) strictEqual(await res.body.text(), 'asd') } + // Response is still stale, extra header should be overwritten, and the + // origin should get a revalidation request + { + const res = await client.request({ + ...request, + headers: { + 'if-modified-SINCE': 'Thu, 01 Jan 1970 00:00:00 GMT' + } + }) + if (serverError) { + throw serverError + } + + equal(requestsToOrigin, 1) + equal(revalidationRequests, 2) + strictEqual(await res.body.text(), 'asd') + } + // Response is still stale, but revalidation should fail now. { const res = await client.request(request) + if (serverError) { + throw serverError + } + equal(requestsToOrigin, 1) - equal(revalidationRequests, 2) + equal(revalidationRequests, 3) strictEqual(await res.body.text(), 'updated') } }) @@ -230,7 +268,7 @@ describe('Cache Interceptor', () => { equal(req.headers['if-none-match'], '"asd123"') - if (revalidationRequests === 2) { + if (revalidationRequests === 3) { res.end('updated') } else { res.statusCode = 304 @@ -296,6 +334,24 @@ describe('Cache Interceptor', () => { strictEqual(await res.body.text(), 'asd') } + // Response is still stale, extra headers should be overwritten, and the + // origin should get a revalidation request + { + const res = await client.request({ + ...request, + headers: { + 'if-NONE-match': '"nonsense-etag"' + } + }) + if (serverError) { + throw serverError + } + + equal(requestsToOrigin, 1) + equal(revalidationRequests, 2) + strictEqual(await res.body.text(), 'asd') + } + // Response is still stale, but revalidation should fail now. 
{ const res = await client.request(request) @@ -304,7 +360,7 @@ describe('Cache Interceptor', () => { } equal(requestsToOrigin, 1) - equal(revalidationRequests, 2) + equal(revalidationRequests, 3) strictEqual(await res.body.text(), 'updated') } }) @@ -327,13 +383,13 @@ describe('Cache Interceptor', () => { if (ifNoneMatch) { revalidationRequests++ notEqual(req.headers.a, undefined) - notEqual(req.headers.b, undefined) + notEqual(req.headers['b-mixed-case'], undefined) res.statusCode = 304 res.end() } else { requestsToOrigin++ - res.setHeader('vary', 'a, b') + res.setHeader('vary', 'a, B-MIXED-CASe') res.setHeader('etag', '"asd"') res.end('asd') } @@ -360,15 +416,17 @@ describe('Cache Interceptor', () => { const request = { origin: 'localhost', path: '/', - method: 'GET', - headers: { - a: 'asd', - b: 'asd' - } + method: 'GET' } { - const response = await client.request(request) + const response = await client.request({ + ...request, + headers: { + a: 'asd', + 'b-Mixed-case': 'asd' + } + }) if (serverError) { throw serverError } @@ -380,7 +438,13 @@ describe('Cache Interceptor', () => { clock.tick(1500) { - const response = await client.request(request) + const response = await client.request({ + ...request, + headers: { + a: 'asd', + 'B-mixed-CASE': 'asd' + } + }) if (serverError) { throw serverError }
Cache can add duplicate if-none-match, if-modified-since & varied headers (case-sensitivity) ## Bug Description The code at https://github.com/nodejs/undici/blob/e43d01552adf78e57211f188ad6ce0f0a0529ab7/lib/interceptor/cache.js#L235-L249 will overwrite lower-cased user-defined headers, but duplicate non-lower-cased headers. ## Reproducible By ```js import { Agent, fetch, interceptors } from 'undici'; import express from 'express'; const app = express(); app.get('/', (req, res) => { res.set('Cache-Control', 'max-age=1'); res.set('ETag', '"asdf1234"'); res.send({ requestHeaders:req.headers }); }); const dispatcher = new Agent().compose(interceptors.cache({ cacheByDefault: Number.MAX_SAFE_INTEGER, type: 'private', })); let prevEtag; const s = app.listen(4444, async () => { for(let i=1; i<5; ++i) { const headers = {}; if(i == 2) headers['If-None-Match'] = prevEtag; if(i == 3) headers['if-none-match'] = prevEtag; const res = await fetch('http://localhost:4444', { dispatcher, headers }); prevEtag = res.headers.get('etag'); console.log(i, 'res.body:', (await res.json()).requestHeaders['if-none-match']); await new Promise(resolve => setTimeout(resolve, 2000)); } s.close(); }); ``` ### Output: ``` 1 res.body: undefined 2 res.body: "asdf1234", "asdf1234" 3 res.body: "asdf1234" 4 res.body: "asdf1234" ``` ## Expected Behavior <!-- A clear and concise description of what you expected to happen. --> Either: 1. cache code defers to user-defined header values, or 2. cache code consistently overwrites user-defined header values ## Logs & Screenshots <!-- If applicable, add screenshots to help explain your problem, or alternatively add your console logs here. --> ## Environment <!-- This is just your OS and environment information [e.g. Ubuntu 18.04 LTS, Node v14.14.0] --> ```sh $ echo "node: $(node --version)"; echo "undici: $(jq -r .version ./node_modules/undici/package.json)"; echo "express: $(jq -r .version ./node_modules/express/package.json)" node: v22.14.0 undici: 7.5.0 express: 4.21.2 ``` ### Additional context <!-- Add any other context about the problem here. --> There are some other issues relating to cache & case-sensitivity of headers: * https://github.com/nodejs/undici/issues/3904 * https://github.com/nodejs/undici/pull/3990 * https://github.com/nodejs/undici/pull/4031
Thanks for the report! > cache code consistently overwrites user-defined header values I believe this should be the way to move forward, as these headers are handled by the interceptor itself and overriding them can lead to undefined behaviour. Would you like to send a PR to address that? > Would you like to send a PR to address that? I'll take a look :+1:
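The eventual fix (see the patch above) follows that direction: user headers are normalised to lower case first, and the interceptor's conditional header is spread in last so it always wins, which also prevents the case-differing duplicates shown in the report. A rough sketch of the idea, not the exact undici code:

```js
// Rough sketch: normalise user header names, then let the interceptor's
// revalidation header overwrite any user-supplied variant.
function buildRevalidationHeaders (userHeaders, cachedResult) {
  const headers = {}
  for (const [name, value] of Object.entries(userHeaders ?? {})) {
    headers[name.toLowerCase()] = value
  }

  // The interceptor-owned header goes last, so 'If-Modified-Since' /
  // 'if-modified-since' from the caller can never end up duplicated
  // alongside it. The same reasoning applies to 'if-none-match' when
  // the cached response carried an etag.
  headers['if-modified-since'] = new Date(cachedResult.cachedAt).toUTCString()

  return headers
}
```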
2025-03-21T08:22:05Z
7.5
nodejs/undici
4,088
nodejs__undici-4088
[ "3895" ]
ef276d4a296b99f547ed563a11e74ae71bc84bc1
diff --git a/lib/dispatcher/pool.js b/lib/dispatcher/pool.js --- a/lib/dispatcher/pool.js +++ b/lib/dispatcher/pool.js @@ -70,6 +70,20 @@ class Pool extends PoolBase { ? { ...options.interceptors } : undefined this[kFactory] = factory + + this.on('connectionError', (origin, targets, error) => { + // If a connection error occurs, we remove the client from the pool, + // and emit a connectionError event. They will not be re-used. + // Fixes https://github.com/nodejs/undici/issues/3895 + for (const target of targets) { + // Do not use kRemoveClient here, as it will close the client, + // but the client cannot be closed in this state. + const idx = this[kClients].indexOf(target) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + } + }) } [kGetDispatcher] () {
diff --git a/test/pool-connection-error-memory-leak.js b/test/pool-connection-error-memory-leak.js new file mode 100644 --- /dev/null +++ b/test/pool-connection-error-memory-leak.js @@ -0,0 +1,158 @@ +'use strict' + +const { test } = require('node:test') +const assert = require('node:assert') +const { Pool } = require('..') +const { createServer } = require('node:http') +const { kClients } = require('../lib/dispatcher/pool-base') + +// This test verifies that clients are properly removed from the pool when they encounter connection errors, +// which is the fix implemented for issue #3895 (memory leak with connection errors) +test('Pool client count does not grow on repeated connection errors', async (t) => { + // Setup a pool pointing to a non-existent server + const pool = new Pool('http://localhost:1', { + connections: 10, + connectTimeout: 100, // Short timeout to speed up the test + bodyTimeout: 100, + headersTimeout: 100 + }) + + try { + const clientCounts = [] + + // Track initial client count + clientCounts.push(pool[kClients].length) + + // Make several requests that will fail with connection errors + const requests = 5 + for (let i = 0; i < requests; i++) { + try { + await pool.request({ + path: `/${i}`, + method: 'GET' + }) + assert.fail('Request should have failed with a connection error') + } catch (err) { + // We expect connection errors, but the error might be wrapped + assert.ok( + err.code === 'ECONNREFUSED' || + err.cause?.code === 'ECONNREFUSED' || + err.code === 'UND_ERR_CONNECT', + `Expected connection error but got: ${err.message} (${err.code})` + ) + } + + // Track client count after each request + clientCounts.push(pool[kClients].length) + + // Small delay to allow for client cleanup + await new Promise(resolve => setTimeout(resolve, 10)) + } + + // The key test: verify that client count doesn't increase monotonically, + // which would indicate the memory leak that was fixed + const maxCount = Math.max(...clientCounts) + assert.ok( + clientCounts[clientCounts.length - 1] <= maxCount, + `Client count should not increase continuously. Counts: ${clientCounts.join(', ')}` + ) + + // Ensure the last two counts are similar (stabilized) + const lastCount = clientCounts[clientCounts.length - 1] + const secondLastCount = clientCounts[clientCounts.length - 2] + + assert.ok( + Math.abs(lastCount - secondLastCount) <= 1, + `Client count should stabilize. Last counts: ${secondLastCount}, ${lastCount}` + ) + + // Additional verification: make many more requests to check for significant growth + const moreRequests = 10 + const startCount = pool[kClients].length + + for (let i = 0; i < moreRequests; i++) { + try { + await pool.request({ + path: `/more-${i}`, + method: 'GET' + }) + } catch (err) { + // Expected error + } + + // Small delay to allow for client cleanup + await new Promise(resolve => setTimeout(resolve, 10)) + } + + const endCount = pool[kClients].length + + // The maximum tolerable growth - some growth may occur due to timing issues, + // but it should be limited and not proportional to the number of requests + const maxGrowth = 3 + + assert.ok( + endCount - startCount <= maxGrowth, + `Client count should not grow significantly after many failed requests. 
Start: ${startCount}, End: ${endCount}` + ) + } finally { + await pool.close() + } +}) + +// This test specifically verifies the fix in pool-base.js for connectionError event +test('Pool clients are removed on connectionError event', async (t) => { + // Create a server we'll use to track connection events + const server = createServer((req, res) => { + res.writeHead(200, { 'Content-Type': 'text/plain' }) + res.end('ok') + }) + + await new Promise(resolve => server.listen(0, resolve)) + const port = server.address().port + + const pool = new Pool(`http://localhost:${port}`, { + connections: 3 // Small pool to make testing easier + }) + + try { + // Make an initial successful request to create a client + await pool.request({ + path: '/', + method: 'GET' + }) + + // Save the initial number of clients + const initialCount = pool[kClients].length + assert.ok(initialCount > 0, 'Should have at least one client after a successful request') + + // Manually trigger a connectionError on all clients + for (const client of [...pool[kClients]]) { + client.emit('connectionError', 'origin', [client], new Error('Simulated connection error')) + } + + // Allow some time for the event to be processed + await new Promise(resolve => setTimeout(resolve, 50)) + + // After the fix, all clients should be removed when they emit a connectionError + assert.strictEqual( + pool[kClients].length, + 0, + 'All clients should be removed from pool after connectionError events' + ) + + // Make another request to verify the pool can create new clients + await pool.request({ + path: '/after-error', + method: 'GET' + }) + + // Verify new clients were created + assert.ok( + pool[kClients].length > 0, + 'Pool should create new clients after previous ones were removed' + ) + } finally { + await pool.close() + await new Promise(resolve => server.close(resolve)) + } +}) diff --git a/test/tls-cert-leak.js b/test/tls-cert-leak.js new file mode 100644 --- /dev/null +++ b/test/tls-cert-leak.js @@ -0,0 +1,209 @@ +'use strict' + +const { test } = require('node:test') +const assert = require('node:assert') +const { tspl } = require('@matteo.collina/tspl') +const { fetch } = require('..') +const https = require('node:https') +const fs = require('node:fs') +const path = require('node:path') +const { closeServerAsPromise } = require('./utils/node-http') + +const hasGC = typeof global.gc !== 'undefined' + +// This test verifies that there is no memory leak when handling TLS certificate errors. +// It simulates the error by using a server with a self-signed certificate. +test('no memory leak with TLS certificate errors', { timeout: 20000 }, async (t) => { + if (!hasGC) { + throw new Error('gc is not available. 
Run with \'--expose-gc\'.') + } + + const { ok } = tspl(t, { plan: 1 }) + + // Create HTTPS server with self-signed certificate + const serverOptions = { + key: fs.readFileSync(path.join(__dirname, 'fixtures', 'key.pem')), + cert: fs.readFileSync(path.join(__dirname, 'fixtures', 'cert.pem')) + } + + // Create a server that always responds with a simple message + const server = https.createServer(serverOptions, (req, res) => { + res.writeHead(200) + res.end('test response') + }) + + // Start server on a random port + await new Promise(resolve => server.listen(0, resolve)) + const serverUrl = `https://localhost:${server.address().port}` + + t.after(closeServerAsPromise(server)) + + // Function to make a request that will trigger a certificate error + async function makeRequest (i) { + try { + // The request will fail with CERT_SIGNATURE_FAILURE or similar + // because we're using a self-signed certificate and not telling + // Node.js to accept it + const res = await fetch(`${serverUrl}/request-${i}`, { + signal: AbortSignal.timeout(2000) // Short timeout to prevent hanging + }) + const text = await res.text() + return { status: res.status, text } + } catch (e) { + // In real code, without the fix, this would leak memory + if (e?.cause?.code === 'CERT_SIGNATURE_FAILURE' || + e?.cause?.code === 'DEPTH_ZERO_SELF_SIGNED_CERT' || + e?.cause?.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + return { status: 524, text: 'Certificate Error' } + } + // Return for any other error to avoid test interruption + return { status: 500, text: e.message } + } + } + + // Counter for completed requests + let complete = 0 + const requestCount = 400 + + // Track memory usage + const measurements = [] + let baselineMemory = 0 + + // Process a batch of requests + async function processBatch (start, batchSize) { + const promises = [] + const end = Math.min(start + batchSize, requestCount) + + for (let i = start; i < end; i++) { + promises.push(makeRequest(i)) + } + + await Promise.all(promises) + complete += promises.length + + // Measure memory after each batch + if (complete % 50 === 0 || complete === end) { + // Run GC multiple times to get more stable readings + global.gc() + await new Promise(resolve => setTimeout(resolve, 50)) + global.gc() + + const memUsage = process.memoryUsage() + + // Establish baseline after first batch + if (measurements.length === 0) { + baselineMemory = memUsage.heapUsed + } + + measurements.push({ + complete, + heapUsed: memUsage.heapUsed + }) + + console.log(`Completed ${complete}/${requestCount}: Heap: ${Math.round(memUsage.heapUsed / 1024 / 1024)}MB`) + + // Check memory trend after we have enough data + if (measurements.length >= 4) { + const hasLeak = checkMemoryTrend() + if (hasLeak) { + return true // Indicates a leak was detected + } + } + } + + return false // No leak detected + } + + // Main test logic + async function runTest () { + const batchSize = 50 + + for (let i = 0; i < requestCount; i += batchSize) { + const leakDetected = await processBatch(i, batchSize) + if (leakDetected) { + // If a leak is detected, fail the test + assert.fail('Memory leak detected: heap usage is consistently increasing at a significant rate') + return + } + + // Check if we have sufficient measurements or have done 350 requests + if (measurements.length >= 7 || complete >= 350) { + break + } + } + + // Final check + const finalCheckResult = finalMemoryCheck() + if (finalCheckResult) { + assert.fail(`Memory leak detected: ${finalCheckResult}`) + } else { + ok(true, 'Memory usage has stabilized') + 
} + } + + // Check if memory usage has a concerning trend + function checkMemoryTrend () { + // Calculate memory growth between each measurement + const growthRates = [] + for (let i = 1; i < measurements.length; i++) { + const prev = measurements[i - 1].heapUsed + const current = measurements[i].heapUsed + growthRates.push((current - prev) / prev) + } + + // Calculate growth from baseline + const totalGrowthFromBaseline = (measurements[measurements.length - 1].heapUsed - baselineMemory) / baselineMemory + + // Calculate average growth rate + const avgGrowthRate = growthRates.reduce((sum, rate) => sum + rate, 0) / growthRates.length + + console.log(`Growth from baseline: ${(totalGrowthFromBaseline * 100).toFixed(2)}%`) + console.log(`Average growth rate: ${(avgGrowthRate * 100).toFixed(2)}%`) + console.log(`Growth rates: ${growthRates.map(r => (r * 100).toFixed(2) + '%').join(', ')}`) + + // Only flag as leak if all conditions are met: + // 1. Consistent growth (majority of measurements show growth) + // 2. Average growth rate is significant (>2%) + // 3. Total growth from baseline is significant (>20%) + + // Count how many positive growth rates we have + const positiveGrowthRates = growthRates.filter(rate => rate > 0.01).length + + return ( + positiveGrowthRates >= Math.ceil(growthRates.length * 0.75) && // 75% of measurements show growth >1% + avgGrowthRate > 0.02 && // Average growth >2% + totalGrowthFromBaseline > 0.2 // Total growth >20% + ) + } + + // Final memory check with adjusted requirements + function finalMemoryCheck () { + if (measurements.length < 4) return false + + // Calculate growth from baseline to the last measurement + const totalGrowthFromBaseline = (measurements[measurements.length - 1].heapUsed - baselineMemory) / baselineMemory + console.log(`Final growth from baseline: ${(totalGrowthFromBaseline * 100).toFixed(2)}%`) + + // Calculate final slope over the last 150 requests + const lastMeasurements = measurements.slice(-3) + const finalSlope = (lastMeasurements[2].heapUsed - lastMeasurements[0].heapUsed) / + (lastMeasurements[2].complete - lastMeasurements[0].complete) + + console.log(`Final memory slope: ${finalSlope.toFixed(2)} bytes per request`) + + // Only consider it a leak if: + // 1. Total growth is very significant (>25%) + if (totalGrowthFromBaseline > 0.25) { + return `Excessive memory growth of ${(totalGrowthFromBaseline * 100).toFixed(2)}%` + } + + // 2. Memory is still growing rapidly at the end (>2000 bytes per request) + if (finalSlope > 2000) { + return `Memory still growing rapidly at ${finalSlope.toFixed(2)} bytes per request` + } + + return false + } + + await runTest() +})
fetch memory leak ## Bug Description `undici.fetch()` continues to use more memory the more requests made ## Reproducible By Run `npm add undici@7.0.0` to install undici and then run the following code: ```js const { fetch } = require('undici'); async function main() { for (var i = 0; i <= 5000; i++) { const res = await fetch(`https://jsonplaceholder.typicode.com/photos/${i}`); if (i % 50 === 0) { console.log({ i: i.toString().padStart(4, '0'), status: res.status, mem: process.memoryUsage.rss().toString().padStart(9, '0') }); } } } main().catch(console.error); ``` ## Expected Behavior Sometime during the 5000 requests, we should see memory decrease, preferably back down to the initial size around 90MB. Instead we see memory continue to increase up to 136MB. ## Logs & Screenshots Here is the resulting output: ```js { i: '0000', status: 404, mem: '069959680' } { i: '0050', status: 200, mem: '090046464' } { i: '0100', status: 200, mem: '093208576' } { i: '0150', status: 200, mem: '095010816' } { i: '0200', status: 200, mem: '089260032' } { i: '0250', status: 200, mem: '092127232' } { i: '0300', status: 200, mem: '094109696' } { i: '0350', status: 200, mem: '096436224' } { i: '0400', status: 200, mem: '098844672' } { i: '0450', status: 200, mem: '099696640' } { i: '0500', status: 200, mem: '099713024' } { i: '0550', status: 200, mem: '100122624' } { i: '0600', status: 200, mem: '100597760' } { i: '0650', status: 200, mem: '102858752' } { i: '0700', status: 200, mem: '104398848' } { i: '0750', status: 200, mem: '109363200' } { i: '0800', status: 200, mem: '109363200' } { i: '0850', status: 200, mem: '109379584' } { i: '0900', status: 200, mem: '109379584' } { i: '0950', status: 200, mem: '108904448' } { i: '1000', status: 200, mem: '110575616' } { i: '1050', status: 200, mem: '111411200' } { i: '1100', status: 200, mem: '112132096' } { i: '1150', status: 200, mem: '112132096' } { i: '1200', status: 200, mem: '112148480' } { i: '1250', status: 200, mem: '112771072' } { i: '1300', status: 200, mem: '112836608' } { i: '1350', status: 200, mem: '112836608' } { i: '1400', status: 200, mem: '113147904' } { i: '1450', status: 200, mem: '113147904' } { i: '1500', status: 200, mem: '116752384' } { i: '1550', status: 200, mem: '118652928' } { i: '1600', status: 200, mem: '118669312' } { i: '1650', status: 200, mem: '119226368' } { i: '1700', status: 200, mem: '120111104' } { i: '1750', status: 200, mem: '120487936' } { i: '1800', status: 200, mem: '120487936' } { i: '1850', status: 200, mem: '120602624' } { i: '1900', status: 200, mem: '120684544' } { i: '1950', status: 200, mem: '120684544' } { i: '2000', status: 200, mem: '121733120' } { i: '2050', status: 200, mem: '121749504' } { i: '2100', status: 200, mem: '121749504' } { i: '2150', status: 200, mem: '121798656' } { i: '2200', status: 200, mem: '122748928' } { i: '2250', status: 200, mem: '122912768' } { i: '2300', status: 200, mem: '122929152' } { i: '2350', status: 200, mem: '122994688' } { i: '2400', status: 200, mem: '125403136' } { i: '2450', status: 200, mem: '126337024' } { i: '2500', status: 200, mem: '126353408' } { i: '2550', status: 200, mem: '126353408' } { i: '2600', status: 200, mem: '126353408' } { i: '2650', status: 200, mem: '126353408' } { i: '2700', status: 200, mem: '126369792' } { i: '2750', status: 200, mem: '126369792' } { i: '2800', status: 200, mem: '126500864' } { i: '2850', status: 200, mem: '126517248' } { i: '2900', status: 200, mem: '126517248' } { i: '2950', status: 200, mem: '126795776' } { i: '3000', status: 200, 
mem: '126828544' } { i: '3050', status: 200, mem: '127746048' } { i: '3100', status: 200, mem: '128286720' } { i: '3150', status: 200, mem: '128319488' } { i: '3200', status: 200, mem: '128319488' } { i: '3250', status: 200, mem: '128319488' } { i: '3300', status: 200, mem: '128319488' } { i: '3350', status: 200, mem: '128319488' } { i: '3400', status: 200, mem: '128319488' } { i: '3450', status: 200, mem: '128319488' } { i: '3500', status: 200, mem: '128335872' } { i: '3550', status: 200, mem: '128335872' } { i: '3600', status: 200, mem: '128335872' } { i: '3650', status: 200, mem: '128335872' } { i: '3700', status: 200, mem: '128335872' } { i: '3750', status: 200, mem: '128352256' } { i: '3800', status: 200, mem: '128352256' } { i: '3850', status: 200, mem: '128614400' } { i: '3900', status: 200, mem: '128663552' } { i: '3950', status: 200, mem: '128663552' } { i: '4000', status: 200, mem: '128663552' } { i: '4050', status: 200, mem: '128663552' } { i: '4100', status: 200, mem: '128679936' } { i: '4150', status: 200, mem: '129253376' } { i: '4200', status: 200, mem: '129253376' } { i: '4250', status: 200, mem: '129253376' } { i: '4300', status: 200, mem: '129253376' } { i: '4350', status: 200, mem: '129253376' } { i: '4400', status: 200, mem: '130236416' } { i: '4450', status: 200, mem: '130236416' } { i: '4500', status: 200, mem: '130236416' } { i: '4550', status: 200, mem: '130940928' } { i: '4600', status: 200, mem: '133595136' } { i: '4650', status: 200, mem: '134709248' } { i: '4700', status: 200, mem: '134709248' } { i: '4750', status: 200, mem: '134725632' } { i: '4800', status: 200, mem: '134725632' } { i: '4850', status: 200, mem: '134758400' } { i: '4900', status: 200, mem: '136708096' } { i: '4950', status: 200, mem: '136708096' } { i: '5000', status: 200, mem: '136708096' } ``` ## Environment - macOS 15.1.1 - Node.js 22.11.0 ### Additional context The repeated values are suspicious so perhaps I'm doing something wrong.
There is no leak. This is tested at https://github.com/nodejs/undici/blob/main/test/fetch/fire-and-forget.js. What you are experiencing is V8 seeing that there is a lot of memory in the system and it’s being lazy in collecting it @mcollina How long does it take to collect? 🤔 I noticed your test uses `node --expose-gc` so I tried that calling `gc(true)` each loop but that didn't work. I also tried `node --max-old-space-size=100 example.cjs` to limit to 100MB but that didn't work either, it exceeded 100MB. What you are doing in that example is: 1. issuing `fetch()` requests 2. avoid consuming the body A body is kept in memory for a while after `fetch()` resolves. --- > @mcollina How long does it take to collect? You are not measuring it in your example. You need to monitor `heapUsed` and `heapTotal`. --- > I also tried node --max-old-space-size=100 example.cjs to limit to 100MB but that didn't work either, it exceeded 100MB. This means "keep 100MB of old space plus all the other memory Node.js needs". Reaching 130-150MB in that case would be ok. I've added some more data to your script: ```js const { fetch } = require('.'); async function main() { for (var i = 0; i <= 5000; i++) { const res = await fetch(`https://jsonplaceholder.typicode.com/photos/${i}`); if (i % 50 === 0) { console.log({ i: i.toString().padStart(4, '0'), status: res.status, rss: process.memoryUsage().rss.toString().padStart(9, '0'), heapUsed: process.memoryUsage().heapUsed.toString().padStart(9, '0'), heapTotal: process.memoryUsage().heapTotal.toString().padStart(9, '0') }); } } } main().catch(console.error); ``` And here are the last few results: ``` { i: '4700', status: 200, rss: '086245376', heapUsed: '013260704', heapTotal: '015368192' } { i: '4750', status: 200, rss: '086228992', heapUsed: '010794960', heapTotal: '016416768' } { i: '4800', status: 200, rss: '086245376', heapUsed: '012110992', heapTotal: '015368192' } { i: '4850', status: 200, rss: '086245376', heapUsed: '009548680', heapTotal: '015368192' } { i: '4900', status: 200, rss: '086245376', heapUsed: '010769768', heapTotal: '015368192' } { i: '4950', status: 200, rss: '086278144', heapUsed: '012100208', heapTotal: '015368192' } { i: '5000', status: 200, rss: '086360064', heapUsed: '009805912', heapTotal: '015368192' } ``` As you can see, memory is stable. > What you are doing in that example is: > issuing fetch() requests > avoid consuming the body > A body is kept in memory for a while after fetch() resolves. Oh yes, classic mistake 🤦 I had been stripping the code down to the bare minimum to reproduce the issue. This is closer to what I started with: ```js const { fetch } = require('undici'); async function getPhoto(i) { const res = await fetch(`https://jsonplaceholder.typicode.com/photos/${i}`); const status = res.status;; const buffer = await res.arrayBuffer(); return { status, buffer }; } async function main() { for (var i = 0; i <= 5000; i++) { const res = await getPhoto(i); if (i % 50 === 0 && res.status && res.buffer) { console.log( i.toString().padStart(4, '0') + ' ' + process.memoryUsage().rss.toString().padStart(9, '0') + ' ' + process.memoryUsage().heapUsed.toString().padStart(9, '0') + ' ' + process.memoryUsage().heapTotal.toString().padStart(9, '0') ); } } } main().catch(console.error); ``` And the results still show RSS continue to climb. 
<details> <summary>View Output</summary> <p> ``` 0000 071008256 009882448 018776064 0050 090095616 013469784 020086784 0100 094404608 011058872 029786112 0150 098320384 015244304 030048256 0200 100990976 014350248 030310400 0250 105037824 014720632 030310400 0300 105070592 018526904 030310400 0350 106774528 018642432 032145408 0400 107102208 013181760 031096832 0450 107249664 016931360 031096832 0500 107266048 016032752 031096832 0550 108462080 012931368 047349760 0600 108494848 016664000 047349760 0650 113999872 020463624 047349760 0700 117309440 014893016 047349760 0750 119472128 018588728 047349760 0800 125042688 022287696 047349760 0850 128073728 012345776 047874048 0900 128073728 016057648 047874048 0950 128221184 019773976 047874048 1000 129990656 023564768 048136192 1050 132038656 018142704 048136192 1100 132071424 021759768 048136192 1150 132546560 011677848 048398336 1200 132546560 015274792 048398336 1250 133038080 018869216 048398336 1300 133038080 022446840 048398336 1350 133054464 016831544 048398336 1400 133054464 020388728 048398336 1450 133922816 023976424 048398336 1500 134463488 012854784 048660480 1550 135086080 016561840 048660480 1600 135430144 020138168 048660480 1650 135462912 014598288 048660480 1700 135462912 018112024 048660480 1750 135479296 021659344 048660480 1800 102760448 009999008 011698176 1850 105480192 011254608 015630336 1900 107331584 012334224 021135360 1950 110247936 013957280 021397504 2000 110411776 010678840 021135360 2050 111296512 012895256 021135360 2100 111968256 010572544 029523968 2150 113377280 014218560 029523968 2200 115671040 013352656 029523968 2250 118603776 013849528 030310400 2300 118636544 017339208 030572544 2350 121077760 010764824 047611904 2400 121077760 014227440 047611904 2450 122290176 017683152 047611904 2500 125583360 021159552 047611904 2550 127467520 015600040 047611904 2600 129892352 019052288 047611904 2650 133201920 022524664 047611904 2700 134709248 012369888 047874048 2750 134709248 015869032 047874048 2800 134709248 019340096 047874048 2850 134709248 022801080 047874048 2900 135200768 017271128 047874048 2950 135217152 020741776 047874048 3000 135659520 010681352 048136192 3050 136380416 014495408 048398336 3100 136413184 017988664 048398336 3150 136413184 021417976 048398336 3200 136577024 015913600 048398336 3250 136577024 019341336 048398336 3300 136577024 022761064 048398336 3350 136740864 012587280 048922624 3400 136740864 016024008 048922624 3450 136773632 019474920 048922624 3500 136773632 022882064 048922624 3550 136806400 017398656 048922624 3600 136806400 020811536 048922624 3650 136970240 010700216 049184768 3700 136970240 014262912 049184768 3750 137019392 017837848 049446912 3800 137691136 021403168 049446912 3850 138706944 016010304 049446912 3900 139132928 019578592 049446912 3950 139132928 023030440 049446912 4000 139247616 012387624 049709056 4050 139264000 016038400 049709056 4100 139264000 019486456 049709056 4150 139264000 023000680 049709056 4200 139296768 017456520 049709056 4250 139296768 020909960 049709056 4300 139296768 010710288 049709056 4350 139296768 014165528 049709056 4400 139296768 017620312 049709056 4450 139313152 021141832 049709056 4500 139313152 015613984 049709056 4550 139313152 019030328 049709056 4600 139329536 022440488 049709056 4650 139771904 012355296 049709056 4700 139771904 015790592 049709056 4750 139771904 019252632 049709056 4800 139788288 022700712 049709056 4850 139788288 017150760 049709056 4900 139804672 020591304 049709056 4950 139902976 010401400 049971200 5000 
139902976 013832016 049971200 ``` </p> </details> My understanding is that RSS is the number to watch if you want to avoid Out-Of-Memory errors. Can you get it to go out of memory? V8 is really _lazy_ in cleaning things up, so I expect data to accumulate. Yes, I can get OOM in Docker by limiting memory to 75MB. When I bump to 100MB, it doesn't seem to OOM though. Might need more iterations or perhaps it depends on the response body size. Not sure yet. Here is the repo https://github.com/uncurated-tests/fetch-memory-leak I can set it to an 85MB memory limit and still get OOM. https://github.com/uncurated-tests/fetch-memory-leak/tree/85mb Although I'm starting to doubt this is an issue with `fetch()` since I can reproduce the OOM with `https.get()` too. Bumping the memory up to 60MB seems to fix it. https://github.com/uncurated-tests/fetch-memory-leak/tree/55mb We are also experiencing memory leaks and I see undici connection timeout errors in the log. That in combination with several reports about the undici fetch led me here. I am running undici through SvelteKit so I am not sure exactly which version we run. undici-types in node_modules has version 6.19.8 @styfle If you can crash the program only at a very low amount of memory... then the program needs at least that amount of memory to perform its operation. Now, that amount of memory seems _excessive_ to me, but at its core it's not a leak. @henryson if you have reproducible errors and/or leaks, please open new issues. Tbh, I could not reproduce this a few months ago. Set max old space to e.g. 100 MB and run the fire-and-forget test, and you will see that the garbage collector evicts faster and never goes significantly above the targeted 100 MB. If you of course set the memory to 500 MB and max old space is calculated to be like 350 MB, then it could happen that the rest of 150 MB is used already by other processes and boom. > that the rest of 150 MB is used already by other processes and boom. I believe that is what is happening to me. The example I provided is contrived in that every request returns the same payload. However, in production each response can have a very big difference in size (ranging from 2 KB to 200 MB). So if GC isn't reclaiming that memory, then it can cause OOM after 5 days because the available memory isn't enough for a 200 MB response. The reason why I created this issue is that this problem started happening when I switched from `undici.request()` to `fetch()` around October 31 at 12:00: ![image](https://github.com/user-attachments/assets/0cf7664d-cb16-44a5-a7c2-d606c7a4b441) Memory continues to rise very slowly until the process is restarted. You can see the sharp drop in memory Wed at 12:00 when the process restarted: <img width="1147" alt="image" src="https://github.com/user-attachments/assets/5dad1826-8f79-4cab-a60b-ec44feb02cb2"> > Memory continues to rise very slowly until the process is restarted. How is the process restarted? > How is the process restarted? Either it gets an OOM error and crashes so a new pod is scheduled to replace it, or new code is deployed in which case the old pod is stopped and a new pod takes its place. The screenshot above was from deploying new code which is why you don't see it spike up. Here's a screenshot from the OOM case: <img width="1158" alt="image" src="https://github.com/user-attachments/assets/26173e8b-8568-4bb1-92d1-319584f17684"> My 2 cents is that you have a legit memory leak (when it did OOM), but that's not captured by the snippet in the OP.
Let me expand: considering the reports that I've seen, I think it's _likely_ that there is a memory leak lurking in the error (or abort) path of `fetch()` or in some other "rare" condition. We do quite a lot of "fancy" tricks to meet the spec requirements, including abusing `FinalizationRegistry`: we are threading the needle between the GC and the spec/DX, and I suspect we are essentially forgetting to clear up some references in case of errors. Based on everything I've seen, the OP behavior shows no leak and is consistent with Node.js performance. The increased memory usage is the result of using WebStreams in Node.js. --- @styfle in your memory graph, you see that the spike of memory is quite sudden. Something happened in your system that caused that spike, and I would start your investigation there. > in your memory graph, you see that the spike of memory is quite sudden. Something happened in your system that caused that spike, and I would start your investigation there. Agreed, that is the bigger problem. > We do quite a lot of "fancy" tricks to meet the spec requirements, including abusing FinalizationRegistry: we are threading the needle between the GC and the spec/DX, and I suspect we are essentially forgetting to clear up some references in case of errors. I see. I also compared Bun's `fetch()` implementation to see if it also had a similar problem and it does. https://github.com/uncurated-tests/fetch-memory-leak/tree/bunjs I'll close this for now since I'm not able to prove a memory leak with these simple examples. I think for my case, the long-term solution is to switch from buffering to streaming, but that will require all downstream dependencies to work with a stream (and also web streams vs node streams are different 😅). @mcollina I'm reopening this issue because I can reproduce now, even after bumping memory up to a 128MB max I still get OOM. In particular, I'm making a request to a server with a bad TLS cert so fetch throws `ERR_TLS_CERT_ALTNAME_INVALID`. https://github.com/uncurated-tests/fetch-memory-leak/tree/err-tls-cert-altname-invalid Confirmed.
2025-03-11T23:42:43Z
7.4
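The practical takeaway from the fetch() memory discussion above is that a `fetch()` response body stays buffered until it is consumed (or cancelled), so fire-and-forget loops look like leaks even when they are not. A minimal sketch combining the snippets from the thread; the endpoint and iteration count are taken from the discussion, and the choice of `arrayBuffer()` over `res.body.cancel()` is illustrative rather than prescriptive:

```js
const { fetch } = require('undici')

async function main () {
  for (let i = 0; i <= 5000; i++) {
    const res = await fetch(`https://jsonplaceholder.typicode.com/photos/${i}`)

    // Consume the body so the buffered response can be reclaimed;
    // `await res.body.cancel()` would discard it without reading it instead.
    await res.arrayBuffer()

    if (i % 50 === 0) {
      // Watch rss alongside heapUsed/heapTotal, as suggested in the thread.
      const { rss, heapUsed, heapTotal } = process.memoryUsage()
      console.log({ i, status: res.status, rss, heapUsed, heapTotal })
    }
  }
}

main().catch(console.error)
```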
nodejs/undici
3,977
nodejs__undici-3977
[ "3975" ]
ad7ac027ed6d0fca7414a0810dcf136cda35065e
diff --git a/lib/handler/retry-handler.js b/lib/handler/retry-handler.js --- a/lib/handler/retry-handler.js +++ b/lib/handler/retry-handler.js @@ -133,7 +133,7 @@ class RetryHandler { ? Math.min(retryAfterHeader, maxTimeout) : Math.min(minTimeout * timeoutFactor ** (counter - 1), maxTimeout) - setTimeout(() => cb(null), retryTimeout).unref() + setTimeout(() => cb(null), retryTimeout) } onResponseStart (controller, statusCode, headers, statusMessage) {
diff --git a/test/fixtures/interceptors/retry-event-loop.js b/test/fixtures/interceptors/retry-event-loop.js new file mode 100644 --- /dev/null +++ b/test/fixtures/interceptors/retry-event-loop.js @@ -0,0 +1,33 @@ +'use strict' + +const { createServer } = require('node:http') +const { once } = require('node:events') +const { + Client, + interceptors: { retry } +} = require('../../..') + +const server = createServer() + +server.on('request', (req, res) => { + res.writeHead(418, { 'Content-Type': 'text/plain' }) + res.end('teapot') +}) + +server.listen(0) +once(server, 'listening').then(() => { + const client = new Client( + `http://localhost:${server.address().port}` + ).compose( + retry({ + maxTimeout: 1000, + maxRetries: 3, + statusCodes: [418] + }) + ) + + return client.request({ + method: 'GET', + path: '/' + }) +}) diff --git a/test/interceptors/retry.js b/test/interceptors/retry.js --- a/test/interceptors/retry.js +++ b/test/interceptors/retry.js @@ -4,6 +4,7 @@ const { tspl } = require('@matteo.collina/tspl') const { test, after } = require('node:test') const { createServer } = require('node:http') const { once } = require('node:events') +const { spawnSync } = require('node:child_process') const { Client, interceptors } = require('../..') const { retry, redirect, dns } = interceptors @@ -559,3 +560,16 @@ test('should not error if request is not meant to be retried', async t => { t.equal(response.statusCode, 400) t.equal(await response.body.text(), 'Bad request') }) + +test('#3975 - keep event loop ticking', async t => { + const suite = tspl(t, { plan: 3 }) + + const res = spawnSync('node', ['./test/fixtures/interceptors/retry-event-loop.js'], { + stdio: 'pipe' + }) + + const output = res.stderr.toString() + suite.ok(output.includes("code: 'UND_ERR_REQ_RETRY'")) + suite.ok(output.includes('RequestRetryError: Request failed')) + suite.ok(output.includes('statusCode: 418')) +})
RetryAgent doesn't keep the event loop alive ## Bug Description When using RetryAgent, Node exits while there are pending retries in flight. ## Reproducible By I've been playing around in a brand-new Node project with only `undici` installed and trying to run the following script. This is mostly copy/pasted from the docs - I don't think I'm doing anything unorthodox here. ```js import { request, Agent, RetryAgent } from "undici"; const resp = await request("http://httpbin.org/status/418", { dispatcher: new RetryAgent(new Agent(), { statusCodes: [418], }), headers: { "User-Agent": "teapot" }, }); console.log("status:", resp.statusCode); console.log("headers:", resp.headers); ``` ## Expected Behavior I'd expect to get either an HTTP response or a retry error. ## Logs & Screenshots ``` $ node example.js; echo "exit code: $status" Warning: Detected unsettled top-level await at file:///[redacted]/example.js:3 const resp = await request("http://httpbin.org/status/418", { ^ exit code: 13 ``` ### Additional context ``` $ node --version v23.5.0 $ grep undici package.json "undici": "^7.2.0" ```
Thanks for reporting! Would you like to send a Pull Request to address this issue? Remember to add unit tests. > Thanks for reporting! Would you like to send a Pull Request to address this issue? Remember to add unit tests. It's not obvious to me what's wrong, or I would have sent one already! From skimming the RetryAgent code I can't tell why Node would decide the task queue is empty.
2024-12-31T08:01:58Z
7.2
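The entire fix in the retry-handler patch above is dropping `.unref()` from the retry timer. A standalone sketch of why that single call matters; it uses only Node's timer semantics, no undici:

```js
// An unref'd timer does not keep the event loop alive: with nothing else
// pending, the process exits before the callback ever runs, which is what
// happened to the retry delay while a retry was "in flight".
setTimeout(() => {
  console.log('never printed')
}, 1000).unref()

console.log('nothing keeps the loop alive, so the process exits immediately')
```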
nodejs/undici
3,855
nodejs__undici-3855
[ "3848", "3848" ]
61ec3531a64ffeec953a990c11735ff09455de4e
diff --git a/lib/api/api-request.js b/lib/api/api-request.js --- a/lib/api/api-request.js +++ b/lib/api/api-request.js @@ -73,7 +73,7 @@ class RequestHandler extends AsyncResource { this.removeAbortListener = util.addAbortListener(this.signal, () => { this.reason = this.signal.reason ?? new RequestAbortedError() if (this.res) { - util.destroy(this.res, this.reason) + util.destroy(this.res.on('error', util.nop), this.reason) } else if (this.abort) { this.abort(this.reason) }
diff --git a/test/client-request.js b/test/client-request.js --- a/test/client-request.js +++ b/test/client-request.js @@ -1252,3 +1252,39 @@ test('request post body DataView', async (t) => { await t.completed }) + +test('#3736 - Aborted Response (without consuming body)', async (t) => { + const plan = tspl(t, { plan: 1 }) + + const controller = new AbortController() + const server = createServer((req, res) => { + setTimeout(() => { + res.writeHead(200, 'ok', { + 'content-type': 'text/plain' + }) + res.write('hello from server') + res.end() + }, 100) + }) + + server.listen(0) + + await EE.once(server, 'listening') + const client = new Client(`http://localhost:${server.address().port}`) + + after(server.close.bind(server)) + after(client.destroy.bind(client)) + + const { signal } = controller + const promise = client.request({ + path: '/', + method: 'GET', + signal + }) + + controller.abort() + + await plan.rejects(promise, { message: 'This operation was aborted' }) + + await plan.completed +}) diff --git a/test/http2.js b/test/http2.js --- a/test/http2.js +++ b/test/http2.js @@ -334,23 +334,15 @@ test( after(() => server.close()) after(() => client.close()) - t = tspl(t, { plan: 2 }) + t = tspl(t, { plan: 1 }) - try { - await client.request({ - path: '/', - method: 'GET', - headers: { - 'x-my-header': 'foo' - } - }) - } catch (error) { - t.strictEqual( - error.message, - 'Client network socket disconnected before secure TLS connection was established' - ) - t.strictEqual(error.code, 'ECONNRESET') - } + await t.rejects(client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + })) } )
Uncaught exception thrown in way that can't be intercepted in userland ## Bug Description We had a recent incident wherein a specific workload was able to cause Node.js to crash due to an uncaught exception. We saw two distinct cases of uncaught exceptions: ``` Error: read ECONNRESET at Pipe.onStreamRead (node:internal/stream_base_commons:217:20) ``` ``` SocketError: other side closed at Socket.<anonymous> (/data/node_modules/undici/lib/dispatcher/client-h1.js:701:24) at Socket.emit (node:events:529:35) at Socket.emit (node:domain:489:12) at endReadableNT (node:internal/streams/readable:1400:12) at process.processTicksAndRejections (node:internal/process/task_queues:82:21) ``` Both of these were caught via `process.on('uncaughtException')` and had the `origin` of `uncaughtException` (these were not unhandled rejections AFAICT). To the best of our knowledge, all opportunities for exhaustive error handling have been exercised though we've been unable to reproduce a minimal repro outside our codebase. Within our codebase, we've been able to get deeper stack traces through Chrome Dev Tools: ``` onUncaughtException (graceful_termination.js:53) emit (node:events:529) emit (node:domain:489) (anonymous) (node:internal/process/execution:158) TickObject init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) emitInitScript (node:internal/async_hooks:503) nextTick (node:internal/process/task_queues:132) onDestroy (node:internal/streams/destroy:103) (anonymous) (readable.js:68) processImmediate (node:internal/timers:476) topLevelDomainCallback (node:domain:161) callbackTrampoline (node:internal/async_hooks:126) Immediate init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) emitInitScript (node:internal/async_hooks:503) initAsyncResource (node:internal/timers:164) Immediate (node:internal/timers:620) setImmediate (node:timers:307) _destroy (readable.js:67) _destroy (node:internal/streams/destroy:109) destroy (node:internal/streams/destroy:71) destroy (readable.js:58) destroy (util.js:290) (anonymous) (api-request.js:176) (anonymous) (node:internal/process/task_queues:140) runInAsyncScope (node:async_hooks:203) runMicrotask (node:internal/process/task_queues:137) Microtask init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) emitInitScript (node:internal/async_hooks:503) AsyncResource (node:async_hooks:186) queueMicrotask (node:internal/process/task_queues:152) onError (api-request.js:175) onError (request.js:299) errorRequest (util.js:638) (anonymous) (client-h1.js:740) emit (node:events:529) emit (node:domain:489) (anonymous) (node:net:350) callbackTrampoline (node:internal/async_hooks:128) PIPEWRAP init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) Socket.connect (node:net:1218) connect (node:net:249) connect (connect.js:126) socket (client.js:428) connect (client.js:427) _resume (client.js:600) resume (client.js:534) Client.<computed> (client.js:259) [dispatch] (client.js:314) Intercept (redirect-interceptor.js:11) dispatch (dispatcher-base.js:177) [dispatch] (pool-base.js:143) dispatch (dispatcher-base.js:177) request (api-request.js:203) (anonymous) (api-request.js:196) request (api-request.js:195) executeValidatedActionsBatch (execute_actions_batch.js:493) // Where we invoke `.request()` of a `Pool` instance. // ... snip ... 
``` In that longer stack trace, you can see that we call `request` on a `Pool` instance in the `executeValidatedActionsBatch` function, which is declared as `async function`. While that usually allows us to capture errors via `Promise` rejection, in the case of the incident, a customer's workload was reliably causing these exceptions to bubble up to the process level. We have some weak hypotheses: 1. Timing issue between a socket `end` event and gaining access to the request `.body` Readable. We don't have a reference to the `Readable` by the time the error happens. 2. Timing issue between `AbortSignal` and other clean up. 3. 😕 ❓ ## Reproducible By This is reproduced (sometimes) when the process tree within which the target server is running is suddenly OOM killed. The undici `Pool` is connected via unix domain socket which might present unique ways of blowing up vs the usual tcp sockets. ## Expected Behavior We expect that no process level uncaught exceptions or unhandled rejections are possible in pseudo code like this: ```js // Wrap undici in an async function so that we can handle all rejections in the async // continuation. async function doRequest(options, sink) { const res = await pool.request({ ...options, signal }); // Avoid uncaughts via EventEmitter legacy footguns. res.body.on('error', () => {}); const firstChunk = await consumeStreamUpToDelimiter(res.body, '\n', { signal }); // Do stuff with first chunk. Conditionally do another client request whose body // uses `res.body`. // After we've done all the client requests, we want to pipe the tail of the last request // back into a supplied sink (Writable stream). await pipeline([res.body, sink], { signal }); } ``` ## Logs & Screenshots Added in description ## Environment Docker image `node:18.19.1-bullseye` via docker-for-mac on MacOS 14.17.1. ### Additional context Sort of terrible drawing of what our stuff is doing: ``` Server request --> Repeat N times --> await pipeline(lastClientRes.body, server response) |--> lastClientRes = await Pool.request({ socketPath }) |--> for (i in [0...M]) { await consumeStreamUpToDelimiter(lastClientRes) } ``` Uncaught exception thrown in way that can't be intercepted in userland ## Bug Description We had a recent incident wherein a specific workload was able to cause Node.js to crash due to an uncaught exception. We saw two distinct cases of uncaught exceptions: ``` Error: read ECONNRESET at Pipe.onStreamRead (node:internal/stream_base_commons:217:20) ``` ``` SocketError: other side closed at Socket.<anonymous> (/data/node_modules/undici/lib/dispatcher/client-h1.js:701:24) at Socket.emit (node:events:529:35) at Socket.emit (node:domain:489:12) at endReadableNT (node:internal/streams/readable:1400:12) at process.processTicksAndRejections (node:internal/process/task_queues:82:21) ``` Both of these were caught via `process.on('uncaughtException')` and had the `origin` of `uncaughtException` (these were not unhandled rejections AFAICT). To the best of our knowledge, all opportunities for exhaustive error handling have been exercised though we've been unable to reproduce a minimal repro outside our codebase. 
Within our codebase, we've been able to get deeper stack traces through Chrome Dev Tools: ``` onUncaughtException (graceful_termination.js:53) emit (node:events:529) emit (node:domain:489) (anonymous) (node:internal/process/execution:158) TickObject init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) emitInitScript (node:internal/async_hooks:503) nextTick (node:internal/process/task_queues:132) onDestroy (node:internal/streams/destroy:103) (anonymous) (readable.js:68) processImmediate (node:internal/timers:476) topLevelDomainCallback (node:domain:161) callbackTrampoline (node:internal/async_hooks:126) Immediate init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) emitInitScript (node:internal/async_hooks:503) initAsyncResource (node:internal/timers:164) Immediate (node:internal/timers:620) setImmediate (node:timers:307) _destroy (readable.js:67) _destroy (node:internal/streams/destroy:109) destroy (node:internal/streams/destroy:71) destroy (readable.js:58) destroy (util.js:290) (anonymous) (api-request.js:176) (anonymous) (node:internal/process/task_queues:140) runInAsyncScope (node:async_hooks:203) runMicrotask (node:internal/process/task_queues:137) Microtask init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) emitInitScript (node:internal/async_hooks:503) AsyncResource (node:async_hooks:186) queueMicrotask (node:internal/process/task_queues:152) onError (api-request.js:175) onError (request.js:299) errorRequest (util.js:638) (anonymous) (client-h1.js:740) emit (node:events:529) emit (node:domain:489) (anonymous) (node:net:350) callbackTrampoline (node:internal/async_hooks:128) PIPEWRAP init (node:internal/inspector_async_hook:25) emitInitNative (node:internal/async_hooks:200) Socket.connect (node:net:1218) connect (node:net:249) connect (connect.js:126) socket (client.js:428) connect (client.js:427) _resume (client.js:600) resume (client.js:534) Client.<computed> (client.js:259) [dispatch] (client.js:314) Intercept (redirect-interceptor.js:11) dispatch (dispatcher-base.js:177) [dispatch] (pool-base.js:143) dispatch (dispatcher-base.js:177) request (api-request.js:203) (anonymous) (api-request.js:196) request (api-request.js:195) executeValidatedActionsBatch (execute_actions_batch.js:493) // Where we invoke `.request()` of a `Pool` instance. // ... snip ... ``` In that longer stack trace, you can see that we call `request` on a `Pool` instance in the `executeValidatedActionsBatch` function, which is declared as `async function`. While that usually allows us to capture errors via `Promise` rejection, in the case of the incident, a customer's workload was reliably causing these exceptions to bubble up to the process level. We have some weak hypotheses: 1. Timing issue between a socket `end` event and gaining access to the request `.body` Readable. We don't have a reference to the `Readable` by the time the error happens. 2. Timing issue between `AbortSignal` and other clean up. 3. 😕 ❓ ## Reproducible By This is reproduced (sometimes) when the process tree within which the target server is running is suddenly OOM killed. The undici `Pool` is connected via unix domain socket which might present unique ways of blowing up vs the usual tcp sockets. 
## Expected Behavior We expect that no process level uncaught exceptions or unhandled rejections are possible in pseudo code like this: ```js // Wrap undici in an async function so that we can handle all rejections in the async // continuation. async function doRequest(options, sink) { const res = await pool.request({ ...options, signal }); // Avoid uncaughts via EventEmitter legacy footguns. res.body.on('error', () => {}); const firstChunk = await consumeStreamUpToDelimiter(res.body, '\n', { signal }); // Do stuff with first chunk. Conditionally do another client request whose body // uses `res.body`. // After we've done all the client requests, we want to pipe the tail of the last request // back into a supplied sink (Writable stream). await pipeline([res.body, sink], { signal }); } ``` ## Logs & Screenshots Added in description ## Environment Docker image `node:18.19.1-bullseye` via docker-for-mac on MacOS 14.17.1. ### Additional context Sort of terrible drawing of what our stuff is doing: ``` Server request --> Repeat N times --> await pipeline(lastClientRes.body, server response) |--> lastClientRes = await Pool.request({ socketPath }) |--> for (i in [0...M]) { await consumeStreamUpToDelimiter(lastClientRes) } ```
This is indeed a bad bug. However it'd be __impossible__ to fix without a reproduction. Would you mind to send a PR? Looks to me like the body is missing an error handler, e.g. through `consumeStreamUpToDelimiter` > Looks to me like the body is missing an error handler, e.g. through `consumeStreamUpToDelimiter` @ronag we've tried to cover that by systematically attaching an `"error"` handler to `clientRes.body` immediately in the same microtask that we first get a reference to it. Are there additional forms of error handling we should be considering? Additional debugging shows some additional symptoms: - Prior to actual breakage, we see several requests fail in a consistent way. The target endpoint to which `undici`'s `Pool` is managing connections get's in a broken state (AFAICT, totally our fault but nonetheless a contributing factor here). - The behaviour manifests after several consecutive failures of this sort using the same `Pool` instance. Other `Pool` instances are unaffected (we have many pools in this multi-tenant system). Given those symptoms, I wonder if another potential avenue of exploration is whether the `Pool`'s socket management might be a contributing factor here. Are there windows of opportunity wherein a `Socket` owned by a `Pool` has no `"error"` listener? Unfortunately, our local repro is statistically frequent but not 100%. I'll try using a different type of `Dispatcher` that doesn't use `keepAlive` and see if I can still repro. I've been able to reproduce the symptoms in our system where things would normally break but without the process-level crash. To do so, I stopped using long-lived `Pool` instances and instead swapped to per-request `Client` instances (with `pipelining=0`, which is a possible red-herring). This observation strengthens the hypothesis that a potential bug might lie within the `Pool`'s socket lifecycle management outside the critical request path. Without a repro there is not much we can do here... Understood. Hopefully this issue can provide context just in case someone's spelunking through the codebase and a spark of brilliance emerges. In the interim, we'll be taking a bit of perf hit and using per-operation `Client` instances as we've been unable to reproduce the issue when we avoid socket re-use. > To do so, I stopped using long-lived Pool instances and instead swapped to per-request Client instances (with pipelining=0, which is a possible red-herring). Note that `Client` holds the socket. `Pool` keep a list of `Client` around. Switching to one--client per request, means that the problem is in the logic that keep socket long-lived in `Client`. I think you should try adding a `socket.on('error', console.error)` somewhere before here: https://github.com/nodejs/undici/blob/a427e4b948c4fdae8d86a013565c3929111601b2/lib/dispatcher/client.js#L434. This is indeed a bad bug. However it'd be __impossible__ to fix without a reproduction. Would you mind to send a PR? Looks to me like the body is missing an error handler, e.g. through `consumeStreamUpToDelimiter` > Looks to me like the body is missing an error handler, e.g. through `consumeStreamUpToDelimiter` @ronag we've tried to cover that by systematically attaching an `"error"` handler to `clientRes.body` immediately in the same microtask that we first get a reference to it. Are there additional forms of error handling we should be considering? Additional debugging shows some additional symptoms: - Prior to actual breakage, we see several requests fail in a consistent way. 
The target endpoint to which `undici`'s `Pool` is managing connections gets into a broken state (AFAICT, totally our fault but nonetheless a contributing factor here). - The behaviour manifests after several consecutive failures of this sort using the same `Pool` instance. Other `Pool` instances are unaffected (we have many pools in this multi-tenant system). Given those symptoms, I wonder if another potential avenue of exploration is whether the `Pool`'s socket management might be a contributing factor here. Are there windows of opportunity wherein a `Socket` owned by a `Pool` has no `"error"` listener? Unfortunately, our local repro is statistically frequent but not 100%. I'll try using a different type of `Dispatcher` that doesn't use `keepAlive` and see if I can still repro. I've been able to reproduce the symptoms in our system where things would normally break but without the process-level crash. To do so, I stopped using long-lived `Pool` instances and instead swapped to per-request `Client` instances (with `pipelining=0`, which is a possible red-herring). This observation strengthens the hypothesis that a potential bug might lie within the `Pool`'s socket lifecycle management outside the critical request path. Without a repro there is not much we can do here... Understood. Hopefully this issue can provide context just in case someone's spelunking through the codebase and a spark of brilliance emerges. In the interim, we'll be taking a bit of a perf hit and using per-operation `Client` instances as we've been unable to reproduce the issue when we avoid socket re-use. > To do so, I stopped using long-lived Pool instances and instead swapped to per-request Client instances (with pipelining=0, which is a possible red-herring). Note that `Client` holds the socket. `Pool` keeps a list of `Client` instances around. Switching to one client per request means that the problem is in the logic that keeps sockets long-lived in `Client`. I think you should try adding a `socket.on('error', console.error)` somewhere before here: https://github.com/nodejs/undici/blob/a427e4b948c4fdae8d86a013565c3929111601b2/lib/dispatcher/client.js#L434.
2024-11-20T17:57:44Z
6.21
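The one-line change in the api-request patch above attaches a no-op `'error'` listener before destroying the response stream. That guards against the class of crash described in the issue: destroying a stream with an error while nothing is subscribed to `'error'` escalates to an uncaught exception. A minimal sketch of the failure mode, independent of undici:

```js
const { Readable } = require('node:stream')

const body = new Readable({ read () {} })

// Comment this listener out and the destroy() below raises an uncaught
// 'error' event that crashes the process; with it (the `util.nop` in the
// patch), the error is swallowed here and can instead be surfaced through
// whatever promise the caller is awaiting.
body.on('error', () => {})

body.destroy(new Error('This operation was aborted'))
```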
nodejs/undici
3,941
nodejs__undici-3941
[ "3934" ]
dd7473c2e0818161d182eab9bf9d709227851870
diff --git a/lib/handler/wrap-handler.js b/lib/handler/wrap-handler.js --- a/lib/handler/wrap-handler.js +++ b/lib/handler/wrap-handler.js @@ -53,8 +53,7 @@ module.exports = class WrapHandler { onRequestUpgrade (controller, statusCode, headers, socket) { const rawHeaders = [] for (const [key, val] of Object.entries(headers)) { - // TODO (fix): What if val is Array - rawHeaders.push(Buffer.from(key), Buffer.from(val)) + rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val)) } this.#handler.onUpgrade?.(statusCode, rawHeaders, socket) @@ -63,8 +62,7 @@ module.exports = class WrapHandler { onResponseStart (controller, statusCode, headers, statusMessage) { const rawHeaders = [] for (const [key, val] of Object.entries(headers)) { - // TODO (fix): What if val is Array - rawHeaders.push(Buffer.from(key), Buffer.from(val)) + rawHeaders.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val)) } if (this.#handler.onHeaders?.(statusCode, rawHeaders, () => controller.resume(), statusMessage) === false) { @@ -81,8 +79,7 @@ module.exports = class WrapHandler { onResponseEnd (controller, trailers) { const rawTrailers = [] for (const [key, val] of Object.entries(trailers)) { - // TODO (fix): What if val is Array - rawTrailers.push(Buffer.from(key), Buffer.from(val)) + rawTrailers.push(Buffer.from(key), Array.isArray(val) ? val.map(v => Buffer.from(v)) : Buffer.from(val)) } this.#handler.onComplete?.(rawTrailers)
diff --git a/test/client-node-max-header-size.js b/test/client-node-max-header-size.js --- a/test/client-node-max-header-size.js +++ b/test/client-node-max-header-size.js @@ -25,7 +25,7 @@ describe("Node.js' --max-http-header-size cli option", () => { test("respect Node.js' --max-http-header-size", async (t) => { t = tspl(t, { plan: 6 }) - const command = 'node -e "require(\'.\').request(\'http://localhost:' + server.address().port + '\')"' + const command = 'node --disable-warning=ExperimentalWarning -e "require(\'.\').request(\'http://localhost:' + server.address().port + '\')"' exec(`${command} --max-http-header-size=1`, { stdio: 'pipe' }, (err, stdout, stderr) => { t.strictEqual(err.code, 1) @@ -44,7 +44,7 @@ describe("Node.js' --max-http-header-size cli option", () => { test('--max-http-header-size with Client API', async (t) => { t = tspl(t, { plan: 6 }) - const command = 'node -e "new (require(\'.\').Client)(new URL(\'http://localhost:200\'))"' + const command = 'node --disable-warning=ExperimentalWarning -e "new (require(\'.\').Client)(new URL(\'http://localhost:200\'))"' exec(`${command} --max-http-header-size=0`, { stdio: 'pipe' }, (err, stdout, stderr) => { t.strictEqual(err.code, 1) diff --git a/test/issue-3934.js b/test/issue-3934.js new file mode 100644 --- /dev/null +++ b/test/issue-3934.js @@ -0,0 +1,32 @@ +'use strict' + +const { test } = require('node:test') +const { createServer } = require('node:http') +const { once } = require('node:events') +const assert = require('node:assert') +const { Agent, RetryAgent, request } = require('..') + +// https://github.com/nodejs/undici/issues/3934 +test('WrapHandler works with multiple header values', async (t) => { + const server = createServer(async (_req, res) => { + const headers = [ + ['set-cookie', 'a'], + ['set-cookie', 'b'], + ['set-cookie', 'c'] + ] + res.writeHead(200, headers) + res.end() + }).listen(0) + + await once(server, 'listening') + t.after(() => server.close()) + + const agent = new Agent() + const retryAgent = new RetryAgent(agent) + + const { + headers + } = await request(`http://localhost:${server.address().port}`, { dispatcher: retryAgent }) + + assert.deepStrictEqual(headers['set-cookie'], ['a', 'b', 'c']) +})
v7 `RetryAgent` converts header with multiple entries (eg. set-cookie) to `\x00` ## Bug Description After upgrading from `6.21.0` to `7.1.0` the response `set-cookie` header contains `\x00` instead of actual values. Expected ```js { server: 'nginx', date: 'Mon, 09 Dec 2024 07:11:11 GMT', 'content-type': 'application/json; charset=utf-8', 'content-length': '359', connection: 'keep-alive', 'set-cookie': [ 'CloudFront-Policy=xxx; Path=/; Domain=example.com; HTTPOnly', 'CloudFront-Key-Pair-Id=xxx; Path=/; Domain=example.com; HTTPOnly', 'CloudFront-Signature=xxx; Path=/; Domain=example.com; HTTPOnly' ] } ``` Received ```js { server: 'nginx', date: 'Mon, 09 Dec 2024 07:08:57 GMT', 'content-type': 'application/json; charset=utf-8', 'content-length': '364', connection: 'keep-alive', 'set-cookie': '\x00\x00\x00', } ```
Can you provide a [Minimal Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) to support you better? Unfortunately I'm unable to reproduce the issue locally. I tried `node:http` and `node:http2` as the server, and sending multiple `set-cookie` headers works in both cases. In production I'm fetching from `https://developer.api.autodesk.com` to get signed cookies, see https://aps.autodesk.com/blog/download-derivative-files-using-new-signedcookies-api-without-setting-cookies-first-header In this case the `set-cookie` header is always `\x00\x00\x00` using v7 but works using v6. Atm I don't have time to investigate this further. Unfortunately, without a reproduction it is hard to assess what might be happening; it could be bad encoding or parsing of the signed header while receiving the response, but it could also be due to bad encoding from the source. Without an isolated reproduction it is hard to tell what might be going wrong. Happy to have a look if we can isolate the problem.
2024-12-10T19:57:40Z
7.1
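The `\x00\x00\x00` value in the report above falls straight out of the `TODO (fix): What if val is Array` branch the patch addresses: `Buffer.from(array)` treats each element as an octet, and string elements coerce to 0. A small sketch of the before/after behaviour (the variable names are illustrative only):

```js
const cookies = ['a', 'b', 'c'] // e.g. several set-cookie values

// Old behaviour: the whole array is handed to Buffer.from(), which reads
// each entry as a byte; non-numeric strings coerce to 0.
console.log(Buffer.from(cookies)) // <Buffer 00 00 00> -> '\x00\x00\x00'

// Patched behaviour: arrays stay arrays and each value is converted on its own.
const rawValue = Array.isArray(cookies)
  ? cookies.map(v => Buffer.from(v))
  : Buffer.from(cookies)

console.log(rawValue) // [ <Buffer 61>, <Buffer 62>, <Buffer 63> ]
```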
nodejs/undici
3,833
nodejs__undici-3833
[ "3829" ]
28b10fa5f8bf84d0e286ca576d13f75c50faf7cb
diff --git a/lib/web/fetch/headers.js b/lib/web/fetch/headers.js --- a/lib/web/fetch/headers.js +++ b/lib/web/fetch/headers.js @@ -451,7 +451,7 @@ class Headers { // 2. If init is given, then fill this with init. if (init !== undefined) { - init = webidl.converters.HeadersInit(init, 'Headers contructor', 'init') + init = webidl.converters.HeadersInit(init, 'Headers constructor', 'init') fill(this, init) } } diff --git a/lib/web/fetch/webidl.js b/lib/web/fetch/webidl.js --- a/lib/web/fetch/webidl.js +++ b/lib/web/fetch/webidl.js @@ -345,12 +345,14 @@ webidl.recordConverter = function (keyConverter, valueConverter) { const keys = [...Object.getOwnPropertyNames(O), ...Object.getOwnPropertySymbols(O)] for (const key of keys) { + const keyName = webidl.util.Stringify(key) + // 1. Let typedKey be key converted to an IDL value of type K. - const typedKey = keyConverter(key, prefix, argument) + const typedKey = keyConverter(key, prefix, `Key ${keyName} in ${argument}`) // 2. Let value be ? Get(O, key). // 3. Let typedValue be value converted to an IDL value of type V. - const typedValue = valueConverter(O[key], prefix, argument) + const typedValue = valueConverter(O[key], prefix, `${argument}[${keyName}]`) // 4. Set result[typedKey] to typedValue. result[typedKey] = typedValue @@ -501,8 +503,14 @@ webidl.converters.DOMString = function (V, prefix, argument, opts) { // https://webidl.spec.whatwg.org/#es-ByteString webidl.converters.ByteString = function (V, prefix, argument) { // 1. Let x be ? ToString(V). - // Note: DOMString converter perform ? ToString(V) - const x = webidl.converters.DOMString(V, prefix, argument) + if (typeof V === 'symbol') { + throw webidl.errors.exception({ + header: prefix, + message: `${argument} is a symbol, which cannot be converted to a ByteString.` + }) + } + + const x = String(V) // 2. If the value of any element of x is greater than // 255, then throw a TypeError.
diff --git a/test/fetch/headers.js b/test/fetch/headers.js --- a/test/fetch/headers.js +++ b/test/fetch/headers.js @@ -26,7 +26,7 @@ test('Headers initialization', async (t) => { throws(() => new Headers(['undici', 'fetch', 'fetch']), TypeError) throws( () => new Headers([0, 1, 2]), - TypeError('Headers contructor: init[0] (0) is not iterable.') + TypeError('Headers constructor: init[0] (0) is not iterable.') ) }) @@ -41,7 +41,7 @@ test('Headers initialization', async (t) => { const init = ['undici', 'fetch', 'fetch', 'undici'] throws( () => new Headers(init), - TypeError('Headers contructor: init[0] ("undici") is not iterable.') + TypeError('Headers constructor: init[0] ("undici") is not iterable.') ) }) }) @@ -767,3 +767,16 @@ test('Invalid Symbol.iterators', (t) => { new Headers(obj) // eslint-disable-line no-new }, TypeError) }) + +// https://github.com/nodejs/undici/issues/3829 +test('Invalid key/value records passed to constructor (issue #3829)', (t) => { + assert.throws( + () => new Headers({ [Symbol('x-fake-header')]: '??' }), + new TypeError('Headers constructor: Key Symbol(x-fake-header) in init is a symbol, which cannot be converted to a ByteString.') + ) + + assert.throws( + () => new Headers({ 'x-fake-header': Symbol('why is this here?') }), + new TypeError('Headers constructor: init["x-fake-header"] is a symbol, which cannot be converted to a ByteString.') + ) +}) diff --git a/test/webidl/errors.js b/test/webidl/errors.js --- a/test/webidl/errors.js +++ b/test/webidl/errors.js @@ -17,7 +17,7 @@ test('ByteString', (t) => { ]) { assert.throws( () => new Headers()[method](name, value), - new TypeError(`Headers.${method}: name is a symbol, which cannot be converted to a DOMString.`) + new TypeError(`Headers.${method}: name is a symbol, which cannot be converted to a ByteString.`) ) } })
Error message for invalid headers could be improved ## Bug Description I'm getting the error message: > TypeError: Headers contructor: init is a symbol, which cannot be converted to a DOMString. ## Reproducible By ``` const headers = new Headers({'x-fake-header': Symbol('why is this here?')}) ``` ## Expected Behavior `contructor` should not be typo'd "init is a symbol" is incorrect. It _contains_ a symbol, but isn't one ## Logs & Screenshots ``` TypeError: Headers contructor: init is a symbol, which cannot be converted to a DOMString. at webidl.errors.exception (node:internal/deps/undici/undici:3384:14) at webidl.converters.DOMString (node:internal/deps/undici/undici:3650:29) at webidl.converters.ByteString (node:internal/deps/undici/undici:3658:35) at Object.record<ByteString, ByteString> (node:internal/deps/undici/undici:3568:32) at webidl.converters.HeadersInit (node:internal/deps/undici/undici:8695:67) ``` ## Environment Tested on Node.js v20.17.0 and v22.11.0 ### Additional context Users were running into this in SvelteKit projects. I'm not quite sure how/why yet, but I think maybe `connect` (which is used by `vite`) has something like this in its headers and the headers are being copied from there
2024-11-13T20:13:36Z
7
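The test cases added in the test patch above double as a usage example. A sketch of the reproduction from the report, with the error messages the patched webidl converters now produce (messages copied from the test expectations):

```js
const { Headers } = require('undici')

try {
  // Symbol used as a header value
  new Headers({ 'x-fake-header': Symbol('why is this here?') })
} catch (err) {
  // "Headers constructor: init["x-fake-header"] is a symbol, which cannot be
  // converted to a ByteString."
  console.error(err.message)
}

try {
  // Symbol used as a header key
  new Headers({ [Symbol('x-fake-header')]: '??' })
} catch (err) {
  // "Headers constructor: Key Symbol(x-fake-header) in init is a symbol,
  // which cannot be converted to a ByteString."
  console.error(err.message)
}
```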
nodejs/undici
3,842
nodejs__undici-3842
[ "3869" ]
1b58a51e203d99cb8441c25044c8f8ed5bc63554
diff --git a/eslint.config.js b/eslint.config.js --- a/eslint.config.js +++ b/eslint.config.js @@ -7,6 +7,7 @@ module.exports = [ ignores: [ 'lib/llhttp', 'test/fixtures/wpt', + 'test/fixtures/cache-tests', 'undici-fetch.js' ], noJsx: true, diff --git a/lib/cache/memory-cache-store.js b/lib/cache/memory-cache-store.js --- a/lib/cache/memory-cache-store.js +++ b/lib/cache/memory-cache-store.js @@ -89,6 +89,7 @@ class MemoryCacheStore { statusCode: entry.statusCode, headers: entry.headers, body: entry.body, + vary: entry.vary ? entry.vary : undefined, etag: entry.etag, cacheControlDirectives: entry.cacheControlDirectives, cachedAt: entry.cachedAt, diff --git a/lib/cache/sqlite-cache-store.js b/lib/cache/sqlite-cache-store.js --- a/lib/cache/sqlite-cache-store.js +++ b/lib/cache/sqlite-cache-store.js @@ -6,6 +6,9 @@ const { assertCacheKey, assertCacheValue } = require('../util/cache.js') const VERSION = 3 +// 2gb +const MAX_ENTRY_SIZE = 2 * 1000 * 1000 * 1000 + /** * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore * @implements {CacheStore} @@ -18,7 +21,7 @@ const VERSION = 3 * } & import('../../types/cache-interceptor.d.ts').default.CacheValue} SqliteStoreValue */ module.exports = class SqliteCacheStore { - #maxEntrySize = Infinity + #maxEntrySize = MAX_ENTRY_SIZE #maxCount = Infinity /** @@ -78,6 +81,11 @@ module.exports = class SqliteCacheStore { ) { throw new TypeError('SqliteCacheStore options.maxEntrySize must be a non-negative integer') } + + if (opts.maxEntrySize > MAX_ENTRY_SIZE) { + throw new TypeError('SqliteCacheStore options.maxEntrySize must be less than 2gb') + } + this.#maxEntrySize = opts.maxEntrySize } @@ -227,6 +235,7 @@ module.exports = class SqliteCacheStore { statusMessage: value.statusMessage, headers: value.headers ? JSON.parse(value.headers) : undefined, etag: value.etag ? value.etag : undefined, + vary: value.vary ?? undefined, cacheControlDirectives: value.cacheControlDirectives ? 
JSON.parse(value.cacheControlDirectives) : undefined, @@ -394,10 +403,10 @@ module.exports = class SqliteCacheStore { return undefined } - const vary = JSON.parse(value.vary) + value.vary = JSON.parse(value.vary) - for (const header in vary) { - if (!headerValueEquals(headers[header], vary[header])) { + for (const header in value.vary) { + if (!headerValueEquals(headers[header], value.vary[header])) { matches = false break } diff --git a/lib/handler/cache-handler.js b/lib/handler/cache-handler.js --- a/lib/handler/cache-handler.js +++ b/lib/handler/cache-handler.js @@ -10,7 +10,9 @@ const { function noop () {} /** - * @implements {import('../../types/dispatcher.d.ts').default.DispatchHandler} + * @typedef {import('../../types/dispatcher.d.ts').default.DispatchHandler} DispatchHandler + * + * @implements {DispatchHandler} */ class CacheHandler { /** @@ -18,6 +20,16 @@ class CacheHandler { */ #cacheKey + /** + * @type {import('../../types/cache-interceptor.d.ts').default.CacheHandlerOptions['type']} + */ + #cacheType + + /** + * @type {number | undefined} + */ + #cacheByDefault + /** * @type {import('../../types/cache-interceptor.d.ts').default.CacheStore} */ @@ -38,8 +50,10 @@ class CacheHandler { * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} cacheKey * @param {import('../../types/dispatcher.d.ts').default.DispatchHandler} handler */ - constructor ({ store }, cacheKey, handler) { + constructor ({ store, type, cacheByDefault }, cacheKey, handler) { this.#store = store + this.#cacheType = type + this.#cacheByDefault = cacheByDefault this.#cacheKey = cacheKey this.#handler = handler } @@ -83,24 +97,47 @@ class CacheHandler { } const cacheControlHeader = headers['cache-control'] - if (!cacheControlHeader) { + if (!cacheControlHeader && !headers['expires'] && !this.#cacheByDefault) { // Don't have the cache control header or the cache is full return downstreamOnHeaders() } - const cacheControlDirectives = parseCacheControlHeader(cacheControlHeader) - if (!canCacheResponse(statusCode, headers, cacheControlDirectives)) { + const cacheControlDirectives = cacheControlHeader ? parseCacheControlHeader(cacheControlHeader) : {} + if (!canCacheResponse(this.#cacheType, statusCode, headers, cacheControlDirectives)) { return downstreamOnHeaders() } + const age = getAge(headers) + const now = Date.now() - const staleAt = determineStaleAt(now, headers, cacheControlDirectives) + const staleAt = determineStaleAt(this.#cacheType, now, headers, cacheControlDirectives) ?? this.#cacheByDefault if (staleAt) { - const varyDirectives = this.#cacheKey.headers && headers.vary - ? parseVaryHeader(headers.vary, this.#cacheKey.headers) - : undefined - const deleteAt = determineDeleteAt(now, cacheControlDirectives, staleAt) + let baseTime = now + if (headers['date']) { + const parsedDate = parseInt(headers['date']) + const date = new Date(isNaN(parsedDate) ? 
headers['date'] : parsedDate) + if (date instanceof Date && !isNaN(date)) { + baseTime = date.getTime() + } + } + + const absoluteStaleAt = staleAt + baseTime + + if (now >= absoluteStaleAt || (age && age >= staleAt)) { + // Response is already stale + return downstreamOnHeaders() + } + + let varyDirectives + if (this.#cacheKey.headers && headers.vary) { + varyDirectives = parseVaryHeader(headers.vary, this.#cacheKey.headers) + if (!varyDirectives) { + // Parse error + return downstreamOnHeaders() + } + } + const deleteAt = determineDeleteAt(cacheControlDirectives, absoluteStaleAt) const strippedHeaders = stripNecessaryHeaders(headers, cacheControlDirectives) /** @@ -112,8 +149,8 @@ class CacheHandler { headers: strippedHeaders, vary: varyDirectives, cacheControlDirectives, - cachedAt: now, - staleAt, + cachedAt: age ? now - (age * 1000) : now, + staleAt: absoluteStaleAt, deleteAt } @@ -129,6 +166,7 @@ class CacheHandler { .on('drain', () => controller.resume()) .on('error', function () { // TODO (fix): Make error somehow observable? + handler.#writeStream = undefined }) .on('close', function () { if (handler.#writeStream === this) { @@ -167,25 +205,29 @@ class CacheHandler { /** * @see https://www.rfc-editor.org/rfc/rfc9111.html#name-storing-responses-to-authen * + * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType * @param {number} statusCode * @param {Record<string, string | string[]>} headers * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives */ -function canCacheResponse (statusCode, headers, cacheControlDirectives) { +function canCacheResponse (cacheType, statusCode, headers, cacheControlDirectives) { if (statusCode !== 200 && statusCode !== 307) { return false } if ( - cacheControlDirectives.private === true || cacheControlDirectives['no-cache'] === true || cacheControlDirectives['no-store'] ) { return false } + if (cacheType === 'shared' && cacheControlDirectives.private === true) { + return false + } + // https://www.rfc-editor.org/rfc/rfc9111.html#section-4.1-5 - if (headers.vary === '*') { + if (headers.vary?.includes('*')) { return false } @@ -214,60 +256,88 @@ function canCacheResponse (statusCode, headers, cacheControlDirectives) { } /** + * @param {Record<string, string | string[]>} headers + * @returns {number | undefined} + */ +function getAge (headers) { + if (!headers.age) { + return undefined + } + + const age = parseInt(Array.isArray(headers.age) ? 
headers.age[0] : headers.age) + if (isNaN(age) || age >= 2147483647) { + return undefined + } + + return age +} + +/** + * @param {import('../../types/cache-interceptor.d.ts').default.CacheOptions['type']} cacheType * @param {number} now * @param {Record<string, string | string[]>} headers * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives * * @returns {number | undefined} time that the value is stale at or undefined if it shouldn't be cached */ -function determineStaleAt (now, headers, cacheControlDirectives) { - // Prioritize s-maxage since we're a shared cache - // s-maxage > max-age > Expire - // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.2.10-3 - const sMaxAge = cacheControlDirectives['s-maxage'] - if (sMaxAge) { - return now + (sMaxAge * 1000) - } - - if (cacheControlDirectives.immutable) { - // https://www.rfc-editor.org/rfc/rfc8246.html#section-2.2 - return now + 31536000 +function determineStaleAt (cacheType, now, headers, cacheControlDirectives) { + if (cacheType === 'shared') { + // Prioritize s-maxage since we're a shared cache + // s-maxage > max-age > Expire + // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.2.10-3 + const sMaxAge = cacheControlDirectives['s-maxage'] + if (sMaxAge) { + return sMaxAge * 1000 + } } const maxAge = cacheControlDirectives['max-age'] if (maxAge) { - return now + (maxAge * 1000) + return maxAge * 1000 } - if (headers.expire && typeof headers.expire === 'string') { + if (headers.expires && typeof headers.expires === 'string') { // https://www.rfc-editor.org/rfc/rfc9111.html#section-5.3 - const expiresDate = new Date(headers.expire) + const expiresDate = new Date(headers.expires) if (expiresDate instanceof Date && Number.isFinite(expiresDate.valueOf())) { - return now + (Date.now() - expiresDate.getTime()) + if (now >= expiresDate.getTime()) { + return undefined + } + + return expiresDate.getTime() - now } } + if (cacheControlDirectives.immutable) { + // https://www.rfc-editor.org/rfc/rfc8246.html#section-2.2 + return 31536000 + } + return undefined } /** - * @param {number} now * @param {import('../../types/cache-interceptor.d.ts').default.CacheControlDirectives} cacheControlDirectives * @param {number} staleAt */ -function determineDeleteAt (now, cacheControlDirectives, staleAt) { +function determineDeleteAt (cacheControlDirectives, staleAt) { let staleWhileRevalidate = -Infinity let staleIfError = -Infinity + let immutable = -Infinity if (cacheControlDirectives['stale-while-revalidate']) { - staleWhileRevalidate = now + (cacheControlDirectives['stale-while-revalidate'] * 1000) + staleWhileRevalidate = staleAt + (cacheControlDirectives['stale-while-revalidate'] * 1000) } if (cacheControlDirectives['stale-if-error']) { - staleIfError = now + (cacheControlDirectives['stale-if-error'] * 1000) + staleIfError = staleAt + (cacheControlDirectives['stale-if-error'] * 1000) } - return Math.max(staleAt, staleWhileRevalidate, staleIfError) + if (staleWhileRevalidate === -Infinity && staleIfError === -Infinity) { + immutable = 31536000 + } + + return Math.max(staleAt, staleWhileRevalidate, staleIfError, immutable) } /** @@ -277,7 +347,29 @@ function determineDeleteAt (now, cacheControlDirectives, staleAt) { * @returns {Record<string, string | string []>} */ function stripNecessaryHeaders (headers, cacheControlDirectives) { - const headersToRemove = ['connection'] + const headersToRemove = [ + 'connection', + 'proxy-authenticate', + 'proxy-authentication-info', + 
'proxy-authorization', + 'proxy-connection', + 'te', + 'transfer-encoding', + 'upgrade', + // We'll add age back when serving it + 'age' + ] + + if (headers['connection']) { + if (Array.isArray(headers['connection'])) { + // connection: a + // connection: b + headersToRemove.push(...headers['connection'].map(header => header.trim())) + } else { + // connection: a, b + headersToRemove.push(...headers['connection'].split(',').map(header => header.trim())) + } + } if (Array.isArray(cacheControlDirectives['no-cache'])) { headersToRemove.push(...cacheControlDirectives['no-cache']) @@ -288,12 +380,13 @@ function stripNecessaryHeaders (headers, cacheControlDirectives) { } let strippedHeaders - for (const headerName of Object.keys(headers)) { - if (headersToRemove.includes(headerName)) { + for (const headerName of headersToRemove) { + if (headers[headerName]) { strippedHeaders ??= { ...headers } - delete headers[headerName] + delete strippedHeaders[headerName] } } + return strippedHeaders ?? headers } diff --git a/lib/interceptor/cache.js b/lib/interceptor/cache.js --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -1,4 +1,3 @@ -// @ts-check 'use strict' const assert = require('node:assert') @@ -196,7 +195,6 @@ function handleResult ( if (!result) { return handleUncachedResponse(dispatch, globalOpts, cacheKey, handler, opts, reqCacheControl) } - if (!result.body && opts.method !== 'HEAD') { throw new Error('body is undefined but method isn\'t HEAD') } @@ -228,15 +226,24 @@ function handleResult ( withinStaleIfErrorThreshold = now < (result.staleAt + (staleIfErrorExpiry * 1000)) } + let headers = { + ...opts.headers, + 'if-modified-since': new Date(result.cachedAt).toUTCString(), + 'if-none-match': result.etag + } + + if (result.vary) { + headers = { + ...headers, + ...result.vary + } + } + // We need to revalidate the response return dispatch( { ...opts, - headers: { - ...opts.headers, - 'if-modified-since': new Date(result.cachedAt).toUTCString(), - 'if-none-match': result.etag - } + headers }, new CacheRevalidationHandler( (success, context) => { @@ -267,7 +274,9 @@ function handleResult ( module.exports = (opts = {}) => { const { store = new MemoryCacheStore(), - methods = ['GET'] + methods = ['GET'], + cacheByDefault = undefined, + type = 'shared' } = opts if (typeof opts !== 'object' || opts === null) { @@ -277,9 +286,19 @@ module.exports = (opts = {}) => { assertCacheStore(store, 'opts.store') assertCacheMethods(methods, 'opts.methods') + if (typeof cacheByDefault !== 'undefined' && typeof cacheByDefault !== 'number') { + throw new TypeError(`exepcted opts.cacheByDefault to be number or undefined, got ${typeof cacheByDefault}`) + } + + if (typeof type !== 'undefined' && type !== 'shared' && type !== 'private') { + throw new TypeError(`exepcted opts.type to be shared, private, or undefined, got ${typeof type}`) + } + const globalOpts = { store, - methods + methods, + cacheByDefault, + type } const safeMethodsToNotCache = util.safeHTTPMethods.filter(method => methods.includes(method) === false) diff --git a/lib/util/cache.js b/lib/util/cache.js --- a/lib/util/cache.js +++ b/lib/util/cache.js @@ -106,7 +106,17 @@ function parseCacheControlHeader (header) { */ const output = {} - const directives = Array.isArray(header) ? 
+  let directives
+  if (Array.isArray(header)) {
+    directives = []
+
+    for (const directive of header) {
+      directives.push(...directive.split(','))
+    }
+  } else {
+    directives = header.split(',')
+  }
+
   for (let i = 0; i < directives.length; i++) {
     const directive = directives[i].toLowerCase()
     const keyValueDelimiter = directive.indexOf('=')
@@ -114,10 +124,8 @@ function parseCacheControlHeader (header) {
     let key
     let value
     if (keyValueDelimiter !== -1) {
-      key = directive.substring(0, keyValueDelimiter).trim()
-      value = directive
-        .substring(keyValueDelimiter + 1)
-        .trim()
+      key = directive.substring(0, keyValueDelimiter).trimStart()
+      value = directive.substring(keyValueDelimiter + 1)
     } else {
       key = directive.trim()
     }
@@ -129,16 +137,28 @@ function parseCacheControlHeader (header) {
       case 's-maxage':
       case 'stale-while-revalidate':
      case 'stale-if-error': {
-        if (value === undefined) {
+        if (value === undefined || value[0] === ' ') {
          continue
        }
 
+        if (
+          value.length >= 2 &&
+          value[0] === '"' &&
+          value[value.length - 1] === '"'
+        ) {
+          value = value.substring(1, value.length - 1)
+        }
+
         const parsedValue = parseInt(value, 10)
         // eslint-disable-next-line no-self-compare
         if (parsedValue !== parsedValue) {
           continue
         }
 
+        if (key === 'max-age' && key in output && output[key] >= parsedValue) {
+          continue
+        }
+
         output[key] = parsedValue
 
         break
@@ -187,11 +207,19 @@ function parseCacheControlHeader (header) {
           headers[headers.length - 1] = lastHeader
         }
 
-        output[key] = headers
+        if (key in output) {
+          output[key] = output[key].concat(headers)
+        } else {
+          output[key] = headers
+        }
       } else {
         // Something like `no-cache=some-header`
-        output[key] = [value]
+        if (key in output) {
+          output[key] = output[key].concat(value)
+        } else {
+          output[key] = [value]
+        }
       }
 
       break
@@ -229,7 +257,7 @@ function parseCacheControlHeader (header) {
  * @returns {Record<string, string | string[]>}
  */
 function parseVaryHeader (varyHeader, headers) {
-  if (typeof varyHeader === 'string' && varyHeader === '*') {
+  if (typeof varyHeader === 'string' && varyHeader.includes('*')) {
     return headers
   }
 
@@ -243,6 +271,8 @@ function parseVaryHeader (varyHeader, headers) {
 
     if (headers[trimmedHeader]) {
       output[trimmedHeader] = headers[trimmedHeader]
+    } else {
+      return undefined
     }
   }
 
diff --git a/types/cache-interceptor.d.ts b/types/cache-interceptor.d.ts
--- a/types/cache-interceptor.d.ts
+++ b/types/cache-interceptor.d.ts
@@ -7,6 +7,10 @@ declare namespace CacheHandler {
 
   export interface CacheHandlerOptions {
     store: CacheStore
+
+    cacheByDefault?: number
+
+    type?: CacheOptions['type']
   }
 
   export interface CacheOptions {
@@ -20,6 +24,20 @@ declare namespace CacheHandler {
      * @see https://www.rfc-editor.org/rfc/rfc9110#section-9.2.1
      */
     methods?: CacheMethods[]
+
+    /**
+     * RFC9111 allows for caching responses that we aren't explicitly told to
+     * cache or to not cache.
+     * @see https://www.rfc-editor.org/rfc/rfc9111.html#section-3-5
+     * @default undefined
+     */
+    cacheByDefault?: number
+
+    /**
+     * TODO docs
+     * @default 'shared'
+     */
+    type?: 'shared' | 'private'
   }
 
   export interface CacheControlDirectives {
@@ -70,6 +88,7 @@ declare namespace CacheHandler {
     statusCode: number
     statusMessage: string
     headers: Record<string, string | string[]>
+    vary?: Record<string, string | string[]>
     etag?: string
     body: null | Readable | Iterable<Buffer> | AsyncIterable<Buffer> | Buffer | Iterable<string> | AsyncIterable<string> | string
     cacheControlDirectives: CacheControlDirectives,
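For reviewers, here is a minimal usage sketch of the two options introduced above (`type` and `cacheByDefault`). It reuses the imports and option shapes from the conformance-test harness added later in this patch (`test/cache-interceptor/cache-tests.mjs`); the concrete values (`'private'`, `3600`, the URL) are illustrative only, and the exact semantics of `cacheByDefault` are whatever the interceptor defines, not something this sketch specifies.

```javascript
// Sketch only (run as an ES module) — mirrors the test harness wiring, not prescriptive docs.
import { Agent, interceptors, setGlobalDispatcher, fetch } from '../../index.js'
import MemoryCacheStore from '../../lib/cache/memory-cache-store.js'

// Compose a dispatcher that caches as a private (browser-like) cache and opts
// into caching responses that carry no explicit freshness information.
const dispatcher = new Agent().compose(interceptors.cache({
  store: new MemoryCacheStore(),
  methods: ['GET', 'HEAD'],
  type: 'private',      // new in this patch: 'shared' (default) or 'private'
  cacheByDefault: 3600  // new in this patch: see CacheOptions.cacheByDefault above
}))

setGlobalDispatcher(dispatcher)

// Requests made through the global dispatcher can now be answered from the cache.
const res = await fetch('http://localhost:8000/')
console.log(res.status)
```

The harness below builds the same kind of dispatcher once per test environment, swapping `type` and the store between runs.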
diff --git a/test/cache-interceptor/cache-store-test-utils.js b/test/cache-interceptor/cache-store-test-utils.js --- a/test/cache-interceptor/cache-store-test-utils.js +++ b/test/cache-interceptor/cache-store-test-utils.js @@ -58,6 +58,7 @@ function cacheStoreTests (CacheStore) { deepStrictEqual(await readResponse(readResult), { ...requestValue, etag: undefined, + vary: undefined, cacheControlDirectives: {}, body: Buffer.concat(requestBody.map(x => Buffer.from(x))) }) @@ -97,6 +98,7 @@ function cacheStoreTests (CacheStore) { deepStrictEqual(await readResponse(readResult), { ...anotherValue, etag: undefined, + vary: undefined, cacheControlDirectives: {}, body: Buffer.concat(anotherBody.map(x => Buffer.from(x))) }) @@ -133,6 +135,7 @@ function cacheStoreTests (CacheStore) { deepStrictEqual(await readResponse(readResult), { ...requestValue, etag: undefined, + vary: undefined, cacheControlDirectives: {}, body: Buffer.concat(requestBody.map(x => Buffer.from(x))) }) @@ -208,6 +211,7 @@ function cacheStoreTests (CacheStore) { deepStrictEqual(await readResponse(readStream), { ...responseValue, etag: undefined, + vary: { 'some-header': 'hello world' }, cacheControlDirectives: {}, body: Buffer.concat(requestBody.map(x => Buffer.from(x))) }) diff --git a/test/cache-interceptor/cache-tests-worker.mjs b/test/cache-interceptor/cache-tests-worker.mjs new file mode 100644 --- /dev/null +++ b/test/cache-interceptor/cache-tests-worker.mjs @@ -0,0 +1,6 @@ +'use strict' + +import { parentPort } from 'node:worker_threads' + +await import('../fixtures/cache-tests/test-engine/server/server.mjs') +parentPort.postMessage('listening') diff --git a/test/cache-interceptor/cache-tests.mjs b/test/cache-interceptor/cache-tests.mjs new file mode 100644 --- /dev/null +++ b/test/cache-interceptor/cache-tests.mjs @@ -0,0 +1,339 @@ +'use strict' + +import { Worker } from 'node:worker_threads' +import { parseArgs, styleText } from 'node:util' +import { join } from 'node:path' +import { tmpdir } from 'node:os' +import { once } from 'node:events' +import { Agent, interceptors, setGlobalDispatcher, fetch } from '../../index.js' +import MemoryCacheStore from '../../lib/cache/memory-cache-store.js' +import { + getResults, + runTests as runTestSuite +} from '../fixtures/cache-tests/test-engine/client/runner.mjs' +import tests from '../fixtures/cache-tests/tests/index.mjs' +import { testResults, testUUIDs } from '../fixtures/cache-tests/test-engine/client/test.mjs' +import { determineTestResult, testLookup } from '../fixtures/cache-tests/test-engine/lib/results.mjs' + +/** + * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheOptions} CacheOptions + * + * @typedef {{ + * opts: CacheOptions, + * ignoredTests?: string[], + * }} TestEnvironment + * + * @typedef {{ + * total: number, + * skipped: number, + * passed: number, + * failed: number, + * optionalFailed: number, + * setupFailed: number, + * testHarnessFailed: number, + * dependencyFailed: number, + * retried: number + * }} TestStats + */ + +const CLI_OPTIONS = parseArgs({ + options: { + type: { + type: 'string', + multiple: true, + short: 't' + } + } +}) + +/** + * @type {TestEnvironment[]} + */ +const CACHE_TYPES = [ + { + opts: { type: 'shared', methods: ['GET', 'HEAD'] }, + ignoredTests: [ + 'freshness-max-age-s-maxage-private', + 'freshness-max-age-s-maxage-private-multiple' + ] + }, + { + opts: { type: 'private', methods: ['GET', 'HEAD'] } + } +] + +/** + * @type {TestEnvironment[]} + */ +const CACHE_STORES = [ + { opts: { store: new MemoryCacheStore() } 
} +] + +const PROTOCOL = 'http' +const PORT = 8000 +const BASE_URL = `${PROTOCOL}://localhost:${PORT}` +const PIDFILE = join(tmpdir(), 'http-cache-test-server.pid') + +console.log(`PROTOCOL: ${styleText('gray', PROTOCOL)}`) +console.log(` PORT: ${styleText('gray', `${PORT}`)}`) +console.log(`BASE_URL: ${styleText('gray', BASE_URL)}`) +console.log(` PIDFILE: ${styleText('gray', PIDFILE)}`) +console.log('') + +const testEnvironments = filterEnvironments( + buildTestEnvironments(0, [CACHE_TYPES, CACHE_STORES]) +) + +console.log(`Testing ${testEnvironments.length} environments`) + +for (const environment of testEnvironments) { + console.log('TEST ENVIRONMENT') + if (environment.opts.store) { + console.log(` store: ${styleText('gray', environment.opts.store?.constructor.name ?? 'undefined')}`) + } + if (environment.opts.methods) { + console.log(` methods: ${styleText('gray', JSON.stringify(environment.opts.methods) ?? 'undefined')}`) + } + if (environment.opts.cacheByDefault) { + console.log(` cacheByDefault: ${styleText('gray', `${environment.opts.cacheByDefault}`)}`) + } + if (environment.opts.type) { + console.log(` type: ${styleText('gray', environment.opts.type)}`) + } + if (environment.ignoredTests) { + console.log(` ignored tests: ${styleText('gray', JSON.stringify(environment.ignoredTests))}`) + } + + try { + await runTests(environment) + } catch (err) { + console.error(err) + } + + const stats = printResults(environment, getResults()) + printStats(stats) + + // Cleanup state + for (const key of Object.keys(testUUIDs)) { + delete testUUIDs[key] + } + for (const key of Object.keys(testResults)) { + delete testResults[key] + } + + console.log('') +} + +/** + * @param {number} idx + * @param {TestEnvironment[][]} testOptions + * @returns {TestEnvironment[]} + */ +function buildTestEnvironments (idx, testOptions) { + const baseEnvironments = testOptions[idx] + + if (idx + 1 >= testOptions.length) { + // We're at the end, nothing more to make a matrix out of + return baseEnvironments + } + + /** + * @type {TestEnvironment[]} + */ + const environments = [] + + // Get all of the environments below us + const subEnvironments = buildTestEnvironments(idx + 1, testOptions) + + for (const baseEnvironment of baseEnvironments) { + const combinedEnvironments = subEnvironments.map(subEnvironment => { + const ignoredTests = baseEnvironment.ignoredTests ?? [] + if (subEnvironment.ignoredTests) { + ignoredTests.push(...subEnvironment.ignoredTests) + } + + return { + opts: { + ...baseEnvironment.opts, + ...subEnvironment.opts + }, + ignoredTests: ignoredTests.length > 0 ? ignoredTests : undefined + } + }) + + environments.push(...combinedEnvironments) + } + + return environments +} + +/** + * @param {TestEnvironment[]} environments + * @returns {TestEnvironment[]} + */ +function filterEnvironments (environments) { + const { values } = CLI_OPTIONS + + if (values.type) { + environments = environments.filter(env => + env.opts.type === undefined || + values.type?.includes(env.opts.type) + ) + } + + return environments +} + +/** + * @param {TestEnvironment} environment + */ +async function runTests (environment) { + // Start the test server. 
We use a worker here since the suite doesn't expose it + const worker = new Worker(join(import.meta.dirname, 'cache-tests-worker.mjs'), { + env: { + npm_config_protocol: PROTOCOL, + npm_config_port: `${PORT}`, + npm_config_pidfile: PIDFILE + } + }) + + try { + await once(worker, 'message', { signal: AbortSignal.timeout(5000) }) + + const client = new Agent().compose(interceptors.cache(environment.opts)) + setGlobalDispatcher(client) + + // Run the tests + await runTestSuite(tests, fetch, true, BASE_URL) + } finally { + await worker.terminate() + } +} + +/** + * @param {TestEnvironment} environment + * @param {any} results + * @returns {TestStats} + */ +function printResults (environment, results) { + /** + * @type {TestStats} + */ + const stats = { + // TODO this won't always be this + total: Object.keys(results).length - (environment.ignoredTests?.length || 0), + skipped: 0, + passed: 0, + failed: 0, + optionalFailed: 0, + setupFailed: 0, + testHarnessFailed: 0, + dependencyFailed: 0, + retried: 0 + } + + for (const testId in results) { + if (environment.ignoredTests?.includes(testId)) { + continue + } + + const test = testLookup(tests, testId) + // eslint-disable-next-line no-unused-vars + const [code, _, icon] = determineTestResult(tests, testId, results, false) + + let status + let color + switch (code) { + case '-': + status = 'skipped' + color = 'gray' + stats.skipped++ + break + case '\uf058': + status = 'pass' + color = 'green' + stats.passed++ + break + case '\uf057': + status = 'failed' + color = 'red' + stats.failed++ + break + case '\uf05a': + status = 'failed (optional)' + color = 'yellow' + stats.optionalFailed++ + break + case '\uf055': + status = 'yes' + color = 'green' + stats.passed++ + break + case '\uf056': + status = 'no' + color = 'red' + stats.failed++ + break + case '\uf059': + status = 'setup failure' + color = 'red' + stats.setupFailed++ + break + case '\uf06a': + status = 'test harness failure' + color = 'red' + stats.testHarnessFailed++ + break + case '\uf192': + status = 'dependency failure' + color = 'red' + stats.dependencyFailed++ + break + case '\uf01e': + status = 'retry' + color = 'yellow' + stats.retried++ + break + default: + status = 'unknown' + color = ['strikethrough', 'white'] + break + } + + console.log(`${icon} ${styleText(color, `${status} - ${test.name}`)} (${styleText('gray', testId)})`) + + if (results[testId] !== true) { + const [type, message] = results[testId] + console.log(` ${styleText(color, `${type}: ${message}`)}`) + } + } + + return stats +} + +/** + * @param {TestStats} stats + */ +function printStats (stats) { + const { + total, + skipped, + passed, + failed, + optionalFailed, + setupFailed, + testHarnessFailed, + dependencyFailed, + retried + } = stats + + console.log(`\n Total tests: ${total}`) + console.log(` ${styleText('gray', 'Skipped')}: ${skipped} (${((skipped / total) * 100).toFixed(1)}%)`) + console.log(` ${styleText('green', 'Passed')}: ${passed} (${((passed / total) * 100).toFixed(1)}%)`) + console.log(` ${styleText('red', 'Failed')}: ${failed} (${((failed / total) * 100).toFixed(1)}%)`) + console.log(` ${styleText('yellow', 'Failed (optional)')}: ${optionalFailed} (${((optionalFailed / total) * 100).toFixed(1)}%)`) + console.log(` ${styleText('red', 'Setup failed')}: ${setupFailed} (${((setupFailed / total) * 100).toFixed(1)}%)`) + console.log(`${styleText('red', 'Test Harness Failed')}: ${testHarnessFailed} (${((testHarnessFailed / total) * 100).toFixed(1)}%)`) + console.log(` ${styleText('red', 'Dependency Failed')}: 
${dependencyFailed} (${((dependencyFailed / total) * 100).toFixed(1)}%)`) + console.log(` ${styleText('yellow', 'Retried')}: ${retried} (${((retried / total) * 100).toFixed(1)}%)`) +} diff --git a/test/cache-interceptor/sqlite-cache-store-tests.js b/test/cache-interceptor/sqlite-cache-store-tests.js --- a/test/cache-interceptor/sqlite-cache-store-tests.js +++ b/test/cache-interceptor/sqlite-cache-store-tests.js @@ -69,6 +69,7 @@ test('SqliteCacheStore works nicely with multiple stores', async (t) => { deepStrictEqual(await readResponse(readable), { ...requestValue, etag: undefined, + vary: undefined, cacheControlDirectives: undefined, body: Buffer.concat(requestBody.map(x => Buffer.from(x))) }) @@ -79,6 +80,7 @@ test('SqliteCacheStore works nicely with multiple stores', async (t) => { deepStrictEqual(await readResponse(readable), { ...requestValue, etag: undefined, + vary: undefined, cacheControlDirectives: undefined, body: Buffer.concat(requestBody.map(x => Buffer.from(x))) }) diff --git a/test/fixtures/cache-tests/.editorconfig b/test/fixtures/cache-tests/.editorconfig new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/.editorconfig @@ -0,0 +1,16 @@ +root = true + +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +charset = utf-8 +trim_trailing_whitespace = true +max_line_length = 100 +indent_size = 2 + +[*.py] +indent_size = 4 + +[Makefile] +indent_style = tab diff --git a/test/fixtures/cache-tests/.github/dependabot.yml b/test/fixtures/cache-tests/.github/dependabot.yml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/.github/dependabot.yml @@ -0,0 +1,15 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. 
+# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "github-actions" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" + - package-ecosystem: "npm" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" diff --git a/test/fixtures/cache-tests/.github/workflows/lint.yml b/test/fixtures/cache-tests/.github/workflows/lint.yml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/.github/workflows/lint.yml @@ -0,0 +1,18 @@ +name: Lint + +on: [push, pull_request] + +jobs: + validate: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: '14' + - run: npm install -g standard + - run: npm install ajv + - run: npm run lint + - run: npm run validate diff --git a/test/fixtures/cache-tests/.github/workflows/test.yml b/test/fixtures/cache-tests/.github/workflows/test.yml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/.github/workflows/test.yml @@ -0,0 +1,20 @@ +name: Cache Tests +on: workflow_dispatch + +jobs: + test: + + runs-on: macOS-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: '14' + - name: Run browser tests + run: ./test-browser.sh + - name: Push update + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + ./update.sh diff --git a/test/fixtures/cache-tests/.gitignore b/test/fixtures/cache-tests/.gitignore new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/.gitignore @@ -0,0 +1,3 @@ +node_modules +package-lock.json +server.pid diff --git a/test/fixtures/cache-tests/CNAME b/test/fixtures/cache-tests/CNAME new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/CNAME @@ -0,0 +1 @@ +cache-tests.fyi \ No newline at end of file diff --git a/test/fixtures/cache-tests/CONTRIBUTING.md b/test/fixtures/cache-tests/CONTRIBUTING.md new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/CONTRIBUTING.md @@ -0,0 +1,108 @@ + +# Contributing + +Contributions from cache vendors and users are welcome. + +Over time we'll document guidelines and best practices for contribution, but in the meantime, feel free to file issues and create PRs. + + +## Test Format + +Each test run gets its own URL, randomized content, and operates independently. + +Tests are kept in JavaScript files in `tests/`, each file representing a suite. + +A suite is an object with the following members: + +- `name` - A concise description of the suite. Required. +- `id` - A short, stable identifier for the suite. Required. +- `description` - A longer description of the suite, can contain Markdown. Optional. +- `spec_anchors` - An array of strings that represent anchors in the HTTP Caching specification related to this suite. Optional. +- `tests` - see below. + +E.g., + +```javascript +export default { + name: 'Example Tests', + id: 'example', + description: 'These are the `Foo` tests!' + tests: [ ... ] +} +``` + +The `tests` member is an array of objects, with the following members: + +- `name` - A concise description of the test. Can contain Markdown. Required. +- `id` - A short, stable identifier for the test. Required. +- `description` - Longer details of the test. Optional. 
+- `kind` - One of: + - `required` - This is a conformance test for a requirement in the standard. Default. + - `optimal` - This test is to see if the cache behaves optimally. + - `check` - This test is gathering information about cache behaviour. +- `requests` - a list of request objects (see below). +- `browser_only` - if `true`, will only run on browser caches. Default `false`. +- `cdn_only` - if `true`, will only run on CDN caches. Default `false`. +- `browser_skip` - if `true, will not run on browser caches. Default `false`. +- `depends_on` - a list of test IDs that, when one fails, indicates that this test's results are not useful. Currently limited to test IDs in the same suite. Optional. +- `spec_anchors` - An array of strings that represent anchors in the HTTP Caching specification related to this test. Optional. + +Possible members of a request object: + +- `request_method` - A string containing the HTTP method to be used. Default `GET`. +- `request_headers` - An array of `[header_name_string, header_value_string]` arrays to + emit in the request. +- `request_body` - A string to use as the request body. +- `query_arg` - query arguments to add. +- `filename` - filename to use. +- `mode` - The mode string to pass to `fetch()`. +- `credentials` - The credentials string to pass to `fetch()`. +- `cache` - The cache string to pass to `fetch()`. +- `redirect` - The redirect string to pass to `fetch()`. +- `pause_after` - Boolean controlling a 3-second pause after the request completes. +- `disconnect` - Close the connection when receiving this request. +- `magic_locations` - Boolean; if `true`, the `Location` and `Content-Location` response headers will be rewritten to full URLs. +- `magic_ims` - Boolean; if `true`, the `If-Modified-Since` request header will be written as a delta against the previous response's `Last-Modified`, instead of `now`. +- `rfc850date` - Array of header names to use RFC850 format on when magically converting dates. +- `response_status` - A `[number, string]` array containing the HTTP status code + and phrase to return from the origin. Default `200` or `304`. +- `response_headers` - An array of `[header_name_string, header_value_string]` arrays to + emit in the origin response. These values will also be checked like + expected_response_headers, unless there is a third value that is + `false`. +- `response_body` - String to send as the response body from the origin. Defaults to + the test identifier. +- `response_pause` - Integer number of seconds for the server to pause before generating a response. +- `check_body` - Whether to check the response body. Default `true`. +- `expected_type` - One of: + - `cached`: The response is served from cache + - `not_cached`: The response is not served from cache; it comes from the origin + - `lm_validated`: The response comes from cache, but was validated on the origin with Last-Modified + - `etag_validated`: The response comes from cache, but was validated on the origin with an ETag +- `expected_method` - A string HTTP method; checked on the server. +- `expected_status` - A numeric HTTP status code; checked on the client. + If not set, the value of `response_status[0]` will be used; if that + is not set, 200 will be used. +- `expected_request_headers` - An array of `[header_name_string, header_value_string]` representing + headers to check the request for on the server, or an array of + strings representing header names to check for presence in the + request. 
+- `expected_request_headers_missing` - An array of `[header_name_string, header_value_string]` + representing headers to check for absence in the request for on the server, or an array of strings representing header names to check for absence in the request. +- `expected_response_headers` - An array of any combination of the following. See also `response_headers`. + - `header_name_string`: assert that the named header is present + - `[header_name_string, header_value_string]`: assert that the header has the given value + - `[header_name_string, '=', other_header_name]`: assert that the two headers have the same value + - `[header_name_string, '>', number]`: assert that the header's value is numerically greater than specified +- `expected_response_headers_missing` - An array of any combination of the following. + - `header_name_string` representing headers to check that the response on the client does not include. + - `[header_name_string, header_value_string]`: headers to check that the response is either missing, or if they're present, that they do _not_ contain the given value string (evaluated against the whole header value). +- `expected_response_text` - A string to check the response body against on the client. +- `setup` - Boolean to indicate whether this is a setup request; failures don't mean the actual test failed. +- `setup_tests` - Array of values that indicate whether the specified check is part of setup; failures don't mean the actual test failed. One of: `["expected_type", "expected_method", "expected_status", "expected_response_headers", "expected_response_text", "expected_request_headers"]` + +`server.js` stashes an entry containing observed headers for each request it receives. When the +test fetches have run, this state is retrieved and the expected_* lists are checked, including +their length. + +For convenience and clarity when writing tests, there are some request templates available in `templates.mjs`. Each template is a function which accepts a request object, as defined above, and returns a new request object. Any fields in the template are added to the request object unless a field of the same name is already present. diff --git a/test/fixtures/cache-tests/LICENSE b/test/fixtures/cache-tests/LICENSE new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2018, Mark Nottingham +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/fixtures/cache-tests/README.md b/test/fixtures/cache-tests/README.md new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/README.md @@ -0,0 +1,187 @@ +# Tests for HTTP Caches + +This is a test suite for the behaviours of [HTTP caches](https://httpwg.org/specs/rfc9111.html), +including browsers, proxy caches and CDNs. Its public results are available at +[cache-tests.fyi](https://cache-tests.fyi). + +<!-- START doctoc generated TOC please keep comment here to allow auto update --> +<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE --> + +- [Goals](#goals) +- [Installation](#installation) + - [Installing from NPM](#installing-from-npm) +- [Running tests with scripts](#running-tests-with-scripts) + - [Testing with docker](#testing-with-docker) + - [Testing browser caches](#testing-browser-caches) + - [Testing forward proxies](#testing-forward-proxies) +- [Running tests with npm](#running-tests-with-npm) + - [Starting the test server](#starting-the-test-server) + - [Running the client](#running-the-client) +- [Interpreting results](#interpreting-results) + - [Test results FAQ](#test-results-faq) +- [Getting your results onto cache-tests.fyi](#getting-your-results-onto-cache-testsfyi) +- [Creating new tests](#creating-new-tests) + +<!-- END doctoc generated TOC please keep comment here to allow auto update --> + + +## Goals + +Overall, the goal of these tests is to identify variances in the behaviour, both from the normative specifications and between implementations. This in turn can help avoid situations where they act in surprising ways. + +The underlying aim is to provide a basis for discussion about how HTTP caches -- especially in CDNs and reverse proxies -- should behave, so that over time we can adapt the tests and align implementations to behave more consistently. + +In other words, **passing all of the tests currently means nothing** -- this is not a conformance test suite, it's just the start of a conversation, and a **tool to assess how a cache behaves**. + +Therefore, if you believe a test should change (based upon common behaviour or your interpretation of the specifications), or have additional tests, please [contribute](CONTRIBUTING.md). + + +## Installation + +The tests require a recent version of [NodeJS](https://nodejs.org/) (14.8.0 or greater), which includes the `npm` package manager. + +To install the most recent source from GitHub (*recommended; things are moving fast*): + +> git clone https://github.com/http-tests/cache-tests.git + +and then install dependencies: + +> cd cache-tests; npm i + +### Installing from NPM + +Alternatively, for the most recent release: + +> npm i --legacy-bundling http-cache-tests + +Note that the version in the registry is not necessarily up-to-date. + + +## Running tests with scripts + +A number of scripts are supplied to simplify common test scenarios. + +### Testing with docker + +The `mnot/proxy-cache-tests` Docker image can be used to test common reverse proxy caches. 
Once you have docker running, you can run the CLI tests against a given proxy like this: + +> ./test-docker.sh squid + +Omit the proxy name to test all available in the Docker image. To run an individual test case, try: + +> ./test-docker.sh -i freshness-none nginx + + +### Testing browser caches + +To test a browser, just point it at `https://{hostname:port}/test-browser.html` after setting up the server. + +On OSX, you can use `test-browser.sh` to automate this: + +> ./test-browser.sh safari + +Again, omit the browser name to test all. Run a single case with: + +> ./test-browser.sh -i freshness-none safari + +Make sure that your browsers are not configured to use a proxy cache, and that the network being tested upon does not use an intercepting proxy cache. + + +### Testing forward proxies + +To test a forward proxy which listens on 127.0.0.1:8082, start the server: + +> npm run server + +and then run: + +> HTTP_PROXY=http://127.0.0.1:8082 npm run --silent cli --base=http://127.0.0.1:8000 + +or: + +> ./test-host.sh 127.0.0.1:8002 + +Again, pass `-i` to run a specific test. + + +## Running tests with npm + +If you don't want to run the test scripts (see above), this section documents how to run tests directly with NPM. First, you'll need to start the test server; then, you'll need to run the client against it. + +### Starting the test server + +First, start the server-side by running: + +> npm run server + +inside the directory (the repository's directory if you cloned from git, or `node_modules/http-cache-tests` if you installed from npm). + +By default, the server runs on port 8000; to choose a different port, use the `--port` argument; e.g., + +> npm run server --port=8080 + +If you want to run an HTTPS origin, you'll need to specify the `protocol`, `keyfile` and `certfile`: + +> npm run server --protocol=https --keyfile=/path/to/key.pem --certfile=/path-to-cert.pem + +Note that the default port for HTTPS is still 8000. + + +### Running the client + +To test a reverse proxy or CDN from the command line: + +> npm run --silent cli --base=http://server-url.example.org:8000/ + +... using the URL of the server you want to test. This will output the test results in JSON to STDOUT, suitable for inclusion in the `results` directory. See `lib/summary.mjs` for details of how to interpret that. + +To run a single test, use: + +> npm run cli --base=http://server-url.example.org:8000/ --id=test-id + +... where `test-id` is the identifier for the test. This will output the request and response headers as seen by the client and server, along with the results. This is useful for debugging a particular failure. + + +## Interpreting results + +HTTP caching by its nature is an optimisation; implementations aren't required to cache everything. However, when they do cache, their behaviour is constrained by [the specification](https://httpwg.org/specs/rfc9111.html). + +As a result, there are a few different kinds of test results (note that the HTML results use similar but slightly different symbols): + +* ✅ - The test was successful. +* ⛔️ - The test failed, and likely indicates a specification conformance problem. +* ⚠️ - The cache didn't behave in an optimal fashion (usually, it didn't use a stored response when it could have), but this is not a conformance problem. +* ● / ○ - These are tests to see how deployed caches behave; we use them to gather information for future specification work. "yes" and "no" respectively. 
+ +Some additional results might pop up from time to time: + +* ⁉️ - The test harness failed; this is an internal error, please [file a bug if one doesn't exist](https://github.com/http-tests/cache-tests/issues/). +* 🔹 - The test failed during setup; something interfered with the harness's communication between the client and server. See below. +* ↻ - The cache retried a request; this means the test result needs to be interpreted manually, as it may or may not have behaved correctly. +* ⚪️ - Another test that this test depends on has failed; we use dependencies to help assure that we're actually testing the behaviour in question. +* `-` - Not tested; usually because the test isn't applicable to this cache. + +When you're testing with a browser, each test has a `uuid` that identifies that specific test run; this can be used to find its requests in the browser developer tools or proxy logs. Click ⚙︎ to copy it to the clipboard. + + +### Test results FAQ + +If you see a lot of failures, it might be one of a few different issues: + +* If you see lots of grey circles at the top (dependency failures), it's probably because the cache will store and reuse a response without explicit freshness or a validator; see the very first test (`freshness-none`). While this is technically legal in HTTP, it interferes with the tests. Disabling "default caching" or similar usually fixes this. + +* If you see lots of blue diamonds (setup failures), it's likely that the cache is refusing `PUT` requests. Enable them to clear this; the tests use PUT to synchronise state between the client and the server. + + +## Getting your results onto cache-tests.fyi + +[cache-tests.fyi](https://cache-tests.fyi) collects results from caches in browsers, reverse proxies, and CDNs. Its purpose is to gather information about how HTTP caching works "in the wild", to help the [HTTP Working Group](https://httpwg.org) make decisions about how to evolve the specification. + +If your implementation isn't listed and you want it to be, please file an issue, or contact [Mark Nottingham](mailto:mnot@mnot.net). Both open source and proprietary implementations are welcome; if there are commercial concerns about disclosing your results, your identity can be anonymised (e.g., "CDN A"), and will not be disclosed to anyone. + +Right now, all of the reverse proxy and CDN implementations are run by a script on a server, using the command-line client; to keep results up-to-date as the tests evolve, it's most helpful if you can provide an endpoint to test (for reverse proxies and CDNs). 
+ + +## Creating new tests + +See [CONTRIBUTING.md](CONTRIBUTING.md) diff --git a/test/fixtures/cache-tests/asset/badge.png b/test/fixtures/cache-tests/asset/badge.png new file mode 100644 Binary files /dev/null and b/test/fixtures/cache-tests/asset/badge.png differ diff --git a/test/fixtures/cache-tests/asset/fonts/FontAwesome.otf b/test/fixtures/cache-tests/asset/fonts/FontAwesome.otf new file mode 100644 Binary files /dev/null and b/test/fixtures/cache-tests/asset/fonts/FontAwesome.otf differ diff --git a/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.eot b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.eot new file mode 100644 Binary files /dev/null and b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.eot differ diff --git a/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.svg b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.svg new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ +<?xml version="1.0" standalone="no"?> +<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" > +<svg> +<metadata> +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. +</metadata> +<defs> +<font id="FontAwesome" horiz-adv-x="1536" > + <font-face + font-family="FontAwesome" + font-weight="400" + font-stretch="normal" + units-per-em="1792" + panose-1="0 0 0 0 0 0 0 0 0 0" + ascent="1536" + descent="-256" + bbox="-1.02083 -256.962 2304.6 1537.02" + underline-thickness="0" + underline-position="0" + unicode-range="U+0020-F500" + /> +<missing-glyph horiz-adv-x="896" +d="M224 112h448v1312h-448v-1312zM112 0v1536h672v-1536h-672z" /> + <glyph glyph-name=".notdef" horiz-adv-x="896" +d="M224 112h448v1312h-448v-1312zM112 0v1536h672v-1536h-672z" /> + <glyph glyph-name=".null" horiz-adv-x="0" + /> + <glyph glyph-name="nonmarkingreturn" horiz-adv-x="597" + /> + <glyph glyph-name="space" unicode=" " horiz-adv-x="448" + /> + <glyph glyph-name="dieresis" unicode="&#xa8;" horiz-adv-x="1792" + /> + <glyph glyph-name="copyright" unicode="&#xa9;" horiz-adv-x="1792" + /> + <glyph glyph-name="registered" unicode="&#xae;" horiz-adv-x="1792" + /> + <glyph glyph-name="acute" unicode="&#xb4;" horiz-adv-x="1792" + /> + <glyph glyph-name="AE" unicode="&#xc6;" horiz-adv-x="1792" + /> + <glyph glyph-name="Oslash" unicode="&#xd8;" horiz-adv-x="1792" + /> + <glyph glyph-name="trademark" unicode="&#x2122;" horiz-adv-x="1792" + /> + <glyph glyph-name="infinity" unicode="&#x221e;" horiz-adv-x="1792" + /> + <glyph glyph-name="notequal" unicode="&#x2260;" horiz-adv-x="1792" + /> + <glyph glyph-name="glass" unicode="&#xf000;" horiz-adv-x="1792" +d="M1699 1350q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5z" /> + <glyph glyph-name="music" unicode="&#xf001;" +d="M1536 1312v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89 +t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68z" /> + <glyph glyph-name="search" unicode="&#xf002;" horiz-adv-x="1664" +d="M1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 
-316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5 +t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> + <glyph glyph-name="envelope" unicode="&#xf003;" horiz-adv-x="1792" +d="M1664 32v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1664 1083v11v13.5t-0.5 13 +t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 29.5q208 165 401 317q54 43 100.5 115.5t46.5 131.5z +M1792 1120v-1088q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> + <glyph glyph-name="heart" unicode="&#xf004;" horiz-adv-x="1792" +d="M896 -128q-26 0 -44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600 +q-18 -18 -44 -18z" /> + <glyph glyph-name="star" unicode="&#xf005;" horiz-adv-x="1664" +d="M1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455 +l502 -73q56 -9 56 -46z" /> + <glyph glyph-name="star_empty" unicode="&#xf006;" horiz-adv-x="1664" +d="M1137 532l306 297l-422 62l-189 382l-189 -382l-422 -62l306 -297l-73 -421l378 199l377 -199zM1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 +l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46z" /> + <glyph glyph-name="user" unicode="&#xf007;" horiz-adv-x="1280" +d="M1280 137q0 -109 -62.5 -187t-150.5 -78h-854q-88 0 -150.5 78t-62.5 187q0 85 8.5 160.5t31.5 152t58.5 131t94 89t134.5 34.5q131 -128 313 -128t313 128q76 0 134.5 -34.5t94 -89t58.5 -131t31.5 -152t8.5 -160.5zM1024 1024q0 -159 -112.5 -271.5t-271.5 -112.5 +t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5z" /> + <glyph glyph-name="film" unicode="&#xf008;" horiz-adv-x="1920" +d="M384 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 320v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 704v128q0 26 -19 45t-45 19h-128 +q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 -64v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM384 1088v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45 +t45 -19h128q26 0 45 19t19 45zM1792 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 704v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1792 320v128 +q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 704v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1792 1088v128q0 26 -19 
45t-45 19h-128q-26 0 -45 -19 +t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1920 1248v-1344q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1344q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> + <glyph glyph-name="th_large" unicode="&#xf009;" horiz-adv-x="1664" +d="M768 512v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM768 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 512v-384q0 -52 -38 -90t-90 -38 +h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> + <glyph glyph-name="th" unicode="&#xf00a;" horiz-adv-x="1792" +d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 288v-192q0 -40 -28 -68t-68 -28h-320 +q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 +h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192 +q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68z" /> + <glyph glyph-name="th_list" unicode="&#xf00b;" horiz-adv-x="1792" +d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-960 +q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28 +h960q40 0 68 -28t28 -68zM1792 1312v-192q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68z" /> + <glyph glyph-name="ok" unicode="&#xf00c;" horiz-adv-x="1792" +d="M1671 970q0 -40 -28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68z" /> + <glyph glyph-name="remove" unicode="&#xf00d;" horiz-adv-x="1408" +d="M1298 214q0 -40 -28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68t28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68 +t-28 -68l-294 -294l294 -294q28 -28 28 -68z" /> + <glyph glyph-name="zoom_in" unicode="&#xf00e;" horiz-adv-x="1664" +d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224 +q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 
316.5zM1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5 +t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" /> + <glyph glyph-name="zoom_out" unicode="&#xf010;" horiz-adv-x="1664" +d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5z +M1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z +" /> + <glyph glyph-name="off" unicode="&#xf011;" +d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5 +t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343zM896 1408v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90v640q0 52 38 90t90 38t90 -38t38 -90z" /> + <glyph glyph-name="signal" unicode="&#xf012;" horiz-adv-x="1792" +d="M256 96v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM640 224v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1024 480v-576q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23 +v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1408 864v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 1376v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1472q0 14 9 23t23 9h192q14 0 23 -9t9 -23z" /> + <glyph glyph-name="cog" unicode="&#xf013;" +d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1536 749v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108q-44 -23 -91 -38 +q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5t-8 23.5v222q0 12 8 23t19 13 +l186 28q14 46 39 92q-40 57 -107 138q-10 12 -10 24q0 10 9 23q26 36 98.5 107.5t94.5 71.5q13 0 26 -10l138 -107q44 23 91 38q16 136 29 186q7 28 36 28h222q14 0 24.5 -8.5t11.5 -21.5l28 -184q49 -16 90 -37l142 107q9 9 24 9q13 0 25 -10q129 -119 165 -170q7 -8 7 -22 +q0 -12 -8 -23q-15 -21 -51 -66.5t-54 -70.5q26 -50 41 -98l183 -28q13 -2 21 -12.5t8 -23.5z" /> + <glyph glyph-name="trash" unicode="&#xf014;" horiz-adv-x="1408" +d="M512 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM768 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1024 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576 +q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1152 76v948h-896v-948q0 -22 7 -40.5t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q0 -14 -9 -23t-23 -9h-96v-948q0 -83 -47 -143.5t-113 -60.5h-832 +q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h309l70 167q15 37 54 63t79 26h320q40 0 79 -26t54 
-63l70 -167h309q14 0 23 -9t9 -23z" /> + <glyph glyph-name="home" unicode="&#xf015;" horiz-adv-x="1664" +d="M1408 544v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6zM1631 613l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5t11 21.5 +l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5z" /> + <glyph glyph-name="file_alt" unicode="&#xf016;" +d="M1468 1156q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48zM1024 1400v-376h376q-10 29 -22 41l-313 313q-12 12 -41 22zM1408 -128v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536h1280z +" /> + <glyph glyph-name="time" unicode="&#xf017;" +d="M896 992v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 +q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="road" unicode="&#xf018;" horiz-adv-x="1920" +d="M1111 540v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20zM1870 73q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256 +q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t26 -33l417 -1044q26 -62 26 -116z" /> + <glyph glyph-name="download_alt" unicode="&#xf019;" horiz-adv-x="1664" +d="M1280 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 416v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h465l135 -136 +q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68zM1339 985q17 -41 -14 -70l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q42 0 59 -39z" /> + <glyph glyph-name="download" unicode="&#xf01a;" +d="M1120 608q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273 +t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="upload" unicode="&#xf01b;" +d="M1118 660q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198 +t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="inbox" unicode="&#xf01c;" +d="M1023 576h316q-1 3 -2.5 8.5t-2.5 7.5l-212 496h-708l-212 -496q-1 -3 -2.5 -8.5t-2.5 -7.5h316l95 -192h320zM1536 546v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v482q0 62 25 123l238 552q10 
[SVG webfont asset data omitted: raw Font Awesome glyph path definitions (icons such as play_circle, repeat, refresh, lock, flag, qrcode, barcode, bookmark, print, camera, align_*, chevron_*, arrow_*, calendar, comment, shopping_cart, folder_open, twitter, facebook, github, rss, bell, certificate). The hunk consists only of generated path outlines with no reviewable source content.]
-75.5q-103 0 -179 76t-76 180v374q-54 -22 -128 -22q-121 0 -188.5 81.5t-67.5 206.5q0 38 17.5 69.5t49.5 55t63 40.5t72 37t62 33q55 35 129 100 +q3 2 17 14t21.5 19t21.5 20.5t22.5 24t18 22.5t14 23.5t4.5 21.5v288q0 53 37.5 90.5t90.5 37.5h640q53 0 90.5 -37.5t37.5 -90.5v-288q0 -59 59 -223q69 -190 69 -317z" /> + <glyph glyph-name="circle_arrow_left" unicode="&#xf0a8;" +d="M1280 576v128q0 26 -19 45t-45 19h-502l189 189q19 19 19 45t-19 45l-91 91q-18 18 -45 18t-45 -18l-362 -362l-91 -91q-18 -18 -18 -45t18 -45l91 -91l362 -362q18 -18 45 -18t45 18l91 91q18 18 18 45t-18 45l-189 189h502q26 0 45 19t19 45zM1536 640 +q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="circle_arrow_right" unicode="&#xf0a9;" +d="M1285 640q0 27 -18 45l-91 91l-362 362q-18 18 -45 18t-45 -18l-91 -91q-18 -18 -18 -45t18 -45l189 -189h-502q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h502l-189 -189q-19 -19 -19 -45t19 -45l91 -91q18 -18 45 -18t45 18l362 362l91 91q18 18 18 45zM1536 640 +q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="circle_arrow_up" unicode="&#xf0aa;" +d="M1284 641q0 27 -18 45l-362 362l-91 91q-18 18 -45 18t-45 -18l-91 -91l-362 -362q-18 -18 -18 -45t18 -45l91 -91q18 -18 45 -18t45 18l189 189v-502q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v502l189 -189q19 -19 45 -19t45 19l91 91q18 18 18 45zM1536 640 +q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="circle_arrow_down" unicode="&#xf0ab;" +d="M1284 639q0 27 -18 45l-91 91q-18 18 -45 18t-45 -18l-189 -189v502q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-502l-189 189q-19 19 -45 19t-45 -19l-91 -91q-18 -18 -18 -45t18 -45l362 -362l91 -91q18 -18 45 -18t45 18l91 91l362 362q18 18 18 45zM1536 640 +q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="globe" unicode="&#xf0ac;" +d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1042 887q-2 -1 -9.5 -9.5t-13.5 -9.5q2 0 4.5 5t5 11t3.5 7q6 7 22 15q14 6 52 12q34 8 51 -11 +q-2 2 9.5 13t14.5 12q3 2 15 4.5t15 7.5l2 22q-12 -1 -17.5 7t-6.5 21q0 -2 -6 -8q0 7 -4.5 8t-11.5 -1t-9 -1q-10 3 -15 7.5t-8 16.5t-4 15q-2 5 -9.5 11t-9.5 10q-1 2 -2.5 5.5t-3 6.5t-4 5.5t-5.5 2.5t-7 -5t-7.5 -10t-4.5 -5q-3 2 -6 1.5t-4.5 -1t-4.5 -3t-5 -3.5 +q-3 -2 -8.5 -3t-8.5 -2q15 5 -1 11q-10 4 -16 3q9 4 7.5 12t-8.5 14h5q-1 4 -8.5 8.5t-17.5 8.5t-13 6q-8 5 -34 9.5t-33 0.5q-5 -6 -4.5 -10.5t4 -14t3.5 -12.5q1 -6 -5.5 -13t-6.5 -12q0 -7 14 -15.5t10 -21.5q-3 -8 -16 -16t-16 -12q-5 -8 -1.5 -18.5t10.5 -16.5 +q2 -2 1.5 -4t-3.5 -4.5t-5.5 -4t-6.5 -3.5l-3 -2q-11 -5 -20.5 6t-13.5 26q-7 25 -16 30q-23 8 -29 -1q-5 13 -41 26q-25 9 -58 4q6 1 0 15q-7 15 -19 12q3 6 4 17.5t1 13.5q3 13 12 23q1 1 7 8.5t9.5 13.5t0.5 6q35 -4 50 11q5 5 11.5 17t10.5 17q9 6 14 5.5t14.5 -5.5 +t14.5 -5q14 -1 15.5 11t-7.5 20q12 -1 3 17q-4 7 -8 9q-12 4 -27 -5q-8 -4 2 -8q-1 1 -9.5 -10.5t-16.5 -17.5t-16 5q-1 1 -5.5 13.5t-9.5 13.5q-8 0 -16 -15q3 8 -11 15t-24 8q19 12 -8 27q-7 4 -20.5 5t-19.5 -4q-5 -7 -5.5 -11.5t5 -8t10.5 -5.5t11.5 -4t8.5 -3 +q14 -10 8 -14q-2 -1 -8.5 -3.5t-11.5 -4.5t-6 -4q-3 -4 0 -14t-2 -14q-5 5 -9 17.5t-7 16.5q7 -9 -25 -6l-10 1q-4 0 
-16 -2t-20.5 -1t-13.5 8q-4 8 0 20q1 4 4 2q-4 3 -11 9.5t-10 8.5q-46 -15 -94 -41q6 -1 12 1q5 2 13 6.5t10 5.5q34 14 42 7l5 5q14 -16 20 -25 +q-7 4 -30 1q-20 -6 -22 -12q7 -12 5 -18q-4 3 -11.5 10t-14.5 11t-15 5q-16 0 -22 -1q-146 -80 -235 -222q7 -7 12 -8q4 -1 5 -9t2.5 -11t11.5 3q9 -8 3 -19q1 1 44 -27q19 -17 21 -21q3 -11 -10 -18q-1 2 -9 9t-9 4q-3 -5 0.5 -18.5t10.5 -12.5q-7 0 -9.5 -16t-2.5 -35.5 +t-1 -23.5l2 -1q-3 -12 5.5 -34.5t21.5 -19.5q-13 -3 20 -43q6 -8 8 -9q3 -2 12 -7.5t15 -10t10 -10.5q4 -5 10 -22.5t14 -23.5q-2 -6 9.5 -20t10.5 -23q-1 0 -2.5 -1t-2.5 -1q3 -7 15.5 -14t15.5 -13q1 -3 2 -10t3 -11t8 -2q2 20 -24 62q-15 25 -17 29q-3 5 -5.5 15.5 +t-4.5 14.5q2 0 6 -1.5t8.5 -3.5t7.5 -4t2 -3q-3 -7 2 -17.5t12 -18.5t17 -19t12 -13q6 -6 14 -19.5t0 -13.5q9 0 20 -10.5t17 -19.5q5 -8 8 -26t5 -24q2 -7 8.5 -13.5t12.5 -9.5l16 -8t13 -7q5 -2 18.5 -10.5t21.5 -11.5q10 -4 16 -4t14.5 2.5t13.5 3.5q15 2 29 -15t21 -21 +q36 -19 55 -11q-2 -1 0.5 -7.5t8 -15.5t9 -14.5t5.5 -8.5q5 -6 18 -15t18 -15q6 4 7 9q-3 -8 7 -20t18 -10q14 3 14 32q-31 -15 -49 18q0 1 -2.5 5.5t-4 8.5t-2.5 8.5t0 7.5t5 3q9 0 10 3.5t-2 12.5t-4 13q-1 8 -11 20t-12 15q-5 -9 -16 -8t-16 9q0 -1 -1.5 -5.5t-1.5 -6.5 +q-13 0 -15 1q1 3 2.5 17.5t3.5 22.5q1 4 5.5 12t7.5 14.5t4 12.5t-4.5 9.5t-17.5 2.5q-19 -1 -26 -20q-1 -3 -3 -10.5t-5 -11.5t-9 -7q-7 -3 -24 -2t-24 5q-13 8 -22.5 29t-9.5 37q0 10 2.5 26.5t3 25t-5.5 24.5q3 2 9 9.5t10 10.5q2 1 4.5 1.5t4.5 0t4 1.5t3 6q-1 1 -4 3 +q-3 3 -4 3q7 -3 28.5 1.5t27.5 -1.5q15 -11 22 2q0 1 -2.5 9.5t-0.5 13.5q5 -27 29 -9q3 -3 15.5 -5t17.5 -5q3 -2 7 -5.5t5.5 -4.5t5 0.5t8.5 6.5q10 -14 12 -24q11 -40 19 -44q7 -3 11 -2t4.5 9.5t0 14t-1.5 12.5l-1 8v18l-1 8q-15 3 -18.5 12t1.5 18.5t15 18.5q1 1 8 3.5 +t15.5 6.5t12.5 8q21 19 15 35q7 0 11 9q-1 0 -5 3t-7.5 5t-4.5 2q9 5 2 16q5 3 7.5 11t7.5 10q9 -12 21 -2q8 8 1 16q5 7 20.5 10.5t18.5 9.5q7 -2 8 2t1 12t3 12q4 5 15 9t13 5l17 11q3 4 0 4q18 -2 31 11q10 11 -6 20q3 6 -3 9.5t-15 5.5q3 1 11.5 0.5t10.5 1.5 +q15 10 -7 16q-17 5 -43 -12zM879 10q206 36 351 189q-3 3 -12.5 4.5t-12.5 3.5q-18 7 -24 8q1 7 -2.5 13t-8 9t-12.5 8t-11 7q-2 2 -7 6t-7 5.5t-7.5 4.5t-8.5 2t-10 -1l-3 -1q-3 -1 -5.5 -2.5t-5.5 -3t-4 -3t0 -2.5q-21 17 -36 22q-5 1 -11 5.5t-10.5 7t-10 1.5t-11.5 -7 +q-5 -5 -6 -15t-2 -13q-7 5 0 17.5t2 18.5q-3 6 -10.5 4.5t-12 -4.5t-11.5 -8.5t-9 -6.5t-8.5 -5.5t-8.5 -7.5q-3 -4 -6 -12t-5 -11q-2 4 -11.5 6.5t-9.5 5.5q2 -10 4 -35t5 -38q7 -31 -12 -48q-27 -25 -29 -40q-4 -22 12 -26q0 -7 -8 -20.5t-7 -21.5q0 -6 2 -16z" /> + <glyph glyph-name="wrench" unicode="&#xf0ad;" horiz-adv-x="1664" +d="M384 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1028 484l-682 -682q-37 -37 -90 -37q-52 0 -91 37l-106 108q-38 36 -38 90q0 53 38 91l681 681q39 -98 114.5 -173.5t173.5 -114.5zM1662 919q0 -39 -23 -106q-47 -134 -164.5 -217.5 +t-258.5 -83.5q-185 0 -316.5 131.5t-131.5 316.5t131.5 316.5t316.5 131.5q58 0 121.5 -16.5t107.5 -46.5q16 -11 16 -28t-16 -28l-293 -169v-224l193 -107q5 3 79 48.5t135.5 81t70.5 35.5q15 0 23.5 -10t8.5 -25z" /> + <glyph glyph-name="tasks" unicode="&#xf0ae;" horiz-adv-x="1792" +d="M1024 128h640v128h-640v-128zM640 640h1024v128h-1024v-128zM1280 1152h384v128h-384v-128zM1792 320v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 832v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19 +t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" /> + <glyph glyph-name="filter" unicode="&#xf0b0;" horiz-adv-x="1408" +d="M1403 1241q17 -41 -14 -70l-493 -493v-742q0 -42 -39 -59q-13 -5 -25 
-5q-27 0 -45 19l-256 256q-19 19 -19 45v486l-493 493q-31 29 -14 70q17 39 59 39h1280q42 0 59 -39z" /> + <glyph glyph-name="briefcase" unicode="&#xf0b1;" horiz-adv-x="1792" +d="M640 1280h512v128h-512v-128zM1792 640v-480q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v480h672v-160q0 -26 19 -45t45 -19h320q26 0 45 19t19 45v160h672zM1024 640v-128h-256v128h256zM1792 1120v-384h-1792v384q0 66 47 113t113 47h352v160q0 40 28 68 +t68 28h576q40 0 68 -28t28 -68v-160h352q66 0 113 -47t47 -113z" /> + <glyph glyph-name="fullscreen" unicode="&#xf0b2;" +d="M1283 995l-355 -355l355 -355l144 144q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l144 144l-355 355l-355 -355l144 -144q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l144 -144 +l355 355l-355 355l-144 -144q-19 -19 -45 -19q-12 0 -24 5q-40 17 -40 59v448q0 26 19 45t45 19h448q42 0 59 -40q17 -39 -14 -69l-144 -144l355 -355l355 355l-144 144q-31 30 -14 69q17 40 59 40h448q26 0 45 -19t19 -45v-448q0 -42 -39 -59q-13 -5 -25 -5q-26 0 -45 19z +" /> + <glyph glyph-name="group" unicode="&#xf0c0;" horiz-adv-x="1920" +d="M593 640q-162 -5 -265 -128h-134q-82 0 -138 40.5t-56 118.5q0 353 124 353q6 0 43.5 -21t97.5 -42.5t119 -21.5q67 0 133 23q-5 -37 -5 -66q0 -139 81 -256zM1664 3q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5 +t43 97.5t62 81t85.5 53.5t111.5 20q10 0 43 -21.5t73 -48t107 -48t135 -21.5t135 21.5t107 48t73 48t43 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM640 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75 +t75 -181zM1344 896q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5zM1920 671q0 -78 -56 -118.5t-138 -40.5h-134q-103 123 -265 128q81 117 81 256q0 29 -5 66q66 -23 133 -23q59 0 119 21.5t97.5 42.5 +t43.5 21q124 0 124 -353zM1792 1280q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181z" /> + <glyph glyph-name="link" unicode="&#xf0c1;" horiz-adv-x="1664" +d="M1456 320q0 40 -28 68l-208 208q-28 28 -68 28q-42 0 -72 -32q3 -3 19 -18.5t21.5 -21.5t15 -19t13 -25.5t3.5 -27.5q0 -40 -28 -68t-68 -28q-15 0 -27.5 3.5t-25.5 13t-19 15t-21.5 21.5t-18.5 19q-33 -31 -33 -73q0 -40 28 -68l206 -207q27 -27 68 -27q40 0 68 26 +l147 146q28 28 28 67zM753 1025q0 40 -28 68l-206 207q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l208 -208q27 -27 68 -27q42 0 72 31q-3 3 -19 18.5t-21.5 21.5t-15 19t-13 25.5t-3.5 27.5q0 40 28 68t68 28q15 0 27.5 -3.5t25.5 -13t19 -15 +t21.5 -21.5t18.5 -19q33 31 33 73zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-206 207q-83 83 -83 203q0 123 88 209l-88 88q-86 -88 -208 -88q-120 0 -204 84l-208 208q-84 84 -84 204t85 203l147 146q83 83 203 83q121 0 204 -85l206 -207 +q83 -83 83 -203q0 -123 -88 -209l88 -88q86 88 208 88q120 0 204 -84l208 -208q84 -84 84 -204z" /> + <glyph glyph-name="cloud" unicode="&#xf0c2;" horiz-adv-x="1920" +d="M1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5q0 132 71 241.5t187 163.5q-2 28 -2 43q0 212 150 362t362 150q158 0 286.5 -88t187.5 -230q70 62 166 62q106 0 181 -75t75 -181q0 -75 -41 -138q129 -30 213 -134.5t84 -239.5z +" /> + <glyph glyph-name="beaker" unicode="&#xf0c3;" horiz-adv-x="1664" +d="M1527 88q56 -89 21.5 -152.5t-140.5 -63.5h-1152q-106 0 -140.5 63.5t21.5 152.5l503 793v399h-64q-26 0 -45 19t-19 45t19 45t45 19h512q26 0 45 -19t19 -45t-19 -45t-45 -19h-64v-399zM748 813l-272 -429h712l-272 429l-20 31v37v399h-128v-399v-37z" /> + <glyph 
glyph-name="cut" unicode="&#xf0c4;" horiz-adv-x="1792" +d="M960 640q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1260 576l507 -398q28 -20 25 -56q-5 -35 -35 -51l-128 -64q-13 -7 -29 -7q-17 0 -31 8l-690 387l-110 -66q-8 -4 -12 -5q14 -49 10 -97q-7 -77 -56 -147.5t-132 -123.5q-132 -84 -277 -84 +q-136 0 -222 78q-90 84 -79 207q7 76 56 147t131 124q132 84 278 84q83 0 151 -31q9 13 22 22l122 73l-122 73q-13 9 -22 22q-68 -31 -151 -31q-146 0 -278 84q-82 53 -131 124t-56 147q-5 59 15.5 113t63.5 93q85 79 222 79q145 0 277 -84q83 -52 132 -123t56 -148 +q4 -48 -10 -97q4 -1 12 -5l110 -66l690 387q14 8 31 8q16 0 29 -7l128 -64q30 -16 35 -51q3 -36 -25 -56zM579 836q46 42 21 108t-106 117q-92 59 -192 59q-74 0 -113 -36q-46 -42 -21 -108t106 -117q92 -59 192 -59q74 0 113 36zM494 91q81 51 106 117t-21 108 +q-39 36 -113 36q-100 0 -192 -59q-81 -51 -106 -117t21 -108q39 -36 113 -36q100 0 192 59zM672 704l96 -58v11q0 36 33 56l14 8l-79 47l-26 -26q-3 -3 -10 -11t-12 -12q-2 -2 -4 -3.5t-3 -2.5zM896 480l96 -32l736 576l-128 64l-768 -431v-113l-160 -96l9 -8q2 -2 7 -6 +q4 -4 11 -12t11 -12l26 -26zM1600 64l128 64l-520 408l-177 -138q-2 -3 -13 -7z" /> + <glyph glyph-name="copy" unicode="&#xf0c5;" horiz-adv-x="1792" +d="M1696 1152q40 0 68 -28t28 -68v-1216q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v288h-544q-40 0 -68 28t-28 68v672q0 40 20 88t48 76l408 408q28 28 76 48t88 20h416q40 0 68 -28t28 -68v-328q68 40 128 40h416zM1152 939l-299 -299h299v299zM512 1323l-299 -299 +h299v299zM708 676l316 316v416h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h512v256q0 40 20 88t48 76zM1664 -128v1152h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h896z" /> + <glyph glyph-name="paper_clip" unicode="&#xf0c6;" horiz-adv-x="1408" +d="M1404 151q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776q-113 115 -113 271q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181 +l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0 82 57 139t139 57q88 0 149 -63l581 -581q100 -98 100 -235 +z" /> + <glyph glyph-name="save" unicode="&#xf0c7;" +d="M384 0h768v384h-768v-384zM1280 0h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416zM896 928v320q0 13 -9.5 22.5t-22.5 9.5 +h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5zM1536 896v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 -28 48 -76t20 -88z" /> + <glyph glyph-name="sign_blank" unicode="&#xf0c8;" +d="M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="reorder" unicode="&#xf0c9;" +d="M1536 192v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 704v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 1216v-128q0 -26 -19 -45 +t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" /> + <glyph glyph-name="ul" unicode="&#xf0ca;" horiz-adv-x="1792" +d="M384 128q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM384 640q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 
-136zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 +t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1152q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z +M1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> + <glyph glyph-name="ol" unicode="&#xf0cb;" horiz-adv-x="1792" +d="M381 -84q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88zM383 543v-159h-362 +q-6 36 -6 54q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t-35.5 -52.5h127v60h105zM1792 224v-192q0 -13 -9.5 -22.5 +t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1123v-99h-335v99h107q0 41 0.5 121.5t0.5 121.5v12h-2q-8 -17 -50 -54l-71 76l136 127h106v-404h108zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216 +q-13 0 -22.5 9.5t-9.5 22.5v192q0 14 9 23t23 9h1216q13 0 22.5 -9.5t9.5 -22.5zM1792 1248v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5z" /> + <glyph glyph-name="strikethrough" unicode="&#xf0cc;" horiz-adv-x="1792" +d="M1760 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h1728zM483 704q-28 35 -51 80q-48 98 -48 188q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6 +l-14 2q-50 149 -103 205q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743zM990 448h411q7 -39 7 -92q0 -111 -41 -212q-23 -56 -71 -104q-37 -35 -109 -81q-80 -48 -153 -66q-80 -21 -203 -21q-114 0 -195 23 +l-140 40q-57 16 -72 28q-8 8 -8 22v13q0 108 -2 156q-1 30 0 68l2 37v44l102 2q15 -34 30 -71t22.5 -56t12.5 -27q35 -57 80 -94q43 -36 105 -57q59 -22 132 -22q64 0 139 27q77 26 122 86q47 61 47 129q0 84 -81 157q-34 29 -137 71z" /> + <glyph glyph-name="underline" unicode="&#xf0cd;" +d="M48 1313q-37 2 -45 4l-3 88q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47 +q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -13q-73 -11 -79 -17q-15 -15 -15 -41 +q0 -7 1.5 -27t1.5 -31q8 -19 22 -396q6 -195 -15 -304q-15 -76 -41 -122q-38 -65 -112 -123q-75 -57 -182 -89q-109 -33 -255 -33q-167 0 -284 46q-119 47 -179 122q-61 76 -83 195q-16 80 -16 237v333q0 188 -17 213q-25 36 -147 39zM1536 -96v64q0 14 -9 23t-23 9h-1472 +q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h1472q14 0 23 9t9 23z" /> + <glyph glyph-name="table" unicode="&#xf0ce;" horiz-adv-x="1664" +d="M512 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23 +v-192q0 -14 9 -23t23 -9h320q14 0 23 
9t9 23zM512 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 160v192 +q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192 +q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1536 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1664 1248v-1088q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1344q66 0 113 -47t47 -113 +z" /> + <glyph glyph-name="magic" unicode="&#xf0d0;" horiz-adv-x="1664" +d="M1190 955l293 293l-107 107l-293 -293zM1637 1248q0 -27 -18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45t18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45zM286 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM636 1276 +l196 -60l-196 -60l-60 -196l-60 196l-196 60l196 60l60 196zM1566 798l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM926 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98z" /> + <glyph glyph-name="truck" unicode="&#xf0d1;" horiz-adv-x="1792" +d="M640 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM1536 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM1792 1216v-1024q0 -15 -4 -26.5t-13.5 -18.5 +t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5 18.5t-4 26.5q0 26 19 45t45 19v320q0 8 -0.5 35t0 38 +t2.5 34.5t6.5 37t14 30.5t22.5 30l198 198q19 19 50.5 32t58.5 13h160v192q0 26 19 45t45 19h1024q26 0 45 -19t19 -45z" /> + <glyph glyph-name="pinterest" unicode="&#xf0d2;" +d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134 +q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5q8 35 26.5 93.5t30 103t11.5 75.5q0 50 -27 83t-77 33 +q-62 0 -105 -57t-43 -142q0 -73 25 -122l-99 -418q-17 -70 -13 -177q-206 91 -333 281t-127 423q0 209 103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="pinterest_sign" unicode="&#xf0d3;" +d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5q-104 0 -194.5 -28.5t-153 -76.5 +t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118q-60 0 -97 43.5t-23 103.5q8 34 26.5 92.5 +t29.5 102t11 74.5q0 49 -26.5 81.5t-75.5 32.5q-61 0 -103.5 -56.5t-42.5 -139.5q0 -72 24 -121l-98 -414q-24 -100 -7 -254h-183q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960z" /> + <glyph glyph-name="google_plus_sign" unicode="&#xf0d4;" +d="M917 631q0 26 -6 64h-362v-132h217q-3 -24 -16.5 -50t-37.5 -53t-66.5 -44.5t-96.5 -17.5q-99 0 -169 71t-70 171t70 171t169 
71q92 0 153 -59l104 101q-108 100 -257 100q-160 0 -272 -112.5t-112 -271.5t112 -271.5t272 -112.5q165 0 266.5 105t101.5 270zM1262 585 +h109v110h-109v110h-110v-110h-110v-110h110v-110h110v110zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="google_plus" unicode="&#xf0d5;" horiz-adv-x="2304" +d="M1437 623q0 -208 -87 -370.5t-248 -254t-369 -91.5q-149 0 -285 58t-234 156t-156 234t-58 285t58 285t156 234t234 156t285 58q286 0 491 -192l-199 -191q-117 113 -292 113q-123 0 -227.5 -62t-165.5 -168.5t-61 -232.5t61 -232.5t165.5 -168.5t227.5 -62 +q83 0 152.5 23t114.5 57.5t78.5 78.5t49 83t21.5 74h-416v252h692q12 -63 12 -122zM2304 745v-210h-209v-209h-210v209h-209v210h209v209h210v-209h209z" /> + <glyph glyph-name="money" unicode="&#xf0d6;" horiz-adv-x="1920" +d="M768 384h384v96h-128v448h-114l-148 -137l77 -80q42 37 55 57h2v-288h-128v-96zM1280 640q0 -70 -21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142t21 142t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142zM1792 384 +v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512q106 0 181 -75t75 -181h1152q0 106 75 181t181 75zM1920 1216v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45v1152q0 26 19 45t45 19h1792q26 0 45 -19t19 -45z" /> + <glyph glyph-name="caret_down" unicode="&#xf0d7;" horiz-adv-x="1024" +d="M1024 832q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> + <glyph glyph-name="caret_up" unicode="&#xf0d8;" horiz-adv-x="1024" +d="M1024 320q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> + <glyph glyph-name="caret_left" unicode="&#xf0d9;" horiz-adv-x="640" +d="M640 1088v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45t19 45l448 448q19 19 45 19t45 -19t19 -45z" /> + <glyph glyph-name="caret_right" unicode="&#xf0da;" horiz-adv-x="640" +d="M576 640q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45z" /> + <glyph glyph-name="columns" unicode="&#xf0db;" horiz-adv-x="1664" +d="M160 0h608v1152h-640v-1120q0 -13 9.5 -22.5t22.5 -9.5zM1536 32v1120h-640v-1152h608q13 0 22.5 9.5t9.5 22.5zM1664 1248v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113z" /> + <glyph glyph-name="sort" unicode="&#xf0dc;" horiz-adv-x="1024" +d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45zM1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> + <glyph glyph-name="sort_down" unicode="&#xf0dd;" horiz-adv-x="1024" +d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" /> + <glyph glyph-name="sort_up" unicode="&#xf0de;" horiz-adv-x="1024" +d="M1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" /> + <glyph glyph-name="envelope_alt" unicode="&#xf0e0;" horiz-adv-x="1792" +d="M1792 826v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87zM1792 1120q0 -79 -49 -151t-122 -123 +q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 
30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5q0 78 41.5 130t118.5 52h1472q65 0 112.5 -47t47.5 -113z" /> + <glyph glyph-name="linkedin" unicode="&#xf0e1;" +d="M349 911v-991h-330v991h330zM370 1217q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5zM1536 488v-568h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329 +q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5z" /> + <glyph glyph-name="undo" unicode="&#xf0e2;" +d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5 +t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 -298z" /> + <glyph glyph-name="legal" unicode="&#xf0e3;" horiz-adv-x="1792" +d="M1771 0q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5 +t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 -12.5q-14 14 -14 34t14 34l348 348q14 14 34 14t34 -14 +q-2 2 -12.5 12t-12.5 13t-10 11.5t-10 13.5t-6 13.5t-5.5 16.5t-1.5 18q0 38 28 68q3 3 16.5 18t19 20.5t18.5 16.5t22 15.5t22 9t26 4.5q40 0 68 -28l408 -408q28 -28 28 -68q0 -13 -4.5 -26t-9 -22t-15.5 -22t-16.5 -18.5t-20.5 -19t-18 -16.5q-30 -28 -68 -28 +q-10 0 -18 1.5t-16.5 5.5t-13.5 6t-13.5 10t-11.5 10t-13 12.5t-12 12.5q14 -14 14 -34t-14 -34l-126 -126l256 -256q43 43 96 43q52 0 91 -37l363 -363q37 -39 37 -91z" /> + <glyph glyph-name="dashboard" unicode="&#xf0e4;" horiz-adv-x="1792" +d="M384 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM576 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1004 351l101 382q6 26 -7.5 48.5t-38.5 29.5 +t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5t-63 -98.5q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91zM1664 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 1024q0 53 -37.5 90.5 +t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1472 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1792 384q0 -261 -141 -483q-19 -29 -54 -29h-1402q-35 0 -54 29 +q-141 221 -141 483q0 182 71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> + <glyph glyph-name="comment_alt" unicode="&#xf0e5;" horiz-adv-x="1792" +d="M896 1152q-204 0 -381.5 -69.5t-282 -187.5t-104.5 -255q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5zM1792 640 +q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281q0 174 120 321.5 +t326 233t450 85.5t450 -85.5t326 -233t120 
-321.5z" /> + <glyph glyph-name="comments_alt" unicode="&#xf0e6;" horiz-adv-x="1792" +d="M704 1152q-153 0 -286 -52t-211.5 -141t-78.5 -191q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52zM704 1280q191 0 353.5 -68.5t256.5 -186.5t94 -257t-94 -257 +t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224q0 139 94 257t256.5 186.5 +t353.5 68.5zM1526 111q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 -5t2 -6t0.5 -6.5t-1 -6.5q-3 -14 -13 -22t-22 -7q-50 7 -86 16q-154 40 -278 128q-90 -16 -176 -16q-271 0 -472 132q58 -4 88 -4q161 0 309 45t264 129 +q125 92 192 212t67 254q0 77 -23 152q129 -71 204 -178t75 -230q0 -120 -71 -224.5t-195 -176.5z" /> + <glyph glyph-name="bolt" unicode="&#xf0e7;" horiz-adv-x="896" +d="M885 970q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15z" /> + <glyph glyph-name="sitemap" unicode="&#xf0e8;" horiz-adv-x="1792" +d="M1792 288v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320 +q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h512q52 0 90 -38t38 -90v-192h96q40 0 68 -28t28 -68 +z" /> + <glyph glyph-name="umbrella" unicode="&#xf0e9;" horiz-adv-x="1664" +d="M896 708v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11zM1664 681q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97 +q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 28q-43 60 -103 97t-128 37q-58 0 -102 -23t-93 -69 +q-12 -10 -23 -10q-13 0 -22.5 9.5t-9.5 22.5q0 5 1 7q45 183 172.5 319.5t298 204.5t360.5 68q140 0 274.5 -40t246.5 -113.5t194.5 -187t115.5 -251.5q1 -2 1 -7zM896 1408v-98q-42 2 -64 2t-64 -2v98q0 26 19 45t45 19t45 -19t19 -45z" /> + <glyph glyph-name="paste" unicode="&#xf0ea;" horiz-adv-x="1792" +d="M768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1024 1312v64q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5zM1280 640h299l-299 299v-299zM1792 512v-672q0 -40 -28 -68t-68 -28 +h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88z" /> + <glyph glyph-name="light_bulb" unicode="&#xf0eb;" horiz-adv-x="1024" +d="M736 960q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5t9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90zM896 960q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134 +q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180zM1024 960q0 -155 -103 
-268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q0 -37 -25 -64q25 -27 25 -64q0 -52 -45 -81q13 -23 13 -47 +q0 -46 -31.5 -71t-77.5 -25q-20 -44 -60 -70t-87 -26t-87 26t-60 70q-46 0 -77.5 25t-31.5 71q0 24 13 47q-45 29 -45 81q0 37 25 64q-25 27 -25 64q0 54 47 82q-4 50 -34 107.5t-59.5 95.5t-74.5 87q-103 113 -103 268q0 99 44.5 184.5t117 142t164 89t186.5 32.5 +t186.5 -32.5t164 -89t117 -142t44.5 -184.5z" /> + <glyph glyph-name="exchange" unicode="&#xf0ec;" horiz-adv-x="1792" +d="M1792 352v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5zM1792 896q0 -14 -9 -23l-320 -320q-9 -9 -23 -9 +q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" /> + <glyph glyph-name="cloud_download" unicode="&#xf0ed;" horiz-adv-x="1920" +d="M1280 608q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 +q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> + <glyph glyph-name="cloud_upload" unicode="&#xf0ee;" horiz-adv-x="1920" +d="M1280 672q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 +q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" /> + <glyph glyph-name="user_md" unicode="&#xf0f0;" horiz-adv-x="1408" +d="M384 192q0 -26 -19 -45t-45 -19t-45 19t-19 45t19 45t45 19t45 -19t19 -45zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56 +t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 38 90t90 38t90 -38t38 -90v-89q-32 -29 -32 -71q0 -40 28 -68 +t68 -28t68 28t28 68q0 42 -32 71v89q0 68 -34.5 127.5t-93.5 93.5q0 10 0.5 42.5t0 48t-2.5 41.5t-7 47t-13 40q68 -15 120 -60.5t81 -103t47.5 -132.5t24 -138t5.5 -131zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5 +t271.5 -112.5t112.5 -271.5z" /> + <glyph glyph-name="stethoscope" unicode="&#xf0f1;" horiz-adv-x="1408" +d="M1280 832q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 832q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5t-131.5 271.5v132q-164 20 -274 128t-110 252v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48 +t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 -48q10 2 16 2q26 0 45 -19t19 -45v-512q0 -144 -110 -252 +t-274 -128v-132q0 -106 94 -181t226 -75t226 75t94 181v395q-57 21 -92.5 70t-35.5 111q0 80 56 136t136 56t136 -56t56 -136z" /> + <glyph glyph-name="suitcase" unicode="&#xf0f2;" 
horiz-adv-x="1792" +d="M640 1152h512v128h-512v-128zM288 1152v-1280h-64q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h64zM1408 1152v-1280h-1024v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128zM1792 928v-832q0 -92 -66 -158t-158 -66h-64v1280h64q92 0 158 -66 +t66 -158z" /> + <glyph glyph-name="bell_alt" unicode="&#xf0f3;" horiz-adv-x="1792" +d="M912 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM1728 128q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q50 42 91 88t85 119.5t74.5 158.5 +t50 206t19.5 260q0 152 117 282.5t307 158.5q-8 19 -8 39q0 40 28 68t68 28t68 -28t28 -68q0 -20 -8 -39q190 -28 307 -158.5t117 -282.5q0 -139 19.5 -260t50 -206t74.5 -158.5t85 -119.5t91 -88z" /> + <glyph glyph-name="coffee" unicode="&#xf0f4;" horiz-adv-x="1920" +d="M1664 896q0 80 -56 136t-136 56h-64v-384h64q80 0 136 56t56 136zM0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM1856 896q0 -159 -112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158v736q0 26 19 45 +t45 19h1152q159 0 271.5 -112.5t112.5 -271.5z" /> + <glyph glyph-name="food" unicode="&#xf0f5;" horiz-adv-x="1408" +d="M640 1472v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45 +t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45zM1408 1472v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5v800q0 132 94 226t226 94h256q26 0 45 -19t19 -45z" /> + <glyph glyph-name="file_text_alt" unicode="&#xf0f6;" +d="M1468 1156q28 -28 48 -76t20 -88v-1152q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1600q0 40 28 68t68 28h896q40 0 88 -20t76 -48zM1024 1400v-376h376q-10 29 -22 41l-313 313q-12 12 -41 22zM1408 -128v1024h-416q-40 0 -68 28t-28 68v416h-768v-1536h1280z +M384 736q0 14 9 23t23 9h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64zM1120 512q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704zM1120 256q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704 +q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704z" /> + <glyph glyph-name="building" unicode="&#xf0f7;" horiz-adv-x="1408" +d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 
9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M640 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M640 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 992v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M896 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 1248v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M896 -128h384v1536h-1152v-1536h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM1408 1472v-1664q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1664q0 26 19 45t45 19h1280q26 0 45 -19t19 -45z" /> + <glyph glyph-name="hospital" unicode="&#xf0f8;" horiz-adv-x="1408" +d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M1152 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM896 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M640 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M896 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1152 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z +M896 -128h384v1152h-256v-32q0 -40 -28 -68t-68 -28h-448q-40 0 -68 28t-28 68v32h-256v-1152h384v224q0 13 9.5 22.5t22.5 9.5h320q13 0 22.5 -9.5t9.5 -22.5v-224zM896 1056v320q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-96h-128v96q0 13 -9.5 22.5 +t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5v96h128v-96q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1408 1088v-1280q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v1280q0 26 19 45t45 19h320 +v288q0 40 28 68t68 28h448q40 0 68 -28t28 -68v-288h320q26 0 45 -19t19 -45z" /> + <glyph glyph-name="ambulance" unicode="&#xf0f9;" horiz-adv-x="1920" +d="M640 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM1536 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5 +t90.5 
37.5t37.5 90.5zM1664 800v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM1920 1344v-1152 +q0 -26 -19 -45t-45 -19h-192q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-128q-26 0 -45 19t-19 45t19 45t45 19v416q0 26 13 58t32 51l198 198q19 19 51 32t58 13h160v320q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> + <glyph glyph-name="medkit" unicode="&#xf0fa;" horiz-adv-x="1792" +d="M1280 416v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM640 1152h512v128h-512v-128zM256 1152v-1280h-32 +q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h32zM1440 1152v-1280h-1088v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160zM1792 928v-832q0 -92 -66 -158t-158 -66h-32v1280h32q92 0 158 -66t66 -158z" /> + <glyph glyph-name="fighter_jet" unicode="&#xf0fb;" horiz-adv-x="1920" +d="M1920 576q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8l-192 24v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96 +q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q128 -28 200 -52t80 -34z" /> + <glyph glyph-name="beer" unicode="&#xf0fc;" horiz-adv-x="1664" +d="M640 640v384h-256v-256q0 -53 37.5 -90.5t90.5 -37.5h128zM1664 192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320l-64 64l32 128h480l32 128h960l32 -192l-64 -32v-800z" /> + <glyph glyph-name="h_sign" unicode="&#xf0fd;" +d="M1280 192v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1536 1120v-960 +q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="f0fe" unicode="&#xf0fe;" +d="M1280 576v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45zM1536 1120v-960 +q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="double_angle_left" unicode="&#xf100;" horiz-adv-x="1024" +d="M627 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23zM1011 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 +t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23z" /> + <glyph glyph-name="double_angle_right" unicode="&#xf101;" horiz-adv-x="1024" +d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM979 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23 +l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> + <glyph glyph-name="double_angle_up" unicode="&#xf102;" horiz-adv-x="1152" +d="M1075 224q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 
10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM1075 608q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393 +q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> + <glyph glyph-name="double_angle_down" unicode="&#xf103;" horiz-adv-x="1152" +d="M1075 672q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23zM1075 1056q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 +t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> + <glyph glyph-name="angle_left" unicode="&#xf104;" horiz-adv-x="640" +d="M627 992q0 -13 -10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> + <glyph glyph-name="angle_right" unicode="&#xf105;" horiz-adv-x="640" +d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> + <glyph glyph-name="angle_up" unicode="&#xf106;" horiz-adv-x="1152" +d="M1075 352q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" /> + <glyph glyph-name="angle_down" unicode="&#xf107;" horiz-adv-x="1152" +d="M1075 800q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" /> + <glyph glyph-name="desktop" unicode="&#xf108;" horiz-adv-x="1920" +d="M1792 544v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5zM1920 1376v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19 +t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" /> + <glyph glyph-name="laptop" unicode="&#xf109;" horiz-adv-x="1920" +d="M416 256q-66 0 -113 47t-47 113v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088zM384 1120v-704q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5z +M1760 192h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68v96h160h1600zM1040 96q16 0 16 16t-16 16h-160q-16 0 -16 -16t16 -16h160z" /> + <glyph glyph-name="tablet" unicode="&#xf10a;" horiz-adv-x="1152" +d="M640 128q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1024 288v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5zM1152 1248v-1088q0 -66 -47 -113t-113 -47h-832 +q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113z" /> + <glyph glyph-name="mobile_phone" unicode="&#xf10b;" horiz-adv-x="768" +d="M464 128q0 33 -23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5t23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5zM672 288v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5zM480 1136 +q0 16 -16 16h-160q-16 0 -16 -16t16 -16h160q16 0 16 16zM768 1152v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" /> + <glyph glyph-name="circle_blank" 
unicode="&#xf10c;" +d="M768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103 +t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="quote_left" unicode="&#xf10d;" horiz-adv-x="1664" +d="M768 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z +M1664 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z" /> + <glyph glyph-name="quote_right" unicode="&#xf10e;" horiz-adv-x="1664" +d="M768 1216v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136zM1664 1216 +v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136z" /> + <glyph glyph-name="spinner" unicode="&#xf110;" horiz-adv-x="1792" +d="M526 142q0 -53 -37.5 -90.5t-90.5 -37.5q-52 0 -90 38t-38 90q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1024 -64q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM320 640q0 -53 -37.5 -90.5t-90.5 -37.5 +t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1522 142q0 -52 -38 -90t-90 -38q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM558 1138q0 -66 -47 -113t-113 -47t-113 47t-47 113t47 113t113 47t113 -47t47 -113z +M1728 640q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1088 1344q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1618 1138q0 -93 -66 -158.5t-158 -65.5q-93 0 -158.5 65.5t-65.5 158.5 +q0 92 65.5 158t158.5 66q92 0 158 -66t66 -158z" /> + <glyph glyph-name="circle" unicode="&#xf111;" +d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="reply" unicode="&#xf112;" horiz-adv-x="1792" +d="M1792 416q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19 +l-512 512q-19 19 -19 45t19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333z" /> + <glyph glyph-name="github_alt" unicode="&#xf113;" horiz-adv-x="1664" +d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 +q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM1664 496q0 -207 -61 -331q-38 -77 -105.5 -133t-141 -86 +t-170 
-47.5t-171.5 -22t-167 -4.5q-78 0 -142 3t-147.5 12.5t-152.5 30t-137 51.5t-121 81t-86 115q-62 123 -62 331q0 237 136 396q-27 82 -27 170q0 116 51 218q108 0 190 -39.5t189 -123.5q147 35 309 35q148 0 280 -32q105 82 187 121t189 39q51 -102 51 -218 +q0 -87 -27 -168q136 -160 136 -398z" /> + <glyph glyph-name="folder_close_alt" unicode="&#xf114;" horiz-adv-x="1664" +d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 +q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" /> + <glyph glyph-name="folder_open_alt" unicode="&#xf115;" horiz-adv-x="1920" +d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 +v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158v-160h192q54 0 99 -24.5t67 -70.5q15 -32 15 -68z +" /> + <glyph glyph-name="expand_alt" unicode="&#xf116;" horiz-adv-x="1792" + /> + <glyph glyph-name="collapse_alt" unicode="&#xf117;" horiz-adv-x="1792" + /> + <glyph glyph-name="smile" unicode="&#xf118;" +d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 +t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5 +t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="frown" unicode="&#xf119;" +d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 +t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204 +t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="meh" unicode="&#xf11a;" +d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 +t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 +q0 
-209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="gamepad" unicode="&#xf11b;" horiz-adv-x="1920" +d="M832 448v128q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23zM1408 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5 +t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 640q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1920 512q0 -212 -150 -362t-362 -150q-192 0 -338 128h-220q-146 -128 -338 -128q-212 0 -362 150 +t-150 362t150 362t362 150h896q212 0 362 -150t150 -362z" /> + <glyph glyph-name="keyboard" unicode="&#xf11c;" horiz-adv-x="1920" +d="M384 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM512 624v-96q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h224q16 0 16 -16zM384 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 368v-96q0 -16 -16 -16 +h-864q-16 0 -16 16v96q0 16 16 16h864q16 0 16 -16zM768 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM640 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1024 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16 +h96q16 0 16 -16zM896 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1280 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1152 880v-96 +q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1664 880v-352q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h112v240q0 16 16 16h96q16 0 16 -16zM1792 128v896h-1664v-896 +h1664zM1920 1024v-896q0 -53 -37.5 -90.5t-90.5 -37.5h-1664q-53 0 -90.5 37.5t-37.5 90.5v896q0 53 37.5 90.5t90.5 37.5h1664q53 0 90.5 -37.5t37.5 -90.5z" /> + <glyph glyph-name="flag_alt" unicode="&#xf11d;" horiz-adv-x="1792" +d="M1664 491v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9 +h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 -235.5 -44t-228.5 -102 +q-15 -9 -33 -9q-16 0 -32 8q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> + <glyph glyph-name="flag_checkered" unicode="&#xf11e;" horiz-adv-x="1792" +d="M832 536v192q-181 -16 -384 -117v-185q205 96 384 110zM832 954v197q-172 -8 -384 -126v-189q215 111 384 118zM1664 491v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2 +q-23 0 -49 -3v-222h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92zM1664 918v189q-169 -91 -306 -91q-45 0 -78 8v-196q148 -42 384 90zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v1266 +q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -99 48t-91 29t-114 14q-102 0 
-235.5 -44t-228.5 -102q-15 -9 -33 -9q-16 0 -32 8 +q-32 19 -32 56v742q0 35 31 55q35 21 78.5 42.5t114 52t152.5 49.5t155 19q112 0 209 -31t209 -86q38 -19 89 -19q122 0 310 112q22 12 31 17q31 16 62 -2q31 -20 31 -55z" /> + <glyph glyph-name="terminal" unicode="&#xf120;" horiz-adv-x="1664" +d="M585 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23zM1664 96v-64q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h960q14 0 23 -9 +t9 -23z" /> + <glyph glyph-name="code" unicode="&#xf121;" horiz-adv-x="1920" +d="M617 137l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23zM1208 1204l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5 +l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5zM1865 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23z" /> + <glyph glyph-name="reply_all" unicode="&#xf122;" horiz-adv-x="1792" +d="M640 454v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45zM1792 416q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1 +q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221q169 -173 169 -509z" /> + <glyph glyph-name="star_half_empty" unicode="&#xf123;" horiz-adv-x="1664" +d="M1186 579l257 250l-356 52l-66 10l-30 60l-159 322v-963l59 -31l318 -168l-60 355l-12 66zM1638 841l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5t54 34.5 +l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5z" /> + <glyph glyph-name="location_arrow" unicode="&#xf124;" horiz-adv-x="1408" +d="M1401 1187l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5t4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5z" /> + <glyph glyph-name="crop" unicode="&#xf125;" horiz-adv-x="1664" +d="M557 256h595v595zM512 301l595 595h-595v-595zM1664 224v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23 +v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23z" /> + <glyph glyph-name="code_fork" unicode="&#xf126;" horiz-adv-x="1024" +d="M288 64q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM288 1216q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM928 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1024 1088q0 -52 -26 -96.5t-70 -69.5 +q-2 -287 -226 -414q-67 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136q0 -52 -26 -96.5t-70 -69.5v-497 +q54 26 154 57q55 17 87.5 29.5t70.5 31t59 39.5t40.5 51t28 69.5t8.5 91.5q-44 25 -70 69.5t-26 96.5q0 80 56 136t136 56t136 -56t56 -136z" /> + <glyph glyph-name="unlink" unicode="&#xf127;" horiz-adv-x="1664" +d="M439 265l-256 -256q-11 -9 
-23 -9t-23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23zM608 224v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM384 448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23t9 23t23 9h320 +q14 0 23 -9t9 -23zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239q35 -21 56 -42l336 -336q84 -86 84 -204zM1031 1044l-239 -18 +l-273 274q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l274 -274l-18 -240q-35 21 -56 42l-336 336q-84 86 -84 204q0 120 85 203l147 146q83 83 203 83q121 0 204 -85l334 -335q21 -21 42 -56zM1664 960q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9 +t-9 23t9 23t23 9h320q14 0 23 -9t9 -23zM1120 1504v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM1527 1353l-256 -256q-11 -9 -23 -9t-23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23z" /> + <glyph glyph-name="question" unicode="&#xf128;" horiz-adv-x="1024" +d="M704 280v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28zM1020 880q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5 +t-10.5 37.5v45q0 83 65 156.5t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25t5.5 28q160 266 464 266q80 0 161 -31t146 -83t106 -127.5t41 -158.5z" /> + <glyph glyph-name="_279" unicode="&#xf129;" horiz-adv-x="640" +d="M640 192v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45zM512 1344v-192q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v192 +q0 26 19 45t45 19h256q26 0 45 -19t19 -45z" /> + <glyph glyph-name="exclamation" unicode="&#xf12a;" horiz-adv-x="640" +d="M512 288v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45zM542 1344l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45l-28 768q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45z" /> + <glyph glyph-name="superscript" unicode="&#xf12b;" +d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3q-1 -3 -2.5 -6.5t-3.5 -8t-3 -6.5q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109z +M1534 846v-206h-514l-3 27q-4 28 -4 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t-82 -50.5 +t-65.5 -51.5t-30.5 -63h232v80h126z" /> + <glyph glyph-name="subscript" unicode="&#xf12c;" +d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3q-1 -3 -2.5 -6.5t-3.5 -8t-3 -6.5q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109z +M1536 -50v-206h-514l-4 27q-3 45 -3 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t-87 -63t-41 -73 +h232v80h126z" /> + <glyph glyph-name="_283" unicode="&#xf12d;" horiz-adv-x="1920" +d="M896 128l336 384h-768l-336 -384h768zM1909 1205q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 
20.5t-47.5 54.5q-15 34 -9.5 71.5t30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5z" /> + <glyph glyph-name="puzzle_piece" unicode="&#xf12e;" horiz-adv-x="1664" +d="M1664 438q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5 +t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1v1024q2 -1 17.5 -3.5t34 -5t21.5 -3.5q150 -24 245 -24q80 0 117 35q46 44 46 89 +q0 22 -15 50.5t-33.5 53t-33.5 64.5t-15 83q0 82 59 127.5t144 45.5q80 0 134 -44.5t54 -123.5q0 -41 -17.5 -77.5t-38 -59t-38 -56.5t-17.5 -71q0 -57 42 -83.5t103 -26.5q64 0 180 15t163 17v-2q-1 -2 -3.5 -17.5t-5 -34t-3.5 -21.5q-24 -150 -24 -245q0 -80 35 -117 +q44 -46 89 -46q22 0 50.5 15t53 33.5t64.5 33.5t83 15q82 0 127.5 -59t45.5 -143z" /> + <glyph glyph-name="microphone" unicode="&#xf130;" horiz-adv-x="1152" +d="M1152 832v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5 +t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45zM896 1216v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226v512q0 132 94 226t226 94t226 -94t94 -226z" /> + <glyph glyph-name="microphone_off" unicode="&#xf131;" horiz-adv-x="1408" +d="M271 591l-101 -101q-42 103 -42 214v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113zM1385 1193l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128 +q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 23l1234 1234q10 10 23 10t23 -10l82 -82q10 -10 10 -23 +t-10 -23zM1005 1325l-621 -621v512q0 132 94 226t226 94q102 0 184.5 -59t116.5 -152z" /> + <glyph glyph-name="shield" unicode="&#xf132;" horiz-adv-x="1280" +d="M1088 576v640h-448v-1137q119 63 213 137q235 184 235 360zM1280 1344v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150 +t-33.5 170.5v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" /> + <glyph glyph-name="calendar_empty" unicode="&#xf133;" horiz-adv-x="1664" +d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 +q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" /> + <glyph glyph-name="fire_extinguisher" unicode="&#xf134;" horiz-adv-x="1408" +d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 +q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 
26t37.5 53.5t60.5 70t85 67t108.5 52.5q-25 42 -25 86q0 66 47 113t113 47t113 -47t47 -113 +q0 -33 -14 -64h302q0 11 7 20t18 11l448 96q3 1 7 1q12 0 20 -7q12 -9 12 -25z" /> + <glyph glyph-name="rocket" unicode="&#xf135;" horiz-adv-x="1664" +d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 +q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" /> + <glyph glyph-name="maxcdn" unicode="&#xf136;" horiz-adv-x="1792" +d="M1745 763l-164 -763h-334l178 832q13 56 -15 88q-27 33 -83 33h-169l-204 -953h-334l204 953h-286l-204 -953h-334l204 953l-153 327h1276q101 0 189.5 -40.5t147.5 -113.5q60 -73 81 -168.5t0 -194.5z" /> + <glyph glyph-name="chevron_sign_left" unicode="&#xf137;" +d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 +t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="chevron_sign_right" unicode="&#xf138;" +d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 +t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="chevron_sign_up" unicode="&#xf139;" +d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 +t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="chevron_sign_down" unicode="&#xf13a;" +d="M813 237l454 454q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 +t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="html5" unicode="&#xf13b;" horiz-adv-x="1408" +d="M1130 939l16 175h-884l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674zM0 1408h1408l-128 -1438l-578 -162l-574 162z" /> + <glyph glyph-name="css3" unicode="&#xf13c;" horiz-adv-x="1792" +d="M275 1408h1505l-266 -1333l-804 -267l-698 267l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208z" /> + <glyph glyph-name="anchor" unicode="&#xf13d;" horiz-adv-x="1792" +d="M960 1280q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1792 352v-352q0 -22 -20 -30q-8 -2 -12 -2q-12 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30v352 +q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 -181q0 -70 -35 -128.5t-93 -92.5v-163h192q26 0 45 -19 +t19 -45v-128q0 -26 -19 -45t-45 -19h-192v-647q149 20 271.5 82.5t189.5 153.5l-100 100q-15 
16 -7 35q8 20 30 20h352q14 0 23 -9t9 -23z" /> + <glyph glyph-name="unlock_alt" unicode="&#xf13e;" horiz-adv-x="1152" +d="M1056 768q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181 +v-320h736z" /> + <glyph glyph-name="bullseye" unicode="&#xf140;" +d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM1152 640q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1280 640q0 -212 -150 -362t-362 -150t-362 150 +t-150 362t150 362t362 150t362 -150t150 -362zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5zM1536 640 +q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="ellipsis_horizontal" unicode="&#xf141;" horiz-adv-x="1408" +d="M384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM896 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM1408 800v-192q0 -40 -28 -68t-68 -28h-192 +q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> + <glyph glyph-name="ellipsis_vertical" unicode="&#xf142;" horiz-adv-x="384" +d="M384 288v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 1312v-192q0 -40 -28 -68t-68 -28h-192 +q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" /> + <glyph glyph-name="_303" unicode="&#xf143;" +d="M512 256q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM863 162q-13 233 -176.5 396.5t-396.5 176.5q-14 1 -24 -9t-10 -23v-128q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128 +q13 0 23 10t9 24zM1247 161q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128q13 0 23 10q11 9 9 23zM1536 1120v-960 +q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="play_sign" unicode="&#xf144;" +d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1152 585q32 18 32 55t-32 55l-544 320q-31 19 -64 1q-32 -19 -32 -56v-640q0 -37 32 -56 +q16 -8 32 -8q17 0 32 9z" /> + <glyph glyph-name="ticket" unicode="&#xf145;" horiz-adv-x="1792" +d="M1024 1084l316 -316l-572 -572l-316 316zM813 105l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45t19 -45l362 -362q18 -18 45 -18t45 18zM1702 742l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136 +t-136 56t-136 -56l-125 126q-37 37 -37 90.5t37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5z" /> + <glyph glyph-name="minus_sign_alt" unicode="&#xf146;" +d="M1280 576v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h896q26 0 45 19t19 
45zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 +t84.5 -203.5z" /> + <glyph glyph-name="check_minus" unicode="&#xf147;" horiz-adv-x="1408" +d="M1152 736v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5 +t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="level_up" unicode="&#xf148;" horiz-adv-x="1024" +d="M1018 933q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68z" /> + <glyph glyph-name="level_down" unicode="&#xf149;" horiz-adv-x="1024" +d="M32 1280h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34q9 19 29 19z" /> + <glyph glyph-name="check_sign" unicode="&#xf14a;" +d="M685 237l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l358 -358q19 -19 45 -19t45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5 +t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="edit_sign" unicode="&#xf14b;" +d="M404 428l152 -152l-52 -52h-56v96h-96v56zM818 818q14 -13 -3 -30l-291 -291q-17 -17 -30 -3q-14 13 3 30l291 291q17 17 30 3zM544 128l544 544l-288 288l-544 -544v-288h288zM1152 736l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28l-92 -92zM1536 1120 +v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="_312" unicode="&#xf14c;" +d="M1280 608v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 +q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="share_sign" unicode="&#xf14d;" +d="M1005 435l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5q0 -181 167 -404q11 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5 +t224 23.5v-160q0 -42 40 -59q12 -5 24 -5q26 0 45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="compass" unicode="&#xf14e;" +d="M640 448l256 128l-256 128v-256zM1024 1039v-542l-512 -256v542zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 +t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="collapse" unicode="&#xf150;" +d="M1145 861q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66q17 35 57 35h640q40 0 57 -35zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 
0 22.5 9.5t9.5 22.5zM1536 1120 +v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="collapse_top" unicode="&#xf151;" +d="M1145 419q-17 -35 -57 -35h-640q-40 0 -57 35q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120v-960 +q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="_317" unicode="&#xf152;" +d="M1088 640q0 -33 -27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52zM1280 160v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960q0 -14 9 -23t23 -9h960q14 0 23 9t9 23zM1536 1120v-960q0 -119 -84.5 -203.5 +t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="eur" unicode="&#xf153;" horiz-adv-x="1024" +d="M976 229l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9 +t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5t-29.5 1q-126 0 -226 -64t-150 -176h468q16 0 25 -12q10 -12 7 -26 +l-24 -114q-5 -26 -32 -26h-488q-3 -37 0 -105h459q15 0 25 -12q9 -12 6 -27l-24 -112q-2 -11 -11 -18.5t-20 -7.5h-387q48 -117 149.5 -185.5t228.5 -68.5q18 0 36 1.5t33.5 3.5t29.5 4.5t24.5 5t18.5 4.5l12 3l5 2q13 5 26 -2q12 -7 15 -21z" /> + <glyph glyph-name="gbp" unicode="&#xf154;" horiz-adv-x="1024" +d="M1020 399v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7 +q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5z" /> + <glyph glyph-name="usd" unicode="&#xf155;" horiz-adv-x="1024" +d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 +t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5 9.5h135q14 0 23 -9t9 -23v-176q57 -6 110.5 -23t87 -33.5 +t63.5 -37.5t39 -29t15 -14q17 -18 5 -38l-81 -146q-8 -15 -23 -16q-14 -3 -27 7q-3 3 -14.5 12t-39 26.5t-58.5 32t-74.5 26t-85.5 11.5q-95 0 -155 -43t-60 -111q0 -26 8.5 -48t29.5 -41.5t39.5 -33t56 -31t60.5 -27t70 -27.5q53 -20 81 -31.5t76 -35t75.5 -42.5t62 -50 +t53 -63.5t31.5 -76.5t13 -94z" /> + <glyph glyph-name="inr" unicode="&#xf156;" horiz-adv-x="898" +d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 +q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 
-22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" /> + <glyph glyph-name="jpy" unicode="&#xf157;" horiz-adv-x="1027" +d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 +l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q0 -13 -9.5 -22.5t-22.5 -9.5z" /> + <glyph glyph-name="rub" unicode="&#xf158;" horiz-adv-x="1280" +d="M1043 971q0 100 -65 162t-171 62h-320v-448h320q106 0 171 62t65 162zM1280 971q0 -193 -126.5 -315t-326.5 -122h-340v-118h505q14 0 23 -9t9 -23v-128q0 -14 -9 -23t-23 -9h-505v-192q0 -14 -9.5 -23t-22.5 -9h-167q-14 0 -23 9t-9 23v192h-224q-14 0 -23 9t-9 23v128 +q0 14 9 23t23 9h224v118h-224q-14 0 -23 9t-9 23v149q0 13 9 22.5t23 9.5h224v629q0 14 9 23t23 9h539q200 0 326.5 -122t126.5 -315z" /> + <glyph glyph-name="krw" unicode="&#xf159;" horiz-adv-x="1792" +d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 +t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h109l-89 344q-5 15 5 28 +q10 12 26 12h137q26 0 31 -24l90 -360h359l97 360q7 24 31 24h126q24 0 31 -24l98 -360h365l93 360q5 24 31 24h137q16 0 26 -12q10 -13 5 -28l-91 -344h111q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-145l-34 -128h179q14 0 23 -9t9 -23z" /> + <glyph glyph-name="btc" unicode="&#xf15a;" horiz-adv-x="1280" +d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 +l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0.5t48 -0.5t53 1.5t58.5 4t57 8.5t55.5 14t47.5 21t39.5 30 +t24.5 40t9.5 51zM881 827q0 33 -12.5 58.5t-30.5 42t-48 28t-55 16.5t-61.5 8t-58 2.5t-54 -1t-39.5 -0.5v-307q5 0 34.5 -0.5t46.5 0t50 2t55 5.5t51.5 11t48.5 18.5t37 27t27 38.5t9 51z" /> + <glyph glyph-name="file" unicode="&#xf15b;" +d="M1024 1024v472q22 -14 36 -28l408 -408q14 -14 28 -36h-472zM896 992q0 -40 28 -68t68 -28h544v-1056q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1600q0 40 28 68t68 28h800v-544z" /> + <glyph glyph-name="file_text" unicode="&#xf15c;" +d="M1468 1060q14 -14 28 -36h-472v472q22 -14 36 -28zM992 896h544v-1056q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1600q0 40 28 68t68 28h800v-544q0 -40 28 -68t68 -28zM1152 160v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704 +q14 0 23 9t9 23zM1152 416v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1152 672v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23z" /> + <glyph glyph-name="sort_by_alphabet" unicode="&#xf15d;" horiz-adv-x="1664" +d="M1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 
-20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1572 -23 +v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121zM1661 874v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162 +l230 -662h70z" /> + <glyph glyph-name="_329" unicode="&#xf15e;" horiz-adv-x="1664" +d="M1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1661 -150 +v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162l230 -662h70zM1572 1001v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -10v-3l14 3q9 1 30 1h248 +v119h121z" /> + <glyph glyph-name="sort_by_attributes" unicode="&#xf160;" horiz-adv-x="1792" +d="M736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1792 -32v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832 +q14 0 23 -9t9 -23zM1600 480v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1408 992v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1216 1504v-192q0 -14 -9 -23t-23 -9h-256 +q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23z" /> + <glyph glyph-name="sort_by_attributes_alt" unicode="&#xf161;" horiz-adv-x="1792" +d="M1216 -32v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192 +q14 0 23 -9t9 -23zM1408 480v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1600 992v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1792 1504v-192q0 -14 -9 -23t-23 -9h-832 +q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832q14 0 23 -9t9 -23z" /> + <glyph glyph-name="sort_by_order" unicode="&#xf162;" +d="M1346 223q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23 +zM1486 165q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5 +t82 -252.5zM1456 882v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165z" /> + <glyph glyph-name="sort_by_order_alt" unicode="&#xf163;" +d="M1346 1247q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9 +t9 -23zM1456 -142v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165zM1486 1189q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 
-24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13 +q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 172.5q0 105 72 178t181 73q123 0 205 -94.5t82 -252.5z" /> + <glyph glyph-name="_334" unicode="&#xf164;" horiz-adv-x="1664" +d="M256 192q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5zM416 704v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45zM1600 704q0 -86 -55 -149q15 -44 15 -76 +q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5v641q0 25 18 43.5t43 20.5q24 2 76 59t101 121q68 87 101 120q18 18 31 48t17.5 48.5 +t13.5 60.5q7 39 12.5 61t19.5 52t34 50q19 19 45 19q46 0 82.5 -10.5t60 -26t40 -40.5t24 -45t12 -50t5 -45t0.5 -39q0 -38 -9.5 -76t-19 -60t-27.5 -56q-3 -6 -10 -18t-11 -22t-8 -24h277q78 0 135 -57t57 -135z" /> + <glyph glyph-name="_335" unicode="&#xf165;" horiz-adv-x="1664" +d="M256 960q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5zM416 448v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640q0 -26 19 -45t45 -19h288q26 0 45 19t19 45zM1545 597q55 -61 55 -149q-1 -78 -57.5 -135 +t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 -13.5 60.5t-17.5 48.5t-31 48q-33 33 -101 120q-49 64 -101 121 +t-76 59q-25 2 -43 20.5t-18 43.5v641q0 26 19 44.5t45 19.5q35 1 158 44q77 26 120.5 39.5t121.5 29t144 15.5h17h76h36q133 -2 197 -78q58 -69 49 -181q39 -37 54 -94q17 -61 0 -117q46 -61 43 -137q0 -32 -15 -76z" /> + <glyph glyph-name="youtube_sign" unicode="&#xf166;" +d="M919 233v157q0 50 -29 50q-17 0 -33 -16v-224q16 -16 33 -16q29 0 29 49zM1103 355h66v34q0 51 -33 51t-33 -51v-34zM532 621v-70h-80v-423h-74v423h-78v70h232zM733 495v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 17 -6 54v290h66v-270q0 -24 1 -26q1 -15 15 -15 +q20 0 42 31v280h67zM985 384v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74zM1236 255v-9q0 -29 -2 -43q-3 -22 -15 -40q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86v129q0 59 20 86q29 38 80 38t78 -38 +q21 -29 21 -86v-76h-133v-65q0 -51 34 -51q24 0 30 26q0 1 0.5 7t0.5 16.5v21.5h68zM785 1079v-156q0 -51 -32 -51t-32 51v156q0 52 32 52t32 -52zM1318 366q0 177 -19 260q-10 44 -43 73.5t-76 34.5q-136 15 -412 15q-275 0 -411 -15q-44 -5 -76.5 -34.5t-42.5 -73.5 +q-20 -87 -20 -260q0 -176 20 -260q10 -43 42.5 -73t75.5 -35q137 -15 412 -15t412 15q43 5 75.5 35t42.5 73q20 84 20 260zM563 1017l90 296h-75l-51 -195l-53 195h-78q7 -23 23 -69l24 -69q35 -103 46 -158v-201h74v201zM852 936v130q0 58 -21 87q-29 38 -78 38 +q-51 0 -78 -38q-21 -29 -21 -87v-130q0 -58 21 -87q27 -38 78 -38q49 0 78 38q21 27 21 87zM1033 816h67v370h-67v-283q-22 -31 -42 -31q-15 0 -16 16q-1 2 -1 26v272h-67v-293q0 -37 6 -55q11 -27 43 -27q36 0 77 45v-40zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5 +h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="youtube" unicode="&#xf167;" +d="M971 292v-211q0 -67 -39 -67q-23 0 -45 22v301q22 22 45 22q39 0 39 -67zM1309 291v-46h-90v46q0 68 45 68t45 -68zM343 509h107v94h-312v-94h105v-569h100v569zM631 -60h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391q0 -49 8 -73 +q12 -37 58 -37q48 0 102 61v-54zM1060 88v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 
-54v217h-89v-663h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100zM1398 98v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39 51 -106 51q-68 0 -107 -51 +q-28 -37 -28 -116v-173q0 -79 29 -116q39 -51 108 -51q72 0 108 53q18 27 21 54q2 9 2 58zM790 1011v210q0 69 -43 69t-43 -69v-210q0 -70 43 -70t43 70zM1509 260q0 -234 -26 -350q-14 -59 -58 -99t-102 -46q-184 -21 -555 -21t-555 21q-58 6 -102.5 46t-57.5 99 +q-26 112 -26 350q0 234 26 350q14 59 58 99t103 47q183 20 554 20t555 -20q58 -7 102.5 -47t57.5 -99q26 -112 26 -350zM511 1536h102l-121 -399v-271h-100v271q-14 74 -61 212q-37 103 -65 187h106l71 -263zM881 1203v-175q0 -81 -28 -118q-38 -51 -106 -51q-67 0 -105 51 +q-28 38 -28 118v175q0 80 28 117q38 51 105 51q68 0 106 -51q28 -37 28 -117zM1216 1365v-499h-91v55q-53 -62 -103 -62q-46 0 -59 37q-8 24 -8 75v394h91v-367q0 -33 1 -35q3 -22 21 -22q27 0 57 43v381h91z" /> + <glyph glyph-name="xing" unicode="&#xf168;" horiz-adv-x="1408" +d="M597 869q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17t0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45zM1403 1511q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37q-10 -15 -32 -15h-239q-42 0 -66 45l-339 622q18 32 531 942 +q25 45 64 45h241q22 0 31 -15z" /> + <glyph glyph-name="xing_sign" unicode="&#xf169;" +d="M685 771q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29l125 -216v-1l-196 -346q-9 -14 0 -28q8 -13 24 -13h185q31 0 50 36zM1309 1268q-7 12 -24 12h-187q-30 0 -49 -35l-411 -729q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1 +l409 723q8 16 0 28zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="youtube_play" unicode="&#xf16a;" horiz-adv-x="1792" +d="M711 408l484 250l-484 253v-503zM896 1270q168 0 324.5 -4.5t229.5 -9.5l73 -4q1 0 17 -1.5t23 -3t23.5 -4.5t28.5 -8t28 -13t31 -19.5t29 -26.5q6 -6 15.5 -18.5t29 -58.5t26.5 -101q8 -64 12.5 -136.5t5.5 -113.5v-40v-136q1 -145 -18 -290q-7 -55 -25 -99.5t-32 -61.5 +l-14 -17q-14 -15 -29 -26.5t-31 -19t-28 -12.5t-28.5 -8t-24 -4.5t-23 -3t-16.5 -1.5q-251 -19 -627 -19q-207 2 -359.5 6.5t-200.5 7.5l-49 4l-36 4q-36 5 -54.5 10t-51 21t-56.5 41q-6 6 -15.5 18.5t-29 58.5t-26.5 101q-8 64 -12.5 136.5t-5.5 113.5v40v136 +q-1 145 18 290q7 55 25 99.5t32 61.5l14 17q14 15 29 26.5t31 19.5t28 13t28.5 8t23.5 4.5t23 3t17 1.5q251 18 627 18z" /> + <glyph glyph-name="dropbox" unicode="&#xf16b;" horiz-adv-x="1792" +d="M402 829l494 -305l-342 -285l-490 319zM1388 274v-108l-490 -293v-1l-1 1l-1 -1v1l-489 293v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284zM554 1418l342 -285l-494 -304l-338 270zM1390 829l338 -271l-489 -319l-343 285zM1239 1418l489 -319l-338 -270l-494 304z" /> + <glyph glyph-name="stackexchange" unicode="&#xf16c;" +d="M1289 -96h-1118v480h-160v-640h1438v640h-160v-480zM347 428l33 157l783 -165l-33 -156zM450 802l67 146l725 -339l-67 -145zM651 1158l102 123l614 -513l-102 -123zM1048 1536l477 -641l-128 -96l-477 641zM330 65v159h800v-159h-800z" /> + <glyph glyph-name="instagram" unicode="&#xf16d;" +d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1162 640q0 -164 -115 -279t-279 -115t-279 115t-115 279t115 279t279 115t279 -115t115 -279zM1270 1050q0 -38 -27 -65t-65 -27t-65 27t-27 65t27 65t65 27t65 -27t27 -65zM768 1270 +q-7 0 -76.5 0.5t-105.5 0t-96.5 -3t-103 -10t-71.5 -18.5q-50 -20 -88 -58t-58 -88q-11 -29 -18.5 -71.5t-10 -103t-3 -96.5t0 -105.5t0.5 -76.5t-0.5 -76.5t0 -105.5t3 -96.5t10 -103t18.5 -71.5q20 -50 58 -88t88 -58q29 -11 71.5 -18.5t103 -10t96.5 -3t105.5 0t76.5 
0.5 +t76.5 -0.5t105.5 0t96.5 3t103 10t71.5 18.5q50 20 88 58t58 88q11 29 18.5 71.5t10 103t3 96.5t0 105.5t-0.5 76.5t0.5 76.5t0 105.5t-3 96.5t-10 103t-18.5 71.5q-20 50 -58 88t-88 58q-29 11 -71.5 18.5t-103 10t-96.5 3t-105.5 0t-76.5 -0.5zM1536 640q0 -229 -5 -317 +q-10 -208 -124 -322t-322 -124q-88 -5 -317 -5t-317 5q-208 10 -322 124t-124 322q-5 88 -5 317t5 317q10 208 124 322t322 124q88 5 317 5t317 -5q208 -10 322 -124t124 -322q5 -88 5 -317z" /> + <glyph glyph-name="flickr" unicode="&#xf16e;" +d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960zM698 640q0 88 -62 150t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150zM1262 640q0 88 -62 150 +t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150z" /> + <glyph glyph-name="adn" unicode="&#xf170;" +d="M768 914l201 -306h-402zM1133 384h94l-459 691l-459 -691h94l104 160h522zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="f171" unicode="&#xf171;" horiz-adv-x="1408" +d="M815 677q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82t52 58q36 18 72.5 12t64 -35.5t27.5 -67.5zM926 698q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5q4 -91 77.5 -155t165.5 -56q91 8 152 84t50 168zM1165 1240q-20 27 -56 44.5t-58 22 +t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5zM1222 205q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q-86 -48 -189.5 -71.5t-202 -22t-201.5 18.5q-46 8 -81.5 18 +t-76.5 27t-73 43.5t-52 61.5q-25 96 -57 292l6 16l18 9q223 -148 506.5 -148t507.5 148q21 -6 24 -23t-5 -45t-8 -37zM1403 1166q-26 -167 -111 -655q-5 -30 -27 -56t-43.5 -40t-54.5 -31q-252 -126 -610 -88q-248 27 -394 139q-15 12 -25.5 26.5t-17 35t-9 34t-6 39.5 +t-5.5 35q-9 50 -26.5 150t-28 161.5t-23.5 147.5t-22 158q3 26 17.5 48.5t31.5 37.5t45 30t46 22.5t48 18.5q125 46 313 64q379 37 676 -50q155 -46 215 -122q16 -20 16.5 -51t-5.5 -54z" /> + <glyph glyph-name="bitbucket_sign" unicode="&#xf172;" +d="M848 666q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5t43.5 -70.5q39 -23 81 4t36 72zM928 682q8 -66 -36 -121t-110 -61t-119 40t-56 113q-2 49 25.5 93t72.5 64q70 31 141.5 -10t81.5 -118zM1100 1073q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5 +t-52.5 16t-54.5 32.5q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5zM1142 327q0 7 5.5 26.5t3 32t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6l-5 -12q26 -154 41 -210q47 -81 204 -108q249 -46 428 53q34 19 49 51.5t22.5 85.5t12.5 71z +M1272 1020q9 53 -8 75q-43 55 -155 88q-216 63 -487 36q-132 -12 -226 -46q-38 -15 -59.5 -25t-47 -34t-29.5 -54q8 -68 19 -138t29 -171t24 -137q1 -5 5 -31t7 -36t12 -27t22 -28q105 -80 284 -100q259 -28 440 63q24 13 39.5 23t31 29t19.5 40q48 267 80 473zM1536 1120 +v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="tumblr" unicode="&#xf173;" horiz-adv-x="1024" +d="M944 207l80 -237q-23 -35 -111 -66t-177 -32q-104 -2 -190.5 26t-142.5 74t-95 106t-55.5 120t-16.5 118v544h-168v215q72 26 129 69.5t91 90t58 102t34 99t15 88.5q1 5 4.5 8.5t7.5 3.5h244v-424h333v-252h-334v-518q0 -30 6.5 -56t22.5 -52.5t49.5 -41.5t81.5 -14 +q78 2 134 29z" /> + <glyph glyph-name="tumblr_sign" unicode="&#xf174;" +d="M1136 75l-62 183q-44 -22 -103 -22q-36 -1 -62 10.5t-38.5 31.5t-17.5 40.5t-5 
43.5v398h257v194h-256v326h-188q-8 0 -9 -10q-5 -44 -17.5 -87t-39 -95t-77 -95t-118.5 -68v-165h130v-418q0 -57 21.5 -115t65 -111t121 -85.5t176.5 -30.5q69 1 136.5 25t85.5 50z +M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="long_arrow_down" unicode="&#xf175;" horiz-adv-x="768" +d="M765 237q8 -19 -5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19z" /> + <glyph glyph-name="long_arrow_up" unicode="&#xf176;" horiz-adv-x="768" +d="M765 1043q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19t5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35z" /> + <glyph glyph-name="long_arrow_left" unicode="&#xf177;" horiz-adv-x="1792" +d="M1792 736v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23z" /> + <glyph glyph-name="long_arrow_right" unicode="&#xf178;" horiz-adv-x="1792" +d="M1728 643q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23z" /> + <glyph glyph-name="apple" unicode="&#xf179;" horiz-adv-x="1408" +d="M1393 321q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503q0 228 113 374q113 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65 +q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126zM1017 1494q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11q0 -4 0.5 -10t0.5 -10z" /> + <glyph glyph-name="windows" unicode="&#xf17a;" horiz-adv-x="1664" +d="M682 530v-651l-682 94v557h682zM682 1273v-659h-682v565zM1664 530v-786l-907 125v661h907zM1664 1408v-794h-907v669z" /> + <glyph glyph-name="android" unicode="&#xf17b;" horiz-adv-x="1408" +d="M493 1053q16 0 27.5 11.5t11.5 27.5t-11.5 27.5t-27.5 11.5t-27 -11.5t-11 -27.5t11 -27.5t27 -11.5zM915 1053q16 0 27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5t11.5 -27.5t27.5 -11.5zM103 869q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30 +t-73 30t-30 73v430q0 42 30 72t73 30zM1163 850v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78 32t-32 78v666h918zM931 1255q107 -55 171 -153.5t64 -215.5 +h-925q0 117 64 215.5t172 153.5l-71 131q-7 13 5 20q13 6 20 -6l72 -132q95 42 201 42t201 -42l72 132q7 12 20 6q12 -7 5 -20zM1408 767v-430q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73v430q0 43 30 72.5t72 29.5q43 0 73 -29.5t30 -72.5z" /> + <glyph glyph-name="linux" unicode="&#xf17c;" +d="M663 1125q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5q0 12 19 15h10zM750 1111q-4 -1 -11.5 6.5t-17.5 4.5q24 11 32 -2q3 -6 -3 -9zM399 684q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-10 -11 -1 -12q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2z +M1254 325q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69t-19.5 45.5t-15.5 24.5t-13 15t-7.5 7 +q-14 62 -31 103t-29.5 56t-23.5 33t-15 40q-4 21 6 53.5t4.5 49.5t-44.5 25q-15 3 -44.5 18t-35.5 16q-8 1 -11 26t8 51t36 27q37 3 51 -30t4 -58q-11 -19 -2 -26.5t30 
-0.5q13 4 13 36v37q-5 30 -13.5 50t-21 30.5t-23.5 15t-27 7.5q-107 -8 -89 -134q0 -15 -1 -15 +q-9 9 -29.5 10.5t-33 -0.5t-15.5 5q1 57 -16 90t-45 34q-27 1 -41.5 -27.5t-16.5 -59.5q-1 -15 3.5 -37t13 -37.5t15.5 -13.5q10 3 16 14q4 9 -7 8q-7 0 -15.5 14.5t-9.5 33.5q-1 22 9 37t34 14q17 0 27 -21t9.5 -39t-1.5 -22q-22 -15 -31 -29q-8 -12 -27.5 -23.5 +t-20.5 -12.5q-13 -14 -15.5 -27t7.5 -18q14 -8 25 -19.5t16 -19t18.5 -13t35.5 -6.5q47 -2 102 15q2 1 23 7t34.5 10.5t29.5 13t21 17.5q9 14 20 8q5 -3 6.5 -8.5t-3 -12t-16.5 -9.5q-20 -6 -56.5 -21.5t-45.5 -19.5q-44 -19 -70 -23q-25 -5 -79 2q-10 2 -9 -2t17 -19 +q25 -23 67 -22q17 1 36 7t36 14t33.5 17.5t30 17t24.5 12t17.5 2.5t8.5 -11q0 -2 -1 -4.5t-4 -5t-6 -4.5t-8.5 -5t-9 -4.5t-10 -5t-9.5 -4.5q-28 -14 -67.5 -44t-66.5 -43t-49 -1q-21 11 -63 73q-22 31 -25 22q-1 -3 -1 -10q0 -25 -15 -56.5t-29.5 -55.5t-21 -58t11.5 -63 +q-23 -6 -62.5 -90t-47.5 -141q-2 -18 -1.5 -69t-5.5 -59q-8 -24 -29 -3q-32 31 -36 94q-2 28 4 56q4 19 -1 18q-2 -1 -4 -5q-36 -65 10 -166q5 -12 25 -28t24 -20q20 -23 104 -90.5t93 -76.5q16 -15 17.5 -38t-14 -43t-45.5 -23q8 -15 29 -44.5t28 -54t7 -70.5q46 24 7 92 +q-4 8 -10.5 16t-9.5 12t-2 6q3 5 13 9.5t20 -2.5q46 -52 166 -36q133 15 177 87q23 38 34 30q12 -6 10 -52q-1 -25 -23 -92q-9 -23 -6 -37.5t24 -15.5q3 19 14.5 77t13.5 90q2 21 -6.5 73.5t-7.5 97t23 70.5q15 18 51 18q1 37 34.5 53t72.5 10.5t60 -22.5zM626 1152 +q3 17 -2.5 30t-11.5 15q-9 2 -9 -7q2 -5 5 -6q10 0 7 -15q-3 -20 8 -20q3 0 3 3zM1045 955q-2 8 -6.5 11.5t-13 5t-14.5 5.5q-5 3 -9.5 8t-7 8t-5.5 6.5t-4 4t-4 -1.5q-14 -16 7 -43.5t39 -31.5q9 -1 14.5 8t3.5 20zM867 1168q0 11 -5 19.5t-11 12.5t-9 3q-6 0 -8 -2t0 -4 +t5 -3q14 -4 18 -31q0 -3 8 2q2 2 2 3zM921 1401q0 2 -2.5 5t-9 7t-9.5 6q-15 15 -24 15q-9 -1 -11.5 -7.5t-1 -13t-0.5 -12.5q-1 -4 -6 -10.5t-6 -9t3 -8.5q4 -3 8 0t11 9t15 9q1 1 9 1t15 2t9 7zM1486 60q20 -12 31 -24.5t12 -24t-2.5 -22.5t-15.5 -22t-23.5 -19.5 +t-30 -18.5t-31.5 -16.5t-32 -15.5t-27 -13q-38 -19 -85.5 -56t-75.5 -64q-17 -16 -68 -19.5t-89 14.5q-18 9 -29.5 23.5t-16.5 25.5t-22 19.5t-47 9.5q-44 1 -130 1q-19 0 -57 -1.5t-58 -2.5q-44 -1 -79.5 -15t-53.5 -30t-43.5 -28.5t-53.5 -11.5q-29 1 -111 31t-146 43 +q-19 4 -51 9.5t-50 9t-39.5 9.5t-33.5 14.5t-17 19.5q-10 23 7 66.5t18 54.5q1 16 -4 40t-10 42.5t-4.5 36.5t10.5 27q14 12 57 14t60 12q30 18 42 35t12 51q21 -73 -32 -106q-32 -20 -83 -15q-34 3 -43 -10q-13 -15 5 -57q2 -6 8 -18t8.5 -18t4.5 -17t1 -22q0 -15 -17 -49 +t-14 -48q3 -17 37 -26q20 -6 84.5 -18.5t99.5 -20.5q24 -6 74 -22t82.5 -23t55.5 -4q43 6 64.5 28t23 48t-7.5 58.5t-19 52t-20 36.5q-121 190 -169 242q-68 74 -113 40q-11 -9 -15 15q-3 16 -2 38q1 29 10 52t24 47t22 42q8 21 26.5 72t29.5 78t30 61t39 54 +q110 143 124 195q-12 112 -16 310q-2 90 24 151.5t106 104.5q39 21 104 21q53 1 106 -13.5t89 -41.5q57 -42 91.5 -121.5t29.5 -147.5q-5 -95 30 -214q34 -113 133 -218q55 -59 99.5 -163t59.5 -191q8 -49 5 -84.5t-12 -55.5t-20 -22q-10 -2 -23.5 -19t-27 -35.5 +t-40.5 -33.5t-61 -14q-18 1 -31.5 5t-22.5 13.5t-13.5 15.5t-11.5 20.5t-9 19.5q-22 37 -41 30t-28 -49t7 -97q20 -70 1 -195q-10 -65 18 -100.5t73 -33t85 35.5q59 49 89.5 66.5t103.5 42.5q53 18 77 36.5t18.5 34.5t-25 28.5t-51.5 23.5q-33 11 -49.5 48t-15 72.5 +t15.5 47.5q1 -31 8 -56.5t14.5 -40.5t20.5 -28.5t21 -19t21.5 -13t16.5 -9.5z" /> + <glyph glyph-name="dribble" unicode="&#xf17d;" +d="M1024 36q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5t-103 -148l-15 11q184 -150 418 -150q132 0 256 52zM839 643q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81 +t99.5 48l37 13q4 1 13 3.5t13 4.5zM732 855q-120 213 -244 378q-138 -65 -234 -186t-128 
265zM1152 1171q-126 -99 -172 -249.5t-0.5 -300.5t172.5 -249 +q127 99 172.5 249t-0.5 300.5t-172 249.5zM1185 1195q130 -107 177.5 -265.5t0.5 -317t-178 -264.5q128 -85 281 -85q104 0 198 40.5t162 108.5t108.5 162t40.5 198q0 103 -40.5 197t-108.5 162t-162.5 108.5t-197.5 40.5q-153 0 -281 -85zM1926 473h7v3h-17v-3h7v-17h3v17z +M1955 456h4v20h-5l-6 -13l-6 13h-5v-20h3v15l6 -13h4l5 13v-15zM1947 16v-2h-2h-3v3h3h2v-1zM1947 7h3l-4 5h2l1 1q1 1 1 3t-1 3l-1 1h-3h-6v-13h3v5h1zM685 75q0 19 11 31t30 12q18 0 29 -12.5t11 -30.5q0 -19 -11 -31t-29 -12q-19 0 -30 12t-11 31zM1158 119q30 0 35 -32 +h-70q5 32 35 32zM1514 75q0 19 11 31t29 12t29.5 -12.5t11.5 -30.5q0 -19 -11 -31t-30 -12q-18 0 -29 12t-11 31zM1786 75q0 18 11.5 30.5t29.5 12.5t29.5 -12.5t11.5 -30.5q0 -19 -11.5 -31t-29.5 -12t-29.5 12.5t-11.5 30.5zM1944 3q-2 0 -4 1q-1 0 -3 2t-2 3q-1 2 -1 4 +q0 3 1 4q0 2 2 4l1 1q2 0 2 1q2 1 4 1q3 0 4 -1l4 -2l2 -4v-1q1 -2 1 -3l-1 -1v-3t-1 -1l-1 -2q-2 -2 -4 -2q-1 -1 -4 -1zM599 7h30v85q0 24 -14.5 38.5t-39.5 15.5q-32 0 -47 -24q-14 24 -45 24q-24 0 -39 -20v16h-30v-135h30v75q0 36 33 36q30 0 30 -36v-75h29v75 +q0 36 33 36q30 0 30 -36v-75zM765 7h29v68v67h-29v-16q-17 20 -43 20q-29 0 -48 -20t-19 -51t19 -51t48 -20q28 0 43 20v-17zM943 48q0 34 -47 40l-14 2q-23 4 -23 14q0 15 25 15q23 0 43 -11l12 24q-22 14 -55 14q-26 0 -41 -12t-15 -32q0 -33 47 -39l13 -2q24 -4 24 -14 +q0 -17 -31 -17q-25 0 -45 14l-13 -23q25 -17 58 -17q29 0 45.5 12t16.5 32zM1073 14l-8 25q-13 -7 -26 -7q-19 0 -19 22v61h48v27h-48v41h-30v-41h-28v-27h28v-61q0 -50 47 -50q21 0 36 10zM1159 146q-29 0 -48 -20t-19 -51q0 -32 19.5 -51.5t49.5 -19.5q33 0 55 19l-14 22 +q-18 -15 -39 -15q-34 0 -41 33h101v12q0 32 -18 51.5t-46 19.5zM1318 146q-23 0 -35 -20v16h-30v-135h30v76q0 35 29 35q10 0 18 -4l9 28q-9 4 -21 4zM1348 75q0 -31 19.5 -51t52.5 -20q29 0 48 16l-14 24q-18 -13 -35 -12q-18 0 -29.5 12t-11.5 31t11.5 31t29.5 12 +q19 0 35 -12l14 24q-20 16 -48 16q-33 0 -52.5 -20t-19.5 -51zM1593 7h30v68v67h-30v-16q-15 20 -42 20q-29 0 -48.5 -20t-19.5 -51t19.5 -51t48.5 -20q28 0 42 20v-17zM1726 146q-23 0 -35 -20v16h-29v-135h29v76q0 35 29 35q10 0 18 -4l9 28q-8 4 -21 4zM1866 7h29v68v122 +h-29v-71q-15 20 -43 20t-47.5 -20.5t-19.5 -50.5t19.5 -50.5t47.5 -20.5q29 0 43 20v-17zM1944 27l-2 -1h-3q-2 -1 -4 -3q-3 -1 -3 -4q-1 -2 -1 -6q0 -3 1 -5q0 -2 3 -4q2 -2 4 -3t5 -1q4 0 6 1q0 1 2 2l2 1q1 1 3 4q1 2 1 5q0 4 -1 6q-1 1 -3 4q0 1 -2 2l-2 1q-1 0 -3 0.5 +t-3 0.5zM2304 1280v-1280q0 -52 -38 -90t-90 -38h-2048q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h2048q52 0 90 -38t38 -90z" /> + <glyph glyph-name="_467" unicode="&#xf1f2;" horiz-adv-x="2304" +d="M313 759q0 -51 -36 -84q-29 -26 -89 -26h-17v220h17q61 0 89 -27q36 -31 36 -83zM2089 824q0 -52 -64 -52h-19v101h20q63 0 63 -49zM380 759q0 74 -50 120.5t-129 46.5h-95v-333h95q74 0 119 38q60 51 60 128zM410 593h65v333h-65v-333zM730 694q0 40 -20.5 62t-75.5 42 +q-29 10 -39.5 19t-10.5 23q0 16 13.5 26.5t34.5 10.5q29 0 53 -27l34 44q-41 37 -98 37q-44 0 -74 -27.5t-30 -67.5q0 -35 18 -55.5t64 -36.5q37 -13 45 -19q19 -12 19 -34q0 -20 -14 -33.5t-36 -13.5q-48 0 -71 44l-42 -40q44 -64 115 -64q51 0 83 30.5t32 79.5zM1008 604 +v77q-37 -37 -78 -37q-49 0 -80.5 32.5t-31.5 82.5q0 48 31.5 81.5t77.5 33.5q43 0 81 -38v77q-40 20 -80 20q-74 0 -125.5 -50.5t-51.5 -123.5t51 -123.5t125 -50.5q42 0 81 19zM2240 0v527q-65 -40 -144.5 -84t-237.5 -117t-329.5 -137.5t-417.5 -134.5t-504 -118h1569 +q26 0 45 19t19 45zM1389 757q0 75 -53 128t-128 53t-128 -53t-53 -128t53 -128t128 -53t128 53t53 128zM1541 584l144 342h-71l-90 -224l-89 224h-71l142 -342h35zM1714 593h184v56h-119v90h115v56h-115v74h119v57h-184v-333zM2105 593h80l-105 140q76 16 76 94q0 47 -31 73 +t-87 
26h-97v-333h65v133h9zM2304 1274v-1268q0 -56 -38.5 -95t-93.5 -39h-2040q-55 0 -93.5 39t-38.5 95v1268q0 56 38.5 95t93.5 39h2040q55 0 93.5 -39t38.5 -95z" /> + <glyph glyph-name="f1f3" unicode="&#xf1f3;" horiz-adv-x="2304" +d="M119 854h89l-45 108zM740 328l74 79l-70 79h-163v-49h142v-55h-142v-54h159zM898 406l99 -110v217zM1186 453q0 33 -40 33h-84v-69h83q41 0 41 36zM1475 457q0 29 -42 29h-82v-61h81q43 0 43 32zM1197 923q0 29 -42 29h-82v-60h81q43 0 43 31zM1656 854h89l-44 108z +M699 1009v-271h-66v212l-94 -212h-57l-94 212v-212h-132l-25 60h-135l-25 -60h-70l116 271h96l110 -257v257h106l85 -184l77 184h108zM1255 453q0 -20 -5.5 -35t-14 -25t-22.5 -16.5t-26 -10t-31.5 -4.5t-31.5 -1t-32.5 0.5t-29.5 0.5v-91h-126l-80 90l-83 -90h-256v271h260 +l80 -89l82 89h207q109 0 109 -89zM964 794v-56h-217v271h217v-57h-152v-49h148v-55h-148v-54h152zM2304 235v-229q0 -55 -38.5 -94.5t-93.5 -39.5h-2040q-55 0 -93.5 39.5t-38.5 94.5v678h111l25 61h55l25 -61h218v46l19 -46h113l20 47v-47h541v99l10 1q10 0 10 -14v-86h279 +v23q23 -12 55 -18t52.5 -6.5t63 0.5t51.5 1l25 61h56l25 -61h227v58l34 -58h182v378h-180v-44l-25 44h-185v-44l-23 44h-249q-69 0 -109 -22v22h-172v-22q-24 22 -73 22h-628l-43 -97l-43 97h-198v-44l-22 44h-169l-78 -179v391q0 55 38.5 94.5t93.5 39.5h2040 +q55 0 93.5 -39.5t38.5 -94.5v-678h-120q-51 0 -81 -22v22h-177q-55 0 -78 -22v22h-316v-22q-31 22 -87 22h-209v-22q-23 22 -91 22h-234l-54 -58l-50 58h-349v-378h343l55 59l52 -59h211v89h21q59 0 90 13v-102h174v99h8q8 0 10 -2t2 -10v-87h529q57 0 88 24v-24h168 +q60 0 95 17zM1546 469q0 -23 -12 -43t-34 -29q25 -9 34 -26t9 -46v-54h-65v45q0 33 -12 43.5t-46 10.5h-69v-99h-65v271h154q48 0 77 -15t29 -58zM1269 936q0 -24 -12.5 -44t-33.5 -29q26 -9 34.5 -25.5t8.5 -46.5v-53h-65q0 9 0.5 26.5t0 25t-3 18.5t-8.5 16t-17.5 8.5 +t-29.5 3.5h-70v-98h-64v271l153 -1q49 0 78 -14.5t29 -57.5zM1798 327v-56h-216v271h216v-56h-151v-49h148v-55h-148v-54zM1372 1009v-271h-66v271h66zM2065 357q0 -86 -102 -86h-126v58h126q34 0 34 25q0 16 -17 21t-41.5 5t-49.5 3.5t-42 22.5t-17 55q0 39 26 60t66 21 +h130v-57h-119q-36 0 -36 -25q0 -16 17.5 -20.5t42 -4t49 -2.5t42 -21.5t17.5 -54.5zM2304 407v-101q-24 -35 -88 -35h-125v58h125q33 0 33 25q0 13 -12.5 19t-31 5.5t-40 2t-40 8t-31 24t-12.5 48.5q0 39 26.5 60t66.5 21h129v-57h-118q-36 0 -36 -25q0 -20 29 -22t68.5 -5 +t56.5 -26zM2139 1008v-270h-92l-122 203v-203h-132l-26 60h-134l-25 -60h-75q-129 0 -129 133q0 138 133 138h63v-59q-7 0 -28 1t-28.5 0.5t-23 -2t-21.5 -6.5t-14.5 -13.5t-11.5 -23t-3 -33.5q0 -38 13.5 -58t49.5 -20h29l92 213h97l109 -256v256h99l114 -188v188h66z" /> + <glyph glyph-name="_469" unicode="&#xf1f4;" horiz-adv-x="2304" +d="M745 630q0 -37 -25.5 -61.5t-62.5 -24.5q-29 0 -46.5 16t-17.5 44q0 37 25 62.5t62 25.5q28 0 46.5 -16.5t18.5 -45.5zM1530 779q0 -42 -22 -57t-66 -15l-32 -1l17 107q2 11 13 11h18q22 0 35 -2t25 -12.5t12 -30.5zM1881 630q0 -36 -25.5 -61t-61.5 -25q-29 0 -47 16 +t-18 44q0 37 25 62.5t62 25.5q28 0 46.5 -16.5t18.5 -45.5zM513 801q0 59 -38.5 85.5t-100.5 26.5h-160q-19 0 -21 -19l-65 -408q-1 -6 3 -11t10 -5h76q20 0 22 19l18 110q1 8 7 13t15 6.5t17 1.5t19 -1t14 -1q86 0 135 48.5t49 134.5zM822 489l41 261q1 6 -3 11t-10 5h-76 +q-14 0 -17 -33q-27 40 -95 40q-72 0 -122.5 -54t-50.5 -127q0 -59 34.5 -94t92.5 -35q28 0 58 12t48 32q-4 -12 -4 -21q0 -16 13 -16h69q19 0 22 19zM1269 752q0 5 -4 9.5t-9 4.5h-77q-11 0 -18 -10l-106 -156l-44 150q-5 16 -22 16h-75q-5 0 -9 -4.5t-4 -9.5q0 -2 19.5 -59 +t42 -123t23.5 -70q-82 -112 -82 -120q0 -13 13 -13h77q11 0 18 10l255 368q2 2 2 7zM1649 801q0 59 -38.5 85.5t-100.5 26.5h-159q-20 0 -22 -19l-65 -408q-1 -6 3 -11t10 -5h82q12 0 16 13l18 116q1 8 7 13t15 6.5t17 1.5t19 -1t14 -1q86 0 135 48.5t49 
134.5zM1958 489 +l41 261q1 6 -3 11t-10 5h-76q-14 0 -17 -33q-26 40 -95 40q-72 0 -122.5 -54t-50.5 -127q0 -59 34.5 -94t92.5 -35q29 0 59 12t47 32q0 -1 -2 -9t-2 -12q0 -16 13 -16h69q19 0 22 19zM2176 898v1q0 14 -13 14h-74q-11 0 -13 -11l-65 -416l-1 -2q0 -5 4 -9.5t10 -4.5h66 +q19 0 21 19zM392 764q-5 -35 -26 -46t-60 -11l-33 -1l17 107q2 11 13 11h19q40 0 58 -11.5t12 -48.5zM2304 1280v-1280q0 -52 -38 -90t-90 -38h-2048q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h2048q52 0 90 -38t38 -90z" /> + <glyph glyph-name="_470" unicode="&#xf1f5;" horiz-adv-x="2304" +d="M1597 633q0 -69 -21 -106q-19 -35 -52 -35q-23 0 -41 9v224q29 30 57 30q57 0 57 -122zM2035 669h-110q6 98 56 98q51 0 54 -98zM476 534q0 59 -33 91.5t-101 57.5q-36 13 -52 24t-16 25q0 26 38 26q58 0 124 -33l18 112q-67 32 -149 32q-77 0 -123 -38q-48 -39 -48 -109 +q0 -58 32.5 -90.5t99.5 -56.5q39 -14 54.5 -25.5t15.5 -27.5q0 -31 -48 -31q-29 0 -70 12.5t-72 30.5l-18 -113q72 -41 168 -41q81 0 129 37q51 41 51 117zM771 749l19 111h-96v135l-129 -21l-18 -114l-46 -8l-17 -103h62v-219q0 -84 44 -120q38 -30 111 -30q32 0 79 11v118 +q-32 -7 -44 -7q-42 0 -42 50v197h77zM1087 724v139q-15 3 -28 3q-32 0 -55.5 -16t-33.5 -46l-10 56h-131v-471h150v306q26 31 82 31q16 0 26 -2zM1124 389h150v471h-150v-471zM1746 638q0 122 -45 179q-40 52 -111 52q-64 0 -117 -56l-8 47h-132v-645l150 25v151 +q36 -11 68 -11q83 0 134 56q61 65 61 202zM1278 986q0 33 -23 56t-56 23t-56 -23t-23 -56t23 -56.5t56 -23.5t56 23.5t23 56.5zM2176 629q0 113 -48 176q-50 64 -144 64q-96 0 -151.5 -66t-55.5 -180q0 -128 63 -188q55 -55 161 -55q101 0 160 40l-16 103q-57 -31 -128 -31 +q-43 0 -63 19q-23 19 -28 66h248q2 14 2 52zM2304 1280v-1280q0 -52 -38 -90t-90 -38h-2048q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h2048q52 0 90 -38t38 -90z" /> + <glyph glyph-name="_471" unicode="&#xf1f6;" horiz-adv-x="2048" +d="M1558 684q61 -356 298 -556q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-180.5 74.5t-75.5 180.5zM1024 -176q16 0 16 16t-16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5zM2026 1424q8 -10 7.5 -23.5t-10.5 -22.5 +l-1872 -1622q-10 -8 -23.5 -7t-21.5 11l-84 96q-8 10 -7.5 23.5t10.5 21.5l186 161q-19 32 -19 66q50 42 91 88t85 119.5t74.5 158.5t50 206t19.5 260q0 152 117 282.5t307 158.5q-8 19 -8 39q0 40 28 68t68 28t68 -28t28 -68q0 -20 -8 -39q124 -18 219 -82.5t148 -157.5 +l418 363q10 8 23.5 7t21.5 -11z" /> + <glyph glyph-name="_472" unicode="&#xf1f7;" horiz-adv-x="2048" +d="M1040 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM503 315l877 760q-42 88 -132.5 146.5t-223.5 58.5q-93 0 -169.5 -31.5t-121.5 -80.5t-69 -103t-24 -105q0 -384 -137 -645zM1856 128 +q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-180.5 74.5t-75.5 180.5l149 129h757q-166 187 -227 459l111 97q61 -356 298 -556zM1942 1520l84 -96q8 -10 7.5 -23.5t-10.5 -22.5l-1872 -1622q-10 -8 -23.5 -7t-21.5 11l-84 96q-8 10 -7.5 23.5t10.5 21.5l186 161 +q-19 32 -19 66q50 42 91 88t85 119.5t74.5 158.5t50 206t19.5 260q0 152 117 282.5t307 158.5q-8 19 -8 39q0 40 28 68t68 28t68 -28t28 -68q0 -20 -8 -39q124 -18 219 -82.5t148 -157.5l418 363q10 8 23.5 7t21.5 -11z" /> + <glyph glyph-name="_473" unicode="&#xf1f8;" horiz-adv-x="1408" +d="M512 160v704q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-704q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM768 160v704q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-704q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1024 160v704q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-704 +q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q0 -14 -9 -23t-23 -9h-96v-948q0 
-83 -47 -143.5t-113 -60.5h-832q-66 0 -113 58.5t-47 141.5v952h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h309l70 167 +q15 37 54 63t79 26h320q40 0 79 -26t54 -63l70 -167h309q14 0 23 -9t9 -23z" /> + <glyph glyph-name="_474" unicode="&#xf1f9;" +d="M1150 462v-109q0 -50 -36.5 -89t-94 -60.5t-118 -32.5t-117.5 -11q-205 0 -342.5 139t-137.5 346q0 203 136 339t339 136q34 0 75.5 -4.5t93 -18t92.5 -34t69 -56.5t28 -81v-109q0 -16 -16 -16h-118q-16 0 -16 16v70q0 43 -65.5 67.5t-137.5 24.5q-140 0 -228.5 -91.5 +t-88.5 -237.5q0 -151 91.5 -249.5t233.5 -98.5q68 0 138 24t70 66v70q0 7 4.5 11.5t10.5 4.5h119q6 0 11 -4.5t5 -11.5zM768 1280q-130 0 -248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 204t51 248.5 +t-51 248.5t-136.5 204t-204 136.5t-248.5 51zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="_475" unicode="&#xf1fa;" +d="M972 761q0 108 -53.5 169t-147.5 61q-63 0 -124 -30.5t-110 -84.5t-79.5 -137t-30.5 -180q0 -112 53.5 -173t150.5 -61q96 0 176 66.5t122.5 166t42.5 203.5zM1536 640q0 -111 -37 -197t-98.5 -135t-131.5 -74.5t-145 -27.5q-6 0 -15.5 -0.5t-16.5 -0.5q-95 0 -142 53 +q-28 33 -33 83q-52 -66 -131.5 -110t-173.5 -44q-161 0 -249.5 95.5t-88.5 269.5q0 157 66 290t179 210.5t246 77.5q87 0 155 -35.5t106 -99.5l2 19l11 56q1 6 5.5 12t9.5 6h118q5 0 13 -11q5 -5 3 -16l-120 -614q-5 -24 -5 -48q0 -39 12.5 -52t44.5 -13q28 1 57 5.5t73 24 +t77 50t57 89.5t24 137q0 292 -174 466t-466 174q-130 0 -248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51q228 0 405 144q11 9 24 8t21 -12l41 -49q8 -12 7 -24q-2 -13 -12 -22q-102 -83 -227.5 -128t-258.5 -45q-156 0 -298 61 +t-245 164t-164 245t-61 298t61 298t164 245t245 164t298 61q344 0 556 -212t212 -556z" /> + <glyph glyph-name="_476" unicode="&#xf1fb;" horiz-adv-x="1792" +d="M1698 1442q94 -94 94 -226.5t-94 -225.5l-225 -223l104 -104q10 -10 10 -23t-10 -23l-210 -210q-10 -10 -23 -10t-23 10l-105 105l-603 -603q-37 -37 -90 -37h-203l-256 -128l-64 64l128 256v203q0 53 37 90l603 603l-105 105q-10 10 -10 23t10 23l210 210q10 10 23 10 +t23 -10l104 -104l223 225q93 94 225.5 94t226.5 -94zM512 64l576 576l-192 192l-576 -576v-192h192z" /> + <glyph glyph-name="f1fc" unicode="&#xf1fc;" horiz-adv-x="1792" +d="M1615 1536q70 0 122.5 -46.5t52.5 -116.5q0 -63 -45 -151q-332 -629 -465 -752q-97 -91 -218 -91q-126 0 -216.5 92.5t-90.5 219.5q0 128 92 212l638 579q59 54 130 54zM706 502q39 -76 106.5 -130t150.5 -76l1 -71q4 -213 -129.5 -347t-348.5 -134q-123 0 -218 46.5 +t-152.5 127.5t-86.5 183t-29 220q7 -5 41 -30t62 -44.5t59 -36.5t46 -17q41 0 55 37q25 66 57.5 112.5t69.5 76t88 47.5t103 25.5t125 10.5z" /> + <glyph glyph-name="_478" unicode="&#xf1fd;" horiz-adv-x="1792" +d="M1792 128v-384h-1792v384q45 0 85 14t59 27.5t47 37.5q30 27 51.5 38t56.5 11q24 0 44 -7t31 -15t33 -27q29 -25 47 -38t58 -27t86 -14q45 0 85 14.5t58 27t48 37.5q21 19 32.5 27t31 15t43.5 7q35 0 56.5 -11t51.5 -38q28 -24 47 -37.5t59 -27.5t85 -14t85 14t59 27.5 +t47 37.5q30 27 51.5 38t56.5 11q34 0 55.5 -11t51.5 -38q28 -24 47 -37.5t59 -27.5t85 -14zM1792 448v-192q-24 0 -44 7t-31 15t-33 27q-29 25 -47 38t-58 27t-85 14q-46 0 -86 -14t-58 -27t-47 -38q-22 -19 -33 -27t-31 -15t-44 -7q-35 0 -56.5 11t-51.5 38q-29 25 -47 38 +t-58 27t-86 14q-45 0 -85 -14.5t-58 -27t-48 -37.5q-21 -19 -32.5 -27t-31 -15t-43.5 -7q-35 0 -56.5 11t-51.5 38q-28 24 -47 37.5t-59 27.5t-85 14q-46 0 -86 -14t-58 -27t-47 -38q-30 -27 -51.5 -38t-56.5 -11v192q0 80 56 136t136 56h64v448h256v-448h256v448h256v-448 
+h256v448h256v-448h64q80 0 136 -56t56 -136zM512 1312q0 -77 -36 -118.5t-92 -41.5q-53 0 -90.5 37.5t-37.5 90.5q0 29 9.5 51t23.5 34t31 28t31 31.5t23.5 44.5t9.5 67q38 0 83 -74t45 -150zM1024 1312q0 -77 -36 -118.5t-92 -41.5q-53 0 -90.5 37.5t-37.5 90.5 +q0 29 9.5 51t23.5 34t31 28t31 31.5t23.5 44.5t9.5 67q38 0 83 -74t45 -150zM1536 1312q0 -77 -36 -118.5t-92 -41.5q-53 0 -90.5 37.5t-37.5 90.5q0 29 9.5 51t23.5 34t31 28t31 31.5t23.5 44.5t9.5 67q38 0 83 -74t45 -150z" /> + <glyph glyph-name="_479" unicode="&#xf1fe;" horiz-adv-x="2048" +d="M2048 0v-128h-2048v1536h128v-1408h1920zM1664 1024l256 -896h-1664v576l448 576l576 -576z" /> + <glyph glyph-name="_480" unicode="&#xf200;" horiz-adv-x="1792" +d="M768 646l546 -546q-106 -108 -247.5 -168t-298.5 -60q-209 0 -385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103v-762zM955 640h773q0 -157 -60 -298.5t-168 -247.5zM1664 768h-768v768q209 0 385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="_481" unicode="&#xf201;" horiz-adv-x="2048" +d="M2048 0v-128h-2048v1536h128v-1408h1920zM1920 1248v-435q0 -21 -19.5 -29.5t-35.5 7.5l-121 121l-633 -633q-10 -10 -23 -10t-23 10l-233 233l-416 -416l-192 192l585 585q10 10 23 10t23 -10l233 -233l464 464l-121 121q-16 16 -7.5 35.5t29.5 19.5h435q14 0 23 -9 +t9 -23z" /> + <glyph glyph-name="_482" unicode="&#xf202;" horiz-adv-x="1792" +d="M1292 832q0 -6 10 -41q10 -29 25 -49.5t41 -34t44 -20t55 -16.5q325 -91 325 -332q0 -146 -105.5 -242.5t-254.5 -96.5q-59 0 -111.5 18.5t-91.5 45.5t-77 74.5t-63 87.5t-53.5 103.5t-43.5 103t-39.5 106.5t-35.5 95q-32 81 -61.5 133.5t-73.5 96.5t-104 64t-142 20 +q-96 0 -183 -55.5t-138 -144.5t-51 -185q0 -160 106.5 -279.5t263.5 -119.5q177 0 258 95q56 63 83 116l84 -152q-15 -34 -44 -70l1 -1q-131 -152 -388 -152q-147 0 -269.5 79t-190.5 207.5t-68 274.5q0 105 43.5 206t116 176.5t172 121.5t204.5 46q87 0 159 -19t123.5 -50 +t95 -80t72.5 -99t58.5 -117t50.5 -124.5t50 -130.5t55 -127q96 -200 233 -200q81 0 138.5 48.5t57.5 128.5q0 42 -19 72t-50.5 46t-72.5 31.5t-84.5 27t-87.5 34t-81 52t-65 82t-39 122.5q-3 16 -3 33q0 110 87.5 192t198.5 78q78 -3 120.5 -14.5t90.5 -53.5h-1 +q12 -11 23 -24.5t26 -36t19 -27.5l-129 -99q-26 49 -54 70v1q-23 21 -97 21q-49 0 -84 -33t-35 -83z" /> + <glyph glyph-name="_483" unicode="&#xf203;" +d="M1432 484q0 173 -234 239q-35 10 -53 16.5t-38 25t-29 46.5q0 2 -2 8.5t-3 12t-1 7.5q0 36 24.5 59.5t60.5 23.5q54 0 71 -15h-1q20 -15 39 -51l93 71q-39 54 -49 64q-33 29 -67.5 39t-85.5 10q-80 0 -142 -57.5t-62 -137.5q0 -7 2 -23q16 -96 64.5 -140t148.5 -73 +q29 -8 49 -15.5t45 -21.5t38.5 -34.5t13.5 -46.5v-5q1 -58 -40.5 -93t-100.5 -35q-97 0 -167 144q-23 47 -51.5 121.5t-48 125.5t-54 110.5t-74 95.5t-103.5 60.5t-147 24.5q-101 0 -192 -56t-144 -148t-50 -192v-1q4 -108 50.5 -199t133.5 -147.5t196 -56.5q186 0 279 110 +q20 27 31 51l-60 109q-42 -80 -99 -116t-146 -36q-115 0 -191 87t-76 204q0 105 82 189t186 84q112 0 170 -53.5t104 -172.5q8 -21 25.5 -68.5t28.5 -76.5t31.5 -74.5t38.5 -74t45.5 -62.5t55.5 -53.5t66 -33t80 -13.5q107 0 183 69.5t76 174.5zM1536 1120v-960 +q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="_484" unicode="&#xf204;" horiz-adv-x="2048" +d="M1152 640q0 104 -40.5 198.5t-109.5 163.5t-163.5 109.5t-198.5 40.5t-198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5zM1920 640q0 104 -40.5 198.5 +t-109.5 163.5t-163.5 109.5t-198.5 40.5h-386q119 -90 188.5 -224t69.5 -288t-69.5 -288t-188.5 -224h386q104 0 198.5 40.5t163.5 
109.5t109.5 163.5t40.5 198.5zM2048 640q0 -130 -51 -248.5t-136.5 -204t-204 -136.5t-248.5 -51h-768q-130 0 -248.5 51t-204 136.5 +t-136.5 204t-51 248.5t51 248.5t136.5 204t204 136.5t248.5 51h768q130 0 248.5 -51t204 -136.5t136.5 -204t51 -248.5z" /> + <glyph glyph-name="_485" unicode="&#xf205;" horiz-adv-x="2048" +d="M0 640q0 130 51 248.5t136.5 204t204 136.5t248.5 51h768q130 0 248.5 -51t204 -136.5t136.5 -204t51 -248.5t-51 -248.5t-136.5 -204t-204 -136.5t-248.5 -51h-768q-130 0 -248.5 51t-204 136.5t-136.5 204t-51 248.5zM1408 128q104 0 198.5 40.5t163.5 109.5 +t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5t-163.5 109.5t-198.5 40.5t-198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5z" /> + <glyph glyph-name="_486" unicode="&#xf206;" horiz-adv-x="2304" +d="M762 384h-314q-40 0 -57.5 35t6.5 67l188 251q-65 31 -137 31q-132 0 -226 -94t-94 -226t94 -226t226 -94q115 0 203 72.5t111 183.5zM576 512h186q-18 85 -75 148zM1056 512l288 384h-480l-99 -132q105 -103 126 -252h165zM2176 448q0 132 -94 226t-226 94 +q-60 0 -121 -24l174 -260q15 -23 10 -49t-27 -40q-15 -11 -36 -11q-35 0 -53 29l-174 260q-93 -95 -93 -225q0 -132 94 -226t226 -94t226 94t94 226zM2304 448q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 97 39.5 183.5t109.5 149.5l-65 98l-353 -469 +q-18 -26 -51 -26h-197q-23 -164 -149 -274t-294 -110q-185 0 -316.5 131.5t-131.5 316.5t131.5 316.5t316.5 131.5q114 0 215 -55l137 183h-224q-26 0 -45 19t-19 45t19 45t45 19h384v-128h435l-85 128h-222q-26 0 -45 19t-19 45t19 45t45 19h256q33 0 53 -28l267 -400 +q91 44 192 44q185 0 316.5 -131.5t131.5 -316.5z" /> + <glyph glyph-name="_487" unicode="&#xf207;" +d="M384 320q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1408 320q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1362 716l-72 384q-5 23 -22.5 37.5t-40.5 14.5 +h-918q-23 0 -40.5 -14.5t-22.5 -37.5l-72 -384q-5 -30 14 -53t49 -23h1062q30 0 49 23t14 53zM1136 1328q0 20 -14 34t-34 14h-640q-20 0 -34 -14t-14 -34t14 -34t34 -14h640q20 0 34 14t14 34zM1536 603v-603h-128v-128q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 +t-37.5 90.5v128h-768v-128q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5v128h-128v603q0 112 25 223l103 454q9 78 97.5 137t230 89t312.5 30t312.5 -30t230 -89t97.5 -137l105 -454q23 -102 23 -223z" /> + <glyph glyph-name="_488" unicode="&#xf208;" horiz-adv-x="2048" +d="M1463 704q0 -35 -25 -60.5t-61 -25.5h-702q-36 0 -61 25.5t-25 60.5t25 60.5t61 25.5h702q36 0 61 -25.5t25 -60.5zM1677 704q0 86 -23 170h-982q-36 0 -61 25t-25 60q0 36 25 61t61 25h908q-88 143 -235 227t-320 84q-177 0 -327.5 -87.5t-238 -237.5t-87.5 -327 +q0 -86 23 -170h982q36 0 61 -25t25 -60q0 -36 -25 -61t-61 -25h-908q88 -143 235.5 -227t320.5 -84q132 0 253 51.5t208 139t139 208t52 253.5zM2048 959q0 -35 -25 -60t-61 -25h-131q17 -85 17 -170q0 -167 -65.5 -319.5t-175.5 -263t-262.5 -176t-319.5 -65.5 +q-246 0 -448.5 133t-301.5 350h-189q-36 0 -61 25t-25 61q0 35 25 60t61 25h132q-17 85 -17 170q0 167 65.5 319.5t175.5 263t262.5 176t320.5 65.5q245 0 447.5 -133t301.5 -350h188q36 0 61 -25t25 -61z" /> + <glyph glyph-name="_489" unicode="&#xf209;" horiz-adv-x="1280" +d="M953 1158l-114 -328l117 -21q165 451 165 518q0 56 -38 56q-57 0 -130 -225zM654 471l33 -88q37 42 71 67l-33 5.5t-38.5 7t-32.5 8.5zM362 1367q0 -98 159 -521q17 10 49 10q15 0 75 -5l-121 351q-75 220 -123 220q-19 0 -29 -17.5t-10 -37.5zM283 608q0 -36 51.5 -119 +t117.5 -153t100 -70q14 0 25.5 13t11.5 27q0 24 -32 102q-13 32 -32 72t-47.5 89t-61.5 81t-62 32q-20 0 -45.5 -27t-25.5 -47zM125 
273q0 -41 25 -104q59 -145 183.5 -227t281.5 -82q227 0 382 170q152 169 152 427q0 43 -1 67t-11.5 62t-30.5 56q-56 49 -211.5 75.5 +t-270.5 26.5q-37 0 -49 -11q-12 -5 -12 -35q0 -34 21.5 -60t55.5 -40t77.5 -23.5t87.5 -11.5t85 -4t70 0h23q24 0 40 -19q15 -19 19 -55q-28 -28 -96 -54q-61 -22 -93 -46q-64 -46 -108.5 -114t-44.5 -137q0 -31 18.5 -88.5t18.5 -87.5l-3 -12q-4 -12 -4 -14 +q-137 10 -146 216q-8 -2 -41 -2q2 -7 2 -21q0 -53 -40.5 -89.5t-94.5 -36.5q-82 0 -166.5 78t-84.5 159q0 34 33 67q52 -64 60 -76q77 -104 133 -104q12 0 26.5 8.5t14.5 20.5q0 34 -87.5 145t-116.5 111q-43 0 -70 -44.5t-27 -90.5zM11 264q0 101 42.5 163t136.5 88 +q-28 74 -28 104q0 62 61 123t122 61q29 0 70 -15q-163 462 -163 567q0 80 41 130.5t119 50.5q131 0 325 -581q6 -17 8 -23q6 16 29 79.5t43.5 118.5t54 127.5t64.5 123t70.5 86.5t76.5 36q71 0 112 -49t41 -122q0 -108 -159 -550q61 -15 100.5 -46t58.5 -78t26 -93.5 +t7 -110.5q0 -150 -47 -280t-132 -225t-211 -150t-278 -55q-111 0 -223 42q-149 57 -258 191.5t-109 286.5z" /> + <glyph glyph-name="_490" unicode="&#xf20a;" horiz-adv-x="2048" +d="M785 528h207q-14 -158 -98.5 -248.5t-214.5 -90.5q-162 0 -254.5 116t-92.5 316q0 194 93 311.5t233 117.5q148 0 232 -87t97 -247h-203q-5 64 -35.5 99t-81.5 35q-57 0 -88.5 -60.5t-31.5 -177.5q0 -48 5 -84t18 -69.5t40 -51.5t66 -18q95 0 109 139zM1497 528h206 +q-14 -158 -98 -248.5t-214 -90.5q-162 0 -254.5 116t-92.5 316q0 194 93 311.5t233 117.5q148 0 232 -87t97 -247h-204q-4 64 -35 99t-81 35q-57 0 -88.5 -60.5t-31.5 -177.5q0 -48 5 -84t18 -69.5t39.5 -51.5t65.5 -18q49 0 76.5 38t33.5 101zM1856 647q0 207 -15.5 307 +t-60.5 161q-6 8 -13.5 14t-21.5 15t-16 11q-86 63 -697 63q-625 0 -710 -63q-5 -4 -17.5 -11.5t-21 -14t-14.5 -14.5q-45 -60 -60 -159.5t-15 -308.5q0 -208 15 -307.5t60 -160.5q6 -8 15 -15t20.5 -14t17.5 -12q44 -33 239.5 -49t470.5 -16q610 0 697 65q5 4 17 11t20.5 14 +t13.5 16q46 60 61 159t15 309zM2048 1408v-1536h-2048v1536h2048z" /> + <glyph glyph-name="_491" unicode="&#xf20b;" +d="M992 912v-496q0 -14 -9 -23t-23 -9h-160q-14 0 -23 9t-9 23v496q0 112 -80 192t-192 80h-272v-1152q0 -14 -9 -23t-23 -9h-160q-14 0 -23 9t-9 23v1344q0 14 9 23t23 9h464q135 0 249 -66.5t180.5 -180.5t66.5 -249zM1376 1376v-880q0 -135 -66.5 -249t-180.5 -180.5 +t-249 -66.5h-464q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h160q14 0 23 -9t9 -23v-768h272q112 0 192 80t80 192v880q0 14 9 23t23 9h160q14 0 23 -9t9 -23z" /> + <glyph glyph-name="_492" unicode="&#xf20c;" +d="M1311 694v-114q0 -24 -13.5 -38t-37.5 -14h-202q-24 0 -38 14t-14 38v114q0 24 14 38t38 14h202q24 0 37.5 -14t13.5 -38zM821 464v250q0 53 -32.5 85.5t-85.5 32.5h-133q-68 0 -96 -52q-28 52 -96 52h-130q-53 0 -85.5 -32.5t-32.5 -85.5v-250q0 -22 21 -22h55 +q22 0 22 22v230q0 24 13.5 38t38.5 14h94q24 0 38 -14t14 -38v-230q0 -22 21 -22h54q22 0 22 22v230q0 24 14 38t38 14h97q24 0 37.5 -14t13.5 -38v-230q0 -22 22 -22h55q21 0 21 22zM1410 560v154q0 53 -33 85.5t-86 32.5h-264q-53 0 -86 -32.5t-33 -85.5v-410 +q0 -21 22 -21h55q21 0 21 21v180q31 -42 94 -42h191q53 0 86 32.5t33 85.5zM1536 1176v-1072q0 -96 -68 -164t-164 -68h-1072q-96 0 -164 68t-68 164v1072q0 96 68 164t164 68h1072q96 0 164 -68t68 -164z" /> + <glyph glyph-name="_493" unicode="&#xf20d;" +d="M915 450h-294l147 551zM1001 128h311l-324 1024h-440l-324 -1024h311l383 314zM1536 1120v-960q0 -118 -85 -203t-203 -85h-960q-118 0 -203 85t-85 203v960q0 118 85 203t203 85h960q118 0 203 -85t85 -203z" /> + <glyph glyph-name="_494" unicode="&#xf20e;" horiz-adv-x="2048" +d="M2048 641q0 -21 -13 -36.5t-33 -19.5l-205 -356q3 -9 3 -18q0 -20 -12.5 -35.5t-32.5 -19.5l-193 -337q3 -8 3 -16q0 -23 -16.5 -40t-40.5 -17q-25 0 -41 18h-400q-17 -20 -43 -20t-43 20h-399q-17 -20 -43 
-20q-23 0 -40 16.5t-17 40.5q0 8 4 20l-193 335 +q-20 4 -32.5 19.5t-12.5 35.5q0 9 3 18l-206 356q-20 5 -32.5 20.5t-12.5 35.5q0 21 13.5 36.5t33.5 19.5l199 344q0 1 -0.5 3t-0.5 3q0 36 34 51l209 363q-4 10 -4 18q0 24 17 40.5t40 16.5q26 0 44 -21h396q16 21 43 21t43 -21h398q18 21 44 21q23 0 40 -16.5t17 -40.5 +q0 -6 -4 -18l207 -358q23 -1 39 -17.5t16 -38.5q0 -13 -7 -27l187 -324q19 -4 31.5 -19.5t12.5 -35.5zM1063 -158h389l-342 354h-143l-342 -354h360q18 16 39 16t39 -16zM112 654q1 -4 1 -13q0 -10 -2 -15l208 -360l15 -6l188 199v347l-187 194q-13 -8 -29 -10zM986 1438 +h-388l190 -200l554 200h-280q-16 -16 -38 -16t-38 16zM1689 226q1 6 5 11l-64 68l-17 -79h76zM1583 226l22 105l-252 266l-296 -307l63 -64h463zM1495 -142l16 28l65 310h-427l333 -343q8 4 13 5zM578 -158h5l342 354h-373v-335l4 -6q14 -5 22 -13zM552 226h402l64 66 +l-309 321l-157 -166v-221zM359 226h163v189l-168 -177q4 -8 5 -12zM358 1051q0 -1 0.5 -2t0.5 -2q0 -16 -8 -29l171 -177v269zM552 1121v-311l153 -157l297 314l-223 236zM556 1425l-4 -8v-264l205 74l-191 201q-6 -2 -10 -3zM1447 1438h-16l-621 -224l213 -225zM1023 946 +l-297 -315l311 -319l296 307zM688 634l-136 141v-284zM1038 270l-42 -44h85zM1374 618l238 -251l132 624l-3 5l-1 1zM1718 1018q-8 13 -8 29v2l-216 376q-5 1 -13 5l-437 -463l310 -327zM522 1142v223l-163 -282zM522 196h-163l163 -283v283zM1607 196l-48 -227l130 227h-82 +zM1729 266l207 361q-2 10 -2 14q0 1 3 16l-171 296l-129 -612l77 -82q5 3 15 7z" /> + <glyph glyph-name="f210" unicode="&#xf210;" +d="M0 856q0 131 91.5 226.5t222.5 95.5h742l352 358v-1470q0 -132 -91.5 -227t-222.5 -95h-780q-131 0 -222.5 95t-91.5 227v790zM1232 102l-176 180v425q0 46 -32 79t-78 33h-484q-46 0 -78 -33t-32 -79v-492q0 -46 32.5 -79.5t77.5 -33.5h770z" /> + <glyph glyph-name="_496" unicode="&#xf211;" +d="M934 1386q-317 -121 -556 -362.5t-358 -560.5q-20 89 -20 176q0 208 102.5 384.5t278.5 279t384 102.5q82 0 169 -19zM1203 1267q93 -65 164 -155q-389 -113 -674.5 -400.5t-396.5 -676.5q-93 72 -155 162q112 386 395 671t667 399zM470 -67q115 356 379.5 622t619.5 384 +q40 -92 54 -195q-292 -120 -516 -345t-343 -518q-103 14 -194 52zM1536 -125q-193 50 -367 115q-135 -84 -290 -107q109 205 274 370.5t369 275.5q-21 -152 -101 -284q65 -175 115 -370z" /> + <glyph glyph-name="f212" unicode="&#xf212;" horiz-adv-x="2048" +d="M1893 1144l155 -1272q-131 0 -257 57q-200 91 -393 91q-226 0 -374 -148q-148 148 -374 148q-193 0 -393 -91q-128 -57 -252 -57h-5l155 1272q224 127 482 127q233 0 387 -106q154 106 387 106q258 0 482 -127zM1398 157q129 0 232 -28.5t260 -93.5l-124 1021 +q-171 78 -368 78q-224 0 -374 -141q-150 141 -374 141q-197 0 -368 -78l-124 -1021q105 43 165.5 65t148.5 39.5t178 17.5q202 0 374 -108q172 108 374 108zM1438 191l-55 907q-211 -4 -359 -155q-152 155 -374 155q-176 0 -336 -66l-114 -941q124 51 228.5 76t221.5 25 +q209 0 374 -102q172 107 374 102z" /> + <glyph glyph-name="_498" unicode="&#xf213;" horiz-adv-x="2048" +d="M1500 165v733q0 21 -15 36t-35 15h-93q-20 0 -35 -15t-15 -36v-733q0 -20 15 -35t35 -15h93q20 0 35 15t15 35zM1216 165v531q0 20 -15 35t-35 15h-101q-20 0 -35 -15t-15 -35v-531q0 -20 15 -35t35 -15h101q20 0 35 15t15 35zM924 165v429q0 20 -15 35t-35 15h-101 +q-20 0 -35 -15t-15 -35v-429q0 -20 15 -35t35 -15h101q20 0 35 15t15 35zM632 165v362q0 20 -15 35t-35 15h-101q-20 0 -35 -15t-15 -35v-362q0 -20 15 -35t35 -15h101q20 0 35 15t15 35zM2048 311q0 -166 -118 -284t-284 -118h-1244q-166 0 -284 118t-118 284 +q0 116 63 214.5t168 148.5q-10 34 -10 73q0 113 80.5 193.5t193.5 80.5q102 0 180 -67q45 183 194 300t338 117q149 0 275 -73.5t199.5 -199.5t73.5 -275q0 -66 -14 -122q135 -33 221 -142.5t86 -247.5z" /> + <glyph glyph-name="_499" unicode="&#xf214;" 
+d="M0 1536h1536v-1392l-776 -338l-760 338v1392zM1436 209v926h-1336v-926l661 -294zM1436 1235v201h-1336v-201h1336zM181 937v-115h-37v115h37zM181 789v-115h-37v115h37zM181 641v-115h-37v115h37zM181 493v-115h-37v115h37zM181 345v-115h-37v115h37zM207 202l15 34 +l105 -47l-15 -33zM343 142l15 34l105 -46l-15 -34zM478 82l15 34l105 -46l-15 -34zM614 23l15 33l104 -46l-15 -34zM797 10l105 46l15 -33l-105 -47zM932 70l105 46l15 -34l-105 -46zM1068 130l105 46l15 -34l-105 -46zM1203 189l105 47l15 -34l-105 -46zM259 1389v-36h-114 +v36h114zM421 1389v-36h-115v36h115zM583 1389v-36h-115v36h115zM744 1389v-36h-114v36h114zM906 1389v-36h-114v36h114zM1068 1389v-36h-115v36h115zM1230 1389v-36h-115v36h115zM1391 1389v-36h-114v36h114zM181 1049v-79h-37v115h115v-36h-78zM421 1085v-36h-115v36h115z +M583 1085v-36h-115v36h115zM744 1085v-36h-114v36h114zM906 1085v-36h-114v36h114zM1068 1085v-36h-115v36h115zM1230 1085v-36h-115v36h115zM1355 970v79h-78v36h115v-115h-37zM1355 822v115h37v-115h-37zM1355 674v115h37v-115h-37zM1355 526v115h37v-115h-37zM1355 378 +v115h37v-115h-37zM1355 230v115h37v-115h-37zM760 265q-129 0 -221 91.5t-92 221.5q0 129 92 221t221 92q130 0 221.5 -92t91.5 -221q0 -130 -91.5 -221.5t-221.5 -91.5zM595 646q0 -36 19.5 -56.5t49.5 -25t64 -7t64 -2t49.5 -9t19.5 -30.5q0 -49 -112 -49q-97 0 -123 51 +h-3l-31 -63q67 -42 162 -42q29 0 56.5 5t55.5 16t45.5 33t17.5 53q0 46 -27.5 69.5t-67.5 27t-79.5 3t-67 5t-27.5 25.5q0 21 20.5 33t40.5 15t41 3q34 0 70.5 -11t51.5 -34h3l30 58q-3 1 -21 8.5t-22.5 9t-19.5 7t-22 7t-20 4.5t-24 4t-23 1q-29 0 -56.5 -5t-54 -16.5 +t-43 -34t-16.5 -53.5z" /> + <glyph glyph-name="_500" unicode="&#xf215;" horiz-adv-x="2048" +d="M863 504q0 112 -79.5 191.5t-191.5 79.5t-191 -79.5t-79 -191.5t79 -191t191 -79t191.5 79t79.5 191zM1726 505q0 112 -79 191t-191 79t-191.5 -79t-79.5 -191q0 -113 79.5 -192t191.5 -79t191 79.5t79 191.5zM2048 1314v-1348q0 -44 -31.5 -75.5t-76.5 -31.5h-1832 +q-45 0 -76.5 31.5t-31.5 75.5v1348q0 44 31.5 75.5t76.5 31.5h431q44 0 76 -31.5t32 -75.5v-161h754v161q0 44 32 75.5t76 31.5h431q45 0 76.5 -31.5t31.5 -75.5z" /> + <glyph glyph-name="_501" unicode="&#xf216;" horiz-adv-x="2048" +d="M1430 953zM1690 749q148 0 253 -98.5t105 -244.5q0 -157 -109 -261.5t-267 -104.5q-85 0 -162 27.5t-138 73.5t-118 106t-109 126t-103.5 132.5t-108.5 126.5t-117 106t-136 73.5t-159 27.5q-154 0 -251.5 -91.5t-97.5 -244.5q0 -157 104 -250t263 -93q100 0 208 37.5 +t193 98.5q5 4 21 18.5t30 24t22 9.5q14 0 24.5 -10.5t10.5 -24.5q0 -24 -60 -77q-101 -88 -234.5 -142t-260.5 -54q-133 0 -245.5 58t-180 165t-67.5 241q0 205 141.5 341t347.5 136q120 0 226.5 -43.5t185.5 -113t151.5 -153t139 -167.5t133.5 -153.5t149.5 -113 +t172.5 -43.5q102 0 168.5 61.5t66.5 162.5q0 95 -64.5 159t-159.5 64q-30 0 -81.5 -18.5t-68.5 -18.5q-20 0 -35.5 15t-15.5 35q0 18 8.5 57t8.5 59q0 159 -107.5 263t-266.5 104q-58 0 -111.5 -18.5t-84 -40.5t-55.5 -40.5t-33 -18.5q-15 0 -25.5 10.5t-10.5 25.5 +q0 19 25 46q59 67 147 103.5t182 36.5q191 0 318 -125.5t127 -315.5q0 -37 -4 -66q57 15 115 15z" /> + <glyph glyph-name="_502" unicode="&#xf217;" horiz-adv-x="1664" +d="M1216 832q0 26 -19 45t-45 19h-128v128q0 26 -19 45t-45 19t-45 -19t-19 -45v-128h-128q-26 0 -45 -19t-19 -45t19 -45t45 -19h128v-128q0 -26 19 -45t45 -19t45 19t19 45v128h128q26 0 45 19t19 45zM640 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 +t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1536 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1664 1088v-512q0 -24 -16 -42.5t-41 -21.5l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920 +q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024q-26 0 -45 19t-19 
45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45t19 45t45 19h256q16 0 28.5 -6.5t20 -15.5t13 -24.5t7.5 -26.5t5.5 -29.5t4.5 -25.5h1201q26 0 45 -19t19 -45z" /> + <glyph glyph-name="_503" unicode="&#xf218;" horiz-adv-x="1664" +d="M1280 832q0 26 -19 45t-45 19t-45 -19l-147 -146v293q0 26 -19 45t-45 19t-45 -19t-19 -45v-293l-147 146q-19 19 -45 19t-45 -19t-19 -45t19 -45l256 -256q19 -19 45 -19t45 19l256 256q19 19 19 45zM640 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 +t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1536 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1664 1088v-512q0 -24 -16 -42.5t-41 -21.5l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920 +q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024q-26 0 -45 19t-19 45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45t19 45t45 19h256q16 0 28.5 -6.5t20 -15.5t13 -24.5t7.5 -26.5t5.5 -29.5t4.5 -25.5h1201q26 0 45 -19t19 -45z" /> + <glyph glyph-name="_504" unicode="&#xf219;" horiz-adv-x="2048" +d="M212 768l623 -665l-300 665h-323zM1024 -4l349 772h-698zM538 896l204 384h-262l-288 -384h346zM1213 103l623 665h-323zM683 896h682l-204 384h-274zM1510 896h346l-288 384h-262zM1651 1382l384 -512q14 -18 13 -41.5t-17 -40.5l-960 -1024q-18 -20 -47 -20t-47 20 +l-960 1024q-16 17 -17 40.5t13 41.5l384 512q18 26 51 26h1152q33 0 51 -26z" /> + <glyph glyph-name="_505" unicode="&#xf21a;" horiz-adv-x="2048" +d="M1811 -19q19 19 45 19t45 -19l128 -128l-90 -90l-83 83l-83 -83q-18 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83 +q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-128 128l90 90l83 -83l83 83q19 19 45 19t45 -19l83 -83l83 83q19 19 45 19t45 -19l83 -83l83 83q19 19 45 19t45 -19l83 -83l83 83q19 19 45 19t45 -19l83 -83l83 83q19 19 45 19t45 -19l83 -83l83 83 +q19 19 45 19t45 -19l83 -83zM237 19q-19 -19 -45 -19t-45 19l-128 128l90 90l83 -82l83 82q19 19 45 19t45 -19l83 -82l64 64v293l-210 314q-17 26 -7 56.5t40 40.5l177 58v299h128v128h256v128h256v-128h256v-128h128v-299l177 -58q30 -10 40 -40.5t-7 -56.5l-210 -314 +v-293l19 18q19 19 45 19t45 -19l83 -82l83 82q19 19 45 19t45 -19l128 -128l-90 -90l-83 83l-83 -83q-18 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83l-83 -83 +q-19 -19 -45 -19t-45 19l-83 83l-83 -83q-19 -19 -45 -19t-45 19l-83 83zM640 1152v-128l384 128l384 -128v128h-128v128h-512v-128h-128z" /> + <glyph glyph-name="_506" unicode="&#xf21b;" +d="M576 0l96 448l-96 128l-128 64zM832 0l128 640l-128 -64l-96 -128zM992 1010q-2 4 -4 6q-10 8 -96 8q-70 0 -167 -19q-7 -2 -21 -2t-21 2q-97 19 -167 19q-86 0 -96 -8q-2 -2 -4 -6q2 -18 4 -27q2 -3 7.5 -6.5t7.5 -10.5q2 -4 7.5 -20.5t7 -20.5t7.5 -17t8.5 -17t9 -14 +t12 -13.5t14 -9.5t17.5 -8t20.5 -4t24.5 -2q36 0 59 12.5t32.5 30t14.5 34.5t11.5 29.5t17.5 12.5h12q11 0 17.5 -12.5t11.5 -29.5t14.5 -34.5t32.5 -30t59 -12.5q13 0 24.5 2t20.5 4t17.5 8t14 9.5t12 13.5t9 14t8.5 17t7.5 17t7 20.5t7.5 20.5q2 7 7.5 10.5t7.5 6.5 +q2 9 4 27zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 61 4.5 118t19 125.5t37.5 123.5t63.5 103.5t93.5 74.5l-90 220h214q-22 64 -22 128q0 12 2 32q-194 40 -194 96q0 57 210 99q17 62 51.5 134t70.5 114q32 37 76 37q30 0 84 -31t84 -31t84 31 +t84 31q44 0 76 -37q36 -42 70.5 -114t51.5 -134q210 -42 210 -99q0 -56 -194 -96q7 -81 -20 -160h214l-82 -225q63 -33 107.5 -96.5t65.5 -143.5t29 -151.5t8 -148.5z" /> + <glyph 
glyph-name="_507" unicode="&#xf21c;" horiz-adv-x="2304" +d="M2301 500q12 -103 -22 -198.5t-99 -163.5t-158.5 -106t-196.5 -31q-161 11 -279.5 125t-134.5 274q-12 111 27.5 210.5t118.5 170.5l-71 107q-96 -80 -151 -194t-55 -244q0 -27 -18.5 -46.5t-45.5 -19.5h-256h-69q-23 -164 -149 -274t-294 -110q-185 0 -316.5 131.5 +t-131.5 316.5t131.5 316.5t316.5 131.5q76 0 152 -27l24 45q-123 110 -304 110h-64q-26 0 -45 19t-19 45t19 45t45 19h128q78 0 145 -13.5t116.5 -38.5t71.5 -39.5t51 -36.5h512h115l-85 128h-222q-30 0 -49 22.5t-14 52.5q4 23 23 38t43 15h253q33 0 53 -28l70 -105 +l114 114q19 19 46 19h101q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-179l115 -172q131 63 275 36q143 -26 244 -134.5t118 -253.5zM448 128q115 0 203 72.5t111 183.5h-314q-35 0 -55 31q-18 32 -1 63l147 277q-47 13 -91 13q-132 0 -226 -94t-94 -226t94 -226 +t226 -94zM1856 128q132 0 226 94t94 226t-94 226t-226 94q-60 0 -121 -24l174 -260q15 -23 10 -49t-27 -40q-15 -11 -36 -11q-35 0 -53 29l-174 260q-93 -95 -93 -225q0 -132 94 -226t226 -94z" /> + <glyph glyph-name="_508" unicode="&#xf21d;" +d="M1408 0q0 -63 -61.5 -113.5t-164 -81t-225 -46t-253.5 -15.5t-253.5 15.5t-225 46t-164 81t-61.5 113.5q0 49 33 88.5t91 66.5t118 44.5t131 29.5q26 5 48 -10.5t26 -41.5q5 -26 -10.5 -48t-41.5 -26q-58 -10 -106 -23.5t-76.5 -25.5t-48.5 -23.5t-27.5 -19.5t-8.5 -12 +q3 -11 27 -26.5t73 -33t114 -32.5t160.5 -25t201.5 -10t201.5 10t160.5 25t114 33t73 33.5t27 27.5q-1 4 -8.5 11t-27.5 19t-48.5 23.5t-76.5 25t-106 23.5q-26 4 -41.5 26t-10.5 48q4 26 26 41.5t48 10.5q71 -12 131 -29.5t118 -44.5t91 -66.5t33 -88.5zM1024 896v-384 +q0 -26 -19 -45t-45 -19h-64v-384q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v384h-64q-26 0 -45 19t-19 45v384q0 53 37.5 90.5t90.5 37.5h384q53 0 90.5 -37.5t37.5 -90.5zM928 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5 +t158.5 -65.5t65.5 -158.5z" /> + <glyph glyph-name="_509" unicode="&#xf21e;" horiz-adv-x="1792" +d="M1280 512h305q-5 -6 -10 -10.5t-9 -7.5l-3 -4l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-5 2 -21 20h369q22 0 39.5 13.5t22.5 34.5l70 281l190 -667q6 -20 23 -33t39 -13q21 0 38 13t23 33l146 485l56 -112q18 -35 57 -35zM1792 940q0 -145 -103 -300h-369l-111 221 +q-8 17 -25.5 27t-36.5 8q-45 -5 -56 -46l-129 -430l-196 686q-6 20 -23.5 33t-39.5 13t-39 -13.5t-22 -34.5l-116 -464h-423q-103 155 -103 300q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124 +t127 -344z" /> + <glyph glyph-name="venus" unicode="&#xf221;" horiz-adv-x="1280" +d="M1152 960q0 -221 -147.5 -384.5t-364.5 -187.5v-260h224q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v260q-150 16 -271.5 103t-186 224t-52.5 292 +q11 134 80.5 249t182 188t245.5 88q170 19 319 -54t236 -212t87 -306zM128 960q0 -185 131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5z" /> + <glyph glyph-name="_511" unicode="&#xf222;" +d="M1472 1408q26 0 45 -19t19 -45v-416q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v262l-382 -383q126 -156 126 -359q0 -117 -45.5 -223.5t-123 -184t-184 -123t-223.5 -45.5t-223.5 45.5t-184 123t-123 184t-45.5 223.5t45.5 223.5t123 184t184 123t223.5 45.5 +q203 0 359 -126l382 382h-261q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h416zM576 0q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_512" unicode="&#xf223;" horiz-adv-x="1280" +d="M830 1220q145 -72 233.5 -210.5t88.5 -305.5q0 -221 -147.5 -384.5t-364.5 
-187.5v-132h96q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-96v-96q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v96h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h96v132q-217 24 -364.5 187.5 +t-147.5 384.5q0 167 88.5 305.5t233.5 210.5q-165 96 -228 273q-6 16 3.5 29.5t26.5 13.5h69q21 0 29 -20q44 -106 140 -171t214 -65t214 65t140 171q8 20 37 20h61q17 0 26.5 -13.5t3.5 -29.5q-63 -177 -228 -273zM576 256q185 0 316.5 131.5t131.5 316.5t-131.5 316.5 +t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_513" unicode="&#xf224;" +d="M1024 1504q0 14 9 23t23 9h288q26 0 45 -19t19 -45v-288q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v134l-254 -255q126 -158 126 -359q0 -221 -147.5 -384.5t-364.5 -187.5v-132h96q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-96v-96q0 -14 -9 -23t-23 -9h-64 +q-14 0 -23 9t-9 23v96h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h96v132q-149 16 -270.5 103t-186.5 223.5t-53 291.5q16 204 160 353.5t347 172.5q118 14 228 -19t198 -103l255 254h-134q-14 0 -23 9t-9 23v64zM576 256q185 0 316.5 131.5t131.5 316.5t-131.5 316.5 +t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_514" unicode="&#xf225;" horiz-adv-x="1792" +d="M1280 1504q0 14 9 23t23 9h288q26 0 45 -19t19 -45v-288q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v134l-254 -255q126 -158 126 -359q0 -221 -147.5 -384.5t-364.5 -187.5v-132h96q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-96v-96q0 -14 -9 -23t-23 -9h-64 +q-14 0 -23 9t-9 23v96h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h96v132q-217 24 -364.5 187.5t-147.5 384.5q0 201 126 359l-52 53l-101 -111q-9 -10 -22 -10.5t-23 7.5l-48 44q-10 8 -10.5 21.5t8.5 23.5l105 115l-111 112v-134q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9 +t-9 23v288q0 26 19 45t45 19h288q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-133l106 -107l86 94q9 10 22 10.5t23 -7.5l48 -44q10 -8 10.5 -21.5t-8.5 -23.5l-90 -99l57 -56q158 126 359 126t359 -126l255 254h-134q-14 0 -23 9t-9 23v64zM832 256q185 0 316.5 131.5 +t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_515" unicode="&#xf226;" horiz-adv-x="1792" +d="M1790 1007q12 -155 -52.5 -292t-186 -224t-271.5 -103v-260h224q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-512v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23 +t23 9h224v260q-150 16 -271.5 103t-186 224t-52.5 292q17 206 164.5 356.5t352.5 169.5q206 21 377 -94q171 115 377 94q205 -19 352.5 -169.5t164.5 -356.5zM896 647q128 131 128 313t-128 313q-128 -131 -128 -313t128 -313zM576 512q115 0 218 57q-154 165 -154 391 +q0 224 154 391q-103 57 -218 57q-185 0 -316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5zM1152 128v260q-137 15 -256 94q-119 -79 -256 -94v-260h512zM1216 512q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5q-115 0 -218 -57q154 -167 154 -391 +q0 -226 -154 -391q103 -57 218 -57z" /> + <glyph glyph-name="_516" unicode="&#xf227;" horiz-adv-x="1920" +d="M1536 1120q0 14 9 23t23 9h288q26 0 45 -19t19 -45v-288q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v134l-254 -255q76 -95 107.5 -214t9.5 -247q-31 -182 -166 -312t-318 -156q-210 -29 -384.5 80t-241.5 300q-117 6 -221 57.5t-177.5 133t-113.5 192.5t-32 230 +q9 135 78 252t182 191.5t248 89.5q118 14 227.5 -19t198.5 -103l255 254h-134q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h288q26 0 45 -19t19 -45v-288q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v134l-254 -255q59 -74 93 -169q182 -9 328 -124l255 254h-134q-14 0 -23 9 +t-9 23v64zM1024 704q0 20 -4 58q-162 -25 -271 -150t-109 -292q0 -20 4 -58q162 25 271 150t109 
292zM128 704q0 -168 111 -294t276 -149q-3 29 -3 59q0 210 135 369.5t338 196.5q-53 120 -163.5 193t-245.5 73q-185 0 -316.5 -131.5t-131.5 -316.5zM1088 -128 +q185 0 316.5 131.5t131.5 316.5q0 168 -111 294t-276 149q3 -28 3 -59q0 -210 -135 -369.5t-338 -196.5q53 -120 163.5 -193t245.5 -73z" /> + <glyph glyph-name="_517" unicode="&#xf228;" horiz-adv-x="2048" +d="M1664 1504q0 14 9 23t23 9h288q26 0 45 -19t19 -45v-288q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v134l-254 -255q76 -95 107.5 -214t9.5 -247q-32 -180 -164.5 -310t-313.5 -157q-223 -34 -409 90q-117 -78 -256 -93v-132h96q14 0 23 -9t9 -23v-64q0 -14 -9 -23 +t-23 -9h-96v-96q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v96h-96q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h96v132q-155 17 -279.5 109.5t-187 237.5t-39.5 307q25 187 159.5 322.5t320.5 164.5q224 34 410 -90q146 97 320 97q201 0 359 -126l255 254h-134q-14 0 -23 9 +t-9 23v64zM896 391q128 131 128 313t-128 313q-128 -131 -128 -313t128 -313zM128 704q0 -185 131.5 -316.5t316.5 -131.5q117 0 218 57q-154 167 -154 391t154 391q-101 57 -218 57q-185 0 -316.5 -131.5t-131.5 -316.5zM1216 256q185 0 316.5 131.5t131.5 316.5 +t-131.5 316.5t-316.5 131.5q-117 0 -218 -57q154 -167 154 -391t-154 -391q101 -57 218 -57z" /> + <glyph glyph-name="_518" unicode="&#xf229;" +d="M1472 1408q26 0 45 -19t19 -45v-416q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v262l-213 -214l140 -140q9 -10 9 -23t-9 -22l-46 -46q-9 -9 -22 -9t-23 9l-140 141l-78 -79q126 -156 126 -359q0 -117 -45.5 -223.5t-123 -184t-184 -123t-223.5 -45.5t-223.5 45.5 +t-184 123t-123 184t-45.5 223.5t45.5 223.5t123 184t184 123t223.5 45.5q203 0 359 -126l78 78l-172 172q-9 10 -9 23t9 22l46 46q9 9 22 9t23 -9l172 -172l213 213h-261q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h416zM576 0q185 0 316.5 131.5t131.5 316.5t-131.5 316.5 +t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_519" unicode="&#xf22a;" horiz-adv-x="1280" +d="M640 892q217 -24 364.5 -187.5t147.5 -384.5q0 -167 -87 -306t-236 -212t-319 -54q-133 15 -245.5 88t-182 188t-80.5 249q-12 155 52.5 292t186 224t271.5 103v132h-160q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h160v165l-92 -92q-10 -9 -23 -9t-22 9l-46 46q-9 9 -9 22 +t9 23l202 201q19 19 45 19t45 -19l202 -201q9 -10 9 -23t-9 -22l-46 -46q-9 -9 -22 -9t-23 9l-92 92v-165h160q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-160v-132zM576 -128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5 +t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_520" unicode="&#xf22b;" horiz-adv-x="2048" +d="M1901 621q19 -19 19 -45t-19 -45l-294 -294q-9 -10 -22.5 -10t-22.5 10l-45 45q-10 9 -10 22.5t10 22.5l185 185h-294v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-132q-24 -217 -187.5 -364.5t-384.5 -147.5q-167 0 -306 87t-212 236t-54 319q15 133 88 245.5 +t188 182t249 80.5q155 12 292 -52.5t224 -186t103 -271.5h132v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h294l-185 185q-10 9 -10 22.5t10 22.5l45 45q9 10 22.5 10t22.5 -10zM576 128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5 +t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_521" unicode="&#xf22c;" horiz-adv-x="1280" +d="M1152 960q0 -221 -147.5 -384.5t-364.5 -187.5v-612q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v612q-217 24 -364.5 187.5t-147.5 384.5q0 117 45.5 223.5t123 184t184 123t223.5 45.5t223.5 -45.5t184 -123t123 -184t45.5 -223.5zM576 512q185 0 316.5 131.5 +t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" /> + <glyph glyph-name="_522" unicode="&#xf22d;" horiz-adv-x="1280" +d="M1024 576q0 185 -131.5 316.5t-316.5 131.5t-316.5 
119l148 267q17 155 91.5 291.5t195.5 236.5q31 25 70.5 21.5t64.5 -34.5t21.5 -70t-34.5 -65q-70 -59 -117 -128q123 84 267 101 +q40 5 71.5 -19t35.5 -64q5 -40 -19 -71.5t-64 -35.5q-84 -10 -159 -55q46 10 99 10q115 0 218 -50q36 -18 49 -55.5t-5 -73.5zM2137 1085l160 -320q11 -23 4 -47.5t-29 -37.5l-209 -119l-148 -267q-17 -155 -91.5 -291.5t-195.5 -236.5q-26 -22 -61 -22q-45 0 -74 35 +q-25 31 -21.5 70t34.5 65q70 59 117 128q-123 -84 -267 -101q-4 -1 -12 -1q-36 0 -63.5 24t-31.5 60q-5 40 19 71.5t64 35.5q84 10 159 55q-46 -10 -99 -10q-115 0 -218 50q-36 18 -49 55.5t5 73.5q17 35 55 48t73 -4q62 -31 134 -31q51 0 99 17q-3 0 -9.5 -0.5t-9.5 -0.5 +q-92 0 -170.5 50t-118.5 133q-17 36 -3.5 73.5t49.5 54.5q18 9 39 9q-21 0 -39 9q-36 17 -49.5 54.5t3.5 73.5q40 83 118.5 133t170.5 50h6h1q14 -2 42 -4l291 -27l239 120q14 7 29 7q40 0 57 -35z" /> + <glyph glyph-name="uniF2A4" unicode="&#xf2a4;" horiz-adv-x="1792" +d="M1056 704q0 -26 19 -45t45 -19t45 19t19 45q0 146 -103 249t-249 103t-249 -103t-103 -249q0 -26 19 -45t45 -19t45 19t19 45q0 93 66 158.5t158 65.5t158 -65.5t66 -158.5zM835 1280q-117 0 -223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5q0 -26 19 -45t45 -19t45 19 +t19 45q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -55 -18 -103.5t-37.5 -74.5t-59.5 -72q-34 -39 -52 -63t-43.5 -66.5t-37 -91t-11.5 -105.5q0 -106 -75 -181t-181 -75q-26 0 -45 -19t-19 -45t19 -45t45 -19q159 0 271.5 112.5t112.5 271.5q0 41 7.5 74 +t26.5 64t33.5 50t45.5 54q35 41 53 64.5t44 67.5t37.5 93.5t11.5 108.5q0 117 -45.5 223.5t-123 184t-184 123t-223.5 45.5zM591 561l226 -226l-579 -579q-12 -12 -29 -12t-29 12l-168 168q-12 12 -12 29t12 29zM1612 1524l168 -168q12 -12 12 -29t-12 -30l-233 -233 +l-26 -25l-71 -71q-66 153 -195 258l91 91l207 207q13 12 30 12t29 -12z" /> + <glyph glyph-name="uniF2A5" unicode="&#xf2a5;" +d="M866 1021q0 -27 -13 -94q-11 -50 -31.5 -150t-30.5 -150q-2 -11 -4.5 -12.5t-13.5 -2.5q-20 -2 -31 -2q-58 0 -84 49.5t-26 113.5q0 88 35 174t103 124q28 14 51 14q28 0 36.5 -16.5t8.5 -47.5zM1352 597q0 14 -39 75.5t-52 66.5q-21 8 -34 8q-91 0 -226 -77l-2 2 +q3 22 27.5 135t24.5 178q0 233 -242 233q-24 0 -68 -6q-94 -17 -168.5 -89.5t-111.5 -166.5t-37 -189q0 -146 80.5 -225t227.5 -79q25 0 25 -3t-1 -5q-4 -34 -26 -117q-14 -52 -51.5 -101t-82.5 -49q-42 0 -42 47q0 24 10.5 47.5t25 39.5t29.5 28.5t26 20t11 8.5q0 3 -7 10 +q-24 22 -58.5 36.5t-65.5 14.5q-35 0 -63.5 -34t-41 -75t-12.5 -75q0 -88 51.5 -142t138.5 -54q82 0 155 53t117.5 126t65.5 153q6 22 15.5 66.5t14.5 66.5q3 12 14 18q118 60 227 60q48 0 127 -18q1 -1 4 -1q5 0 9.5 4.5t4.5 8.5zM1536 1120v-960q0 -119 -84.5 -203.5 +t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="uniF2A6" unicode="&#xf2a6;" horiz-adv-x="1535" +d="M744 1231q0 24 -2 38.5t-8.5 30t-21 23t-37.5 7.5q-39 0 -78 -23q-105 -58 -159 -190.5t-54 -269.5q0 -44 8.5 -85.5t26.5 -80.5t52.5 -62.5t81.5 -23.5q4 0 18 -0.5t20 0t16 3t15 8.5t7 16q16 77 48 231.5t48 231.5q19 91 19 146zM1498 575q0 -7 -7.5 -13.5t-15.5 -6.5 +l-6 1q-22 3 -62 11t-72 12.5t-63 4.5q-167 0 -351 -93q-15 -8 -21 -27q-10 -36 -24.5 -105.5t-22.5 -100.5q-23 -91 -70 -179.5t-112.5 -164.5t-154.5 -123t-185 -47q-135 0 -214.5 83.5t-79.5 219.5q0 53 19.5 117t63 116.5t97.5 52.5q38 0 120 -33.5t83 -61.5 +q0 -1 -16.5 -12.5t-39.5 -31t-46 -44.5t-39 -61t-16 -74q0 -33 16.5 -53t48.5 -20q45 0 85 31.5t66.5 78t48 105.5t32.5 107t16 90v9q0 2 -3.5 3.5t-8.5 1.5h-10t-10 -0.5t-6 -0.5q-227 0 -352 122.5t-125 348.5q0 108 34.5 221t96 210t156 167.5t204.5 89.5q52 9 106 9 +q374 0 374 -360q0 -98 -38 -273t-43 -211l3 -3q101 57 182.5 88t167.5 31q22 0 53 -13q19 -7 80 
-102.5t61 -116.5z" /> + <glyph glyph-name="uniF2A7" unicode="&#xf2a7;" horiz-adv-x="1664" +d="M831 863q32 0 59 -18l222 -148q61 -40 110 -97l146 -170q40 -46 29 -106l-72 -413q-6 -32 -29.5 -53.5t-55.5 -25.5l-527 -56l-352 -32h-9q-39 0 -67.5 28t-28.5 68q0 37 27 64t65 32l260 32h-448q-41 0 -69.5 30t-26.5 71q2 39 32 65t69 26l442 1l-521 64q-41 5 -66 37 +t-19 73q6 35 34.5 57.5t65.5 22.5h10l481 -60l-351 94q-38 10 -62 41.5t-18 68.5q6 36 33 58.5t62 22.5q6 0 20 -2l448 -96l217 -37q1 0 3 -0.5t3 -0.5q23 0 30.5 23t-12.5 36l-186 125q-35 23 -42 63.5t18 73.5q27 38 76 38zM761 661l186 -125l-218 37l-5 2l-36 38 +l-238 262q-1 1 -2.5 3.5t-2.5 3.5q-24 31 -18.5 70t37.5 64q31 23 68 17.5t64 -33.5l142 -147q-2 -1 -5 -3.5t-4 -4.5q-32 -45 -23 -99t55 -85zM1648 1115l15 -266q4 -73 -11 -147l-48 -219q-12 -59 -67 -87l-106 -54q2 62 -39 109l-146 170q-53 61 -117 103l-222 148 +q-34 23 -76 23q-51 0 -88 -37l-235 312q-25 33 -18 73.5t41 63.5q33 22 71.5 14t62.5 -40l266 -352l-262 455q-21 35 -10.5 75t47.5 59q35 18 72.5 6t57.5 -46l241 -420l-136 337q-15 35 -4.5 74t44.5 56q37 19 76 6t56 -51l193 -415l101 -196q8 -15 23 -17.5t27 7.5t11 26 +l-12 224q-2 41 26 71t69 31q39 0 67 -28.5t30 -67.5z" /> + <glyph glyph-name="uniF2A8" unicode="&#xf2a8;" horiz-adv-x="1792" +d="M335 180q-2 0 -6 2q-86 57 -168.5 145t-139.5 180q-21 30 -21 69q0 9 2 19t4 18t7 18t8.5 16t10.5 17t10 15t12 15.5t11 14.5q184 251 452 365q-110 198 -110 211q0 19 17 29q116 64 128 64q18 0 28 -16l124 -229q92 19 192 19q266 0 497.5 -137.5t378.5 -369.5 +q20 -31 20 -69t-20 -69q-91 -142 -218.5 -253.5t-278.5 -175.5q110 -198 110 -211q0 -20 -17 -29q-116 -64 -127 -64q-19 0 -29 16l-124 229l-64 119l-444 820l7 7q-58 -24 -99 -47q3 -5 127 -234t243 -449t119 -223q0 -7 -9 -9q-13 -3 -72 -3q-57 0 -60 7l-456 841 +q-39 -28 -82 -68q24 -43 214 -393.5t190 -354.5q0 -10 -11 -10q-14 0 -82.5 22t-72.5 28l-106 197l-224 413q-44 -53 -78 -106q2 -3 18 -25t23 -34l176 -327q0 -10 -10 -10zM1165 282l49 -91q273 111 450 385q-180 277 -459 389q67 -64 103 -148.5t36 -176.5 +q0 -106 -47 -200.5t-132 -157.5zM848 896q0 -20 14 -34t34 -14q86 0 147 -61t61 -147q0 -20 14 -34t34 -14t34 14t14 34q0 126 -89 215t-215 89q-20 0 -34 -14t-14 -34zM1214 961l-9 4l7 -7z" /> + <glyph glyph-name="uniF2A9" unicode="&#xf2a9;" horiz-adv-x="1280" +d="M1050 430q0 -215 -147 -374q-148 -161 -378 -161q-232 0 -378 161q-147 159 -147 374q0 147 68 270.5t189 196.5t268 73q96 0 182 -31q-32 -62 -39 -126q-66 28 -143 28q-167 0 -280.5 -123t-113.5 -291q0 -170 112.5 -288.5t281.5 -118.5t281 118.5t112 288.5 +q0 89 -32 166q66 13 123 49q41 -98 41 -212zM846 619q0 -192 -79.5 -345t-238.5 -253l-14 -1q-29 0 -62 5q83 32 146.5 102.5t99.5 154.5t58.5 189t30 192.5t7.5 178.5q0 69 -3 103q55 -160 55 -326zM791 947v-2q-73 214 -206 440q88 -59 142.5 -186.5t63.5 -251.5z +M1035 744q-83 0 -160 75q218 120 290 247q19 37 21 56q-42 -94 -139.5 -166.5t-204.5 -97.5q-35 54 -35 113q0 37 17 79t43 68q46 44 157 74q59 16 106 58.5t74 100.5q74 -105 74 -253q0 -109 -24 -170q-32 -77 -88.5 -130.5t-130.5 -53.5z" /> + <glyph glyph-name="uniF2AA" unicode="&#xf2aa;" +d="M1050 495q0 78 -28 147q-41 -25 -85 -34q22 -50 22 -114q0 -117 -77 -198.5t-193 -81.5t-193.5 81.5t-77.5 198.5q0 115 78 199.5t193 84.5q53 0 98 -19q4 43 27 87q-60 21 -125 21q-154 0 -257.5 -108.5t-103.5 -263.5t103.5 -261t257.5 -106t257.5 106.5t103.5 260.5z +M872 850q2 -24 2 -71q0 -63 -5 -123t-20.5 -132.5t-40.5 -130t-68.5 -106t-100.5 -70.5q21 -3 42 -3h10q219 139 219 411q0 116 -38 225zM872 850q-4 80 -44 171.5t-98 130.5q92 -156 142 -302zM1207 955q0 102 -51 174q-41 -86 -124 -109q-69 -19 -109 -53.5t-40 -99.5 +q0 -40 24 -77q74 17 140.5 67t95.5 115q-4 -52 -74.5 -111.5t-138.5 
-97.5q52 -52 110 -52q51 0 90 37t60 90q17 42 17 117zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 +t84.5 -203.5z" /> + <glyph glyph-name="uniF2AB" unicode="&#xf2ab;" +d="M1279 388q0 22 -22 27q-67 15 -118 59t-80 108q-7 19 -7 25q0 15 19.5 26t43 17t43 20.5t19.5 36.5q0 19 -18.5 31.5t-38.5 12.5q-12 0 -32 -8t-31 -8q-4 0 -12 2q5 95 5 114q0 79 -17 114q-36 78 -103 121.5t-152 43.5q-199 0 -275 -165q-17 -35 -17 -114q0 -19 5 -114 +q-4 -2 -14 -2q-12 0 -32 7.5t-30 7.5q-21 0 -38.5 -12t-17.5 -32q0 -21 19.5 -35.5t43 -20.5t43 -17t19.5 -26q0 -6 -7 -25q-64 -138 -198 -167q-22 -5 -22 -27q0 -46 137 -68q2 -5 6 -26t11.5 -30.5t23.5 -9.5q12 0 37.5 4.5t39.5 4.5q35 0 67 -15t54 -32.5t57.5 -32.5 +t76.5 -15q43 0 79 15t57.5 32.5t53.5 32.5t67 15q14 0 39.5 -4t38.5 -4q16 0 23 10t11 30t6 25q137 22 137 68zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5 +t103 -385.5z" /> + <glyph glyph-name="uniF2AC" unicode="&#xf2ac;" horiz-adv-x="1664" +d="M848 1408q134 1 240.5 -68.5t163.5 -192.5q27 -58 27 -179q0 -47 -9 -191q14 -7 28 -7q18 0 51 13.5t51 13.5q29 0 56 -18t27 -46q0 -32 -31.5 -54t-69 -31.5t-69 -29t-31.5 -47.5q0 -15 12 -43q37 -82 102.5 -150t144.5 -101q28 -12 80 -23q28 -6 28 -35 +q0 -70 -219 -103q-7 -11 -11 -39t-14 -46.5t-33 -18.5q-20 0 -62 6.5t-64 6.5q-37 0 -62 -5q-32 -5 -63 -22.5t-58 -38t-58 -40.5t-76 -33.5t-99 -13.5q-52 0 -96.5 13.5t-75 33.5t-57.5 40.5t-58 38t-62 22.5q-26 5 -63 5q-24 0 -65.5 -7.5t-58.5 -7.5q-25 0 -35 18.5 +t-14 47.5t-11 40q-219 33 -219 103q0 29 28 35q52 11 80 23q78 32 144.5 101t102.5 150q12 28 12 43q0 28 -31.5 47.5t-69.5 29.5t-69.5 31.5t-31.5 52.5q0 27 26 45.5t55 18.5q15 0 48 -13t53 -13q18 0 32 7q-9 142 -9 190q0 122 27 180q64 137 172 198t264 63z" /> + <glyph glyph-name="uniF2AD" unicode="&#xf2ad;" +d="M1280 388q0 22 -22 27q-67 14 -118 58t-80 109q-7 14 -7 25q0 15 19.5 26t42.5 17t42.5 20.5t19.5 36.5q0 19 -18.5 31.5t-38.5 12.5q-11 0 -31 -8t-32 -8q-4 0 -12 2q5 63 5 115q0 78 -17 114q-36 78 -102.5 121.5t-152.5 43.5q-198 0 -275 -165q-18 -38 -18 -115 +q0 -38 6 -114q-10 -2 -15 -2q-11 0 -31.5 8t-30.5 8q-20 0 -37.5 -12.5t-17.5 -32.5q0 -21 19.5 -35.5t42.5 -20.5t42.5 -17t19.5 -26q0 -11 -7 -25q-64 -138 -198 -167q-22 -5 -22 -27q0 -47 138 -69q2 -5 6 -26t11 -30.5t23 -9.5q13 0 38.5 5t38.5 5q35 0 67.5 -15 +t54.5 -32.5t57.5 -32.5t76.5 -15q43 0 79 15t57.5 32.5t54 32.5t67.5 15q13 0 39 -4.5t39 -4.5q15 0 22.5 9.5t11.5 31t5 24.5q138 22 138 69zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960 +q119 0 203.5 -84.5t84.5 -203.5z" /> + <glyph glyph-name="uniF2AE" unicode="&#xf2ae;" horiz-adv-x="2304" +d="M2304 1536q-69 -46 -125 -92t-89 -81t-59.5 -71.5t-37.5 -57.5t-22 -44.5t-14 -29.5q-10 -18 -35.5 -136.5t-48.5 -164.5q-15 -29 -50 -60.5t-67.5 -50.5t-72.5 -41t-48 -28q-47 -31 -151 -231q-341 14 -630 -158q-92 -53 -303 -179q47 16 86 31t55 22l15 7 +q71 27 163 64.5t133.5 53.5t108 34.5t142.5 31.5q186 31 465 -7q1 0 10 -3q11 -6 14 -17t-3 -22l-194 -345q-15 -29 -47 -22q-128 24 -354 24q-146 0 -402 -44.5t-392 -46.5q-82 -1 -149 13t-107 37t-61 40t-33 34l-1 1v2q0 6 6 6q138 0 371 55q192 366 374.5 524t383.5 158 +q5 0 14.5 -0.5t38 -5t55 -12t61.5 -24.5t63 -39.5t54 -59t40 -82.5l102 177q2 4 21 42.5t44.5 86.5t61 109.5t84 133.5t100.5 137q66 82 128 141.5t121.5 96.5t92.5 53.5t88 39.5z" /> + <glyph glyph-name="uniF2B0" unicode="&#xf2b0;" +d="M1322 640q0 -45 -5 -76l-236 14l224 -78q-19 -73 -58 -141l-214 103l177 -158q-44 
-61 -107 -108l-157 178l103 -215q-61 -37 -140 -59l-79 228l14 -240q-38 -6 -76 -6t-76 6l14 238l-78 -226q-74 19 -140 59l103 215l-157 -178q-59 43 -108 108l178 158l-214 -104 +q-39 69 -58 141l224 79l-237 -14q-5 42 -5 76q0 35 5 77l238 -14l-225 79q19 73 58 140l214 -104l-177 159q46 61 107 108l158 -178l-103 215q67 39 140 58l77 -224l-13 236q36 6 75 6q38 0 76 -6l-14 -237l78 225q74 -19 140 -59l-103 -214l158 178q61 -47 107 -108 +l-177 -159l213 104q37 -62 58 -141l-224 -78l237 14q5 -31 5 -77zM1352 640q0 160 -78.5 295.5t-213 214t-292.5 78.5q-119 0 -227 -46.5t-186.5 -125t-124.5 -187.5t-46 -229q0 -119 46 -228t124.5 -187.5t186.5 -125t227 -46.5q158 0 292.5 78.5t213 214t78.5 294.5z +M1425 1023v-766l-657 -383l-657 383v766l657 383zM768 -183l708 412v823l-708 411l-708 -411v-823zM1536 1088v-896l-768 -448l-768 448v896l768 448z" /> + <glyph glyph-name="uniF2B1" unicode="&#xf2b1;" horiz-adv-x="1664" +d="M339 1318h691l-26 -72h-665q-110 0 -188.5 -79t-78.5 -189v-771q0 -95 60.5 -169.5t153.5 -93.5q23 -5 98 -5v-72h-45q-140 0 -239.5 100t-99.5 240v771q0 140 99.5 240t239.5 100zM1190 1536h247l-482 -1294q-23 -61 -40.5 -103.5t-45 -98t-54 -93.5t-64.5 -78.5 +t-79.5 -65t-95.5 -41t-116 -18.5v195q163 26 220 182q20 52 20 105q0 54 -20 106l-285 733h228l187 -585zM1664 978v-1111h-795q37 55 45 73h678v1038q0 85 -49.5 155t-129.5 99l25 67q101 -34 163.5 -123.5t62.5 -197.5z" /> + <glyph glyph-name="uniF2B2" unicode="&#xf2b2;" horiz-adv-x="1792" +d="M852 1227q0 -29 -17 -52.5t-45 -23.5t-45 23.5t-17 52.5t17 52.5t45 23.5t45 -23.5t17 -52.5zM688 -149v114q0 30 -20.5 51.5t-50.5 21.5t-50 -21.5t-20 -51.5v-114q0 -30 20.5 -52t49.5 -22q30 0 50.5 22t20.5 52zM860 -149v114q0 30 -20 51.5t-50 21.5t-50.5 -21.5 +t-20.5 -51.5v-114q0 -30 20.5 -52t50.5 -22q29 0 49.5 22t20.5 52zM1034 -149v114q0 30 -20.5 51.5t-50.5 21.5t-50.5 -21.5t-20.5 -51.5v-114q0 -30 20.5 -52t50.5 -22t50.5 22t20.5 52zM1208 -149v114q0 30 -20.5 51.5t-50.5 21.5t-50.5 -21.5t-20.5 -51.5v-114 +q0 -30 20.5 -52t50.5 -22t50.5 22t20.5 52zM1476 535q-84 -160 -232 -259.5t-323 -99.5q-123 0 -229.5 51.5t-178.5 137t-113 197.5t-41 232q0 88 21 174q-104 -175 -104 -390q0 -162 65 -312t185 -251q30 57 91 57q56 0 86 -50q32 50 87 50q56 0 86 -50q32 50 87 50t87 -50 +q30 50 86 50q28 0 52.5 -15.5t37.5 -40.5q112 94 177 231.5t73 287.5zM1326 564q0 75 -72 75q-17 0 -47 -6q-95 -19 -149 -19q-226 0 -226 243q0 86 30 204q-83 -127 -83 -275q0 -150 89 -260.5t235 -110.5q111 0 210 70q13 48 13 79zM884 1223q0 50 -32 89.5t-81 39.5 +t-81 -39.5t-32 -89.5q0 -51 31.5 -90.5t81.5 -39.5t81.5 39.5t31.5 90.5zM1513 884q0 96 -37.5 179t-113 137t-173.5 54q-77 0 -149 -35t-127 -94q-48 -159 -48 -268q0 -104 45.5 -157t147.5 -53q53 0 142 19q36 6 53 6q51 0 77.5 -28t26.5 -80q0 -26 -4 -46 +q75 68 117.5 165.5t42.5 200.5zM1792 667q0 -111 -33.5 -249.5t-93.5 -204.5q-58 -64 -195 -142.5t-228 -104.5l-4 -1v-114q0 -43 -29.5 -75t-72.5 -32q-56 0 -86 50q-32 -50 -87 -50t-87 50q-30 -50 -86 -50q-55 0 -87 50q-30 -50 -86 -50q-47 0 -75 33.5t-28 81.5 +q-90 -68 -198 -68q-118 0 -211 80q54 1 106 20q-113 31 -182 127q32 -7 71 -7q89 0 164 46q-192 192 -240 306q-24 56 -24 160q0 57 9 125.5t31.5 146.5t55 141t86.5 105t120 42q59 0 81 -52q19 29 42 54q2 3 12 13t13 16q10 15 23 38t25 42t28 39q87 111 211.5 177 +t260.5 66q35 0 62 -4q59 64 146 64q83 0 140 -57q5 -5 5 -12q0 -5 -6 -13.5t-12.5 -16t-16 -17l-10.5 -10.5q17 -6 36 -18t19 -24q0 -6 -16 -25q157 -138 197 -378q25 30 60 30q45 0 100 -49q90 -80 90 -279z" /> + <glyph glyph-name="uniF2B3" unicode="&#xf2b3;" +d="M917 631q0 33 -6 64h-362v-132h217q-12 -76 -74.5 -120.5t-142.5 -44.5q-99 0 -169 71.5t-70 170.5t70 170.5t169 71.5q93 0 153 -59l104 101q-108 100 -257 
100q-160 0 -272 -112.5t-112 -271.5t112 -271.5t272 -112.5q165 0 266.5 105t101.5 270zM1262 585h109v110 +h-109v110h-110v-110h-110v-110h110v-110h110v110zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" /> + <glyph glyph-name="uniF2B4" unicode="&#xf2b4;" +d="M1536 1024v-839q0 -48 -49 -62q-174 -52 -338 -52q-73 0 -215.5 29.5t-227.5 29.5q-164 0 -370 -48v-338h-160v1368q-63 25 -101 81t-38 124q0 91 64 155t155 64t155 -64t64 -155q0 -68 -38 -124t-101 -81v-68q190 44 343 44q99 0 198 -15q14 -2 111.5 -22.5t149.5 -20.5 +q77 0 165 18q11 2 80 21t89 19q26 0 45 -19t19 -45z" /> + <glyph glyph-name="uniF2B5" unicode="&#xf2b5;" horiz-adv-x="2304" +d="M192 384q40 0 56 32t0 64t-56 32t-56 -32t0 -64t56 -32zM1665 442q-10 13 -38.5 50t-41.5 54t-38 49t-42.5 53t-40.5 47t-45 49l-125 -140q-83 -94 -208.5 -92t-205.5 98q-57 69 -56.5 158t58.5 157l177 206q-22 11 -51 16.5t-47.5 6t-56.5 -0.5t-49 -1q-92 0 -158 -66 +l-158 -158h-155v-544q5 0 21 0.5t22 0t19.5 -2t20.5 -4.5t17.5 -8.5t18.5 -13.5l297 -292q115 -111 227 -111q78 0 125 47q57 -20 112.5 8t72.5 85q74 -6 127 44q20 18 36 45.5t14 50.5q10 -10 43 -10q43 0 77 21t49.5 53t12 71.5t-30.5 73.5zM1824 384h96v512h-93l-157 180 +q-66 76 -169 76h-167q-89 0 -146 -67l-209 -243q-28 -33 -28 -75t27 -75q43 -51 110 -52t111 49l193 218q25 23 53.5 21.5t47 -27t8.5 -56.5q16 -19 56 -63t60 -68q29 -36 82.5 -105.5t64.5 -84.5q52 -66 60 -140zM2112 384q40 0 56 32t0 64t-56 32t-56 -32t0 -64t56 -32z +M2304 960v-640q0 -26 -19 -45t-45 -19h-434q-27 -65 -82 -106.5t-125 -51.5q-33 -48 -80.5 -81.5t-102.5 -45.5q-42 -53 -104.5 -81.5t-128.5 -24.5q-60 -34 -126 -39.5t-127.5 14t-117 53.5t-103.5 81l-287 282h-358q-26 0 -45 19t-19 45v672q0 26 19 45t45 19h421 +q14 14 47 48t47.5 48t44 40t50.5 37.5t51 25.5t62 19.5t68 5.5h117q99 0 181 -56q82 56 181 56h167q35 0 67 -6t56.5 -14.5t51.5 -26.5t44.5 -31t43 -39.5t39 -42t41 -48t41.5 -48.5h355q26 0 45 -19t19 -45z" /> + <glyph glyph-name="uniF2B6" unicode="&#xf2b6;" horiz-adv-x="1792" +d="M1792 882v-978q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v978q0 15 11 24q8 7 39 34.5t41.5 36t45.5 37.5t70 55.5t96 73t143.5 107t192.5 140.5q5 4 52.5 40t71.5 52.5t64 35t69 18.5t69 -18.5t65 -35.5t71 -52t52 -40q110 -80 192.5 -140.5t143.5 -107 +t96 -73t70 -55.5t45.5 -37.5t41.5 -36t39 -34.5q11 -9 11 -24zM1228 297q263 191 345 252q11 8 12.5 20.5t-6.5 23.5l-38 52q-8 11 -21 12.5t-24 -6.5q-231 -169 -343 -250q-5 -3 -52 -39t-71.5 -52.5t-64.5 -35t-69 -18.5t-69 18.5t-64.5 35t-71.5 52.5t-52 39 +q-186 134 -343 250q-11 8 -24 6.5t-21 -12.5l-38 -52q-8 -11 -6.5 -23.5t12.5 -20.5q82 -61 345 -252q10 -8 50 -38t65 -47t64 -39.5t77.5 -33.5t75.5 -11t75.5 11t79 34.5t64.5 39.5t65 47.5t48 36.5z" /> + <glyph glyph-name="uniF2B7" unicode="&#xf2b7;" horiz-adv-x="1792" +d="M1474 623l39 -51q8 -11 6.5 -23.5t-11.5 -20.5q-43 -34 -126.5 -98.5t-146.5 -113t-67 -51.5q-39 -32 -60 -48t-60.5 -41t-76.5 -36.5t-74 -11.5h-1h-1q-37 0 -74 11.5t-76 36.5t-61 41.5t-60 47.5q-5 4 -65 50.5t-143.5 111t-122.5 94.5q-11 8 -12.5 20.5t6.5 23.5 +l37 52q8 11 21.5 13t24.5 -7q94 -73 306 -236q5 -4 43.5 -35t60.5 -46.5t56.5 -32.5t58.5 -17h1h1q24 0 58.5 17t56.5 32.5t60.5 46.5t43.5 35q258 198 313 242q11 8 24 6.5t21 -12.5zM1664 -96v928q-90 83 -159 139q-91 74 -389 304q-3 2 -43 35t-61 48t-56 32.5t-59 17.5 +h-1h-1q-24 0 -59 -17.5t-56 -32.5t-61 -48t-43 -35q-215 -166 -315.5 -245.5t-129.5 -104t-82 -74.5q-14 -12 -21 -19v-928q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1792 832v-928q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v928q0 56 41 94 +q123 114 350 
290.5t233 181.5q36 30 59 47.5t61.5 42t76 36.5t74.5 12h1h1q37 0 74.5 -12t76 -36.5t61.5 -42t59 -47.5q43 -36 156 -122t226 -177t201 -173q41 -38 41 -94z" /> + <glyph glyph-name="uniF2B8" unicode="&#xf2b8;" +d="M330 1l202 -214l-34 236l-216 213zM556 -225l274 218l-11 245l-300 -215zM245 413l227 -213l-48 327l-245 204zM495 189l317 214l-14 324l-352 -200zM843 178l95 -80l-2 239l-103 79q0 -1 1 -8.5t0 -12t-5 -7.5l-78 -52l85 -70q7 -6 7 -88zM138 930l256 -200l-68 465 +l-279 173zM1173 267l15 234l-230 -164l2 -240zM417 722l373 194l-19 441l-423 -163zM1270 357l20 233l-226 142l-2 -105l144 -95q6 -4 4 -9l-7 -119zM1461 496l30 222l-179 -128l-20 -228zM1273 329l-71 49l-8 -117q0 -5 -4 -8l-234 -187q-7 -5 -14 0l-98 83l7 -161 +q0 -5 -4 -8l-293 -234q-4 -2 -6 -2q-8 2 -8 3l-228 242q-4 4 -59 277q-2 7 5 11l61 37q-94 86 -95 92l-72 351q-2 7 6 12l94 45q-133 100 -135 108l-96 466q-2 10 7 13l433 135q5 0 8 -1l317 -153q6 -4 6 -9l20 -463q0 -7 -6 -10l-118 -61l126 -85q5 -2 5 -8l5 -123l121 74 +q5 4 11 0l84 -56l3 110q0 6 5 9l206 126q6 3 11 0l245 -135q4 -4 5 -7t-6.5 -60t-17.5 -124.5t-10 -70.5q0 -5 -4 -7l-191 -153q-6 -5 -13 0z" /> + <glyph glyph-name="uniF2B9" unicode="&#xf2b9;" horiz-adv-x="1664" +d="M1201 298q0 57 -5.5 107t-21 100.5t-39.5 86t-64 58t-91 22.5q-6 -4 -33.5 -20.5t-42.5 -24.5t-40.5 -20t-49 -17t-46.5 -5t-46.5 5t-49 17t-40.5 20t-42.5 24.5t-33.5 20.5q-51 0 -91 -22.5t-64 -58t-39.5 -86t-21 -100.5t-5.5 -107q0 -73 42 -121.5t103 -48.5h576 +q61 0 103 48.5t42 121.5zM1028 892q0 108 -76.5 184t-183.5 76t-183.5 -76t-76.5 -184q0 -107 76.5 -183t183.5 -76t183.5 76t76.5 183zM1664 352v-192q0 -14 -9 -23t-23 -9h-96v-224q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113v1472q0 66 47 113t113 47h1216 +q66 0 113 -47t47 -113v-224h96q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-96v-128h96q14 0 23 -9t9 -23v-192q0 -14 -9 -23t-23 -9h-96v-128h96q14 0 23 -9t9 -23z" /> + <glyph glyph-name="uniF2BA" unicode="&#xf2ba;" horiz-adv-x="1664" +d="M1028 892q0 -107 -76.5 -183t-183.5 -76t-183.5 76t-76.5 183q0 108 76.5 184t183.5 76t183.5 -76t76.5 -184zM980 672q46 0 82.5 -17t60 -47.5t39.5 -67t24 -81t11.5 -82.5t3.5 -79q0 -67 -39.5 -118.5t-105.5 -51.5h-576q-66 0 -105.5 51.5t-39.5 118.5q0 48 4.5 93.5 +t18.5 98.5t36.5 91.5t63 64.5t93.5 26h5q7 -4 32 -19.5t35.5 -21t33 -17t37 -16t35 -9t39.5 -4.5t39.5 4.5t35 9t37 16t33 17t35.5 21t32 19.5zM1664 928q0 -13 -9.5 -22.5t-22.5 -9.5h-96v-128h96q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-96v-128h96 +q13 0 22.5 -9.5t9.5 -22.5v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-96v-224q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113v1472q0 66 47 113t113 47h1216q66 0 113 -47t47 -113v-224h96q13 0 22.5 -9.5t9.5 -22.5v-192zM1408 -96v1472q0 13 -9.5 22.5t-22.5 9.5h-1216 +q-13 0 -22.5 -9.5t-9.5 -22.5v-1472q0 -13 9.5 -22.5t22.5 -9.5h1216q13 0 22.5 9.5t9.5 22.5z" /> + <glyph glyph-name="uniF2BB" unicode="&#xf2bb;" horiz-adv-x="2048" +d="M1024 405q0 64 -9 117.5t-29.5 103t-60.5 78t-97 28.5q-6 -4 -30 -18t-37.5 -21.5t-35.5 -17.5t-43 -14.5t-42 -4.5t-42 4.5t-43 14.5t-35.5 17.5t-37.5 21.5t-30 18q-57 0 -97 -28.5t-60.5 -78t-29.5 -103t-9 -117.5t37 -106.5t91 -42.5h512q54 0 91 42.5t37 106.5z +M867 925q0 94 -66.5 160.5t-160.5 66.5t-160.5 -66.5t-66.5 -160.5t66.5 -160.5t160.5 -66.5t160.5 66.5t66.5 160.5zM1792 416v64q0 14 -9 23t-23 9h-576q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h576q14 0 23 9t9 23zM1792 676v56q0 15 -10.5 25.5t-25.5 10.5h-568 +q-15 0 -25.5 -10.5t-10.5 -25.5v-56q0 -15 10.5 -25.5t25.5 -10.5h568q15 0 25.5 10.5t10.5 25.5zM1792 928v64q0 14 -9 23t-23 9h-576q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h576q14 0 23 9t9 23zM2048 1248v-1216q0 -66 -47 -113t-113 
-47h-352v96q0 14 -9 23t-23 9 +h-64q-14 0 -23 -9t-9 -23v-96h-768v96q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-96h-352q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1728q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2BC" unicode="&#xf2bc;" horiz-adv-x="2048" +d="M1024 405q0 -64 -37 -106.5t-91 -42.5h-512q-54 0 -91 42.5t-37 106.5t9 117.5t29.5 103t60.5 78t97 28.5q6 -4 30 -18t37.5 -21.5t35.5 -17.5t43 -14.5t42 -4.5t42 4.5t43 14.5t35.5 17.5t37.5 21.5t30 18q57 0 97 -28.5t60.5 -78t29.5 -103t9 -117.5zM867 925 +q0 -94 -66.5 -160.5t-160.5 -66.5t-160.5 66.5t-66.5 160.5t66.5 160.5t160.5 66.5t160.5 -66.5t66.5 -160.5zM1792 480v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576q14 0 23 -9t9 -23zM1792 732v-56q0 -15 -10.5 -25.5t-25.5 -10.5h-568 +q-15 0 -25.5 10.5t-10.5 25.5v56q0 15 10.5 25.5t25.5 10.5h568q15 0 25.5 -10.5t10.5 -25.5zM1792 992v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576q14 0 23 -9t9 -23zM1920 32v1216q0 13 -9.5 22.5t-22.5 9.5h-1728q-13 0 -22.5 -9.5 +t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h352v96q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-96h768v96q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-96h352q13 0 22.5 9.5t9.5 22.5zM2048 1248v-1216q0 -66 -47 -113t-113 -47h-1728q-66 0 -113 47t-47 113v1216q0 66 47 113 +t113 47h1728q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2BD" unicode="&#xf2bd;" horiz-adv-x="1792" +d="M1523 197q-22 155 -87.5 257.5t-184.5 118.5q-67 -74 -159.5 -115.5t-195.5 -41.5t-195.5 41.5t-159.5 115.5q-119 -16 -184.5 -118.5t-87.5 -257.5q106 -150 271 -237.5t356 -87.5t356 87.5t271 237.5zM1280 896q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5 +t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1792 640q0 -182 -71 -347.5t-190.5 -286t-285.5 -191.5t-349 -71q-182 0 -348 71t-286 191t-191 286t-71 348t71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> + <glyph glyph-name="uniF2BE" unicode="&#xf2be;" horiz-adv-x="1792" +d="M896 1536q182 0 348 -71t286 -191t191 -286t71 -348q0 -181 -70.5 -347t-190.5 -286t-286 -191.5t-349 -71.5t-349 71t-285.5 191.5t-190.5 286t-71 347.5t71 348t191 286t286 191t348 71zM1515 185q149 205 149 455q0 156 -61 298t-164 245t-245 164t-298 61t-298 -61 +t-245 -164t-164 -245t-61 -298q0 -250 149 -455q66 327 306 327q131 -128 313 -128t313 128q240 0 306 -327zM1280 832q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5z" /> + <glyph glyph-name="uniF2C0" unicode="&#xf2c0;" +d="M1201 752q47 -14 89.5 -38t89 -73t79.5 -115.5t55 -172t22 -236.5q0 -154 -100 -263.5t-241 -109.5h-854q-141 0 -241 109.5t-100 263.5q0 131 22 236.5t55 172t79.5 115.5t89 73t89.5 38q-79 125 -79 272q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5 +t198.5 -40.5t163.5 -109.5t109.5 -163.5t40.5 -198.5q0 -147 -79 -272zM768 1408q-159 0 -271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5t-112.5 271.5t-271.5 112.5zM1195 -128q88 0 150.5 71.5t62.5 173.5q0 239 -78.5 377t-225.5 145 +q-145 -127 -336 -127t-336 127q-147 -7 -225.5 -145t-78.5 -377q0 -102 62.5 -173.5t150.5 -71.5h854z" /> + <glyph glyph-name="uniF2C1" unicode="&#xf2c1;" horiz-adv-x="1280" +d="M1024 278q0 -64 -37 -107t-91 -43h-512q-54 0 -91 43t-37 107t9 118t29.5 104t61 78.5t96.5 28.5q80 -75 188 -75t188 75q56 0 96.5 -28.5t61 -78.5t29.5 -104t9 -118zM870 797q0 -94 -67.5 -160.5t-162.5 -66.5t-162.5 66.5t-67.5 160.5t67.5 160.5t162.5 66.5 +t162.5 -66.5t67.5 -160.5zM1152 -96v1376h-1024v-1376q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1280 1376v-1472q0 -66 -47 -113t-113 -47h-960q-66 0 -113 47t-47 113v1472q0 66 47 
113t113 47h352v-96q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v96h352 +q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2C2" unicode="&#xf2c2;" horiz-adv-x="2048" +d="M896 324q0 54 -7.5 100.5t-24.5 90t-51 68.5t-81 25q-64 -64 -156 -64t-156 64q-47 0 -81 -25t-51 -68.5t-24.5 -90t-7.5 -100.5q0 -55 31.5 -93.5t75.5 -38.5h426q44 0 75.5 38.5t31.5 93.5zM768 768q0 80 -56 136t-136 56t-136 -56t-56 -136t56 -136t136 -56t136 56 +t56 136zM1792 288v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1408 544v64q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1792 544v64q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23 +v-64q0 -14 9 -23t23 -9h192q14 0 23 9t9 23zM1792 800v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM128 1152h1792v96q0 14 -9 23t-23 9h-1728q-14 0 -23 -9t-9 -23v-96zM2048 1248v-1216q0 -66 -47 -113t-113 -47h-1728 +q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1728q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2C3" unicode="&#xf2c3;" horiz-adv-x="2048" +d="M896 324q0 -55 -31.5 -93.5t-75.5 -38.5h-426q-44 0 -75.5 38.5t-31.5 93.5q0 54 7.5 100.5t24.5 90t51 68.5t81 25q64 -64 156 -64t156 64q47 0 81 -25t51 -68.5t24.5 -90t7.5 -100.5zM768 768q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136z +M1792 352v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1408 608v-64q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h320q14 0 23 -9t9 -23zM1792 608v-64q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v64 +q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 864v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1920 32v1120h-1792v-1120q0 -13 9.5 -22.5t22.5 -9.5h1728q13 0 22.5 9.5t9.5 22.5zM2048 1248v-1216q0 -66 -47 -113t-113 -47 +h-1728q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1728q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2C4" unicode="&#xf2c4;" horiz-adv-x="1792" +d="M1255 749q0 318 -105 474.5t-330 156.5q-222 0 -326 -157t-104 -474q0 -316 104 -471.5t326 -155.5q74 0 131 17q-22 43 -39 73t-44 65t-53.5 56.5t-63 36t-77.5 14.5q-46 0 -79 -16l-49 97q105 91 276 91q132 0 215.5 -54t150.5 -155q67 149 67 402zM1645 117h117 +q3 -27 -2 -67t-26.5 -95t-58 -100.5t-107 -78t-162.5 -32.5q-71 0 -130.5 19t-105.5 56t-79 78t-66 96q-97 -27 -205 -27q-150 0 -292.5 58t-253 158.5t-178 249t-67.5 317.5q0 170 67.5 319.5t178.5 250.5t253.5 159t291.5 58q121 0 238.5 -36t217 -106t176 -164.5 +t119.5 -219t43 -261.5q0 -190 -80.5 -347.5t-218.5 -264.5q47 -70 93.5 -106.5t104.5 -36.5q61 0 94 37.5t38 85.5z" /> + <glyph glyph-name="uniF2C5" unicode="&#xf2c5;" horiz-adv-x="2304" +d="M453 -101q0 -21 -16 -37.5t-37 -16.5q-1 0 -13 3q-63 15 -162 140q-225 284 -225 676q0 341 213 614q39 51 95 103.5t94 52.5q19 0 35 -13.5t16 -32.5q0 -27 -63 -90q-98 -102 -147 -184q-119 -199 -119 -449q0 -281 123 -491q50 -85 136 -173q2 -3 14.5 -16t19.5 -21 +t17 -20.5t14.5 -23.5t4.5 -21zM1796 33q0 -29 -17.5 -48.5t-46.5 -19.5h-1081q-26 0 -45 19t-19 45q0 29 17.5 48.5t46.5 19.5h1081q26 0 45 -19t19 -45zM1581 644q0 -134 -67 -233q-25 -38 -69.5 -78.5t-83.5 -60.5q-16 -10 -27 -10q-7 0 -15 6t-8 12q0 9 19 30t42 46 +t42 67.5t19 88.5q0 76 -35 130q-29 42 -46 42q-3 0 -3 -5q0 -12 7.5 -35.5t7.5 -36.5q0 -22 -21.5 -35t-44.5 -13q-66 0 -66 76q0 15 1.5 44t1.5 44q0 25 -10 46q-13 25 -42 53.5t-51 28.5q-5 0 -7 -0.5t-3.5 -2.5t-1.5 -6q0 -2 16 -26t16 -54q0 -37 -19 -68t-46 -54 +t-53.5 -46t-45.5 -54t-19 -68q0 -98 42 -160q29 -43 79 -63q16 -5 17 -10q1 -2 1 -5q0 -16 -18 -16q-6 0 -33 11q-119 43 -195 139.5t-76 218.5q0 
55 24.5 115.5t60 115t70.5 108.5t59.5 113.5t24.5 111.5q0 53 -25 94q-29 48 -56 64q-19 9 -19 21q0 20 41 20q50 0 110 -29 +q41 -19 71 -44.5t49.5 -51t33.5 -62.5t22 -69t16 -80q0 -1 3 -17.5t4.5 -25t5.5 -25t9 -27t11 -21.5t14.5 -16.5t18.5 -5.5q23 0 37 14t14 37q0 25 -20 67t-20 52t10 10q27 0 93 -70q72 -76 102.5 -156t30.5 -186zM2304 615q0 -274 -138 -503q-19 -32 -48 -72t-68 -86.5 +t-81 -77t-74 -30.5q-16 0 -31 15.5t-15 31.5q0 15 29 50.5t68.5 77t48.5 52.5q183 230 183 531q0 131 -20.5 235t-72.5 211q-58 119 -163 228q-2 3 -13 13.5t-16.5 16.5t-15 17.5t-15 20t-9.5 18.5t-4 19q0 19 16 35.5t35 16.5q70 0 196 -169q98 -131 146 -273t60 -314 +q2 -42 2 -64z" /> + <glyph glyph-name="uniF2C6" unicode="&#xf2c6;" horiz-adv-x="1792" +d="M1189 229l147 693q9 44 -10.5 63t-51.5 7l-864 -333q-29 -11 -39.5 -25t-2.5 -26.5t32 -19.5l221 -69l513 323q21 14 32 6q7 -5 -4 -15l-415 -375v0v0l-16 -228q23 0 45 22l108 104l224 -165q64 -36 81 38zM1792 640q0 -182 -71 -348t-191 -286t-286 -191t-348 -71 +t-348 71t-286 191t-191 286t-71 348t71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> + <glyph glyph-name="uniF2C7" unicode="&#xf2c7;" horiz-adv-x="1024" +d="M640 192q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 60 35 110t93 71v907h128v-907q58 -21 93 -71t35 -110zM768 192q0 77 -34 144t-94 112v768q0 80 -56 136t-136 56t-136 -56t-56 -136v-768q-60 -45 -94 -112t-34 -144q0 -133 93.5 -226.5t226.5 -93.5t226.5 93.5 +t93.5 226.5zM896 192q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 182 128 313v711q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5v-711q128 -131 128 -313zM1024 768v-128h-192v128h192zM1024 1024v-128h-192v128h192zM1024 1280v-128h-192 +v128h192z" /> + <glyph glyph-name="uniF2C8" unicode="&#xf2c8;" horiz-adv-x="1024" +d="M640 192q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 60 35 110t93 71v651h128v-651q58 -21 93 -71t35 -110zM768 192q0 77 -34 144t-94 112v768q0 80 -56 136t-136 56t-136 -56t-56 -136v-768q-60 -45 -94 -112t-34 -144q0 -133 93.5 -226.5t226.5 -93.5t226.5 93.5 +t93.5 226.5zM896 192q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 182 128 313v711q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5v-711q128 -131 128 -313zM1024 768v-128h-192v128h192zM1024 1024v-128h-192v128h192zM1024 1280v-128h-192 +v128h192z" /> + <glyph glyph-name="uniF2C9" unicode="&#xf2c9;" horiz-adv-x="1024" +d="M640 192q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 60 35 110t93 71v395h128v-395q58 -21 93 -71t35 -110zM768 192q0 77 -34 144t-94 112v768q0 80 -56 136t-136 56t-136 -56t-56 -136v-768q-60 -45 -94 -112t-34 -144q0 -133 93.5 -226.5t226.5 -93.5t226.5 93.5 +t93.5 226.5zM896 192q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 182 128 313v711q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5v-711q128 -131 128 -313zM1024 768v-128h-192v128h192zM1024 1024v-128h-192v128h192zM1024 1280v-128h-192 +v128h192z" /> + <glyph glyph-name="uniF2CA" unicode="&#xf2ca;" horiz-adv-x="1024" +d="M640 192q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 60 35 110t93 71v139h128v-139q58 -21 93 -71t35 -110zM768 192q0 77 -34 144t-94 112v768q0 80 -56 136t-136 56t-136 -56t-56 -136v-768q-60 -45 -94 -112t-34 -144q0 -133 93.5 -226.5t226.5 -93.5t226.5 93.5 +t93.5 226.5zM896 192q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 182 128 313v711q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5v-711q128 -131 128 -313zM1024 768v-128h-192v128h192zM1024 1024v-128h-192v128h192zM1024 1280v-128h-192 +v128h192z" /> + <glyph glyph-name="uniF2CB" unicode="&#xf2cb;" horiz-adv-x="1024" +d="M640 192q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 79 56 
135.5t136 56.5t136 -56.5t56 -135.5zM768 192q0 77 -34 144t-94 112v768q0 80 -56 136t-136 56t-136 -56t-56 -136v-768q-60 -45 -94 -112t-34 -144q0 -133 93.5 -226.5t226.5 -93.5t226.5 93.5t93.5 226.5z +M896 192q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 182 128 313v711q0 133 93.5 226.5t226.5 93.5t226.5 -93.5t93.5 -226.5v-711q128 -131 128 -313zM1024 768v-128h-192v128h192zM1024 1024v-128h-192v128h192zM1024 1280v-128h-192v128h192z" /> + <glyph glyph-name="uniF2CC" unicode="&#xf2cc;" horiz-adv-x="1920" +d="M1433 1287q10 -10 10 -23t-10 -23l-626 -626q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 23l44 44q-72 91 -81.5 207t46.5 215q-74 71 -176 71q-106 0 -181 -75t-75 -181v-1280h-256v1280q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5q106 0 201 -41 +t166 -115q94 39 197 24.5t185 -79.5l44 44q10 10 23 10t23 -10zM1344 1024q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1600 896q-26 0 -45 19t-19 45t19 45t45 19t45 -19t19 -45t-19 -45t-45 -19zM1856 1024q26 0 45 -19t19 -45t-19 -45t-45 -19 +t-45 19t-19 45t19 45t45 19zM1216 896q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1408 832q0 26 19 45t45 19t45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45zM1728 896q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1088 768 +q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1344 640q-26 0 -45 19t-19 45t19 45t45 19t45 -19t19 -45t-19 -45t-45 -19zM1600 768q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1216 512q-26 0 -45 19t-19 45t19 45t45 19t45 -19 +t19 -45t-19 -45t-45 -19zM1472 640q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1088 512q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1344 512q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1216 384 +q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1088 256q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19z" /> + <glyph glyph-name="uniF2CD" unicode="&#xf2cd;" horiz-adv-x="1792" +d="M1664 448v-192q0 -169 -128 -286v-194q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v118q-63 -22 -128 -22h-768q-65 0 -128 22v-110q0 -17 -9.5 -28.5t-22.5 -11.5h-64q-13 0 -22.5 11.5t-9.5 28.5v186q-128 117 -128 286v192h1536zM704 864q0 -14 -9 -23t-23 -9t-23 9 +t-9 23t9 23t23 9t23 -9t9 -23zM768 928q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM704 992q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM832 992q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM768 1056q0 -14 -9 -23t-23 -9t-23 9 +t-9 23t9 23t23 9t23 -9t9 -23zM704 1120q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM1792 608v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h96v640q0 106 75 181t181 75q108 0 184 -78q46 19 98 12t93 -39l22 22q11 11 22 0l42 -42 +q11 -11 0 -22l-314 -314q-11 -11 -22 0l-42 42q-11 11 0 22l22 22q-36 46 -40.5 104t23.5 108q-37 35 -88 35q-53 0 -90.5 -37.5t-37.5 -90.5v-640h1504q14 0 23 -9t9 -23zM896 1056q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM832 1120q0 -14 -9 -23t-23 -9 +t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM768 1184q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM960 1120q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM896 1184q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM832 1248q0 -14 -9 -23 +t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM1024 1184q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM960 1248q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23zM1088 1248q0 -14 -9 -23t-23 -9t-23 9t-9 23t9 23t23 9t23 -9t9 -23z" /> + <glyph glyph-name="uniF2CE" unicode="&#xf2ce;" +d="M994 344q0 
-86 -17 -197q-31 -215 -55 -313q-22 -90 -152 -90t-152 90q-24 98 -55 313q-17 110 -17 197q0 168 224 168t224 -168zM1536 768q0 -240 -134 -434t-350 -280q-8 -3 -15 3t-6 15q7 48 10 66q4 32 6 47q1 9 9 12q159 81 255.5 234t96.5 337q0 180 -91 330.5 +t-247 234.5t-337 74q-124 -7 -237 -61t-193.5 -140.5t-128 -202t-46.5 -240.5q1 -184 99 -336.5t257 -231.5q7 -3 9 -12q3 -21 6 -45q1 -9 5 -32.5t6 -35.5q1 -9 -6.5 -15t-15.5 -2q-148 58 -261 169.5t-173.5 264t-52.5 319.5q7 143 66 273.5t154.5 227t225 157.5t272.5 70 +q164 10 315.5 -46.5t261 -160.5t175 -250.5t65.5 -308.5zM994 800q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5zM1282 768q0 -122 -53.5 -228.5t-146.5 -177.5q-8 -6 -16 -2t-10 14q-6 52 -29 92q-7 10 3 20 +q58 54 91 127t33 155q0 111 -58.5 204t-157.5 141.5t-212 36.5q-133 -15 -229 -113t-109 -231q-10 -92 23.5 -176t98.5 -144q10 -10 3 -20q-24 -41 -29 -93q-2 -9 -10 -13t-16 2q-95 74 -148.5 183t-51.5 234q3 131 69 244t177 181.5t241 74.5q144 7 268 -60t196.5 -187.5 +t72.5 -263.5z" /> + <glyph glyph-name="uniF2D0" unicode="&#xf2d0;" horiz-adv-x="1792" +d="M256 128h1280v768h-1280v-768zM1792 1248v-1216q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2D1" unicode="&#xf2d1;" horiz-adv-x="1792" +d="M1792 224v-192q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v192q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2D2" unicode="&#xf2d2;" horiz-adv-x="2048" +d="M256 0h768v512h-768v-512zM1280 512h512v768h-768v-256h96q66 0 113 -47t47 -113v-352zM2048 1376v-960q0 -66 -47 -113t-113 -47h-608v-352q0 -66 -47 -113t-113 -47h-960q-66 0 -113 47t-47 113v960q0 66 47 113t113 47h608v352q0 66 47 113t113 47h960q66 0 113 -47 +t47 -113z" /> + <glyph glyph-name="uniF2D3" unicode="&#xf2d3;" horiz-adv-x="1792" +d="M1175 215l146 146q10 10 10 23t-10 23l-233 233l233 233q10 10 10 23t-10 23l-146 146q-10 10 -23 10t-23 -10l-233 -233l-233 233q-10 10 -23 10t-23 -10l-146 -146q-10 -10 -10 -23t10 -23l233 -233l-233 -233q-10 -10 -10 -23t10 -23l146 -146q10 -10 23 -10t23 10 +l233 233l233 -233q10 -10 23 -10t23 10zM1792 1248v-1216q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2D4" unicode="&#xf2d4;" horiz-adv-x="1792" +d="M1257 425l-146 -146q-10 -10 -23 -10t-23 10l-169 169l-169 -169q-10 -10 -23 -10t-23 10l-146 146q-10 10 -10 23t10 23l169 169l-169 169q-10 10 -10 23t10 23l146 146q10 10 23 10t23 -10l169 -169l169 169q10 10 23 10t23 -10l146 -146q10 -10 10 -23t-10 -23 +l-169 -169l169 -169q10 -10 10 -23t-10 -23zM256 128h1280v1024h-1280v-1024zM1792 1248v-1216q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1472q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2D5" unicode="&#xf2d5;" horiz-adv-x="1792" +d="M1070 358l306 564h-654l-306 -564h654zM1792 640q0 -182 -71 -348t-191 -286t-286 -191t-348 -71t-348 71t-286 191t-191 286t-71 348t71 348t191 286t286 191t348 71t348 -71t286 -191t191 -286t71 -348z" /> + <glyph glyph-name="uniF2D6" unicode="&#xf2d6;" horiz-adv-x="1794" +d="M1291 1060q-15 17 -35 8.5t-26 -28.5t5 -38q14 -17 40 -14.5t34 20.5t-18 52zM895 814q-8 -8 -19.5 -8t-18.5 8q-8 8 -8 19t8 18q7 8 18.5 8t19.5 -8q7 -7 7 -18t-7 -19zM1060 740l-35 -35q-12 -13 -29.5 -13t-30.5 13l-38 38q-12 13 -12 30t12 30l35 35q12 12 29.5 12 +t30.5 -12l38 -39q12 -12 12 -29.5t-12 -29.5zM951 870q-7 -8 -18.5 -8t-19.5 8q-7 8 -7 19t7 19q8 8 19 8t19 -8t8 -19t-8 -19zM1354 968q-34 -64 -107.5 -85.5t-127.5 16.5q-38 28 -61 66.5t-21 87.5t39 
92t75.5 53t70.5 -5t70 -51q2 -2 13 -12.5t14.5 -13.5t13 -13.5 +t12.5 -15.5t10 -15.5t8.5 -18t4 -18.5t1 -21t-5 -22t-9.5 -24zM1555 486q3 20 -8.5 34.5t-27.5 21.5t-33 17t-23 20q-40 71 -84 98.5t-113 11.5q19 13 40 18.5t33 4.5l12 -1q2 45 -34 90q6 20 6.5 40.5t-2.5 30.5l-3 10q43 24 71 65t34 91q10 84 -43 150.5t-137 76.5 +q-60 7 -114 -18.5t-82 -74.5q-30 -51 -33.5 -101t14.5 -87t43.5 -64t56.5 -42q-45 4 -88 36t-57 88q-28 108 32 222q-16 21 -29 32q-50 0 -89 -19q19 24 42 37t36 14l13 1q0 50 -13 78q-10 21 -32.5 28.5t-47 -3.5t-37.5 -40q2 4 4 7q-7 -28 -6.5 -75.5t19 -117t48.5 -122.5 +q-25 -14 -47 -36q-35 -16 -85.5 -70.5t-84.5 -101.5l-33 -46q-90 -34 -181 -125.5t-75 -162.5q1 -16 11 -27q-15 -12 -30 -30q-21 -25 -21 -54t21.5 -40t63.5 6q41 19 77 49.5t55 60.5q-2 2 -6.5 5t-20.5 7.5t-33 3.5q23 5 51 12.5t40 10t27.5 6t26 4t23.5 0.5q14 -7 22 34 +q7 37 7 90q0 102 -40 150q106 -103 101 -219q-1 -29 -15 -50t-27 -27l-13 -6q-4 -7 -19 -32t-26 -45.5t-26.5 -52t-25 -61t-17 -63t-6.5 -66.5t10 -63q-35 54 -37 80q-22 -24 -34.5 -39t-33.5 -42t-30.5 -46t-16.5 -41t-0.5 -38t25.5 -27q45 -25 144 64t190.5 221.5 +t122.5 228.5q86 52 145 115.5t86 119.5q47 -93 154 -178q104 -83 167 -80q39 2 46 43zM1794 640q0 -182 -71 -348t-191 -286t-286.5 -191t-348.5 -71t-348.5 71t-286.5 191t-191 286t-71 348t71 348t191 286t286.5 191t348.5 71t348.5 -71t286.5 -191t191 -286t71 -348z" /> + <glyph glyph-name="uniF2D7" unicode="&#xf2d7;" +d="M518 1353v-655q103 -1 191.5 1.5t125.5 5.5l37 3q68 2 90.5 24.5t39.5 94.5l33 142h103l-14 -322l7 -319h-103l-29 127q-15 68 -45 93t-84 26q-87 8 -352 8v-556q0 -78 43.5 -115.5t133.5 -37.5h357q35 0 59.5 2t55 7.5t54 18t48.5 32t46 50.5t39 73l93 216h89 +q-6 -37 -31.5 -252t-30.5 -276q-146 5 -263.5 8t-162.5 4h-44h-628l-376 -12v102l127 25q67 13 91.5 37t25.5 79l8 643q3 402 -8 645q-2 61 -25.5 84t-91.5 36l-127 24v102l376 -12h702q139 0 374 27q-6 -68 -14 -194.5t-12 -219.5l-5 -92h-93l-32 124q-31 121 -74 179.5 +t-113 58.5h-548q-28 0 -35.5 -8.5t-7.5 -30.5z" /> + <glyph glyph-name="uniF2D8" unicode="&#xf2d8;" +d="M922 739v-182q0 -4 0.5 -15t0 -15l-1.5 -12t-3.5 -11.5t-6.5 -7.5t-11 -5.5t-16 -1.5v309q9 0 16 -1t11 -5t6.5 -5.5t3.5 -9.5t1 -10.5v-13.5v-14zM1238 643v-121q0 -1 0.5 -12.5t0 -15.5t-2.5 -11.5t-7.5 -10.5t-13.5 -3q-9 0 -14 9q-4 10 -4 165v7v8.5v9t1.5 8.5l3.5 7 +t5 5.5t8 1.5q6 0 10 -1.5t6.5 -4.5t4 -6t2 -8.5t0.5 -8v-9.5v-9zM180 407h122v472h-122v-472zM614 407h106v472h-159l-28 -221q-20 148 -32 221h-158v-472h107v312l45 -312h76l43 319v-319zM1039 712q0 67 -5 90q-3 16 -11 28.5t-17 20.5t-25 14t-26.5 8.5t-31 4t-29 1.5 +h-29.5h-12h-91v-472h56q169 -1 197 24.5t25 180.5q-1 62 -1 100zM1356 515v133q0 29 -2 45t-9.5 33.5t-24.5 25t-46 7.5q-46 0 -77 -34v154h-117v-472h110l7 30q30 -36 77 -36q50 0 66 30.5t16 83.5zM1536 1248v-1216q0 -66 -47 -113t-113 -47h-1216q-66 0 -113 47t-47 113 +v1216q0 66 47 113t113 47h1216q66 0 113 -47t47 -113z" /> + <glyph glyph-name="uniF2D9" unicode="&#xf2d9;" horiz-adv-x="2176" +d="M1143 -197q-6 1 -11 4q-13 8 -36 23t-86 65t-116.5 104.5t-112 140t-89.5 172.5q-17 3 -175 37q66 -213 235 -362t391 -184zM502 409l168 -28q-25 76 -41 167.5t-19 145.5l-4 53q-84 -82 -121 -224q5 -65 17 -114zM612 1018q-43 -64 -77 -148q44 46 74 68zM2049 584 +q0 161 -62 307t-167.5 252t-250.5 168.5t-304 62.5q-147 0 -281 -52.5t-240 -148.5q-30 -58 -45 -160q60 51 143 83.5t158.5 43t143 13.5t108.5 -1l40 -3q33 -1 53 -15.5t24.5 -33t6.5 -37t-1 -28.5q-126 11 -227.5 0.5t-183 -43.5t-142.5 -71.5t-131 -98.5 +q4 -36 11.5 -92.5t35.5 -178t62 -179.5q123 -6 247.5 14.5t214.5 53.5t162.5 67t109.5 59l37 24q22 16 39.5 20.5t30.5 -5t17 -34.5q14 -97 -39 -121q-208 -97 -467 -134q-135 -20 -317 -16q41 -96 110 -176.5t137 
-127t130.5 -79t101.5 -43.5l39 -12q143 -23 263 15 +q195 99 314 289t119 418zM2123 621q-14 -135 -40 -212q-70 -208 -181.5 -346.5t-318.5 -253.5q-48 -33 -82 -44q-72 -26 -163 -16q-36 -3 -73 -3q-283 0 -504.5 173t-295.5 442q-1 0 -4 0.5t-5 0.5q-6 -50 2.5 -112.5t26 -115t36 -98t31.5 -71.5l14 -26q8 -12 54 -82 +q-71 38 -124.5 106.5t-78.5 140t-39.5 137t-17.5 107.5l-2 42q-5 2 -33.5 12.5t-48.5 18t-53 20.5t-57.5 25t-50 25.5t-42.5 27t-25 25.5q19 -10 50.5 -25.5t113 -45.5t145.5 -38l2 32q11 149 94 290q41 202 176 365q28 115 81 214q15 28 32 45t49 32q158 74 303.5 104 +t302 11t306.5 -97q220 -115 333 -336t87 -474z" /> + <glyph glyph-name="uniF2DA" unicode="&#xf2da;" horiz-adv-x="1792" +d="M1341 752q29 44 -6.5 129.5t-121.5 142.5q-58 39 -125.5 53.5t-118 4.5t-68.5 -37q-12 -23 -4.5 -28t42.5 -10q23 -3 38.5 -5t44.5 -9.5t56 -17.5q36 -13 67.5 -31.5t53 -37t40 -38.5t30.5 -38t22 -34.5t16.5 -28.5t12 -18.5t10.5 -6t11 9.5zM1704 178 +q-52 -127 -148.5 -220t-214.5 -141.5t-253 -60.5t-266 13.5t-251 91t-210 161.5t-141.5 235.5t-46.5 303.5q1 41 8.5 84.5t12.5 64t24 80.5t23 73q-51 -208 1 -397t173 -318t291 -206t346 -83t349 74.5t289 244.5q20 27 18 14q0 -4 -4 -14zM1465 627q0 -104 -40.5 -199 +t-108.5 -164t-162 -109.5t-198 -40.5t-198 40.5t-162 109.5t-108.5 164t-40.5 199t40.5 199t108.5 164t162 109.5t198 40.5t198 -40.5t162 -109.5t108.5 -164t40.5 -199zM1752 915q-65 147 -180.5 251t-253 153.5t-292 53.5t-301 -36.5t-275.5 -129t-220 -211.5t-131 -297 +t-10 -373q-49 161 -51.5 311.5t35.5 272.5t109 227t165.5 180.5t207 126t232 71t242.5 9t236 -54t216 -124.5t178 -197q33 -50 62 -121t31 -112zM1690 573q12 244 -136.5 416t-396.5 240q-8 0 -10 5t24 8q125 -4 230 -50t173 -120t116 -168.5t58.5 -199t-1 -208 +t-61.5 -197.5t-122.5 -167t-185 -117.5t-248.5 -46.5q108 30 201.5 80t174 123t129.5 176.5t55 225.5z" /> + <glyph glyph-name="uniF2DB" unicode="&#xf2db;" +d="M192 256v-128h-112q-16 0 -16 16v16h-48q-16 0 -16 16v32q0 16 16 16h48v16q0 16 16 16h112zM192 512v-128h-112q-16 0 -16 16v16h-48q-16 0 -16 16v32q0 16 16 16h48v16q0 16 16 16h112zM192 768v-128h-112q-16 0 -16 16v16h-48q-16 0 -16 16v32q0 16 16 16h48v16 +q0 16 16 16h112zM192 1024v-128h-112q-16 0 -16 16v16h-48q-16 0 -16 16v32q0 16 16 16h48v16q0 16 16 16h112zM192 1280v-128h-112q-16 0 -16 16v16h-48q-16 0 -16 16v32q0 16 16 16h48v16q0 16 16 16h112zM1280 1440v-1472q0 -40 -28 -68t-68 -28h-832q-40 0 -68 28 +t-28 68v1472q0 40 28 68t68 28h832q40 0 68 -28t28 -68zM1536 208v-32q0 -16 -16 -16h-48v-16q0 -16 -16 -16h-112v128h112q16 0 16 -16v-16h48q16 0 16 -16zM1536 464v-32q0 -16 -16 -16h-48v-16q0 -16 -16 -16h-112v128h112q16 0 16 -16v-16h48q16 0 16 -16zM1536 720v-32 +q0 -16 -16 -16h-48v-16q0 -16 -16 -16h-112v128h112q16 0 16 -16v-16h48q16 0 16 -16zM1536 976v-32q0 -16 -16 -16h-48v-16q0 -16 -16 -16h-112v128h112q16 0 16 -16v-16h48q16 0 16 -16zM1536 1232v-32q0 -16 -16 -16h-48v-16q0 -16 -16 -16h-112v128h112q16 0 16 -16v-16 +h48q16 0 16 -16z" /> + <glyph glyph-name="uniF2DC" unicode="&#xf2dc;" horiz-adv-x="1664" +d="M1566 419l-167 -33l186 -107q23 -13 29.5 -38.5t-6.5 -48.5q-14 -23 -39 -29.5t-48 6.5l-186 106l55 -160q13 -38 -12 -63.5t-60.5 -20.5t-48.5 42l-102 300l-271 156v-313l208 -238q16 -18 17 -39t-11 -36.5t-28.5 -25t-37 -5.5t-36.5 22l-112 128v-214q0 -26 -19 -45 +t-45 -19t-45 19t-19 45v214l-112 -128q-16 -18 -36.5 -22t-37 5.5t-28.5 25t-11 36.5t17 39l208 238v313l-271 -156l-102 -300q-13 -37 -48.5 -42t-60.5 20.5t-12 63.5l55 160l-186 -106q-23 -13 -48 -6.5t-39 29.5q-13 23 -6.5 48.5t29.5 38.5l186 107l-167 33 +q-29 6 -42 29t-8.5 46.5t25.5 40t50 10.5l310 -62l271 157l-271 157l-310 -62q-4 -1 -13 -1q-27 0 -44 18t-19 40t11 43t40 26l167 33l-186 107q-23 13 -29.5 
38.5t6.5 48.5t39 30t48 -7l186 -106l-55 160q-13 38 12 63.5t60.5 20.5t48.5 -42l102 -300l271 -156v313 +l-208 238q-16 18 -17 39t11 36.5t28.5 25t37 5.5t36.5 -22l112 -128v214q0 26 19 45t45 19t45 -19t19 -45v-214l112 128q16 18 36.5 22t37 -5.5t28.5 -25t11 -36.5t-17 -39l-208 -238v-313l271 156l102 300q13 37 48.5 42t60.5 -20.5t12 -63.5l-55 -160l186 106 +q23 13 48 6.5t39 -29.5q13 -23 6.5 -48.5t-29.5 -38.5l-186 -107l167 -33q27 -5 40 -26t11 -43t-19 -40t-44 -18q-9 0 -13 1l-310 62l-271 -157l271 -157l310 62q29 6 50 -10.5t25.5 -40t-8.5 -46.5t-42 -29z" /> + <glyph glyph-name="uniF2DD" unicode="&#xf2dd;" horiz-adv-x="1792" +d="M1473 607q7 118 -33 226.5t-113 189t-177 131t-221 57.5q-116 7 -225.5 -32t-192 -110.5t-135 -175t-59.5 -220.5q-7 -118 33 -226.5t113 -189t177.5 -131t221.5 -57.5q155 -9 293 59t224 195.5t94 283.5zM1792 1536l-349 -348q120 -117 180.5 -272t50.5 -321 +q-11 -183 -102 -339t-241 -255.5t-332 -124.5l-999 -132l347 347q-120 116 -180.5 271.5t-50.5 321.5q11 184 102 340t241.5 255.5t332.5 124.5q167 22 500 66t500 66z" /> + <glyph glyph-name="uniF2DE" unicode="&#xf2de;" horiz-adv-x="1792" +d="M948 508l163 -329h-51l-175 350l-171 -350h-49l179 374l-78 33l21 49l240 -102l-21 -50zM563 1100l304 -130l-130 -304l-304 130zM907 915l240 -103l-103 -239l-239 102zM1188 765l191 -81l-82 -190l-190 81zM1680 640q0 159 -62 304t-167.5 250.5t-250.5 167.5t-304 62 +t-304 -62t-250.5 -167.5t-167.5 -250.5t-62 -304t62 -304t167.5 -250.5t250.5 -167.5t304 -62t304 62t250.5 167.5t167.5 250.5t62 304zM1792 640q0 -182 -71 -348t-191 -286t-286 -191t-348 -71t-348 71t-286 191t-191 286t-71 348t71 348t191 286t286 191t348 71t348 -71 +t286 -191t191 -286t71 -348z" /> + <glyph glyph-name="uniF2E0" unicode="&#xf2e0;" horiz-adv-x="1920" +d="M1334 302q-4 24 -27.5 34t-49.5 10.5t-48.5 12.5t-25.5 38q-5 47 33 139.5t75 181t32 127.5q-14 101 -117 103q-45 1 -75 -16l-3 -2l-5 -2.5t-4.5 -2t-5 -2t-5 -0.5t-6 1.5t-6 3.5t-6.5 5q-3 2 -9 8.5t-9 9t-8.5 7.5t-9.5 7.5t-9.5 5.5t-11 4.5t-11.5 2.5q-30 5 -48 -3 +t-45 -31q-1 -1 -9 -8.5t-12.5 -11t-15 -10t-16.5 -5.5t-17 3q-54 27 -84 40q-41 18 -94 -5t-76 -65q-16 -28 -41 -98.5t-43.5 -132.5t-40 -134t-21.5 -73q-22 -69 18.5 -119t110.5 -46q30 2 50.5 15t38.5 46q7 13 79 199.5t77 194.5q6 11 21.5 18t29.5 0q27 -15 21 -53 +q-2 -18 -51 -139.5t-50 -132.5q-6 -38 19.5 -56.5t60.5 -7t55 49.5q4 8 45.5 92t81.5 163.5t46 88.5q20 29 41 28q29 0 25 -38q-2 -16 -65.5 -147.5t-70.5 -159.5q-12 -53 13 -103t74 -74q17 -9 51 -15.5t71.5 -8t62.5 14t20 48.5zM383 86q3 -15 -5 -27.5t-23 -15.5 +q-14 -3 -26.5 5t-15.5 23q-3 14 5 27t22 16t27 -5t16 -23zM953 -177q12 -17 8.5 -37.5t-20.5 -32.5t-37.5 -8t-32.5 21q-11 17 -7.5 37.5t20.5 32.5t37.5 8t31.5 -21zM177 635q-18 -27 -49.5 -33t-57.5 13q-26 18 -32 50t12 58q18 27 49.5 33t57.5 -12q26 -19 32 -50.5 +t-12 -58.5zM1467 -42q19 -28 13 -61.5t-34 -52.5t-60.5 -13t-51.5 34t-13 61t33 53q28 19 60.5 13t52.5 -34zM1579 562q69 -113 42.5 -244.5t-134.5 -207.5q-90 -63 -199 -60q-20 -80 -84.5 -127t-143.5 -44.5t-140 57.5q-12 -9 -13 -10q-103 -71 -225 -48.5t-193 126.5 +q-50 73 -53 164q-83 14 -142.5 70.5t-80.5 128t-2 152t81 138.5q-36 60 -38 128t24.5 125t79.5 98.5t121 50.5q32 85 99 148t146.5 91.5t168 17t159.5 -66.5q72 21 140 17.5t128.5 -36t104.5 -80t67.5 -115t17.5 -140.5q52 -16 87 -57t45.5 -89t-5.5 -99.5t-58 -87.5z +M455 1222q14 -20 9.5 -44.5t-24.5 -38.5q-19 -14 -43.5 -9.5t-37.5 24.5q-14 20 -9.5 44.5t24.5 38.5q19 14 43.5 9.5t37.5 -24.5zM614 1503q4 -16 -5 -30.5t-26 -18.5t-31 5.5t-18 26.5q-3 17 6.5 31t25.5 18q17 4 31 -5.5t17 -26.5zM1800 555q4 -20 -6.5 -37t-30.5 -21 +q-19 -4 -36 6.5t-21 30.5t6.5 37t30.5 22q20 4 36.5 -7.5t20.5 -30.5zM1136 1448q16 -27 8.5 
-58.5t-35.5 -47.5q-27 -16 -57.5 -8.5t-46.5 34.5q-16 28 -8.5 59t34.5 48t58 9t47 -36zM1882 792q4 -15 -4 -27.5t-23 -16.5q-15 -3 -27.5 5.5t-15.5 22.5q-3 15 5 28t23 16 +q14 3 26.5 -5t15.5 -23zM1691 1033q15 -22 10.5 -49t-26.5 -43q-22 -15 -49 -10t-42 27t-10 49t27 43t48.5 11t41.5 -28z" /> + <glyph glyph-name="uniF2E1" unicode="&#xf2e1;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E2" unicode="&#xf2e2;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E3" unicode="&#xf2e3;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E4" unicode="&#xf2e4;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E5" unicode="&#xf2e5;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E6" unicode="&#xf2e6;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E7" unicode="&#xf2e7;" horiz-adv-x="1792" + /> + <glyph glyph-name="_698" unicode="&#xf2e8;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2E9" unicode="&#xf2e9;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2EA" unicode="&#xf2ea;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2EB" unicode="&#xf2eb;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2EC" unicode="&#xf2ec;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2ED" unicode="&#xf2ed;" horiz-adv-x="1792" + /> + <glyph glyph-name="uniF2EE" unicode="&#xf2ee;" horiz-adv-x="1792" + /> + <glyph glyph-name="lessequal" unicode="&#xf500;" horiz-adv-x="1792" + /> + </font> +</defs></svg> diff --git a/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.ttf b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.ttf new file mode 100644 Binary files /dev/null and b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.ttf differ diff --git a/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.woff b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.woff new file mode 100644 Binary files /dev/null and b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.woff differ diff --git a/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.woff2 b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.woff2 new file mode 100644 Binary files /dev/null and b/test/fixtures/cache-tests/asset/fonts/fontawesome-webfont.woff2 differ diff --git a/test/fixtures/cache-tests/asset/liquid.browser.esm.mjs b/test/fixtures/cache-tests/asset/liquid.browser.esm.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/asset/liquid.browser.esm.mjs @@ -0,0 +1,2799 @@ +/* + * liquidjs@9.16.0, https://github.com/harttle/liquidjs + * (c) 2016-2020 harttle + * Released under the MIT License. + */ +class Drop { + valueOf () { + return undefined + } + + liquidMethodMissing (key) { + return undefined + } +} + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 + +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. + +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. 
+***************************************************************************** */ + +var __assign = function () { + __assign = Object.assign || function __assign (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i] + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p] + } + return t + } + return __assign.apply(this, arguments) +} + +const toStr = Object.prototype.toString +const toLowerCase = String.prototype.toLowerCase +/* + * Checks if value is classified as a String primitive or object. + * @param {any} value The value to check. + * @return {Boolean} Returns true if value is a string, else false. + */ +function isString (value) { + return toStr.call(value) === '[object String]' +} +function isFunction (value) { + return typeof value === 'function' +} +function stringify (value) { + value = toValue(value) + return isNil(value) ? '' : String(value) +} +function toValue (value) { + return value instanceof Drop ? value.valueOf() : value +} +function isNumber (value) { + return typeof value === 'number' +} +function toLiquid (value) { + if (value && isFunction(value.toLiquid)) { return toLiquid(value.toLiquid()) } + return value +} +function isNil (value) { + return value === null || value === undefined +} +function isArray (value) { + // be compatible with IE 8 + return toStr.call(value) === '[object Array]' +} +/* + * Iterates over own enumerable string keyed properties of an object and invokes iteratee for each property. + * The iteratee is invoked with three arguments: (value, key, object). + * Iteratee functions may exit iteration early by explicitly returning false. + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @return {Object} Returns object. + */ +function forOwn (object, iteratee) { + object = object || {} + for (const k in object) { + if (object.hasOwnProperty(k)) { + if (iteratee(object[k], k, object) === false) { break } + } + } + return object +} +function last (arr) { + return arr[arr.length - 1] +} +/* + * Checks if value is the language type of Object. + * (e.g. arrays, functions, objects, regexes, new Number(0), and new String('')) + * @param {any} value The value to check. + * @return {Boolean} Returns true if value is an object, else false. + */ +function isObject (value) { + const type = typeof value + return value !== null && (type === 'object' || type === 'function') +} +function range (start, stop, step = 1) { + const arr = [] + for (let i = start; i < stop; i += step) { + arr.push(i) + } + return arr +} +function padStart (str, length, ch = ' ') { + return pad(str, length, ch, (str, ch) => ch + str) +} +function padEnd (str, length, ch = ' ') { + return pad(str, length, ch, (str, ch) => str + ch) +} +function pad (str, length, ch, add) { + str = String(str) + let n = length - str.length + while (n-- > 0) { str = add(str, ch) } + return str +} +function identify (val) { + return val +} +function snakeCase (str) { + return str.replace(/(\w?)([A-Z])/g, (_, a, b) => (a ? a + '_' : '') + b.toLowerCase()) +} +function changeCase (str) { + const hasLowerCase = [...str].some(ch => ch >= 'a' && ch <= 'z') + return hasLowerCase ? str.toUpperCase() : str.toLowerCase() +} +function ellipsis (str, N) { + return str.length > N ? str.substr(0, N - 3) + '...' 
: str +} +// compare string in case-insensitive way, undefined values to the tail +function caseInsensitiveCompare (a, b) { + if (a == null && b == null) { return 0 } + if (a == null) { return 1 } + if (b == null) { return -1 } + a = toLowerCase.call(a) + b = toLowerCase.call(b) + if (a < b) { return -1 } + if (a > b) { return 1 } + return 0 +} + +class Node { + constructor (key, value, next, prev) { + this.key = key + this.value = value + this.next = next + this.prev = prev + } +} +class LRU { + constructor (limit, size = 0) { + this.limit = limit + this.size = size + this.cache = {} + this.head = new Node('HEAD', null, null, null) + this.tail = new Node('TAIL', null, null, null) + this.head.next = this.tail + this.tail.prev = this.head + } + + write (key, value) { + if (this.cache[key]) { + this.cache[key].value = value + } else { + const node = new Node(key, value, this.head.next, this.head) + this.head.next.prev = node + this.head.next = node + this.cache[key] = node + this.size++ + this.ensureLimit() + } + } + + read (key) { + if (!this.cache[key]) { return } + const { value } = this.cache[key] + this.remove(key) + this.write(key, value) + return value + } + + remove (key) { + const node = this.cache[key] + node.prev.next = node.next + node.next.prev = node.prev + delete this.cache[key] + this.size-- + } + + clear () { + this.head.next = this.tail + this.tail.prev = this.head + this.size = 0 + this.cache = {} + } + + ensureLimit () { + if (this.size > this.limit) { this.remove(this.tail.prev.key) } + } +} + +const defaultOptions = { + root: ['.'], + cache: undefined, + extname: '', + dynamicPartials: true, + jsTruthy: false, + trimTagRight: false, + trimTagLeft: false, + trimOutputRight: false, + trimOutputLeft: false, + greedy: true, + tagDelimiterLeft: '{%', + tagDelimiterRight: '%}', + outputDelimiterLeft: '{{', + outputDelimiterRight: '}}', + strictFilters: false, + strictVariables: false, + globals: {} +} +function normalize (options) { + options = options || {} + if (options.hasOwnProperty('root')) { + options.root = normalizeStringArray(options.root) + } + if (options.hasOwnProperty('cache')) { + let cache + if (typeof options.cache === 'number') { cache = options.cache > 0 ? new LRU(options.cache) : undefined } else if (typeof options.cache === 'object') { cache = options.cache } else { cache = options.cache ? 
new LRU(1024) : undefined } + options.cache = cache + } + return options +} +function applyDefault (options) { + return Object.assign({}, defaultOptions, options) +} +function normalizeStringArray (value) { + if (isArray(value)) { return value } + if (isString(value)) { return [value] } + return [] +} + +class Context { + constructor (env = {}, opts = defaultOptions, sync = false) { + this.scopes = [{}] + this.registers = {} + this.sync = sync + this.opts = opts + this.globals = opts.globals + this.environments = env + } + + getRegister (key, defaultValue = {}) { + return (this.registers[key] = this.registers[key] || defaultValue) + } + + setRegister (key, value) { + return (this.registers[key] = value) + } + + saveRegister (...keys) { + return keys.map(key => [key, this.getRegister(key)]) + } + + restoreRegister (keyValues) { + return keyValues.forEach(([key, value]) => this.setRegister(key, value)) + } + + getAll () { + return [this.globals, this.environments, ...this.scopes] + .reduce((ctx, val) => __assign(ctx, val), {}) + } + + get (paths) { + const scope = this.findScope(paths[0]) + return this.getFromScope(scope, paths) + } + + getFromScope (scope, paths) { + if (typeof paths === 'string') { paths = paths.split('.') } + return paths.reduce((scope, path) => { + scope = readProperty(scope, path) + if (isNil(scope) && this.opts.strictVariables) { + throw new TypeError(`undefined variable: ${path}`) + } + return scope + }, scope) + } + + push (ctx) { + return this.scopes.push(ctx) + } + + pop () { + return this.scopes.pop() + } + + bottom () { + return this.scopes[0] + } + + findScope (key) { + for (let i = this.scopes.length - 1; i >= 0; i--) { + const candidate = this.scopes[i] + if (key in candidate) { return candidate } + } + if (key in this.environments) { return this.environments } + return this.globals + } +} +function readProperty (obj, key) { + if (isNil(obj)) { return obj } + obj = toLiquid(obj) + if (obj instanceof Drop) { + if (isFunction(obj[key])) { return obj[key]() } + if (obj.hasOwnProperty(key)) { return obj[key] } + return obj.liquidMethodMissing(key) + } + if (key === 'size') { return readSize(obj) } + if (key === 'first') { return readFirst(obj) } + if (key === 'last') { return readLast(obj) } + return obj[key] +} +function readFirst (obj) { + if (isArray(obj)) { return obj[0] } + return obj.first +} +function readLast (obj) { + if (isArray(obj)) { return obj[obj.length - 1] } + return obj.last +} +function readSize (obj) { + if (isArray(obj) || isString(obj)) { return obj.length } + return obj.size +} + +function domResolve (root, path) { + const base = document.createElement('base') + base.href = root + const head = document.getElementsByTagName('head')[0] + head.insertBefore(base, head.firstChild) + const a = document.createElement('a') + a.href = path + const resolved = a.href + head.removeChild(base) + return resolved +} +function resolve (root, filepath, ext) { + if (root.length && last(root) !== '/') { root += '/' } + const url = domResolve(root, filepath) + return url.replace(/^(\w+:\/\/[^/]+)(\/[^?]+)/, (str, origin, path) => { + const last = path.split('/').pop() + if (/\.\w+$/.test(last)) { return str } + return origin + path + ext + }) +} +async function readFile (url) { + return new Promise((resolve, reject) => { + const xhr = new XMLHttpRequest() + xhr.onload = () => { + if (xhr.status >= 200 && xhr.status < 300) { + resolve(xhr.responseText) + } else { + reject(new Error(xhr.statusText)) + } + } + xhr.onerror = () => { + reject(new Error('An error 
occurred whilst receiving the response.')) + } + xhr.open('GET', url) + xhr.send() + }) +} +function readFileSync (url) { + const xhr = new XMLHttpRequest() + xhr.open('GET', url, false) + xhr.send() + if (xhr.status < 200 || xhr.status >= 300) { + throw new Error(xhr.statusText) + } + return xhr.responseText +} +async function exists (filepath) { + return true +} +function existsSync (filepath) { + return true +} + +var fs = /* #__PURE__ */Object.freeze({ + resolve: resolve, + readFile: readFile, + readFileSync: readFileSync, + exists: exists, + existsSync: existsSync +}) + +var TokenKind; +(function (TokenKind) { + TokenKind[TokenKind.Number = 1] = 'Number' + TokenKind[TokenKind.Literal = 2] = 'Literal' + TokenKind[TokenKind.Tag = 4] = 'Tag' + TokenKind[TokenKind.Output = 8] = 'Output' + TokenKind[TokenKind.HTML = 16] = 'HTML' + TokenKind[TokenKind.Filter = 32] = 'Filter' + TokenKind[TokenKind.Hash = 64] = 'Hash' + TokenKind[TokenKind.PropertyAccess = 128] = 'PropertyAccess' + TokenKind[TokenKind.Word = 256] = 'Word' + TokenKind[TokenKind.Range = 512] = 'Range' + TokenKind[TokenKind.Quoted = 1024] = 'Quoted' + TokenKind[TokenKind.Operator = 2048] = 'Operator' + TokenKind[TokenKind.Delimited = 12] = 'Delimited' +})(TokenKind || (TokenKind = {})) + +function isDelimitedToken (val) { + return !!(getKind(val) & TokenKind.Delimited) +} +function isOperatorToken (val) { + return getKind(val) === TokenKind.Operator +} +function isHTMLToken (val) { + return getKind(val) === TokenKind.HTML +} +function isOutputToken (val) { + return getKind(val) === TokenKind.Output +} +function isTagToken (val) { + return getKind(val) === TokenKind.Tag +} +function isQuotedToken (val) { + return getKind(val) === TokenKind.Quoted +} +function isLiteralToken (val) { + return getKind(val) === TokenKind.Literal +} +function isNumberToken (val) { + return getKind(val) === TokenKind.Number +} +function isPropertyAccessToken (val) { + return getKind(val) === TokenKind.PropertyAccess +} +function isWordToken (val) { + return getKind(val) === TokenKind.Word +} +function isRangeToken (val) { + return getKind(val) === TokenKind.Range +} +function getKind (val) { + return val ? 
val.kind : -1 +} + +var typeGuards = /* #__PURE__ */Object.freeze({ + isDelimitedToken: isDelimitedToken, + isOperatorToken: isOperatorToken, + isHTMLToken: isHTMLToken, + isOutputToken: isOutputToken, + isTagToken: isTagToken, + isQuotedToken: isQuotedToken, + isLiteralToken: isLiteralToken, + isNumberToken: isNumberToken, + isPropertyAccessToken: isPropertyAccessToken, + isWordToken: isWordToken, + isRangeToken: isRangeToken +}) + +// **DO NOT CHANGE THIS FILE** +// +// This file is generated by bin/character-gen.js +// bitmask character types to boost performance +const TYPES = [0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 4, 4, 4, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 2, 8, 0, 0, 0, 0, 8, 0, 0, 0, 64, 0, 65, 0, 0, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 0, 0, 2, 2, 2, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0] +const VARIABLE = 1 +const BLANK = 4 +const QUOTE = 8 +const INLINE_BLANK = 16 +const NUMBER = 32 +const SIGN = 64 +TYPES[160] = TYPES[5760] = TYPES[6158] = TYPES[8192] = TYPES[8193] = TYPES[8194] = TYPES[8195] = TYPES[8196] = TYPES[8197] = TYPES[8198] = TYPES[8199] = TYPES[8200] = TYPES[8201] = TYPES[8202] = TYPES[8232] = TYPES[8233] = TYPES[8239] = TYPES[8287] = TYPES[12288] = BLANK + +function whiteSpaceCtrl (tokens, options) { + options = Object.assign({ greedy: true }, options) + let inRaw = false + for (let i = 0; i < tokens.length; i++) { + const token = tokens[i] + if (!isDelimitedToken(token)) { continue } + if (!inRaw && token.trimLeft) { + trimLeft(tokens[i - 1], options.greedy) + } + if (isTagToken(token)) { + if (token.name === 'raw') { inRaw = true } else if (token.name === 'endraw') { inRaw = false } + } + if (!inRaw && token.trimRight) { + trimRight(tokens[i + 1], options.greedy) + } + } +} +function trimLeft (token, greedy) { + if (!token || !isHTMLToken(token)) { return } + const mask = greedy ? BLANK : INLINE_BLANK + while (TYPES[token.input.charCodeAt(token.end - 1 - token.trimRight)] & mask) { token.trimRight++ } +} +function trimRight (token, greedy) { + if (!token || !isHTMLToken(token)) { return } + const mask = greedy ? BLANK : INLINE_BLANK + while (TYPES[token.input.charCodeAt(token.begin + token.trimLeft)] & mask) { token.trimLeft++ } + if (token.input.charAt(token.begin + token.trimLeft) === '\n') { token.trimLeft++ } +} + +class Token { + constructor (kind, input, begin, end, file) { + this.kind = kind + this.input = input + this.begin = begin + this.end = end + this.file = file + } + + getText () { + return this.input.slice(this.begin, this.end) + } + + getPosition () { + let [row, col] = [1, 1] + for (let i = 0; i < this.begin; i++) { + if (this.input[i] === '\n') { + row++ + col = 1 + } else { col++ } + } + return [row, col] + } + + size () { + return this.end - this.begin + } +} + +class NumberToken extends Token { + constructor (whole, decimal) { + super(TokenKind.Number, whole.input, whole.begin, decimal ? 
decimal.end : whole.end, whole.file) + this.whole = whole + this.decimal = decimal + } +} + +// a word can be an identifier, a number, a keyword or a single-word-literal +class WordToken extends Token { + constructor (input, begin, end, file) { + super(TokenKind.Word, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.file = file + this.content = this.getText() + } + + isNumber (allowSign = false) { + const begin = allowSign && TYPES[this.input.charCodeAt(this.begin)] & SIGN + ? this.begin + 1 + : this.begin + for (let i = begin; i < this.end; i++) { + if (!(TYPES[this.input.charCodeAt(i)] & NUMBER)) { return false } + } + return true + } +} + +class EmptyDrop extends Drop { + equals (value) { + if (isString(value) || isArray(value)) { return value.length === 0 } + if (isObject(value)) { return Object.keys(value).length === 0 } + return false + } + + gt () { + return false + } + + geq () { + return false + } + + lt () { + return false + } + + leq () { + return false + } + + valueOf () { + return '' + } +} + +class BlankDrop extends EmptyDrop { + equals (value) { + if (value === false) { return true } + if (isNil(toValue(value))) { return true } + if (isString(value)) { return /^\s*$/.test(value) } + return super.equals(value) + } +} + +class NullDrop extends Drop { + equals (value) { + return isNil(toValue(value)) || value instanceof BlankDrop + } + + gt () { + return false + } + + geq () { + return false + } + + lt () { + return false + } + + leq () { + return false + } + + valueOf () { + return null + } +} + +const literalValues = { + true: true, + false: false, + nil: new NullDrop(), + null: new NullDrop(), + empty: new EmptyDrop(), + blank: new BlankDrop() +} + +class LiteralToken extends Token { + constructor (input, begin, end, file) { + super(TokenKind.Literal, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.file = file + this.literal = this.getText() + } +} + +const precedence = { + '==': 1, + '!=': 1, + '>': 1, + '<': 1, + '>=': 1, + '<=': 1, + contains: 1, + and: 0, + or: 0 +} +class OperatorToken extends Token { + constructor (input, begin, end, file) { + super(TokenKind.Operator, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.file = file + this.operator = this.getText() + } + + getPrecedence () { + return precedence[this.getText()] + } +} + +const rHex = /[\da-fA-F]/ +const rOct = /[0-7]/ +const escapeChar = { + b: '\b', + f: '\f', + n: '\n', + r: '\r', + t: '\t', + v: '\x0B' +} +function hexVal (c) { + const code = c.charCodeAt(0) + if (code >= 97) { return code - 87 } + if (code >= 65) { return code - 55 } + return code - 48 +} +function parseStringLiteral (str) { + let ret = '' + for (let i = 1; i < str.length - 1; i++) { + if (str[i] !== '\\') { + ret += str[i] + continue + } + if (escapeChar[str[i + 1]] !== undefined) { + ret += escapeChar[str[++i]] + } else if (str[i + 1] === 'u') { + let val = 0 + let j = i + 2 + while (j <= i + 5 && rHex.test(str[j])) { + val = val * 16 + hexVal(str[j++]) + } + i = j - 1 + ret += String.fromCharCode(val) + } else if (!rOct.test(str[i + 1])) { + ret += str[++i] + } else { + let j = i + 1 + let val = 0 + while (j <= i + 3 && rOct.test(str[j])) { + val = val * 8 + hexVal(str[j++]) + } + i = j - 1 + ret += String.fromCharCode(val) + } + } + return ret +} + +class PropertyAccessToken extends Token { + constructor (variable, props, end) { + super(TokenKind.PropertyAccess, variable.input, variable.begin, end, 
variable.file) + this.variable = variable + this.props = props + } + + getVariableAsText () { + if (this.variable instanceof WordToken) { + return this.variable.getText() + } else { + return parseStringLiteral(this.variable.getText()) + } + } +} + +class LiquidError extends Error { + constructor (err, token) { + super(err.message) + this.originalError = err + this.token = token + } + + update () { + const err = this.originalError + const context = mkContext(this.token) + this.message = mkMessage(err.message, this.token) + this.stack = this.message + '\n' + context + + '\n' + this.stack + '\nFrom ' + err.stack + } +} +class TokenizationError extends LiquidError { + constructor (message, token) { + super(new Error(message), token) + this.name = 'TokenizationError' + super.update() + } +} +class ParseError extends LiquidError { + constructor (err, token) { + super(err, token) + this.name = 'ParseError' + this.message = err.message + super.update() + } +} +class RenderError extends LiquidError { + constructor (err, tpl) { + super(err, tpl.token) + this.name = 'RenderError' + this.message = err.message + super.update() + } + + static is (obj) { + return obj instanceof RenderError + } +} +class AssertionError extends Error { + constructor (message) { + super(message) + this.name = 'AssertionError' + this.message = message + '' + } +} +function mkContext (token) { + const [line] = token.getPosition() + const lines = token.input.split('\n') + const begin = Math.max(line - 2, 1) + const end = Math.min(line + 3, lines.length) + const context = range(begin, end + 1) + .map(lineNumber => { + const indicator = (lineNumber === line) ? '>> ' : ' ' + const num = padStart(String(lineNumber), String(end).length) + const text = lines[lineNumber - 1] + return `${indicator}${num}| ${text}` + }) + .join('\n') + return context +} +function mkMessage (msg, token) { + if (token.file) { msg += `, file:${token.file}` } + const [line, col] = token.getPosition() + msg += `, line:${line}, col:${col}` + return msg +} + +function assert (predicate, message) { + if (!predicate) { + const msg = message ? 
message() : `expect ${predicate} to be true` + throw new AssertionError(msg) + } +} + +class FilterToken extends Token { + constructor (name, args, input, begin, end, file) { + super(TokenKind.Filter, input, begin, end, file) + this.name = name + this.args = args + } +} + +class HashToken extends Token { + constructor (input, begin, end, name, value, file) { + super(TokenKind.Hash, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.name = name + this.value = value + this.file = file + } +} + +class QuotedToken extends Token { + constructor (input, begin, end, file) { + super(TokenKind.Quoted, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.file = file + } +} + +class HTMLToken extends Token { + constructor (input, begin, end, file) { + super(TokenKind.HTML, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.file = file + this.trimLeft = 0 + this.trimRight = 0 + } + + getContent () { + return this.input.slice(this.begin + this.trimLeft, this.end - this.trimRight) + } +} + +class DelimitedToken extends Token { + constructor (kind, content, input, begin, end, trimLeft, trimRight, file) { + super(kind, input, begin, end, file) + this.trimLeft = false + this.trimRight = false + this.content = this.getText() + const tl = content[0] === '-' + const tr = last(content) === '-' + this.content = content + .slice(tl ? 1 : 0, tr ? -1 : content.length) + .trim() + this.trimLeft = tl || trimLeft + this.trimRight = tr || trimRight + } +} + +class TagToken extends DelimitedToken { + constructor (input, begin, end, options, file) { + const { trimTagLeft, trimTagRight, tagDelimiterLeft, tagDelimiterRight } = options + const value = input.slice(begin + tagDelimiterLeft.length, end - tagDelimiterRight.length) + super(TokenKind.Tag, value, input, begin, end, trimTagLeft, trimTagRight, file) + let nameEnd = 0 + while (TYPES[this.content.charCodeAt(nameEnd)] & VARIABLE) { nameEnd++ } + this.name = this.content.slice(0, nameEnd) + if (!this.name) { throw new TokenizationError('illegal tag syntax', this) } + let argsBegin = nameEnd + while (TYPES[this.content.charCodeAt(argsBegin)] & BLANK) { argsBegin++ } + this.args = this.content.slice(argsBegin) + } +} + +class RangeToken extends Token { + constructor (input, begin, end, lhs, rhs, file) { + super(TokenKind.Range, input, begin, end, file) + this.input = input + this.begin = begin + this.end = end + this.lhs = lhs + this.rhs = rhs + this.file = file + } +} + +class OutputToken extends DelimitedToken { + constructor (input, begin, end, options, file) { + const { trimOutputLeft, trimOutputRight, outputDelimiterLeft, outputDelimiterRight } = options + const value = input.slice(begin + outputDelimiterLeft.length, end - outputDelimiterRight.length) + super(TokenKind.Output, value, input, begin, end, trimOutputLeft, trimOutputRight, file) + } +} + +const trie = { + a: { n: { d: { end: true, needBoundary: true } } }, + o: { r: { end: true, needBoundary: true } }, + c: { o: { n: { t: { a: { i: { n: { s: { end: true, needBoundary: true } } } } } } } }, + '=': { '=': { end: true } }, + '!': { '=': { end: true } }, + '>': { end: true, '=': { end: true } }, + '<': { end: true, '=': { end: true } } +} +function matchOperator (str, begin, end = str.length) { + let node = trie + let i = begin + let info + while (node[str[i]] && i < end) { + node = node[str[i++]] + if (node.end) { info = node } + } + if (!info) { return -1 } + if (info.needBoundary && 
str.charCodeAt(i) & VARIABLE) { return -1 } + return i +} + +class Tokenizer { + constructor (input, file = '') { + this.input = input + this.file = file + this.p = 0 + this.N = input.length + } + + * readExpression () { + const operand = this.readValue() + if (!operand) { return } + yield operand + while (this.p < this.N) { + const operator = this.readOperator() + if (!operator) { return } + const operand = this.readValue() + if (!operand) { return } + yield operator + yield operand + } + } + + readOperator () { + this.skipBlank() + const end = matchOperator(this.input, this.p, this.p + 8) + if (end === -1) { return } + return new OperatorToken(this.input, this.p, (this.p = end), this.file) + } + + readFilters () { + const filters = [] + while (true) { + const filter = this.readFilter() + if (!filter) { return filters } + filters.push(filter) + } + } + + readFilter () { + this.skipBlank() + if (this.end()) { return null } + assert(this.peek() === '|', () => `unexpected token at ${this.snapshot()}`) + this.p++ + const begin = this.p + const name = this.readWord() + if (!name.size()) { return null } + const args = [] + this.skipBlank() + if (this.peek() === ':') { + do { + ++this.p + const arg = this.readFilterArg() + arg && args.push(arg) + while (this.p < this.N && this.peek() !== ',' && this.peek() !== '|') { ++this.p } + } while (this.peek() === ',') + } + return new FilterToken(name.getText(), args, this.input, begin, this.p, this.file) + } + + readFilterArg () { + const key = this.readValue() + if (!key) { return } + this.skipBlank() + if (this.peek() !== ':') { return key } + ++this.p + const value = this.readValue() + return [key.getText(), value] + } + + readTopLevelTokens (options = defaultOptions) { + const tokens = [] + while (this.p < this.N) { + const token = this.readTopLevelToken(options) + tokens.push(token) + } + whiteSpaceCtrl(tokens, options) + return tokens + } + + readTopLevelToken (options) { + const { tagDelimiterLeft, outputDelimiterLeft } = options + if (this.matchWord(tagDelimiterLeft)) { return this.readTagToken(options) } + if (this.matchWord(outputDelimiterLeft)) { return this.readOutputToken(options) } + return this.readHTMLToken(options) + } + + readHTMLToken (options) { + const begin = this.p + while (this.p < this.N) { + const { tagDelimiterLeft, outputDelimiterLeft } = options + if (this.matchWord(tagDelimiterLeft)) { break } + if (this.matchWord(outputDelimiterLeft)) { break } + ++this.p + } + return new HTMLToken(this.input, begin, this.p, this.file) + } + + readTagToken (options) { + const { file, input } = this + const { tagDelimiterRight } = options + const begin = this.p + if (this.readTo(tagDelimiterRight) === -1) { + this.mkError(`tag ${this.snapshot(begin)} not closed`, begin) + } + return new TagToken(input, begin, this.p, options, file) + } + + readOutputToken (options) { + const { file, input } = this + const { outputDelimiterRight } = options + const begin = this.p + if (this.readTo(outputDelimiterRight) === -1) { + this.mkError(`output ${this.snapshot(begin)} not closed`, begin) + } + return new OutputToken(input, begin, this.p, options, file) + } + + mkError (msg, begin) { + throw new TokenizationError(msg, new WordToken(this.input, begin, this.N, this.file)) + } + + snapshot (begin = this.p) { + return JSON.stringify(ellipsis(this.input.slice(begin), 16)) + } + + readWord () { + this.skipBlank() + const begin = this.p + while (this.peekType() & VARIABLE) { ++this.p } + return new WordToken(this.input, begin, this.p, this.file) + } + + 
readHashes () { + const hashes = [] + while (true) { + const hash = this.readHash() + if (!hash) { return hashes } + hashes.push(hash) + } + } + + readHash () { + this.skipBlank() + if (this.peek() === ',') { ++this.p } + const begin = this.p + const name = this.readWord() + if (!name.size()) { return } + let value + this.skipBlank() + if (this.peek() === ':') { + ++this.p + value = this.readValue() + } + return new HashToken(this.input, begin, this.p, name, value, this.file) + } + + remaining () { + return this.input.slice(this.p) + } + + advance (i = 1) { + this.p += i + } + + end () { + return this.p >= this.N + } + + readTo (end) { + while (this.p < this.N) { + ++this.p + if (this.reverseMatchWord(end)) { return this.p } + } + return -1 + } + + readValue () { + const value = this.readQuoted() || this.readRange() + if (value) { return value } + if (this.peek() === '[') { + this.p++ + const prop = this.readQuoted() + if (!prop) { return } + if (this.peek() !== ']') { return } + this.p++ + return new PropertyAccessToken(prop, [], this.p) + } + const variable = this.readWord() + if (!variable.size()) { return } + let isNumber = variable.isNumber(true) + const props = [] + while (true) { + if (this.peek() === '[') { + isNumber = false + this.p++ + const prop = this.readValue() || new WordToken(this.input, this.p, this.p, this.file) + this.readTo(']') + props.push(prop) + } else if (this.peek() === '.' && this.peek(1) !== '.') { // skip range syntax + this.p++ + const prop = this.readWord() + if (!prop.size()) { break } + if (!prop.isNumber()) { isNumber = false } + props.push(prop) + } else { break } + } + if (!props.length && literalValues.hasOwnProperty(variable.content)) { + return new LiteralToken(this.input, variable.begin, variable.end, this.file) + } + if (isNumber) { return new NumberToken(variable, props[0]) } + return new PropertyAccessToken(variable, props, this.p) + } + + readRange () { + this.skipBlank() + const begin = this.p + if (this.peek() !== '(') { return } + ++this.p + const lhs = this.readValueOrThrow() + this.p += 2 + const rhs = this.readValueOrThrow() + ++this.p + return new RangeToken(this.input, begin, this.p, lhs, rhs, this.file) + } + + readValueOrThrow () { + const value = this.readValue() + assert(value, () => `unexpected token ${this.snapshot()}, value expected`) + return value + } + + readQuoted () { + this.skipBlank() + const begin = this.p + if (!(this.peekType() & QUOTE)) { return } + ++this.p + let escaped = false + while (this.p < this.N) { + ++this.p + if (this.input[this.p - 1] === this.input[begin] && !escaped) { break } + if (escaped) { escaped = false } else if (this.input[this.p - 1] === '\\') { escaped = true } + } + return new QuotedToken(this.input, begin, this.p, this.file) + } + + readFileName () { + const begin = this.p + while (!(this.peekType() & BLANK) && this.peek() !== ',' && this.p < this.N) { this.p++ } + return new WordToken(this.input, begin, this.p, this.file) + } + + matchWord (word) { + for (let i = 0; i < word.length; i++) { + if (word[i] !== this.input[this.p + i]) { return false } + } + return true + } + + reverseMatchWord (word) { + for (let i = 0; i < word.length; i++) { + if (word[word.length - 1 - i] !== this.input[this.p - 1 - i]) { return false } + } + return true + } + + peekType (n = 0) { + return TYPES[this.input.charCodeAt(this.p + n)] + } + + peek (n = 0) { + return this.input[this.p + n] + } + + skipBlank () { + while (this.peekType() & BLANK) { ++this.p } + } +} + +class Emitter { + constructor () { + this.html = 
'' + this.break = false + this.continue = false + } + + write (html) { + this.html += html + } +} + +class Render { + * renderTemplates (templates, ctx, emitter = new Emitter()) { + for (const tpl of templates) { + try { + const html = yield tpl.render(ctx, emitter) + html && emitter.write(html) + if (emitter.break || emitter.continue) { break } + } catch (e) { + const err = RenderError.is(e) ? e : new RenderError(e, tpl) + throw err + } + } + return emitter.html + } +} + +class ParseStream { + constructor (tokens, parseToken) { + this.handlers = {} + this.stopRequested = false + this.tokens = tokens + this.parseToken = parseToken + } + + on (name, cb) { + this.handlers[name] = cb + return this + } + + trigger (event, arg) { + const h = this.handlers[event] + return h ? (h(arg), true) : false + } + + start () { + this.trigger('start') + let token + while (!this.stopRequested && (token = this.tokens.shift())) { + if (this.trigger('token', token)) { continue } + if (isTagToken(token) && this.trigger(`tag:${token.name}`, token)) { + continue + } + const template = this.parseToken(token, this.tokens) + this.trigger('template', template) + } + if (!this.stopRequested) { this.trigger('end') } + return this + } + + stop () { + this.stopRequested = true + return this + } +} + +class TemplateImpl { + constructor (token) { + this.token = token + } +} + +function isComparable (arg) { + return arg && isFunction(arg.equals) +} + +function isTruthy (val, ctx) { + return !isFalsy(val, ctx) +} +function isFalsy (val, ctx) { + if (ctx.opts.jsTruthy) { + return !val + } else { + return val === false || undefined === val || val === null + } +} + +const operatorImpls = { + '==': (l, r) => { + if (isComparable(l)) { return l.equals(r) } + if (isComparable(r)) { return r.equals(l) } + return l === r + }, + '!=': (l, r) => { + if (isComparable(l)) { return !l.equals(r) } + if (isComparable(r)) { return !r.equals(l) } + return l !== r + }, + '>': (l, r) => { + if (isComparable(l)) { return l.gt(r) } + if (isComparable(r)) { return r.lt(l) } + return l > r + }, + '<': (l, r) => { + if (isComparable(l)) { return l.lt(r) } + if (isComparable(r)) { return r.gt(l) } + return l < r + }, + '>=': (l, r) => { + if (isComparable(l)) { return l.geq(r) } + if (isComparable(r)) { return r.leq(l) } + return l >= r + }, + '<=': (l, r) => { + if (isComparable(l)) { return l.leq(r) } + if (isComparable(r)) { return r.geq(l) } + return l <= r + }, + contains: (l, r) => { + return l && isFunction(l.indexOf) ? 
l.indexOf(r) > -1 : false + }, + and: (l, r, ctx) => isTruthy(l, ctx) && isTruthy(r, ctx), + or: (l, r, ctx) => isTruthy(l, ctx) || isTruthy(r, ctx) +} + +class Expression { + constructor (str) { + this.operands = [] + const tokenizer = new Tokenizer(str) + this.postfix = toPostfix(tokenizer.readExpression()) + } + + evaluate (ctx) { + for (const token of this.postfix) { + if (isOperatorToken(token)) { + const r = this.operands.pop() + const l = this.operands.pop() + const result = evalOperatorToken(token, l, r, ctx) + this.operands.push(result) + } else { + this.operands.push(evalToken(token, ctx)) + } + } + return this.operands[0] + } + + * value (ctx) { + return toValue(this.evaluate(ctx)) + } +} +function evalToken (token, ctx) { + assert(ctx, () => 'unable to evaluate: context not defined') + if (isPropertyAccessToken(token)) { + const variable = token.getVariableAsText() + const props = token.props.map(prop => evalToken(prop, ctx)) + return ctx.get([variable, ...props]) + } + if (isRangeToken(token)) { return evalRangeToken(token, ctx) } + if (isLiteralToken(token)) { return evalLiteralToken(token) } + if (isNumberToken(token)) { return evalNumberToken(token) } + if (isWordToken(token)) { return token.getText() } + if (isQuotedToken(token)) { return evalQuotedToken(token) } +} +function evalNumberToken (token) { + const str = token.whole.content + '.' + (token.decimal ? token.decimal.content : '') + return Number(str) +} +function evalQuotedToken (token) { + return parseStringLiteral(token.getText()) +} +function evalOperatorToken (token, lhs, rhs, ctx) { + const impl = operatorImpls[token.operator] + return impl(lhs, rhs, ctx) +} +function evalLiteralToken (token) { + return literalValues[token.literal] +} +function evalRangeToken (token, ctx) { + const low = evalToken(token.lhs, ctx) + const high = evalToken(token.rhs, ctx) + return range(+low, +high + 1) +} +function * toPostfix (tokens) { + const ops = [] + for (const token of tokens) { + if (isOperatorToken(token)) { + while (ops.length && ops[ops.length - 1].getPrecedence() > token.getPrecedence()) { + yield ops.pop() + } + ops.push(token) + } else { yield token } + } + while (ops.length) { + yield ops.pop() + } +} + +/** + * Key-Value Pairs Representing Tag Arguments + * Example: + * For the markup `, foo:'bar', coo:2 reversed %}`, + * hash['foo'] === 'bar' + * hash['coo'] === 2 + * hash['reversed'] === undefined + */ +class Hash { + constructor (markup) { + this.hash = {} + const tokenizer = new Tokenizer(markup) + for (const hash of tokenizer.readHashes()) { + this.hash[hash.name.content] = hash.value + } + } + + * render (ctx) { + const hash = {} + for (const key of Object.keys(this.hash)) { + hash[key] = yield evalToken(this.hash[key], ctx) + } + return hash + } +} + +function createResolvedThenable (value) { + const ret = { + then: (resolve) => resolve(value), + catch: () => ret + } + return ret +} +function createRejectedThenable (err) { + const ret = { + then: (resolve, reject) => { + if (reject) { return reject(err) } + return ret + }, + catch: (reject) => reject(err) + } + return ret +} +function isThenable (val) { + return val && isFunction(val.then) +} +function isAsyncIterator (val) { + return val && isFunction(val.next) && isFunction(val.throw) && isFunction(val.return) +} +// convert an async iterator to a thenable (Promise compatible) +function toThenable (val) { + if (isThenable(val)) { return val } + if (isAsyncIterator(val)) { return reduce() } + return createResolvedThenable(val) + function reduce (prev) { + 
let state + try { + state = val.next(prev) + } catch (err) { + return createRejectedThenable(err) + } + if (state.done) { return createResolvedThenable(state.value) } + return toThenable(state.value).then(reduce, err => { + let state + try { + state = val.throw(err) + } catch (e) { + return createRejectedThenable(e) + } + if (state.done) { return createResolvedThenable(state.value) } + return reduce(state.value) + }) + } +} +function toPromise (val) { + return Promise.resolve(toThenable(val)) +} +// get the value of async iterator in synchronous manner +function toValue$1 (val) { + let ret + toThenable(val) + .then((x) => { + ret = x + return createResolvedThenable(ret) + }) + .catch((err) => { + throw err + }) + return ret +} + +class Tag extends TemplateImpl { + constructor (token, tokens, liquid) { + super(token) + this.name = token.name + const impl = liquid.tags.get(token.name) + this.impl = Object.create(impl) + this.impl.liquid = liquid + if (this.impl.parse) { + this.impl.parse(token, tokens) + } + } + + * render (ctx, emitter) { + const hash = yield new Hash(this.token.args).render(ctx) + const impl = this.impl + if (isFunction(impl.render)) { return yield impl.render(ctx, emitter, hash) } + } +} +Tag.impls = {} + +function isKeyValuePair (arr) { + return isArray(arr) +} + +class Filter { + constructor (name, impl, args) { + this.name = name + this.impl = impl || identify + this.args = args + } + + * render (value, context) { + const argv = [] + for (const arg of this.args) { + if (isKeyValuePair(arg)) { argv.push([arg[0], yield evalToken(arg[1], context)]) } else { argv.push(yield evalToken(arg, context)) } + } + return yield this.impl.apply({ context }, [value, ...argv]) + } +} + +class Value { + /** + * @param str the value to be valuated, eg.: "foobar" | truncate: 3 + */ + constructor (str, filterMap) { + this.filterMap = filterMap + this.filters = [] + const tokenizer = new Tokenizer(str) + this.initial = tokenizer.readValue() + this.filters = tokenizer.readFilters().map(({ name, args }) => new Filter(name, this.filterMap.get(name), args)) + } + + * value (ctx) { + let val = yield evalToken(this.initial, ctx) + for (const filter of this.filters) { + val = yield filter.render(val, ctx) + } + return val + } +} + +class Output extends TemplateImpl { + constructor (token, filters) { + super(token) + this.value = new Value(token.content, filters) + } + + * render (ctx, emitter) { + const val = yield this.value.value(ctx) + emitter.write(stringify(toValue(val))) + } +} + +class HTML extends TemplateImpl { + constructor (token) { + super(token) + this.str = token.getContent() + } + + * render (ctx, emitter) { + emitter.write(this.str) + } +} + +class Parser { + constructor (liquid) { + this.liquid = liquid + } + + parse (tokens) { + let token + const templates = [] + while ((token = tokens.shift())) { + templates.push(this.parseToken(token, tokens)) + } + return templates + } + + parseToken (token, remainTokens) { + try { + if (isTagToken(token)) { + return new Tag(token, remainTokens, this.liquid) + } + if (isOutputToken(token)) { + return new Output(token, this.liquid.filters) + } + return new HTML(token) + } catch (e) { + throw new ParseError(e, token) + } + } + + parseStream (tokens) { + return new ParseStream(tokens, (token, tokens) => this.parseToken(token, tokens)) + } +} + +var assign = { + parse: function (token) { + const tokenizer = new Tokenizer(token.args) + this.key = tokenizer.readWord().content + tokenizer.skipBlank() + assert(tokenizer.peek() === '=', () => `illegal 
token ${token.getText()}`) + tokenizer.advance() + this.value = tokenizer.remaining() + }, + render: function * (ctx) { + ctx.bottom()[this.key] = yield this.liquid._evalValue(this.value, ctx) + } +} + +function toEnumerable (val) { + if (isArray(val)) { return val } + if (isString(val) && val.length > 0) { return [val] } + if (isObject(val)) { return Object.keys(val).map((key) => [key, val[key]]) } + return [] +} +function toArray (val) { + if (isArray(val)) { return val } + return [val] +} + +class ForloopDrop extends Drop { + constructor (length) { + super() + this.i = 0 + this.length = length + } + + next () { + this.i++ + } + + index0 () { + return this.i + } + + index () { + return this.i + 1 + } + + first () { + return this.i === 0 + } + + last () { + return this.i === this.length - 1 + } + + rindex () { + return this.length - this.i + } + + rindex0 () { + return this.length - this.i - 1 + } + + valueOf () { + return JSON.stringify(this) + } +} + +var For = { + type: 'block', + parse: function (token, remainTokens) { + const toknenizer = new Tokenizer(token.args) + const variable = toknenizer.readWord() + const inStr = toknenizer.readWord() + const collection = toknenizer.readValue() + assert(variable.size() && inStr.content === 'in' && collection, () => `illegal tag: ${token.getText()}`) + this.variable = variable.content + this.collection = collection + this.hash = new Hash(toknenizer.remaining()) + this.templates = [] + this.elseTemplates = [] + let p + const stream = this.liquid.parser.parseStream(remainTokens) + .on('start', () => (p = this.templates)) + .on('tag:else', () => (p = this.elseTemplates)) + .on('tag:endfor', () => stream.stop()) + .on('template', (tpl) => p.push(tpl)) + .on('end', () => { + throw new Error(`tag ${token.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx, emitter) { + const r = this.liquid.renderer + let collection = toEnumerable(yield evalToken(this.collection, ctx)) + if (!collection.length) { + yield r.renderTemplates(this.elseTemplates, ctx, emitter) + return + } + const hash = yield this.hash.render(ctx) + const offset = hash.offset || 0 + const limit = (hash.limit === undefined) ? 
collection.length : hash.limit + collection = collection.slice(offset, offset + limit) + if ('reversed' in hash) { collection.reverse() } + const scope = { forloop: new ForloopDrop(collection.length) } + ctx.push(scope) + for (const item of collection) { + scope[this.variable] = item + yield r.renderTemplates(this.templates, ctx, emitter) + if (emitter.break) { + emitter.break = false + break + } + emitter.continue = false + scope.forloop.next() + } + ctx.pop() + } +} + +var capture = { + parse: function (tagToken, remainTokens) { + const tokenizer = new Tokenizer(tagToken.args) + this.variable = readVariableName(tokenizer) + assert(this.variable, () => `${tagToken.args} not valid identifier`) + this.templates = [] + const stream = this.liquid.parser.parseStream(remainTokens) + stream.on('tag:endcapture', () => stream.stop()) + .on('template', (tpl) => this.templates.push(tpl)) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx) { + const r = this.liquid.renderer + const html = yield r.renderTemplates(this.templates, ctx) + ctx.bottom()[this.variable] = html + } +} +function readVariableName (tokenizer) { + const word = tokenizer.readWord().content + if (word) { return word } + const quoted = tokenizer.readQuoted() + if (quoted) { return evalQuotedToken(quoted) } +} + +var Case = { + parse: function (tagToken, remainTokens) { + this.cond = tagToken.args + this.cases = [] + this.elseTemplates = [] + let p = [] + const stream = this.liquid.parser.parseStream(remainTokens) + .on('tag:when', (token) => { + this.cases.push({ + val: token.args, + templates: p = [] + }) + }) + .on('tag:else', () => (p = this.elseTemplates)) + .on('tag:endcase', () => stream.stop()) + .on('template', (tpl) => p.push(tpl)) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx, emitter) { + const r = this.liquid.renderer + const cond = yield new Expression(this.cond).value(ctx) + for (let i = 0; i < this.cases.length; i++) { + const branch = this.cases[i] + const val = yield new Expression(branch.val).value(ctx) + if (val === cond) { + yield r.renderTemplates(branch.templates, ctx, emitter) + return + } + } + yield r.renderTemplates(this.elseTemplates, ctx, emitter) + } +} + +var comment = { + parse: function (tagToken, remainTokens) { + const stream = this.liquid.parser.parseStream(remainTokens) + stream + .on('token', (token) => { + if (token.name === 'endcomment') { stream.stop() } + }) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + } +} + +var BlockMode; +(function (BlockMode) { + /* store rendered html into blocks */ + BlockMode[BlockMode.OUTPUT = 0] = 'OUTPUT' + /* output rendered html directly */ + BlockMode[BlockMode.STORE = 1] = 'STORE' +})(BlockMode || (BlockMode = {})) +var BlockMode$1 = BlockMode + +var include = { + parse: function (token) { + const args = token.args + const tokenizer = new Tokenizer(args) + this.file = this.liquid.options.dynamicPartials + ? 
tokenizer.readValue() + : tokenizer.readFileName() + assert(this.file, () => `illegal argument "${token.args}"`) + const begin = tokenizer.p + const withStr = tokenizer.readWord() + if (withStr.content === 'with') { + tokenizer.skipBlank() + if (tokenizer.peek() !== ':') { + this.withVar = tokenizer.readValue() + } else { tokenizer.p = begin } + } else { tokenizer.p = begin } + this.hash = new Hash(tokenizer.remaining()) + }, + render: function * (ctx, emitter) { + const { liquid, hash, withVar, file } = this + const { renderer } = liquid + const filepath = ctx.opts.dynamicPartials + ? (isQuotedToken(file) + ? yield renderer.renderTemplates(liquid.parse(evalQuotedToken(file)), ctx) + : yield evalToken(file, ctx)) + : file.getText() + assert(filepath, () => `illegal filename "${file.getText()}":"${filepath}"`) + const saved = ctx.saveRegister('blocks', 'blockMode') + ctx.setRegister('blocks', {}) + ctx.setRegister('blockMode', BlockMode$1.OUTPUT) + const scope = yield hash.render(ctx) + if (withVar) { scope[filepath] = evalToken(withVar, ctx) } + const templates = yield liquid._parseFile(filepath, ctx.opts, ctx.sync) + ctx.push(scope) + yield renderer.renderTemplates(templates, ctx, emitter) + ctx.pop() + ctx.restoreRegister(saved) + } +} + +var render = { + parse: function (token) { + const args = token.args + const tokenizer = new Tokenizer(args) + this.file = this.liquid.options.dynamicPartials + ? tokenizer.readValue() + : tokenizer.readFileName() + assert(this.file, () => `illegal argument "${token.args}"`) + while (!tokenizer.end()) { + tokenizer.skipBlank() + const begin = tokenizer.p + const keyword = tokenizer.readWord() + if (keyword.content === 'with' || keyword.content === 'for') { + tokenizer.skipBlank() + if (tokenizer.peek() !== ':') { + const value = tokenizer.readValue() + if (value) { + const beforeAs = tokenizer.p + const asStr = tokenizer.readWord() + let alias + if (asStr.content === 'as') { alias = tokenizer.readWord() } else { tokenizer.p = beforeAs } + this[keyword.content] = { value, alias: alias && alias.content } + tokenizer.skipBlank() + if (tokenizer.peek() === ',') { tokenizer.advance() } + continue + } + } + } + tokenizer.p = begin + break + } + this.hash = new Hash(tokenizer.remaining()) + }, + render: function * (ctx, emitter) { + const { liquid, file, hash } = this + const { renderer } = liquid + const filepath = ctx.opts.dynamicPartials + ? (isQuotedToken(file) + ? 
yield renderer.renderTemplates(liquid.parse(evalQuotedToken(file)), ctx) + : evalToken(file, ctx)) + : file.getText() + assert(filepath, () => `illegal filename "${file.getText()}":"${filepath}"`) + const childCtx = new Context({}, ctx.opts, ctx.sync) + const scope = yield hash.render(ctx) + if (this.with) { + const { value, alias } = this.with + scope[alias || filepath] = evalToken(value, ctx) + } + childCtx.push(scope) + if (this.for) { + const { value, alias } = this.for + let collection = evalToken(value, ctx) + collection = toEnumerable(collection) + scope.forloop = new ForloopDrop(collection.length) + for (const item of collection) { + scope[alias] = item + const templates = yield liquid._parseFile(filepath, childCtx.opts, childCtx.sync) + yield renderer.renderTemplates(templates, childCtx, emitter) + scope.forloop.next() + } + } else { + const templates = yield liquid._parseFile(filepath, childCtx.opts, childCtx.sync) + yield renderer.renderTemplates(templates, childCtx, emitter) + } + } +} + +var decrement = { + parse: function (token) { + const tokenizer = new Tokenizer(token.args) + this.variable = tokenizer.readWord().content + }, + render: function (context, emitter) { + const scope = context.environments + if (!isNumber(scope[this.variable])) { + scope[this.variable] = 0 + } + emitter.write(stringify(--scope[this.variable])) + } +} + +var cycle = { + parse: function (tagToken) { + const tokenizer = new Tokenizer(tagToken.args) + const group = tokenizer.readValue() + tokenizer.skipBlank() + this.candidates = [] + if (group) { + if (tokenizer.peek() === ':') { + this.group = group + tokenizer.advance() + } else { this.candidates.push(group) } + } + while (!tokenizer.end()) { + const value = tokenizer.readValue() + if (value) { this.candidates.push(value) } + tokenizer.readTo(',') + } + assert(this.candidates.length, () => `empty candidates: ${tagToken.getText()}`) + }, + render: function (ctx, emitter) { + const group = evalToken(this.group, ctx) + const fingerprint = `cycle:${group}:` + this.candidates.join(',') + const groups = ctx.getRegister('cycle') + let idx = groups[fingerprint] + if (idx === undefined) { + idx = groups[fingerprint] = 0 + } + const candidate = this.candidates[idx] + idx = (idx + 1) % this.candidates.length + groups[fingerprint] = idx + const html = evalToken(candidate, ctx) + emitter.write(html) + } +} + +var If = { + parse: function (tagToken, remainTokens) { + this.branches = [] + this.elseTemplates = [] + let p + const stream = this.liquid.parser.parseStream(remainTokens) + .on('start', () => this.branches.push({ + cond: tagToken.args, + templates: (p = []) + })) + .on('tag:elsif', (token) => { + this.branches.push({ + cond: token.args, + templates: p = [] + }) + }) + .on('tag:else', () => (p = this.elseTemplates)) + .on('tag:endif', () => stream.stop()) + .on('template', (tpl) => p.push(tpl)) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx, emitter) { + const r = this.liquid.renderer + for (const branch of this.branches) { + const cond = yield new Expression(branch.cond).value(ctx) + if (isTruthy(cond, ctx)) { + yield r.renderTemplates(branch.templates, ctx, emitter) + return + } + } + yield r.renderTemplates(this.elseTemplates, ctx, emitter) + } +} + +var increment = { + parse: function (token) { + const tokenizer = new Tokenizer(token.args) + this.variable = tokenizer.readWord().content + }, + render: function (context, emitter) { + const scope = context.environments 
+ if (!isNumber(scope[this.variable])) { + scope[this.variable] = 0 + } + const val = scope[this.variable] + scope[this.variable]++ + emitter.write(stringify(val)) + } +} + +var layout = { + parse: function (token, remainTokens) { + const tokenizer = new Tokenizer(token.args) + const file = this.liquid.options.dynamicPartials ? tokenizer.readValue() : tokenizer.readFileName() + assert(file, () => `illegal argument "${token.args}"`) + this.file = file + this.hash = new Hash(tokenizer.remaining()) + this.tpls = this.liquid.parser.parse(remainTokens) + }, + render: function * (ctx, emitter) { + const { liquid, hash, file } = this + const { renderer } = liquid + const filepath = ctx.opts.dynamicPartials + ? (isQuotedToken(file) + ? yield renderer.renderTemplates(liquid.parse(evalQuotedToken(file)), ctx) + : evalToken(this.file, ctx)) + : file.getText() + assert(filepath, () => `illegal filename "${file.getText()}":"${filepath}"`) + // render the remaining tokens immediately + ctx.setRegister('blockMode', BlockMode$1.STORE) + const blocks = ctx.getRegister('blocks') + const html = yield renderer.renderTemplates(this.tpls, ctx) + if (blocks[''] === undefined) { blocks[''] = html } + const templates = yield liquid._parseFile(filepath, ctx.opts, ctx.sync) + ctx.push(yield hash.render(ctx)) + ctx.setRegister('blockMode', BlockMode$1.OUTPUT) + const partial = yield renderer.renderTemplates(templates, ctx) + ctx.pop() + emitter.write(partial) + } +} + +var block = { + parse: function (token, remainTokens) { + const match = /\w+/.exec(token.args) + this.block = match ? match[0] : '' + this.tpls = [] + const stream = this.liquid.parser.parseStream(remainTokens) + .on('tag:endblock', () => stream.stop()) + .on('template', (tpl) => this.tpls.push(tpl)) + .on('end', () => { + throw new Error(`tag ${token.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx, emitter) { + const blocks = ctx.getRegister('blocks') + const childDefined = blocks[this.block] + const r = this.liquid.renderer + const html = childDefined !== undefined + ? 
childDefined + : yield r.renderTemplates(this.tpls, ctx) + if (ctx.getRegister('blockMode', BlockMode$1.OUTPUT) === BlockMode$1.STORE) { + blocks[this.block] = html + return + } + emitter.write(html) + } +} + +var raw = { + parse: function (tagToken, remainTokens) { + this.tokens = [] + const stream = this.liquid.parser.parseStream(remainTokens) + stream + .on('token', (token) => { + if (token.name === 'endraw') { stream.stop() } else { this.tokens.push(token) } + }) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + }, + render: function () { + return this.tokens.map((token) => token.getText()).join('') + } +} + +class TablerowloopDrop extends ForloopDrop { + constructor (length, cols) { + super(length) + this.length = length + this.cols = cols + } + + row () { + return Math.floor(this.i / this.cols) + 1 + } + + col0 () { + return (this.i % this.cols) + } + + col () { + return this.col0() + 1 + } + + col_first () { + return this.col0() === 0 + } + + col_last () { + return this.col() === this.cols + } +} + +var tablerow = { + parse: function (tagToken, remainTokens) { + const tokenizer = new Tokenizer(tagToken.args) + this.variable = tokenizer.readWord() + tokenizer.skipBlank() + const tmp = tokenizer.readWord() + assert(tmp && tmp.content === 'in', () => `illegal tag: ${tagToken.getText()}`) + this.collection = tokenizer.readValue() + this.hash = new Hash(tokenizer.remaining()) + this.templates = [] + let p + const stream = this.liquid.parser.parseStream(remainTokens) + .on('start', () => (p = this.templates)) + .on('tag:endtablerow', () => stream.stop()) + .on('template', (tpl) => p.push(tpl)) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx, emitter) { + let collection = toEnumerable(yield evalToken(this.collection, ctx)) + const hash = yield this.hash.render(ctx) + const offset = hash.offset || 0 + const limit = (hash.limit === undefined) ? collection.length : hash.limit + collection = collection.slice(offset, offset + limit) + const cols = hash.cols || collection.length + const r = this.liquid.renderer + const tablerowloop = new TablerowloopDrop(collection.length, cols) + const scope = { tablerowloop } + ctx.push(scope) + for (let idx = 0; idx < collection.length; idx++, tablerowloop.next()) { + scope[this.variable.content] = collection[idx] + if (tablerowloop.col0() === 0) { + if (tablerowloop.row() !== 1) { emitter.write('</tr>') } + emitter.write(`<tr class="row${tablerowloop.row()}">`) + } + emitter.write(`<td class="col${tablerowloop.col()}">`) + yield r.renderTemplates(this.templates, ctx, emitter) + emitter.write('</td>') + } + if (collection.length) { emitter.write('</tr>') } + ctx.pop() + } +} + +var unless = { + parse: function (tagToken, remainTokens) { + this.templates = [] + this.elseTemplates = [] + let p + const stream = this.liquid.parser.parseStream(remainTokens) + .on('start', () => { + p = this.templates + this.cond = tagToken.args + }) + .on('tag:else', () => (p = this.elseTemplates)) + .on('tag:endunless', () => stream.stop()) + .on('template', (tpl) => p.push(tpl)) + .on('end', () => { + throw new Error(`tag ${tagToken.getText()} not closed`) + }) + stream.start() + }, + render: function * (ctx, emitter) { + const r = this.liquid.renderer + const cond = yield new Expression(this.cond).value(ctx) + yield (isFalsy(cond, ctx) + ? 
r.renderTemplates(this.templates, ctx, emitter) + : r.renderTemplates(this.elseTemplates, ctx, emitter)) + } +} + +var Break = { + render: function (ctx, emitter) { + emitter.break = true + } +} + +var Continue = { + render: function (ctx, emitter) { + emitter.continue = true + } +} + +const tags = { + assign, for: For, capture, case: Case, comment, include, render, decrement, increment, cycle, if: If, layout, block, raw, tablerow, unless, break: Break, continue: Continue +} + +const escapeMap = { + '&': '&amp;', + '<': '&lt;', + '>': '&gt;', + '"': '&#34;', + "'": '&#39;' +} +const unescapeMap = { + '&amp;': '&', + '&lt;': '<', + '&gt;': '>', + '&#34;': '"', + '&#39;': "'" +} +function escape (str) { + return stringify(str).replace(/&|<|>|"|'/g, m => escapeMap[m]) +} +function unescape (str) { + return String(str).replace(/&(amp|lt|gt|#34|#39);/g, m => unescapeMap[m]) +} +function escapeOnce (str) { + return escape(unescape(str)) +} +function newlineToBr (v) { + return v.replace(/\n/g, '<br/>') +} +function stripHtml (v) { + return v.replace(/<script.*?<\/script>|<!--.*?-->|<style.*?<\/style>|<.*?>/g, '') +} + +const abs = Math.abs +const atLeast = Math.max +const atMost = Math.min +const ceil = Math.ceil +const dividedBy = (v, arg) => v / arg +const floor = Math.floor +const minus = (v, arg) => v - arg +const modulo = (v, arg) => v % arg +const times = (v, arg) => v * arg +function round (v, arg = 0) { + const amp = Math.pow(10, arg) + return Math.round(v * amp) / amp +} +function plus (v, arg) { + return Number(v) + Number(arg) +} +function sortNatural (input, property) { + if (!input || !input.sort) { return [] } + if (property !== undefined) { + return [...input].sort((lhs, rhs) => caseInsensitiveCompare(lhs[property], rhs[property])) + } + return [...input].sort(caseInsensitiveCompare) +} + +const urlDecode = (x) => x.split('+').map(decodeURIComponent).join(' ') +const urlEncode = (x) => x.split(' ').map(encodeURIComponent).join('+') + +const join = (v, arg) => v.join(arg === undefined ? ' ' : arg) +const last$1 = (v) => isArray(v) ? last(v) : '' +const first = (v) => isArray(v) ? v[0] : '' +const reverse = (v) => [...v].reverse() +function sort (arr, property) { + const getValue = (obj) => property ? this.context.getFromScope(obj, property.split('.')) : obj + return toArray(arr).sort((lhs, rhs) => { + lhs = getValue(lhs) + rhs = getValue(rhs) + return lhs < rhs ? -1 : (lhs > rhs ? 1 : 0) + }) +} +const size = (v) => (v && v.length) || 0 +function map (arr, property) { + return toArray(arr).map(obj => this.context.getFromScope(obj, property.split('.'))) +} +function concat (v, arg) { + return toArray(v).concat(arg) +} +function slice (v, begin, length = 1) { + begin = begin < 0 ? v.length + begin : begin + return v.slice(begin, begin + length) +} +function where (arr, property, expected) { + return toArray(arr).filter(obj => { + const value = this.context.getFromScope(obj, String(property).split('.')) + return expected === undefined ? 
isTruthy(value, this.context) : value === expected + }) +} +function uniq (arr) { + const u = {} + return (arr || []).filter(val => { + if (u.hasOwnProperty(String(val))) { return false } + u[String(val)] = true + return true + }) +} + +const rFormat = /%([-_0^#:]+)?(\d+)?([EO])?(.)/ +const monthNames = [ + 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', + 'September', 'October', 'November', 'December' +] +const dayNames = [ + 'Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday' +] +const monthNamesShort = monthNames.map(abbr) +const dayNamesShort = dayNames.map(abbr) +const suffixes = { + 1: 'st', + 2: 'nd', + 3: 'rd', + default: 'th' +} +function abbr (str) { + return str.slice(0, 3) +} +// prototype extensions +function daysInMonth (d) { + const feb = isLeapYear(d) ? 29 : 28 + return [31, feb, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] +} +function getDayOfYear (d) { + let num = 0 + for (let i = 0; i < d.getMonth(); ++i) { + num += daysInMonth(d)[i] + } + return num + d.getDate() +} +function getWeekOfYear (d, startDay) { + // Skip to startDay of this week + const now = getDayOfYear(d) + (startDay - d.getDay()) + // Find the first startDay of the year + const jan1 = new Date(d.getFullYear(), 0, 1) + const then = (7 - jan1.getDay() + startDay) + return String(Math.floor((now - then) / 7) + 1) +} +function isLeapYear (d) { + const year = d.getFullYear() + return !!((year & 3) === 0 && (year % 100 || (year % 400 === 0 && year))) +} +function getSuffix (d) { + const str = d.getDate().toString() + const index = parseInt(str.slice(-1)) + return suffixes[index] || suffixes.default +} +function century (d) { + return parseInt(d.getFullYear().toString().substring(0, 2), 10) +} +// default to 0 +const padWidths = { + d: 2, + e: 2, + H: 2, + I: 2, + j: 3, + k: 2, + l: 2, + L: 3, + m: 2, + M: 2, + S: 2, + U: 2, + W: 2 +} +// default to '0' +const padChars = { + a: ' ', + A: ' ', + b: ' ', + B: ' ', + c: ' ', + e: ' ', + k: ' ', + l: ' ', + p: ' ', + P: ' ' +} +const formatCodes = { + a: (d) => dayNamesShort[d.getDay()], + A: (d) => dayNames[d.getDay()], + b: (d) => monthNamesShort[d.getMonth()], + B: (d) => monthNames[d.getMonth()], + c: (d) => d.toLocaleString(), + C: (d) => century(d), + d: (d) => d.getDate(), + e: (d) => d.getDate(), + H: (d) => d.getHours(), + I: (d) => String(d.getHours() % 12 || 12), + j: (d) => getDayOfYear(d), + k: (d) => d.getHours(), + l: (d) => String(d.getHours() % 12 || 12), + L: (d) => d.getMilliseconds(), + m: (d) => d.getMonth() + 1, + M: (d) => d.getMinutes(), + N: (d, opts) => { + const width = Number(opts.width) || 9 + const str = String(d.getMilliseconds()).substr(0, width) + return padEnd(str, width, '0') + }, + p: (d) => (d.getHours() < 12 ? 'AM' : 'PM'), + P: (d) => (d.getHours() < 12 ? 'am' : 'pm'), + q: (d) => getSuffix(d), + s: (d) => Math.round(d.valueOf() / 1000), + S: (d) => d.getSeconds(), + u: (d) => d.getDay() || 7, + U: (d) => getWeekOfYear(d, 0), + w: (d) => d.getDay(), + W: (d) => getWeekOfYear(d, 1), + x: (d) => d.toLocaleDateString(), + X: (d) => d.toLocaleTimeString(), + y: (d) => d.getFullYear().toString().substring(2, 4), + Y: (d) => d.getFullYear(), + z: (d, opts) => { + const offset = d.getTimezoneOffset() + const nOffset = Math.abs(offset) + const h = Math.floor(nOffset / 60) + const m = nOffset % 60 + return (offset > 0 ? '-' : '+') + + padStart(h, 2, '0') + + (opts.flags[':'] ? 
':' : '') + + padStart(m, 2, '0') + }, + t: () => '\t', + n: () => '\n', + '%': () => '%' +} +formatCodes.h = formatCodes.b +function strftime (d, formatStr) { + let output = '' + let remaining = formatStr + let match + while ((match = rFormat.exec(remaining))) { + output += remaining.slice(0, match.index) + remaining = remaining.slice(match.index + match[0].length) + output += format(d, match) + } + return output + remaining +} +function format (d, match) { + const [input, flagStr = '', width, modifier, conversion] = match + const convert = formatCodes[conversion] + if (!convert) { return input } + const flags = {} + for (const flag of flagStr) { flags[flag] = true } + let ret = String(convert(d, { flags, width, modifier })) + let padChar = padChars[conversion] || '0' + let padWidth = width || padWidths[conversion] || 0 + if (flags['^']) { ret = ret.toUpperCase() } else if (flags['#']) { ret = changeCase(ret) } + if (flags._) { padChar = ' ' } else if (flags['0']) { padChar = '0' } + if (flags['-']) { padWidth = 0 } + return padStart(ret, padWidth, padChar) +} + +function date (v, arg) { + let date = v + if (v === 'now' || v === 'today') { + date = new Date() + } else if (isNumber(v)) { + date = new Date(v * 1000) + } else if (isString(v)) { + date = /^\d+$/.test(v) ? new Date(+v * 1000) : new Date(v) + } + return isValidDate(date) ? strftime(date, arg) : v +} +function isValidDate (date) { + return date instanceof Date && !isNaN(date.getTime()) +} + +function Default (v, arg) { + if (isArray(v) || isString(v)) { return v.length ? v : arg } + return isFalsy(toValue(v), this.context) ? arg : v +} +function json (v) { + return JSON.stringify(v) +} + +/** + * String related filters + * + * * prefer stringify() to String() since `undefined`, `null` should eval '' + */ +function append (v, arg) { + assert(arg !== undefined, () => 'append expect 2 arguments') + return stringify(v) + stringify(arg) +} +function prepend (v, arg) { + assert(arg !== undefined, () => 'prepend expect 2 arguments') + return stringify(arg) + stringify(v) +} +function lstrip (v) { + return stringify(v).replace(/^\s+/, '') +} +function downcase (v) { + return stringify(v).toLowerCase() +} +function upcase (str) { + return stringify(str).toUpperCase() +} +function remove (v, arg) { + return stringify(v).split(arg).join('') +} +function removeFirst (v, l) { + return stringify(v).replace(l, '') +} +function rstrip (str) { + return stringify(str).replace(/\s+$/, '') +} +function split (v, arg) { + return stringify(v).split(arg) +} +function strip (v) { + return stringify(v).trim() +} +function stripNewlines (v) { + return stringify(v).replace(/\n/g, '') +} +function capitalize (str) { + str = stringify(str) + return str.charAt(0).toUpperCase() + str.slice(1) +} +function replace (v, pattern, replacement) { + return stringify(v).split(pattern).join(replacement) +} +function replaceFirst (v, arg1, arg2) { + return stringify(v).replace(arg1, arg2) +} +function truncate (v, l = 50, o = '...') { + v = stringify(v) + if (v.length <= l) { return v } + return v.substr(0, l - o.length) + o +} +function truncatewords (v, l = 15, o = '...') { + const arr = v.split(/\s+/) + let ret = arr.slice(0, l).join(' ') + if (arr.length >= l) { ret += o } + return ret +} + +var builtinFilters = /* #__PURE__ */Object.freeze({ + escape: escape, + escapeOnce: escapeOnce, + newlineToBr: newlineToBr, + stripHtml: stripHtml, + abs: abs, + atLeast: atLeast, + atMost: atMost, + ceil: ceil, + dividedBy: dividedBy, + floor: floor, + minus: minus, + modulo: 
modulo, + times: times, + round: round, + plus: plus, + sortNatural: sortNatural, + urlDecode: urlDecode, + urlEncode: urlEncode, + join: join, + last: last$1, + first: first, + reverse: reverse, + sort: sort, + size: size, + map: map, + concat: concat, + slice: slice, + where: where, + uniq: uniq, + date: date, + Default: Default, + json: json, + append: append, + prepend: prepend, + lstrip: lstrip, + downcase: downcase, + upcase: upcase, + remove: remove, + removeFirst: removeFirst, + rstrip: rstrip, + split: split, + strip: strip, + stripNewlines: stripNewlines, + capitalize: capitalize, + replace: replace, + replaceFirst: replaceFirst, + truncate: truncate, + truncatewords: truncatewords +}) + +class TagMap { + constructor () { + this.impls = {} + } + + get (name) { + const impl = this.impls[name] + assert(impl, () => `tag "${name}" not found`) + return impl + } + + set (name, impl) { + this.impls[name] = impl + } +} + +class FilterMap { + constructor (strictFilters) { + this.strictFilters = strictFilters + this.impls = {} + } + + get (name) { + const impl = this.impls[name] + assert(impl || !this.strictFilters, () => `undefined filter: ${name}`) + return impl + } + + set (name, impl) { + this.impls[name] = impl + } + + create (name, args) { + return new Filter(name, this.get(name), args) + } +} + +class Liquid { + constructor (opts = {}) { + this.options = applyDefault(normalize(opts)) + this.parser = new Parser(this) + this.renderer = new Render() + this.fs = opts.fs || fs + this.filters = new FilterMap(this.options.strictFilters) + this.tags = new TagMap() + forOwn(tags, (conf, name) => this.registerTag(snakeCase(name), conf)) + forOwn(builtinFilters, (handler, name) => this.registerFilter(snakeCase(name), handler)) + } + + parse (html, filepath) { + const tokenizer = new Tokenizer(html, filepath) + const tokens = tokenizer.readTopLevelTokens(this.options) + return this.parser.parse(tokens) + } + + _render (tpl, scope, opts, sync) { + const options = Object.assign({}, this.options, normalize(opts)) + const ctx = new Context(scope, options, sync) + return this.renderer.renderTemplates(tpl, ctx) + } + + async render (tpl, scope, opts) { + return toPromise(this._render(tpl, scope, opts, false)) + } + + renderSync (tpl, scope, opts) { + return toValue$1(this._render(tpl, scope, opts, true)) + } + + _parseAndRender (html, scope, opts, sync) { + const tpl = this.parse(html) + return this._render(tpl, scope, opts, sync) + } + + async parseAndRender (html, scope, opts) { + return toPromise(this._parseAndRender(html, scope, opts, false)) + } + + parseAndRenderSync (html, scope, opts) { + return toValue$1(this._parseAndRender(html, scope, opts, true)) + } + + * _parseFile (file, opts, sync) { + const options = Object.assign({}, this.options, normalize(opts)) + const paths = options.root.map(root => this.fs.resolve(root, file, options.extname)) + if (this.fs.fallback !== undefined) { + const filepath = this.fs.fallback(file) + if (filepath !== undefined) { paths.push(filepath) } + } + for (const filepath of paths) { + const { cache } = options + if (cache) { + const tpls = yield cache.read(filepath) + if (tpls) { return tpls } + } + if (!(sync ? this.fs.existsSync(filepath) : yield this.fs.exists(filepath))) { continue } + const tpl = this.parse(sync ? 
this.fs.readFileSync(filepath) : yield this.fs.readFile(filepath), filepath) + if (cache) { cache.write(filepath, tpl) } + return tpl + } + throw this.lookupError(file, options.root) + } + + async parseFile (file, opts) { + return toPromise(this._parseFile(file, opts, false)) + } + + parseFileSync (file, opts) { + return toValue$1(this._parseFile(file, opts, true)) + } + + async renderFile (file, ctx, opts) { + const templates = await this.parseFile(file, opts) + return this.render(templates, ctx, opts) + } + + renderFileSync (file, ctx, opts) { + const options = normalize(opts) + const templates = this.parseFileSync(file, options) + return this.renderSync(templates, ctx, opts) + } + + _evalValue (str, ctx) { + const value = new Value(str, this.filters) + return value.value(ctx) + } + + async evalValue (str, ctx) { + return toPromise(this._evalValue(str, ctx)) + } + + evalValueSync (str, ctx) { + return toValue$1(this._evalValue(str, ctx)) + } + + registerFilter (name, filter) { + this.filters.set(name, filter) + } + + registerTag (name, tag) { + this.tags.set(name, tag) + } + + plugin (plugin) { + return plugin.call(this, Liquid) + } + + express () { + const self = this; // eslint-disable-line + return function (filePath, ctx, callback) { + const opts = { root: [...normalizeStringArray(this.root), ...self.options.root] } + self.renderFile(filePath, ctx, opts).then(html => callback(null, html), callback) + } + } + + lookupError (file, roots) { + const err = new Error('ENOENT') + err.message = `ENOENT: Failed to lookup "${file}" in "${roots}"` + err.code = 'ENOENT' + return err + } + + /** + * @deprecated use parseFile instead + */ + async getTemplate (file, opts) { + return this.parseFile(file, opts) + } + + /** + * @deprecated use parseFileSync instead + */ + getTemplateSync (file, opts) { + return this.parseFileSync(file, opts) + } +} + +export { AssertionError, Context, Drop, Emitter, Expression, Hash, Liquid, ParseError, ParseStream, TagToken, Token, TokenizationError, Tokenizer, typeGuards as TypeGuards, assert, evalQuotedToken, evalToken, isFalsy, isTruthy, toPromise, toThenable, toValue$1 as toValue } diff --git a/test/fixtures/cache-tests/asset/marked.min.js b/test/fixtures/cache-tests/asset/marked.min.js new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/asset/marked.min.js @@ -0,0 +1,6 @@ +/** + * marked - a markdown parser + * Copyright (c) 2011-2018, Christopher Jeffrey. (MIT Licensed) + * https://github.com/markedjs/marked + */ +!function(e){"use strict";var k={newline:/^\n+/,code:/^( {4}[^\n]+\n*)+/,fences:f,hr:/^ {0,3}((?:- *){3,}|(?:_ *){3,}|(?:\* *){3,})(?:\n+|$)/,heading:/^ *(#{1,6}) *([^\n]+?) *(?:#+ *)?(?:\n+|$)/,nptable:f,blockquote:/^( {0,3}> ?(paragraph|[^\n]*)(?:\n|$))+/,list:/^( {0,3})(bull) [\s\S]+?(?:hr|def|\n{2,}(?! )(?!\1bull )\n*|\s*$)/,html:"^ {0,3}(?:<(script|pre|style)[\\s>][\\s\\S]*?(?:</\\1>[^\\n]*\\n+|$)|comment[^\\n]*(\\n+|$)|<\\?[\\s\\S]*?\\?>\\n*|<![A-Z][\\s\\S]*?>\\n*|<!\\[CDATA\\[[\\s\\S]*?\\]\\]>\\n*|</?(tag)(?: +|\\n|/?>)[\\s\\S]*?(?:\\n{2,}|$)|<(?!script|pre|style)([a-z][\\w-]*)(?:attribute)*? */?>(?=\\h*\\n)[\\s\\S]*?(?:\\n{2,}|$)|</(?!script|pre|style)[a-z][\\w-]*\\s*>(?=\\h*\\n)[\\s\\S]*?(?:\\n{2,}|$))",def:/^ {0,3}\[(label)\]: *\n? *<?([^\s>]+)>?(?:(?: +\n? *| *\n *)(title))? 
*(?:\n+|$)/,table:f,lheading:/^([^\n]+)\n *(=|-){2,} *(?:\n+|$)/,paragraph:/^([^\n]+(?:\n(?!hr|heading|lheading| {0,3}>|<\/?(?:tag)(?: +|\n|\/?>)|<(?:script|pre|style|!--))[^\n]+)*)/,text:/^[^\n]+/};function a(e){this.tokens=[],this.tokens.links=Object.create(null),this.options=e||m.defaults,this.rules=k.normal,this.options.pedantic?this.rules=k.pedantic:this.options.gfm&&(this.options.tables?this.rules=k.tables:this.rules=k.gfm)}k._label=/(?!\s*\])(?:\\[\[\]]|[^\[\]])+/,k._title=/(?:"(?:\\"?|[^"\\])*"|'[^'\n]*(?:\n[^'\n]+)*\n?'|\([^()]*\))/,k.def=i(k.def).replace("label",k._label).replace("title",k._title).getRegex(),k.bullet=/(?:[*+-]|\d{1,9}\.)/,k.item=/^( *)(bull) ?[^\n]*(?:\n(?!\1bull ?)[^\n]*)*/,k.item=i(k.item,"gm").replace(/bull/g,k.bullet).getRegex(),k.list=i(k.list).replace(/bull/g,k.bullet).replace("hr","\\n+(?=\\1?(?:(?:- *){3,}|(?:_ *){3,}|(?:\\* *){3,})(?:\\n+|$))").replace("def","\\n+(?="+k.def.source+")").getRegex(),k._tag="address|article|aside|base|basefont|blockquote|body|caption|center|col|colgroup|dd|details|dialog|dir|div|dl|dt|fieldset|figcaption|figure|footer|form|frame|frameset|h[1-6]|head|header|hr|html|iframe|legend|li|link|main|menu|menuitem|meta|nav|noframes|ol|optgroup|option|p|param|section|source|summary|table|tbody|td|tfoot|th|thead|title|tr|track|ul",k._comment=/<!--(?!-?>)[\s\S]*?-->/,k.html=i(k.html,"i").replace("comment",k._comment).replace("tag",k._tag).replace("attribute",/ +[a-zA-Z:_][\w.:-]*(?: *= *"[^"\n]*"| *= *'[^'\n]*'| *= *[^\s"'=<>`]+)?/).getRegex(),k.paragraph=i(k.paragraph).replace("hr",k.hr).replace("heading",k.heading).replace("lheading",k.lheading).replace("tag",k._tag).getRegex(),k.blockquote=i(k.blockquote).replace("paragraph",k.paragraph).getRegex(),k.normal=d({},k),k.gfm=d({},k.normal,{fences:/^ {0,3}(`{3,}|~{3,})([^`\n]*)\n(?:|([\s\S]*?)\n)(?: {0,3}\1[~`]* *(?:\n+|$)|$)/,paragraph:/^/,heading:/^ *(#{1,6}) +([^\n]+?) *#* *(?:\n+|$)/}),k.gfm.paragraph=i(k.paragraph).replace("(?!","(?!"+k.gfm.fences.source.replace("\\1","\\2")+"|"+k.list.source.replace("\\1","\\3")+"|").getRegex(),k.tables=d({},k.gfm,{nptable:/^ *([^|\n ].*\|.*)\n *([-:]+ *\|[-| :]*)(?:\n((?:.*[^>\n ].*(?:\n|$))*)\n*|$)/,table:/^ *\|(.+)\n *\|?( *[-:]+[-| :]*)(?:\n((?: *[^>\n ].*(?:\n|$))*)\n*|$)/}),k.pedantic=d({},k.normal,{html:i("^ *(?:comment *(?:\\n|\\s*$)|<(tag)[\\s\\S]+?</\\1> *(?:\\n{2,}|\\s*$)|<tag(?:\"[^\"]*\"|'[^']*'|\\s[^'\"/>\\s]*)*?/?> *(?:\\n{2,}|\\s*$))").replace("comment",k._comment).replace(/tag/g,"(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:|[^\\w\\s@]*@)\\b").getRegex(),def:/^ *\[([^\]]+)\]: *<?([^\s>]+)>?(?: +(["(][^\n]+[")]))? 
*(?:\n+|$)/}),a.rules=k,a.lex=function(e,t){return new a(t).lex(e)},a.prototype.lex=function(e){return e=e.replace(/\r\n|\r/g,"\n").replace(/\t/g," ").replace(/\u00a0/g," ").replace(/\u2424/g,"\n"),this.token(e,!0)},a.prototype.token=function(e,t){var n,r,s,i,l,o,a,h,p,u,c,g,f,d,m,b;for(e=e.replace(/^ +$/gm,"");e;)if((s=this.rules.newline.exec(e))&&(e=e.substring(s[0].length),1<s[0].length&&this.tokens.push({type:"space"})),s=this.rules.code.exec(e))e=e.substring(s[0].length),s=s[0].replace(/^ {4}/gm,""),this.tokens.push({type:"code",text:this.options.pedantic?s:y(s,"\n")});else if(s=this.rules.fences.exec(e))e=e.substring(s[0].length),this.tokens.push({type:"code",lang:s[2]?s[2].trim():s[2],text:s[3]||""});else if(s=this.rules.heading.exec(e))e=e.substring(s[0].length),this.tokens.push({type:"heading",depth:s[1].length,text:s[2]});else if(t&&(s=this.rules.nptable.exec(e))&&(o={type:"table",header:x(s[1].replace(/^ *| *\| *$/g,"")),align:s[2].replace(/^ *|\| *$/g,"").split(/ *\| */),cells:s[3]?s[3].replace(/\n$/,"").split("\n"):[]}).header.length===o.align.length){for(e=e.substring(s[0].length),c=0;c<o.align.length;c++)/^ *-+: *$/.test(o.align[c])?o.align[c]="right":/^ *:-+: *$/.test(o.align[c])?o.align[c]="center":/^ *:-+ *$/.test(o.align[c])?o.align[c]="left":o.align[c]=null;for(c=0;c<o.cells.length;c++)o.cells[c]=x(o.cells[c],o.header.length);this.tokens.push(o)}else if(s=this.rules.hr.exec(e))e=e.substring(s[0].length),this.tokens.push({type:"hr"});else if(s=this.rules.blockquote.exec(e))e=e.substring(s[0].length),this.tokens.push({type:"blockquote_start"}),s=s[0].replace(/^ *> ?/gm,""),this.token(s,t),this.tokens.push({type:"blockquote_end"});else if(s=this.rules.list.exec(e)){for(e=e.substring(s[0].length),a={type:"list_start",ordered:d=1<(i=s[2]).length,start:d?+i:"",loose:!1},this.tokens.push(a),n=!(h=[]),f=(s=s[0].match(this.rules.item)).length,c=0;c<f;c++)u=(o=s[c]).length,~(o=o.replace(/^ *([*+-]|\d+\.) 
*/,"")).indexOf("\n ")&&(u-=o.length,o=this.options.pedantic?o.replace(/^ {1,4}/gm,""):o.replace(new RegExp("^ {1,"+u+"}","gm"),"")),c!==f-1&&(l=k.bullet.exec(s[c+1])[0],(1<i.length?1===l.length:1<l.length||this.options.smartLists&&l!==i)&&(e=s.slice(c+1).join("\n")+e,c=f-1)),r=n||/\n\n(?!\s*$)/.test(o),c!==f-1&&(n="\n"===o.charAt(o.length-1),r||(r=n)),r&&(a.loose=!0),b=void 0,(m=/^\[[ xX]\] /.test(o))&&(b=" "!==o[1],o=o.replace(/^\[[ xX]\] +/,"")),p={type:"list_item_start",task:m,checked:b,loose:r},h.push(p),this.tokens.push(p),this.token(o,!1),this.tokens.push({type:"list_item_end"});if(a.loose)for(f=h.length,c=0;c<f;c++)h[c].loose=!0;this.tokens.push({type:"list_end"})}else if(s=this.rules.html.exec(e))e=e.substring(s[0].length),this.tokens.push({type:this.options.sanitize?"paragraph":"html",pre:!this.options.sanitizer&&("pre"===s[1]||"script"===s[1]||"style"===s[1]),text:s[0]});else if(t&&(s=this.rules.def.exec(e)))e=e.substring(s[0].length),s[3]&&(s[3]=s[3].substring(1,s[3].length-1)),g=s[1].toLowerCase().replace(/\s+/g," "),this.tokens.links[g]||(this.tokens.links[g]={href:s[2],title:s[3]});else if(t&&(s=this.rules.table.exec(e))&&(o={type:"table",header:x(s[1].replace(/^ *| *\| *$/g,"")),align:s[2].replace(/^ *|\| *$/g,"").split(/ *\| */),cells:s[3]?s[3].replace(/(?: *\| *)?\n$/,"").split("\n"):[]}).header.length===o.align.length){for(e=e.substring(s[0].length),c=0;c<o.align.length;c++)/^ *-+: *$/.test(o.align[c])?o.align[c]="right":/^ *:-+: *$/.test(o.align[c])?o.align[c]="center":/^ *:-+ *$/.test(o.align[c])?o.align[c]="left":o.align[c]=null;for(c=0;c<o.cells.length;c++)o.cells[c]=x(o.cells[c].replace(/^ *\| *| *\| *$/g,""),o.header.length);this.tokens.push(o)}else if(s=this.rules.lheading.exec(e))e=e.substring(s[0].length),this.tokens.push({type:"heading",depth:"="===s[2]?1:2,text:s[1]});else if(t&&(s=this.rules.paragraph.exec(e)))e=e.substring(s[0].length),this.tokens.push({type:"paragraph",text:"\n"===s[1].charAt(s[1].length-1)?s[1].slice(0,-1):s[1]});else if(s=this.rules.text.exec(e))e=e.substring(s[0].length),this.tokens.push({type:"text",text:s[0]});else if(e)throw new Error("Infinite loop on byte: "+e.charCodeAt(0));return this.tokens};var n={escape:/^\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/,autolink:/^<(scheme:[^\s\x00-\x1f<>]*|email)>/,url:f,tag:"^comment|^</[a-zA-Z][\\w:-]*\\s*>|^<[a-zA-Z][\\w-]*(?:attribute)*?\\s*/?>|^<\\?[\\s\\S]*?\\?>|^<![a-zA-Z]+\\s[\\s\\S]*?>|^<!\\[CDATA\\[[\\s\\S]*?\\]\\]>",link:/^!?\[(label)\]\(href(?:\s+(title))?\s*\)/,reflink:/^!?\[(label)\]\[(?!\s*\])((?:\\[\[\]]?|[^\[\]\\])+)\]/,nolink:/^!?\[(?!\s*\])((?:\[[^\[\]]*\]|\\[\[\]]|[^\[\]])*)\](?:\[\])?/,strong:/^__([^\s_])__(?!_)|^\*\*([^\s*])\*\*(?!\*)|^__([^\s][\s\S]*?[^\s])__(?!_)|^\*\*([^\s][\s\S]*?[^\s])\*\*(?!\*)/,em:/^_([^\s_])_(?!_)|^\*([^\s*"<\[])\*(?!\*)|^_([^\s][\s\S]*?[^\s_])_(?!_|[^\spunctuation])|^_([^\s_][\s\S]*?[^\s])_(?!_|[^\spunctuation])|^\*([^\s"<\[][\s\S]*?[^\s*])\*(?!\*)|^\*([^\s*"<\[][\s\S]*?[^\s])\*(?!\*)/,code:/^(`+)([^`]|[^`][\s\S]*?[^`])\1(?!`)/,br:/^( {2,}|\\)\n(?!\s*$)/,del:f,text:/^(`+|[^`])[\s\S]*?(?=[\\<!\[`*]|\b_| {2,}\n|$)/};function h(e,t){if(this.options=t||m.defaults,this.links=e,this.rules=n.normal,this.renderer=this.options.renderer||new r,this.renderer.options=this.options,!this.links)throw new Error("Tokens array requires a `links` property.");this.options.pedantic?this.rules=n.pedantic:this.options.gfm&&(this.options.breaks?this.rules=n.breaks:this.rules=n.gfm)}function r(e){this.options=e||m.defaults}function s(){}function 
p(e){this.tokens=[],this.token=null,this.options=e||m.defaults,this.options.renderer=this.options.renderer||new r,this.renderer=this.options.renderer,this.renderer.options=this.options,this.slugger=new t}function t(){this.seen={}}function u(e,t){if(t){if(u.escapeTest.test(e))return e.replace(u.escapeReplace,function(e){return u.replacements[e]})}else if(u.escapeTestNoEncode.test(e))return e.replace(u.escapeReplaceNoEncode,function(e){return u.replacements[e]});return e}function c(e){return e.replace(/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/gi,function(e,t){return"colon"===(t=t.toLowerCase())?":":"#"===t.charAt(0)?"x"===t.charAt(1)?String.fromCharCode(parseInt(t.substring(2),16)):String.fromCharCode(+t.substring(1)):""})}function i(n,e){return n=n.source||n,e=e||"",{replace:function(e,t){return t=(t=t.source||t).replace(/(^|[^\[])\^/g,"$1"),n=n.replace(e,t),this},getRegex:function(){return new RegExp(n,e)}}}function l(e,t,n){if(e){try{var r=decodeURIComponent(c(n)).replace(/[^\w:]/g,"").toLowerCase()}catch(e){return null}if(0===r.indexOf("javascript:")||0===r.indexOf("vbscript:")||0===r.indexOf("data:"))return null}t&&!g.test(n)&&(n=function(e,t){o[" "+e]||(/^[^:]+:\/*[^/]*$/.test(e)?o[" "+e]=e+"/":o[" "+e]=y(e,"/",!0));return e=o[" "+e],"//"===t.slice(0,2)?e.replace(/:[\s\S]*/,":")+t:"/"===t.charAt(0)?e.replace(/(:\/*[^/]*)[\s\S]*/,"$1")+t:e+t}(t,n));try{n=encodeURI(n).replace(/%25/g,"%")}catch(e){return null}return n}n._punctuation="!\"#$%&'()*+,\\-./:;<=>?@\\[^_{|}~",n.em=i(n.em).replace(/punctuation/g,n._punctuation).getRegex(),n._escapes=/\\([!"#$%&'()*+,\-./:;<=>?@\[\]\\^_`{|}~])/g,n._scheme=/[a-zA-Z][a-zA-Z0-9+.-]{1,31}/,n._email=/[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+(@)[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+(?![-_])/,n.autolink=i(n.autolink).replace("scheme",n._scheme).replace("email",n._email).getRegex(),n._attribute=/\s+[a-zA-Z:_][\w.:-]*(?:\s*=\s*"[^"]*"|\s*=\s*'[^']*'|\s*=\s*[^\s"'=<>`]+)?/,n.tag=i(n.tag).replace("comment",k._comment).replace("attribute",n._attribute).getRegex(),n._label=/(?:\[[^\[\]]*\]|\\[\[\]]?|`[^`]*`|[^\[\]\\])*?/,n._href=/\s*(<(?:\\[<>]?|[^\s<>\\])*>|(?:\\[()]?|\([^\s\x00-\x1f\\]*\)|[^\s\x00-\x1f()\\])*?)/,n._title=/"(?:\\"?|[^"\\])*"|'(?:\\'?|[^'\\])*'|\((?:\\\)?|[^)\\])*\)/,n.link=i(n.link).replace("label",n._label).replace("href",n._href).replace("title",n._title).getRegex(),n.reflink=i(n.reflink).replace("label",n._label).getRegex(),n.normal=d({},n),n.pedantic=d({},n.normal,{strong:/^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,em:/^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/,link:i(/^!?\[(label)\]\((.*?)\)/).replace("label",n._label).getRegex(),reflink:i(/^!?\[(label)\]\s*\[([^\]]*)\]/).replace("label",n._label).getRegex()}),n.gfm=d({},n.normal,{escape:i(n.escape).replace("])","~|])").getRegex(),_extended_email:/[A-Za-z0-9._+-]+(@)[a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]*[a-zA-Z0-9])+(?![-_])/,url:/^((?:ftp|https?):\/\/|www\.)(?:[a-zA-Z0-9\-]+\.?)+[^\s<]*|^email/,_backpedal:/(?:[^?!.,:;*_~()&]+|\([^)]*\)|&(?![a-zA-Z0-9]+;$)|[?!.,:;*_~)]+(?!$))+/,del:/^~+(?=\S)([\s\S]*?\S)~+/,text:i(n.text).replace("]|","~]|").replace("|$","|https?://|ftp://|www\\.|[a-zA-Z0-9.!#$%&'*+/=?^_`{\\|}~-]+@|$").getRegex()}),n.gfm.url=i(n.gfm.url,"i").replace("email",n.gfm._extended_email).getRegex(),n.breaks=d({},n.gfm,{br:i(n.br).replace("{2,}","*").getRegex(),text:i(n.gfm.text).replace("{2,}","*").getRegex()}),h.rules=n,h.output=function(e,t,n){return new 
h(t,n).output(e)},h.prototype.output=function(e){for(var t,n,r,s,i,l,o="";e;)if(i=this.rules.escape.exec(e))e=e.substring(i[0].length),o+=u(i[1]);else if(i=this.rules.tag.exec(e))!this.inLink&&/^<a /i.test(i[0])?this.inLink=!0:this.inLink&&/^<\/a>/i.test(i[0])&&(this.inLink=!1),!this.inRawBlock&&/^<(pre|code|kbd|script)(\s|>)/i.test(i[0])?this.inRawBlock=!0:this.inRawBlock&&/^<\/(pre|code|kbd|script)(\s|>)/i.test(i[0])&&(this.inRawBlock=!1),e=e.substring(i[0].length),o+=this.options.sanitize?this.options.sanitizer?this.options.sanitizer(i[0]):u(i[0]):i[0];else if(i=this.rules.link.exec(e))e=e.substring(i[0].length),this.inLink=!0,r=i[2],this.options.pedantic?(t=/^([^'"]*[^\s])\s+(['"])(.*)\2/.exec(r))?(r=t[1],s=t[3]):s="":s=i[3]?i[3].slice(1,-1):"",r=r.trim().replace(/^<([\s\S]*)>$/,"$1"),o+=this.outputLink(i,{href:h.escapes(r),title:h.escapes(s)}),this.inLink=!1;else if((i=this.rules.reflink.exec(e))||(i=this.rules.nolink.exec(e))){if(e=e.substring(i[0].length),t=(i[2]||i[1]).replace(/\s+/g," "),!(t=this.links[t.toLowerCase()])||!t.href){o+=i[0].charAt(0),e=i[0].substring(1)+e;continue}this.inLink=!0,o+=this.outputLink(i,t),this.inLink=!1}else if(i=this.rules.strong.exec(e))e=e.substring(i[0].length),o+=this.renderer.strong(this.output(i[4]||i[3]||i[2]||i[1]));else if(i=this.rules.em.exec(e))e=e.substring(i[0].length),o+=this.renderer.em(this.output(i[6]||i[5]||i[4]||i[3]||i[2]||i[1]));else if(i=this.rules.code.exec(e))e=e.substring(i[0].length),o+=this.renderer.codespan(u(i[2].trim(),!0));else if(i=this.rules.br.exec(e))e=e.substring(i[0].length),o+=this.renderer.br();else if(i=this.rules.del.exec(e))e=e.substring(i[0].length),o+=this.renderer.del(this.output(i[1]));else if(i=this.rules.autolink.exec(e))e=e.substring(i[0].length),r="@"===i[2]?"mailto:"+(n=u(this.mangle(i[1]))):n=u(i[1]),o+=this.renderer.link(r,null,n);else if(this.inLink||!(i=this.rules.url.exec(e))){if(i=this.rules.text.exec(e))e=e.substring(i[0].length),this.inRawBlock?o+=this.renderer.text(i[0]):o+=this.renderer.text(u(this.smartypants(i[0])));else if(e)throw new Error("Infinite loop on byte: "+e.charCodeAt(0))}else{if("@"===i[2])r="mailto:"+(n=u(i[0]));else{for(;l=i[0],i[0]=this.rules._backpedal.exec(i[0])[0],l!==i[0];);n=u(i[0]),r="www."===i[1]?"http://"+n:n}e=e.substring(i[0].length),o+=this.renderer.link(r,null,n)}return o},h.escapes=function(e){return e?e.replace(h.rules._escapes,"$1"):e},h.prototype.outputLink=function(e,t){var n=t.href,r=t.title?u(t.title):null;return"!"!==e[0].charAt(0)?this.renderer.link(n,r,this.output(e[1])):this.renderer.image(n,r,u(e[1]))},h.prototype.smartypants=function(e){return this.options.smartypants?e.replace(/---/g,"—").replace(/--/g,"–").replace(/(^|[-\u2014/(\[{"\s])'/g,"$1‘").replace(/'/g,"’").replace(/(^|[-\u2014/(\[{\u2018\s])"/g,"$1“").replace(/"/g,"”").replace(/\.{3}/g,"…"):e},h.prototype.mangle=function(e){if(!this.options.mangle)return e;for(var t,n="",r=e.length,s=0;s<r;s++)t=e.charCodeAt(s),.5<Math.random()&&(t="x"+t.toString(16)),n+="&#"+t+";";return n},r.prototype.code=function(e,t,n){var r=(t||"").match(/\S*/)[0];if(this.options.highlight){var s=this.options.highlight(e,r);null!=s&&s!==e&&(n=!0,e=s)}return r?'<pre><code class="'+this.options.langPrefix+u(r,!0)+'">'+(n?e:u(e,!0))+"</code></pre>\n":"<pre><code>"+(n?e:u(e,!0))+"</code></pre>"},r.prototype.blockquote=function(e){return"<blockquote>\n"+e+"</blockquote>\n"},r.prototype.html=function(e){return e},r.prototype.heading=function(e,t,n,r){return this.options.headerIds?"<h"+t+' 
id="'+this.options.headerPrefix+r.slug(n)+'">'+e+"</h"+t+">\n":"<h"+t+">"+e+"</h"+t+">\n"},r.prototype.hr=function(){return this.options.xhtml?"<hr/>\n":"<hr>\n"},r.prototype.list=function(e,t,n){var r=t?"ol":"ul";return"<"+r+(t&&1!==n?' start="'+n+'"':"")+">\n"+e+"</"+r+">\n"},r.prototype.listitem=function(e){return"<li>"+e+"</li>\n"},r.prototype.checkbox=function(e){return"<input "+(e?'checked="" ':"")+'disabled="" type="checkbox"'+(this.options.xhtml?" /":"")+"> "},r.prototype.paragraph=function(e){return"<p>"+e+"</p>\n"},r.prototype.table=function(e,t){return t&&(t="<tbody>"+t+"</tbody>"),"<table>\n<thead>\n"+e+"</thead>\n"+t+"</table>\n"},r.prototype.tablerow=function(e){return"<tr>\n"+e+"</tr>\n"},r.prototype.tablecell=function(e,t){var n=t.header?"th":"td";return(t.align?"<"+n+' align="'+t.align+'">':"<"+n+">")+e+"</"+n+">\n"},r.prototype.strong=function(e){return"<strong>"+e+"</strong>"},r.prototype.em=function(e){return"<em>"+e+"</em>"},r.prototype.codespan=function(e){return"<code>"+e+"</code>"},r.prototype.br=function(){return this.options.xhtml?"<br/>":"<br>"},r.prototype.del=function(e){return"<del>"+e+"</del>"},r.prototype.link=function(e,t,n){if(null===(e=l(this.options.sanitize,this.options.baseUrl,e)))return n;var r='<a href="'+u(e)+'"';return t&&(r+=' title="'+t+'"'),r+=">"+n+"</a>"},r.prototype.image=function(e,t,n){if(null===(e=l(this.options.sanitize,this.options.baseUrl,e)))return n;var r='<img src="'+e+'" alt="'+n+'"';return t&&(r+=' title="'+t+'"'),r+=this.options.xhtml?"/>":">"},r.prototype.text=function(e){return e},s.prototype.strong=s.prototype.em=s.prototype.codespan=s.prototype.del=s.prototype.text=function(e){return e},s.prototype.link=s.prototype.image=function(e,t,n){return""+n},s.prototype.br=function(){return""},p.parse=function(e,t){return new p(t).parse(e)},p.prototype.parse=function(e){this.inline=new h(e.links,this.options),this.inlineText=new h(e.links,d({},this.options,{renderer:new s})),this.tokens=e.reverse();for(var t="";this.next();)t+=this.tok();return t},p.prototype.next=function(){return this.token=this.tokens.pop()},p.prototype.peek=function(){return this.tokens[this.tokens.length-1]||0},p.prototype.parseText=function(){for(var e=this.token.text;"text"===this.peek().type;)e+="\n"+this.next().text;return this.inline.output(e)},p.prototype.tok=function(){switch(this.token.type){case"space":return"";case"hr":return this.renderer.hr();case"heading":return this.renderer.heading(this.inline.output(this.token.text),this.token.depth,c(this.inlineText.output(this.token.text)),this.slugger);case"code":return this.renderer.code(this.token.text,this.token.lang,this.token.escaped);case"table":var e,t,n,r,s="",i="";for(n="",e=0;e<this.token.header.length;e++)n+=this.renderer.tablecell(this.inline.output(this.token.header[e]),{header:!0,align:this.token.align[e]});for(s+=this.renderer.tablerow(n),e=0;e<this.token.cells.length;e++){for(t=this.token.cells[e],n="",r=0;r<t.length;r++)n+=this.renderer.tablecell(this.inline.output(t[r]),{header:!1,align:this.token.align[r]});i+=this.renderer.tablerow(n)}return this.renderer.table(s,i);case"blockquote_start":for(i="";"blockquote_end"!==this.next().type;)i+=this.tok();return this.renderer.blockquote(i);case"list_start":i="";for(var l=this.token.ordered,o=this.token.start;"list_end"!==this.next().type;)i+=this.tok();return this.renderer.list(i,l,o);case"list_item_start":i="";var 
a=this.token.loose;for(this.token.task&&(i+=this.renderer.checkbox(this.token.checked));"list_item_end"!==this.next().type;)i+=a||"text"!==this.token.type?this.tok():this.parseText();return this.renderer.listitem(i);case"html":return this.renderer.html(this.token.text);case"paragraph":return this.renderer.paragraph(this.inline.output(this.token.text));case"text":return this.renderer.paragraph(this.parseText());default:var h='Token with "'+this.token.type+'" type was not found.';if(!this.options.silent)throw new Error(h);console.log(h)}},t.prototype.slug=function(e){var t=e.toLowerCase().trim().replace(/[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&()*+,./:;<=>?@[\]^`{|}~]/g,"").replace(/\s/g,"-");if(this.seen.hasOwnProperty(t))for(var n=t;this.seen[n]++,t=n+"-"+this.seen[n],this.seen.hasOwnProperty(t););return this.seen[t]=0,t},u.escapeTest=/[&<>"']/,u.escapeReplace=/[&<>"']/g,u.replacements={"&":"&amp;","<":"&lt;",">":"&gt;",'"':"&quot;","'":"&#39;"},u.escapeTestNoEncode=/[<>"']|&(?!#?\w+;)/,u.escapeReplaceNoEncode=/[<>"']|&(?!#?\w+;)/g;var o={},g=/^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;function f(){}function d(e){for(var t,n,r=1;r<arguments.length;r++)for(n in t=arguments[r])Object.prototype.hasOwnProperty.call(t,n)&&(e[n]=t[n]);return e}function x(e,t){var n=e.replace(/\|/g,function(e,t,n){for(var r=!1,s=t;0<=--s&&"\\"===n[s];)r=!r;return r?"|":" |"}).split(/ \|/),r=0;if(n.length>t)n.splice(t);else for(;n.length<t;)n.push("");for(;r<n.length;r++)n[r]=n[r].trim().replace(/\\\|/g,"|");return n}function y(e,t,n){if(0===e.length)return"";for(var r=0;r<e.length;){var s=e.charAt(e.length-r-1);if(s!==t||n){if(s===t||!n)break;r++}else r++}return e.substr(0,e.length-r)}function m(e,n,r){if(null==e)throw new Error("marked(): input parameter is undefined or null");if("string"!=typeof e)throw new Error("marked(): input parameter is of type "+Object.prototype.toString.call(e)+", string expected");if(r||"function"==typeof n){r||(r=n,n=null);var s,i,l=(n=d({},m.defaults,n||{})).highlight,t=0;try{s=a.lex(e,n)}catch(e){return r(e)}i=s.length;var o=function(t){if(t)return n.highlight=l,r(t);var e;try{e=p.parse(s,n)}catch(e){t=e}return n.highlight=l,t?r(t):r(null,e)};if(!l||l.length<3)return o();if(delete n.highlight,!i)return o();for(;t<s.length;t++)!function(n){"code"!==n.type?--i||o():l(n.text,n.lang,function(e,t){return e?o(e):null==t||t===n.text?--i||o():(n.text=t,n.escaped=!0,void(--i||o()))})}(s[t])}else try{return n&&(n=d({},m.defaults,n)),p.parse(a.lex(e,n),n)}catch(e){if(e.message+="\nPlease report this to https://github.com/markedjs/marked.",(n||m.defaults).silent)return"<p>An error occurred:</p><pre>"+u(e.message+"",!0)+"</pre>";throw e}}f.exec=f,m.options=m.setOptions=function(e){return d(m.defaults,e),m},m.getDefaults=function(){return{baseUrl:null,breaks:!1,gfm:!0,headerIds:!0,headerPrefix:"",highlight:null,langPrefix:"language-",mangle:!0,pedantic:!1,renderer:new r,sanitize:!1,sanitizer:null,silent:!1,smartLists:!1,smartypants:!1,tables:!0,xhtml:!1}},m.defaults=m.getDefaults(),m.Parser=p,m.parser=p.parse,m.Renderer=r,m.TextRenderer=s,m.Lexer=a,m.lexer=a.lex,m.InlineLexer=h,m.inlineLexer=h.output,m.Slugger=t,m.parse=m,"undefined"!=typeof module&&"object"==typeof exports?module.exports=m:"function"==typeof define&&define.amd?define(function(){return m}):e.marked=m}(this||("undefined"!=typeof window?window:global)); \ No newline at end of file diff --git a/test/fixtures/cache-tests/asset/style.css b/test/fixtures/cache-tests/asset/style.css new file mode 100644 --- /dev/null +++ 
b/test/fixtures/cache-tests/asset/style.css @@ -0,0 +1,201 @@ +html { + font-family: Helvetica, Arial, sans-serif; + font-size: 14px; + line-height: 22px; +} + +body { + margin: 2em 4em 10em 4em; +} + +h1 { + line-height: 1.2em; +} + +table { + border-spacing: 0; +} + +th { + font-weight: normal; + padding: 0.2em 0.4em; +} + +ul { + list-style-type: none; +} + +ul#ToC { + column-count: 4; +} + +th.category a { + color: white; +} + +.category { + background-color: black; + color: white; + font-weight: bold; +} + +.name { + margin: 0; + text-align: right; +} + +.clickhint { + cursor: pointer; +} + +.uuid { + color: #999; + margin-left: 0.5em; +} + +.hint { + color: #999; + font-style: normal; +} + +.shade { + background-color: #fafafa; +} + +.description { + background-color: #eee; + padding: 3px 6px; +} + +.warning { + padding: 0.5em; + margin: 0.5em auto; + background-color: #fde2e3; + border-left: solid 4px #f47477; +} + +.key-item { + white-space: nowrap; +} + +#download { + display: none; +} + +#key { + background-color: white; + margin-bottom: 0; + padding: 10px; + border: 1px solid #999; +} + +input.select { + margin-left: 6px; +} + +code { + white-space: nowrap; +} + +pre code { + white-space: pre; +} + +@font-face { + font-family: 'FontAwesome'; + src: url('/asset/fonts/fontawesome-webfont.eot?v=4.7.0'); + src: url('/asset/fonts/fontawesome-webfont.eot?#iefix&v=4.7.0') format('embedded-opentype'), url('/asset/fonts/fontawesome-webfont.woff2?v=4.7.0') format('woff2'), url('/asset/fonts/fontawesome-webfont.woff?v=4.7.0') format('woff'), url('/asset/fonts/fontawesome-webfont.ttf?v=4.7.0') format('truetype'), url('/asset/fonts/fontawesome-webfont.svg?v=4.7.0#fontawesomeregular') format('svg'); + font-weight: normal; + font-style: normal; +} +.fa { + display: inline-block; + font: normal normal normal 18px/22px FontAwesome; + text-rendering: auto; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + margin: 0 10px; +} + +.modal { + position: fixed; + top: 50%; + left: 50%; + width: 50vw; + height: 70vh; + transform: translate(-50%, -50%); + background-color: #fafafa; + opacity: 0; + visibility: hidden; + transition: all 0.3s ease; + align-items: center; + justify-content: center; + padding: 2em; + border: 2px solid #aaa; + border-radius: 0.5em; + overflow-y: auto; +} +.modal-open { + visibility: visible; + opacity: 1; + transition-delay: 0s; +} + +.modal-exit { + position: absolute; + right: 15px; + top: 15px; + outline: none; + appearance: none; + color: red; + background: none; + border: 0px; + font-weight: bold; + font-size: 1.5em; + cursor: pointer; +} + +.modal ul { + list-style-type: disc; +} + +.modal pre { + background-color: #ddd; + padding: 0.5em 1em; + width: auto; +} + +.noFrame { + display: none; +} + + +@page { + size: A4; + margin: 11mm 17mm 17mm 17mm; +} + +@media screen { + #key { + position: fixed; + bottom: 0; + width: 80vw; + left: 10vw; + border-bottom: none; + } +} + +@media print { + html, body { + width: 210mm; + height: 297mm; + } + + #key { + width: 100%; + } + + .noPrint * { + display: none; + } +} diff --git a/test/fixtures/cache-tests/docker/Dockerfile b/test/fixtures/cache-tests/docker/Dockerfile new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/Dockerfile @@ -0,0 +1,89 @@ +FROM --platform=linux/amd64 ubuntu:devel +LABEL maintainer="Mark Nottingham <mnot@mnot.net>" + + +# package installs + +RUN apt-get update \ + && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ + nodejs npm git 
openssh-client telnet \ + squid \ + nginx \ + trafficserver \ + apache2 \ + varnish \ + golang \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get clean + + +# squid + +COPY squid/squid.conf /etc/squid/conf.d/cache-test.conf + +ENV SQUID_CACHE_DIR=/var/spool/squid +ENV SQUID_LOG_DIR=/var/log/squid +ENV SQUID_USER=proxy +EXPOSE 8001 + + +# nginx + +RUN mkdir /var/cache/nginx +COPY nginx/nginx.conf /etc/nginx/sites-enabled/cache-test.conf +EXPOSE 8002 + + +# trafficserver + +COPY trafficserver/ip_allow.config /etc/trafficserver/ip_allow.config +COPY trafficserver/records.config /etc/trafficserver/records.config +COPY trafficserver/remap.config /etc/trafficserver/remap.config + +ENV TS_CACHE_DIR=/var/run/trafficserver +ENV TS_LOG_DIR=/var/log/trafficserver +ENV TS_USER=trafficserver +EXPOSE 8003 + + +# apache + +RUN a2enmod cache_socache +RUN a2enmod cache_disk +RUN a2enmod proxy_http + +COPY apache/ports.conf /etc/apache2/ports.conf +COPY apache/apache.conf /etc/apache2/sites-enabled/cache-test.conf +EXPOSE 8004 + + +# varnish + +EXPOSE 8005 + + +# caddy + +RUN go install github.com/caddyserver/xcaddy/cmd/xcaddy@latest +RUN ~/go/bin/xcaddy build --with github.com/caddyserver/cache-handler +COPY caddy/Caddyfile /etc/caddy/Caddyfile +EXPOSE 8006 + +# setup + +COPY setup.sh /sbin/setup.sh +RUN chmod 755 /sbin/setup.sh +RUN /sbin/setup.sh + +# serve + +COPY serve.sh /sbin/serve.sh +RUN chmod 755 /sbin/serve.sh + + +# entrypoint + +COPY entrypoint.sh /sbin/entrypoint.sh +RUN chmod 755 /sbin/entrypoint.sh +ENTRYPOINT ["/sbin/entrypoint.sh"] +CMD [] diff --git a/test/fixtures/cache-tests/docker/Makefile b/test/fixtures/cache-tests/docker/Makefile new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/Makefile @@ -0,0 +1,11 @@ + +PKGNAME=mnot/proxy-cache-tests + + +.PHONY: build +build: + docker build --pull -t $(PKGNAME) . + +.PHONY: upload +upload: + docker push $(PKGNAME) diff --git a/test/fixtures/cache-tests/docker/README.md b/test/fixtures/cache-tests/docker/README.md new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/README.md @@ -0,0 +1,16 @@ +# Docker Image for Proxy Caches + +This Docker image runs reverse proxy caches for testing. + +To add a new reverse proxy: + +1. In `Dockerfile`: + 1. add its ubuntu package to the `apt-get` line + 2. make any configuration adjustments with `COPY` and/or `RUN` in a new section. In particular: + a. The proxy should listen on a dedicated port (the next in the 8000 series that's available) + b. It should use localhost:8000 for the origin server +2. In `setup.sh`, run any additional configuration steps that are necessary +3. In `entrypoint.sh`, add the needed commands (usually `sed`) to change the origin server hostname (for when the docker runs on desktop) +4. In `serve.sh`, start the server in the background +5. 
In `/test-docker.sh`, add the `PROXY_PORT` to the case statement + diff --git a/test/fixtures/cache-tests/docker/apache/apache.conf b/test/fixtures/cache-tests/docker/apache/apache.conf new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/apache/apache.conf @@ -0,0 +1,20 @@ +Mutex sem + +<VirtualHost *:8004> + + ProxyPass "/" "http://localhost:8000/" + CacheEnable disk / + + CacheRoot /var/cache/apache + CacheDirLevels 5 + CacheDirLength 3 + + CacheDefaultExpire 0 + CacheIgnoreNoLastMod On + CacheStoreExpired On + CacheNegotiatedDocs On + UseCanonicalName On + CacheHeader On + CacheDetailHeader on + +</VirtualHost> diff --git a/test/fixtures/cache-tests/docker/apache/ports.conf b/test/fixtures/cache-tests/docker/apache/ports.conf new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/apache/ports.conf @@ -0,0 +1 @@ +Listen 8004 diff --git a/test/fixtures/cache-tests/docker/caddy/Caddyfile b/test/fixtures/cache-tests/docker/caddy/Caddyfile new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/caddy/Caddyfile @@ -0,0 +1,27 @@ +# The Caddyfile is an easy way to configure your Caddy web server. +# +# Unless the file starts with a global options block, the first +# uncommented line is always the address of your site. +# +# To use your own domain name (with automatic HTTPS), first make +# sure your domain's A/AAAA DNS records are properly pointed to +# this machine's public IP, then replace ":80" below with your +# domain name. + +{ + cache +} + +:8006 { + cache { + default_cache_control no-store + } + reverse_proxy http://127.0.0.1:8000 { + transport http { + dial_timeout 10s + } + } +} + +# Refer to the Caddy docs for more information: +# https://caddyserver.com/docs/caddyfile diff --git a/test/fixtures/cache-tests/docker/entrypoint.sh b/test/fixtures/cache-tests/docker/entrypoint.sh new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/docker/entrypoint.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# If an argument is passed, make it the hostname for the origin server in proxy configs, and start the servers. + +if [[ ! 
-z $1 ]] ; then + + # squid + sed -i s/localhost/$1/g /etc/squid/conf.d/cache-test.conf + + # nginx + sed -i s/localhost/$1/g /etc/nginx/sites-enabled/cache-test.conf + sed -i s/worker_connections 768/worker_connections 2048/ /etc/nginx/nginx.conf + + # trafficserver + sed -i s/localhost:8000/$1:8000/g /etc/trafficserver/remap.config + + # apache + sed -i s/localhost/$1/g /etc/apache2/sites-enabled/cache-test.conf + + # varnish + sed -i s/127.0.0.1/$1/ /etc/varnish/default.vcl + + # caddy + sed -i s/127.0.0.1/$1/ /etc/caddy/Caddyfile + + serve.sh + fi + + +/bin/bash + diff --git a/test/fixtures/cache-tests/docker/nginx/nginx.conf b/test/fixtures/cache-tests/docker/nginx/nginx.conf new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/nginx/nginx.conf @@ -0,0 +1,13 @@ + +proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=my-cache:8m max_size=1000m inactive=600m; +proxy_temp_path /var/cache/nginx/tmp; + +server { + listen 8002; + + location / { + proxy_pass http://localhost:8000; + proxy_cache my-cache; + proxy_cache_revalidate on; + } +} diff --git a/test/fixtures/cache-tests/docker/serve.sh b/test/fixtures/cache-tests/docker/serve.sh new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/docker/serve.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +echo "* Starting squid" +squid -f /etc/squid/squid.conf -N & + +echo "* Starting nginx" +/usr/sbin/nginx -g "daemon off;" & + +echo "* starting TrafficServer" +/usr/bin/traffic_manager & + +echo "* Starting Apache" +source /etc/apache2/envvars +/usr/sbin/apache2 -X & + +echo "* Starting Varnish" +/usr/sbin/varnishd -j unix -a 0.0.0.0:8005 -f /etc/varnish/default.vcl -p default_ttl=0 -p default_grace=0 -p default_keep=3600 -s malloc,64M + +echo "* Starting Caddy" +HOME=/root /caddy run --config /etc/caddy/Caddyfile diff --git a/test/fixtures/cache-tests/docker/setup.sh b/test/fixtures/cache-tests/docker/setup.sh new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/docker/setup.sh @@ -0,0 +1,41 @@ +#!/bin/bash + + +## squid + +# Create log dir +mkdir -p ${SQUID_LOG_DIR} +chmod -R 755 ${SQUID_LOG_DIR} +chown -R ${SQUID_USER}:${SQUID_USER} ${SQUID_LOG_DIR} + +# Create cache dir +mkdir -p ${SQUID_CACHE_DIR} +chown -R ${SQUID_USER}:${SQUID_USER} ${SQUID_CACHE_DIR} + +if [[ ! -d ${SQUID_CACHE_DIR}/00 ]]; then +echo "Initializing cache..." 
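+# `squid -z` creates any missing cache (swap) directories; -N keeps squid in the foreground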
+$(which squid) -N -f /etc/squid/squid.conf -z 2&> /dev/null +fi + + +## apache + +mkdir /var/run/apache2 +mkdir /var/cache/apache +chown -R www-data /var/cache/apache + + +## trafficserver + +# Create log dir +mkdir -p ${TS_LOG_DIR} +chmod -R 755 ${TS_LOG_DIR} +chown -R ${TS_USER}:${TS_USER} ${TS_LOG_DIR} + +# Create cache dir +mkdir -p ${TS_CACHE_DIR} +chown -R ${TS_USER}:${TS_USER} ${TS_CACHE_DIR} + + +# varnish +sed -i s/8080/8000/ /etc/varnish/default.vcl diff --git a/test/fixtures/cache-tests/docker/squid/squid.conf b/test/fixtures/cache-tests/docker/squid/squid.conf new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/squid/squid.conf @@ -0,0 +1,9 @@ + +http_port 8001 accel defaultsite=localhost no-vhost +cache_peer localhost parent 8000 0 no-query no-digest originserver default name=myAccel +acl our_sites dstdomain localhost +cache_peer_access myAccel allow all +http_access allow all + +shutdown_lifetime 1 second +connect_retries 3 diff --git a/test/fixtures/cache-tests/docker/trafficserver/ip_allow.config b/test/fixtures/cache-tests/docker/trafficserver/ip_allow.config new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/trafficserver/ip_allow.config @@ -0,0 +1 @@ +src_ip=0.0.0.0-255.255.255.255 action=ip_allow diff --git a/test/fixtures/cache-tests/docker/trafficserver/records.config b/test/fixtures/cache-tests/docker/trafficserver/records.config new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/trafficserver/records.config @@ -0,0 +1,181 @@ +############################################################################## +# *NOTE*: All options covered in this file should be documented in the docs: +# +# https://docs.trafficserver.apache.org/records.config +############################################################################## + +############################################################################## +# Thread configurations. Docs: +# https://docs.trafficserver.apache.org/records.config#thread-variables +############################################################################## +CONFIG proxy.config.exec_thread.autoconfig INT 1 +CONFIG proxy.config.exec_thread.autoconfig.scale FLOAT 1.5 +CONFIG proxy.config.exec_thread.limit INT 2 +CONFIG proxy.config.accept_threads INT 1 +CONFIG proxy.config.task_threads INT 2 +CONFIG proxy.config.cache.threads_per_disk INT 8 +CONFIG proxy.config.exec_thread.affinity INT 1 + +############################################################################## +# Specify server addresses and ports to bind for HTTP and HTTPS. Docs: +# https://docs.trafficserver.apache.org/records.config#proxy.config.http.server_ports +############################################################################## +CONFIG proxy.config.http.server_ports STRING 8003 + +############################################################################## +# Via: headers. Docs: +# https://docs.trafficserver.apache.org/records.config#proxy-config-http-insert-response-via-str +############################################################################## +CONFIG proxy.config.http.insert_request_via_str INT 1 +CONFIG proxy.config.http.insert_response_via_str INT 0 + +############################################################################## +# Parent proxy configuration, in addition to these settings also see parent.config. 
Docs: +# https://docs.trafficserver.apache.org/records.config#parent-proxy-configuration +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/parent.config.en.html +############################################################################## +CONFIG proxy.config.http.parent_proxy_routing_enable INT 0 +CONFIG proxy.config.http.parent_proxy.retry_time INT 300 +CONFIG proxy.config.http.parent_proxy.connect_attempts_timeout INT 30 +CONFIG proxy.config.http.forward.proxy_auth_to_parent INT 0 +CONFIG proxy.config.http.uncacheable_requests_bypass_parent INT 1 + +############################################################################## +# HTTP connection timeouts (secs). Docs: +# https://docs.trafficserver.apache.org/records.config#http-connection-timeouts +############################################################################## +CONFIG proxy.config.http.keep_alive_no_activity_timeout_in INT 120 +CONFIG proxy.config.http.keep_alive_no_activity_timeout_out INT 120 +CONFIG proxy.config.http.transaction_no_activity_timeout_in INT 30 +CONFIG proxy.config.http.transaction_no_activity_timeout_out INT 30 +CONFIG proxy.config.http.transaction_active_timeout_in INT 900 +CONFIG proxy.config.http.transaction_active_timeout_out INT 0 +CONFIG proxy.config.http.accept_no_activity_timeout INT 120 +CONFIG proxy.config.net.default_inactivity_timeout INT 86400 + +############################################################################## +# Origin server connect attempts. Docs: +# https://docs.trafficserver.apache.org/records.config#origin-server-connect-attempts +############################################################################## +CONFIG proxy.config.http.connect_attempts_max_retries INT 3 +CONFIG proxy.config.http.connect_attempts_max_retries_dead_server INT 1 +CONFIG proxy.config.http.connect_attempts_rr_retries INT 3 +CONFIG proxy.config.http.connect_attempts_timeout INT 30 +CONFIG proxy.config.http.post_connect_attempts_timeout INT 1800 +CONFIG proxy.config.http.down_server.cache_time INT 60 +CONFIG proxy.config.http.down_server.abort_threshold INT 10 + +############################################################################## +# Negative response caching, for redirects and errors. Docs: +# https://docs.trafficserver.apache.org/records.config#negative-response-caching +############################################################################## +CONFIG proxy.config.http.negative_caching_enabled INT 0 +CONFIG proxy.config.http.negative_caching_lifetime INT 1800 + +############################################################################## +# Proxy users variables. Docs: +# https://docs.trafficserver.apache.org/records.config#proxy-user-variables +############################################################################## +CONFIG proxy.config.http.insert_client_ip INT 1 +CONFIG proxy.config.http.insert_squid_x_forwarded_for INT 1 + +############################################################################## +# Security. Docs: +# https://docs.trafficserver.apache.org/records.config#security +############################################################################## +CONFIG proxy.config.http.push_method_enabled INT 0 + +############################################################################## +# Enable / disable HTTP caching. 
Useful for testing, but also as an +# overridable (per remap) config +############################################################################## +CONFIG proxy.config.http.cache.http INT 1 + +############################################################################## +# Cache control. Docs: +# https://docs.trafficserver.apache.org/records.config#cache-control +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/cache.config.en.html +############################################################################## +CONFIG proxy.config.http.cache.ignore_client_cc_max_age INT 1 +CONFIG proxy.config.http.normalize_ae INT 1 +CONFIG proxy.config.http.cache.cache_responses_to_cookies INT 1 +CONFIG proxy.config.http.cache.cache_urls_that_look_dynamic INT 1 + # https://docs.trafficserver.apache.org/records.config#proxy-config-http-cache-when-to-revalidate +CONFIG proxy.config.http.cache.when_to_revalidate INT 0 + # https://docs.trafficserver.apache.org/records.config#proxy-config-http-cache-required-headers +CONFIG proxy.config.http.cache.required_headers INT 2 + +############################################################################## +# Heuristic cache expiration. Docs: +# https://docs.trafficserver.apache.org/records.config#heuristic-expiration +############################################################################## +CONFIG proxy.config.http.cache.heuristic_min_lifetime INT 3600 +CONFIG proxy.config.http.cache.heuristic_max_lifetime INT 86400 +CONFIG proxy.config.http.cache.heuristic_lm_factor FLOAT 0.10 + +############################################################################## +# Network. Docs: +# https://docs.trafficserver.apache.org/records.config#network +############################################################################## +CONFIG proxy.config.net.connections_throttle INT 30000 +CONFIG proxy.config.net.max_connections_in INT 30000 +CONFIG proxy.config.net.max_connections_active_in INT 10000 + +############################################################################## +# RAM and disk cache configurations. Docs: +# https://docs.trafficserver.apache.org/records.config#ram-cache +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/storage.config.en.html +############################################################################## +CONFIG proxy.config.cache.ram_cache.size INT -1 +CONFIG proxy.config.cache.ram_cache_cutoff INT 4194304 + # https://docs.trafficserver.apache.org/records.config#proxy-config-cache-limits-http-max-alts +CONFIG proxy.config.cache.limits.http.max_alts INT 5 + # https://docs.trafficserver.apache.org/records.config#proxy-config-cache-max-doc-size +CONFIG proxy.config.cache.max_doc_size INT 0 +CONFIG proxy.config.cache.min_average_object_size INT 8000 + +############################################################################## +# Logging Config. 
Docs: +# https://docs.trafficserver.apache.org/records.config#logging-configuration +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/logging.yaml.en.html +############################################################################## +CONFIG proxy.config.log.logging_enabled INT 3 +CONFIG proxy.config.log.max_space_mb_for_logs INT 25000 +CONFIG proxy.config.log.max_space_mb_headroom INT 1000 +CONFIG proxy.config.log.rolling_enabled INT 1 +CONFIG proxy.config.log.rolling_interval_sec INT 86400 +CONFIG proxy.config.log.rolling_size_mb INT 10 +CONFIG proxy.config.log.auto_delete_rolled_files INT 1 +CONFIG proxy.config.log.periodic_tasks_interval INT 5 + +############################################################################## +# These settings control remapping, and if the proxy allows (open) forward proxy or not. Docs: +# https://docs.trafficserver.apache.org/records.config#url-remap-rules +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/remap.config.en.html +############################################################################## +CONFIG proxy.config.url_remap.remap_required INT 1 + # https://docs.trafficserver.apache.org/records.config#proxy-config-url-remap-pristine-host-hdr +CONFIG proxy.config.url_remap.pristine_host_hdr INT 1 + # https://docs.trafficserver.apache.org/records.config#reverse-proxy +CONFIG proxy.config.reverse_proxy.enabled INT 1 + +############################################################################## +# SSL Termination. Docs: +# https://docs.trafficserver.apache.org/records.config#client-related-configuration +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/ssl_multicert.config.en.html +############################################################################## +CONFIG proxy.config.ssl.client.verify.server INT 0 +CONFIG proxy.config.ssl.client.CA.cert.filename STRING NULL +CONFIG proxy.config.ssl.server.cipher_suite STRING ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-DSS-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:DHE-DSS-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-RSA-AES256-SHA:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-DSS-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-DSS-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-DSS-AES256-SHA:DHE-RSA-AES128-SHA:DHE-DSS-AES128-SHA:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!aECDH:!EDH-DSS-DES-CBC3-SHA:!EDH-RSA-DES-CBC3-SHA:!KRB5-DES-CBC3-SHA + +############################################################################## +# Debugging. 
Docs: +# https://docs.trafficserver.apache.org/records.config#diagnostic-logging-configuration +############################################################################## +CONFIG proxy.config.diags.debug.enabled INT 0 +CONFIG proxy.config.diags.debug.tags STRING http|dns +# ToDo: Undocumented +CONFIG proxy.config.dump_mem_info_frequency INT 0 +CONFIG proxy.config.http.slow.log.threshold INT 0 + +CONFIG proxy.config.admin.user_id STRING trafficserver diff --git a/test/fixtures/cache-tests/docker/trafficserver/remap.config b/test/fixtures/cache-tests/docker/trafficserver/remap.config new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/docker/trafficserver/remap.config @@ -0,0 +1,197 @@ +# +# remap.config - URL Remapping Config File +# +# Documentation: +# https://docs.trafficserver.apache.org/en/latest/admin-guide/files/remap.config.en.html +# +# Using remap.config allows you to accomplish two things: +# +# 1) Rewrite a URL (from the client) before sending it to the Origin Server. +# 2) Protect the proxy server, to only allow certain requests. +# +# With the default configurations, at least one remap rule is required. This +# can be relaxed with the following configuration in records.config: +# +# CONFIG proxy.config.url_remap.remap_required INT 0 +# +# Be aware, doing so makes the proxy a generic, open-relay! +# +# The format is: +# <map_type> client-URL origin-server-URL <tag_value> <filtering> +# +# Where client-URL and origin-server-URL are both of the format +# <scheme>://<host>:<port>/<path_prefix> +# +# The <tag_value> directive is optional and can be different for different +# types of <map_type>. The <filtering arguments> are optional ACL-like +# arguments unique for each remap rule +# +# Six different types of mappings are possible: +# map +# map_with_referer +# map_with_recv_port +# reverse_map +# redirect +# redirect_temporary +# +# Each of these map types can be prefixed with the string 'regex_' to indicate +# that the rule will have regular expression strings. See the last part of +# this description for more information on regex support. +# +# The 'map' mapping is the most straightforward. Requests that match the +# client-URL are rewritten into the origin-server-URL. The user agent will see +# the page on the remapped URL, but will not be notified of the address +# change. +# +# The 'map_with_referer' is an extended version of 'map', which can be used to +# activate the so-called "deep linking protection" feature available in +# Apache Traffic Server. +# +# The 'map_with_recv_port' is exactly like 'map' except that it uses the port +# at which the request was received to perform the mapping instead of the port +# present in the request. When present, 'map_with_recv_port' mappings are +# checked first. If there is a match, then it is chosen without evaluating the +# "regular" forward mapping rules. +# +# The 'reverse_map' mapping is used to rewrite location headers sent by the +# origin server. The 'redirect' mapping creates a permanent redirect message +# and informs the browser of the URL change. +# +# The 'redirect_temporary' mapping acts in the same way but tells the browser +# that this redirect is only temporary. We need to map the URL in reverse +# proxy mode so that user agents know to contact Traffic Server and not +# attempt to contact the Origin Server directly. 
+# +# For example, you can set up a reverse proxy for www.example.com with the +# real content situated at server1.example.com with the rules: +# +# map http://www.example.com/ http://server1.example.com/ +# reverse_map http://server1.example.com/ http://www.example.com/ +# +# Or you could permanently redirect users trying to access www.oldserver.com +# to www.newserver.com with the following rule: +# +# redirect http://www.oldserver.com/ http://www.newserver.com +# +# If the redirect is only temporary, you want to only temporarily remap the +# URL. You could use the following rule to divert users away from a failed +# server: +# +# redirect_temporary http://broken.firm.com http://working.firm.com +# +# In order to use Traffic Server's "deep linking protection" feature, the +# 'map_with_referer' mapping scheme must be used. In general, the format is +# the following: +# +# map_with_referer client-URL origin-server-URL redirect-URL regex1 [regex2 ...] +# +# 'redirect-URL' is a redirection URL specified according to RFC 2616 and can +# contain special formatting instructions for run-time modifications of the +# resulting redirection URL. All regexes are Perl compatible regular expressions, +# which describe the content of the "Referer" header that must be +# verified. In case an actual request does not have a "Referer" header or it +# does not match the referer regular expression, the HTTP request will be +# redirected to 'redirect-URL'. +# +# At least one regular expression must be specified in order to activate +# 'deep linking protection'. The number of referer regular expression strings +# is limited to 2048. In order to enable the 'deep linking +# protection' feature in Traffic Server, configure records.config with: +# +# CONFIG proxy.config.http.referer_filter INT 1 +# +# In order to enable run-time formatting for redirect-URL, configure +# +# CONFIG proxy.config.http.referer_format_redirect INT 1 +# +# When run-time formatting for redirect-URL is enabled, the following format +# symbols can be used: +# +# %r - to substitute original "Referer" header string +# %f - to substitute client-URL from 'map_with_referer' record +# %t - to substitute origin-server-URL from 'map_with_referer' record +# %o - to substitute request URL to origin server, which was created as +# the result of a mapping operation +# +# Note: There is a special referer type "~*" that can be used in order to +# specify that the Referer header is optional in the request. If the "~*" referer +# was used in a map_with_referer mapping, only requests with a Referer header will +# be verified for validity. If the "~" symbol was specified before a referer +# regular expression, it means that requests with a matching referer header +# will be redirected to redirect-URL. It can be used to create a so-called +# negative referer list. If "*" was used as a referer regular expression - +# all referers are allowed. Various combinations of "*" and "~" in a referer +# list can be used to create different filtering rules. +# +# Examples: +# map_with_referer http://y.foo.bar.com/x/yy/ http://foo.bar.com/x/yy/ http://games.bar.com/new_games .*\.bar\.com www.bar-friends.com +# +# Explanation: Referer header must be in the request, only ".*\.bar\.com" +# and "www.bar-friends.com" are allowed. +# +# map_with_referer http://y.foo.bar.com/x/yy/ http://foo.bar.com/x/yy/ http://games.bar.com/new_games * ~.*\.evil\.com +# +# Explanation: Referer header must be in the request but all referers are +# allowed except ".*\.evil\.com".
+# +# map_with_referer http://y.foo.bar.com/x/yy/ http://foo.bar.com/x/yy/ http://games.bar.com/error ~* * ~.*\.evil\.com +# +# Explanation: Referer header is optional. However, if a Referer header exists, +# only requests from ".*\.evil\.com" will be redirected to redirect-URL. +# +# There are optional filtering arguments that can be specified at the end of the mapping definition line: +# +# @action=allow|deny +# @src_ip=IP-address +# @method=HTTP method string (CONNECT|DELETE|GET|HEAD|OPTIONS|POST|PURGE|PUT|TRACE|PUSH) +# @plugin=<plugin_path> +# @pparam=<plugin_param> +# +# There is no limitation for the number of filtering arguments. +# +# Example: +# map http://foo.cow.com/ http://bar.cow.com @src_ip=10.72.118.51-10.72.118.62 @method=GET @method=DELETE @src_ip=192.168.0.1-192.168.0.254 @action=allow @method=PUT +# +# Traffic Server supports WebSockets but it must be enabled via remap. WebSocket upgrades are automatically +# detected when there exists a remap rule containing a ws:// scheme. +# +# Example: +# map ws://bar.com/ ws://foo.com/ +# +# Explanation: When a request comes in with the appropriate upgrade headers, Traffic Server will use this +# remap rule in an attempt to establish and maintain a websocket connection. +# +# Named filters can be created and applied to blocks of mappings +# using the .definefilter, .activatefilter, and .deactivatefilter +# directives. Named filters must be defined using .definefilter +# before being used. Once defined, .activatefilter can be used to +# activate a filter for all mappings that follow until deactivated +# with .deactivatefilter. +# +# Example: +# .definefilter disable_delete_purge @action=deny @method=delete @method=purge +# .definefilter internal_only @action=allow @src_ip=192.168.0.1-192.168.0.254 @src_ip=10.0.0.1-10.0.0.254 +# +# .activatefilter disable_delete_purge +# +# map http://foo.example.com/ http://bar.example.com/ +# +# .activatefilter internal_only +# map http://www.example.com/admin http://internal.example.com/admin +# .deactivatefilter internal_only +# +# map http://www.example.com/ http://internal.example.com/ +# +# +# Regex support: Regular expressions can be specified in the rules with the +# following limitations: +# +# 1) Only the host field can have regexes - the scheme, port and other +# fields cannot. +# 2) The number of capturing sub-patterns is limited to 9; +# this means $0 through $9 can be used as substitution placeholders ($0 +# will be the entire input string) +# 3) The number of substitutions in the expansion string is limited to 10.
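+# +# An illustrative (hypothetical) rule, not taken from the upstream remap.config: +# combining the 'regex_' prefix with the substitution placeholders described +# above, requests for any subdomain could be mapped onto a matching origin host: +# +# regex_map http://(.*)\.example\.com/ http://$1.origin.example.com/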
+# + +map http://localhost:8003/ http://localhost:8000/ diff --git a/test/fixtures/cache-tests/index.html b/test/fixtures/cache-tests/index.html new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/index.html @@ -0,0 +1,97 @@ +<!doctype html> +<html> + <head> + <meta charset="utf-8"> + <title>HTTP Caching Tests</title> + <link rel="stylesheet" href="/asset/style.css" type="text/css"> + <meta property="og:type" content="website"> + <meta property="og:title" content="HTTP Caching Tests"> + <meta property="og:description" content="Finding out how browser and proxy caches behave (and misbehave)"> + <meta property="og:url" content="https://cache-tests.fyi/"> + <meta property="og:site_name" content="HTTP Caching Tests"> + <meta property="og:image" content="https://cache-tests.fyi/asset/badge.png"> + <link rel="preload" as="fetch" crossorigin href="/test-engine/lib/tpl/explain-test.liquid"> + <link rel="preload" as="fetch" crossorigin href="/test-engine/lib/tpl/checks.liquid"> + <link rel="preload" as="fetch" crossorigin href="/test-engine/lib/tpl/header-list.liquid"> + <link rel="preload" as="fetch" crossorigin href="/test-engine/lib/tpl/header-magic.liquid"> + <script type="module"> + import index from './results/index.mjs' + import * as summary from './test-engine/lib/summary.mjs' + import * as display from './test-engine/lib/display.mjs' + import baseTests from './tests/index.mjs' + + const loc = new URL(window.location) + const suiteIds = loc.searchParams.getAll('suite') + const testIds = loc.searchParams.getAll('id') + const isFrame = loc.searchParams.get('frame') && true || false + const isDefault = testIds.length === 0 && suiteIds.length === 0 + + if (! isFrame) { + document.querySelectorAll('.noFrame').forEach(e => {e.style.display = 'block'}) + } + + const ToC = document.getElementById('ToC') + const target = document.getElementById('target') + const key = document.getElementById('key') + + display.showKey(key) + summary.loadResults(index) + .then(results => { + if (isDefault) { + summary.showToC(ToC, baseTests) + } + summary.showResults(target, baseTests, results, testIds, suiteIds) + }) + .then(() => { + if (window.location.hash !== "") { + const fragId = window.location.hash.substring(1) + document.getElementById(fragId).scrollIntoView() + } + if (! isFrame) { + summary.selectClickListen() + } + }) + </script> + </head> + <body> + <h1 class='noFrame'>HTTP Caching Tests</h1> + + <p class="warning noFrame">These tests are a work in progress. The reported results may be + faulty, and do not necessarily reflect the true capabilities of each cache. They should not be + used to evaluate or compare feature support.
+ <strong>This is an open source project</strong>; to make contributions, add your + implementation's results, file issues or learn more, see <a + href="https://github.com/http-tests/cache-tests">the repository</a>.</p> + + <p class='noFrame noPrint'><span>See also </span><a href="/spec/rfc9111.html">test results interspersed with the current specification text</a>.</p> + + <p id="key"> + <span class="key-item"><span class="fa" data-kind="pass"></span>passed &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="optional_fail"></span>optional test failed &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="fail"></span>conformance test failed &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="yes"></span> / <span class="fa" data-kind="no"></span>behaviour check results &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="harness_fail"></span>test harness failure &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="setup_fail"></span>test failed during setup &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="retry"></span>cache retries a request &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="dependency_fail"></span>test dependency failed &nbsp;</span> + <span class="key-item"><span class="fa" data-kind="untested"></span>not tested</span> + </p> + + <ul id="ToC" class="noFrame noPrint"> + </ul> + + <form method="GET" id="selectForm"> + + <p class="noFrame noPrint"><i> + Click on test names for details of what is sent and checked. Hover over failed tests for the reason why they failed. + </i></p> + + <p class="noFrame noPrint"> + <a href="#" id="select">Select test results to display</a> + </p> + + <table id="target"> + </table> + </form> + </body> +</html> diff --git a/test/fixtures/cache-tests/package.json b/test/fixtures/cache-tests/package.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/package.json @@ -0,0 +1,42 @@ +{ + "name": "http-cache-tests", + "version": "0.4.5", + "description": "Tests for HTTP caches", + "homepage": "https://cache-tests.fyi/", + "author": "Mark Nottingham", + "license": "BSD-3-Clause", + "engines": { + "node": ">=14.8.0" + }, + "dependencies": { + "liquidjs": "^10.9.2", + "marked": "^15.0.0", + "node-fetch": "3.1.1", + "node-fetch-with-proxy": "^0.1.6", + "npm": "^10.1.0" + }, + "scripts": { + "server": "node test-engine/server/server.mjs", + "cli": "node --no-warnings test-engine/cli.mjs", + "export": "node --no-warnings test-engine/export.mjs", + "validate": "node --no-warnings test-engine/export.mjs validate", + "lint": "standard test-engine tests spec", + "fix": "standard --fix test-engine tests spec" + }, + "config": { + "protocol": "http", + "port": "8000", + "base": "", + "id": "", + "pidfile": "/tmp/http-cache-test-server.pid" + }, + "repository": { + "type": "git", + "url": "https://github.com/http-tests/cache-tests.git" + }, + "devDependencies": { + "ajv": "^8.12.0", + "font-awesome": "^4.7.0", + "standard": "^17.1.0" + } +} diff --git a/test/fixtures/cache-tests/results/apache.json b/test/fixtures/cache-tests/results/apache.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/apache.json @@ -0,0 +1,675 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": true, + "304-etag-update-response-Content-Encoding": [ + "Setup", + "retry" + ], + "304-etag-update-response-Content-Foo": true, + "304-etag-update-response-Content-Length": [ + "Setup", + "retry" + 
], + "304-etag-update-response-Content-Location": [ + "Setup", + "retry" + ], + "304-etag-update-response-Content-MD5": [ + "Setup", + "retry" + ], + "304-etag-update-response-Content-Range": [ + "Setup", + "retry" + ], + "304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": [ + "Setup", + "retry" + ], + "304-etag-update-response-ETag": [ + "Setup", + "retry" + ], + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": true, + "304-etag-update-response-Set-Cookie2": true, + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": true, + "304-etag-update-response-X-Frame-Options": true, + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": true, + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": true, + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-suffix-twoline": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": true, + "cc-resp-no-cache-revalidate-fresh": true, + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": true, + "ccreq-ma1": true, + "ccreq-magreaterage": true, + "ccreq-max-stale": true, + "ccreq-max-stale-age": true, + "ccreq-min-fresh": true, + "ccreq-min-fresh-age": true, + "ccreq-no-cache": true, + "ccreq-no-cache-etag": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "ccreq-no-cache-lm": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "ccreq-no-store": true, + "ccreq-oic": true, + "cdn-cc-invalid-sh-type-unknown": true, + "cdn-cc-invalid-sh-type-wrong": true, + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 01:05:30 GMT\"" + ], + "cdn-fresh-cc-nostore": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-0": true, + "cdn-max-age-0-expires": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-age": true, + "cdn-max-age-case-insensitive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-cc-max-age-invalid-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-extension": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-long-cc-max-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-private": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-remove-age-exceed": [ + "Assertion", + "Response 2 Age header not present." + ], + "cdn-remove-header": true, + "conditional-304-etag": true, + "conditional-etag-forward": true, + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": true, + "conditional-etag-quoted-respond-unquoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": true, + "conditional-etag-strong-respond-multiple-last": true, + "conditional-etag-strong-respond-multiple-second": true, + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-unquoted-respond-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-unquoted-respond-unquoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": true, + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-weak-respond-lowercase": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-weak-respond-omit-slash": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-lm-fresh": true, + "conditional-lm-fresh-earlier": true, + "conditional-lm-fresh-no-lm": [ + "Setup", + "Response 2 does not come from cache" + ], + "conditional-lm-fresh-rfc850": true, + 
"conditional-lm-stale": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": true, + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": true, + "freshness-expires-invalid-2-digit-year": true, + "freshness-expires-invalid-aest": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": true, + "freshness-expires-invalid-multiple-lines": true, + "freshness-expires-invalid-multiple-spaces": true, + "freshness-expires-invalid-no-comma": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-tz": true, + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-decimal-zero": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": true, + "freshness-max-age-space-before-equals": true, + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-fresh-stale-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sameline": true, + "freshness-max-age-two-stale-fresh-sepline": true, + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "Assertion", + "Response 3 does not come from cache" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-410-update": [ + "Setup", + "Response 3 does 
not come from cache" + ], + "head-writethrough": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": true, + "headers-omit-headers-listed-in-Connection": true, + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": true, + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": true, + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": true, + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": true, + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Setup", + "Response 2 status is 500, not 599" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": true, + "heuristic-delta-1800": true, + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": true, + "heuristic-delta-3600": true, + "heuristic-delta-43200": true, + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": true, + "heuristic-delta-600": true, + "heuristic-delta-86400": true, + "invalidate-DELETE": true, + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": true, + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" 
+ ], + "invalidate-PUT": true, + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." + ], + "other-age-gen": true, + "other-age-update-expires": true, + "other-age-update-max-age": true, + "other-authorization": true, + "other-authorization-must-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-authorization-public": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-authorization-smaxage": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-cookie": true, + "other-date-update": [ + "Assertion", + "Response 2 header Date is \"Tue, 09 Jul 2024 01:05:24 GMT\", not \"Tue, 09 Jul 2024 01:05:21 GMT\"" + ], + "other-date-update-expires": [ + "Assertion", + "Response 2 header Date is \"Tue, 09 Jul 2024 01:05:24 GMT\", not \"Tue, 09 Jul 2024 01:05:21 GMT\"" + ], + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": true, + "other-set-cookie": true, + "partial-store-complete-reuse-partial": true, + "partial-store-complete-reuse-partial-no-last": true, + "partial-store-complete-reuse-partial-suffix": true, + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": true, + "partial-use-stored-headers": true, + "pragma-request-extension": true, + "pragma-request-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-extension": true, + "pragma-response-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": true, + "stale-close": true, + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": true, + "stale-sie-close": true, + "stale-warning-become": true, + "stale-warning-stored": true, + "stale-while-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-while-revalidate-window": [ + "Setup", + "Response 2 does not come from cache" + ], + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-204-stale": true, + "status-299-fresh": [ + "Setup", + 
"Response 2 status is 500, not 299" + ], + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": true, + "status-302-stale": true, + "status-303-fresh": true, + "status-303-stale": true, + "status-307-fresh": true, + "status-307-stale": true, + "status-308-fresh": true, + "status-308-stale": true, + "status-400-fresh": true, + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Setup", + "Response 2 status is 500, not 499" + ], + "status-499-stale": true, + "status-500-fresh": true, + "status-500-stale": true, + "status-502-fresh": true, + "status-502-stale": true, + "status-503-fresh": true, + "status-503-stale": true, + "status-504-fresh": true, + "status-504-stale": true, + "status-599-fresh": [ + "Setup", + "Response 2 status is 500, not 599" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": true, + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} diff --git a/test/fixtures/cache-tests/results/caddy.json b/test/fixtures/cache-tests/results/caddy.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/caddy.json @@ -0,0 +1,843 @@ +{ + "304-etag-update-response-Cache-Control": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Clear-Site-Data": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-Encoding": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-Foo": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-Length": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-Location": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-MD5": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-Range": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Content-Security-Policy": [ + "Setup", + "Request 2 should have been conditional, but it was not." 
+ ], + "304-etag-update-response-Content-Type": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-ETag": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Expires": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Public-Key-Pins": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Set-Cookie": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Set-Cookie2": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Test-Header": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-X-Content-Foo": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-X-Frame-Options": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-X-Test-Header": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-X-XSS-Protection": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-lm-use-stored-Test-Header": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": true, + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-suffix-twoline": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": [ + "Assertion", + "Request 3 should have been conditional, but it was not." + ], + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "cc-resp-no-cache-revalidate-fresh": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-max-stale-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-min-fresh": true, + "ccreq-min-fresh-age": true, + "ccreq-no-cache": true, + "ccreq-no-cache-etag": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "ccreq-no-cache-lm": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": [ + "Assertion", + "Response 1 status is 200, not 504" + ], + "cdn-cc-invalid-sh-type-unknown": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-cc-invalid-sh-type-wrong": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 01:03:11 GMT\"" + ], + "cdn-fresh-cc-nostore": true, + "cdn-max-age": true, + "cdn-max-age-0": true, + "cdn-max-age-0-expires": true, + "cdn-max-age-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-case-insensitive": true, + "cdn-max-age-cc-max-age-invalid-expires": true, + "cdn-max-age-expires": true, + "cdn-max-age-extension": true, + "cdn-max-age-long-cc-max-age": true, + "cdn-max-age-max": true, + "cdn-max-age-max-plus": true, + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": true, + "cdn-private": true, + "cdn-remove-age-exceed": [ + "Assertion", + "Response 2 Age header not present." + ], + "cdn-remove-header": true, + "conditional-304-etag": true, + "conditional-etag-forward": true, + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": true, + "conditional-etag-quoted-respond-unquoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-strong-generate": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-strong-respond-multiple-last": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-strong-respond-multiple-second": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-unquoted-respond-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-etag-unquoted-respond-unquoted": true, + "conditional-etag-vary-headers": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": true, + "conditional-etag-weak-respond-lowercase": true, + "conditional-etag-weak-respond-omit-slash": true, + "conditional-lm-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-lm-fresh-earlier": [ + "Assertion", + "Response 2 does not come from cache" + ], + "conditional-lm-fresh-no-lm": [ + "Setup", + "Response 2 does not come from cache" + ], + "conditional-lm-fresh-rfc850": [ + "Setup", + "Response 2 does not come from cache" + ], + "conditional-lm-stale": true, + "freshness-expires-32bit": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-age-fast-date": true, + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-far-future": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-future": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": true, + "freshness-expires-invalid-2-digit-year": true, + "freshness-expires-invalid-aest": true, + "freshness-expires-invalid-date": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-invalid-date-dashes": true, + "freshness-expires-invalid-multiple-lines": true, + "freshness-expires-invalid-multiple-spaces": true, + "freshness-expires-invalid-no-comma": true, + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": true, + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-month": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-tz": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-weekday": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-decimal-zero": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": true, + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": true, + 
"freshness-max-age-space-after-equals": true, + "freshness-max-age-space-before-equals": true, + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-fresh-stale-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sameline": true, + "freshness-max-age-two-stale-fresh-sepline": true, + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "FetchError", + "request to http://localhost:8006/test/3a29d44d-d103-492b-9046-be67546d71d7 failed, reason: Parse Error: Empty Content-Length" + ], + "head-200-retain": [ + "FetchError", + "request to http://localhost:8006/test/d07b2651-4270-4ea6-ae9f-041b1b82aae8 failed, reason: Parse Error: Empty Content-Length" + ], + "head-200-update": [ + "FetchError", + "request to http://localhost:8006/test/eb317c8b-cea4-4b4c-b72b-71df1aa6e863 failed, reason: Parse Error: Empty Content-Length" + ], + "head-410-update": [ + "FetchError", + "request to http://localhost:8006/test/7f947275-bffe-4c72-a493-c64352423d8e failed, reason: Parse Error: Empty Content-Length" + ], + "head-writethrough": [ + "FetchError", + "request to http://localhost:8006/test/f9b80f6e-7980-4a44-ae58-d58ac539223c failed, reason: Parse Error: Empty Content-Length" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Connection": true, + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": true, + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": [ + "Setup", + "Response 1 status is 502, not 200" + ], + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-414-cached": [ + 
"Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1800": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-3600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-43200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-86400": [ + "Assertion", + "Response 2 does not come from cache" + ], + "invalidate-DELETE": true, + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": true, + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": true, + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": true, + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." 
+ ], + "other-age-gen": true, + "other-age-update-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-update-max-age": true, + "other-authorization": true, + "other-authorization-must-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-authorization-public": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-authorization-smaxage": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-cookie": true, + "other-date-update": true, + "other-date-update-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-date-update-expires-update": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-set-cookie": true, + "partial-store-complete-reuse-partial": [ + "Assertion", + "Response 2 status is 200, not 206" + ], + "partial-store-complete-reuse-partial-no-last": [ + "Assertion", + "Response 2 status is 200, not 206" + ], + "partial-store-complete-reuse-partial-suffix": [ + "Assertion", + "Response 2 status is 200, not 206" + ], + "partial-store-partial-complete": [ + "Setup", + "Response 2 status is 206, not 200" + ], + "partial-store-partial-reuse-partial": true, + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response body is \"01234\", not \"234\"" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response body is \"01234\", not \"234\"" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response body is \"01234\", not \"4\"" + ], + "partial-use-headers": [ + "Setup", + "Response 2 status is 200, not 206" + ], + "partial-use-stored-headers": [ + "Setup", + "Response 2 status is 200, not 206" + ], + "pragma-request-extension": true, + "pragma-request-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-extension": true, + "pragma-response-no-cache": true, + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-sie-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-warning-become": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-warning-stored": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-while-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-while-revalidate-window": [ + "Setup", + "Response 2 does not come from cache" + ], + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": true, + "status-299-fresh": [ + 
"Assertion", + "Response 2 does not come from cache" + ], + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-302-stale": true, + "status-303-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-303-stale": true, + "status-307-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-307-stale": true, + "status-308-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-308-stale": true, + "status-400-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-499-stale": true, + "status-500-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-500-stale": true, + "status-502-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-502-stale": true, + "status-503-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-503-stale": true, + "status-504-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-504-stale": true, + "status-599-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": true, + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} diff --git a/test/fixtures/cache-tests/results/chrome.json b/test/fixtures/cache-tests/results/chrome.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/chrome.json @@ -0,0 +1,599 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": [ + "Assertion", + "Response 2 header Clear-Site-Data is \"null\", not \"cookies\"" + ], + "304-etag-update-response-Content-Encoding": [ + "Assertion", + "Response 2 header Content-Encoding is \"arizqhypgxofwne\", not \"askcumewogyqias\"" + ], + "304-etag-update-response-Content-Foo": true, + "304-etag-update-response-Content-Length": true, + "304-etag-update-response-Content-Location": [ + "Assertion", + "Response 2 header Content-Location is \"/foo\", not \"/bar\"" + ], + "304-etag-update-response-Content-MD5": [ + "Assertion", + "Response 2 header Content-MD5 is 
\"rL0Y20zC+Fzt72VPzMSk2A==\", not \"N7UdGUp1E+RbVvZSTy1R8g==\"" + ], + "304-etag-update-response-Content-Range": [ + "Assertion", + "Response 2 header Content-Range is \"apetixmbqfujync\", not \"aqgwmcsiyoeukaq\"" + ], + "304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": [ + "Assertion", + "Response 2 header Content-Type is \"text/plain\", not \"text/plain;charset=utf-8\"" + ], + "304-etag-update-response-ETag": [ + "Assertion", + "Response 2 header ETag is \"\"abcdef\"\", not \"\"ghijkl\"\"" + ], + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": [ + "Assertion", + "Response 2 header X-Content-Foo is \"azyxwvutsrqponm\", not \"aaaaaaaaaaaaaaa\"" + ], + "304-etag-update-response-X-Frame-Options": [ + "Assertion", + "Response 2 header X-Frame-Options is \"deny\", not \"sameorigin\"" + ], + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": [ + "Assertion", + "Response 2 header X-XSS-Protection is \"1\", not \"1; mode=block\"" + ], + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": true, + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": true, + "age-parse-suffix-twoline": true, + "cc-resp-immutable-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cc-resp-immutable-stale": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": true, + "cc-resp-no-cache-revalidate-fresh": true, + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-private": true, + "ccreq-ma0": true, + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-max-stale-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-min-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-min-fresh-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache": true, + "ccreq-no-cache-etag": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "ccreq-no-cache-lm": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": [ + "Assertion", + "Response 1 status is 200, not 504" + ], + "conditional-etag-forward": true, + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": [ + "Assertion", + "Request 2 header If-None-Match is \"\"abcdef\"\"" + ], + "conditional-etag-weak-generate-weak": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": true, + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-2-digit-year": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-aest": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-spaces": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-no-comma": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-time-periods": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-utc": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": true, + "freshness-expires-wrong-case-tz": true, + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-decimal-zero": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-private": true, + "freshness-max-age-s-maxage-private-multiple": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + 
"freshness-max-age-space-before-equals": true, + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": true, + "freshness-max-age-two-fresh-stale-sepline": true, + "freshness-max-age-two-stale-fresh-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-none": true, + "head-200-freshness-update": [ + "Assertion", + "Response 3 does not come from cache" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-410-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-writethrough": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": true, + "headers-omit-headers-listed-in-Connection": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": [ + "Assertion", + "Response 2 header Clear-Site-Data is \"null\", not \"cookies\"" + ], + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "headers-store-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"null\", not \"a=c\"" + ], + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": true, + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": true, + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": true, + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": true, + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": true, + "heuristic-delta-1800": true, + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + 
"heuristic-delta-300": true, + "heuristic-delta-3600": true, + "heuristic-delta-43200": true, + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": true, + "heuristic-delta-600": true, + "heuristic-delta-86400": true, + "invalidate-DELETE": true, + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": true, + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": true, + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." + ], + "other-age-gen": [ + "Assertion", + "Response 2 Age header not present." + ], + "other-age-update-expires": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-age-update-max-age": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-cookie": true, + "other-date-update": true, + "other-date-update-expires": true, + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": true, + "other-set-cookie": true, + "partial-store-complete-reuse-partial": true, + "partial-store-complete-reuse-partial-no-last": true, + "partial-store-complete-reuse-partial-suffix": true, + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": true, + "partial-use-stored-headers": true, + "pragma-request-extension": true, + "pragma-request-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-extension": true, + "pragma-response-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close": [ + "TypeError", + "Failed to fetch" + ], + "stale-close-must-revalidate": [ + "TypeError", + "Failed to fetch" + ], + "stale-close-no-cache": [ + "TypeError", + "Failed to fetch" + ], + "stale-sie-503": [ + "TypeError", + "Failed to fetch" + ], + "stale-sie-close": [ + "TypeError", + 
"Failed to fetch" + ], + "stale-warning-become": [ + "TypeError", + "Failed to fetch" + ], + "stale-warning-stored": [ + "TypeError", + "Failed to fetch" + ], + "stale-while-revalidate": true, + "stale-while-revalidate-window": true, + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": true, + "status-299-fresh": true, + "status-299-stale": true, + "status-400-fresh": true, + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": true, + "status-499-stale": true, + "status-500-fresh": true, + "status-500-stale": true, + "status-502-fresh": true, + "status-502-stale": true, + "status-503-fresh": true, + "status-503-stale": true, + "status-504-fresh": true, + "status-504-stale": true, + "status-599-fresh": true, + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": [ + "Assertion", + "Response 3 does not come from cache" + ], + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": true, + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": true, + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} \ No newline at end of file diff --git a/test/fixtures/cache-tests/results/fastly.json b/test/fixtures/cache-tests/results/fastly.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/fastly.json @@ -0,0 +1,882 @@ +{ + "304-etag-update-response-Cache-Control": [ + "Assertion", + "Response 2 header Cache-Control is \"max-age=1\", not \"max-age=3600\"" + ], + "304-etag-update-response-Clear-Site-Data": [ + "Assertion", + "Response 2 header Clear-Site-Data is \"cache\", not \"cookies\"" + ], + "304-etag-update-response-Content-Encoding": [ + "Assertion", + "Response 2 header Content-Encoding is \"arizqhypgxofwne\", not \"askcumewogyqias\"" + ], + "304-etag-update-response-Content-Foo": [ + "Assertion", + "Response 2 header Content-Foo is \"awsokgcyuqmieaw\", not \"axurolifczwtqnk\"" + ], + "304-etag-update-response-Content-Length": true, + "304-etag-update-response-Content-Location": [ + "Assertion", + "Response 2 header Content-Location is \"/foo\", not \"/bar\"" + ], + "304-etag-update-response-Content-MD5": [ + "Assertion", + "Response 2 header Content-MD5 is \"rL0Y20zC+Fzt72VPzMSk2A==\", not \"N7UdGUp1E+RbVvZSTy1R8g==\"" + ], + "304-etag-update-response-Content-Range": [ + "Assertion", + "Response 2 header Content-Range is \"null\", not \"aqgwmcsiyoeukaq\"" + ], + 
"304-etag-update-response-Content-Security-Policy": [ + "Assertion", + "Response 2 header Content-Security-Policy is \"default-src 'self'\", not \"default-src 'self' cdn.example.com\"" + ], + "304-etag-update-response-Content-Type": [ + "Assertion", + "Response 2 header Content-Type is \"text/plain\", not \"text/plain;charset=utf-8\"" + ], + "304-etag-update-response-ETag": [ + "Assertion", + "Response 2 header ETag is \"\"abcdef\"\", not \"\"ghijkl\"\"" + ], + "304-etag-update-response-Expires": [ + "Assertion", + "Response 2 header Expires is \"Fri, 01 Jan 2038 01:01:01 GMT\", not \"Mon, 11 Jan 2038 11:11:11 GMT\"" + ], + "304-etag-update-response-Public-Key-Pins": [ + "Assertion", + "Response 2 header Public-Key-Pins is \"auoicwqkeysmgau\", not \"avqlgbwrmhcxsni\"" + ], + "304-etag-update-response-Set-Cookie": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"a=b\", not \"a=c\"" + ], + "304-etag-update-response-Test-Header": [ + "Assertion", + "Response 2 header Test-Header is \"aaaaaaaaaaaaaaa\", not \"abcdefghijklmno\"" + ], + "304-etag-update-response-X-Content-Foo": [ + "Assertion", + "Response 2 header X-Content-Foo is \"azyxwvutsrqponm\", not \"aaaaaaaaaaaaaaa\"" + ], + "304-etag-update-response-X-Frame-Options": [ + "Assertion", + "Response 2 header X-Frame-Options is \"deny\", not \"sameorigin\"" + ], + "304-etag-update-response-X-Test-Header": [ + "Assertion", + "Response 2 header X-Test-Header is \"adgjmpsvybehknq\", not \"aeimquycgkoswae\"" + ], + "304-etag-update-response-X-XSS-Protection": [ + "Assertion", + "Response 2 header X-XSS-Protection is \"1\", not \"1; mode=block\"" + ], + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": true, + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": true, + "age-parse-parameter": true, + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": true, + "age-parse-suffix-twoline": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cc-resp-no-cache-case-insensitive": [ + "Assertion", + "Response 2 comes from cache" + ], + "cc-resp-no-cache-revalidate": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "cc-resp-no-cache-revalidate-fresh": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-max-stale-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-min-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-min-fresh-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache-etag": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-cache-lm": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": [ + "Assertion", + "Response 1 status is 200, not 504" + ], + "cdn-cc-invalid-sh-type-unknown": true, + "cdn-cc-invalid-sh-type-wrong": true, + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 01:17:30 GMT\"" + ], + "cdn-fresh-cc-nostore": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-0": true, + "cdn-max-age-0-expires": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-age": true, + "cdn-max-age-case-insensitive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-cc-max-age-invalid-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-extension": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-long-cc-max-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-private": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-remove-age-exceed": [ + "Assertion", + "Response 2 Age header not present." 
+ ], + "cdn-remove-header": true, + "conditional-304-etag": true, + "conditional-etag-forward": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": true, + "conditional-etag-quoted-respond-unquoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": true, + "conditional-etag-strong-respond-multiple-last": true, + "conditional-etag-strong-respond-multiple-second": true, + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-quoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-unquoted": true, + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": true, + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": true, + "conditional-etag-weak-respond-lowercase": true, + "conditional-etag-weak-respond-omit-slash": true, + "conditional-lm-fresh": true, + "conditional-lm-fresh-earlier": true, + "conditional-lm-fresh-no-lm": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh-rfc850": true, + "conditional-lm-stale": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-age-slow-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-2-digit-year": true, + "freshness-expires-invalid-aest": true, + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": true, + "freshness-expires-invalid-multiple-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-spaces": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-no-comma": true, + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": true, + "freshness-expires-old-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": true, + "freshness-expires-wrong-case-tz": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": true, + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-decimal-five": [ + 
"Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-decimal-zero": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-before-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": true, + "freshness-max-age-two-fresh-stale-sepline": true, + "freshness-max-age-two-stale-fresh-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "head-410-update": [ + "Setup", + "Response 3 status is 410, not 200" + ], + "head-writethrough": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-omit-headers-listed-in-Connection": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": [ + "Assertion", + "Response 2 header Content-Range is \"null\", not \"ananananananana\"" + ], + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + 
"headers-store-Transfer-Encoding": [ + "Setup", + "Response 1 status is 503, not 200" + ], + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": [ + "Setup", + "retry" + ], + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1800": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-3600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-43200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-86400": [ + "Assertion", + "Response 2 does not come from cache" + ], + "invalidate-DELETE": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 header 
age is 0, should be bigger than 0" + ], + "other-age-gen": true, + "other-age-update-expires": [ + "Assertion", + "Response 2 header Age is 3, should be bigger than 32" + ], + "other-age-update-max-age": true, + "other-authorization": [ + "Assertion", + "Response 2 comes from cache" + ], + "other-authorization-must-revalidate": true, + "other-authorization-public": true, + "other-authorization-smaxage": true, + "other-cookie": true, + "other-date-update": [ + "Assertion", + "Response 2 header Date is \"Tue, 09 Jul 2024 01:17:25 GMT\", not \"Tue, 09 Jul 2024 01:17:22 GMT\"" + ], + "other-date-update-expires": [ + "Assertion", + "Response 2 header Date is \"Tue, 09 Jul 2024 01:17:25 GMT\", not \"Tue, 09 Jul 2024 01:17:22 GMT\"" + ], + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-set-cookie": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial": true, + "partial-store-complete-reuse-partial-no-last": true, + "partial-store-complete-reuse-partial-suffix": true, + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": true, + "partial-use-stored-headers": true, + "pragma-request-extension": true, + "pragma-request-no-cache": true, + "pragma-response-extension": true, + "pragma-response-no-cache": true, + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Setup", + "retry" + ], + "stale-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-sie-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-warning-become": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-warning-stored": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-while-revalidate": true, + "stale-while-revalidate-window": true, + "status-200-fresh": true, + "status-200-must-understand": true, + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-204-stale": true, + "status-299-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": true, + "status-302-stale": true, + "status-303-fresh": [ + "Assertion", + "Response 2 does not come from 
cache" + ], + "status-303-stale": true, + "status-307-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-307-stale": true, + "status-308-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-308-stale": true, + "status-400-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-499-stale": true, + "status-500-fresh": [ + "Setup", + "retry" + ], + "status-500-stale": [ + "Setup", + "retry" + ], + "status-502-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-502-stale": true, + "status-503-fresh": [ + "Setup", + "retry" + ], + "status-503-stale": [ + "Setup", + "retry" + ], + "status-504-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-504-stale": true, + "status-599-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-empty-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-empty-star-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-foo-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-foo": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-star-lines": [ + "Assertion", + "Response 2 comes from cache" + ] +} diff --git a/test/fixtures/cache-tests/results/firefox.json b/test/fixtures/cache-tests/results/firefox.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/firefox.json @@ -0,0 +1,587 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": true, + "304-etag-update-response-Content-Encoding": [ + "Assertion", + "Response 2 header Content-Encoding is \"arizqhypgxofwne\", not \"askcumewogyqias\"" + ], + "304-etag-update-response-Content-Foo": true, + "304-etag-update-response-Content-Length": true, + "304-etag-update-response-Content-Location": [ + "Assertion", + "Response 2 header Content-Location is \"/foo\", not \"/bar\"" + ], + "304-etag-update-response-Content-MD5": [ + "Assertion", + "Response 2 header Content-MD5 is \"rL0Y20zC+Fzt72VPzMSk2A==\", not 
\"N7UdGUp1E+RbVvZSTy1R8g==\"" + ], + "304-etag-update-response-Content-Range": [ + "Assertion", + "Response 2 header Content-Range is \"apetixmbqfujync\", not \"aqgwmcsiyoeukaq\"" + ], + "304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": [ + "Assertion", + "Response 2 header Content-Type is \"text/plain\", not \"text/plain;charset=utf-8\"" + ], + "304-etag-update-response-ETag": [ + "Assertion", + "Response 2 header ETag is \"\"abcdef\"\", not \"\"ghijkl\"\"" + ], + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": true, + "304-etag-update-response-X-Frame-Options": true, + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": true, + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": [ + "Assertion", + "Response 2 does not come from cache" + ], + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": [ + "Assertion", + "Response 2 does not come from cache" + ], + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": true, + "age-parse-parameter": true, + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": true, + "age-parse-suffix-twoline": true, + "cc-resp-immutable-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cc-resp-immutable-stale": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": [ + "Assertion", + "Response 2 comes from cache" + ], + "cc-resp-no-cache-revalidate": true, + "cc-resp-no-cache-revalidate-fresh": true, + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-private": true, + "ccreq-ma0": true, + "ccreq-ma1": true, + "ccreq-magreaterage": true, + "ccreq-max-stale": true, + "ccreq-max-stale-age": true, + "ccreq-min-fresh": true, + "ccreq-min-fresh-age": true, + "ccreq-no-cache": true, + "ccreq-no-cache-etag": true, + "ccreq-no-cache-lm": true, + "ccreq-no-store": true, + "ccreq-oic": [ + "Assertion", + "Response 1 status is 200, not 504" + ], + "conditional-etag-forward": true, + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": [ + "Assertion", + "Request 2 header If-None-Match is \"\"abcdef\"\"" + ], + "conditional-etag-weak-generate-weak": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": true, + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": [ + "Assertion", + "Response 2 does not come from cache" + ], + 
"freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-2-digit-year": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-aest": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-spaces": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-no-comma": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-time-periods": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-utc": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": true, + "freshness-expires-wrong-case-tz": true, + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": true, + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": true, + "freshness-max-age-decimal-zero": true, + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-private": true, + "freshness-max-age-s-maxage-private-multiple": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-before-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-fresh-stale-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sameline": true, + "freshness-max-age-two-stale-fresh-sepline": true, + "freshness-none": true, + "head-200-freshness-update": [ + "Assertion", + "Response 3 does not come from cache" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-410-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-writethrough": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Setup", + "Response 2 does not come from cache" + ], + 
"headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Connection": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "headers-store-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"null\", not \"a=c\"" + ], + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": true, + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": true, + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": true, + "heuristic-204-cached": true, + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": true, + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": true, + "heuristic-delta-1800": true, + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": true, + "heuristic-delta-3600": true, + "heuristic-delta-43200": true, + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": true, + "heuristic-delta-600": true, + "heuristic-delta-86400": true, + "invalidate-DELETE": true, + "invalidate-DELETE-cl": true, + "invalidate-DELETE-failed": [ + "Assertion", + "Response 3 does not come from cache" + ], + "invalidate-DELETE-location": true, + "invalidate-M-SEARCH": true, + "invalidate-M-SEARCH-cl": true, + "invalidate-M-SEARCH-failed": [ + "Assertion", + "Response 3 does not come from cache" + ], + "invalidate-M-SEARCH-location": true, + "invalidate-POST": true, + "invalidate-POST-cl": true, + "invalidate-POST-failed": [ + "Assertion", + "Response 3 does not come from cache" + ], + "invalidate-POST-location": true, + "invalidate-PUT": true, + "invalidate-PUT-cl": true, + "invalidate-PUT-failed": [ + "Assertion", + "Response 3 does not 
come from cache" + ], + "invalidate-PUT-location": true, + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." + ], + "other-age-gen": [ + "Assertion", + "Response 2 Age header not present." + ], + "other-age-update-expires": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-age-update-max-age": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-cookie": true, + "other-date-update": true, + "other-date-update-expires": true, + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": true, + "other-set-cookie": true, + "partial-store-complete-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial-no-last": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": [ + "Setup", + "Response 2 does not come from cache" + ], + "partial-use-stored-headers": [ + "Setup", + "Response 2 does not come from cache" + ], + "pragma-request-extension": true, + "pragma-request-no-cache": true, + "pragma-response-extension": true, + "pragma-response-no-cache": true, + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close": [ + "TypeError", + "NetworkError when attempting to fetch resource." + ], + "stale-close-must-revalidate": [ + "TypeError", + "NetworkError when attempting to fetch resource." + ], + "stale-close-no-cache": [ + "TypeError", + "NetworkError when attempting to fetch resource." + ], + "stale-sie-503": [ + "TypeError", + "NetworkError when attempting to fetch resource." + ], + "stale-sie-close": [ + "TypeError", + "NetworkError when attempting to fetch resource." + ], + "stale-warning-become": [ + "TypeError", + "NetworkError when attempting to fetch resource." + ], + "stale-warning-stored": [ + "TypeError", + "NetworkError when attempting to fetch resource." 
+ ], + "stale-while-revalidate": true, + "stale-while-revalidate-window": true, + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": true, + "status-299-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-299-stale": true, + "status-400-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-400-stale": true, + "status-404-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-499-stale": true, + "status-500-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-500-stale": true, + "status-502-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-502-stale": true, + "status-503-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-503-stale": true, + "status-504-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-504-stale": true, + "status-599-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": [ + "Assertion", + "Response 3 does not come from cache" + ], + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": true, + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": true, + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} \ No newline at end of file diff --git a/test/fixtures/cache-tests/results/index.mjs b/test/fixtures/cache-tests/results/index.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/index.mjs @@ -0,0 +1,71 @@ + +export default [ + { + file: 'chrome.json', + name: 'Chrome', + type: 'browser', + version: '126.0.6478.127' + }, + { + file: 'firefox.json', + name: 'Firefox', + type: 'browser', + version: '127.0.2', + link: 'https://github.com/http-tests/cache-tests/wiki/Firefox' + }, + { + file: 'safari.json', + name: 'Safari', + type: 'browser', + version: 'Version 17.5 (19618.2.12.11.6)' + }, + { + file: 'nginx.json', + name: 'nginx', + type: 'rev-proxy', + version: '1.26.0-1ubuntu2', + link: 'https://github.com/http-tests/cache-tests/wiki/nginx' + }, + { + file: 'squid.json', + name: 'Squid', + type: 'rev-proxy', + version: '6.9-1ubuntu1', + link: 'https://github.com/http-tests/cache-tests/wiki/Squid' + }, + { + file: 
'trafficserver.json', + name: 'ATS', + type: 'rev-proxy', + version: '9.2.4+ds-2', + link: 'https://github.com/http-tests/cache-tests/wiki/Traffic-Server' + }, + { + file: 'apache.json', + name: 'httpd', + type: 'rev-proxy', + version: '2.4.59-2ubuntu2', + link: 'https://github.com/http-tests/cache-tests/wiki/Apache-httpd' + }, + { + file: 'varnish.json', + name: 'Varnish', + type: 'rev-proxy', + version: '7.1.1-1.1ubuntu1', + link: 'https://github.com/http-tests/cache-tests/wiki/Varnish' + }, + { + file: 'caddy.json', + name: 'caddy', + type: 'rev-proxy', + version: '0.7.0', + link: 'https://github.com/http-tests/cache-tests/wiki/Caddy' + }, + { + file: 'fastly.json', + name: 'Fastly', + type: 'cdn', + version: '2024-07-09', + link: 'https://github.com/http-tests/cache-tests/wiki/Fastly' + } +] diff --git a/test/fixtures/cache-tests/results/nginx.json b/test/fixtures/cache-tests/results/nginx.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/nginx.json @@ -0,0 +1,849 @@ +{ + "304-etag-update-response-Cache-Control": [ + "Assertion", + "Response 2 header Cache-Control is \"max-age=1\", not \"max-age=3600\"" + ], + "304-etag-update-response-Clear-Site-Data": [ + "Assertion", + "Response 2 header Clear-Site-Data is \"cache\", not \"cookies\"" + ], + "304-etag-update-response-Content-Encoding": [ + "Assertion", + "Response 2 header Content-Encoding is \"arizqhypgxofwne\", not \"askcumewogyqias\"" + ], + "304-etag-update-response-Content-Foo": [ + "AbortError", + "The user aborted a request." + ], + "304-etag-update-response-Content-Length": true, + "304-etag-update-response-Content-Location": [ + "Assertion", + "Response 2 header Content-Location is \"/foo\", not \"/bar\"" + ], + "304-etag-update-response-Content-MD5": [ + "Assertion", + "Response 2 header Content-MD5 is \"rL0Y20zC+Fzt72VPzMSk2A==\", not \"N7UdGUp1E+RbVvZSTy1R8g==\"" + ], + "304-etag-update-response-Content-Range": [ + "Assertion", + "Response 2 header Content-Range is \"apetixmbqfujync\", not \"aqgwmcsiyoeukaq\"" + ], + "304-etag-update-response-Content-Security-Policy": [ + "Assertion", + "Response 2 header Content-Security-Policy is \"default-src 'self'\", not \"default-src 'self' cdn.example.com\"" + ], + "304-etag-update-response-Content-Type": [ + "AbortError", + "The user aborted a request." + ], + "304-etag-update-response-ETag": [ + "AbortError", + "The user aborted a request." + ], + "304-etag-update-response-Expires": [ + "Assertion", + "Response 2 header Expires is \"Fri, 01 Jan 2038 01:01:01 GMT\", not \"Mon, 11 Jan 2038 11:11:11 GMT\"" + ], + "304-etag-update-response-Public-Key-Pins": [ + "Assertion", + "Response 2 header Public-Key-Pins is \"auoicwqkeysmgau\", not \"avqlgbwrmhcxsni\"" + ], + "304-etag-update-response-Set-Cookie": [ + "Setup", + "Request 2 should have been conditional, but it was not." 
+ ], + "304-etag-update-response-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"a=b\", not \"a=c\"" + ], + "304-etag-update-response-Test-Header": [ + "Assertion", + "Response 2 header Test-Header is \"aaaaaaaaaaaaaaa\", not \"abcdefghijklmno\"" + ], + "304-etag-update-response-X-Content-Foo": [ + "Assertion", + "Response 2 header X-Content-Foo is \"azyxwvutsrqponm\", not \"aaaaaaaaaaaaaaa\"" + ], + "304-etag-update-response-X-Frame-Options": [ + "Assertion", + "Response 2 header X-Frame-Options is \"deny\", not \"sameorigin\"" + ], + "304-etag-update-response-X-Test-Header": [ + "Assertion", + "Response 2 header X-Test-Header is \"adgjmpsvybehknq\", not \"aeimquycgkoswae\"" + ], + "304-etag-update-response-X-XSS-Protection": [ + "Assertion", + "Response 2 header X-XSS-Protection is \"1\", not \"1; mode=block\"" + ], + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": true, + "age-parse-large": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-large-minus-one": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-larger": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-suffix-twoline": [ + "Assertion", + "Response 2 comes from cache" + ], + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "cc-resp-no-cache-revalidate-fresh": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-max-stale-age": true, + "ccreq-min-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-min-fresh-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache-etag": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-cache-lm": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": [ + "Assertion", + "Response 1 status is 200, not 504" + ], + "cdn-cc-invalid-sh-type-unknown": true, + "cdn-cc-invalid-sh-type-wrong": true, + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 01:06:50 GMT\"" + ], + "cdn-fresh-cc-nostore": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-0": true, + "cdn-max-age-0-expires": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-age": true, + "cdn-max-age-case-insensitive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-cc-max-age-invalid-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-extension": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-long-cc-max-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-private": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-remove-age-exceed": [ + "Assertion", + "Response 2 Age header not present." 
+ ], + "cdn-remove-header": true, + "conditional-304-etag": true, + "conditional-etag-forward": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-quoted-respond-unquoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": true, + "conditional-etag-strong-respond-multiple-last": true, + "conditional-etag-strong-respond-multiple-second": true, + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-quoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-unquoted": true, + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": true, + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": true, + "conditional-etag-weak-respond-lowercase": true, + "conditional-etag-weak-respond-omit-slash": true, + "conditional-lm-fresh": true, + "conditional-lm-fresh-earlier": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh-no-lm": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh-rfc850": true, + "conditional-lm-stale": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-age-slow-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": true, + "freshness-expires-invalid-2-digit-year": true, + "freshness-expires-invalid-aest": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": true, + "freshness-expires-invalid-multiple-lines": true, + "freshness-expires-invalid-multiple-spaces": true, + "freshness-expires-invalid-no-comma": true, + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-old-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-past": true, + "freshness-expires-present": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": true, + "freshness-expires-wrong-case-tz": true, + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-case-insenstive": true, + 
"freshness-max-age-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-decimal-five": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-decimal-zero": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": true, + "freshness-max-age-space-before-equals": true, + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": true, + "freshness-max-age-two-fresh-stale-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sepline": true, + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "head-410-update": [ + "Setup", + "Response 3 status is 410, not 200" + ], + "head-writethrough": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Connection": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": [ + "Setup", + "Response 1 status is 502, not 200" + ], + "headers-store-Upgrade": true, + 
"headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1800": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-3600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-43200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-86400": [ + "Assertion", + "Response 2 does not come from cache" + ], + "invalidate-DELETE": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": [ + "AbortError", + "The user aborted a request." + ], + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." + ], + "other-age-gen": [ + "Assertion", + "Response 2 Age header not present." 
+ ], + "other-age-update-expires": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-age-update-max-age": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-authorization": [ + "Assertion", + "Response 2 comes from cache" + ], + "other-authorization-must-revalidate": true, + "other-authorization-public": true, + "other-authorization-smaxage": true, + "other-cookie": true, + "other-date-update": [ + "Assertion", + "Response 2 header Date is \"Tue, 09 Jul 2024 01:06:44 GMT\", not \"Tue, 09 Jul 2024 01:06:41 GMT\"" + ], + "other-date-update-expires": [ + "Assertion", + "Response 2 header Date is \"Tue, 09 Jul 2024 01:06:44 GMT\", not \"Tue, 09 Jul 2024 01:06:41 GMT\"" + ], + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-set-cookie": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial": [ + "Assertion", + "Response 2 status is 200, not 206" + ], + "partial-store-complete-reuse-partial-no-last": [ + "Assertion", + "Response 2 status is 200, not 206" + ], + "partial-store-complete-reuse-partial-suffix": [ + "Assertion", + "Response 2 status is 200, not 206" + ], + "partial-store-partial-complete": [ + "Setup", + "Response 2 status is 206, not 200" + ], + "partial-store-partial-reuse-partial": [ + "Setup", + "Request 1 header Range is \"undefined\", not \"bytes=-5\"" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response body is \"01234\", not \"234\"" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response body is \"01234\", not \"234\"" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response body is \"01234\", not \"4\"" + ], + "partial-use-headers": [ + "Setup", + "Response 2 status is 200, not 206" + ], + "partial-use-stored-headers": [ + "Setup", + "Response 2 status is 200, not 206" + ], + "pragma-request-extension": true, + "pragma-request-no-cache": true, + "pragma-response-extension": true, + "pragma-response-no-cache": true, + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": true, + "stale-sie-close": true, + "stale-warning-become": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-warning-stored": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-while-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-while-revalidate-window": [ + "Setup", + "Response 2 does not come from cache" + ], + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": 
true, + "status-299-fresh": true, + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": true, + "status-302-stale": true, + "status-303-fresh": true, + "status-303-stale": true, + "status-307-fresh": true, + "status-307-stale": true, + "status-308-fresh": true, + "status-308-stale": true, + "status-400-fresh": true, + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": true, + "status-499-stale": true, + "status-500-fresh": true, + "status-500-stale": true, + "status-502-fresh": true, + "status-502-stale": true, + "status-503-fresh": true, + "status-503-stale": true, + "status-504-fresh": true, + "status-504-stale": true, + "status-599-fresh": true, + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": true, + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-empty-star-lines": true, + "vary-syntax-foo-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star": true, + "vary-syntax-star-foo": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-star-lines": true +} diff --git a/test/fixtures/cache-tests/results/safari.json b/test/fixtures/cache-tests/results/safari.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/safari.json @@ -0,0 +1,611 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": true, + "304-etag-update-response-Content-Encoding": [ + "Assertion", + "Response 2 header Content-Encoding is \"arizqhypgxofwne\", not \"askcumewogyqias\"" + ], + "304-etag-update-response-Content-Foo": [ + "Assertion", + "Response 2 header Content-Foo is \"awsokgcyuqmieaw\", not \"axurolifczwtqnk\"" + ], + "304-etag-update-response-Content-Length": true, + "304-etag-update-response-Content-Location": [ + "Assertion", + "Response 2 header Content-Location is \"/foo\", not \"/bar\"" + ], + "304-etag-update-response-Content-MD5": [ + "Assertion", + "Response 2 header Content-MD5 is \"rL0Y20zC+Fzt72VPzMSk2A==\", not \"N7UdGUp1E+RbVvZSTy1R8g==\"" + ], + "304-etag-update-response-Content-Range": [ + "Assertion", + "Response 2 header Content-Range is \"apetixmbqfujync\", not \"aqgwmcsiyoeukaq\"" + ], + "304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": [ + "Assertion", + "Response 2 header Content-Type is \"text/plain\", not \"text/plain;charset=utf-8\"" + ], + "304-etag-update-response-ETag": [ + "Assertion", + "Response 2 
header ETag is \"\"abcdef\"\", not \"\"ghijkl\"\"" + ], + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": [ + "Assertion", + "Response 2 header X-Content-Foo is \"azyxwvutsrqponm\", not \"aaaaaaaaaaaaaaa\"" + ], + "304-etag-update-response-X-Frame-Options": [ + "Assertion", + "Response 2 header X-Frame-Options is \"deny\", not \"sameorigin\"" + ], + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": [ + "Assertion", + "Response 2 header X-XSS-Protection is \"1\", not \"1; mode=block\"" + ], + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": [ + "Assertion", + "Response 2 does not come from cache" + ], + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-suffix-twoline": [ + "Assertion", + "Response 2 comes from cache" + ], + "cc-resp-immutable-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cc-resp-immutable-stale": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": true, + "cc-resp-no-cache-revalidate-fresh": true, + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-private": true, + "ccreq-ma0": true, + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": true, + "ccreq-max-stale-age": true, + "ccreq-min-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-min-fresh-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache": true, + "ccreq-no-cache-etag": true, + "ccreq-no-cache-lm": true, + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": [ + "TypeError", + "Load failed" + ], + "conditional-etag-forward": true, + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": true, + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + 
"freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-2-digit-year": true, + "freshness-expires-invalid-aest": true, + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-lines": true, + "freshness-expires-invalid-multiple-spaces": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-no-comma": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-month": true, + "freshness-expires-wrong-case-tz": true, + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": true, + "freshness-max-age-decimal-zero": true, + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": true, + "freshness-max-age-s-maxage-private": true, + "freshness-max-age-s-maxage-private-multiple": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-before-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": true, + "freshness-max-age-two-fresh-stale-sepline": true, + "freshness-max-age-two-stale-fresh-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-none": true, + "head-200-freshness-update": [ + "Assertion", + "Response 3 does not come from cache" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-410-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-writethrough": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-omit-headers-listed-in-Connection": [ + "Assertion", + "Response 2 includes unexpected 
header a: \"1\"" + ], + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "headers-store-Set-Cookie2": [ + "Assertion", + "Response 2 header Set-Cookie2 is \"null\", not \"a=c\"" + ], + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": true, + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": true, + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": true, + "heuristic-204-cached": true, + "heuristic-403-not_cached": true, + "heuristic-404-cached": true, + "heuristic-405-cached": true, + "heuristic-410-cached": true, + "heuristic-414-cached": true, + "heuristic-501-cached": true, + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": true, + "heuristic-delta-1800": true, + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": true, + "heuristic-delta-3600": true, + "heuristic-delta-43200": true, + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": true, + "heuristic-delta-600": true, + "heuristic-delta-86400": true, + "invalidate-DELETE": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + 
"Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." + ], + "other-age-gen": [ + "Assertion", + "Response 2 Age header not present." + ], + "other-age-update-expires": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-age-update-max-age": [ + "Assertion", + "Response 2 header Age is 30, should be bigger than 32" + ], + "other-cookie": true, + "other-date-update": true, + "other-date-update-expires": true, + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": true, + "other-set-cookie": true, + "partial-store-complete-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial-no-last": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": true, + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": [ + "Setup", + "Response 2 does not come from cache" + ], + "partial-use-stored-headers": [ + "Setup", + "Response 2 does not come from cache" + ], + "pragma-request-extension": true, + "pragma-request-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-extension": true, + "pragma-response-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close": [ + "TypeError", + "Load failed" + ], + "stale-close-must-revalidate": [ + "TypeError", + "Load failed" + ], + "stale-close-no-cache": [ + "TypeError", + "Load failed" + ], + "stale-sie-503": [ + "TypeError", + "Load failed" + ], + "stale-sie-close": [ + "TypeError", + "Load failed" + ], + "stale-warning-become": [ + "TypeError", + "Load failed" + ], + "stale-warning-stored": [ + "TypeError", + "Load failed" + ], + "stale-while-revalidate": true, + "stale-while-revalidate-window": true, + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": true, + "status-299-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-299-stale": true, + "status-400-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-499-stale": true, + "status-500-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-500-stale": true, + "status-502-fresh": [ + 
"Assertion", + "Response 2 does not come from cache" + ], + "status-502-stale": true, + "status-503-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-503-stale": true, + "status-504-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-504-stale": true, + "status-599-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": [ + "Assertion", + "Response 3 does not come from cache" + ], + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": true, + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": true, + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} \ No newline at end of file diff --git a/test/fixtures/cache-tests/results/squid.json b/test/fixtures/cache-tests/results/squid.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/squid.json @@ -0,0 +1,681 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": true, + "304-etag-update-response-Content-Encoding": true, + "304-etag-update-response-Content-Foo": true, + "304-etag-update-response-Content-Length": [ + "Assertion", + "Response 2 header Content-Length is \"10\", not \"36\"" + ], + "304-etag-update-response-Content-Location": true, + "304-etag-update-response-Content-MD5": true, + "304-etag-update-response-Content-Range": true, + "304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": true, + "304-etag-update-response-ETag": true, + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Set-Cookie2": true, + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": true, + "304-etag-update-response-X-Frame-Options": true, + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": true, + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": [ + "Assertion", + "Response 2 does not come from cache" + ], + "age-parse-large": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-large-minus-one": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-larger": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": true, + 
"age-parse-parameter": true, + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": true, + "age-parse-suffix-twoline": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": true, + "cc-resp-no-cache-revalidate-fresh": true, + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": true, + "ccreq-ma1": true, + "ccreq-magreaterage": true, + "ccreq-max-stale": true, + "ccreq-max-stale-age": true, + "ccreq-min-fresh": true, + "ccreq-min-fresh-age": true, + "ccreq-no-cache": true, + "ccreq-no-cache-etag": [ + "Setup", + "Response 2 status is 502, not 200" + ], + "ccreq-no-cache-lm": [ + "Setup", + "Response 2 status is 502, not 200" + ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": true, + "cdn-cc-invalid-sh-type-unknown": true, + "cdn-cc-invalid-sh-type-wrong": true, + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 00:51:04 GMT\"" + ], + "cdn-fresh-cc-nostore": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-0": true, + "cdn-max-age-0-expires": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-age": true, + "cdn-max-age-case-insensitive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-cc-max-age-invalid-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-extension": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-long-cc-max-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-private": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-remove-age-exceed": [ + "Assertion", + "Response 2 Age header not present." 
+ ], + "cdn-remove-header": true, + "conditional-304-etag": [ + "Assertion", + "Response 2 header ETag is \"null\", not \"\"abcdef\"\"" + ], + "conditional-etag-forward": true, + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": true, + "conditional-etag-quoted-respond-unquoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "request 2 doesn't have if-none-match header" + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": true, + "conditional-etag-strong-respond-multiple-last": true, + "conditional-etag-strong-respond-multiple-second": true, + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-quoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-unquoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": [ + "Assertion", + "request 2 doesn't have if-none-match header" + ], + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-weak-respond-lowercase": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-weak-respond-omit-slash": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh": true, + "conditional-lm-fresh-earlier": true, + "conditional-lm-fresh-no-lm": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh-rfc850": true, + "conditional-lm-stale": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": true, + "freshness-expires-invalid-2-digit-year": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-aest": true, + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-lines": true, + "freshness-expires-invalid-multiple-spaces": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-no-comma": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": true, + "freshness-expires-old-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": true, + "freshness-expires-wrong-case-tz": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": true, + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + 
"freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": true, + "freshness-max-age-decimal-zero": true, + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-before-equals": true, + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": true, + "freshness-max-age-two-fresh-stale-sepline": true, + "freshness-max-age-two-stale-fresh-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "Assertion", + "Response 3 does not come from cache" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-410-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-writethrough": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Connection": true, + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": [ + "Setup", + "Response 1 status is 502, not 200" + ], + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + 
"headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": true, + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": true, + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": true, + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": true, + "heuristic-delta-1800": true, + "heuristic-delta-30": true, + "heuristic-delta-300": true, + "heuristic-delta-3600": true, + "heuristic-delta-43200": true, + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": true, + "heuristic-delta-600": true, + "heuristic-delta-86400": true, + "invalidate-DELETE": true, + "invalidate-DELETE-cl": true, + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": true, + "invalidate-M-SEARCH": true, + "invalidate-M-SEARCH-cl": true, + "invalidate-M-SEARCH-failed": [ + "Assertion", + "Response 3 does not come from cache" + ], + "invalidate-M-SEARCH-location": true, + "invalidate-POST": true, + "invalidate-POST-cl": true, + "invalidate-POST-failed": true, + "invalidate-POST-location": true, + "invalidate-PUT": true, + "invalidate-PUT-cl": true, + "invalidate-PUT-failed": true, + "invalidate-PUT-location": true, + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 age header not present." 
+ ], + "other-age-gen": true, + "other-age-update-expires": true, + "other-age-update-max-age": true, + "other-authorization": [ + "Setup", + "Request 1 header Authorization is \"undefined\", not \"FOO\"" + ], + "other-authorization-must-revalidate": [ + "Setup", + "Request 1 header Authorization is \"undefined\", not \"FOO\"" + ], + "other-authorization-public": [ + "Setup", + "Request 1 header Authorization is \"undefined\", not \"FOO\"" + ], + "other-authorization-smaxage": [ + "Setup", + "Request 1 header Authorization is \"undefined\", not \"FOO\"" + ], + "other-cookie": true, + "other-date-update": true, + "other-date-update-expires": true, + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": true, + "other-set-cookie": true, + "partial-store-complete-reuse-partial": true, + "partial-store-complete-reuse-partial-no-last": true, + "partial-store-complete-reuse-partial-suffix": true, + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": true, + "partial-use-stored-headers": true, + "pragma-request-extension": true, + "pragma-request-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-extension": true, + "pragma-response-no-cache": true, + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": true, + "stale-close": true, + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": true, + "stale-sie-close": true, + "stale-warning-become": [ + "Assertion", + "Response 2 warning header not present." + ], + "stale-warning-stored": [ + "Assertion", + "Response 2 warning header not present." 
+ ], + "stale-while-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-while-revalidate-window": [ + "Setup", + "Response 2 does not come from cache" + ], + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-204-stale": true, + "status-299-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": true, + "status-302-stale": true, + "status-303-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-303-stale": true, + "status-307-fresh": true, + "status-307-stale": true, + "status-308-fresh": true, + "status-308-stale": true, + "status-400-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-400-stale": true, + "status-404-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-499-stale": true, + "status-500-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-500-stale": true, + "status-502-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-502-stale": true, + "status-503-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-503-stale": true, + "status-504-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-504-stale": true, + "status-599-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": true, + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": true, + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} diff --git a/test/fixtures/cache-tests/results/trafficserver.json b/test/fixtures/cache-tests/results/trafficserver.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/trafficserver.json @@ -0,0 +1,678 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": true, + "304-etag-update-response-Content-Encoding": true, + "304-etag-update-response-Content-Foo": true, + "304-etag-update-response-Content-Length": true, + 
"304-etag-update-response-Content-Location": true, + "304-etag-update-response-Content-MD5": true, + "304-etag-update-response-Content-Range": true, + "304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": [ + "Assertion", + "Response 2 header Content-Type is \"text/plain\", not \"text/plain;charset=utf-8\"" + ], + "304-etag-update-response-ETag": true, + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "304-etag-update-response-Set-Cookie2": true, + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": true, + "304-etag-update-response-X-Frame-Options": true, + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": true, + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": [ + "Assertion", + "Response 2 does not come from cache" + ], + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": true, + "age-parse-parameter": true, + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": true, + "age-parse-suffix-twoline": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "cc-resp-no-cache-revalidate-fresh": true, + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": true, + "ccreq-max-stale-age": true, + "ccreq-min-fresh": true, + "ccreq-min-fresh-age": true, + "ccreq-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache-etag": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-cache-lm": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": true, + "cdn-cc-invalid-sh-type-unknown": true, + "cdn-cc-invalid-sh-type-wrong": true, + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 01:02:07 GMT\"" + ], + "cdn-fresh-cc-nostore": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-0": true, + "cdn-max-age-0-expires": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-age": true, + "cdn-max-age-case-insensitive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-cc-max-age-invalid-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-extension": [ + 
"Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-long-cc-max-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-private": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-remove-age-exceed": true, + "cdn-remove-header": true, + "conditional-304-etag": true, + "conditional-etag-forward": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": true, + "conditional-etag-quoted-respond-unquoted": true, + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": true, + "conditional-etag-strong-respond-multiple-last": true, + "conditional-etag-strong-respond-multiple-second": true, + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-quoted": true, + "conditional-etag-unquoted-respond-unquoted": true, + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": [ + "Assertion", + "Request 2 should have been conditional, but it was not." 
+ ], + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-weak-respond-lowercase": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-weak-respond-omit-slash": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh": true, + "conditional-lm-fresh-earlier": true, + "conditional-lm-fresh-no-lm": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh-rfc850": true, + "conditional-lm-stale": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": true, + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": true, + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-2-digit-year": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-aest": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-multiple-spaces": true, + "freshness-expires-invalid-no-comma": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-tz": true, + "freshness-expires-wrong-case-weekday": true, + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": true, + "freshness-max-age-a100": true, + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": true, + "freshness-max-age-decimal-five": true, + "freshness-max-age-decimal-zero": true, + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": true, + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-before-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": [ + "Assertion", + "Response 2 does not 
come from cache" + ], + "freshness-max-age-two-fresh-stale-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sameline": true, + "freshness-max-age-two-stale-fresh-sepline": true, + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "Assertion", + "Response 3 does not come from cache" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-410-update": [ + "Setup", + "Response 3 does not come from cache" + ], + "head-writethrough": true, + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Connection": [ + "Assertion", + "Response 2 includes unexpected header a: \"1\"" + ], + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + "headers-store-Content-MD5": true, + "headers-store-Content-Range": true, + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Assertion", + "Response 2 header Set-Cookie is \"null\", not \"a=c\"" + ], + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": [ + "Setup", + "Response 1 status is 400, not 200" + ], + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": true, + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": true, + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": true, + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1800": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": [ + 
"Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-3600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-43200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-86400": [ + "Assertion", + "Response 2 does not come from cache" + ], + "invalidate-DELETE": [ + "Setup", + "Response 2 status is 403, not 200" + ], + "invalidate-DELETE-cl": [ + "Setup", + "Response 2 status is 403, not 200" + ], + "invalidate-DELETE-failed": [ + "Setup", + "Response 2 status is 403, not 500" + ], + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST": true, + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": true, + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": true, + "other-age-gen": true, + "other-age-update-expires": true, + "other-age-update-max-age": true, + "other-authorization": [ + "Assertion", + "Response 2 comes from cache" + ], + "other-authorization-must-revalidate": true, + "other-authorization-public": true, + "other-authorization-smaxage": true, + "other-cookie": true, + "other-date-update": true, + "other-date-update-expires": true, + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-set-cookie": true, + "partial-store-complete-reuse-partial": true, + "partial-store-complete-reuse-partial-no-last": true, + "partial-store-complete-reuse-partial-suffix": true, + "partial-store-partial-complete": [ + "Assertion", + "Request 2 header range is \"undefined\", not \"bytes=5-\"" + ], + "partial-store-partial-reuse-partial": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-absent": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-use-headers": true, + "partial-use-stored-headers": true, + "pragma-request-extension": true, + "pragma-request-no-cache": true, + "pragma-response-extension": true, + "pragma-response-no-cache": [ + "Assertion", + "Response 2 does not come from cache" + ], + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": true, + 
"stale-close": true, + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": true, + "stale-sie-close": true, + "stale-warning-become": true, + "stale-warning-stored": true, + "stale-while-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-while-revalidate-window": [ + "Setup", + "Response 2 does not come from cache" + ], + "status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": true, + "status-299-fresh": true, + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": true, + "status-302-stale": true, + "status-303-fresh": true, + "status-303-stale": true, + "status-307-fresh": true, + "status-307-stale": true, + "status-308-fresh": true, + "status-308-stale": true, + "status-400-fresh": true, + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": true, + "status-499-stale": true, + "status-500-fresh": true, + "status-500-stale": true, + "status-502-fresh": true, + "status-502-stale": true, + "status-503-fresh": true, + "status-503-stale": true, + "status-504-fresh": true, + "status-504-stale": true, + "status-599-fresh": true, + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": true, + "vary-normalise-lang-case": true, + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": true, + "vary-normalise-space": true, + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": true, + "vary-syntax-empty-star-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-foo-star": true, + "vary-syntax-star": true, + "vary-syntax-star-foo": true, + "vary-syntax-star-star": true, + "vary-syntax-star-star-lines": true +} diff --git a/test/fixtures/cache-tests/results/varnish.json b/test/fixtures/cache-tests/results/varnish.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/results/varnish.json @@ -0,0 +1,804 @@ +{ + "304-etag-update-response-Cache-Control": true, + "304-etag-update-response-Clear-Site-Data": true, + "304-etag-update-response-Content-Encoding": [ + "Assertion", + "Response 2 header Content-Encoding is \"arizqhypgxofwne\", not \"askcumewogyqias\"" + ], + "304-etag-update-response-Content-Foo": true, + "304-etag-update-response-Content-Length": true, + "304-etag-update-response-Content-Location": true, + "304-etag-update-response-Content-MD5": true, + "304-etag-update-response-Content-Range": [ + "Setup", + "Response 1 status is 503, not 200" + ], + 
"304-etag-update-response-Content-Security-Policy": true, + "304-etag-update-response-Content-Type": true, + "304-etag-update-response-ETag": true, + "304-etag-update-response-Expires": true, + "304-etag-update-response-Public-Key-Pins": true, + "304-etag-update-response-Set-Cookie": [ + "Setup", + "Request 2 should have been conditional, but it was not." + ], + "304-etag-update-response-Set-Cookie2": true, + "304-etag-update-response-Test-Header": true, + "304-etag-update-response-X-Content-Foo": true, + "304-etag-update-response-X-Frame-Options": true, + "304-etag-update-response-X-Test-Header": true, + "304-etag-update-response-X-XSS-Protection": true, + "304-lm-use-stored-Test-Header": true, + "age-parse-dup-0": true, + "age-parse-dup-0-twoline": true, + "age-parse-dup-old": true, + "age-parse-float": [ + "Assertion", + "Response 2 does not come from cache" + ], + "age-parse-large": true, + "age-parse-large-minus-one": true, + "age-parse-larger": true, + "age-parse-negative": true, + "age-parse-nonnumeric": true, + "age-parse-numeric-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-parameter": [ + "Assertion", + "Response 2 comes from cache" + ], + "age-parse-prefix": true, + "age-parse-prefix-twoline": true, + "age-parse-suffix": true, + "age-parse-suffix-twoline": true, + "cc-resp-must-revalidate-fresh": true, + "cc-resp-must-revalidate-stale": true, + "cc-resp-no-cache": true, + "cc-resp-no-cache-case-insensitive": true, + "cc-resp-no-cache-revalidate": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "cc-resp-no-cache-revalidate-fresh": [ + "Assertion", + "Request 2 should have been conditional, but it was not." + ], + "cc-resp-no-store": true, + "cc-resp-no-store-case-insensitive": true, + "cc-resp-no-store-fresh": true, + "cc-resp-no-store-old-max-age": true, + "cc-resp-no-store-old-new": true, + "cc-resp-private-shared": true, + "ccreq-ma0": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-ma1": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-magreaterage": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-max-stale": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-max-stale-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "ccreq-min-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-min-fresh-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-no-cache-etag": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-cache-lm": [ + "Assertion", + "request 2 wasn't sent to server" + ], + "ccreq-no-store": [ + "Assertion", + "Response 2 comes from cache" + ], + "ccreq-oic": [ + "Assertion", + "Response 1 status is 200, not 504" + ], + "cdn-cc-invalid-sh-type-unknown": true, + "cdn-cc-invalid-sh-type-wrong": true, + "cdn-date-update-exceed": true, + "cdn-expires-update-exceed": [ + "Assertion", + "Response 2 header Expires is \"null\", not \"Tue, 09 Jul 2024 01:02:40 GMT\"" + ], + "cdn-fresh-cc-nostore": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-0": true, + "cdn-max-age-0-expires": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-age": true, + "cdn-max-age-case-insensitive": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-cc-max-age-invalid-expires": [ 
+ "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-expires": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-extension": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-long-cc-max-age": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-max-age-max": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-max-plus": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-short-cc-max-age": [ + "Assertion", + "Response 2 does not come from cache" + ], + "cdn-max-age-space-after-equals": true, + "cdn-max-age-space-before-equals": true, + "cdn-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-no-store-cc-fresh": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-private": [ + "Assertion", + "Response 2 comes from cache" + ], + "cdn-remove-age-exceed": true, + "cdn-remove-header": true, + "conditional-304-etag": true, + "conditional-etag-forward": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-forward-unquoted": [ + "Assertion", + "Request 1 header If-None-Match is \"undefined\", not \"\"abcdef\"\"" + ], + "conditional-etag-precedence": true, + "conditional-etag-quoted-respond-unquoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-generate": true, + "conditional-etag-strong-generate-unquoted": [ + "Assertion", + "Request 2 header If-None-Match is \"abcdef\", not \"\"abcdef\"\"" + ], + "conditional-etag-strong-respond": true, + "conditional-etag-strong-respond-multiple-first": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-respond-multiple-last": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-respond-multiple-second": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-strong-respond-obs-text": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-quoted": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-etag-unquoted-respond-unquoted": true, + "conditional-etag-vary-headers": true, + "conditional-etag-vary-headers-mismatch": true, + "conditional-etag-weak-generate-weak": true, + "conditional-etag-weak-respond": true, + "conditional-etag-weak-respond-backslash": true, + "conditional-etag-weak-respond-lowercase": true, + "conditional-etag-weak-respond-omit-slash": true, + "conditional-lm-fresh": true, + "conditional-lm-fresh-earlier": true, + "conditional-lm-fresh-no-lm": [ + "Assertion", + "Response 2 status is 200, not 304" + ], + "conditional-lm-fresh-rfc850": true, + "conditional-lm-stale": true, + "freshness-expires-32bit": true, + "freshness-expires-age-fast-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-expires-age-slow-date": true, + "freshness-expires-ansi-c": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-far-future": true, + "freshness-expires-future": true, + "freshness-expires-invalid": true, + "freshness-expires-invalid-1-digit-hour": true, + "freshness-expires-invalid-2-digit-year": true, + "freshness-expires-invalid-aest": true, + "freshness-expires-invalid-date": true, + "freshness-expires-invalid-date-dashes": true, + "freshness-expires-invalid-multiple-lines": true, + "freshness-expires-invalid-multiple-spaces": true, + "freshness-expires-invalid-no-comma": true, + 
"freshness-expires-invalid-time-periods": true, + "freshness-expires-invalid-utc": true, + "freshness-expires-old-date": true, + "freshness-expires-past": true, + "freshness-expires-present": true, + "freshness-expires-rfc850": true, + "freshness-expires-wrong-case-month": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-tz": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-expires-wrong-case-weekday": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age": true, + "freshness-max-age-0": true, + "freshness-max-age-0-expires": true, + "freshness-max-age-100a": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-a100": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-age": true, + "freshness-max-age-case-insenstive": true, + "freshness-max-age-date": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-decimal-five": true, + "freshness-max-age-decimal-zero": true, + "freshness-max-age-expires": true, + "freshness-max-age-expires-invalid": true, + "freshness-max-age-extension": true, + "freshness-max-age-ignore-quoted": true, + "freshness-max-age-ignore-quoted-rev": true, + "freshness-max-age-leading-zero": true, + "freshness-max-age-max": true, + "freshness-max-age-max-minus-1": true, + "freshness-max-age-max-plus": true, + "freshness-max-age-max-plus-1": true, + "freshness-max-age-negative": true, + "freshness-max-age-quoted": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-s-maxage-shared-longer": true, + "freshness-max-age-s-maxage-shared-longer-multiple": true, + "freshness-max-age-s-maxage-shared-longer-reversed": true, + "freshness-max-age-s-maxage-shared-shorter": true, + "freshness-max-age-s-maxage-shared-shorter-expires": true, + "freshness-max-age-single-quoted": true, + "freshness-max-age-space-after-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-space-before-equals": [ + "Assertion", + "Response 2 comes from cache" + ], + "freshness-max-age-stale": true, + "freshness-max-age-two-fresh-stale-sameline": true, + "freshness-max-age-two-fresh-stale-sepline": true, + "freshness-max-age-two-stale-fresh-sameline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-max-age-two-stale-fresh-sepline": [ + "Assertion", + "Response 2 does not come from cache" + ], + "freshness-none": true, + "freshness-s-maxage-shared": true, + "head-200-freshness-update": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "head-200-retain": [ + "Assertion", + "Response 2 header Template-A is \"null\", not \"1\"" + ], + "head-200-update": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "head-410-update": [ + "Setup", + "Response 3 status is 410, not 200" + ], + "head-writethrough": [ + "Assertion", + "Request 2 had method GET, not HEAD" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Cache-Control-no-cache-single": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-omit-headers-listed-in-Connection": true, + "headers-store-Cache-Control": true, + "headers-store-Clear-Site-Data": true, + "headers-store-Connection": true, + "headers-store-Content-Encoding": true, + "headers-store-Content-Foo": true, + "headers-store-Content-Length": true, + "headers-store-Content-Location": true, + 
"headers-store-Content-MD5": true, + "headers-store-Content-Range": [ + "Setup", + "Response 1 status is 503, not 200" + ], + "headers-store-Content-Security-Policy": true, + "headers-store-Content-Type": true, + "headers-store-ETag": true, + "headers-store-Expires": true, + "headers-store-Keep-Alive": true, + "headers-store-Proxy-Authenticate": true, + "headers-store-Proxy-Authentication-Info": true, + "headers-store-Proxy-Authorization": true, + "headers-store-Proxy-Connection": true, + "headers-store-Public-Key-Pins": true, + "headers-store-Set-Cookie": [ + "Setup", + "Response 2 does not come from cache" + ], + "headers-store-Set-Cookie2": true, + "headers-store-TE": true, + "headers-store-Test-Header": true, + "headers-store-Transfer-Encoding": [ + "Setup", + "Response 1 status is 503, not 200" + ], + "headers-store-Upgrade": true, + "headers-store-X-Content-Foo": true, + "headers-store-X-Frame-Options": true, + "headers-store-X-Test-Header": true, + "headers-store-X-XSS-Protection": true, + "heuristic-200-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-201-not_cached": true, + "heuristic-202-not_cached": true, + "heuristic-203-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-204-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-403-not_cached": true, + "heuristic-404-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-405-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-410-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-414-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-501-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-502-not_cached": true, + "heuristic-503-not_cached": true, + "heuristic-504-not_cached": true, + "heuristic-599-cached": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-599-not_cached": true, + "heuristic-delta-10": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-1800": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-30": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-300": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-3600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-43200": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-5": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-60": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-600": [ + "Assertion", + "Response 2 does not come from cache" + ], + "heuristic-delta-86400": [ + "Assertion", + "Response 2 does not come from cache" + ], + "invalidate-DELETE": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-DELETE-failed": true, + "invalidate-DELETE-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-M-SEARCH-failed": true, + "invalidate-M-SEARCH-location": [ + "Assertion", + "Response 
3 comes from cache" + ], + "invalidate-POST": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-POST-failed": true, + "invalidate-POST-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-cl": [ + "Assertion", + "Response 3 comes from cache" + ], + "invalidate-PUT-failed": true, + "invalidate-PUT-location": [ + "Assertion", + "Response 3 comes from cache" + ], + "method-POST": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-age-delay": [ + "Assertion", + "Response 1 header age is 0, should be bigger than 0" + ], + "other-age-gen": true, + "other-age-update-expires": true, + "other-age-update-max-age": true, + "other-authorization": true, + "other-authorization-must-revalidate": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-authorization-public": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-authorization-smaxage": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-cookie": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-date-update": true, + "other-date-update-expires": true, + "other-date-update-expires-update": true, + "other-fresh-content-disposition-attachment": true, + "other-heuristic-content-disposition-attachment": [ + "Assertion", + "Response 2 does not come from cache" + ], + "other-set-cookie": [ + "Assertion", + "Response 2 does not come from cache" + ], + "partial-store-complete-reuse-partial": true, + "partial-store-complete-reuse-partial-no-last": true, + "partial-store-complete-reuse-partial-suffix": true, + "partial-store-partial-complete": [ + "Setup", + "Response 1 status is 503, not 206" + ], + "partial-store-partial-reuse-partial": [ + "Setup", + "Response 1 status is 503, not 206" + ], + "partial-store-partial-reuse-partial-absent": [ + "Setup", + "Response 1 status is 503, not 206" + ], + "partial-store-partial-reuse-partial-byterange": [ + "Setup", + "Response 1 status is 503, not 206" + ], + "partial-store-partial-reuse-partial-suffix": [ + "Setup", + "Response 1 status is 503, not 206" + ], + "partial-use-headers": true, + "partial-use-stored-headers": true, + "pragma-request-extension": true, + "pragma-request-no-cache": true, + "pragma-response-extension": true, + "pragma-response-no-cache": true, + "pragma-response-no-cache-heuristic": [ + "Assertion", + "Response 2 does not come from cache" + ], + "query-args-different": true, + "query-args-same": true, + "stale-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-close-must-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-no-cache": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-proxy-revalidate": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-close-s-maxage=2": [ + "Assertion", + "Response 2 comes from cache" + ], + "stale-sie-503": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-sie-close": [ + "Assertion", + "Response 2 does not come from cache" + ], + "stale-warning-become": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-warning-stored": [ + "Setup", + "Response 2 does not come from cache" + ], + "stale-while-revalidate": true, + "stale-while-revalidate-window": true, + 
"status-200-fresh": true, + "status-200-must-understand": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-200-stale": true, + "status-203-fresh": true, + "status-203-stale": true, + "status-204-fresh": true, + "status-204-stale": true, + "status-299-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-299-stale": true, + "status-301-fresh": true, + "status-301-stale": true, + "status-302-fresh": true, + "status-302-stale": true, + "status-303-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-303-stale": true, + "status-307-fresh": true, + "status-307-stale": true, + "status-308-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-308-stale": true, + "status-400-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-400-stale": true, + "status-404-fresh": true, + "status-404-stale": true, + "status-410-fresh": true, + "status-410-stale": true, + "status-499-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-499-stale": true, + "status-500-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-500-stale": true, + "status-502-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-502-stale": true, + "status-503-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-503-stale": true, + "status-504-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-504-stale": true, + "status-599-fresh": [ + "Assertion", + "Response 2 does not come from cache" + ], + "status-599-must-understand": true, + "status-599-stale": true, + "vary-2-match": true, + "vary-2-match-omit": true, + "vary-2-no-match": true, + "vary-3-match": true, + "vary-3-no-match": true, + "vary-3-omit": true, + "vary-3-order": true, + "vary-cache-key": true, + "vary-invalidate": true, + "vary-match": true, + "vary-no-match": true, + "vary-normalise-combine": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-case": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-order": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-select": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-lang-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-normalise-space": [ + "Assertion", + "Response 2 does not come from cache" + ], + "vary-omit": true, + "vary-omit-stored": true, + "vary-star": true, + "vary-syntax-empty-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-empty-star-lines": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-foo-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star": true, + "vary-syntax-star-foo": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-star": [ + "Assertion", + "Response 2 comes from cache" + ], + "vary-syntax-star-star-lines": [ + "Assertion", + "Response 2 comes from cache" + ] +} diff --git a/test/fixtures/cache-tests/spec/Makefile b/test/fixtures/cache-tests/spec/Makefile new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/Makefile @@ -0,0 +1,24 @@ +saxpath ?= "lib/saxon9.jar" +saxon ?= java -classpath $(saxpath) net.sf.saxon.Transform -l +themedir ?= ../../../httpwg-theme +saxparam ?= bootstrapCssUrl='/spec/bootstrap.min.css' \ + siteJsUrl='/spec/script.mjs' \ + 
siteCssUrl='/spec/style.css' \ + banner='This copy of the specification has test results interspersed throughout; click on ℹ️ to see them.' +stylesheet := lib/rfcbootstrap.xslt + +specs := rfc9111 +TARGETS := $(addsuffix .html,$(specs)) + +.PHONY: all clean + +all: $(TARGETS) + +clean: + rm -f $(TARGETS) + +%.cleanxml: %.xml + $(saxon) $< lib/xreffer.xslt | $(saxon) - lib/clean-for-DTD.xslt > $@ + +%.html: %.cleanxml $(stylesheet) + $(saxon) $< $(stylesheet) $(saxparam) pageUrl='https://httpwg.org/specs/$@' > $@ diff --git a/test/fixtures/cache-tests/spec/bootstrap.min.css b/test/fixtures/cache-tests/spec/bootstrap.min.css new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/bootstrap.min.css @@ -0,0 +1,7 @@ +/*! + * Bootstrap v4.5.3 (https://getbootstrap.com/) + * Copyright 2011-2020 The Bootstrap Authors + * Copyright 2011-2020 Twitter, Inc. + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */:root{--blue:#007bff;--indigo:#6610f2;--purple:#6f42c1;--pink:#e83e8c;--red:#dc3545;--orange:#fd7e14;--yellow:#ffc107;--green:#28a745;--teal:#20c997;--cyan:#17a2b8;--white:#fff;--gray:#6c757d;--gray-dark:#343a40;--primary:#007bff;--secondary:#6c757d;--success:#28a745;--info:#17a2b8;--warning:#ffc107;--danger:#dc3545;--light:#f8f9fa;--dark:#343a40;--breakpoint-xs:0;--breakpoint-sm:576px;--breakpoint-md:768px;--breakpoint-lg:992px;--breakpoint-xl:1200px;--font-family-sans-serif:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";--font-family-monospace:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace}*,::after,::before{box-sizing:border-box}html{font-family:sans-serif;line-height:1.15;-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:transparent}article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}body{margin:0;font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";font-size:1rem;font-weight:400;line-height:1.5;color:#212529;text-align:left;background-color:#fff}[tabindex="-1"]:focus:not(:focus-visible){outline:0!important}hr{box-sizing:content-box;height:0;overflow:visible}h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem}p{margin-top:0;margin-bottom:1rem}abbr[data-original-title],abbr[title]{text-decoration:underline;-webkit-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;border-bottom:0;-webkit-text-decoration-skip-ink:none;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}dl,ol,ul{margin-top:0;margin-bottom:1rem}ol ol,ol ul,ul ol,ul ul{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}b,strong{font-weight:bolder}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#007bff;text-decoration:none;background-color:transparent}a:hover{color:#0056b3;text-decoration:underline}a:not([href]):not([class]){color:inherit;text-decoration:none}a:not([href]):not([class]):hover{color:inherit;text-decoration:none}code,kbd,pre,samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;font-size:1em}pre{margin-top:0;margin-bottom:1rem;overflow:auto;-ms-overflow-style:scrollbar}figure{margin:0 0 
.dropdown-menu{position:static;float:none}.navbar-text{display:inline-block;padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{-ms-flex-preferred-size:100%;flex-basis:100%;-ms-flex-positive:1;flex-grow:1;-ms-flex-align:center;align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem}.navbar-toggler:focus,.navbar-toggler:hover{text-decoration:none}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;content:"";background:no-repeat center center;background-size:100% 100%}@media (max-width:575.98px){.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid,.navbar-expand-sm>.container-lg,.navbar-expand-sm>.container-md,.navbar-expand-sm>.container-sm,.navbar-expand-sm>.container-xl{padding-right:0;padding-left:0}}@media (min-width:576px){.navbar-expand-sm{-ms-flex-flow:row nowrap;flex-flow:row nowrap;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-sm .navbar-nav{-ms-flex-direction:row;flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid,.navbar-expand-sm>.container-lg,.navbar-expand-sm>.container-md,.navbar-expand-sm>.container-sm,.navbar-expand-sm>.container-xl{-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-sm .navbar-collapse{display:-ms-flexbox!important;display:flex!important;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}}@media (max-width:767.98px){.navbar-expand-md>.container,.navbar-expand-md>.container-fluid,.navbar-expand-md>.container-lg,.navbar-expand-md>.container-md,.navbar-expand-md>.container-sm,.navbar-expand-md>.container-xl{padding-right:0;padding-left:0}}@media (min-width:768px){.navbar-expand-md{-ms-flex-flow:row nowrap;flex-flow:row nowrap;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-md .navbar-nav{-ms-flex-direction:row;flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md>.container,.navbar-expand-md>.container-fluid,.navbar-expand-md>.container-lg,.navbar-expand-md>.container-md,.navbar-expand-md>.container-sm,.navbar-expand-md>.container-xl{-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-md .navbar-collapse{display:-ms-flexbox!important;display:flex!important;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}}@media (max-width:991.98px){.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid,.navbar-expand-lg>.container-lg,.navbar-expand-lg>.container-md,.navbar-expand-lg>.container-sm,.navbar-expand-lg>.container-xl{padding-right:0;padding-left:0}}@media (min-width:992px){.navbar-expand-lg{-ms-flex-flow:row nowrap;flex-flow:row nowrap;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-lg .navbar-nav{-ms-flex-direction:row;flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid,.navbar-expand-lg>.container-lg,.navbar-expand-lg>.container-md,.navbar-expand-lg>.container-sm,.navbar-expand-lg>.container-xl{-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-lg 
.navbar-collapse{display:-ms-flexbox!important;display:flex!important;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}}@media (max-width:1199.98px){.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid,.navbar-expand-xl>.container-lg,.navbar-expand-xl>.container-md,.navbar-expand-xl>.container-sm,.navbar-expand-xl>.container-xl{padding-right:0;padding-left:0}}@media (min-width:1200px){.navbar-expand-xl{-ms-flex-flow:row nowrap;flex-flow:row nowrap;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-xl .navbar-nav{-ms-flex-direction:row;flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid,.navbar-expand-xl>.container-lg,.navbar-expand-xl>.container-md,.navbar-expand-xl>.container-sm,.navbar-expand-xl>.container-xl{-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-xl .navbar-collapse{display:-ms-flexbox!important;display:flex!important;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}}.navbar-expand{-ms-flex-flow:row nowrap;flex-flow:row nowrap;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand>.container,.navbar-expand>.container-fluid,.navbar-expand>.container-lg,.navbar-expand>.container-md,.navbar-expand>.container-sm,.navbar-expand>.container-xl{padding-right:0;padding-left:0}.navbar-expand .navbar-nav{-ms-flex-direction:row;flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand>.container,.navbar-expand>.container-fluid,.navbar-expand>.container-lg,.navbar-expand>.container-md,.navbar-expand>.container-sm,.navbar-expand>.container-xl{-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand .navbar-collapse{display:-ms-flexbox!important;display:flex!important;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-light .navbar-brand{color:rgba(0,0,0,.9)}.navbar-light .navbar-brand:focus,.navbar-light .navbar-brand:hover{color:rgba(0,0,0,.9)}.navbar-light .navbar-nav .nav-link{color:rgba(0,0,0,.5)}.navbar-light .navbar-nav .nav-link:focus,.navbar-light .navbar-nav .nav-link:hover{color:rgba(0,0,0,.7)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(0,0,0,.3)}.navbar-light .navbar-nav .active>.nav-link,.navbar-light .navbar-nav .nav-link.active,.navbar-light .navbar-nav .nav-link.show,.navbar-light .navbar-nav .show>.nav-link{color:rgba(0,0,0,.9)}.navbar-light .navbar-toggler{color:rgba(0,0,0,.5);border-color:rgba(0,0,0,.1)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' width='30' height='30' viewBox='0 0 30 30'%3e%3cpath stroke='rgba%280, 0, 0, 0.5%29' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-light .navbar-text{color:rgba(0,0,0,.5)}.navbar-light .navbar-text a{color:rgba(0,0,0,.9)}.navbar-light .navbar-text a:focus,.navbar-light .navbar-text a:hover{color:rgba(0,0,0,.9)}.navbar-dark .navbar-brand{color:#fff}.navbar-dark .navbar-brand:focus,.navbar-dark .navbar-brand:hover{color:#fff}.navbar-dark .navbar-nav .nav-link{color:rgba(255,255,255,.5)}.navbar-dark .navbar-nav .nav-link:focus,.navbar-dark .navbar-nav .nav-link:hover{color:rgba(255,255,255,.75)}.navbar-dark .navbar-nav 
.nav-link.disabled{color:rgba(255,255,255,.25)}.navbar-dark .navbar-nav .active>.nav-link,.navbar-dark .navbar-nav .nav-link.active,.navbar-dark .navbar-nav .nav-link.show,.navbar-dark .navbar-nav .show>.nav-link{color:#fff}.navbar-dark .navbar-toggler{color:rgba(255,255,255,.5);border-color:rgba(255,255,255,.1)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' width='30' height='30' viewBox='0 0 30 30'%3e%3cpath stroke='rgba%28255, 255, 255, 0.5%29' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.navbar-dark .navbar-text{color:rgba(255,255,255,.5)}.navbar-dark .navbar-text a{color:#fff}.navbar-dark .navbar-text a:focus,.navbar-dark .navbar-text a:hover{color:#fff}.card{position:relative;display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125);border-radius:.25rem}.card>hr{margin-right:0;margin-left:0}.card>.list-group{border-top:inherit;border-bottom:inherit}.card>.list-group:first-child{border-top-width:0;border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card>.list-group:last-child{border-bottom-width:0;border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card>.card-header+.list-group,.card>.list-group+.card-footer{border-top:0}.card-body{-ms-flex:1 1 auto;flex:1 1 auto;min-height:1px;padding:1.25rem}.card-title{margin-bottom:.75rem}.card-subtitle{margin-top:-.375rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link:hover{text-decoration:none}.card-link+.card-link{margin-left:1.25rem}.card-header{padding:.75rem 1.25rem;margin-bottom:0;background-color:rgba(0,0,0,.03);border-bottom:1px solid rgba(0,0,0,.125)}.card-header:first-child{border-radius:calc(.25rem - 1px) calc(.25rem - 1px) 0 0}.card-footer{padding:.75rem 1.25rem;background-color:rgba(0,0,0,.03);border-top:1px solid rgba(0,0,0,.125)}.card-footer:last-child{border-radius:0 0 calc(.25rem - 1px) calc(.25rem - 1px)}.card-header-tabs{margin-right:-.625rem;margin-bottom:-.75rem;margin-left:-.625rem;border-bottom:0}.card-header-pills{margin-right:-.625rem;margin-left:-.625rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1.25rem;border-radius:calc(.25rem - 1px)}.card-img,.card-img-bottom,.card-img-top{-ms-flex-negative:0;flex-shrink:0;width:100%}.card-img,.card-img-top{border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card-img,.card-img-bottom{border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card-deck .card{margin-bottom:15px}@media (min-width:576px){.card-deck{display:-ms-flexbox;display:flex;-ms-flex-flow:row wrap;flex-flow:row wrap;margin-right:-15px;margin-left:-15px}.card-deck .card{-ms-flex:1 0 0%;flex:1 0 0%;margin-right:15px;margin-bottom:0;margin-left:15px}}.card-group>.card{margin-bottom:15px}@media (min-width:576px){.card-group{display:-ms-flexbox;display:flex;-ms-flex-flow:row wrap;flex-flow:row wrap}.card-group>.card{-ms-flex:1 0 0%;flex:1 0 0%;margin-bottom:0}.card-group>.card+.card{margin-left:0;border-left:0}.card-group>.card:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.card-group>.card:not(:last-child) .card-header,.card-group>.card:not(:last-child) .card-img-top{border-top-right-radius:0}.card-group>.card:not(:last-child) 
.card-footer,.card-group>.card:not(:last-child) .card-img-bottom{border-bottom-right-radius:0}.card-group>.card:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.card-group>.card:not(:first-child) .card-header,.card-group>.card:not(:first-child) .card-img-top{border-top-left-radius:0}.card-group>.card:not(:first-child) .card-footer,.card-group>.card:not(:first-child) .card-img-bottom{border-bottom-left-radius:0}}.card-columns .card{margin-bottom:.75rem}@media (min-width:576px){.card-columns{-webkit-column-count:3;-moz-column-count:3;column-count:3;-webkit-column-gap:1.25rem;-moz-column-gap:1.25rem;column-gap:1.25rem;orphans:1;widows:1}.card-columns .card{display:inline-block;width:100%}}.accordion{overflow-anchor:none}.accordion>.card{overflow:hidden}.accordion>.card:not(:last-of-type){border-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.accordion>.card:not(:first-of-type){border-top-left-radius:0;border-top-right-radius:0}.accordion>.card>.card-header{border-radius:0;margin-bottom:-1px}.breadcrumb{display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;padding:.75rem 1rem;margin-bottom:1rem;list-style:none;background-color:#e9ecef;border-radius:.25rem}.breadcrumb-item{display:-ms-flexbox;display:flex}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{display:inline-block;padding-right:.5rem;color:#6c757d;content:"/"}.breadcrumb-item+.breadcrumb-item:hover::before{text-decoration:underline}.breadcrumb-item+.breadcrumb-item:hover::before{text-decoration:none}.breadcrumb-item.active{color:#6c757d}.pagination{display:-ms-flexbox;display:flex;padding-left:0;list-style:none;border-radius:.25rem}.page-link{position:relative;display:block;padding:.5rem .75rem;margin-left:-1px;line-height:1.25;color:#007bff;background-color:#fff;border:1px solid #dee2e6}.page-link:hover{z-index:2;color:#0056b3;text-decoration:none;background-color:#e9ecef;border-color:#dee2e6}.page-link:focus{z-index:3;outline:0;box-shadow:0 0 0 .2rem rgba(0,123,255,.25)}.page-item:first-child .page-link{margin-left:0;border-top-left-radius:.25rem;border-bottom-left-radius:.25rem}.page-item:last-child .page-link{border-top-right-radius:.25rem;border-bottom-right-radius:.25rem}.page-item.active .page-link{z-index:3;color:#fff;background-color:#007bff;border-color:#007bff}.page-item.disabled .page-link{color:#6c757d;pointer-events:none;cursor:auto;background-color:#fff;border-color:#dee2e6}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem;line-height:1.5}.pagination-lg .page-item:first-child .page-link{border-top-left-radius:.3rem;border-bottom-left-radius:.3rem}.pagination-lg .page-item:last-child .page-link{border-top-right-radius:.3rem;border-bottom-right-radius:.3rem}.pagination-sm .page-link{padding:.25rem .5rem;font-size:.875rem;line-height:1.5}.pagination-sm .page-item:first-child .page-link{border-top-left-radius:.2rem;border-bottom-left-radius:.2rem}.pagination-sm .page-item:last-child .page-link{border-top-right-radius:.2rem;border-bottom-right-radius:.2rem}.badge{display:inline-block;padding:.25em .4em;font-size:75%;font-weight:700;line-height:1;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media (prefers-reduced-motion:reduce){.badge{transition:none}}a.badge:focus,a.badge:hover{text-decoration:none}.badge:empty{display:none}.btn 
.badge{position:relative;top:-1px}.badge-pill{padding-right:.6em;padding-left:.6em;border-radius:10rem}.badge-primary{color:#fff;background-color:#007bff}a.badge-primary:focus,a.badge-primary:hover{color:#fff;background-color:#0062cc}a.badge-primary.focus,a.badge-primary:focus{outline:0;box-shadow:0 0 0 .2rem rgba(0,123,255,.5)}.badge-secondary{color:#fff;background-color:#6c757d}a.badge-secondary:focus,a.badge-secondary:hover{color:#fff;background-color:#545b62}a.badge-secondary.focus,a.badge-secondary:focus{outline:0;box-shadow:0 0 0 .2rem rgba(108,117,125,.5)}.badge-success{color:#fff;background-color:#28a745}a.badge-success:focus,a.badge-success:hover{color:#fff;background-color:#1e7e34}a.badge-success.focus,a.badge-success:focus{outline:0;box-shadow:0 0 0 .2rem rgba(40,167,69,.5)}.badge-info{color:#fff;background-color:#17a2b8}a.badge-info:focus,a.badge-info:hover{color:#fff;background-color:#117a8b}a.badge-info.focus,a.badge-info:focus{outline:0;box-shadow:0 0 0 .2rem rgba(23,162,184,.5)}.badge-warning{color:#212529;background-color:#ffc107}a.badge-warning:focus,a.badge-warning:hover{color:#212529;background-color:#d39e00}a.badge-warning.focus,a.badge-warning:focus{outline:0;box-shadow:0 0 0 .2rem rgba(255,193,7,.5)}.badge-danger{color:#fff;background-color:#dc3545}a.badge-danger:focus,a.badge-danger:hover{color:#fff;background-color:#bd2130}a.badge-danger.focus,a.badge-danger:focus{outline:0;box-shadow:0 0 0 .2rem rgba(220,53,69,.5)}.badge-light{color:#212529;background-color:#f8f9fa}a.badge-light:focus,a.badge-light:hover{color:#212529;background-color:#dae0e5}a.badge-light.focus,a.badge-light:focus{outline:0;box-shadow:0 0 0 .2rem rgba(248,249,250,.5)}.badge-dark{color:#fff;background-color:#343a40}a.badge-dark:focus,a.badge-dark:hover{color:#fff;background-color:#1d2124}a.badge-dark.focus,a.badge-dark:focus{outline:0;box-shadow:0 0 0 .2rem rgba(52,58,64,.5)}.jumbotron{padding:2rem 1rem;margin-bottom:2rem;background-color:#e9ecef;border-radius:.3rem}@media (min-width:576px){.jumbotron{padding:4rem 2rem}}.jumbotron-fluid{padding-right:0;padding-left:0;border-radius:0}.alert{position:relative;padding:.75rem 1.25rem;margin-bottom:1rem;border:1px solid transparent;border-radius:.25rem}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:4rem}.alert-dismissible .close{position:absolute;top:0;right:0;z-index:2;padding:.75rem 1.25rem;color:inherit}.alert-primary{color:#004085;background-color:#cce5ff;border-color:#b8daff}.alert-primary hr{border-top-color:#9fcdff}.alert-primary .alert-link{color:#002752}.alert-secondary{color:#383d41;background-color:#e2e3e5;border-color:#d6d8db}.alert-secondary hr{border-top-color:#c8cbcf}.alert-secondary .alert-link{color:#202326}.alert-success{color:#155724;background-color:#d4edda;border-color:#c3e6cb}.alert-success hr{border-top-color:#b1dfbb}.alert-success .alert-link{color:#0b2e13}.alert-info{color:#0c5460;background-color:#d1ecf1;border-color:#bee5eb}.alert-info hr{border-top-color:#abdde5}.alert-info .alert-link{color:#062c33}.alert-warning{color:#856404;background-color:#fff3cd;border-color:#ffeeba}.alert-warning hr{border-top-color:#ffe8a1}.alert-warning .alert-link{color:#533f03}.alert-danger{color:#721c24;background-color:#f8d7da;border-color:#f5c6cb}.alert-danger hr{border-top-color:#f1b0b7}.alert-danger .alert-link{color:#491217}.alert-light{color:#818182;background-color:#fefefe;border-color:#fdfdfe}.alert-light hr{border-top-color:#ececf6}.alert-light 
.alert-link{color:#686868}.alert-dark{color:#1b1e21;background-color:#d6d8d9;border-color:#c6c8ca}.alert-dark hr{border-top-color:#b9bbbe}.alert-dark .alert-link{color:#040505}@-webkit-keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}.progress{display:-ms-flexbox;display:flex;height:1rem;overflow:hidden;line-height:0;font-size:.75rem;background-color:#e9ecef;border-radius:.25rem}.progress-bar{display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;-ms-flex-pack:center;justify-content:center;overflow:hidden;color:#fff;text-align:center;white-space:nowrap;background-color:#007bff;transition:width .6s ease}@media (prefers-reduced-motion:reduce){.progress-bar{transition:none}}.progress-bar-striped{background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-size:1rem 1rem}.progress-bar-animated{-webkit-animation:progress-bar-stripes 1s linear infinite;animation:progress-bar-stripes 1s linear infinite}@media (prefers-reduced-motion:reduce){.progress-bar-animated{-webkit-animation:none;animation:none}}.media{display:-ms-flexbox;display:flex;-ms-flex-align:start;align-items:flex-start}.media-body{-ms-flex:1;flex:1}.list-group{display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;padding-left:0;margin-bottom:0;border-radius:.25rem}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:focus,.list-group-item-action:hover{z-index:1;color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#212529;background-color:#e9ecef}.list-group-item{position:relative;display:block;padding:.75rem 1.25rem;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item:first-child{border-top-left-radius:inherit;border-top-right-radius:inherit}.list-group-item:last-child{border-bottom-right-radius:inherit;border-bottom-left-radius:inherit}.list-group-item.disabled,.list-group-item:disabled{color:#6c757d;pointer-events:none;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#007bff;border-color:#007bff}.list-group-item+.list-group-item{border-top-width:0}.list-group-item+.list-group-item.active{margin-top:-1px;border-top-width:1px}.list-group-horizontal{-ms-flex-direction:row;flex-direction:row}.list-group-horizontal>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal>.list-group-item.active{margin-top:0}.list-group-horizontal>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}@media 
(min-width:576px){.list-group-horizontal-sm{-ms-flex-direction:row;flex-direction:row}.list-group-horizontal-sm>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-sm>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-sm>.list-group-item.active{margin-top:0}.list-group-horizontal-sm>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-sm>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:768px){.list-group-horizontal-md{-ms-flex-direction:row;flex-direction:row}.list-group-horizontal-md>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-md>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-md>.list-group-item.active{margin-top:0}.list-group-horizontal-md>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-md>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:992px){.list-group-horizontal-lg{-ms-flex-direction:row;flex-direction:row}.list-group-horizontal-lg>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-lg>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-lg>.list-group-item.active{margin-top:0}.list-group-horizontal-lg>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-lg>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}@media (min-width:1200px){.list-group-horizontal-xl{-ms-flex-direction:row;flex-direction:row}.list-group-horizontal-xl>.list-group-item:first-child{border-bottom-left-radius:.25rem;border-top-right-radius:0}.list-group-horizontal-xl>.list-group-item:last-child{border-top-right-radius:.25rem;border-bottom-left-radius:0}.list-group-horizontal-xl>.list-group-item.active{margin-top:0}.list-group-horizontal-xl>.list-group-item+.list-group-item{border-top-width:1px;border-left-width:0}.list-group-horizontal-xl>.list-group-item+.list-group-item.active{margin-left:-1px;border-left-width:1px}}.list-group-flush{border-radius:0}.list-group-flush>.list-group-item{border-width:0 0 
1px}.list-group-flush>.list-group-item:last-child{border-bottom-width:0}.list-group-item-primary{color:#004085;background-color:#b8daff}.list-group-item-primary.list-group-item-action:focus,.list-group-item-primary.list-group-item-action:hover{color:#004085;background-color:#9fcdff}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#004085;border-color:#004085}.list-group-item-secondary{color:#383d41;background-color:#d6d8db}.list-group-item-secondary.list-group-item-action:focus,.list-group-item-secondary.list-group-item-action:hover{color:#383d41;background-color:#c8cbcf}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#383d41;border-color:#383d41}.list-group-item-success{color:#155724;background-color:#c3e6cb}.list-group-item-success.list-group-item-action:focus,.list-group-item-success.list-group-item-action:hover{color:#155724;background-color:#b1dfbb}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#155724;border-color:#155724}.list-group-item-info{color:#0c5460;background-color:#bee5eb}.list-group-item-info.list-group-item-action:focus,.list-group-item-info.list-group-item-action:hover{color:#0c5460;background-color:#abdde5}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#0c5460;border-color:#0c5460}.list-group-item-warning{color:#856404;background-color:#ffeeba}.list-group-item-warning.list-group-item-action:focus,.list-group-item-warning.list-group-item-action:hover{color:#856404;background-color:#ffe8a1}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#856404;border-color:#856404}.list-group-item-danger{color:#721c24;background-color:#f5c6cb}.list-group-item-danger.list-group-item-action:focus,.list-group-item-danger.list-group-item-action:hover{color:#721c24;background-color:#f1b0b7}.list-group-item-danger.list-group-item-action.active{color:#fff;background-color:#721c24;border-color:#721c24}.list-group-item-light{color:#818182;background-color:#fdfdfe}.list-group-item-light.list-group-item-action:focus,.list-group-item-light.list-group-item-action:hover{color:#818182;background-color:#ececf6}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#818182;border-color:#818182}.list-group-item-dark{color:#1b1e21;background-color:#c6c8ca}.list-group-item-dark.list-group-item-action:focus,.list-group-item-dark.list-group-item-action:hover{color:#1b1e21;background-color:#b9bbbe}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#1b1e21;border-color:#1b1e21}.close{float:right;font-size:1.5rem;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;opacity:.5}.close:hover{color:#000;text-decoration:none}.close:not(:disabled):not(.disabled):focus,.close:not(:disabled):not(.disabled):hover{opacity:.75}button.close{padding:0;background-color:transparent;border:0}a.close.disabled{pointer-events:none}.toast{-ms-flex-preferred-size:350px;flex-basis:350px;max-width:350px;font-size:.875rem;background-color:rgba(255,255,255,.85);background-clip:padding-box;border:1px solid rgba(0,0,0,.1);box-shadow:0 .25rem .75rem rgba(0,0,0,.1);opacity:0;border-radius:.25rem}.toast:not(:last-child){margin-bottom:.75rem}.toast.showing{opacity:1}.toast.show{display:block;opacity:1}.toast.hide{display:none}.toast-header{display:-ms-flexbox;display:flex;-ms-flex-align:center;align-items:center;padding:.25rem 
.75rem;color:#6c757d;background-color:rgba(255,255,255,.85);background-clip:padding-box;border-bottom:1px solid rgba(0,0,0,.05);border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.toast-body{padding:.75rem}.modal-open{overflow:hidden}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal{position:fixed;top:0;left:0;z-index:1050;display:none;width:100%;height:100%;overflow:hidden;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{transition:-webkit-transform .3s ease-out;transition:transform .3s ease-out;transition:transform .3s ease-out,-webkit-transform .3s ease-out;-webkit-transform:translate(0,-50px);transform:translate(0,-50px)}@media (prefers-reduced-motion:reduce){.modal.fade .modal-dialog{transition:none}}.modal.show .modal-dialog{-webkit-transform:none;transform:none}.modal.modal-static .modal-dialog{-webkit-transform:scale(1.02);transform:scale(1.02)}.modal-dialog-scrollable{display:-ms-flexbox;display:flex;max-height:calc(100% - 1rem)}.modal-dialog-scrollable .modal-content{max-height:calc(100vh - 1rem);overflow:hidden}.modal-dialog-scrollable .modal-footer,.modal-dialog-scrollable .modal-header{-ms-flex-negative:0;flex-shrink:0}.modal-dialog-scrollable .modal-body{overflow-y:auto}.modal-dialog-centered{display:-ms-flexbox;display:flex;-ms-flex-align:center;align-items:center;min-height:calc(100% - 1rem)}.modal-dialog-centered::before{display:block;height:calc(100vh - 1rem);height:-webkit-min-content;height:-moz-min-content;height:min-content;content:""}.modal-dialog-centered.modal-dialog-scrollable{-ms-flex-direction:column;flex-direction:column;-ms-flex-pack:center;justify-content:center;height:100%}.modal-dialog-centered.modal-dialog-scrollable .modal-content{max-height:none}.modal-dialog-centered.modal-dialog-scrollable::before{content:none}.modal-content{position:relative;display:-ms-flexbox;display:flex;-ms-flex-direction:column;flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;outline:0}.modal-backdrop{position:fixed;top:0;left:0;z-index:1040;width:100vw;height:100vh;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:-ms-flexbox;display:flex;-ms-flex-align:start;align-items:flex-start;-ms-flex-pack:justify;justify-content:space-between;padding:1rem 1rem;border-bottom:1px solid #dee2e6;border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.modal-header .close{padding:1rem 1rem;margin:-1rem -1rem -1rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;-ms-flex:1 1 auto;flex:1 1 auto;padding:1rem}.modal-footer{display:-ms-flexbox;display:flex;-ms-flex-wrap:wrap;flex-wrap:wrap;-ms-flex-align:center;align-items:center;-ms-flex-pack:end;justify-content:flex-end;padding:.75rem;border-top:1px solid #dee2e6;border-bottom-right-radius:calc(.3rem - 1px);border-bottom-left-radius:calc(.3rem - 1px)}.modal-footer>*{margin:.25rem}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media (min-width:576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-scrollable{max-height:calc(100% - 3.5rem)}.modal-dialog-scrollable .modal-content{max-height:calc(100vh - 3.5rem)}.modal-dialog-centered{min-height:calc(100% - 3.5rem)}.modal-dialog-centered::before{height:calc(100vh - 
3.5rem);height:-webkit-min-content;height:-moz-min-content;height:min-content}.modal-sm{max-width:300px}}@media (min-width:992px){.modal-lg,.modal-xl{max-width:800px}}@media (min-width:1200px){.modal-xl{max-width:1140px}}.tooltip{position:absolute;z-index:1070;display:block;margin:0;font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-auto[x-placement^=top],.bs-tooltip-top{padding:.4rem 0}.bs-tooltip-auto[x-placement^=top] .arrow,.bs-tooltip-top .arrow{bottom:0}.bs-tooltip-auto[x-placement^=top] .arrow::before,.bs-tooltip-top .arrow::before{top:0;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-auto[x-placement^=right],.bs-tooltip-right{padding:0 .4rem}.bs-tooltip-auto[x-placement^=right] .arrow,.bs-tooltip-right .arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-auto[x-placement^=right] .arrow::before,.bs-tooltip-right .arrow::before{right:0;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-auto[x-placement^=bottom],.bs-tooltip-bottom{padding:.4rem 0}.bs-tooltip-auto[x-placement^=bottom] .arrow,.bs-tooltip-bottom .arrow{top:0}.bs-tooltip-auto[x-placement^=bottom] .arrow::before,.bs-tooltip-bottom .arrow::before{bottom:0;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-auto[x-placement^=left],.bs-tooltip-left{padding:0 .4rem}.bs-tooltip-auto[x-placement^=left] .arrow,.bs-tooltip-left .arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-auto[x-placement^=left] .arrow::before,.bs-tooltip-left .arrow::before{left:0;border-width:.4rem 0 .4rem .4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000;border-radius:.25rem}.popover{position:absolute;top:0;left:0;z-index:1060;display:block;max-width:276px;font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji";font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem}.popover .arrow{position:absolute;display:block;width:1rem;height:.5rem;margin:0 .3rem}.popover .arrow::after,.popover .arrow::before{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-auto[x-placement^=top],.bs-popover-top{margin-bottom:.5rem}.bs-popover-auto[x-placement^=top]>.arrow,.bs-popover-top>.arrow{bottom:calc(-.5rem - 1px)}.bs-popover-auto[x-placement^=top]>.arrow::before,.bs-popover-top>.arrow::before{bottom:0;border-width:.5rem .5rem 
0;border-top-color:rgba(0,0,0,.25)}.bs-popover-auto[x-placement^=top]>.arrow::after,.bs-popover-top>.arrow::after{bottom:1px;border-width:.5rem .5rem 0;border-top-color:#fff}.bs-popover-auto[x-placement^=right],.bs-popover-right{margin-left:.5rem}.bs-popover-auto[x-placement^=right]>.arrow,.bs-popover-right>.arrow{left:calc(-.5rem - 1px);width:.5rem;height:1rem;margin:.3rem 0}.bs-popover-auto[x-placement^=right]>.arrow::before,.bs-popover-right>.arrow::before{left:0;border-width:.5rem .5rem .5rem 0;border-right-color:rgba(0,0,0,.25)}.bs-popover-auto[x-placement^=right]>.arrow::after,.bs-popover-right>.arrow::after{left:1px;border-width:.5rem .5rem .5rem 0;border-right-color:#fff}.bs-popover-auto[x-placement^=bottom],.bs-popover-bottom{margin-top:.5rem}.bs-popover-auto[x-placement^=bottom]>.arrow,.bs-popover-bottom>.arrow{top:calc(-.5rem - 1px)}.bs-popover-auto[x-placement^=bottom]>.arrow::before,.bs-popover-bottom>.arrow::before{top:0;border-width:0 .5rem .5rem .5rem;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-auto[x-placement^=bottom]>.arrow::after,.bs-popover-bottom>.arrow::after{top:1px;border-width:0 .5rem .5rem .5rem;border-bottom-color:#fff}.bs-popover-auto[x-placement^=bottom] .popover-header::before,.bs-popover-bottom .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-.5rem;content:"";border-bottom:1px solid #f7f7f7}.bs-popover-auto[x-placement^=left],.bs-popover-left{margin-right:.5rem}.bs-popover-auto[x-placement^=left]>.arrow,.bs-popover-left>.arrow{right:calc(-.5rem - 1px);width:.5rem;height:1rem;margin:.3rem 0}.bs-popover-auto[x-placement^=left]>.arrow::before,.bs-popover-left>.arrow::before{right:0;border-width:.5rem 0 .5rem .5rem;border-left-color:rgba(0,0,0,.25)}.bs-popover-auto[x-placement^=left]>.arrow::after,.bs-popover-left>.arrow::after{right:1px;border-width:.5rem 0 .5rem .5rem;border-left-color:#fff}.popover-header{padding:.5rem .75rem;margin-bottom:0;font-size:1rem;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.popover-header:empty{display:none}.popover-body{padding:.5rem .75rem;color:#212529}.carousel{position:relative}.carousel.pointer-event{-ms-touch-action:pan-y;touch-action:pan-y}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner::after{display:block;clear:both;content:""}.carousel-item{position:relative;display:none;float:left;width:100%;margin-right:-100%;-webkit-backface-visibility:hidden;backface-visibility:hidden;transition:-webkit-transform .6s ease-in-out;transition:transform .6s ease-in-out;transition:transform .6s ease-in-out,-webkit-transform .6s ease-in-out}@media (prefers-reduced-motion:reduce){.carousel-item{transition:none}}.carousel-item-next,.carousel-item-prev,.carousel-item.active{display:block}.active.carousel-item-right,.carousel-item-next:not(.carousel-item-left){-webkit-transform:translateX(100%);transform:translateX(100%)}.active.carousel-item-left,.carousel-item-prev:not(.carousel-item-right){-webkit-transform:translateX(-100%);transform:translateX(-100%)}.carousel-fade .carousel-item{opacity:0;transition-property:opacity;-webkit-transform:none;transform:none}.carousel-fade .carousel-item-next.carousel-item-left,.carousel-fade .carousel-item-prev.carousel-item-right,.carousel-fade .carousel-item.active{z-index:1;opacity:1}.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-right{z-index:0;opacity:0;transition:opacity 0s .6s}@media 
(prefers-reduced-motion:reduce){.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-right{transition:none}}.carousel-control-next,.carousel-control-prev{position:absolute;top:0;bottom:0;z-index:1;display:-ms-flexbox;display:flex;-ms-flex-align:center;align-items:center;-ms-flex-pack:center;justify-content:center;width:15%;color:#fff;text-align:center;opacity:.5;transition:opacity .15s ease}@media (prefers-reduced-motion:reduce){.carousel-control-next,.carousel-control-prev{transition:none}}.carousel-control-next:focus,.carousel-control-next:hover,.carousel-control-prev:focus,.carousel-control-prev:hover{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0}.carousel-control-next{right:0}.carousel-control-next-icon,.carousel-control-prev-icon{display:inline-block;width:20px;height:20px;background:no-repeat 50%/100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' width='8' height='8' viewBox='0 0 8 8'%3e%3cpath d='M5.25 0l-4 4 4 4 1.5-1.5L4.25 4l2.5-2.5L5.25 0z'/%3e%3c/svg%3e")}.carousel-control-next-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='%23fff' width='8' height='8' viewBox='0 0 8 8'%3e%3cpath d='M2.75 0l-1.5 1.5L3.75 4l-2.5 2.5L2.75 8l4-4-4-4z'/%3e%3c/svg%3e")}.carousel-indicators{position:absolute;right:0;bottom:0;left:0;z-index:15;display:-ms-flexbox;display:flex;-ms-flex-pack:center;justify-content:center;padding-left:0;margin-right:15%;margin-left:15%;list-style:none}.carousel-indicators li{box-sizing:content-box;-ms-flex:0 1 auto;flex:0 1 auto;width:30px;height:3px;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:#fff;background-clip:padding-box;border-top:10px solid transparent;border-bottom:10px solid transparent;opacity:.5;transition:opacity .6s ease}@media (prefers-reduced-motion:reduce){.carousel-indicators li{transition:none}}.carousel-indicators .active{opacity:1}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center}@-webkit-keyframes spinner-border{to{-webkit-transform:rotate(360deg);transform:rotate(360deg)}}@keyframes spinner-border{to{-webkit-transform:rotate(360deg);transform:rotate(360deg)}}.spinner-border{display:inline-block;width:2rem;height:2rem;vertical-align:text-bottom;border:.25em solid currentColor;border-right-color:transparent;border-radius:50%;-webkit-animation:spinner-border .75s linear infinite;animation:spinner-border .75s linear infinite}.spinner-border-sm{width:1rem;height:1rem;border-width:.2em}@-webkit-keyframes spinner-grow{0%{-webkit-transform:scale(0);transform:scale(0)}50%{opacity:1;-webkit-transform:none;transform:none}}@keyframes spinner-grow{0%{-webkit-transform:scale(0);transform:scale(0)}50%{opacity:1;-webkit-transform:none;transform:none}}.spinner-grow{display:inline-block;width:2rem;height:2rem;vertical-align:text-bottom;background-color:currentColor;border-radius:50%;opacity:0;-webkit-animation:spinner-grow .75s linear infinite;animation:spinner-grow .75s linear 
infinite}.spinner-grow-sm{width:1rem;height:1rem}.align-baseline{vertical-align:baseline!important}.align-top{vertical-align:top!important}.align-middle{vertical-align:middle!important}.align-bottom{vertical-align:bottom!important}.align-text-bottom{vertical-align:text-bottom!important}.align-text-top{vertical-align:text-top!important}.bg-primary{background-color:#007bff!important}a.bg-primary:focus,a.bg-primary:hover,button.bg-primary:focus,button.bg-primary:hover{background-color:#0062cc!important}.bg-secondary{background-color:#6c757d!important}a.bg-secondary:focus,a.bg-secondary:hover,button.bg-secondary:focus,button.bg-secondary:hover{background-color:#545b62!important}.bg-success{background-color:#28a745!important}a.bg-success:focus,a.bg-success:hover,button.bg-success:focus,button.bg-success:hover{background-color:#1e7e34!important}.bg-info{background-color:#17a2b8!important}a.bg-info:focus,a.bg-info:hover,button.bg-info:focus,button.bg-info:hover{background-color:#117a8b!important}.bg-warning{background-color:#ffc107!important}a.bg-warning:focus,a.bg-warning:hover,button.bg-warning:focus,button.bg-warning:hover{background-color:#d39e00!important}.bg-danger{background-color:#dc3545!important}a.bg-danger:focus,a.bg-danger:hover,button.bg-danger:focus,button.bg-danger:hover{background-color:#bd2130!important}.bg-light{background-color:#f8f9fa!important}a.bg-light:focus,a.bg-light:hover,button.bg-light:focus,button.bg-light:hover{background-color:#dae0e5!important}.bg-dark{background-color:#343a40!important}a.bg-dark:focus,a.bg-dark:hover,button.bg-dark:focus,button.bg-dark:hover{background-color:#1d2124!important}.bg-white{background-color:#fff!important}.bg-transparent{background-color:transparent!important}.border{border:1px solid #dee2e6!important}.border-top{border-top:1px solid #dee2e6!important}.border-right{border-right:1px solid #dee2e6!important}.border-bottom{border-bottom:1px solid #dee2e6!important}.border-left{border-left:1px solid 
#dee2e6!important}.border-0{border:0!important}.border-top-0{border-top:0!important}.border-right-0{border-right:0!important}.border-bottom-0{border-bottom:0!important}.border-left-0{border-left:0!important}.border-primary{border-color:#007bff!important}.border-secondary{border-color:#6c757d!important}.border-success{border-color:#28a745!important}.border-info{border-color:#17a2b8!important}.border-warning{border-color:#ffc107!important}.border-danger{border-color:#dc3545!important}.border-light{border-color:#f8f9fa!important}.border-dark{border-color:#343a40!important}.border-white{border-color:#fff!important}.rounded-sm{border-radius:.2rem!important}.rounded{border-radius:.25rem!important}.rounded-top{border-top-left-radius:.25rem!important;border-top-right-radius:.25rem!important}.rounded-right{border-top-right-radius:.25rem!important;border-bottom-right-radius:.25rem!important}.rounded-bottom{border-bottom-right-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-left{border-top-left-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-lg{border-radius:.3rem!important}.rounded-circle{border-radius:50%!important}.rounded-pill{border-radius:50rem!important}.rounded-0{border-radius:0!important}.clearfix::after{display:block;clear:both;content:""}.d-none{display:none!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:-ms-flexbox!important;display:flex!important}.d-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}@media (min-width:576px){.d-sm-none{display:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:-ms-flexbox!important;display:flex!important}.d-sm-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media (min-width:768px){.d-md-none{display:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:-ms-flexbox!important;display:flex!important}.d-md-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media (min-width:992px){.d-lg-none{display:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:-ms-flexbox!important;display:flex!important}.d-lg-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media 
(min-width:1200px){.d-xl-none{display:none!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:-ms-flexbox!important;display:flex!important}.d-xl-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}@media print{.d-print-none{display:none!important}.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:-ms-flexbox!important;display:flex!important}.d-print-inline-flex{display:-ms-inline-flexbox!important;display:inline-flex!important}}.embed-responsive{position:relative;display:block;width:100%;padding:0;overflow:hidden}.embed-responsive::before{display:block;content:""}.embed-responsive .embed-responsive-item,.embed-responsive embed,.embed-responsive iframe,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-21by9::before{padding-top:42.857143%}.embed-responsive-16by9::before{padding-top:56.25%}.embed-responsive-4by3::before{padding-top:75%}.embed-responsive-1by1::before{padding-top:100%}.flex-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-center{-ms-flex-align:center!important;align-items:center!important}.align-items-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}@media (min-width:576px){.flex-sm-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-sm-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-sm-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-sm-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-sm-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-sm-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-sm-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-sm-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-sm-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-sm-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-sm-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-sm-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-sm-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-sm-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-sm-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-sm-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-sm-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-sm-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-sm-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-sm-center{-ms-flex-align:center!important;align-items:center!important}.align-items-sm-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-sm-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-sm-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-sm-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-sm-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-sm-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-sm-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-sm-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-sm-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-sm-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-sm-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-sm-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-sm-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-sm-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media (min-width:768px){.flex-md-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-md-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-md-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-md-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-md-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-md-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-md-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-md-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-md-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-md-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-md-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-md-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-md-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-md-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-md-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-md-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-md-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-md-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-md-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-md-center{-ms-flex-align:center!important;align-items:center!important}.align-items-md-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-md-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-md-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-md-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-md-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-md-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-md-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-md-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-md-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-md-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-md-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-md-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-md-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-md-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media (min-width:992px){.flex-lg-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-lg-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-lg-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-lg-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-lg-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-lg-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-lg-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-lg-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-lg-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-lg-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-lg-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-lg-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-lg-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-lg-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-lg-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-lg-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-lg-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-lg-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-lg-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-lg-center{-ms-flex-align:center!important;align-items:center!important}.align-items-lg-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-lg-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-lg-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-lg-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-lg-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-lg-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-lg-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-lg-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-lg-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-lg-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-lg-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-lg-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-lg-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-lg-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media (min-width:1200px){.flex-xl-row{-ms-flex-direction:row!important;flex-direction:row!important}.flex-xl-column{-ms-flex-direction:column!important;flex-direction:column!important}.flex-xl-row-reverse{-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-xl-column-reverse{-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-xl-wrap{-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-xl-nowrap{-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-xl-wrap-reverse{-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-xl-fill{-ms-flex:1 1 auto!important;flex:1 1 
auto!important}.flex-xl-grow-0{-ms-flex-positive:0!important;flex-grow:0!important}.flex-xl-grow-1{-ms-flex-positive:1!important;flex-grow:1!important}.flex-xl-shrink-0{-ms-flex-negative:0!important;flex-shrink:0!important}.flex-xl-shrink-1{-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-xl-start{-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-xl-end{-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-xl-center{-ms-flex-pack:center!important;justify-content:center!important}.justify-content-xl-between{-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-xl-around{-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-xl-start{-ms-flex-align:start!important;align-items:flex-start!important}.align-items-xl-end{-ms-flex-align:end!important;align-items:flex-end!important}.align-items-xl-center{-ms-flex-align:center!important;align-items:center!important}.align-items-xl-baseline{-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-xl-stretch{-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-xl-start{-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-xl-end{-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-xl-center{-ms-flex-line-pack:center!important;align-content:center!important}.align-content-xl-between{-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-xl-around{-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-xl-stretch{-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-xl-auto{-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-xl-start{-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-xl-end{-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-xl-center{-ms-flex-item-align:center!important;align-self:center!important}.align-self-xl-baseline{-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-xl-stretch{-ms-flex-item-align:stretch!important;align-self:stretch!important}}.float-left{float:left!important}.float-right{float:right!important}.float-none{float:none!important}@media (min-width:576px){.float-sm-left{float:left!important}.float-sm-right{float:right!important}.float-sm-none{float:none!important}}@media (min-width:768px){.float-md-left{float:left!important}.float-md-right{float:right!important}.float-md-none{float:none!important}}@media (min-width:992px){.float-lg-left{float:left!important}.float-lg-right{float:right!important}.float-lg-none{float:none!important}}@media 
(min-width:1200px){.float-xl-left{float:left!important}.float-xl-right{float:right!important}.float-xl-none{float:none!important}}.user-select-all{-webkit-user-select:all!important;-moz-user-select:all!important;-ms-user-select:all!important;user-select:all!important}.user-select-auto{-webkit-user-select:auto!important;-moz-user-select:auto!important;-ms-user-select:auto!important;user-select:auto!important}.user-select-none{-webkit-user-select:none!important;-moz-user-select:none!important;-ms-user-select:none!important;user-select:none!important}.overflow-auto{overflow:auto!important}.overflow-hidden{overflow:hidden!important}.position-static{position:static!important}.position-relative{position:relative!important}.position-absolute{position:absolute!important}.position-fixed{position:fixed!important}.position-sticky{position:-webkit-sticky!important;position:sticky!important}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}@supports ((position:-webkit-sticky) or (position:sticky)){.sticky-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;overflow:visible;clip:auto;white-space:normal}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important}.shadow{box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important}.shadow-lg{box-shadow:0 1rem 3rem rgba(0,0,0,.175)!important}.shadow-none{box-shadow:none!important}.w-25{width:25%!important}.w-50{width:50%!important}.w-75{width:75%!important}.w-100{width:100%!important}.w-auto{width:auto!important}.h-25{height:25%!important}.h-50{height:50%!important}.h-75{height:75%!important}.h-100{height:100%!important}.h-auto{height:auto!important}.mw-100{max-width:100%!important}.mh-100{max-height:100%!important}.min-vw-100{min-width:100vw!important}.min-vh-100{min-height:100vh!important}.vw-100{width:100vw!important}.vh-100{height:100vh!important}.m-0{margin:0!important}.mt-0,.my-0{margin-top:0!important}.mr-0,.mx-0{margin-right:0!important}.mb-0,.my-0{margin-bottom:0!important}.ml-0,.mx-0{margin-left:0!important}.m-1{margin:.25rem!important}.mt-1,.my-1{margin-top:.25rem!important}.mr-1,.mx-1{margin-right:.25rem!important}.mb-1,.my-1{margin-bottom:.25rem!important}.ml-1,.mx-1{margin-left:.25rem!important}.m-2{margin:.5rem!important}.mt-2,.my-2{margin-top:.5rem!important}.mr-2,.mx-2{margin-right:.5rem!important}.mb-2,.my-2{margin-bottom:.5rem!important}.ml-2,.mx-2{margin-left:.5rem!important}.m-3{margin:1rem!important}.mt-3,.my-3{margin-top:1rem!important}.mr-3,.mx-3{margin-right:1rem!important}.mb-3,.my-3{margin-bottom:1rem!important}.ml-3,.mx-3{margin-left:1rem!important}.m-4{margin:1.5rem!important}.mt-4,.my-4{margin-top:1.5rem!important}.mr-4,.mx-4{margin-right:1.5rem!important}.mb-4,.my-4{margin-bottom:1.5rem!important}.ml-4,.mx-4{margin-left:1.5rem!important}.m-5{margin:3rem!important}.mt-5,.my-5{margin-top:3rem!important}.mr-5,.mx-5{margin-right:3rem!important}.mb-5,.my-5{margin-bottom:3rem!important}.ml-5,.mx-5{margin-left:3rem!important}.p-0{padding:0!important}.pt-0,.py-0{padding-top:0!important}.pr-0,.px-0{padding-right:0!important}.pb-0,.py-0{padding-bottom:0!important}.pl-0,.px-0{padding-left:0!important}.p-1{padding:.25rem!important}.pt-1,.py-1{padding-top:.25rem!important}.pr-1,.px-1{padding-right:.25rem!important}.pb-1,.py-1{padding-botto
m:.25rem!important}.pl-1,.px-1{padding-left:.25rem!important}.p-2{padding:.5rem!important}.pt-2,.py-2{padding-top:.5rem!important}.pr-2,.px-2{padding-right:.5rem!important}.pb-2,.py-2{padding-bottom:.5rem!important}.pl-2,.px-2{padding-left:.5rem!important}.p-3{padding:1rem!important}.pt-3,.py-3{padding-top:1rem!important}.pr-3,.px-3{padding-right:1rem!important}.pb-3,.py-3{padding-bottom:1rem!important}.pl-3,.px-3{padding-left:1rem!important}.p-4{padding:1.5rem!important}.pt-4,.py-4{padding-top:1.5rem!important}.pr-4,.px-4{padding-right:1.5rem!important}.pb-4,.py-4{padding-bottom:1.5rem!important}.pl-4,.px-4{padding-left:1.5rem!important}.p-5{padding:3rem!important}.pt-5,.py-5{padding-top:3rem!important}.pr-5,.px-5{padding-right:3rem!important}.pb-5,.py-5{padding-bottom:3rem!important}.pl-5,.px-5{padding-left:3rem!important}.m-n1{margin:-.25rem!important}.mt-n1,.my-n1{margin-top:-.25rem!important}.mr-n1,.mx-n1{margin-right:-.25rem!important}.mb-n1,.my-n1{margin-bottom:-.25rem!important}.ml-n1,.mx-n1{margin-left:-.25rem!important}.m-n2{margin:-.5rem!important}.mt-n2,.my-n2{margin-top:-.5rem!important}.mr-n2,.mx-n2{margin-right:-.5rem!important}.mb-n2,.my-n2{margin-bottom:-.5rem!important}.ml-n2,.mx-n2{margin-left:-.5rem!important}.m-n3{margin:-1rem!important}.mt-n3,.my-n3{margin-top:-1rem!important}.mr-n3,.mx-n3{margin-right:-1rem!important}.mb-n3,.my-n3{margin-bottom:-1rem!important}.ml-n3,.mx-n3{margin-left:-1rem!important}.m-n4{margin:-1.5rem!important}.mt-n4,.my-n4{margin-top:-1.5rem!important}.mr-n4,.mx-n4{margin-right:-1.5rem!important}.mb-n4,.my-n4{margin-bottom:-1.5rem!important}.ml-n4,.mx-n4{margin-left:-1.5rem!important}.m-n5{margin:-3rem!important}.mt-n5,.my-n5{margin-top:-3rem!important}.mr-n5,.mx-n5{margin-right:-3rem!important}.mb-n5,.my-n5{margin-bottom:-3rem!important}.ml-n5,.mx-n5{margin-left:-3rem!important}.m-auto{margin:auto!important}.mt-auto,.my-auto{margin-top:auto!important}.mr-auto,.mx-auto{margin-right:auto!important}.mb-auto,.my-auto{margin-bottom:auto!important}.ml-auto,.mx-auto{margin-left:auto!important}@media 
(min-width:576px){.m-sm-0{margin:0!important}.mt-sm-0,.my-sm-0{margin-top:0!important}.mr-sm-0,.mx-sm-0{margin-right:0!important}.mb-sm-0,.my-sm-0{margin-bottom:0!important}.ml-sm-0,.mx-sm-0{margin-left:0!important}.m-sm-1{margin:.25rem!important}.mt-sm-1,.my-sm-1{margin-top:.25rem!important}.mr-sm-1,.mx-sm-1{margin-right:.25rem!important}.mb-sm-1,.my-sm-1{margin-bottom:.25rem!important}.ml-sm-1,.mx-sm-1{margin-left:.25rem!important}.m-sm-2{margin:.5rem!important}.mt-sm-2,.my-sm-2{margin-top:.5rem!important}.mr-sm-2,.mx-sm-2{margin-right:.5rem!important}.mb-sm-2,.my-sm-2{margin-bottom:.5rem!important}.ml-sm-2,.mx-sm-2{margin-left:.5rem!important}.m-sm-3{margin:1rem!important}.mt-sm-3,.my-sm-3{margin-top:1rem!important}.mr-sm-3,.mx-sm-3{margin-right:1rem!important}.mb-sm-3,.my-sm-3{margin-bottom:1rem!important}.ml-sm-3,.mx-sm-3{margin-left:1rem!important}.m-sm-4{margin:1.5rem!important}.mt-sm-4,.my-sm-4{margin-top:1.5rem!important}.mr-sm-4,.mx-sm-4{margin-right:1.5rem!important}.mb-sm-4,.my-sm-4{margin-bottom:1.5rem!important}.ml-sm-4,.mx-sm-4{margin-left:1.5rem!important}.m-sm-5{margin:3rem!important}.mt-sm-5,.my-sm-5{margin-top:3rem!important}.mr-sm-5,.mx-sm-5{margin-right:3rem!important}.mb-sm-5,.my-sm-5{margin-bottom:3rem!important}.ml-sm-5,.mx-sm-5{margin-left:3rem!important}.p-sm-0{padding:0!important}.pt-sm-0,.py-sm-0{padding-top:0!important}.pr-sm-0,.px-sm-0{padding-right:0!important}.pb-sm-0,.py-sm-0{padding-bottom:0!important}.pl-sm-0,.px-sm-0{padding-left:0!important}.p-sm-1{padding:.25rem!important}.pt-sm-1,.py-sm-1{padding-top:.25rem!important}.pr-sm-1,.px-sm-1{padding-right:.25rem!important}.pb-sm-1,.py-sm-1{padding-bottom:.25rem!important}.pl-sm-1,.px-sm-1{padding-left:.25rem!important}.p-sm-2{padding:.5rem!important}.pt-sm-2,.py-sm-2{padding-top:.5rem!important}.pr-sm-2,.px-sm-2{padding-right:.5rem!important}.pb-sm-2,.py-sm-2{padding-bottom:.5rem!important}.pl-sm-2,.px-sm-2{padding-left:.5rem!important}.p-sm-3{padding:1rem!important}.pt-sm-3,.py-sm-3{padding-top:1rem!important}.pr-sm-3,.px-sm-3{padding-right:1rem!important}.pb-sm-3,.py-sm-3{padding-bottom:1rem!important}.pl-sm-3,.px-sm-3{padding-left:1rem!important}.p-sm-4{padding:1.5rem!important}.pt-sm-4,.py-sm-4{padding-top:1.5rem!important}.pr-sm-4,.px-sm-4{padding-right:1.5rem!important}.pb-sm-4,.py-sm-4{padding-bottom:1.5rem!important}.pl-sm-4,.px-sm-4{padding-left:1.5rem!important}.p-sm-5{padding:3rem!important}.pt-sm-5,.py-sm-5{padding-top:3rem!important}.pr-sm-5,.px-sm-5{padding-right:3rem!important}.pb-sm-5,.py-sm-5{padding-bottom:3rem!important}.pl-sm-5,.px-sm-5{padding-left:3rem!important}.m-sm-n1{margin:-.25rem!important}.mt-sm-n1,.my-sm-n1{margin-top:-.25rem!important}.mr-sm-n1,.mx-sm-n1{margin-right:-.25rem!important}.mb-sm-n1,.my-sm-n1{margin-bottom:-.25rem!important}.ml-sm-n1,.mx-sm-n1{margin-left:-.25rem!important}.m-sm-n2{margin:-.5rem!important}.mt-sm-n2,.my-sm-n2{margin-top:-.5rem!important}.mr-sm-n2,.mx-sm-n2{margin-right:-.5rem!important}.mb-sm-n2,.my-sm-n2{margin-bottom:-.5rem!important}.ml-sm-n2,.mx-sm-n2{margin-left:-.5rem!important}.m-sm-n3{margin:-1rem!important}.mt-sm-n3,.my-sm-n3{margin-top:-1rem!important}.mr-sm-n3,.mx-sm-n3{margin-right:-1rem!important}.mb-sm-n3,.my-sm-n3{margin-bottom:-1rem!important}.ml-sm-n3,.mx-sm-n3{margin-left:-1rem!important}.m-sm-n4{margin:-1.5rem!important}.mt-sm-n4,.my-sm-n4{margin-top:-1.5rem!important}.mr-sm-n4,.mx-sm-n4{margin-right:-1.5rem!important}.mb-sm-n4,.my-sm-n4{margin-bottom:-1.5rem!important}.ml-sm-n4,.mx-sm-n4{margin-left:-1.5rem!important}.m-sm-n5{margi
n:-3rem!important}.mt-sm-n5,.my-sm-n5{margin-top:-3rem!important}.mr-sm-n5,.mx-sm-n5{margin-right:-3rem!important}.mb-sm-n5,.my-sm-n5{margin-bottom:-3rem!important}.ml-sm-n5,.mx-sm-n5{margin-left:-3rem!important}.m-sm-auto{margin:auto!important}.mt-sm-auto,.my-sm-auto{margin-top:auto!important}.mr-sm-auto,.mx-sm-auto{margin-right:auto!important}.mb-sm-auto,.my-sm-auto{margin-bottom:auto!important}.ml-sm-auto,.mx-sm-auto{margin-left:auto!important}}@media (min-width:768px){.m-md-0{margin:0!important}.mt-md-0,.my-md-0{margin-top:0!important}.mr-md-0,.mx-md-0{margin-right:0!important}.mb-md-0,.my-md-0{margin-bottom:0!important}.ml-md-0,.mx-md-0{margin-left:0!important}.m-md-1{margin:.25rem!important}.mt-md-1,.my-md-1{margin-top:.25rem!important}.mr-md-1,.mx-md-1{margin-right:.25rem!important}.mb-md-1,.my-md-1{margin-bottom:.25rem!important}.ml-md-1,.mx-md-1{margin-left:.25rem!important}.m-md-2{margin:.5rem!important}.mt-md-2,.my-md-2{margin-top:.5rem!important}.mr-md-2,.mx-md-2{margin-right:.5rem!important}.mb-md-2,.my-md-2{margin-bottom:.5rem!important}.ml-md-2,.mx-md-2{margin-left:.5rem!important}.m-md-3{margin:1rem!important}.mt-md-3,.my-md-3{margin-top:1rem!important}.mr-md-3,.mx-md-3{margin-right:1rem!important}.mb-md-3,.my-md-3{margin-bottom:1rem!important}.ml-md-3,.mx-md-3{margin-left:1rem!important}.m-md-4{margin:1.5rem!important}.mt-md-4,.my-md-4{margin-top:1.5rem!important}.mr-md-4,.mx-md-4{margin-right:1.5rem!important}.mb-md-4,.my-md-4{margin-bottom:1.5rem!important}.ml-md-4,.mx-md-4{margin-left:1.5rem!important}.m-md-5{margin:3rem!important}.mt-md-5,.my-md-5{margin-top:3rem!important}.mr-md-5,.mx-md-5{margin-right:3rem!important}.mb-md-5,.my-md-5{margin-bottom:3rem!important}.ml-md-5,.mx-md-5{margin-left:3rem!important}.p-md-0{padding:0!important}.pt-md-0,.py-md-0{padding-top:0!important}.pr-md-0,.px-md-0{padding-right:0!important}.pb-md-0,.py-md-0{padding-bottom:0!important}.pl-md-0,.px-md-0{padding-left:0!important}.p-md-1{padding:.25rem!important}.pt-md-1,.py-md-1{padding-top:.25rem!important}.pr-md-1,.px-md-1{padding-right:.25rem!important}.pb-md-1,.py-md-1{padding-bottom:.25rem!important}.pl-md-1,.px-md-1{padding-left:.25rem!important}.p-md-2{padding:.5rem!important}.pt-md-2,.py-md-2{padding-top:.5rem!important}.pr-md-2,.px-md-2{padding-right:.5rem!important}.pb-md-2,.py-md-2{padding-bottom:.5rem!important}.pl-md-2,.px-md-2{padding-left:.5rem!important}.p-md-3{padding:1rem!important}.pt-md-3,.py-md-3{padding-top:1rem!important}.pr-md-3,.px-md-3{padding-right:1rem!important}.pb-md-3,.py-md-3{padding-bottom:1rem!important}.pl-md-3,.px-md-3{padding-left:1rem!important}.p-md-4{padding:1.5rem!important}.pt-md-4,.py-md-4{padding-top:1.5rem!important}.pr-md-4,.px-md-4{padding-right:1.5rem!important}.pb-md-4,.py-md-4{padding-bottom:1.5rem!important}.pl-md-4,.px-md-4{padding-left:1.5rem!important}.p-md-5{padding:3rem!important}.pt-md-5,.py-md-5{padding-top:3rem!important}.pr-md-5,.px-md-5{padding-right:3rem!important}.pb-md-5,.py-md-5{padding-bottom:3rem!important}.pl-md-5,.px-md-5{padding-left:3rem!important}.m-md-n1{margin:-.25rem!important}.mt-md-n1,.my-md-n1{margin-top:-.25rem!important}.mr-md-n1,.mx-md-n1{margin-right:-.25rem!important}.mb-md-n1,.my-md-n1{margin-bottom:-.25rem!important}.ml-md-n1,.mx-md-n1{margin-left:-.25rem!important}.m-md-n2{margin:-.5rem!important}.mt-md-n2,.my-md-n2{margin-top:-.5rem!important}.mr-md-n2,.mx-md-n2{margin-right:-.5rem!important}.mb-md-n2,.my-md-n2{margin-bottom:-.5rem!important}.ml-md-n2,.mx-md-n2{margin-left:-.5rem!important}.m-md-n3{margin:-
1rem!important}.mt-md-n3,.my-md-n3{margin-top:-1rem!important}.mr-md-n3,.mx-md-n3{margin-right:-1rem!important}.mb-md-n3,.my-md-n3{margin-bottom:-1rem!important}.ml-md-n3,.mx-md-n3{margin-left:-1rem!important}.m-md-n4{margin:-1.5rem!important}.mt-md-n4,.my-md-n4{margin-top:-1.5rem!important}.mr-md-n4,.mx-md-n4{margin-right:-1.5rem!important}.mb-md-n4,.my-md-n4{margin-bottom:-1.5rem!important}.ml-md-n4,.mx-md-n4{margin-left:-1.5rem!important}.m-md-n5{margin:-3rem!important}.mt-md-n5,.my-md-n5{margin-top:-3rem!important}.mr-md-n5,.mx-md-n5{margin-right:-3rem!important}.mb-md-n5,.my-md-n5{margin-bottom:-3rem!important}.ml-md-n5,.mx-md-n5{margin-left:-3rem!important}.m-md-auto{margin:auto!important}.mt-md-auto,.my-md-auto{margin-top:auto!important}.mr-md-auto,.mx-md-auto{margin-right:auto!important}.mb-md-auto,.my-md-auto{margin-bottom:auto!important}.ml-md-auto,.mx-md-auto{margin-left:auto!important}}@media (min-width:992px){.m-lg-0{margin:0!important}.mt-lg-0,.my-lg-0{margin-top:0!important}.mr-lg-0,.mx-lg-0{margin-right:0!important}.mb-lg-0,.my-lg-0{margin-bottom:0!important}.ml-lg-0,.mx-lg-0{margin-left:0!important}.m-lg-1{margin:.25rem!important}.mt-lg-1,.my-lg-1{margin-top:.25rem!important}.mr-lg-1,.mx-lg-1{margin-right:.25rem!important}.mb-lg-1,.my-lg-1{margin-bottom:.25rem!important}.ml-lg-1,.mx-lg-1{margin-left:.25rem!important}.m-lg-2{margin:.5rem!important}.mt-lg-2,.my-lg-2{margin-top:.5rem!important}.mr-lg-2,.mx-lg-2{margin-right:.5rem!important}.mb-lg-2,.my-lg-2{margin-bottom:.5rem!important}.ml-lg-2,.mx-lg-2{margin-left:.5rem!important}.m-lg-3{margin:1rem!important}.mt-lg-3,.my-lg-3{margin-top:1rem!important}.mr-lg-3,.mx-lg-3{margin-right:1rem!important}.mb-lg-3,.my-lg-3{margin-bottom:1rem!important}.ml-lg-3,.mx-lg-3{margin-left:1rem!important}.m-lg-4{margin:1.5rem!important}.mt-lg-4,.my-lg-4{margin-top:1.5rem!important}.mr-lg-4,.mx-lg-4{margin-right:1.5rem!important}.mb-lg-4,.my-lg-4{margin-bottom:1.5rem!important}.ml-lg-4,.mx-lg-4{margin-left:1.5rem!important}.m-lg-5{margin:3rem!important}.mt-lg-5,.my-lg-5{margin-top:3rem!important}.mr-lg-5,.mx-lg-5{margin-right:3rem!important}.mb-lg-5,.my-lg-5{margin-bottom:3rem!important}.ml-lg-5,.mx-lg-5{margin-left:3rem!important}.p-lg-0{padding:0!important}.pt-lg-0,.py-lg-0{padding-top:0!important}.pr-lg-0,.px-lg-0{padding-right:0!important}.pb-lg-0,.py-lg-0{padding-bottom:0!important}.pl-lg-0,.px-lg-0{padding-left:0!important}.p-lg-1{padding:.25rem!important}.pt-lg-1,.py-lg-1{padding-top:.25rem!important}.pr-lg-1,.px-lg-1{padding-right:.25rem!important}.pb-lg-1,.py-lg-1{padding-bottom:.25rem!important}.pl-lg-1,.px-lg-1{padding-left:.25rem!important}.p-lg-2{padding:.5rem!important}.pt-lg-2,.py-lg-2{padding-top:.5rem!important}.pr-lg-2,.px-lg-2{padding-right:.5rem!important}.pb-lg-2,.py-lg-2{padding-bottom:.5rem!important}.pl-lg-2,.px-lg-2{padding-left:.5rem!important}.p-lg-3{padding:1rem!important}.pt-lg-3,.py-lg-3{padding-top:1rem!important}.pr-lg-3,.px-lg-3{padding-right:1rem!important}.pb-lg-3,.py-lg-3{padding-bottom:1rem!important}.pl-lg-3,.px-lg-3{padding-left:1rem!important}.p-lg-4{padding:1.5rem!important}.pt-lg-4,.py-lg-4{padding-top:1.5rem!important}.pr-lg-4,.px-lg-4{padding-right:1.5rem!important}.pb-lg-4,.py-lg-4{padding-bottom:1.5rem!important}.pl-lg-4,.px-lg-4{padding-left:1.5rem!important}.p-lg-5{padding:3rem!important}.pt-lg-5,.py-lg-5{padding-top:3rem!important}.pr-lg-5,.px-lg-5{padding-right:3rem!important}.pb-lg-5,.py-lg-5{padding-bottom:3rem!important}.pl-lg-5,.px-lg-5{padding-left:3rem!important}.m-lg-n1{margin:-.25rem!i
mportant}.mt-lg-n1,.my-lg-n1{margin-top:-.25rem!important}.mr-lg-n1,.mx-lg-n1{margin-right:-.25rem!important}.mb-lg-n1,.my-lg-n1{margin-bottom:-.25rem!important}.ml-lg-n1,.mx-lg-n1{margin-left:-.25rem!important}.m-lg-n2{margin:-.5rem!important}.mt-lg-n2,.my-lg-n2{margin-top:-.5rem!important}.mr-lg-n2,.mx-lg-n2{margin-right:-.5rem!important}.mb-lg-n2,.my-lg-n2{margin-bottom:-.5rem!important}.ml-lg-n2,.mx-lg-n2{margin-left:-.5rem!important}.m-lg-n3{margin:-1rem!important}.mt-lg-n3,.my-lg-n3{margin-top:-1rem!important}.mr-lg-n3,.mx-lg-n3{margin-right:-1rem!important}.mb-lg-n3,.my-lg-n3{margin-bottom:-1rem!important}.ml-lg-n3,.mx-lg-n3{margin-left:-1rem!important}.m-lg-n4{margin:-1.5rem!important}.mt-lg-n4,.my-lg-n4{margin-top:-1.5rem!important}.mr-lg-n4,.mx-lg-n4{margin-right:-1.5rem!important}.mb-lg-n4,.my-lg-n4{margin-bottom:-1.5rem!important}.ml-lg-n4,.mx-lg-n4{margin-left:-1.5rem!important}.m-lg-n5{margin:-3rem!important}.mt-lg-n5,.my-lg-n5{margin-top:-3rem!important}.mr-lg-n5,.mx-lg-n5{margin-right:-3rem!important}.mb-lg-n5,.my-lg-n5{margin-bottom:-3rem!important}.ml-lg-n5,.mx-lg-n5{margin-left:-3rem!important}.m-lg-auto{margin:auto!important}.mt-lg-auto,.my-lg-auto{margin-top:auto!important}.mr-lg-auto,.mx-lg-auto{margin-right:auto!important}.mb-lg-auto,.my-lg-auto{margin-bottom:auto!important}.ml-lg-auto,.mx-lg-auto{margin-left:auto!important}}@media (min-width:1200px){.m-xl-0{margin:0!important}.mt-xl-0,.my-xl-0{margin-top:0!important}.mr-xl-0,.mx-xl-0{margin-right:0!important}.mb-xl-0,.my-xl-0{margin-bottom:0!important}.ml-xl-0,.mx-xl-0{margin-left:0!important}.m-xl-1{margin:.25rem!important}.mt-xl-1,.my-xl-1{margin-top:.25rem!important}.mr-xl-1,.mx-xl-1{margin-right:.25rem!important}.mb-xl-1,.my-xl-1{margin-bottom:.25rem!important}.ml-xl-1,.mx-xl-1{margin-left:.25rem!important}.m-xl-2{margin:.5rem!important}.mt-xl-2,.my-xl-2{margin-top:.5rem!important}.mr-xl-2,.mx-xl-2{margin-right:.5rem!important}.mb-xl-2,.my-xl-2{margin-bottom:.5rem!important}.ml-xl-2,.mx-xl-2{margin-left:.5rem!important}.m-xl-3{margin:1rem!important}.mt-xl-3,.my-xl-3{margin-top:1rem!important}.mr-xl-3,.mx-xl-3{margin-right:1rem!important}.mb-xl-3,.my-xl-3{margin-bottom:1rem!important}.ml-xl-3,.mx-xl-3{margin-left:1rem!important}.m-xl-4{margin:1.5rem!important}.mt-xl-4,.my-xl-4{margin-top:1.5rem!important}.mr-xl-4,.mx-xl-4{margin-right:1.5rem!important}.mb-xl-4,.my-xl-4{margin-bottom:1.5rem!important}.ml-xl-4,.mx-xl-4{margin-left:1.5rem!important}.m-xl-5{margin:3rem!important}.mt-xl-5,.my-xl-5{margin-top:3rem!important}.mr-xl-5,.mx-xl-5{margin-right:3rem!important}.mb-xl-5,.my-xl-5{margin-bottom:3rem!important}.ml-xl-5,.mx-xl-5{margin-left:3rem!important}.p-xl-0{padding:0!important}.pt-xl-0,.py-xl-0{padding-top:0!important}.pr-xl-0,.px-xl-0{padding-right:0!important}.pb-xl-0,.py-xl-0{padding-bottom:0!important}.pl-xl-0,.px-xl-0{padding-left:0!important}.p-xl-1{padding:.25rem!important}.pt-xl-1,.py-xl-1{padding-top:.25rem!important}.pr-xl-1,.px-xl-1{padding-right:.25rem!important}.pb-xl-1,.py-xl-1{padding-bottom:.25rem!important}.pl-xl-1,.px-xl-1{padding-left:.25rem!important}.p-xl-2{padding:.5rem!important}.pt-xl-2,.py-xl-2{padding-top:.5rem!important}.pr-xl-2,.px-xl-2{padding-right:.5rem!important}.pb-xl-2,.py-xl-2{padding-bottom:.5rem!important}.pl-xl-2,.px-xl-2{padding-left:.5rem!important}.p-xl-3{padding:1rem!important}.pt-xl-3,.py-xl-3{padding-top:1rem!important}.pr-xl-3,.px-xl-3{padding-right:1rem!important}.pb-xl-3,.py-xl-3{padding-bottom:1rem!important}.pl-xl-3,.px-xl-3{padding-left:1rem!important}.p-xl-4{p
adding:1.5rem!important}.pt-xl-4,.py-xl-4{padding-top:1.5rem!important}.pr-xl-4,.px-xl-4{padding-right:1.5rem!important}.pb-xl-4,.py-xl-4{padding-bottom:1.5rem!important}.pl-xl-4,.px-xl-4{padding-left:1.5rem!important}.p-xl-5{padding:3rem!important}.pt-xl-5,.py-xl-5{padding-top:3rem!important}.pr-xl-5,.px-xl-5{padding-right:3rem!important}.pb-xl-5,.py-xl-5{padding-bottom:3rem!important}.pl-xl-5,.px-xl-5{padding-left:3rem!important}.m-xl-n1{margin:-.25rem!important}.mt-xl-n1,.my-xl-n1{margin-top:-.25rem!important}.mr-xl-n1,.mx-xl-n1{margin-right:-.25rem!important}.mb-xl-n1,.my-xl-n1{margin-bottom:-.25rem!important}.ml-xl-n1,.mx-xl-n1{margin-left:-.25rem!important}.m-xl-n2{margin:-.5rem!important}.mt-xl-n2,.my-xl-n2{margin-top:-.5rem!important}.mr-xl-n2,.mx-xl-n2{margin-right:-.5rem!important}.mb-xl-n2,.my-xl-n2{margin-bottom:-.5rem!important}.ml-xl-n2,.mx-xl-n2{margin-left:-.5rem!important}.m-xl-n3{margin:-1rem!important}.mt-xl-n3,.my-xl-n3{margin-top:-1rem!important}.mr-xl-n3,.mx-xl-n3{margin-right:-1rem!important}.mb-xl-n3,.my-xl-n3{margin-bottom:-1rem!important}.ml-xl-n3,.mx-xl-n3{margin-left:-1rem!important}.m-xl-n4{margin:-1.5rem!important}.mt-xl-n4,.my-xl-n4{margin-top:-1.5rem!important}.mr-xl-n4,.mx-xl-n4{margin-right:-1.5rem!important}.mb-xl-n4,.my-xl-n4{margin-bottom:-1.5rem!important}.ml-xl-n4,.mx-xl-n4{margin-left:-1.5rem!important}.m-xl-n5{margin:-3rem!important}.mt-xl-n5,.my-xl-n5{margin-top:-3rem!important}.mr-xl-n5,.mx-xl-n5{margin-right:-3rem!important}.mb-xl-n5,.my-xl-n5{margin-bottom:-3rem!important}.ml-xl-n5,.mx-xl-n5{margin-left:-3rem!important}.m-xl-auto{margin:auto!important}.mt-xl-auto,.my-xl-auto{margin-top:auto!important}.mr-xl-auto,.mx-xl-auto{margin-right:auto!important}.mb-xl-auto,.my-xl-auto{margin-bottom:auto!important}.ml-xl-auto,.mx-xl-auto{margin-left:auto!important}}.stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;pointer-events:auto;content:"";background-color:rgba(0,0,0,0)}.text-monospace{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace!important}.text-justify{text-align:justify!important}.text-wrap{white-space:normal!important}.text-nowrap{white-space:nowrap!important}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text-left{text-align:left!important}.text-right{text-align:right!important}.text-center{text-align:center!important}@media (min-width:576px){.text-sm-left{text-align:left!important}.text-sm-right{text-align:right!important}.text-sm-center{text-align:center!important}}@media (min-width:768px){.text-md-left{text-align:left!important}.text-md-right{text-align:right!important}.text-md-center{text-align:center!important}}@media (min-width:992px){.text-lg-left{text-align:left!important}.text-lg-right{text-align:right!important}.text-lg-center{text-align:center!important}}@media 
(min-width:1200px){.text-xl-left{text-align:left!important}.text-xl-right{text-align:right!important}.text-xl-center{text-align:center!important}}.text-lowercase{text-transform:lowercase!important}.text-uppercase{text-transform:uppercase!important}.text-capitalize{text-transform:capitalize!important}.font-weight-light{font-weight:300!important}.font-weight-lighter{font-weight:lighter!important}.font-weight-normal{font-weight:400!important}.font-weight-bold{font-weight:700!important}.font-weight-bolder{font-weight:bolder!important}.font-italic{font-style:italic!important}.text-white{color:#fff!important}.text-primary{color:#007bff!important}a.text-primary:focus,a.text-primary:hover{color:#0056b3!important}.text-secondary{color:#6c757d!important}a.text-secondary:focus,a.text-secondary:hover{color:#494f54!important}.text-success{color:#28a745!important}a.text-success:focus,a.text-success:hover{color:#19692c!important}.text-info{color:#17a2b8!important}a.text-info:focus,a.text-info:hover{color:#0f6674!important}.text-warning{color:#ffc107!important}a.text-warning:focus,a.text-warning:hover{color:#ba8b00!important}.text-danger{color:#dc3545!important}a.text-danger:focus,a.text-danger:hover{color:#a71d2a!important}.text-light{color:#f8f9fa!important}a.text-light:focus,a.text-light:hover{color:#cbd3da!important}.text-dark{color:#343a40!important}a.text-dark:focus,a.text-dark:hover{color:#121416!important}.text-body{color:#212529!important}.text-muted{color:#6c757d!important}.text-black-50{color:rgba(0,0,0,.5)!important}.text-white-50{color:rgba(255,255,255,.5)!important}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.text-decoration-none{text-decoration:none!important}.text-break{word-break:break-word!important;word-wrap:break-word!important}.text-reset{color:inherit!important}.visible{visibility:visible!important}.invisible{visibility:hidden!important}@media print{*,::after,::before{text-shadow:none!important;box-shadow:none!important}a:not(.btn){text-decoration:underline}abbr[title]::after{content:" (" attr(title) ")"}pre{white-space:pre-wrap!important}blockquote,pre{border:1px solid #adb5bd;page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}h2,h3,p{orphans:3;widows:3}h2,h3{page-break-after:avoid}@page{size:a3}body{min-width:992px!important}.container{min-width:992px!important}.navbar{display:none}.badge{border:1px solid #000}.table{border-collapse:collapse!important}.table td,.table th{background-color:#fff!important}.table-bordered td,.table-bordered th{border:1px solid #dee2e6!important}.table-dark{color:inherit}.table-dark tbody+tbody,.table-dark td,.table-dark th,.table-dark thead th{border-color:#dee2e6}.table .thead-dark th{color:inherit;border-color:#dee2e6}} +/*# sourceMappingURL=bootstrap.min.css.map */ \ No newline at end of file diff --git a/test/fixtures/cache-tests/spec/lib/clean-for-DTD.xslt b/test/fixtures/cache-tests/spec/lib/clean-for-DTD.xslt new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/clean-for-DTD.xslt @@ -0,0 +1,2356 @@ +<!-- + Strip rfc2629.xslt extensions, generating XML input for "official" xml2rfc + + Copyright (c) 2006-2020, Julian Reschke (julian.reschke@greenbytes.de) + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Julian Reschke nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +--> + +<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" + version="1.0" + xmlns:exslt="http://exslt.org/common" + xmlns:ed="http://greenbytes.de/2002/rfcedit" + xmlns:grddl="http://www.w3.org/2003/g/data-view#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:x="http://purl.org/net/xml2rfc/ext" + xmlns:xi="http://www.w3.org/2001/XInclude" + xmlns:xhtml="http://www.w3.org/1999/xhtml" + exclude-result-prefixes="ed exslt grddl rdf svg x xi xhtml" +> + +<!-- re-use some of the default RFC2629.xslt rules --> +<xsl:import href="rfc2629-no-doctype.xslt"/> + +<!-- undo strip-space decls --> +<xsl:preserve-space elements="*"/> + +<!-- generate UTF-8 XML with no doctype decl and artwork/sourcecode serialized as CDATA --> +<xsl:output method="xml" version="1.0" encoding="UTF-8" cdata-section-elements="artwork sourcecode" /> + +<!-- Workaround for http://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/297 --> +<xsl:param name="xml2rfc-ext-strip-vbare">false</xsl:param> + +<!-- xml2rfc target --> +<xsl:param name="xml2rfc-ext-xml2rfc-backend"> + <xsl:variable name="default"> + <xsl:choose> + <xsl:when test="$pub-yearmonth &gt;= 201705">201706</xsl:when> + <xsl:when test="$pub-yearmonth &gt; 201612">201610</xsl:when> + <xsl:otherwise>201510</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'xml2rfc-backend'"/> + <xsl:with-param name="default" select="$default"/> + </xsl:call-template> +</xsl:param> +<xsl:param name="xml2rfc-ext-xml2rfc-voc">2</xsl:param> + +<!-- kick into cleanup mode --> +<xsl:template match="/"> + <xsl:text>&#10;</xsl:text> + <xsl:comment> + This XML document is the output of clean-for-DTD.xslt; a tool that strips + extensions to RFC 7749 from documents for processing with xml2rfc. 
+</xsl:comment> +<xsl:text>&#10;</xsl:text> +<xsl:comment>TARGET-GENERATOR: <xsl:value-of select="$xml2rfc-ext-xml2rfc-backend"/></xsl:comment> +<xsl:text>&#10;</xsl:text> +<xsl:comment>TARGET-VOCABULARY: <xsl:value-of select="$xml2rfc-ext-xml2rfc-voc"/></xsl:comment> + <xsl:apply-templates select="/" mode="cleanup"/> +</xsl:template> + +<!-- rules for identity transformations --> + +<xsl:template match="processing-instruction()" mode="cleanup"> + <xsl:text>&#10;</xsl:text> + <xsl:copy/> +</xsl:template> + +<xsl:template match="comment()|@*" mode="cleanup"><xsl:copy/></xsl:template> + +<xsl:template match="text()" mode="cleanup"><xsl:copy/></xsl:template> + +<xsl:template match="/" mode="cleanup"> + <xsl:copy><xsl:apply-templates select="node()" mode="cleanup" /></xsl:copy> +</xsl:template> + +<xsl:template match="*" mode="cleanup"> + <xsl:element name="{local-name()}"> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </xsl:element> +</xsl:template> + + +<!-- remove PI extensions --> + +<xsl:template match="processing-instruction('rfc-ext')" mode="cleanup"/> +<xsl:template match="processing-instruction('BEGININC')" mode="cleanup"/> +<xsl:template match="processing-instruction('ENDINC')" mode="cleanup"/> + +<!-- process include PI --> +<xsl:template match="processing-instruction('rfc')" mode="cleanup"> + <xsl:variable name="include"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="."/> + <xsl:with-param name="attr" select="'include'"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$include=''"> + <xsl:text>&#10;</xsl:text> + <xsl:copy/> + </xsl:when> + <xsl:when test="substring($include, string-length($include) - 3) != '.xml'"> + <xsl:apply-templates select="document(concat($include,'.xml'))" mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="document($include)" mode="cleanup"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + + +<!-- add issues appendix --> + +<xsl:template match="back" mode="cleanup"> + <back> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + <xsl:if test="not(/*/@ed:suppress-issue-appendix='yes') and //ed:issue[@status='closed']"> + <section title="Resolved issues (to be removed by RFC Editor before publication)"> + <t> + Issues that were either rejected or resolved in this version of this + document. 
+ </t> + <xsl:apply-templates select="//ed:issue[@status='closed']" mode="issues" /> + </section> + </xsl:if> + <xsl:if test="not(/*/@ed:suppress-issue-appendix='yes') and //ed:issue[@status='open']"> + <section title="Open issues (to be removed by RFC Editor prior to publication)"> + <xsl:apply-templates select="//ed:issue[@status!='closed']" mode="issues" /> + </section> + </xsl:if> + </back> +</xsl:template> + + +<!-- V3 features --> + +<xsl:template match="boilerplate" mode="cleanup"/> +<xsl:template match="link" mode="cleanup"/> +<xsl:template match="rfc/@scripts" mode="cleanup"/> +<xsl:template match="rfc/@version" mode="cleanup"> + <xsl:if test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:copy-of select="."/> + </xsl:if> +</xsl:template> +<xsl:template match="@pn" mode="cleanup"/> + +<xsl:template match="br" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <br> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </br> + </xsl:when> + <xsl:otherwise> + <xsl:text> </xsl:text> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:u-map" mode="cleanup"/> +<xsl:template match="u" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <u> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </u> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="emit-u"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- experimental for QUIC tls draft --> +<xsl:template match="t/contact" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <contact> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </contact> + </xsl:when> + <xsl:when test="@asciiFullname"> + <xsl:value-of select="@asciiFullname"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@fullname"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- extensions --> + +<xsl:template match="x:abnf-char-sequence" mode="cleanup"> + <xsl:choose> + <xsl:when test="substring(.,1,1) != '&quot;' or substring(.,string-length(.),1) != '&quot;'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg" select="'contents of x:abnf-char-sequence needs to be quoted.'" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:text>%x</xsl:text> + <xsl:call-template name="to-abnf-char-sequence"> + <xsl:with-param name="chars" select="substring(.,2,string-length(.)-2)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:anchor-alias" mode="cleanup"/> + +<xsl:template match="x:bcp14|bcp14" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <bcp14> + <xsl:apply-templates mode="cleanup"/> + </bcp14> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates mode="cleanup"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:assign-section-number" mode="cleanup"/> +<xsl:template match="x:link" mode="cleanup"/> +<xsl:template match="x:source" mode="cleanup"/> +<xsl:template match="x:feedback" mode="cleanup"/> +<xsl:template match="date/@x:include-day" mode="cleanup"/> + +<xsl:template match="x:parse-xml" mode="cleanup"> + <xsl:apply-templates/> +</xsl:template> + +<xsl:template match="x:prose" mode="cleanup"> + <xsl:variable name="text" select="."/> + <xsl:comment>Converted from rfc2629.xslt x:prose extension</xsl:comment> + <xsl:choose> + <xsl:when test="contains($text,' ')"> + <seriesInfo name="{substring-before($text,' ')}" 
value="{substring-after($text,' ')}"/> + </xsl:when> + <xsl:otherwise> + <seriesInfo name="" value="{$text}"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="t/@keepWithNext|t/@keepWithPrevious" mode="cleanup"/> + +<xsl:template match="refcontent" mode="cleanup"> + <xsl:variable name="text"> + <xsl:apply-templates mode="cleanup"/> + </xsl:variable> + <xsl:comment>Converted from rfc2629.xslt refcontent extension</xsl:comment> + <xsl:choose> + <xsl:when test="contains($text,' ')"> + <seriesInfo name="{substring-before($text,' ')}" value="{substring-after($text,' ')}"/> + </xsl:when> + <xsl:otherwise> + <seriesInfo name="" value="{$text}"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="postalLine" mode="cleanup"> + <xsl:comment>converted from v3 &lt;postalLine&gt;</xsl:comment> + <street><xsl:value-of select="."/></street> +</xsl:template> + +<xsl:template match="x:ref" mode="cleanup"> + <xsl:variable name="val" select="normalize-space(.)"/> + <xsl:variable name="target" select="//*[@anchor and (@anchor=$val or x:anchor-alias/@value=$val)][not(ancestor::ed:del)] | //reference/x:source[x:defines=$val]"/> + <xsl:if test="count($target)>1"> + <xsl:message terminate="yes">FATAL: multiple x:ref targets found for <xsl:value-of select="$val"/>.</xsl:message> + </xsl:if> + <xsl:choose> + <xsl:when test="$target/self::x:source"> + <!-- drop it--> + <xsl:value-of select="."/> + </xsl:when> + <xsl:when test="$target"> + <xsl:variable name="current" select="."/> + <xsl:for-each select="$target"> + <!-- make it the context --> + <xsl:choose> + <xsl:when test="self::preamble"> + <!-- it's not an element we can link to --> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">couldn't create the link as <xsl:value-of select="name()"/> does not support the anchor attribute.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$current"/> + </xsl:when> + <xsl:otherwise> + <xref target="{$target/@anchor}" format="none"><xsl:value-of select="$current"/></xref> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:when test="//x:source"> + <xsl:variable name="ref" select="."/> + <xsl:variable name="out"> + <!-- try referenced documents one by one --> + <xsl:for-each select="//reference[x:source]"> + <xsl:variable name="extdoc" select="document(x:source/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor and (x:anchor-alias/@value=$val)]"/> + <xsl:choose> + <xsl:when test="not($nodes)"> + <xsl:call-template name="trace"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' not found in source file '<xsl:value-of select="x:source/@href"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' found in source file '<xsl:value-of select="x:source/@href"/>'.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ref"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:variable> + <xsl:copy-of select="$out"/> + <xsl:if test="string-length($out)=0"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' not found anywhere in references.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$val"/> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">internal link target for '<xsl:value-of select="$val"/>' does not 
exist.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:blockquote|blockquote" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <blockquote> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </blockquote> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="blockquote-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="blockquote-to-v2"> + <t> + <xsl:apply-templates select="@anchor" mode="cleanup"/> + <list> + <xsl:choose> + <xsl:when test="t|ul|ol|dl|artwork|figure|sourcecode"> + <xsl:apply-templates mode="cleanup" /> + </xsl:when> + <xsl:otherwise> + <t> + <xsl:apply-templates mode="cleanup" /> + </t> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@quotedFrom"> + <t> + <xsl:text>&#8212; </xsl:text> + <xsl:choose> + <xsl:when test="@cite"><eref target="{@cite}"><xsl:value-of select="@quotedFrom"/></eref></xsl:when> + <xsl:otherwise><xsl:value-of select="@quotedFrom"/></xsl:otherwise> + </xsl:choose> + </t> + </xsl:if> + </list> + </t> +</xsl:template> + +<xsl:template match="li/blockquote" mode="cleanup"> + <list style="empty"> + <xsl:choose> + <xsl:when test="t|ul|ol|dl|artwork|figure|sourcecode"> + <xsl:apply-templates mode="cleanup" /> + </xsl:when> + <xsl:otherwise> + <t> + <xsl:apply-templates mode="cleanup" /> + </t> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@quotedFrom"> + <t> + <xsl:text>&#8212; </xsl:text> + <xsl:choose> + <xsl:when test="@cite"><eref target="{@cite}"><xsl:value-of select="@quotedFrom"/></eref></xsl:when> + <xsl:otherwise><xsl:value-of select="@quotedFrom"/></xsl:otherwise> + </xsl:choose> + </t> + </xsl:if> + </list> +</xsl:template> + +<xsl:template match="x:h" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <strong> + <xsl:apply-templates mode="cleanup"/> + </strong> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates mode="cleanup" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:highlight" mode="cleanup"> + <xsl:apply-templates mode="cleanup" /> +</xsl:template> + +<xsl:template match="x:lt" mode="cleanup"> + <t> + <xsl:apply-templates select="@hangText|@anchor" mode="cleanup"/> + <xsl:for-each select="t"> + <xsl:apply-templates mode="cleanup"/> + <xsl:if test="position()!=last()"> + <vspace blankLines="1"/> + </xsl:if> + </xsl:for-each> + </t> +</xsl:template> + +<xsl:template match="x:note|aside" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <aside> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </aside> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="aside-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="aside-to-v2"> + <t> + <xsl:apply-templates select="@anchor" mode="cleanup"/> + <list> + <xsl:apply-templates mode="cleanup"/> + </list> + </t> +</xsl:template> + +<xsl:template match="x:q" mode="cleanup"> + <xsl:text>"</xsl:text> + <xsl:apply-templates mode="cleanup"/> + <xsl:text>"</xsl:text> +</xsl:template> + +<xsl:template match="x:dfn" mode="cleanup"> + <!-- help xml2rfc to keep dfn and following text on the same page --> + <!-- removed for now because it broke httpbis-p2 (def of 200 OK in -25) + <xsl:if test="not(preceding-sibling::x:dfn) and count(following-sibling::list)=1 and normalize-space(../text()='')"> + <xsl:processing-instruction name="rfc">needLines="4"</xsl:processing-instruction> + 
</xsl:if>--> + <xsl:apply-templates mode="cleanup"/> +</xsl:template> + +<xsl:template match="x:sup|sup" mode="cleanup"> + <xsl:text>^</xsl:text> + <xsl:apply-templates mode="cleanup" /> +</xsl:template> + +<xsl:template match="sub" mode="cleanup"> + <xsl:text>_</xsl:text> + <xsl:apply-templates mode="cleanup" /> +</xsl:template> + +<xsl:template match="x:span" mode="cleanup"> + <xsl:apply-templates mode="cleanup" /> +</xsl:template> +<xsl:template match="x:span/@anchor" mode="cleanup"/> + +<xsl:template match="author/@asciiFullname" mode="cleanup"/> +<xsl:template match="author/@asciiInitials" mode="cleanup"/> +<xsl:template match="author/@asciiSurname" mode="cleanup"/> + +<xsl:template match="author/@surname" mode="cleanup"> + <xsl:choose> + <xsl:when test="../@asciiSurname!=''"> + <xsl:attribute name="surname"><xsl:value-of select="../@asciiSurname"/></xsl:attribute> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Replacing surname <xsl:value-of select="../@surname"/> by <xsl:value-of select="../@asciiSurname"/>.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:copy/></xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="author/@fullname" mode="cleanup"> + <xsl:choose> + <xsl:when test="../@asciiFullname!=''"> + <xsl:attribute name="fullname"><xsl:value-of select="../@asciiFullname"/></xsl:attribute> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Replacing fullname <xsl:value-of select="../@fullname"/> by <xsl:value-of select="../@asciiFullname"/>.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:copy/></xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="author/@initials" mode="cleanup"> + <xsl:choose> + <xsl:when test="../@asciiInitials!=''"> + <xsl:attribute name="initials"><xsl:value-of select="../@asciiInitials"/></xsl:attribute> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Replacing initials <xsl:value-of select="../@initials"/> by <xsl:value-of select="../@asciiInitials"/>.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:copy/></xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="author/@anchor" mode="cleanup"/> +<xsl:template match="x:include-author" mode="cleanup"> + <t> + <xsl:value-of select="/*/front/author[@anchor=current()/@target]"/> + </t> + <t> + (see Authors Section) + </t> +</xsl:template> + +<xsl:template match="organization/@ascii" mode="cleanup"/> +<xsl:template match="organization" mode="cleanup"> + <organization> + <xsl:apply-templates select="@*" mode="cleanup"/> + <xsl:choose> + <xsl:when test="@ascii!=''"> + <xsl:value-of select="@ascii"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="text()"/> + </xsl:otherwise> + </xsl:choose> + </organization> +</xsl:template> + +<xsl:template match="title/@ascii" mode="cleanup"/> +<xsl:template match="title" mode="cleanup"> + <title> + <xsl:apply-templates select="@*" mode="cleanup"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:apply-templates select="node()" mode="cleanup"/> + </xsl:when> + <xsl:when test="@ascii!=''"> + <xsl:value-of select="@ascii"/> + </xsl:when> + <xsl:otherwise> + <xsl:for-each select="node()"> + <xsl:choose> + <xsl:when test="self::br"> + <xsl:text> </xsl:text> + </xsl:when> + <xsl:when test="self::*"> + <xsl:apply-templates select="node()" mode="cleanup"/> + </xsl:when> + <xsl:when test="self::processing-instruction()"/> + <xsl:otherwise> + <xsl:value-of 
select="normalize-space(.)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> + </title> +</xsl:template> + +<xsl:template match="@x:optional-ascii" mode="cleanup"/> +<xsl:template match="@ascii" mode="cleanup"/> +<xsl:template match="postal/*[@ascii or @x:optional-ascii]" mode="cleanup"> + <xsl:element name="{local-name()}"> + <xsl:apply-templates select="@*" mode="cleanup"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:copy-of select="@ascii"/> + <xsl:if test="@x:optional-ascii and not(@ascii)"> + <!-- workaround for https://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/443 --> + <xsl:attribute name="ascii"><xsl:value-of select="@x:optional-ascii"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="text()"/> + </xsl:when> + <xsl:when test="@ascii!=''"> + <xsl:value-of select="@ascii"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="text()"/> + </xsl:otherwise> + </xsl:choose> + </xsl:element> +</xsl:template> + +<xsl:template match="postal" mode="cleanup"> + <postal> + <xsl:apply-templates select="@*" mode="cleanup"/> + <xsl:if test="not(street) and not(postalLine)"> + <!-- street is mandatory in V2 --> + <street/> + </xsl:if> + <xsl:apply-templates select="node()" mode="cleanup"/> + </postal> +</xsl:template> + +<!-- not supported --> +<xsl:template match="relref/@format" mode="cleanup"/> + +<xsl:template match="xref[(@x:fmt or @x:sec or @x:rel or @section or @sectionFormat or @relative) and not(*|text())]|relref[not(*|text())]" mode="cleanup"> + <xsl:call-template name="insert-iref-for-xref"/> + <xsl:variable name="is-xref" select="self::xref"/> + <xsl:variable name="node" select="$src//*[@anchor=current()/@target]" /> + + <xsl:variable name="ssec"> + <xsl:call-template name="get-section-xref-section"/> + </xsl:variable> + + <xsl:variable name="tsec"> + <xsl:choose> + <xsl:when test="starts-with(@x:rel,'#') and $ssec='' and $node/x:source/@href"> + <xsl:variable name="extdoc" select="document($node/x:source/@href)"/> + <xsl:for-each select="$extdoc//*[@anchor=substring-after(current()/@x:rel,'#')]"> + <xsl:variable name="t"> + <xsl:call-template name="get-section-number"/> + </xsl:variable> + <xsl:choose> + <xsl:when test="starts-with($t,$unnumbered)"> + <xsl:choose> + <xsl:when test="ancestor::back">A@</xsl:when> + <xsl:otherwise>S@</xsl:otherwise> + </xsl:choose> + <xsl:call-template name="get-title-as-string"> + <xsl:with-param name="node" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$t"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$ssec"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="sec"> + <xsl:choose> + <xsl:when test="contains($tsec,'@')">"<xsl:value-of select="substring-after($tsec,'@')"/>"</xsl:when> + <xsl:otherwise><xsl:value-of select="$tsec"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="secterm"> + <xsl:choose> + <!-- starts with letter or unnumbered? 
--> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')='' or starts-with($tsec,'A@')">Appendix</xsl:when> + <xsl:otherwise>Section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="sfmt"> + <xsl:call-template name="get-section-xref-format"> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="ancestor::artwork or ancestor::sourcecode">comma</xsl:when> + <xsl:otherwise>of</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + </xsl:variable> + + <!--<xsl:comment><xsl:value-of select="concat($sfmt, ' ', $tsec, ' ', @x:sec)"/></xsl:comment>--> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3 and $tsec!='' and not(contains($tsec,'@')) and $sfmt='of'"> + <xref target="{@target}" section="{$tsec}"> + <xsl:if test="@x:rel"> + <xsl:attribute name="relative"><xsl:value-of select="@x:rel"/></xsl:attribute> + </xsl:if> + </xref> + </xsl:when> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3 and $tsec!='' and not(contains($tsec,'@')) and $sfmt='comma'"> + <xref target="{@target}" sectionFormat="comma" section="{$tsec}"> + <xsl:if test="@x:rel"> + <xsl:attribute name="relative"><xsl:value-of select="@x:rel"/></xsl:attribute> + </xsl:if> + </xref> + </xsl:when> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3 and $tsec!='' and not(contains($tsec,'@')) and $sfmt='bare'"> + <xref target="{@target}" sectionFormat="bare" section="{$tsec}"> + <xsl:if test="@x:rel"> + <xsl:attribute name="relative"><xsl:value-of select="@x:rel"/></xsl:attribute> + </xsl:if> + </xref> + </xsl:when> + <xsl:when test="$sfmt='comma'"> + <xref> + <xsl:apply-templates select="@target|@format|@pageno|text()|*" mode="cleanup"/> + </xref> + <xsl:text>, </xsl:text> + <xsl:value-of select="$secterm"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$sec"/> + </xsl:when> + <xsl:when test="$sfmt='section'"> + <xsl:value-of select="$secterm"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$sec"/> + </xsl:when> + <xsl:when test="$sfmt='bare'"> + <xsl:value-of select="$sec"/> + </xsl:when> + <xsl:when test="$sfmt='parens'"> + <xref> + <xsl:apply-templates select="@target|@format|@pageno|text()|*" mode="cleanup"/> + </xref> + <xsl:text> (</xsl:text> + <xsl:value-of select="$secterm"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$sec"/> + <xsl:text>)</xsl:text> + </xsl:when> + <xsl:when test="$sfmt='of'"> + <xsl:value-of select="$secterm"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$sec"/> + <xsl:text> of </xsl:text> + <xref> + <xsl:apply-templates select="@target|@format|@pageno|text()|*" mode="cleanup"/> + </xref> + </xsl:when> + <xsl:otherwise> + <xsl:copy> + <xsl:apply-templates select="node()" mode="cleanup"/> + </xsl:copy> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="abstract/@anchor" mode="cleanup"/> +<xsl:template match="note/@anchor" mode="cleanup"/> + +<xsl:template match="xref[(@x:fmt or @x:sec or @x:rel) and (*|text())]|relref[*|text()]" mode="cleanup"> + <xsl:call-template name="insert-iref-for-xref"/> + <xsl:choose> + <xsl:when test="self::relref"> + <xsl:apply-templates mode="cleanup"/> + </xsl:when> + <xsl:when test="@x:fmt='none'"> + <xsl:apply-templates mode="cleanup"/> + </xsl:when> + <xsl:when test="not(@x:fmt)"> + <xref> + <xsl:copy-of select="@target|@format"/> + <xsl:apply-templates mode="cleanup"/> + </xref> + </xsl:when> + <xsl:otherwise> + <xsl:message>Unsupported x:fmt attribute.</xsl:message> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="xref[(text()|*) and 
(@target=//abstract/@anchor or @target=//note/@anchor or @target=//preamble/@anchor or @target=//spanx/@anchor or @target=//name//@anchor or @target=//references/@anchor or @target=//artwork/@anchor or @target=//sourcecode/@anchor or @target=//artset/@anchor)]" mode="cleanup"> + <!-- remove the link --> + <xsl:apply-templates select="node()" mode="cleanup"/> +</xsl:template> + +<xsl:template match="xref[(text()|*) and @format='none' and (@target=//artwork//*/@anchor or @target=//sourcecode//*/@anchor)]" mode="cleanup"> + <!-- remove links to elements inside <artwork> or <sourcecode> --> + <xsl:apply-templates select="node()" mode="cleanup"/> +</xsl:template> + +<xsl:template match="xref[not((text()|*)) and (@target=//abstract/@anchor or @target=//note/@anchor or @target=//preamble/@anchor or @target=//spanx/@anchor or @target=//references/@anchor or @target=//artwork/@anchor or @target=//sourcecode/@anchor or @target=//artset/@anchor)]" mode="cleanup"> + <xsl:variable name="content"> + <xsl:apply-templates select="."/> + </xsl:variable> + <xsl:value-of select="$content"/> +</xsl:template> + +<xsl:template match="xref[not((text()|*)) and (not(@format) or @format='default') and (@target=//section[@numbered='false']/@anchor)]" mode="cleanup"> + <!-- link to unnumbered section --> + <xsl:copy> + <xsl:copy-of select="@target"/> + <xsl:variable name="content"> + <xsl:apply-templates select="."/> + </xsl:variable> + <xsl:value-of select="$content"/> + </xsl:copy> +</xsl:template> + +<xsl:template match="xref" mode="cleanup" priority="0"> + <xsl:call-template name="insert-iref-for-xref"/> + <xref> + <xsl:apply-templates select="@target|@format" mode="cleanup"/> + <xsl:apply-templates mode="cleanup"/> + </xref> +</xsl:template> + +<xsl:template name="insert-iref-for-xref"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yesxxx' and $xml2rfc-ext-include-index='yes'"> + <xsl:if test="@target=/rfc/back//reference/@anchor"> + <iref item="{@target}"/> + <xsl:if test="@x:sec"> + <xsl:choose> + <xsl:when test="translate(substring(@x:sec,1,1),$ucase,'')=''"> + <iref item="{@target}" subitem="Appendix {@x:sec}"/> + </xsl:when> + <xsl:otherwise> + <iref item="{@target}" subitem="Section {@x:sec}"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:if> + </xsl:if> +</xsl:template> + + +<!-- workaround for https://tools.ietf.org/tools/ietfdb/ticket/2900 --> +<xsl:template match="iref/comment()" mode="cleanup"/> + +<!-- drop index gen extension --> +<xsl:template match="iref" mode="cleanup"> + <xsl:if test="$xml2rfc-ext-include-index='yes'"> + <iref> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </iref> + </xsl:if> +</xsl:template> + + +<!-- issue tracking extensions --> + +<xsl:template match="@xml:lang" mode="cleanup"/> +<xsl:template match="@xml:lang" /> + +<xsl:template match="ed:*" mode="cleanup"/> +<xsl:template match="ed:*" /> + +<xsl:template match="@ed:*" mode="cleanup"/> +<xsl:template match="@ed:*" /> + +<xsl:template match="ed:annotation" mode="cleanup" /> + +<xsl:template match="ed:replace" mode="cleanup"> + <xsl:apply-templates mode="cleanup" /> +</xsl:template> + +<xsl:template match="ed:replace"> + <xsl:apply-templates/> +</xsl:template> + +<xsl:template match="ed:ins" mode="cleanup"> + <xsl:apply-templates mode="cleanup"/> +</xsl:template> + +<xsl:template match="ed:ins"> + <xsl:apply-templates/> +</xsl:template> + +<xsl:template match="ed:issue" mode="issues"> + <section title="{@name}"> + <xsl:variable name="sec"> + <xsl:call-template name="get-section-number"/> 
+ </xsl:variable> + + <xsl:if test="$sec!=''"> + <t> + In Section <xsl:value-of select="$sec"/>: + </t> + </xsl:if> + + <t> + Type: <xsl:value-of select="@type" /> + </t> + <xsl:if test="@href"> + <t> + <!-- temp. removed because of xml2rfc's handling of erefs when producing TXT--> + <!--<eref target="{@href}" /> --> + <xsl:text>&lt;</xsl:text> + <xsl:value-of select="@href"/> + <xsl:text>></xsl:text> + <xsl:if test="@alternate-href"> + <xsl:text>, &lt;</xsl:text> + <xsl:value-of select="@alternate-href"/> + <xsl:text>></xsl:text> + </xsl:if> + </t> + </xsl:if> + <xsl:for-each select="ed:item"> + <t> + <xsl:if test="@entered-by or @date"> + <xsl:choose> + <xsl:when test="not(@entered-by)"> + <xsl:value-of select="concat('(',@date,') ')" /> + </xsl:when> + <xsl:when test="not(@date)"> + <xsl:value-of select="concat(@entered-by,': ')" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat(@entered-by,' (',@date,'): ')" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:if test="not(xhtml:p)"> + <xsl:apply-templates select="node()" mode="issues"/> + </xsl:if> + </t> + <xsl:if test="xhtml:p|xhtml:pre"> + <xsl:for-each select="node()"> + <xsl:choose> + <xsl:when test="self::xhtml:p"> + <t> + <xsl:apply-templates select="node()" mode="issues"/> + </t> + </xsl:when> + <xsl:when test="self::xhtml:pre"> + <figure> + <artwork><xsl:apply-templates select="node()" mode="issues"/></artwork> + </figure> + </xsl:when> + <xsl:otherwise> + <t> + <xsl:apply-templates select="." mode="issues"/> + </t> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:if> + </xsl:for-each> + <xsl:if test="ed:resolution"> + <t> + <xsl:text>Resolution</xsl:text> + <xsl:if test="ed:resolution/@datetime"> (<xsl:value-of select="ed:resolution/@datetime"/>)</xsl:if> + <xsl:text>: </xsl:text> + <xsl:value-of select="ed:resolution" /> + </t> + </xsl:if> + </section> +</xsl:template> + +<xsl:template match="ed:issueref" mode="cleanup"> + <xsl:apply-templates mode="cleanup"/> +</xsl:template> + +<xsl:template match="*" mode="issues"> + <xsl:apply-templates mode="issues"/> +</xsl:template> + +<xsl:template match="xhtml:q" mode="issues"> + <list><t> + <xsl:text>"</xsl:text> + <xsl:apply-templates mode="issues"/> + <xsl:text>"</xsl:text> + <xsl:if test="@cite"> + <xsl:text> -- </xsl:text> + <eref target="{@cite}"><xsl:value-of select="@cite"/></eref> + </xsl:if> + </t></list> +</xsl:template> + +<xsl:template match="xhtml:br" mode="issues"> + <vspace/> +</xsl:template> + +<xsl:template match="xhtml:del" mode="issues"> + <xsl:text>&lt;del></xsl:text> + <xsl:apply-templates mode="issues"/> + <xsl:text>&lt;/del></xsl:text> +</xsl:template> + +<xsl:template match="xhtml:em" mode="issues"> + <spanx style="emph"> + <xsl:apply-templates mode="issues"/> + </spanx> +</xsl:template> + +<xsl:template match="xhtml:ins" mode="issues"> + <xsl:text>&lt;ins></xsl:text> + <xsl:apply-templates mode="issues"/> + <xsl:text>&lt;/ins></xsl:text> +</xsl:template> + +<xsl:template match="xhtml:tt" mode="issues"> + <xsl:apply-templates mode="issues"/> +</xsl:template> + +<xsl:template match="ed:eref" mode="issues"> + <xsl:text>&lt;</xsl:text> + <xsl:value-of select="."/> + <xsl:text>&gt;</xsl:text> +</xsl:template> + +<xsl:template match="ed:issueref" mode="issues"> + <xsl:apply-templates mode="issues"/> +</xsl:template> + +<xsl:template match="text()" mode="issues"> + <xsl:value-of select="." 
/> +</xsl:template> + +<!-- workgroup format --> +<xsl:template match="workgroup" mode="cleanup"> + <workgroup> + <xsl:variable name="v" select="normalize-space(.)"/> + <xsl:variable name="h"> + <!-- when a single name, append WG/RG postfix automatically --> + <xsl:choose> + <xsl:when test="not(contains($v, ' ')) and starts-with(/rfc/@docName,'draft-ietf-') and $submissionType='IETF'"> + <xsl:value-of select="concat($v, ' Working Group')"/> + </xsl:when> + <xsl:when test="not(contains($v, ' ')) and starts-with(/rfc/@docName,'draft-irtf-') and $submissionType='IRTF'"> + <xsl:value-of select="concat($v, ' Research Group')"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$v"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="$h"/> + </workgroup> +</xsl:template> + +<!-- markup inside artwork element --> + +<xsl:template match="figure" mode="cleanup"> + <!-- move up iref elements --> + <xsl:for-each select=".//artwork//xref"> + <xsl:if test="not(ancestor::ed:del)"> + <xsl:call-template name="insert-iref-for-xref"/> + </xsl:if> + </xsl:for-each> + <figure> + <xsl:apply-templates select="@align|@alt|@anchor|@height|@src|@suppress-title|@width" mode="cleanup" /> + <xsl:if test="not(@anchor) and artset/artwork/@anchor"> + <!-- propagate anchor --> + <xsl:copy-of select="artset/artwork/@anchor[1]"/> + </xsl:if> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates select=".//artwork//iref|.//sourcecode//iref" mode="cleanup"/> + <xsl:apply-templates select="iref|preamble|artwork|artset|sourcecode|postamble|ed:replace|ed:ins|ed:del" mode="cleanup" /> + </figure> +</xsl:template> +<xsl:template match="figure/name" mode="cleanup"/> + +<xsl:template name="insert-begin-code"/> +<xsl:template name="insert-end-code"/> +<xsl:template match="@x:is-code-component" mode="cleanup"/> + +<xsl:template match="artwork[svg:svg]" mode="cleanup"> +<xsl:call-template name="warning"> + <xsl:with-param name="msg">SVG image removed.</xsl:with-param> +</xsl:call-template> +<artwork>(see SVG image in HTML version)</artwork> +</xsl:template> + +<xsl:template match="artwork" mode="cleanup"> + <xsl:call-template name="insert-markup"/> +</xsl:template> + +<xsl:template match="artwork[not(ancestor::figure)]" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:apply-templates select=".//iref" mode="cleanup"/> + <xsl:call-template name="insert-markup"/> + </xsl:when> + <xsl:when test="parent::blockquote"> + <t> + <xsl:call-template name="bare-artwork-to-v2"/> + </t> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="bare-artwork-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="bare-artwork-to-v2"> + <figure> + <!-- propagate anchor --> + <xsl:if test="parent::artset and not(../@anchor)"> + <xsl:copy-of select="@anchor"/> + </xsl:if> + <!-- move irefs up --> + <xsl:apply-templates select="iref" mode="cleanup"/> + <xsl:call-template name="insert-markup"/> + </figure> +</xsl:template> + +<xsl:template match="artwork/@anchor" mode="cleanup"/> + +<xsl:template name="insert-markup"> + <xsl:variable 
name="content2"><xsl:apply-templates select="node()"/></xsl:variable> + <xsl:variable name="content" select="translate($content2,'&#160;&#x2500;&#x2502;&#x2508;&#x250c;&#x2510;&#x2514;&#x2518;&#x251c;&#x2524;',' -|+++++++')"/> + <artwork> + <xsl:apply-templates select="@*" mode="cleanup" /> + <xsl:if test="@x:is-code-component='yes'"> + <xsl:if test="starts-with(.,'&#10;')"> + <xsl:text>&#10;</xsl:text> + </xsl:if> + <xsl:value-of select="@x:indent-with"/> + <xsl:text>&lt;CODE BEGINS&gt;&#10;</xsl:text> + </xsl:if> + <xsl:if test="starts-with(.,'&#10;')"> + <xsl:text>&#10;</xsl:text> + <xsl:value-of select="@x:indent-with"/> + </xsl:if> + <xsl:choose> + <xsl:when test="@x:indent-with!=''"> + <xsl:call-template name="indent"> + <xsl:with-param name="content" select="$content"/> + <xsl:with-param name="with" select="@x:indent-with"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$content"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@x:is-code-component='yes'">&#10;&lt;CODE ENDS&gt;&#10;</xsl:if> + </artwork> +</xsl:template> + +<xsl:template match="@x:indent-with" mode="cleanup"/> +<xsl:template match="@x:lang" mode="cleanup"/> + +<xsl:template name="indent"> + <xsl:param name="content"/> + <xsl:param name="with"/> + + <xsl:value-of select="substring($content,1,1)"/> + <xsl:if test="substring($content,1,1)='&#10;'"> + <xsl:value-of select="$with"/> + </xsl:if> + + <xsl:choose> + <xsl:when test="$content=''" /> + <xsl:otherwise> + <xsl:call-template name="indent"> + <xsl:with-param name="content" select="substring($content,2)"/> + <xsl:with-param name="with" select="$with"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template match="artset" mode="cleanup"> + <!-- see https://tools.ietf.org/html/draft-levkowetz-xml2rfc-v3-implementation-notes-08#section-3.1.1 --> + <xsl:choose> + <xsl:when test="artwork[not(svg:svg or normalize-space(.)='' or @src!='')]"> + <xsl:apply-templates select="artwork[not(svg:svg or normalize-space(.)='' or @src!='')][1]" mode="cleanup"/> + </xsl:when> + <xsl:when test="artwork"> + <xsl:apply-templates select="artwork[1]" mode="cleanup"/> + </xsl:when> + <xsl:when test="not(artwork) and parent::figure"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">artset needs to contain at least one artwork child element</xsl:with-param> + </xsl:call-template> + <artwork/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<!-- email repetitions --> +<xsl:template match="email" mode="cleanup"> + <!-- combine in a single element --> + <xsl:if test="not(preceding-sibling::email)"> + <email> + <xsl:for-each select="../email"> + <xsl:value-of select="."/> + <xsl:if test="position()!=last()"> + <xsl:text>, </xsl:text> + </xsl:if> + </xsl:for-each> + </email> + </xsl:if> +</xsl:template> + +<!-- defaults for <eref> brackets --> +<xsl:template match="eref[not(*|text()) and not(ancestor::cref)]" mode="cleanup"> + <eref> + <xsl:copy-of select="@target"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3 and not(@brackets)"> + <xsl:attribute name="brackets">angle</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="@brackets"/> + </xsl:otherwise> + </xsl:choose> + </eref> +</xsl:template> + +<!-- cref/@display --> +<xsl:template match="cref/@display" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:copy-of select="."/> + </xsl:when> + <xsl:otherwise> + 
<!-- otherwise just drop --> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- markup inside cref --> +<xsl:template match="cref//eref" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:copy> + <xsl:apply-templates select="node()|@*" mode="cleanup"/> + </xsl:copy> + </xsl:when> + <xsl:otherwise> + <xsl:text>&lt;</xsl:text> + <xsl:value-of select="@target"/> + <xsl:text>&gt;</xsl:text> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="cref//x:dfn|cref//x:ref" mode="cleanup" priority="9"> + <xsl:variable name="text"> + <xsl:apply-templates select="."/> + </xsl:variable> + <xsl:value-of select="$text"/> +</xsl:template> + +<xsl:template match="cref//xref" mode="cleanup" priority="9"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:copy> + <xsl:apply-templates select="@*|*" mode="cleanup"/> + </xsl:copy> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="text"> + <xsl:apply-templates select="."/> + </xsl:variable> + <xsl:value-of select="$text"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- annotations --> +<xsl:template match="@x:annotation" mode="cleanup"> + <xsl:comment> + <xsl:value-of select="."/> + </xsl:comment> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Dropping annotation on <xsl:value-of select="local-name(..)"/> element.</xsl:with-param> + </xsl:call-template> +</xsl:template> + +<!-- artwork extensions --> +<xsl:template match="artwork/@x:extraction-note" mode="cleanup"/> + +<!-- list formatting --> +<xsl:template match="list/@x:indent" mode="cleanup"/> + +<!-- rewrite to 'hanging' for now --> +<xsl:template match="list[@style='x:dictionary']" mode="cleanup"> + <list style="hanging"> + <xsl:copy-of select="@hangIndent"/> + <xsl:apply-templates select="*" mode="cleanup"/> + </list> +</xsl:template> + +<!-- referencing extensions --> +<xsl:template match="iref/@x:for-anchor" mode="cleanup"/> + +<!-- GRRDL info stripped --> +<xsl:template match="@grddl:transformation" mode="cleanup"/> + +<!-- maturity level stripped --> +<xsl:template match="@x:maturity-level" mode="cleanup"/> + +<!-- normativity stripped --> +<xsl:template match="@x:nrm" mode="cleanup"/> + +<!-- table extensions --> +<xsl:template match="texttable/@x:caption-side" mode="cleanup"/> + +<!-- title extensions --> +<xsl:template match="title/@x:quotes" mode="cleanup"/> + +<!-- organization extensions --> +<xsl:template match="organization/@showOnFrontPage" mode="cleanup"/> + +<!-- RDF info stripped --> +<xsl:template match="rdf:*" mode="cleanup"/> + +<!-- cases where xml2rfc does not allow anchors --> +<xsl:template match="c/@anchor" mode="cleanup"/> +<xsl:template match="preamble/@anchor" mode="cleanup"/> +<xsl:template match="spanx/@anchor" mode="cleanup"/> + +<!-- Workaround for http://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/297 --> +<xsl:template match="spanx[@style='vbare']" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-strip-vbare='true'"> + <xsl:apply-templates mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <spanx style="vbare"> + <xsl:apply-templates mode="cleanup"/> + </spanx> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- v3 features --> +<xsl:template match="rfc/@sortRefs" mode="cleanup"/> +<xsl:template match="rfc/@symRefs" mode="cleanup"/> +<xsl:template match="rfc/@tocInclude" mode="cleanup"/> +<xsl:template match="rfc/@tocDepth" mode="cleanup"/> +<xsl:template match="rfc/@consensus" mode="cleanup"/> + +<!-- handled below --> 
+<xsl:template match="rfc/@category" mode="cleanup"/> +<xsl:template match="rfc/@ipr" mode="cleanup"/> + +<xsl:template match="rfc" mode="cleanup"> + <xsl:if test="@sortRefs='true'"> + <xsl:processing-instruction name="rfc">sortrefs="yes"</xsl:processing-instruction> + </xsl:if> + <xsl:if test="@symRefs='false'"> + <xsl:processing-instruction name="rfc">symrefs="no"</xsl:processing-instruction> + </xsl:if> + <xsl:if test="$parsedTocDepth!=3 and $xml2rfc-ext-xml2rfc-voc &lt; 3"> + <xsl:processing-instruction name="rfc">tocdepth="<xsl:value-of select="$parsedTocDepth"/>"</xsl:processing-instruction> + </xsl:if> + <xsl:if test="@version and (not(@tocInclude) or @tocInclude='true')"> + <xsl:processing-instruction name="rfc">toc="yes"</xsl:processing-instruction> + </xsl:if> + <rfc> + <xsl:if test="not(@version) and $xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:attribute name="version"><xsl:value-of select="$xml2rfc-ext-xml2rfc-voc"/></xsl:attribute> + </xsl:if> + <xsl:if test="not(@tocDepth) and $xml2rfc-ext-xml2rfc-voc >= 3 and $parsedTocDepth!=3"> + <xsl:attribute name="tocDepth"><xsl:value-of select="$parsedTocDepth"/></xsl:attribute> + </xsl:if> + <xsl:if test="not(@indexInclude) and $xml2rfc-ext-xml2rfc-voc >= 3"> + <!-- index gen broken in xml2rfc v3 mode for now, see https://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/418 --> + <xsl:attribute name="indexInclude">false</xsl:attribute> + </xsl:if> + <xsl:if test="not(@sortRefs) and $xml2rfc-ext-xml2rfc-voc >= 3 and $xml2rfc-sortrefs='yes'"> + <xsl:attribute name="sortRefs">true</xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="@consensus='yes' and $xml2rfc-ext-xml2rfc-voc >= 3"><xsl:attribute name="consensus">true</xsl:attribute></xsl:when> + <xsl:when test="@consensus='no' and $xml2rfc-ext-xml2rfc-voc >= 3"><xsl:attribute name="consensus">false</xsl:attribute></xsl:when> + <xsl:when test="@consensus='true' and $xml2rfc-ext-xml2rfc-voc &lt; 3"><xsl:attribute name="consensus">yes</xsl:attribute></xsl:when> + <xsl:when test="@consensus='false' and $xml2rfc-ext-xml2rfc-voc &lt; 3"><xsl:attribute name="consensus">no</xsl:attribute></xsl:when> + <xsl:otherwise><xsl:copy-of select="@consensus"/></xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="@submissionType='IETF' and not(@category) and $xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">defaulting /rfc/@category to "info" for xml2rfc v3</xsl:with-param> + </xsl:call-template> + <xsl:attribute name="category">info</xsl:attribute> + </xsl:when> + <xsl:otherwise><xsl:copy-of select="@category"/></xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="@submissionType='IETF' and not(@ipr) and $xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">defaulting /rfc/@ipr to "trust200902" for xml2rfc v3</xsl:with-param> + </xsl:call-template> + <xsl:attribute name="ipr">trust200902</xsl:attribute> + </xsl:when> + <xsl:otherwise><xsl:copy-of select="@ipr"/></xsl:otherwise> + </xsl:choose> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </rfc> +</xsl:template> + +<xsl:template match="strong" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <strong> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </strong> + </xsl:when> + <xsl:when test="*"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">strong not translated when including child elements</xsl:with-param> + </xsl:call-template> + <xsl:apply-templates 
mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <spanx style="strong"> + <xsl:apply-templates mode="cleanup"/> + </spanx> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="em" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <em> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </em> + </xsl:when> + <xsl:when test="*"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">em not translated when including child elements</xsl:with-param> + </xsl:call-template> + <xsl:apply-templates mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <spanx style="emph"> + <xsl:apply-templates mode="cleanup"/> + </spanx> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="tt" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <tt> + <xsl:apply-templates select="node()|@*" mode="cleanup" /> + </tt> + </xsl:when> + <xsl:when test="*"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">tt not translated when they include child elements</xsl:with-param> + </xsl:call-template> + <xsl:apply-templates mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <spanx style="verb"> + <xsl:apply-templates mode="cleanup"/> + </spanx> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="references/@anchor" mode="cleanup"/> + +<!-- New reference attributes --> +<xsl:template match="reference/@quoteTitle" mode="cleanup"> + <xsl:if test="$xml2rfc-ext-xml2rfc-backend >= 201706"> + <xsl:attribute name="quote-title"><xsl:value-of select="."/></xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template match="reference/front/abstract" mode="cleanup"/> + +<xsl:template match="referencegroup" mode="cleanup"> + <reference anchor="{@anchor}"> + <xsl:copy-of select="@target"/> + <xsl:if test="$xml2rfc-ext-xml2rfc-backend >= 201706"> + <xsl:attribute name="quote-title">false</xsl:attribute> + </xsl:if> + <xsl:comment>...expanded &lt;referencegroup>...</xsl:comment> + <front> + <title> + <xsl:text>Consisting of: </xsl:text> + <xsl:variable xmlns:myns="mailto:julian.reschke@greenbytes.de?subject=rfc2629.xslt" name="included" select="exslt:node-set($includeDirectives)/myns:include[@in=generate-id(current())]/*[self::reference or self::referencegroup]"/> + <xsl:for-each select="reference|$included"> + <xsl:value-of select="concat('[',@anchor,']')"/> + <xsl:choose> + <xsl:when test="position() &lt; last() - 1">, </xsl:when> + <xsl:when test="position() = last() - 1">, and </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:for-each> + </title> + <author/> + <date/> + </front> + </reference> + <xsl:apply-templates mode="cleanup"/> +</xsl:template> + +<xsl:template match="reference" mode="cleanup"> + <reference> + <xsl:apply-templates select="@anchor|@target|@quoteTitle" mode="cleanup"/> + <xsl:choose> + <xsl:when test="not(@target) and $xml2rfc-ext-link-rfc-to-info-page='yes' and seriesInfo[@name='BCP'] and starts-with(@anchor,'BCP')"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-info-uri"> + <xsl:with-param name="type" select="'bcp'"/> + <xsl:with-param name="no" select="seriesInfo[@name='BCP']/@value"/> + </xsl:call-template> + </xsl:variable> + <xsl:attribute name="target"><xsl:value-of select="$uri"/></xsl:attribute> + </xsl:when> + <xsl:when test="not(@target) and $xml2rfc-ext-link-rfc-to-info-page='yes' and seriesInfo[@name='RFC']"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-info-uri"> + <xsl:with-param 
name="type" select="'rfc'"/> + <xsl:with-param name="no" select="seriesInfo[@name='RFC']/@value"/> + </xsl:call-template> + </xsl:variable> + <xsl:attribute name="target"><xsl:value-of select="$uri"/></xsl:attribute> + </xsl:when> + <xsl:when test="not(@target) and $xml2rfc-ext-link-rfc-to-info-page='yes' and not(seriesInfo) and document(x:source/@href)/rfc/@number"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-info-uri"> + <xsl:with-param name="type" select="'rfc'"/> + <xsl:with-param name="no" select="document(x:source/@href)/rfc/@number"/> + </xsl:call-template> + </xsl:variable> + <xsl:attribute name="target"><xsl:value-of select="$uri"/></xsl:attribute> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + <xsl:choose> + <xsl:when test="front"> + <xsl:apply-templates select="front" mode="cleanup"/> + </xsl:when> + <xsl:when test="x:source"> + <xsl:variable name="d" select="document(x:source/@href)"/> + <xsl:comment>included from <xsl:value-of select="x:source/@href"/></xsl:comment> + <front> + <xsl:apply-templates select="$d/rfc/front/title" mode="cleanup"/> + <xsl:apply-templates select="$d/rfc/front/author" mode="cleanup"/> + <xsl:choose> + <xsl:when test="$d/rfc/front/date/@*"> + <!-- any date info present? --> + <xsl:apply-templates select="$d/rfc/front/date" mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <!-- let defaults apply --> + <date year="{$xml2rfc-ext-pub-year}" month="{$xml2rfc-ext-pub-month}"/> + </xsl:otherwise> + </xsl:choose> + </front> + <xsl:if test="not(seriesInfo) and document(x:source/@href)/rfc/@docName"> + <seriesInfo name="Internet-Draft" value="{document(x:source/@href)/rfc/@docName}"/> + </xsl:if> + <xsl:if test="not(seriesInfo) and document(x:source/@href)/rfc/@number"> + <seriesInfo name="RFC" value="{document(x:source/@href)/rfc/@number}"/> + </xsl:if> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + <xsl:apply-templates select="seriesInfo|front/seriesInfo" mode="cleanup"/> + + <!-- Insert DOI for RFCs --> + <xsl:variable name="doi"> + <xsl:choose> + <xsl:when test="seriesInfo|front/seriesInfo"> + <xsl:call-template name="compute-doi"/> + </xsl:when> + <xsl:when test="document(x:source/@href)/rfc/@number"> + <xsl:call-template name="compute-doi"> + <xsl:with-param name="rfc" select="document(x:source/@href)/rfc/@number"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <xsl:if test="$xml2rfc-ext-insert-doi='yes' and $doi!='' and not(seriesInfo[@name='DOI']|front/seriesInfo[@name='DOI'])"> + <seriesInfo name="DOI" value="{$doi}"/> + </xsl:if> + + <xsl:apply-templates select="*[not(self::front) and not(self::seriesInfo)]" mode="cleanup"/> + </reference> +</xsl:template> + +<xsl:template match="seriesInfo" mode="cleanup"> + <xsl:choose> + <xsl:when test="@name='Internet-Draft' and $rfcno > 7375"> + <!-- special case in RFC formatting since 2015 --> + <seriesInfo name="Work in Progress," value="{@value}"/> + </xsl:when> + <xsl:when test="@name='DOI' and starts-with(@value,'10.17487/RFC') and $xml2rfc-ext-insert-doi='no'"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Removing DOI <xsl:value-of select="@value"/> from &lt;reference> element</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <seriesInfo name="{@name}" value="{@value}"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + + +<xsl:template match="date[ancestor::reference]" mode="cleanup"> + <xsl:choose> + <xsl:when test="@year!='' or normalize-space(.)=''"> + <date> + 
<xsl:apply-templates select="@*" mode="cleanup"/> + </date> + </xsl:when> + <xsl:otherwise> + <date year="{normalize-space(.)}"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="front" mode="cleanup"> + <!-- silence certain xml2rfcv3 warning messages --> + <xsl:if test="$xml2rfc-ext-xml2rfc-backend >= 201706 and not(ancestor::reference)"> + <xsl:if test="not(/rfc/@consensus)"> + <xsl:text>&#10;</xsl:text> + <xsl:comment>see https://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/420</xsl:comment> + <xsl:text>&#10;</xsl:text> + <xsl:processing-instruction name="v3xml2rfc">silence="Warning: Setting consensus="true" for IETF STD document"</xsl:processing-instruction> + </xsl:if> + <xsl:if test="$xml2rfc-ext-xml2rfc-voc >= 3 and not(/rfc/@submissionType) and not ($is-rfc)"> + <!-- issue to be raised --> + <xsl:text>&#10;</xsl:text> + <xsl:processing-instruction name="v3xml2rfc">silence="Warning: Expected a valid submissionType (stream) setting"</xsl:processing-instruction> + </xsl:if> + <xsl:if test="$xml2rfc-ext-xml2rfc-voc >= 3 and substring(/rfc/@docName, string-length(/rfc/@docName)-string-length('-latest')+1)='-latest'"> + <xsl:text>&#10;</xsl:text> + <xsl:comment>see https://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/439</xsl:comment> + <xsl:text>&#10;</xsl:text> + <xsl:processing-instruction name="v3xml2rfc">silence="The 'docName' attribute of the &lt;rfc/> element"</xsl:processing-instruction> + </xsl:if> + </xsl:if> + <front> + <xsl:apply-templates select="title" mode="cleanup"/> + <xsl:if test="$xml2rfc-ext-xml2rfc-voc >= 3 and seriesInfo"> + <xsl:apply-templates select="seriesInfo" mode="cleanup"/> + </xsl:if> + <xsl:apply-templates select="author" mode="cleanup"/> + <xsl:apply-templates select="date" mode="cleanup"/> + <xsl:if test="not(date)"> + <!-- mandatory in v2 --> + <date/> + </xsl:if> + <xsl:apply-templates select="text()|node()[not(self::seriesInfo or self::title or self::author or self::date)]" mode="cleanup"/> + </front> +</xsl:template> + +<!-- Note titles --> +<xsl:template match="note" mode="cleanup"> + <note> + <xsl:apply-templates select="@anchor" mode="cleanup"/> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + <xsl:if test="@removeInRFC='true' and (not(t) or t[1]!=$note-removeInRFC)"> + <t><xsl:value-of select="$note-removeInRFC"/></t> + </xsl:if> + <xsl:apply-templates mode="cleanup"/> + </note> +</xsl:template> +<xsl:template match="note/name" mode="cleanup"/> + +<!-- References --> +<xsl:template match="references" mode="cleanup"> + <xsl:choose> + <xsl:when test="parent::back and count(../references) > 1 and $xml2rfc-ext-xml2rfc-voc >= 3"> + <!-- insert top-level references section --> + <xsl:if test="not(preceding-sibling::references)"> + <references> + <name>References</name> + <xsl:for-each select="../references"> + <references> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + 
<xsl:apply-templates select="@anchor|@toc" mode="cleanup"/> + <xsl:if test="not(name)"> + <name><xsl:value-of select="$title"/></name> + </xsl:if> + <xsl:apply-templates select="*" mode="cleanup"/> + </references> + </xsl:for-each> + </references> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <references> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:apply-templates select="@anchor|@toc" mode="cleanup"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3 and name"> + <xsl:apply-templates select="name" mode="cleanup"/> + </xsl:when> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <name><xsl:value-of select="$title"/></name> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + <xsl:apply-templates mode="cleanup" select="node()[not(self::name)]"/> + </references> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- Section titles --> +<xsl:template match="section" mode="cleanup"> + <section> + <xsl:copy-of select="@anchor|@toc"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-backend >= 201610"> + <xsl:copy-of select="@numbered"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:apply-templates select="@title" mode="cleanup"/> + <xsl:if test="name"> + <name> + <xsl:apply-templates select="name/node()" mode="cleanup"/> + </name> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="title"> + <xsl:call-template name="get-title-as-string"/> + </xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@removeInRFC='true' and (not(t) or t[1]!=$section-removeInRFC)"> + <t><xsl:value-of select="$section-removeInRFC"/></t> + </xsl:if> + <xsl:apply-templates mode="cleanup"/> + </section> + <xsl:if test="(@numbered='no' or @numbered='false') and $xml2rfc-ext-xml2rfc-backend &lt; 201610"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unnumbered sections not supported</xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> +<xsl:template match="section/name" mode="cleanup"/> + +<!-- Definition Lists --> +<xsl:template match="dl" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <dl> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </dl> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="dl-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="dl-to-v2"> + <xsl:choose> + <xsl:when test="parent::dd"> + <xsl:call-template name="process-dl"/> + </xsl:when> + <xsl:otherwise> + <t> + <xsl:call-template name="process-dl"/> + </t> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="process-dl"> + <xsl:copy-of select="@anchor"/> + <xsl:variable name="newl" select="@newline"/> + <xsl:variable name="spac" select="@spacing"/> + <xsl:if test="parent::section"> + <!-- avoid adding PIs into nested lists due to xml2rfc bug --> + <xsl:processing-instruction name="rfc"> + <xsl:choose> + <xsl:when test="not($spac='compact')">subcompact='no'</xsl:when> + <xsl:otherwise>subcompact='yes'</xsl:otherwise> + </xsl:choose> + 
</xsl:processing-instruction> + </xsl:if> + <list style="hanging"> + <xsl:variable name="indent" select="@indent"/> + <xsl:if test="number($indent)=$indent"> + <xsl:attribute name="hangIndent"><xsl:value-of select="$indent"/></xsl:attribute> + </xsl:if> + <xsl:for-each select="dt"> + <xsl:variable name="txt"> + <xsl:apply-templates select="." mode="cleanup"/> + </xsl:variable> + <!-- TODO: check for more block-level elements --> + <xsl:variable name="desc" select="following-sibling::dd[1]"/> + <xsl:variable name="block-level-children" select="$desc/artwork | $desc/dl | $desc/figure | $desc/ol | $desc/sourcecode | $desc/t | $desc/table | $desc/ul"/> + <t hangText="{normalize-space($txt)}"> + <xsl:choose> + <xsl:when test="@anchor"> + <xsl:copy-of select="@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="$desc/@anchor"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="$newl='true'"> + <xsl:choose> + <xsl:when test="$block-level-children"> + <vspace blankLines="1"/> + </xsl:when> + <xsl:otherwise> + <vspace blankLines="0"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:apply-templates select="iref" mode="cleanup"/> + <xsl:choose> + <xsl:when test="$block-level-children"> + <xsl:for-each select="$block-level-children"> + <xsl:choose> + <xsl:when test="self::t"> + <xsl:apply-templates select="node()" mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="." mode="cleanup"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="position()!=last()"> + <xsl:choose> + <xsl:when test="not($spac='compact')"><vspace blankLines="1"/></xsl:when> + <xsl:otherwise><vspace blankLines="0"/></xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="$desc/node()" mode="cleanup"/> + </xsl:otherwise> + </xsl:choose> + </t> + </xsl:for-each> + </list> +</xsl:template> + +<!-- rewrite link target going to <dd> to use preceding <dt>'s anchor when present --> +<xsl:template match="xref/@target[.=//dd/@anchor]" mode="cleanup"> + <xsl:variable name="t" select="//dd[@anchor=current()]"/> + <xsl:variable name="p" select="$t/preceding-sibling::dt[1]"/> + <xsl:choose> + <xsl:when test="$p/@anchor"> + <xsl:attribute name="target"><xsl:value-of select="$p/@anchor"/></xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="target"><xsl:value-of select="@target"/></xsl:attribute> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- List items --> +<xsl:template match="li" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <li> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </li> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="li-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="li-to-v2"> + <t> + <xsl:copy-of select="@anchor"/> + <xsl:apply-templates mode="cleanup"/> + </t> +</xsl:template> + +<xsl:template match="li/t" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <t> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </t> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="li-t-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="li-t-to-v2"> + <xsl:apply-templates mode="cleanup"/> + <xsl:if test="position()!=last()"> + <vspace blankLines="1"/> + </xsl:if> +</xsl:template> + +<xsl:template match="li/ul" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <ul> + <xsl:apply-templates 
select="@*|node()" mode="cleanup"/> + </ul> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="li-ul-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="li-ul-to-v2"> + <list style="symbols"> + <xsl:apply-templates mode="cleanup"/> + </list> + <xsl:if test="position()!=last()"> + <vspace blankLines="1"/> + </xsl:if> +</xsl:template> + +<!-- Ordered Lists --> +<xsl:template match="ol" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <ol> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </ol> + </xsl:when> + <xsl:when test="parent::li"> + <xsl:call-template name="ol-to-v2"/> + <xsl:if test="position()!=last()"> + <vspace blankLines="1"/> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <t> + <xsl:call-template name="ol-to-v2"/> + </t> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="ol-to-v2"> + <xsl:copy-of select="@anchor"/> + <xsl:if test="@start and @start!='1'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">list start != 1 not supported</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="@group"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">ol/@group not supported</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:variable name="style"> + <xsl:choose> + <xsl:when test="not(@type) or @type='1'">numbers</xsl:when> + <xsl:when test="@type='a'">letters</xsl:when> + <xsl:when test="@type='A'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">ol/@type=<xsl:value-of select="@type"/> not supported (defaulting to 'a')</xsl:with-param> + </xsl:call-template> + <xsl:text>letters</xsl:text> + </xsl:when> + <xsl:when test="string-length(@type)>1">format <xsl:value-of select="@type"/></xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">ol/@type=<xsl:value-of select="@type"/> not supported (defaulting to '1')</xsl:with-param> + </xsl:call-template> + <xsl:text>numbers</xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <list style="{$style}"> + <xsl:if test="@group"> + <xsl:attribute name="counter"><xsl:value-of select="@group"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates mode="cleanup"/> + </list> +</xsl:template> + +<!-- Unordered Lists --> +<xsl:template match="ul" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <ul> + <xsl:apply-templates select="@*" mode="cleanup"/> + <xsl:if test="not(li) and @x:when-empty"> + <li> + <xsl:value-of select="@x:when-empty"/> + </li> + </xsl:if> + <xsl:apply-templates select="node()" mode="cleanup"/> + </ul> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="ul-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> +<xsl:template match="ul/@x:when-empty" mode="cleanup"/> + +<xsl:template name="ul-to-v2"> + <xsl:choose> + <xsl:when test="not(li) and @x:when-empty"> + <t> + <xsl:value-of select="@x:when-empty"/> + </t> + </xsl:when> + <xsl:otherwise> + <t> + <xsl:choose> + <xsl:when test="@empty='true'"> + <list style="empty"> + <xsl:apply-templates mode="cleanup"/> + </list> + </xsl:when> + <xsl:otherwise> + <list style="symbols"> + <xsl:apply-templates mode="cleanup"/> + </list> + </xsl:otherwise> + </xsl:choose> + </t> + </xsl:otherwise> + </xsl:choose> +</xsl:template> 
+ +<xsl:template name="get-content-of-artwork"> + <xsl:variable name="content2"><xsl:apply-templates select="node()"/></xsl:variable> + <xsl:variable name="content" select="translate($content2,'&#160;&#x2500;&#x2502;&#x2508;&#x250c;&#x2510;&#x2514;&#x2518;&#x251c;&#x2524;',' -|+++++++')"/> + <xsl:value-of select="$content"/> +</xsl:template> + +<xsl:template name="insert-sourcecode-as-artwork"> + <artwork> + <xsl:copy-of select="@type"/> + <xsl:if test="@markers='true'"> + <xsl:text>&lt;CODE BEGINS></xsl:text> + <xsl:if test="self::sourcecode and @name"> + <xsl:variable name="offending" select="translate(@name,concat($alnum,'-+.,;_~#'),'')"/> + <xsl:choose> + <xsl:when test="$offending!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">illegal characters in @name attribute '<xsl:value-of select="@name"/>': '<xsl:value-of select="$offending"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:text> file "</xsl:text> + <xsl:value-of select="@name"/> + <xsl:text>"</xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:text>&#10;</xsl:text> + </xsl:if> + + <xsl:if test="starts-with(.,'&#10;')"> + <xsl:text>&#10;</xsl:text> + <xsl:value-of select="@x:indent-with"/> + </xsl:if> + <xsl:call-template name="get-content-of-artwork"/> + <xsl:if test="@markers='true'">&#10;&lt;CODE ENDS></xsl:if> + </artwork> +</xsl:template> + +<!-- Source Code --> +<xsl:template match="sourcecode" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <xsl:apply-templates select=".//iref" mode="cleanup"/> + <sourcecode> + <xsl:copy-of select="@*"/> + <xsl:call-template name="get-content-of-artwork"/> + </sourcecode> + </xsl:when> + <xsl:when test="parent::figure"> + <xsl:call-template name="insert-sourcecode-as-artwork"/> + </xsl:when> + <xsl:when test="parent::blockquote"> + <t> + <figure> + <xsl:apply-templates select=".//iref" mode="cleanup"/> + <xsl:call-template name="insert-sourcecode-as-artwork"/> + </figure> + </t> + </xsl:when> + <xsl:otherwise> + <figure> + <xsl:apply-templates select=".//iref" mode="cleanup"/> + <xsl:call-template name="insert-sourcecode-as-artwork"/> + </figure> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- Tables --> +<xsl:template match="table" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <table> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </table> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="table-to-v2"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="table-to-v2"> + <texttable> + <xsl:apply-templates select="@anchor|@align" mode="cleanup"/> + <xsl:if test="not(@align)"> + <xsl:attribute name="align">left</xsl:attribute> + </xsl:if> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + </xsl:if> + <xsl:if test="count(thead/tr) > 1"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">Multiple table header lines not supported</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="thead/tr/*"> + <xsl:variable name="p" 
select="position()"/> + <!-- in texttable the whole column has the same alignment; we try + either the first non-header row or the header itself--> + <xsl:variable name="align"> + <xsl:choose> + <xsl:when test="tbody/tr[1]/*[1] and tbody/tr[1]/*[1]/@align"><xsl:value-of select="tbody/tr[1]/*[1]/@align"/></xsl:when> + <xsl:when test="@align"><xsl:value-of select="@align"/></xsl:when> + <xsl:otherwise>left</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <ttcol align="{$align}"> + <xsl:apply-templates mode="cleanup"/> + </ttcol> + </xsl:for-each> + <xsl:for-each select="tbody/tr/*"> + <c> + <xsl:if test="position()=1"> + <xsl:apply-templates select="../../../iref" mode="cleanup"/> + </xsl:if> + <xsl:choose> + <xsl:when test="t|sourcecode|ol|dl|uo"> + <xsl:apply-templates select="t/node()|sourcecode/node()|ol/li/node()|ul/li/node()|dl/*/node()" mode="cleanup"/> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates mode="cleanup"/> + </xsl:otherwise> + </xsl:choose> + </c> + <xsl:if test="@rowspan and @rowspan!='1'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">rowspan attribute not supported (dropped, table will be ugly)</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="@colspan and @colspan!='1'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">colspan attribute not supported (dropped, table will be ugly)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + <xsl:if test="tfoot"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">tfoot element not supported (dropped)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </texttable> +</xsl:template> + +<!-- date formats --> +<xsl:template match="/rfc/front/date/@month" mode="cleanup"> + <xsl:attribute name="month"> + <xsl:choose> + <xsl:when test="string(number(.))!='NaN' and number(.)&gt;0 and number(.)&lt;13"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:attribute> +</xsl:template> + +<!-- x:contributor/contact --> +<xsl:template match="x:contributor|contact" mode="cleanup"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-xml2rfc-voc >= 3"> + <contact> + <xsl:apply-templates select="@*|node()" mode="cleanup"/> + </contact> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="content"> + <xsl:apply-templates select="."/> + </xsl:variable> + <t> + <xsl:apply-templates select="exslt:node-set($content)/*" mode="text"/> + </t> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="*" mode="text"> + <xsl:apply-templates mode="text"/> +</xsl:template> +<xsl:template match="text()" mode="text"> + <xsl:value-of select="."/> +</xsl:template> +<xsl:template match="br" mode="text"> + <vspace blankLines="0"/> +</xsl:template> + +<!-- x:include --> +<xsl:template match="/rfc/back/references/xi:include|/rfc/back/references/referencegroup/xi:include" mode="cleanup"> + <xsl:apply-templates select="document(@href)" mode="cleanup"/> +</xsl:template> + +<!-- Display names for references --> +<xsl:template match="displayreference" mode="cleanup"/> +<xsl:template match="reference/@anchor[.=/rfc/back/displayreference/@target]" mode="cleanup"> + <xsl:attribute name="anchor"> + <xsl:call-template name="generate-ref-name"/> + 
</xsl:attribute>
+</xsl:template>
+<xsl:template match="xref/@target[.=/rfc/back/displayreference/@target]" mode="cleanup">
+ <xsl:attribute name="target">
+ <xsl:call-template name="generate-ref-name"/>
+ </xsl:attribute>
+</xsl:template>
+
+<xsl:template name="generate-ref-name">
+ <xsl:variable name="tnewname">
+ <xsl:value-of select="/rfc/back/displayreference[@target=current()]/@to"/>
+ </xsl:variable>
+ <xsl:choose>
+ <xsl:when test="count(/rfc/back/displayreference[@to=current()])>1 or //reference[@anchor=$tnewname]">
+ <xsl:value-of select="current()"/>
+ <xsl:call-template name="warning">
+ <xsl:with-param name="msg">Not rewriting reference name <xsl:value-of select="current()"/> as it would conflict</xsl:with-param>
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:when test="translate(substring($tnewname,1,1),$digits,'')=''">
+ <xsl:value-of select="concat('_',$tnewname)"/>
+ <xsl:call-template name="warning">
+ <xsl:with-param name="msg">rewriting reference name '<xsl:value-of select="$tnewname"/>' to '<xsl:value-of select="concat('_',$tnewname)"/>' due to illegal start character</xsl:with-param>
+ </xsl:call-template>
+ </xsl:when>
+ <xsl:otherwise>
+ <xsl:value-of select="$tnewname"/>
+ </xsl:otherwise>
+ </xsl:choose>
+</xsl:template>
+
+</xsl:transform>
\ No newline at end of file
diff --git a/test/fixtures/cache-tests/spec/lib/cssmap.xml b/test/fixtures/cache-tests/spec/lib/cssmap.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/cssmap.xml
@@ -0,0 +1,15 @@
+<css>
+ <map from="error" css="bg-danger"/>
+ <map from="fbbutton" css="btn btn-primary fbbutton"/>
+ <map from="feedback" css="btn btn-primary feedback"/>
+ <map from="header" css="table table-condensed header"/>
+ <map from="docstatus" css="alert alert-info"/>
+ <map from="noprint" css="hidden-print"/>
+ <map from="note" css="alert alert-warning"/>
+ <map from="tcenter" css="text-center"/>
+ <map from="left" css="text-left"/>
+ <map from="right" css="text-right"/>
+ <map from="reference" css="dl-horizontal"/>
+ <map from="tt" css="table table-condensed table-striped"/>
+ <map from="publishedasrfc" css="alert alert-danger"/>
+</css>
\ No newline at end of file
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.2119.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.2119.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.2119.xml
@@ -0,0 +1,13 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC2119' target='http://www.rfc-editor.org/info/rfc2119'>
+<front>
+<title>Key words for use in RFCs to Indicate Requirement Levels</title>
+<author initials='S.' surname='Bradner' fullname='S. Bradner'><organization /></author>
+<date year='1997' month='March' />
+<abstract><t>In many standards track documents several words are used to signify the requirements in the specification. These words are often capitalized. This document defines these words as they should be interpreted in IETF documents. This document specifies an Internet Best Current Practices for the Internet Community, and requests discussion and suggestions for improvements.</t></abstract>
+</front>
+<seriesInfo name='BCP' value='14'/>
+<seriesInfo name='RFC' value='2119'/>
+<seriesInfo name='DOI' value='10.17487/RFC2119'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.2818.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.2818.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.2818.xml
@@ -0,0 +1,12 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC2818' target='http://www.rfc-editor.org/info/rfc2818'>
+<front>
+<title>HTTP Over TLS</title>
+<author initials='E.' surname='Rescorla' fullname='E. Rescorla'><organization /></author>
+<date year='2000' month='May' />
+<abstract><t>This memo describes how to use Transport Layer Security (TLS) to secure Hypertext Transfer Protocol (HTTP) connections over the Internet. This memo provides information for the Internet community.</t></abstract>
+</front>
+<seriesInfo name='RFC' value='2818'/>
+<seriesInfo name='DOI' value='10.17487/RFC2818'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.3864.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.3864.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.3864.xml
@@ -0,0 +1,15 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC3864' target='https://www.rfc-editor.org/info/rfc3864'>
+<front>
+<title>Registration Procedures for Message Header Fields</title>
+<author initials='G.' surname='Klyne' fullname='G. Klyne'><organization /></author>
+<author initials='M.' surname='Nottingham' fullname='M. Nottingham'><organization /></author>
+<author initials='J.' surname='Mogul' fullname='J. Mogul'><organization /></author>
+<date year='2004' month='September' />
+<abstract><t>This specification defines registration procedures for the message header fields used by Internet mail, HTTP, Netnews and other applications. This document specifies an Internet Best Current Practices for the Internet Community, and requests discussion and suggestions for improvements.</t></abstract>
+</front>
+<seriesInfo name='BCP' value='90'/>
+<seriesInfo name='RFC' value='3864'/>
+<seriesInfo name='DOI' value='10.17487/RFC3864'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5023.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5023.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5023.xml
@@ -0,0 +1,13 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC5023' target='https://www.rfc-editor.org/info/rfc5023'>
+<front>
+<title>The Atom Publishing Protocol</title>
+<author initials='J.' surname='Gregorio' fullname='J. Gregorio' role='editor'><organization /></author>
+<author initials='B.' surname='de hOra' fullname='B. de hOra' role='editor'><organization /></author>
+<date year='2007' month='October' />
+<abstract><t>The Atom Publishing Protocol (AtomPub) is an application-level protocol for publishing and editing Web resources. The protocol is based on HTTP transfer of Atom-formatted representations. The Atom format is documented in the Atom Syndication Format. [STANDARDS-TRACK]</t></abstract>
+</front>
+<seriesInfo name='RFC' value='5023'/>
+<seriesInfo name='DOI' value='10.17487/RFC5023'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5226.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5226.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5226.xml
@@ -0,0 +1,13 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC5226' target='https://www.rfc-editor.org/info/rfc5226'>
+<front>
+<title>Guidelines for Writing an IANA Considerations Section in RFCs</title>
+<author initials='T.' surname='Narten' fullname='T. Narten'><organization /></author>
+<author initials='H.' surname='Alvestrand' fullname='H. Alvestrand'><organization /></author>
+<date year='2008' month='May' />
+<abstract><t>Many protocols make use of identifiers consisting of constants and other well-known values. Even after a protocol has been defined and deployment has begun, new values may need to be assigned (e.g., for a new option type in DHCP, or a new encryption or authentication transform for IPsec). To ensure that such quantities have consistent values and interpretations across all implementations, their assignment must be administered by a central authority. For IETF protocols, that role is provided by the Internet Assigned Numbers Authority (IANA).</t><t>In order for IANA to manage a given namespace prudently, it needs guidelines describing the conditions under which new values can be assigned or when modifications to existing values can be made. If IANA is expected to play a role in the management of a namespace, IANA must be given clear and concise instructions describing that role. This document discusses issues that should be considered in formulating a policy for assigning values to a namespace and provides guidelines for authors on the specific text that must be included in documents that place demands on IANA.</t><t>This document obsoletes RFC 2434. This document specifies an Internet Best Current Practices for the Internet Community, and requests discussion and suggestions for improvements.</t></abstract>
+</front>
+<seriesInfo name='RFC' value='5226'/>
+<seriesInfo name='DOI' value='10.17487/RFC5226'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5234.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5234.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5234.xml
@@ -0,0 +1,14 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC5234' target='https://www.rfc-editor.org/info/rfc5234'>
+<front>
+<title>Augmented BNF for Syntax Specifications: ABNF</title>
+<author initials='D.' surname='Crocker' fullname='D. Crocker' role='editor'><organization /></author>
+<author initials='P.' surname='Overell' fullname='P. Overell'><organization /></author>
+<date year='2008' month='January' />
+<abstract><t>Internet technical specifications often need to define a formal syntax. Over the years, a modified version of Backus-Naur Form (BNF), called Augmented BNF (ABNF), has been popular among many Internet specifications. The current specification documents ABNF. It balances compactness and simplicity with reasonable representational power. The differences between standard BNF and ABNF involve naming rules, repetition, alternatives, order-independence, and value ranges. This specification also supplies additional rule definitions and encoding for a core lexical analyzer of the type common to several Internet specifications. [STANDARDS-TRACK]</t></abstract>
+</front>
+<seriesInfo name='STD' value='68'/>
+<seriesInfo name='RFC' value='5234'/>
+<seriesInfo name='DOI' value='10.17487/RFC5234'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5246.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5246.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5246.xml
@@ -0,0 +1,13 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC5246' target='http://www.rfc-editor.org/info/rfc5246'>
+<front>
+<title>The Transport Layer Security (TLS) Protocol Version 1.2</title>
+<author initials='T.' surname='Dierks' fullname='T. Dierks'><organization /></author>
+<author initials='E.' surname='Rescorla' fullname='E. Rescorla'><organization /></author>
+<date year='2008' month='August' />
+<abstract><t>This document specifies Version 1.2 of the Transport Layer Security (TLS) protocol. The TLS protocol provides communications security over the Internet. The protocol allows client/server applications to communicate in a way that is designed to prevent eavesdropping, tampering, or message forgery. [STANDARDS-TRACK]</t></abstract>
+</front>
+<seriesInfo name='RFC' value='5246'/>
+<seriesInfo name='DOI' value='10.17487/RFC5246'/>
+</reference>
diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5280.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5280.xml
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5280.xml
@@ -0,0 +1,17 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<reference anchor='RFC5280' target='https://www.rfc-editor.org/info/rfc5280'>
+<front>
+<title>Internet X.509 Public Key Infrastructure Certificate and Certificate Revocation List (CRL) Profile</title>
+<author initials='D.' surname='Cooper' fullname='D. Cooper'><organization /></author>
+<author initials='S.' surname='Santesson' fullname='S. Santesson'><organization /></author>
+<author initials='S.' surname='Farrell' fullname='S. Farrell'><organization /></author>
+<author initials='S.' surname='Boeyen' fullname='S. Boeyen'><organization /></author>
+<author initials='R.' surname='Housley' fullname='R. Housley'><organization /></author>
+<author initials='W.' surname='Polk' fullname='W. Polk'><organization /></author>
+<date year='2008' month='May' />
+<abstract><t>This memo profiles the X.509 v3 certificate and X.509 v2 certificate revocation list (CRL) for use in the Internet. An overview of this approach and model is provided as an introduction. The X.509 v3 certificate format is described in detail, with additional information regarding the format and semantics of Internet name forms. Standard certificate extensions are described and two Internet-specific extensions are defined. A set of required certificate extensions is specified. The X.509 v2 CRL format is described in detail along with standard and Internet-specific extensions. An algorithm for X.509 certification path validation is described. An ASN.1 module and examples are provided in the appendices.
[STANDARDS-TRACK]</t></abstract> +</front> +<seriesInfo name='RFC' value='5280'/> +<seriesInfo name='DOI' value='10.17487/RFC5280'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5785.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5785.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.5785.xml @@ -0,0 +1,13 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC5785' target='http://www.rfc-editor.org/info/rfc5785'> +<front> +<title>Defining Well-Known Uniform Resource Identifiers (URIs)</title> +<author initials='M.' surname='Nottingham' fullname='M. Nottingham'><organization /></author> +<author initials='E.' surname='Hammer-Lahav' fullname='E. Hammer-Lahav'><organization /></author> +<date year='2010' month='April' /> +<abstract><t>This memo defines a path prefix for &quot;well-known locations&quot;, &quot;/.well-known/&quot;, in selected Uniform Resource Identifier (URI) schemes. [STANDARDS-TRACK]</t></abstract> +</front> +<seriesInfo name='RFC' value='5785'/> +<seriesInfo name='DOI' value='10.17487/RFC5785'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6066.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6066.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6066.xml @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC6066' target='https://www.rfc-editor.org/info/rfc6066'> +<front> +<title>Transport Layer Security (TLS) Extensions: Extension Definitions</title> +<author initials='D.' surname='Eastlake 3rd' fullname='D. Eastlake 3rd'><organization /></author> +<date year='2011' month='January' /> +<abstract><t>This document provides specifications for existing TLS extensions. It is a companion document for RFC 5246, &quot;The Transport Layer Security (TLS) Protocol Version 1.2&quot;. The extensions specified are server_name, max_fragment_length, client_certificate_url, trusted_ca_keys, truncated_hmac, and status_request. [STANDARDS-TRACK]</t></abstract> +</front> +<seriesInfo name='RFC' value='6066'/> +<seriesInfo name='DOI' value='10.17487/RFC6066'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6454.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6454.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6454.xml @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC6454' target='http://www.rfc-editor.org/info/rfc6454'> +<front> +<title>The Web Origin Concept</title> +<author initials='A.' surname='Barth' fullname='A. Barth'><organization /></author> +<date year='2011' month='December' /> +<abstract><t>This document defines the concept of an &quot;origin&quot;, which is often used as the scope of authority or privilege by user agents. Typically, user agents isolate content retrieved from different origins to prevent malicious web site operators from interfering with the operation of benign web sites. In addition to outlining the principles that underlie the concept of origin, this document details how to determine the origin of a URI and how to serialize an origin into a string. It also defines an HTTP header field, named &quot;Origin&quot;, that indicates which origins are associated with an HTTP request. 
[STANDARDS-TRACK]</t></abstract> +</front> +<seriesInfo name='RFC' value='6454'/> +<seriesInfo name='DOI' value='10.17487/RFC6454'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6960.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6960.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6960.xml @@ -0,0 +1,17 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC6960' target='https://www.rfc-editor.org/info/rfc6960'> +<front> +<title>X.509 Internet Public Key Infrastructure Online Certificate Status Protocol - OCSP</title> +<author initials='S.' surname='Santesson' fullname='S. Santesson'><organization /></author> +<author initials='M.' surname='Myers' fullname='M. Myers'><organization /></author> +<author initials='R.' surname='Ankney' fullname='R. Ankney'><organization /></author> +<author initials='A.' surname='Malpani' fullname='A. Malpani'><organization /></author> +<author initials='S.' surname='Galperin' fullname='S. Galperin'><organization /></author> +<author initials='C.' surname='Adams' fullname='C. Adams'><organization /></author> +<date year='2013' month='June' /> +<abstract><t>This document specifies a protocol useful in determining the current status of a digital certificate without requiring Certificate Revocation Lists (CRLs). Additional mechanisms addressing PKIX operational requirements are specified in separate documents. This document obsoletes RFCs 2560 and 6277. It also updates RFC 5912.</t></abstract> +</front> +<seriesInfo name='RFC' value='6960'/> +<seriesInfo name='DOI' value='10.17487/RFC6960'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6962.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6962.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.6962.xml @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC6962' target='https://www.rfc-editor.org/info/rfc6962'> +<front> +<title>Certificate Transparency</title> +<author initials='B.' surname='Laurie' fullname='B. Laurie'><organization /></author> +<author initials='A.' surname='Langley' fullname='A. Langley'><organization /></author> +<author initials='E.' surname='Kasper' fullname='E. Kasper'><organization /></author> +<date year='2013' month='June' /> +<abstract><t>This document describes an experimental protocol for publicly logging the existence of Transport Layer Security (TLS) certificates as they are issued or observed, in a manner that allows anyone to audit certificate authority (CA) activity and notice the issuance of suspect certificates as well as to audit the certificate logs themselves. 
The intent is that eventually clients would refuse to honor certificates that do not appear in a log, effectively forcing CAs to add all issued certificates to the logs.</t><t>Logs are network services that implement the protocol operations for submissions and queries that are defined in this document.</t></abstract> +</front> +<seriesInfo name='RFC' value='6962'/> +<seriesInfo name='DOI' value='10.17487/RFC6962'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7159.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7159.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7159.xml @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7159' target='http://www.rfc-editor.org/info/rfc7159'> +<front> +<title>The JavaScript Object Notation (JSON) Data Interchange Format</title> +<author initials='T.' surname='Bray' fullname='T. Bray' role='editor'><organization /></author> +<date year='2014' month='March' /> +<abstract><t>JavaScript Object Notation (JSON) is a lightweight, text-based, language-independent data interchange format. It was derived from the ECMAScript Programming Language Standard. JSON defines a small set of formatting rules for the portable representation of structured data.</t><t>This document removes inconsistencies with other specifications of JSON, repairs specification errors, and offers experience-based interoperability guidance.</t></abstract> +</front> +<seriesInfo name='RFC' value='7159'/> +<seriesInfo name='DOI' value='10.17487/RFC7159'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7230.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7230.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7230.xml @@ -0,0 +1,13 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7230' target='http://www.rfc-editor.org/info/rfc7230'> +<front> +<title>Hypertext Transfer Protocol (HTTP/1.1): Message Syntax and Routing</title> +<author initials='R.' surname='Fielding' fullname='R. Fielding' role='editor'><organization /></author> +<author initials='J.' surname='Reschke' fullname='J. Reschke' role='editor'><organization /></author> +<date year='2014' month='June' /> +<abstract><t>The Hypertext Transfer Protocol (HTTP) is a stateless application-level protocol for distributed, collaborative, hypertext information systems. This document provides an overview of HTTP architecture and its associated terminology, defines the &quot;http&quot; and &quot;https&quot; Uniform Resource Identifier (URI) schemes, defines the HTTP/1.1 message syntax and parsing requirements, and describes related security concerns for implementations.</t></abstract> +</front> +<seriesInfo name='RFC' value='7230'/> +<seriesInfo name='DOI' value='10.17487/RFC7230'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7232.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7232.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7232.xml @@ -0,0 +1,13 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7232' target='http://www.rfc-editor.org/info/rfc7232'> +<front> +<title>Hypertext Transfer Protocol (HTTP/1.1): Conditional Requests</title> +<author initials='R.' surname='Fielding' fullname='R. Fielding' role='editor'><organization /></author> +<author initials='J.' surname='Reschke' fullname='J. 
Reschke' role='editor'><organization /></author> +<date year='2014' month='June' /> +<abstract><t>The Hypertext Transfer Protocol (HTTP) is a stateless application- level protocol for distributed, collaborative, hypertext information systems. This document defines HTTP/1.1 conditional requests, including metadata header fields for indicating state changes, request header fields for making preconditions on such state, and rules for constructing the responses to a conditional request when one or more preconditions evaluate to false.</t></abstract> +</front> +<seriesInfo name='RFC' value='7232'/> +<seriesInfo name='DOI' value='10.17487/RFC7232'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7234.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7234.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7234.xml @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7234' target='http://www.rfc-editor.org/info/rfc7234'> +<front> +<title>Hypertext Transfer Protocol (HTTP/1.1): Caching</title> +<author initials='R.' surname='Fielding' fullname='R. Fielding' role='editor'><organization /></author> +<author initials='M.' surname='Nottingham' fullname='M. Nottingham' role='editor'><organization /></author> +<author initials='J.' surname='Reschke' fullname='J. Reschke' role='editor'><organization /></author> +<date year='2014' month='June' /> +<abstract><t>The Hypertext Transfer Protocol (HTTP) is a stateless \%application- level protocol for distributed, collaborative, hypertext information systems. This document defines HTTP caches and the associated header fields that control cache behavior or indicate cacheable response messages.</t></abstract> +</front> +<seriesInfo name='RFC' value='7234'/> +<seriesInfo name='DOI' value='10.17487/RFC7234'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7258.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7258.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7258.xml @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7258' target='http://www.rfc-editor.org/info/rfc7258'> +<front> +<title>Pervasive Monitoring Is an Attack</title> +<author initials='S.' surname='Farrell' fullname='S. Farrell'><organization /></author> +<author initials='H.' surname='Tschofenig' fullname='H. Tschofenig'><organization /></author> +<date year='2014' month='May' /> +<abstract><t>Pervasive monitoring is a technical attack that should be mitigated in the design of IETF protocols, where possible.</t></abstract> +</front> +<seriesInfo name='BCP' value='188'/> +<seriesInfo name='RFC' value='7258'/> +<seriesInfo name='DOI' value='10.17487/RFC7258'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7435.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7435.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7435.xml @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7435' target='http://www.rfc-editor.org/info/rfc7435'> +<front> +<title>Opportunistic Security: Some Protection Most of the Time</title> +<author initials='V.' surname='Dukhovni' fullname='V. 
Dukhovni'><organization /></author> +<date year='2014' month='December' /> +<abstract><t>This document defines the concept &quot;Opportunistic Security&quot; in the context of communications protocols. Protocol designs based on Opportunistic Security use encryption even when authentication is not available, and use authentication when possible, thereby removing barriers to the widespread use of encryption on the Internet.</t></abstract> +</front> +<seriesInfo name='RFC' value='7435'/> +<seriesInfo name='DOI' value='10.17487/RFC7435'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7540.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7540.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7540.xml @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7540' target='http://www.rfc-editor.org/info/rfc7540'> +<front> +<title>Hypertext Transfer Protocol Version 2 (HTTP/2)</title> +<author initials='M.' surname='Belshe' fullname='M. Belshe'><organization /></author> +<author initials='R.' surname='Peon' fullname='R. Peon'><organization /></author> +<author initials='M.' surname='Thomson' fullname='M. Thomson' role='editor'><organization /></author> +<date year='2015' month='May' /> +<abstract><t>This specification describes an optimized expression of the semantics of the Hypertext Transfer Protocol (HTTP), referred to as HTTP version 2 (HTTP/2). HTTP/2 enables a more efficient use of network resources and a reduced perception of latency by introducing header field compression and allowing multiple concurrent exchanges on the same connection. It also introduces unsolicited push of representations from servers to clients.</t><t>This specification is an alternative to, but does not obsolete, the HTTP/1.1 message syntax. HTTP's existing semantics remain unchanged.</t></abstract> +</front> +<seriesInfo name='RFC' value='7540'/> +<seriesInfo name='DOI' value='10.17487/RFC7540'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7838.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7838.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.7838.xml @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC7838' target='http://www.rfc-editor.org/info/rfc7838'> +<front> +<title>HTTP Alternative Services</title> +<author initials='M.' surname='Nottingham' fullname='M. Nottingham'><organization /></author> +<author initials='P.' surname='McManus' fullname='P. McManus'><organization /></author> +<author initials='J.' surname='Reschke' fullname='J. 
Reschke'><organization /></author> +<date year='2016' month='April' /> +<abstract><t>This document specifies &quot;Alternative Services&quot; for HTTP, which allow an origin's resources to be authoritatively available at a separate network location, possibly accessed with a different protocol configuration.</t></abstract> +</front> +<seriesInfo name='RFC' value='7838'/> +<seriesInfo name='DOI' value='10.17487/RFC7838'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.8174.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.8174.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.8174.xml @@ -0,0 +1,13 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC8174' target='https://www.rfc-editor.org/info/rfc8174'> +<front> +<title>Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words</title> +<author initials='B.' surname='Leiba' fullname='B. Leiba'><organization /></author> +<date year='2017' month='May' /> +<abstract><t>RFC 2119 specifies common key words that may be used in protocol specifications. This document aims to reduce the ambiguity by clarifying that only UPPERCASE usage of the key words have the defined special meanings.</t></abstract> +</front> +<seriesInfo name='BCP' value='14'/> +<seriesInfo name='RFC' value='8174'/> +<seriesInfo name='DOI' value='10.17487/RFC8174'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.8288.xml b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.8288.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/ref/reference.RFC.8288.xml @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='UTF-8'?> + +<reference anchor='RFC8288' target='https://www.rfc-editor.org/info/rfc8288'> +<front> +<title>Web Linking</title> +<author initials='M.' surname='Nottingham' fullname='M. Nottingham'><organization /></author> +<date year='2017' month='October' /> +<abstract><t>This specification defines a model for the relationships between resources on the Web (&quot;links&quot;) and the type of those relationships (&quot;link relation types&quot;).</t><t>It also defines the serialisation of such links in HTTP headers with the Link header field.</t></abstract> +</front> +<seriesInfo name='RFC' value='8288'/> +<seriesInfo name='DOI' value='10.17487/RFC8288'/> +</reference> diff --git a/test/fixtures/cache-tests/spec/lib/rfc2629-no-doctype.xslt b/test/fixtures/cache-tests/spec/lib/rfc2629-no-doctype.xslt new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/rfc2629-no-doctype.xslt @@ -0,0 +1,13505 @@ +<!-- + XSLT transformation from RFC2629 XML format to HTML + + Copyright (c) 2006-2020, Julian Reschke (julian.reschke@greenbytes.de) + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Julian Reschke nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +--> + +<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" + version="2.0" + + xmlns:date="http://exslt.org/dates-and-times" + xmlns:ed="http://greenbytes.de/2002/rfcedit" + xmlns:exslt="http://exslt.org/common" + xmlns:fo="http://www.w3.org/1999/XSL/Format" + xmlns:msxsl="urn:schemas-microsoft-com:xslt" + xmlns:myns="mailto:julian.reschke@greenbytes.de?subject=rfc2629.xslt" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:saxon="http://saxon.sf.net/" + xmlns:saxon-old="http://icl.com/saxon" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:x="http://purl.org/net/xml2rfc/ext" + xmlns:xi="http://www.w3.org/2001/XInclude" + xmlns:xhtml="http://www.w3.org/1999/xhtml" + + exclude-result-prefixes="date ed exslt fo msxsl myns rdf saxon saxon-old svg x xi xhtml" + > + +<xsl:strip-space elements="abstract address artset aside author back boilerplate dl figure front list middle note ol postal reference references rfc section table tbody thead tr texttable ul svg:svg"/> + + +<!-- PIs outside the root element, or inside the root element but before <front> --> +<xsl:variable name="global-std-pis" select="/processing-instruction('rfc') | /*/processing-instruction('rfc')[following-sibling::front]"/> + +<!-- rfc authorship PI --> + +<xsl:param name="xml2rfc-authorship"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'authorship'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc comments PI --> + +<xsl:param name="xml2rfc-comments"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'comments'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc compact PI --> + +<xsl:param name="xml2rfc-compact"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'compact'"/> + <xsl:with-param name="default" select="$xml2rfc-rfcedstyle"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc footer PI --> + +<xsl:param name="xml2rfc-footer"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'footer'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc header PI --> + +<xsl:param name="xml2rfc-header"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'header'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc inline PI --> + +<xsl:param name="xml2rfc-inline"> + <xsl:call-template name="parse-pis"> + <xsl:with-param 
name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'inline'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- include a table of contents if a processing instruction <?rfc?> + exists with contents toc="yes". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-toc"> + <xsl:variable name="default"> + <xsl:choose> + <xsl:when test="/rfc/@version >= 3">yes</xsl:when> + <xsl:otherwise>no</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="/rfc/@tocInclude='false'">no</xsl:when> + <xsl:when test="/rfc/@tocInclude='true'">yes</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'toc'"/> + <xsl:with-param name="default" select="$default"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- optional tocdepth--> + +<xsl:param name="xml2rfc-tocdepth"> + <xsl:choose> + <xsl:when test="/rfc/@tocDepth"> + <xsl:value-of select="/rfc/@tocDepth"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'tocdepth'"/> + <xsl:with-param name="default" select="'3'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<xsl:variable name="parsedTocDepth"> + <xsl:choose> + <xsl:when test="$xml2rfc-tocdepth='1'">1</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='2'">2</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='3'">3</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='4'">4</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='5'">5</xsl:when> + <xsl:otherwise>99</xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<!-- suppress top block if a processing instruction <?rfc?> + exists with contents tocblock="no". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-topblock"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'topblock'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- Format to the RFC Editor's taste --> + +<xsl:param name="xml2rfc-rfcedstyle"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'rfcedstyle'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- the name of an automatically inserted references section --> + +<xsl:param name="xml2rfc-refparent"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'refparent'"/> + <xsl:with-param name="default" select="'References'"/> + </xsl:call-template> +</xsl:param> + +<!-- use symbolic reference names instead of numeric ones unless a processing instruction <?rfc?> + exists with contents symrefs="no". 
Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-symrefs"> + <xsl:choose> + <xsl:when test="/rfc/@symRefs='false'">no</xsl:when> + <xsl:when test="/rfc/@symRefs='true'">yes</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'symrefs'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- sort references if a processing instruction <?rfc?> + exists with contents sortrefs="yes". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-sortrefs"> + <xsl:choose> + <xsl:when test="/rfc/@sortRefs='true'">yes</xsl:when> + <xsl:when test="/rfc/@sortRefs='false'">no</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'sortrefs'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- insert editing marks if a processing instruction <?rfc?> + exists with contents editing="yes". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-editing"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'editing'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- make it a private paper --> + +<xsl:param name="xml2rfc-private"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'private'"/> + </xsl:call-template> +</xsl:param> + +<!-- background image? --> + +<xsl:param name="xml2rfc-background"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'background'"/> + </xsl:call-template> +</xsl:param> + +<!-- override CSS? 
--> + +<xsl:param name="xml2rfc-ext-css-resource"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'css-resource'"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-css-contents"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'css-contents'"/> + </xsl:call-template> +</xsl:param> + +<!-- CSS max page width --> + +<xsl:param name="xml2rfc-ext-maxwidth"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'maxwidth'"/> + <xsl:with-param name="default" select="'1000'"/> + </xsl:call-template> +</xsl:param> + +<xsl:variable name="parsedMaxwidth"> + <xsl:choose> + <xsl:when test="string(number($xml2rfc-ext-maxwidth)) != 'NaN'"> + <xsl:value-of select="$xml2rfc-ext-maxwidth"/> + </xsl:when> + <xsl:when test="$xml2rfc-ext-maxwidth='none'"></xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Unsupported value of rfc-ext maxwidth PI: ', $xml2rfc-ext-maxwidth)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<!-- CSS styles --> + +<xsl:param name="xml2rfc-ext-styles">fft-sans-serif ffb-serif ff-cleartype</xsl:param> + +<xsl:variable name="styles" select="concat(' ',normalize-space($xml2rfc-ext-styles),' ')"/> + +<xsl:param name="xml2rfc-ext-ff-body"> + <xsl:variable name="t"> + <xsl:if test="contains($styles,' ff-noto ')"> + <xsl:if test="contains($styles,' ffb-serif ')"> + 'Noto Serif', + </xsl:if> + <xsl:if test="contains($styles,' ffb-sans-serif ')"> + 'Noto Sans', + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' ff-cleartype ')"> + <xsl:if test="contains($styles,' ffb-serif ')"> + cambria, georgia, + </xsl:if> + <xsl:if test="contains($styles,' ffb-sans-serif ')"> + candara, calibri, + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' ffb-sans-serif ')"> + segoe, optima, arial, sans-serif, + </xsl:if> + serif + </xsl:variable> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="normalize-space($t)"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-ff-title"> + <xsl:variable name="t"> + <xsl:if test="contains($styles,' ff-noto ')"> + <xsl:if test="contains($styles,' fft-serif ')"> + 'Noto Serif', + </xsl:if> + <xsl:if test="contains($styles,' fft-sans-serif ')"> + 'Noto Sans', + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' ff-cleartype ')"> + <xsl:if test="contains($styles,' fft-serif ')"> + cambria, georgia, + </xsl:if> + <xsl:if test="contains($styles,' fft-sans-serif ')"> + candara, calibri, + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' fft-serif ')"> + serif, + </xsl:if> + <xsl:if test="contains($styles,' fft-sans-serif ')"> + segoe, optima, arial, + </xsl:if> + sans-serif + </xsl:variable> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="normalize-space($t)"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-ff-pre"> + <xsl:variable name="t"> + <xsl:if test="contains($styles,' ff-noto ')"> + 'Roboto Mono', + </xsl:if> + <xsl:if test="contains($styles,' ff-cleartype ')"> + consolas, monaco, + </xsl:if> + monospace + </xsl:variable> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="normalize-space($t)"/> + </xsl:call-template> 
+</xsl:param> + +<xsl:param name="xml2rfc-ext-webfonts"> + <xsl:if test="contains($styles,' ff-noto ')"> + <xsl:if test="contains($styles,' ffb-sans-serif ') or contains($styles,' fft-sans-serif ')"> + <xsl:text>@import url('https://fonts.googleapis.com/css?family=Noto+Sans:r,b,i,bi');&#10;</xsl:text> + </xsl:if> + <xsl:if test="contains($styles,' ffb-serif ') or contains($styles,' fft-serif ')"> + <xsl:text>@import url('https://fonts.googleapis.com/css?family=Noto+Serif:r,b,i,bi');&#10;</xsl:text> + </xsl:if> + <xsl:text>@import url('https://fonts.googleapis.com/css?family=Roboto+Mono:r,b,i,bi');&#10;</xsl:text> + </xsl:if> +</xsl:param> + +<xsl:param name="xml2rfc-ext-dark-mode"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'dark-mode'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<xsl:template name="ff-list"> + <xsl:param name="s"/> + <xsl:choose> + <xsl:when test="not(contains($s,','))"> + <xsl:value-of select="normalize-space($s)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(substring-before($s,','))"/> + <xsl:text>, </xsl:text> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="substring-after($s,',')"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- include PI --> + +<xsl:template name="getIncludes"> + <xsl:param name="nodes"/> + <xsl:for-each select="$nodes"> + <xsl:variable name="uri1"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="."/> + <xsl:with-param name="attr" select="'include'"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$uri1!=''"> + <xsl:variable name="tbase" select="substring-before($uri1, '?')"/> + <xsl:variable name="base"><xsl:choose><xsl:when test="$tbase!=''"><xsl:value-of select="$tbase"/></xsl:when><xsl:otherwise><xsl:value-of select="$uri1"/></xsl:otherwise></xsl:choose></xsl:variable> + <xsl:variable name="tquery" select="substring-after($uri1, '?')"/> + <xsl:variable name="query"><xsl:if test="$tquery!=''">?</xsl:if><xsl:value-of select="$tquery"/></xsl:variable> + <xsl:variable name="ends-with-xml" select="substring($base, string-length($base)-3)='.xml'"/> + <xsl:variable name="for-draft" select="contains($base,'reference.I-D')"/> + <xsl:variable name="uri2" select="concat($base,'.xml',$query)"/> + <xsl:variable name="uri3r" select="concat($toolsBaseUriForRFCReferences,$base,$query)"/> + <xsl:variable name="uri4r" select="concat($toolsBaseUriForRFCReferences,$base,'.xml',$query)"/> + <xsl:variable name="uri3i" select="concat($toolsBaseUriForIDReferences,$base,$query)"/> + <xsl:variable name="uri4i" select="concat($toolsBaseUriForIDReferences,$base,'.xml',$query)"/> + <xsl:choose> + <xsl:when test="not($ends-with-xml) and document($uri2)/reference"> + <xsl:call-template name="include-uri-warning"> + <xsl:with-param name="specified" select="$uri1"/> + <xsl:with-param name="success" select="$uri2"/> + </xsl:call-template> + <myns:include from="{$uri2}" in="{generate-id(..)}"> + <xsl:copy-of select="document($uri2)"/> + </myns:include> + </xsl:when> + <xsl:when test="document($uri1)/reference"> + <myns:include from="{$uri1}" in="{generate-id(..)}"> + <xsl:copy-of select="document($uri1)"/> + </myns:include> + </xsl:when> + <xsl:when test="not($ends-with-xml) and $for-draft and not(contains($uri1,':')) and document($uri4i)/reference"> + <xsl:call-template name="include-uri-warning"> 
+ <xsl:with-param name="specified" select="$uri1"/> + <xsl:with-param name="success" select="$uri4i"/> + </xsl:call-template> + <myns:include from="{$uri4i}" in="{generate-id(..)}"> + <xsl:copy-of select="document($uri4i)"/> + </myns:include> + </xsl:when> + <xsl:when test="not(contains($uri1,':')) and $for-draft and document($uri3i)/reference"> + <xsl:call-template name="include-uri-warning"> + <xsl:with-param name="specified" select="$uri1"/> + <xsl:with-param name="success" select="$uri3i"/> + </xsl:call-template> + <myns:include from="{$uri3i}" in="{generate-id(..)}"> + <xsl:copy-of select="document($uri3i)"/> + </myns:include> + </xsl:when> + <xsl:when test="not($ends-with-xml) and not(contains($uri1,':')) and document($uri4r)/reference"> + <xsl:call-template name="include-uri-warning"> + <xsl:with-param name="specified" select="$uri1"/> + <xsl:with-param name="success" select="$uri4r"/> + </xsl:call-template> + <myns:include from="{$uri4r}" in="{generate-id(..)}"> + <xsl:copy-of select="document($uri4r)"/> + </myns:include> + </xsl:when> + <xsl:when test="not(contains($uri1,':')) and document($uri3r)/reference"> + <xsl:call-template name="include-uri-warning"> + <xsl:with-param name="specified" select="$uri1"/> + <xsl:with-param name="success" select="$uri3r"/> + </xsl:call-template> + <myns:include from="{$uri3r}" in="{generate-id(..)}"> + <xsl:copy-of select="document($uri3r)"/> + </myns:include> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:if> + </xsl:for-each> +</xsl:template> + +<xsl:template name="include-uri-warning"> + <xsl:param name="specified"/> + <xsl:param name="success"/> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">include succeeded for best-guess URI <xsl:value-of select="$success"/> while <xsl:value-of select="$specified"/> was specified - you may want to adjust the include directive in order to avoid future warnings</xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="getXIncludes"> + <xsl:param name="nodes"/> + <xsl:for-each select="$nodes"> + <xsl:choose> + <xsl:when test="(@parse and @parse!='xml') or @xpointer"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'Unsupported attributes on x:include element'"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="uri"> + <!--<xsl:choose> + <xsl:when test="starts-with(@href,'https://xml2rfc.ietf.org/public/rfc/')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">rewriting URI to /xml2rfc.tools.ietf.org for <xsl:value-of select="@href"/> - see in order to avoid broken server's 403 response (see https://mailarchive.ietf.org/arch/msg/xml2rfc/56sDqFVKF0baqdgEjHQtxOUMf4o).</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="concat('https://xml2rfc.tools.ietf.org/public/rfc/',substring-after(@href,'https://xml2rfc.ietf.org/public/rfc/'))"/> + </xsl:when> + <xsl:otherwise>--> + <xsl:value-of select="@href"/> + <!--</xsl:otherwise> + </xsl:choose>--> + </xsl:variable> + <xsl:variable name="doc"> + <xsl:copy-of select="document($uri)"/> + </xsl:variable> + <xsl:if test="count(exslt:node-set($doc)) = 1"> + <myns:include from="{@href}" in="{generate-id(..)}"> + <xsl:copy-of select="$doc"/> + </myns:include> + </xsl:if> + <xsl:for-each select="exslt:node-set($doc)//xi:include"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'Nested x:include elements are not supported'"/> + </xsl:call-template> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> 
+</xsl:template> + +<xsl:variable name="includeDirectives"> + <xsl:call-template name="getIncludes"> + <xsl:with-param name="nodes" select="/rfc/back//references/processing-instruction('rfc')|/rfc/back//references/referencegroup/processing-instruction('rfc')"/> + </xsl:call-template> + <xsl:call-template name="getXIncludes"> + <xsl:with-param name="nodes" select="/rfc/back//references/xi:include|/rfc/back//references/referencegroup/xi:include"/> + </xsl:call-template> +</xsl:variable> + +<xsl:variable name="sourcedReferences"> + <xsl:for-each select="//reference[x:source/@href and not(seriesInfo)]"> + <xsl:copy> + <xsl:variable name="f" select="document(x:source/@href)"/> + <xsl:if test="$f/rfc/@number" myns:namespaceless-elements="xml2rfc"> + <seriesInfo name="RFC" value="{$f/rfc/@number}"/> + </xsl:if> + <xsl:if test="$f/rfc/@docName" myns:namespaceless-elements="xml2rfc"> + <seriesInfo name="Internet-Draft" value="{$f/@docName}"/> + </xsl:if> + </xsl:copy> + </xsl:for-each> +</xsl:variable> + +<!-- logging --> + +<xsl:param name="xml2rfc-ext-log-level"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'log-level'"/> + <xsl:with-param name="default" select="'WARNING'"/> + </xsl:call-template> +</xsl:param> + +<xsl:variable name="log-level"> + <xsl:call-template name="parse-log-level"> + <xsl:with-param name="level" select="$xml2rfc-ext-log-level"/> + </xsl:call-template> +</xsl:variable> + +<xsl:param name="xml2rfc-ext-abort-on"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'abort-on'"/> + <xsl:with-param name="default" select="'OFF'"/> + </xsl:call-template> +</xsl:param> + +<xsl:variable name="abort-log-level"> + <xsl:call-template name="parse-log-level"> + <xsl:with-param name="level" select="$xml2rfc-ext-abort-on"/> + </xsl:call-template> +</xsl:variable> + +<xsl:template name="parse-log-level"> + <xsl:param name="level"/> + <xsl:choose> + <xsl:when test="$level='OFF'">6</xsl:when> + <xsl:when test="$level='FATAL'">5</xsl:when> + <xsl:when test="$level='ERROR'">4</xsl:when> + <xsl:when test="$level='WARNING'">3</xsl:when> + <xsl:when test="$level='INFO'">2</xsl:when> + <xsl:when test="$level='DEBUG'">1</xsl:when> + <xsl:when test="$level='TRACE'">0</xsl:when> + <xsl:otherwise> + <xsl:message>Unsupported LOG level '<xsl:value-of select="$level"/>', defaulting to 'WARNING'</xsl:message> + <xsl:value-of select="'3'"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- prettyprinting --> + +<xsl:param name="xml2rfc-ext-html-pretty-print"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'html-pretty-print'"/> + </xsl:call-template> +</xsl:param> + +<xsl:variable name="prettyprint-class"> + <xsl:if test="$xml2rfc-ext-html-pretty-print"> + <xsl:value-of select="substring-before(normalize-space($xml2rfc-ext-html-pretty-print),' ')"/> + </xsl:if> +</xsl:variable> + +<xsl:variable name="prettyprint-script"> + <xsl:if test="$xml2rfc-ext-html-pretty-print"> + <xsl:value-of select="substring-after(normalize-space($xml2rfc-ext-html-pretty-print),' ')"/> + </xsl:if> +</xsl:variable> + +<!-- Unicode database --> +<xsl:param name="xml2rfc-ext-ucd-file"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + 
<xsl:with-param name="attr" select="'ucd-file'"/> + </xsl:call-template> +</xsl:param> + +<!-- external resource containing errata, as generated by parse-errata.xslt --> +<xsl:param name="xml2rfc-ext-errata"/> +<xsl:variable name="errata-parsed" select="document($xml2rfc-ext-errata)//erratum[@status!='Rejected']"/> + +<!-- "remove in RFC phrases" --> +<xsl:variable name="note-removeInRFC">This note is to be removed before publishing as an RFC.</xsl:variable> +<xsl:variable name="section-removeInRFC">This section is to be removed before publishing as an RFC.</xsl:variable> + +<!-- constant string for unnumbered parts --> +<xsl:variable name="unnumbered">unnumbered-</xsl:variable> + +<!-- CSS class name remapping --> + +<xsl:param name="xml2rfc-ext-css-map"/> + +<xsl:template name="generate-css-class"> + <xsl:param name="name"/> + <xsl:variable name="cssmap" select="document($xml2rfc-ext-css-map)"/> + <xsl:variable name="entry" select="$cssmap/*/map[@from=$name]"/> + <xsl:choose> + <xsl:when test="$entry"> + <xsl:value-of select="$entry/@css"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$name"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- WORK IN PROGRESS; ONLY A FEW CLASSES SUPPORTED FOR NOW --> +<xsl:variable name="css-artwork"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'artwork'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-art-svg"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'art-svg'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-docstatus"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'docstatus'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-center"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'center'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-erratum"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'erratum'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-error"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'error'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-fbbutton"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'fbbutton'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-feedback"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'feedback'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-header"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'header'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-left"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'left'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-noprint"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'noprint'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-publishedasrfc"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'publishedasrfc'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-reference"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'reference'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-right"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'right'"/></xsl:call-template></xsl:variable> +<xsl:variable 
name="css-tcenter"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tcenter'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tleft"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tleft'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tright"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tright'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tt"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tt'"/></xsl:call-template></xsl:variable> + + +<!-- RFC-Editor site linking --> + +<xsl:param name="xml2rfc-ext-link-rfc-to-info-page"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'link-rfc-to-info-page'"/> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201503">yes</xsl:when> + <xsl:otherwise>no</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> +</xsl:param> + +<!-- DOI insertion --> + +<xsl:param name="xml2rfc-ext-insert-doi"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'insert-doi'"/> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201505">yes</xsl:when> + <xsl:otherwise>no</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> +</xsl:param> + +<!-- initials handling? --> + +<xsl:param name="xml2rfc-multiple-initials"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'multiple-initials'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- paragraph links? 
--> + +<xsl:param name="xml2rfc-ext-paragraph-links"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'paragraph-links'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for XML parsing in artwork --> + +<xsl:param name="xml2rfc-ext-parse-xml-in-artwork"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'parse-xml-in-artwork'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-trace-parse-xml"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'trace-parse-xml'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for excluding the index --> + +<xsl:param name="xml2rfc-ext-include-index"> + <xsl:choose> + <xsl:when test="/rfc/@indexInclude='false'">no</xsl:when> + <xsl:when test="/rfc/@indexInclude='true'">yes</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'include-index'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- extension for inserting RFC metadata --> + +<xsl:param name="xml2rfc-ext-insert-metadata"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'insert-metadata'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for excluding DCMI properties in meta tag (RFC2731) --> + +<xsl:param name="xml2rfc-ext-support-rfc2731"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'support-rfc2731'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for excluding generator information --> + +<xsl:param name="xml2rfc-ext-include-generator"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'include-generator'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for specifying the value for <vspace> after which it's taken as a page break --> + +<xsl:param name="xml2rfc-ext-vspace-pagebreak"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'vspace-pagebreak'"/> + <xsl:with-param name="default" select="'100'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for allowing markup inside artwork --> + +<xsl:param name="xml2rfc-ext-allow-markup-in-artwork"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'allow-markup-in-artwork'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for including references into index --> + +<xsl:param 
name="xml2rfc-ext-include-references-in-index"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'include-references-in-index'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for switching the behaviour for xrefs with text content --> +<!-- 'text': as in text output, 'nothing': just the link --> + +<xsl:param name="xml2rfc-ext-xref-with-text-generate"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'xref-with-text-generate-text'"/> + <xsl:with-param name="default" select="'text'"/> + </xsl:call-template> +</xsl:param> + +<!-- position of author's section --> + +<xsl:param name="xml2rfc-ext-authors-section"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'authors-section'"/> + <xsl:with-param name="default" select="'end'"/> + </xsl:call-template> +</xsl:param> + +<!-- justification? --> + +<xsl:param name="xml2rfc-ext-justification"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'justification'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- switch for doublesided layout --> + +<xsl:param name="xml2rfc-ext-duplex"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'duplex'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- trailing dots in section numbers --> + +<xsl:param name="xml2rfc-ext-sec-no-trailing-dots"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'sec-no-trailing-dots'"/> + </xsl:call-template> +</xsl:param> + +<!-- check artwork width? --> + +<xsl:param name="xml2rfc-ext-check-artwork-width"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'check-artwork-width'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- choose whether or not to do mailto links --> + +<xsl:param name="xml2rfc-linkmailto"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'linkmailto'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- iprnotified switch --> + +<xsl:param name="xml2rfc-iprnotified"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'iprnotified'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- URL templates for RFCs and Internet Drafts. --> + +<!-- Reference the marked up versions over on https://tools.ietf.org/html. 
--> +<xsl:param name="rfcUrlFragSection" select="'section-'" /> +<xsl:param name="rfcUrlFragAppendix" select="'appendix-'" /> +<xsl:param name="internetDraftUrlFragSection" select="'section-'" /> +<xsl:param name="internetDraftUrlFragAppendix" select="'appendix-'" /> + +<!-- base URI for include directive when relative reference does not resolve for RFCs --> +<xsl:param name="toolsBaseUriForRFCReferences">https://xml2rfc.tools.ietf.org/public/rfc/bibxml/</xsl:param> + +<!-- base URI for include directive when relative reference does not resolve for Intetnet Drafts --> +<xsl:param name="toolsBaseUriForIDReferences">https://xml2rfc.tools.ietf.org/public/rfc/bibxml-ids/</xsl:param> + +<!--templates for URI calculation --> + +<xsl:param name="xml2rfc-ext-isbn-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'isbn-uri'"/> + <xsl:with-param name="default">https://www.worldcat.org/search?q=isbn:{isbn}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-isbn-uri"> + <xsl:param name="isbn"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-isbn-uri"/> + <xsl:with-param name="replace" select="'{isbn}'"/> + <xsl:with-param name="by" select="translate($isbn,'-','')"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-uri'"/> + <!-- previously 'https://tools.ietf.org/html/rfc{rfc}' --> + <xsl:with-param name="default">https://www.rfc-editor.org/rfc/rfc{rfc}.html</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-rfc-uri"> + <xsl:param name="rfc"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-uri"/> + <xsl:with-param name="replace" select="'{rfc}'"/> + <xsl:with-param name="by" select="$rfc"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-internet-draft-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'internet-draft-uri'"/> + <xsl:with-param name="default">https://tools.ietf.org/html/{internet-draft}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-internet-draft-uri"> + <xsl:param name="internet-draft"/> + <xsl:param name="ref" select="."/> + <xsl:variable name="local-link-template"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$ref/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'internet-draft-uri'"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$local-link-template!=''"> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$local-link-template"/> + <xsl:with-param name="replace" select="'{internet-draft}'"/> + <xsl:with-param name="by" select="$internet-draft"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-internet-draft-uri"/> + <xsl:with-param name="replace" select="'{internet-draft}'"/> + <xsl:with-param name="by" select="$internet-draft"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:param 
name="xml2rfc-ext-diff-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'diff-uri'"/> + <xsl:with-param name="default">https://tools.ietf.org/rfcdiff?url2={internet-draft}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-diff-uri"> + <xsl:param name="name"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-diff-uri"/> + <xsl:with-param name="replace" select="'{internet-draft}'"/> + <xsl:with-param name="by" select="$name"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-latest-diff-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'latest-diff-uri'"/> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-latest-diff-uri"> + <xsl:param name="name"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-latest-diff-uri"/> + <xsl:with-param name="replace" select="'{internet-draft}'"/> + <xsl:with-param name="by" select="$name"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-doi-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'doi-uri'"/> + <xsl:with-param name="default">https://dx.doi.org/{doi}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-doi-uri"> + <xsl:param name="doi"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-doi-uri"/> + <xsl:with-param name="replace" select="'{doi}'"/> + <xsl:with-param name="by" select="$doi"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-info-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-info-uri'"/> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="$pub-yearmonth &lt; 201708">http://www.rfc-editor.org/info/{type}{no}</xsl:when> + <xsl:otherwise>https://www.rfc-editor.org/info/{type}{no}</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-rfc-info-uri"> + <xsl:param name="type"/> + <xsl:param name="no"/> + <xsl:variable name="t"> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-info-uri"/> + <xsl:with-param name="replace" select="'{type}'"/> + <xsl:with-param name="by" select="$type"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$t"/> + <xsl:with-param name="replace" select="'{no}'"/> + <xsl:with-param name="by" select="$no"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-erratum-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-erratum-uri'"/> + <xsl:with-param name="default">https://www.rfc-editor.org/errata/eid{eid}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-rfc-erratum-uri"> + <xsl:param name="eid"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" 
select="$xml2rfc-ext-rfc-erratum-uri"/> + <xsl:with-param name="replace" select="'{eid}'"/> + <xsl:with-param name="by" select="$eid"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-errata-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-errata-uri'"/> + <xsl:with-param name="default">https://www.rfc-editor.org/errata/rfc{rfc}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-draft-status-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'draft-status-uri'"/> + <xsl:with-param name="default">https://datatracker.ietf.org/doc/{draftname}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-draft-status-uri"> + <xsl:param name="draftname"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-draft-status-uri"/> + <xsl:with-param name="replace" select="'{draftname}'"/> + <xsl:with-param name="by" select="$draftname"/> + </xsl:call-template> +</xsl:template> + +<!-- the format we're producing --> +<xsl:param name="outputExtension" select="'html'"/> + +<!-- source for autorefresh --> +<xsl:param name="xml2rfc-ext-refresh-from"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'refresh-from'"/> + <xsl:with-param name="default" select="''"/> + </xsl:call-template> +</xsl:param> + +<!-- XSLT for autorefresh --> +<xsl:param name="xml2rfc-ext-refresh-xslt"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'refresh-xslt'"/> + <xsl:with-param name="default" select="'rfc2629.xslt'"/> + </xsl:call-template> +</xsl:param> + +<!-- interval for autorefresh --> +<xsl:param name="xml2rfc-ext-refresh-interval"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'refresh-interval'"/> + <xsl:with-param name="default" select="10"/> + </xsl:call-template> +</xsl:param> + +<!-- for testing: switch to disable code that gets the system time --> +<xsl:param name="xml2rfc-ext-use-system-time"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'use-system-time'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- warning re: absent node-set ext. function --> +<xsl:variable name="node-set-warning"> + This stylesheet requires either an XSLT-1.0 processor with node-set() + extension function, or an XSLT-2.0 processor. Therefore, parts of the + document couldn't be displayed. 
+</xsl:variable> + +<!-- character translation tables --> +<xsl:variable name="lcase" select="'abcdefghijklmnopqrstuvwxyz'" /> +<xsl:variable name="ucase" select="'ABCDEFGHIJKLMNOPQRSTUVWXYZ'" /> +<xsl:variable name="digits" select="'0123456789'" /> +<xsl:variable name="alpha" select="concat($lcase,$ucase)"/> +<xsl:variable name="alnum" select="concat($alpha,$digits)"/> + +<!-- build help keys for indices --> +<xsl:key name="index-first-letter" + match="iref|reference" + use="translate(substring(concat(/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor,@item),1,1),'abcdefghijklmnopqrstuvwxyz','ABCDEFGHIJKLMNOPQRSTUVWXYZ')" /> + +<xsl:key name="index-item" + match="iref" + use="@item" /> + +<xsl:key name="index-item-subitem" + match="iref" + use="concat(@item,'..',@subitem)" /> + +<xsl:key name="index-xref-by-sec" + match="xref[@x:sec]|relref[@section]" + use="concat(@target,'..',@x:sec,@section)" /> + +<xsl:key name="index-xref-by-anchor" + match="xref[@x:rel]|relref[@relative]" + use="concat(@target,'..',@x:rel,@relative)" /> + +<xsl:key name="anchor-item" + match="//*[@anchor]" + use="@anchor"/> + +<xsl:key name="xref-item" + match="//xref|//relref" + use="@target"/> + +<xsl:key name="extref-item" + match="//x:ref" + use="."/> + +<!-- prefix for automatically generated anchors --> +<xsl:variable name="anchor-pref" select="'rfc.'" /> + +<!-- IPR version switch --> +<xsl:variable name="ipr-rfc3667" select="( + number($rfcno) &gt; 3708) or + not( + (/rfc/@ipr = 'full2026') or + (/rfc/@ipr = 'noDerivativeWorks2026') or + (/rfc/@ipr = 'noDerivativeWorksNow') or + (/rfc/@ipr = 'none') or + (/rfc/@ipr = '') or + not(/rfc/@ipr) + )" /> + +<xsl:variable name="draft-fullname" select="/rfc/@docName"/> + +<xsl:variable name="draft-seq"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="$draft-fullname"/> + </xsl:call-template> +</xsl:variable> + +<xsl:variable name="draft-basename"> + <xsl:call-template name="draft-base-name"> + <xsl:with-param name="name" select="$draft-fullname"/> + </xsl:call-template> +</xsl:variable> + +<xsl:variable name="is-submitted-draft" select="number($draft-seq)=$draft-seq"/> + +<xsl:variable name="is-rfc" select="$src/rfc/@number"/> + +<xsl:variable name="rfcno"> + <xsl:value-of select="$src/rfc/@number"/> + <xsl:if test="$is-rfc"> + <xsl:for-each select="$src/rfc/front/seriesInfo[@name='RFC']"> + <xsl:if test="number(@value) != number($src/rfc/@number)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">RFC number given in /rfc/front/seriesInfo (<xsl:value-of select="@value"/>) inconsistent with rfc element (<xsl:value-of select="$src/rfc/@number"/>)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + </xsl:if> +</xsl:variable> + +<xsl:variable name="submissionType"> + <xsl:choose> + <xsl:when test="/rfc/@submissionType='IETF' or not(/rfc/@submissionType) or /rfc/submissionType=''">IETF</xsl:when> + <xsl:when test="/rfc/@submissionType='IAB' or /rfc/@submissionType='IRTF' or /rfc/@submissionType='independent'"> + <xsl:value-of select="/rfc/@submissionType"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('(UNSUPPORTED SUBMISSION TYPE: ',/rfc/@submissionType,')')"/> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported value for /rfc/@submissionType: ', /rfc/@submissionType)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <!-- sanity check on @consensus --> + <xsl:if 
test="/rfc/@consensus and /rfc/@submissionType='independent'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('/rfc/@consensus meaningless with a /rfc/@submissionType value of ', /rfc/@submissionType)"/> + </xsl:call-template> + </xsl:if> +</xsl:variable> + +<xsl:variable name="consensus"> + <xsl:choose> + <xsl:when test="/rfc/@consensus='yes' or /rfc/@consensus='true' or not(/rfc/@consensus)">yes</xsl:when> + <xsl:when test="/rfc/@consensus='no' or /rfc/@consensus='false'">no</xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('(UNSUPPORTED VALUE FOR CONSENSUS: ',/rfc/@consensus,')')"/> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported value for /rfc/@consensus: ', /rfc/@consensus)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<!-- Header format as defined in RFC 5741, and deployed end of Dec 2009 --> +<xsl:variable name="header-format"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201001 or + ($rfcno=5741 or $rfcno=5742 or $rfcno=5743)" + >2010</xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="rfc-boilerplate"> + <xsl:choose> + <!-- RFC boilerplate as defined in RFC 5741, and deployed end of Dec 2009 --> + <xsl:when test="$pub-yearmonth >= 201001 or + ($rfcno=5741 or $rfcno=5742 or $rfcno=5743)" + >2010</xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<!-- use https in boilerplate links? --> +<xsl:variable name="rfc-boilerplate-use-https" select="($pub-yearmonth >= 201709 and number($rfcno)!=8230 and number($rfcno)!=8325 and number($rfcno)!=8236) or number($rfcno)=8214 or number($rfcno)=8223"/> + +<xsl:variable name="rfc-info-link"> + <xsl:variable name="scheme"> + <xsl:choose> + <xsl:when test="$rfc-boilerplate-use-https">https</xsl:when> + <xsl:otherwise>http</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="concat($scheme,'://www.rfc-editor.org/info/rfc',$rfcno)"/> +</xsl:variable> + +<xsl:variable name="trust-license-info-link"> + <xsl:variable name="scheme"> + <xsl:choose> + <xsl:when test="$rfc-boilerplate-use-https">https</xsl:when> + <xsl:otherwise>http</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="concat($scheme,'://trustee.ietf.org/license-info')"/> +</xsl:variable> + +<!-- the reference to the latest and greatest headers-and-boilerplates document --> +<xsl:variable name="hab-reference" myns:namespaceless-elements="xml2rfc"> + <eref> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201606 or ($rfcno=7846 or $rfcno=7865 or $rfcno=7866 or $rfcno=7873 or $rfcno=7879 or $rfcno=7892)"><xsl:attribute name="target">https://tools.ietf.org/html/rfc7841#section-2</xsl:attribute>Section 2 of RFC 7841</xsl:when> + <xsl:otherwise><xsl:attribute name="target">https://tools.ietf.org/html/rfc5741#section-2</xsl:attribute>Section 2 of RFC 5741</xsl:otherwise> + </xsl:choose> + </eref> +</xsl:variable> + +<xsl:variable name="id-boilerplate"> + <xsl:choose> + <!-- ID boilerplate approved by IESG on Jan 14 2010--> + <xsl:when test="$pub-yearmonth >= 201004" + >2010</xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="ipr-rfc4748" select="( + $ipr-rfc3667 and + ( number($rfcno) &gt;= 4715 and ( number($rfcno)!=4718 and number($rfcno)!=4735 and number($rfcno)!= 4749 )) + or + ( number($rfcno)=4578 or number($rfcno)=4582 or number($rfcno)=4583 or number($rfcno)=4628 or number($rfcno)=4629 or 
number($rfcno)=4639 or number($rfcno)=4651 or number($rfcno)=4682 or number($rfcno)=4684 or number($rfcno)=4695 or number($rfcno)=4696 ) + or + ( not($is-rfc) and $pub-yearmonth >= 200611) + )" /> + +<xsl:variable name="ipr-2007-08" select="( + $ipr-rfc4748 and + ( + (number($rfcno) &gt; 5000 + and number($rfcno) != 5020 + and number($rfcno) != 5021 + and number($rfcno) != 5034 + and number($rfcno) != 5052 + and number($rfcno) != 5065 + and number($rfcno) != 5094) or + ($xml2rfc-ext-pub-year >= 2008) or + (not($is-rfc) and $pub-yearmonth >= 200709) + ) + )" /> + +<xsl:variable name="ipr-2008-11" select="( + $is-rfc and $pub-yearmonth >= 200811 + ) + or + ( + /rfc/@ipr = 'trust200811' or + /rfc/@ipr = 'noModificationTrust200811' or + /rfc/@ipr = 'noDerivativesTrust200902' or + /rfc/@ipr = 'trust200902' or + /rfc/@ipr = 'noModificationTrust200902' or + /rfc/@ipr = 'noDerivativesTrust200902' or + /rfc/@ipr = 'pre5378Trust200902' + )" /> + +<xsl:variable name="ipr-2009-02" select="( + $ipr-2008-11 and $pub-yearmonth >= 200902 + )" /> + +<!-- this makes the Sep 2009 TLP text depend on the publication date to be >= 2009-11 + for IDs, and around 2009-09 for RFCs--> +<xsl:variable name="ipr-2009-09" select="( + ( not($is-rfc) and $pub-yearmonth >= 200911 ) + or + ( + $is-rfc and $pub-yearmonth >= 200909 and + $rfcno!=5582 and $rfcno!=5621 and $rfcno!=5632 and $rfcno!=5645 and $rfcno!=5646 and $rfcno!=5681 + ) + )" /> + +<!-- this makes the Jan 2010 TLP text depend on the publication date to be >= 2010-04 + for IDs, and around 2010-01 for RFCs--> +<xsl:variable name="ipr-2010-01" select="( + ( not($is-rfc) and $pub-yearmonth >= 201004 ) + or + ( + $is-rfc and ($pub-yearmonth >= 201001 or + $rfcno=5741 or $rfcno=5742 or $rfcno=5743) + ) + )" /> + +<!-- see http://mailman.rfc-editor.org/pipermail/rfc-interest/2009-June/001373.html --> +<!-- for IDs, implement the change as 2009-11 --> +<xsl:variable name="abstract-first" select="( + ($is-rfc and $pub-yearmonth >= 200907) + or + (not($is-rfc) and $pub-yearmonth >= 200911) + )" /> + +<!-- RFC 7322 changed the placement of notes --> +<xsl:variable name="notes-follow-abstract" select="( + ($is-rfc and $rfcno >= 7200) + or + ($pub-yearmonth >= 201409) + )" /> + +<!-- funding switch --> +<xsl:variable name="funding0" select="( + $rfcno &gt; 2499) or + (not($is-rfc) and /rfc/@docName and $xml2rfc-ext-pub-year &gt;= 1999 + )" /> + +<xsl:variable name="funding1" select="( + $rfcno &gt; 4320) or + (not($is-rfc) and /rfc/@docName and $xml2rfc-ext-pub-year &gt;= 2006 + )" /> + +<xsl:variable name="no-funding" select="$ipr-2007-08"/> + +<xsl:variable name="no-copylong" select="$ipr-2008-11 or /rfc/@ipr='none'"/> + +<!-- will document have an index --> +<xsl:variable name="has-index" select="(//iref or (//xref and $xml2rfc-ext-include-references-in-index='yes')) and $xml2rfc-ext-include-index!='no'" /> + +<!-- does the document contain edits? --> +<xsl:variable name="has-edits" select="//ed:ins | //ed:del | //ed:replace" /> + +<!-- does the document have a published-as-rfc link? 
--> +<xsl:variable name="published-as-rfc" select="/*/x:link[@rel='Alternate' and starts-with(@title,'RFC')]"/> + + +<xsl:template match="text()[not(ancestor::artwork or ancestor::sourcecode)]"> + <xsl:variable name="ws" select="'&#9;&#10;&#13;&#32;'"/> + <xsl:variable name="starts-with-ws" select="'' = translate(substring(.,1,1),$ws,'')"/> + <xsl:variable name="ends-with-ws" select="'' = translate(substring(.,string-length(.),1),$ws,'')"/> + <xsl:variable name="normalized" select="normalize-space(.)"/> + <!--<xsl:message> Orig: "<xsl:value-of select="."/>"</xsl:message> + <xsl:message>Start: "<xsl:value-of select="$starts-with-ws"/>"</xsl:message> + <xsl:message> End: "<xsl:value-of select="$ends-with-ws"/>"</xsl:message> --> + <xsl:if test="$starts-with-ws"> + <xsl:variable name="t"> + <xsl:for-each select="preceding-sibling::node()"> + <xsl:choose> + <xsl:when test="self::text()"> + <xsl:value-of select="."/> + </xsl:when> + <xsl:when test="self::*"> + <xsl:apply-templates select="."/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="text-before" select="normalize-space($t)"/> + <xsl:if test="$text-before!=''"> + <xsl:text> </xsl:text> + </xsl:if> + </xsl:if> + <xsl:value-of select="$normalized"/> + <xsl:if test="$ends-with-ws and $normalized!=''"> + <xsl:variable name="t"> + <xsl:for-each select="following-sibling::node()"> + <xsl:choose> + <xsl:when test="self::text()"> + <xsl:value-of select="."/> + </xsl:when> + <xsl:when test="self::*"> + <xsl:apply-templates select="."/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="text-after" select="normalize-space($t)"/> + <xsl:if test="$text-after!='' and substring($t,1,1)!=' '"> + <xsl:text> </xsl:text> + </xsl:if> + </xsl:if> +</xsl:template> + + +<xsl:template match="abstract"> + <xsl:call-template name="check-no-text-content"/> + <section> + <xsl:call-template name="copy-anchor"/> + <h2 id="{$anchor-pref}abstract"><a href="#{$anchor-pref}abstract">Abstract</a></h2> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="'abstract'"/> + </xsl:call-template> + <xsl:apply-templates /> + </section> +</xsl:template> + +<msxsl:script language="JScript" implements-prefix="myns"> + function parseXml(str) { + try { + var doc = new ActiveXObject("MSXML2.DOMDocument"); + doc.async = false; + if (doc.loadXML(str)) { + return ""; + } + else { + return doc.parseError.reason + "\n" + doc.parseError.srcText + " (" + doc.parseError.line + "/" + doc.parseError.linepos + ")"; + } + } + catch(e) { + return ""; + } + } +</msxsl:script> + +<xsl:template name="add-artwork-class"> + <xsl:variable name="v"> + <xsl:choose> + <xsl:when test="@type='abnf' or @type='abnf2045' or @type='abnf2616' or @type='abnf7230' or @type='application/xml-dtd' or @type='inline' or @type='application/relax-ng-compact-syntax'">inline</xsl:when> + <xsl:when test="starts-with(@type,'message/http') and contains(@type,'msgtype=&quot;request&quot;')">text2</xsl:when> + <xsl:when test="starts-with(@type,'message/http')">text</xsl:when> + <xsl:when test="@type='drawing' or @type='pdu'">drawing</xsl:when> + <xsl:when test="self::sourcecode or @type='text/plain' or @type='example' or @type='code' or @type='xml' or @type='application/xml-dtd' or @type='application/json'">text</xsl:when> + <xsl:otherwise/> + </xsl:choose> + <xsl:if test="@x:lang and $prettyprint-class!=''"> + <xsl:value-of select="concat(' ',$prettyprint-class)"/> + <xsl:if 
test="@x:lang!=''"> + <xsl:value-of select="concat(' lang-',@x:lang)"/> + </xsl:if> + </xsl:if> + <xsl:if test="contains(@type,'abnf') and $prettyprint-class!=''"> + <xsl:value-of select="concat(' ',$prettyprint-class,' lang-ietf_abnf')"/> + </xsl:if> + </xsl:variable> + <xsl:if test="normalize-space($v)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($v)"/></xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template name="insert-begin-code"> + <xsl:if test="(self::artwork and @x:is-code-component='yes') or (self::sourcecode and @markers='true')"> + <pre class="ccmarker cct"> + <xsl:text>&lt;CODE BEGINS></xsl:text> + <xsl:if test="self::sourcecode and @name"> + <xsl:variable name="offending" select="translate(@name,concat($alnum,'-+.,;_~#'),'')"/> + <xsl:choose> + <xsl:when test="$offending!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">illegal characters in @name attribute '<xsl:value-of select="@name"/>': '<xsl:value-of select="$offending"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:text> file "</xsl:text> + <xsl:value-of select="@name"/> + <xsl:text>"</xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </pre> + </xsl:if> +</xsl:template> + +<xsl:template name="insert-end-code"> + <xsl:if test="(self::artwork and @x:is-code-component='yes') or (self::sourcecode and @markers='true')"> + <pre class="ccmarker ccb">&lt;CODE ENDS></pre> + </xsl:if> +</xsl:template> + +<xsl:template match="artset"> + <xsl:call-template name="check-no-text-content"/> + <!-- see https://tools.ietf.org/html/draft-levkowetz-xml2rfc-v3-implementation-notes-08#section-3.1.1 --> + <xsl:choose> + <xsl:when test="artwork[svg:svg or normalize-space(.)='' or @src!='']"> + <xsl:apply-templates select="artwork[svg:svg or normalize-space(.)='' or @src!=''][1]"/> + </xsl:when> + <xsl:when test="artwork"> + <xsl:apply-templates select="artwork[1]"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg">artset needs to contain at least one artwork child element</xsl:with-param> + </xsl:call-template> + <p> + <xsl:call-template name="attach-paragraph-number-as-id"/> + <xsl:if test="@anchor"> + <span id="{@anchor}"/> + </xsl:if> + </p> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="artwork|sourcecode"> + <xsl:if test="not(ancestor::ed:del) and $xml2rfc-ext-parse-xml-in-artwork='yes' and function-available('myns:parseXml')" use-when="function-available('myns:parseXml')"> + <xsl:if test="contains(.,'&lt;?xml')"> + <xsl:variable name="body" select="substring-after(substring-after(.,'&lt;?xml'),'?>')" /> + <xsl:if test="$body!='' and myns:parseXml($body)!=''"> + <table style="background-color: red; border-width: thin; border-style: solid; border-color: black;"> + <tr><td> + XML PARSE ERROR; parsed the body below: + <pre> + <xsl:value-of select="$body"/> + </pre> + resulting in: + <pre> + <xsl:value-of select="myns:parseXml($body)" /> + </pre> + </td></tr></table> + </xsl:if> + </xsl:if> + <xsl:if test="@ed:parse-xml-after"> + <xsl:if test="myns:parseXml(string(.))!=''"> + <table style="background-color: red; border-width: thin; border-style: solid; border-color: black;"> + <tr><td> + XML PARSE ERROR: + <pre><xsl:value-of select="myns:parseXml(string(.))" /></pre> + </td></tr></table> + </xsl:if> + </xsl:if> + </xsl:if> + <xsl:if test="contains(.,'&#9;')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'artwork or sourcecode contains HTAB 
character'"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:if> + <xsl:variable name="display"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-allow-markup-in-artwork='yes'"> + <xsl:apply-templates/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="text-in-artwork"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="divstyle"> + <xsl:choose> + <xsl:when test="self::artwork and @align='right'">display:table; margin-left: auto; margin-right: 0em;</xsl:when> + <xsl:when test="self::artwork and @align='center'">display:table; margin-left: auto; margin-right: auto;</xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <xsl:variable name="prestyle"> + <xsl:choose> + <xsl:when test="self::artwork and (@align='right' or @align='center')">margin-left: 0em;</xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <div> + <xsl:choose> + <xsl:when test="parent::artset"> + <xsl:for-each select=".."> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="$divstyle!=''"> + <xsl:attribute name="style"><xsl:value-of select="$divstyle"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="insert-begin-code"/> + <pre> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="$prestyle!=''"> + <xsl:attribute name="style"><xsl:value-of select="$prestyle"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="add-artwork-class"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:copy-of select="$display"/> + </pre> + <xsl:call-template name="insert-end-code"/> + </div> + <xsl:call-template name="check-artwork-width"> + <xsl:with-param name="content"><xsl:apply-templates/></xsl:with-param> + <xsl:with-param name="indent"><xsl:value-of select="string-length(@x:indent-with)"/></xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="text-in-artwork"> + <xsl:param name="content" select="."/> + <xsl:choose> + <xsl:when test="contains($content,'&#9;')"> + <xsl:call-template name="text-in-artwork"> + <xsl:with-param name="content" select="substring-before($content,'&#9;')"/> + </xsl:call-template> + <span class="error" title="HTAB character">&#x2409;</span> + <xsl:call-template name="text-in-artwork"> + <xsl:with-param name="content" select="substring-after($content,'&#9;')"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$content"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- special case for first text node in artwork or sourcecode --> +<xsl:template match="artwork/text()[1]|sourcecode/text()[1]" priority="9"> + <xsl:choose> + <xsl:when test="starts-with(.,'&#10;')"> + <!-- reduce leading whitespace --> + <xsl:call-template name="text-in-artwork"> + <xsl:with-param name="content" select="substring(.,2)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="text-in-artwork"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- other text nodes in artwork or sourcecode --> +<xsl:template match="artwork//text()|sourcecode//text()"> + <xsl:call-template name="text-in-artwork"/> +</xsl:template> + +<xsl:template name="check-artwork-width"> + <xsl:param name="content"/> + <xsl:param name="indent"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-check-artwork-width='no'"> + <!-- skip check --> + </xsl:when> + <xsl:when 
test="not(contains($content,'&#10;'))"> + <xsl:if test="string-length($content) > 69 + number($indent)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">artwork line too long: '<xsl:value-of select="$content"/>' (<xsl:value-of select="string-length($content)"/> characters)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="start" select="substring-before($content,'&#10;')"/> + <xsl:variable name="end" select="substring-after($content,'&#10;')"/> + <xsl:variable name="max"> + <xsl:choose> + <xsl:when test="$indent!=''"><xsl:value-of select="69 + $indent"/></xsl:when> + <xsl:otherwise>69</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="string-length($start) > $max"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">artwork line too long: '<xsl:value-of select="$start"/>' (<xsl:value-of select="string-length($start)"/> characters)</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="check-artwork-width"> + <xsl:with-param name="content" select="$end"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="artwork[xi:include]" priority="9"> + <xsl:variable name="resolved" xmlns=""> + <xsl:element name="artwork" namespace=""> + <xsl:copy-of select="@*"/> + <xsl:for-each select="node()"> + <xsl:choose> + <xsl:when test="self::xi:include"> + <xsl:if test="(@parse and @parse!='xml') or @xpointer"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'Unsupported attributes on x:include element'"/> + </xsl:call-template> + </xsl:if> + <xsl:copy-of select="document(@href)"/> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:element> + </xsl:variable> + <xsl:apply-templates select="exslt:node-set($resolved)/*"/> +</xsl:template> + +<xsl:template match="artwork[@src and starts-with(@type,'image/') or @type='svg']|artwork[svg:svg]"> + <xsl:variable name="class"> + <xsl:value-of select="$css-artwork"/> + <xsl:text> </xsl:text> + <xsl:if test="svg:svg"> + <xsl:value-of select="$css-art-svg"/> + </xsl:if> + <xsl:choose> + <xsl:when test="@align='center'"><xsl:text> </xsl:text><xsl:value-of select="$css-center"/></xsl:when> + <xsl:when test="@align='right'"><xsl:text> </xsl:text><xsl:value-of select="$css-right"/></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <div class="{normalize-space($class)}"> + <xsl:choose> + <xsl:when test="parent::artset"> + <xsl:for-each select=".."> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="svg:svg"> + <xsl:choose> + <xsl:when test="parent::artset and ../@anchor"> + <div id="{../@anchor}"> + <xsl:apply-templates select="svg:svg" mode="embed-svg"/> + </div> + </xsl:when> + <xsl:when test="parent::artset and ../artwork/@anchor"> + <div id="{../artwork[@anchor][1]/@anchor}"> + <xsl:apply-templates select="svg:svg" mode="embed-svg"/> + </div> + </xsl:when> + <xsl:when test="@anchor"> + <div id="{@anchor}"> + <xsl:apply-templates select="svg:svg" mode="embed-svg"/> + </div> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="svg:svg" mode="embed-svg"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="alt"> + <xsl:choose> + <xsl:when test="@alt!=''"> + 
<xsl:value-of select="@alt"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <img src="{@src}"> + <xsl:if test="normalize-space($alt)!=''"> + <xsl:attribute name="alt"><xsl:value-of select="$alt"/></xsl:attribute> + </xsl:if> + <xsl:if test="@width and @width!=''"> + <xsl:copy-of select="@width"/> + </xsl:if> + <xsl:if test="@height and @height!=''"> + <xsl:copy-of select="@height"/> + </xsl:if> + </img> + </xsl:otherwise> + </xsl:choose> + </div> +</xsl:template> + +<!-- copy SVG content without inserted line no information --> +<xsl:template match="node()|@*" mode="embed-svg"> + <xsl:copy><xsl:apply-templates select="node()|@*" mode="embed-svg"/></xsl:copy> +</xsl:template> +<xsl:template match="processing-instruction('rfc-ext')[contains(.,'line-no=')]" mode="embed-svg"/> + +<xsl:template match="/" mode="embed-svg"> + <xsl:copy><xsl:apply-templates select="node()" mode="embed-svg"/></xsl:copy> +</xsl:template> + +<xsl:template match="contact[ancestor::t]"> + <xsl:if test="*"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Ignoring child elements of &lt;contact> when used inside &lt;t>.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="@fullname"/> + <xsl:if test="@asciiFullname"> + <xsl:text> (</xsl:text> + <xsl:value-of select="@asciiFullname"/> + <xsl:text>)</xsl:text> + </xsl:if> +</xsl:template> + +<xsl:template match="author|contact|x:contributor"> + <xsl:call-template name="check-no-text-content"/> + + <address> + <xsl:call-template name="emit-author"/> + + <xsl:if test="@asciiFullname!='' or organization/@ascii!='' or address/postal/*/@ascii"> + <br/><br/> + <em>Additional contact information:</em> + <br/> + <xsl:call-template name="emit-author"> + <xsl:with-param name="ascii" select="false()"/> + </xsl:call-template> + </xsl:if> + </address> +</xsl:template> + +<xsl:template name="emit-postal-line"> + <xsl:param name="prefix"/> + <xsl:param name="value"/> + <xsl:param name="values"/> + <xsl:param name="link"/> + <xsl:param name="annotation"/> + + <xsl:if test="normalize-space($value)!='' or $values"> + <br/> + <xsl:if test="$prefix!=''"><xsl:value-of select="$prefix"/>: </xsl:if> + <xsl:choose> + <xsl:when test="$values"> + <xsl:for-each select="exslt:node-set($values)/*"> + <xsl:choose> + <xsl:when test="@href"> + <a href="{@href}"><xsl:value-of select="normalize-space(.)"/></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(.)"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> + </xsl:when> + <xsl:when test="$link!=''"> + <a href="{$link}"><xsl:value-of select="normalize-space($value)"/></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space($value)"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="$annotation!=''"> + <xsl:text> </xsl:text> + <i><xsl:value-of select="$annotation"/></i> + </xsl:if> + </xsl:if> +</xsl:template> + +<xsl:template name="emit-author-details"> + <xsl:param name="ascii"/> + <xsl:for-each select="address"> + <xsl:choose> + <xsl:when test="position() != 1"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Multiple &lt;address> elements inside &lt;author>, all but the first ignored.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="emit-author-details2"> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> 
+</xsl:template> + +<xsl:template name="emit-postal-city"> + <xsl:param name="ascii"/> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-city-code"> + <xsl:param name="ascii"/> + <xsl:param name="prefix"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:value-of select="$city"/> + <xsl:text> </xsl:text> + <xsl:if test="$code!=''"> + <xsl:choose> + <xsl:when test="$prefix!='' and starts-with($code,$prefix)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Prefix '<xsl:value-of select="$prefix"/>' on &lt;code> '<xsl:value-of select="$code"/>' will be inserted automatically.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$prefix"/> + </xsl:otherwise> + </xsl:choose> + <xsl:value-of select="$code"/> + </xsl:if> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-city-region-code"> + <xsl:param name="ascii"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value" select="concat($city,' ',$region,' ',$code)"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-code-city-region"> + <xsl:param name="ascii"/> + <xsl:param name="cr-delim" select="' '"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> 
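+    <!-- editor's note: any of city, region or code may be empty here; emit-postal-line
+         normalizes the concatenated value and emits nothing when it is blank -->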
+ </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value" select="concat($code,' ',$city,$cr-delim,$region)"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-city-comma-region-code"> + <xsl:param name="ascii"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:value-of select="$city"/> + <xsl:variable name="region-and-code" select="concat($region,' ',$code)"/> + <xsl:if test="normalize-space($region-and-code)!=''"> + <xsl:if test="$city!=''"> + <xsl:text>, </xsl:text> + </xsl:if> + <xsl:value-of select="normalize-space($region-and-code)"/> + </xsl:if> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-city-minus-region"> + <xsl:param name="ascii"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:value-of select="$city"/> + <xsl:if test="$region!=''"> + <xsl:if test="$city!=''"> + <xsl:text>-</xsl:text> + </xsl:if> + <xsl:value-of select="$region"/> + </xsl:if> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-cityarea"> + <xsl:param name="ascii"/> + <xsl:if test="cityarea"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="cityarea"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="emit-postal-cityarea-city"> + <xsl:param name="ascii"/> + <xsl:variable name="cityarea"> + <xsl:if test="cityarea"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="cityarea"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value" select="concat($cityarea,' ',$city)"/> + </xsl:call-template> +</xsl:template> + +<xsl:template 
name="emit-postal-code"> + <xsl:param name="ascii"/> + <xsl:param name="prefix"/> + <xsl:if test="code"> + <xsl:variable name="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:if test="$code!=''"> + <xsl:choose> + <xsl:when test="$prefix!='' and starts-with($code,$prefix)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Prefix '<xsl:value-of select="$prefix"/>' on &lt;code> '<xsl:value-of select="$code"/>' will be inserted automatically.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$prefix"/> + </xsl:otherwise> + </xsl:choose> + <xsl:value-of select="$code"/> + </xsl:if> + </xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="emit-postal-country"> + <xsl:param name="ascii"/> + <xsl:if test="country"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="country"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="emit-postal-code-city"> + <xsl:param name="ascii"/> + <xsl:param name="prefix"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:if test="$code!=''"> + <xsl:choose> + <xsl:when test="$prefix!='' and starts-with($code,$prefix)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Prefix '<xsl:value-of select="$prefix"/>' on &lt;code> '<xsl:value-of select="$code"/>' will be inserted automatically.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$prefix"/> + </xsl:otherwise> + </xsl:choose> + <xsl:value-of select="$code"/> + </xsl:if> + <xsl:text> </xsl:text> + <xsl:value-of select="$city"/> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-region"> + <xsl:param name="ascii"/> + <xsl:if test="region"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="emit-postal-region-city-cityarea"> + <xsl:param name="ascii"/> + <xsl:variable name="city"> + <xsl:if test="city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="city"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + 
<xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="cityarea"> + <xsl:if test="cityarea"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="cityarea"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value" select="concat($region,' ',$city,' ',$cityarea)"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-region-code"> + <xsl:param name="ascii"/> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value" select="concat($region,' ',$code)"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-region-comma-code"> + <xsl:param name="ascii"/> + <xsl:variable name="region"> + <xsl:if test="region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="region"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="code"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:value-of select="$region"/> + <xsl:if test="$region!='' and $code!=''">, </xsl:if> + <xsl:value-of select="$code"/> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-postal-street"> + <xsl:param name="ascii"/> + <xsl:for-each select="extaddr"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:for-each> + <xsl:for-each select="street"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:for-each> + <xsl:for-each select="pobox"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:for-each> +</xsl:template> + +<xsl:template name="author-name-for-diags"> + <xsl:variable name="author" select="ancestor-or-self::author"/> + <xsl:choose> + <xsl:when test="$author/@fullname"> + <xsl:value-of select="$author/@fullname"/> + </xsl:when> + <xsl:when test="$author/@surname"> + <xsl:value-of select="$author/@surname"/> + </xsl:when> + <xsl:when test="$author/organization"> + <xsl:text>(org) </xsl:text> + <xsl:value-of select="$author/organization"/> + </xsl:when> + 
<xsl:otherwise>???</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="emit-postal-warnings"> + <xsl:param name="nodes"/> + <xsl:for-each select="$nodes"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Element '<xsl:value-of select="local-name(.)"/>' with value '<xsl:value-of select="normalize-space(.)"/>' not displayed in postal address for '<xsl:call-template name="author-name-for-diags"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:for-each> +</xsl:template> + +<xsl:template name="emit-author-details2"> + <xsl:param name="ascii"/> + <xsl:for-each select="postal"> + <xsl:choose> + <xsl:when test="position()!=1"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Multiple &lt;postal> elements inside &lt;address> for '<xsl:call-template name="author-name-for-diags"/>', all but the first ignored.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(postalLine)"> + <xsl:variable name="ascii-country"> + <xsl:if test="country"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="country"/> + <xsl:with-param name="ascii" select="true()"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:if test="$ascii and $ascii-country=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Postal address for '<xsl:call-template name="author-name-for-diags"/>' is incomplete because country information is missing.</xsl:with-param> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:if> + <xsl:variable name="format"> + <xsl:if test="/rfc/@version >= 3"> + <xsl:call-template name="get-country-format"> + <xsl:with-param name="country" select="$ascii-country"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:if test="$ascii and contains($format,'%C') and street and not(city)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Postal address for '<xsl:call-template name="author-name-for-diags"/>' likely incomplete: street specified, but city is not.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:variable name="postprefix"> + <xsl:call-template name="get-country-postprefix"> + <xsl:with-param name="country" select="$ascii-country"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <!-- A STREETADDRESS, C CITY, D CITYAREA (DISTRICT?), Z (ZIP)CODE --> + <xsl:when test="$format='%A%n%C %S %Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city-region-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%Z %C %S'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code-city-region"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%Z %C/%S'"> + <xsl:call-template 
name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code-city-region"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="cr-delim" select="'/'"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%Z %C%n%S'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-region"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%C %Z%n%S'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-region"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%Z %C'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code-city"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="prefix" select="$postprefix"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|region|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%C %Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city-code"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="prefix" select="$postprefix"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|region|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%D%n%C%n%S %Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-cityarea"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-region-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template 
name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%D%n%C-%S%n%Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-cityarea"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city-minus-region"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%Z%n%S%C%D%n%A'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-cityarea"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-region-comma-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%C%n%A%n%Z'"> + <xsl:call-template name="emit-postal-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%Z%n%S%n%A'"> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="prefix" select="$postprefix"/></xsl:call-template> + <xsl:call-template name="emit-postal-region"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%Z%n%C'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="prefix" select="$postprefix"/></xsl:call-template> + <xsl:call-template name="emit-postal-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template 
name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%C%n%Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%D %C%n%S %Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-cityarea-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-region-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="prefix" select="$postprefix"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="city|cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%D%n%C %Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-cityarea"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%D%n%Z %C, %S'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-cityarea"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code-city-region"><xsl:with-param name="ascii" select="$ascii"/><xsl:with-param name="cr-delim" select="', '"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:when test="$format='%A%n%C%n%S%n%Z'"> + <xsl:call-template name="emit-postal-street"><xsl:with-param 
name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-region"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- %A%n%C, %S %Z --> + <xsl:call-template name="emit-postal-street"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-city-comma-region-code"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-country"><xsl:with-param name="ascii" select="$ascii"/></xsl:call-template> + <xsl:call-template name="emit-postal-warnings"><xsl:with-param name="nodes" select="cityarea|sortingcode"/></xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:if test="*[not(self::postalLine)]"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">It is not allowed to mix postalLine with other elements; these will be ignored.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="postalLine"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="value"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:with-param> + </xsl:call-template> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + <xsl:if test="phone"> + <xsl:variable name="phone"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="phone"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="prefix">Phone</xsl:with-param> + <xsl:with-param name="value" select="$phone"/> + <xsl:with-param name="link" select="concat('tel:',translate($phone,' ',''))"/> + </xsl:call-template> + </xsl:if> + <xsl:if test="facsimile"> + <xsl:variable name="facsimile"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="facsimile"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="prefix">Fax</xsl:with-param> + <xsl:with-param name="value" select="$facsimile"/> + <xsl:with-param name="link" select="concat('fax:',translate($facsimile,' ',''))"/> + </xsl:call-template> + </xsl:if> + <xsl:if test="email"> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="prefix"> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Email</xsl:when> + <xsl:otherwise>EMail</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + <xsl:with-param name="values"> + <xsl:for-each select="email"> + <xsl:variable name="e"> + <xsl:call-template name="extract-email"/> + </xsl:variable> + <v> + <xsl:if test="$xml2rfc-linkmailto!='no'"> + <xsl:attribute name="href"> + <xsl:value-of select="concat('mailto:',normalize-space($e))"/> + </xsl:attribute> + </xsl:if> + <xsl:value-of select="normalize-space($e)"/> + </v> + </xsl:for-each> + </xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="uri"> + <xsl:variable name="uri"> + <xsl:call-template 
name="extract-uri"/> + </xsl:variable> + <xsl:call-template name="emit-postal-line"> + <xsl:with-param name="prefix">URI</xsl:with-param> + <xsl:with-param name="value" select="$uri"/> + <xsl:with-param name="link" select="$uri"/> + <xsl:with-param name="annotation" select="@x:annotation"/> + </xsl:call-template> + </xsl:for-each> +</xsl:template> + +<xsl:template name="emit-author"> + <xsl:param name="ascii" select="true()"/> + <b> + <xsl:choose> + <xsl:when test="(not(@fullname) or @fullname='') and @surname!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">fullname attribute should be specified for author (using surname instead)</xsl:with-param> + </xsl:call-template> + <xsl:call-template name="format-initials"/> + <xsl:text> </xsl:text> + <xsl:value-of select="@surname"/> + </xsl:when> + <xsl:when test="@asciiFullname!='' and $ascii"> + <xsl:value-of select="@asciiFullname" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@fullname" /> + </xsl:otherwise> + </xsl:choose> + </b> + <xsl:if test="not(self::contact) and @role"> + <xsl:text> (</xsl:text> + <xsl:value-of select="@role" /> + <xsl:text>)</xsl:text> + </xsl:if> + <!-- annotation support for Martin "uuml" Duerst --> + <xsl:if test="@x:annotation"> + <xsl:text> </xsl:text> + <i><xsl:value-of select="@x:annotation"/></i> + </xsl:if> + + <xsl:if test="normalize-space(concat(organization,organization/@ascii)) != ''"> + <br/> + <xsl:choose> + <xsl:when test="organization/@ascii!='' and $ascii"> + <xsl:value-of select="organization/@ascii" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="organization" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:call-template name="emit-author-details"> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> +</xsl:template> + +<!-- this is a named template because <back> may be absent --> +<xsl:template name="back"> + <xsl:for-each select="back"> + <xsl:call-template name="check-no-text-content"/> + </xsl:for-each> + + <!-- add editorial comments --> + <xsl:if test="//cref[not(@display) or display!='false'] and $xml2rfc-comments='yes' and $xml2rfc-inline!='yes'"> + <xsl:call-template name="insertComments" /> + </xsl:if> + + <!-- next, add information about the document's authors --> + <xsl:if test="$xml2rfc-ext-authors-section='before-appendices'"> + <xsl:call-template name="insertAuthors" /> + </xsl:if> + + <!-- add all other top-level sections under <back> --> + <xsl:apply-templates select="back/*[not(self::references) and not(self::ed:replace and .//references)]" /> + + <!-- insert the index if index entries exist --> + <!-- note it always comes before the authors section --> + <xsl:if test="$has-index"> + <xsl:call-template name="insertIndex" /> + </xsl:if> + + <!-- Authors section is the absolute last thing, except for copyright stuff --> + <xsl:if test="$xml2rfc-ext-authors-section='end'"> + <xsl:call-template name="insertAuthors" /> + </xsl:if> + + <xsl:if test="$xml2rfc-private=''"> + <!-- copyright statements --> + <xsl:variable name="copyright"> + <xsl:call-template name="insertCopyright" /> + </xsl:variable> + + <!-- emit it --> + <xsl:choose> + <xsl:when test="function-available('exslt:node-set')"> + <xsl:apply-templates select="exslt:node-set($copyright)" /> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$node-set-warning"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + +</xsl:template> + +<xsl:template name="check-absolute-uri"> + 
<xsl:variable name="potential-scheme" select="substring-before(@target,':')"/> + <xsl:variable name="invalid-scheme-chars" select="translate($potential-scheme,concat($alnum,'+-.'),'')"/> + <xsl:if test="$potential-scheme=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">target attribute not an absolute URI: <xsl:value-of select="@target"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="$potential-scheme!='' and $invalid-scheme-chars!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">target attribute '<xsl:value-of select="@target"/>' contains invalid scheme name '<xsl:value-of select="$potential-scheme"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template match="eref[*|text()]"> + <xsl:call-template name="check-absolute-uri"/> + <a href="{@target}"> + <xsl:apply-templates/> + </a> +</xsl:template> + +<xsl:template match="eref[not(*|text())]"> + <xsl:call-template name="check-absolute-uri"/> + <xsl:variable name="in-angles" select="(not(/rfc/@version >= 3) and not(@brackets='none')) or @brackets='angle'"/> + <xsl:if test="$in-angles"><xsl:text>&lt;</xsl:text></xsl:if> + <a href="{@target}"><xsl:value-of select="@target"/></a> + <xsl:if test="$in-angles"><xsl:text>&gt;</xsl:text></xsl:if> +</xsl:template> + +<xsl:template match="figure"> + <xsl:call-template name="check-no-text-content"/> + <!-- warn about the attributes that we do not support --> + <xsl:for-each select="@*[local-name()!='title' and local-name()!='suppress-title' and local-name()!='anchor' and local-name()!='pn' and normalize-space(.)!='']"> + <xsl:if test="local-name(.)!='align' or normalize-space(.)!='left'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('unsupported attribute ',local-name(.),' on figure element')"/> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + <xsl:variable name="anch-container"> + <xsl:choose> + <xsl:when test="ancestor::t">span</xsl:when> + <xsl:otherwise>div</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="@anchor!=''"> + <xsl:call-template name="check-anchor"/> + <xsl:element name="{$anch-container}"> + <xsl:attribute name="id"><xsl:value-of select="@anchor"/></xsl:attribute> + </xsl:element> + </xsl:if> + <xsl:variable name="anch"> + <xsl:call-template name="get-figure-anchor"/> + </xsl:variable> + <xsl:element name="{$anch-container}"> + <xsl:attribute name="id"><xsl:value-of select="$anch"/></xsl:attribute> + <xsl:apply-templates select="*[not(self::name)]"/> + </xsl:element> + <xsl:if test="(@title!='' or name) or (@anchor!='' and not(@suppress-title='true'))"> + <xsl:variable name="n"><xsl:call-template name="get-figure-number"/></xsl:variable> + <p class="figure"> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Figure </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="@title!='' or name">: </xsl:if> + </xsl:if> + <xsl:call-template name="insertTitle"/> + </p> + </xsl:if> +</xsl:template> + +<xsl:variable name="all-notes" select="/rfc/front/note"/> +<xsl:variable name="all-edited-notes" select="/rfc/front/ed:replace[.//note]"/> + +<!-- TODO:extend for other streams --> +<xsl:variable name="stream-note-titles">[IESG Note][IESG Note:]</xsl:variable> + +<xsl:variable name="notes-not-in-boilerplate" select="$all-notes[not(contains($stream-note-titles,concat('[',normalize-space(@title),']'))) or $xml2rfc-private!='' or $notes-follow-abstract]"/> +<xsl:variable name="edited-notes-not-in-boilerplate" 
select="$all-edited-notes[not(contains($stream-note-titles,concat('[',normalize-space(.//note/@title),']'))) or $xml2rfc-private!='' or $notes-follow-abstract]"/> +<xsl:variable name="notes-in-boilerplate" select="$all-notes[not(not(contains($stream-note-titles,concat('[',normalize-space(@title),']'))) or $xml2rfc-private!='' or $notes-follow-abstract)]"/> +<xsl:variable name="edited-notes-in-boilerplate" select="$all-edited-notes[not(not(contains($stream-note-titles,concat('[',normalize-space(.//note/@title),']'))) or $xml2rfc-private!='' or $notes-follow-abstract)]"/> + +<xsl:template name="draft-sequence-number"> + <xsl:param name="name"/> + <xsl:choose> + <xsl:when test="contains($name,'-')"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="substring-after($name,'-')"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$name"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="draft-base-name"> + <xsl:param name="name"/> + <xsl:variable name="seq"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:variable> + <xsl:value-of select="substring($name,1,string-length($name)-string-length($seq)-1)"/> +</xsl:template> + +<xsl:template name="draft-name-legal"> + <xsl:param name="name"/> + + <xsl:if test="contains($name,'.')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The Internet-Draft name '<xsl:value-of select="$name"/>' should contain the base name, not the filename (thus no file extension).</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="offending" select="translate($name,concat($lcase,$digits,'-.'),'')"/> + <xsl:if test="$offending != ''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The Internet-Draft name '<xsl:value-of select="$name"/>' should not contain the character '<xsl:value-of select="substring($offending,1,1)"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="contains($name,'--')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The Internet-Draft name '<xsl:value-of select="$name"/>' should not contain the character sequence '--'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="not(starts-with($name,'draft-'))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The Internet-Draft name '<xsl:value-of select="$name"/>' should start with 'draft-'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="seq"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:variable> + + <xsl:if test="$seq='' or ($seq!='latest' and translate($seq,$digits,'')!='')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The Internet-Draft name '<xsl:value-of select="$name"/>' should end with a two-digit sequence number or 'latest'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="string-length($name)-string-length($seq) > 50"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The Internet-Draft name '<xsl:value-of select="$name"/>', excluding sequence number, should have less than 50 characters.</xsl:with-param> + </xsl:call-template> + </xsl:if> + +</xsl:template> + + +<xsl:template match="front"> + <xsl:call-template name="check-no-text-content"/> + <header> + <xsl:if test="$xml2rfc-topblock!='no'"> + <!-- collect information for 
left column --> + <xsl:variable name="leftColumn"> + <xsl:call-template name="collectLeftHeaderColumn" /> + </xsl:variable> + <!-- collect information for right column --> + <xsl:variable name="rightColumn"> + <xsl:call-template name="collectRightHeaderColumn" /> + </xsl:variable> + <!-- insert the collected information --> + <table class="{$css-header}" id="{$anchor-pref}headerblock"> + <xsl:choose> + <xsl:when test="function-available('exslt:node-set')"> + <xsl:call-template name="emitheader"> + <xsl:with-param name="lc" select="exslt:node-set($leftColumn)" /> + <xsl:with-param name="rc" select="exslt:node-set($rightColumn)" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$node-set-warning"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </table> + </xsl:if> + + <div id="{$anchor-pref}title"> + <!-- main title --> + <h1><xsl:apply-templates select="title"/></h1> + <xsl:if test="/rfc/@docName"> + <xsl:variable name="docname" select="/rfc/@docName"/> + <xsl:choose> + <xsl:when test="$rfcno!=''"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' is ignored because an RFC number (<xsl:value-of select="$rfcno"/>) is specified as well.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <div class="filename"> + <xsl:variable name="seq"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="$docname"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="base"> + <xsl:call-template name="draft-base-name"> + <xsl:with-param name="name" select="$docname"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="status-uri"> + <xsl:call-template name="compute-draft-status-uri"> + <xsl:with-param name="draftname" select="$base"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="number($seq)>=0"> + <xsl:variable name="draft-uri"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$docname"/> + </xsl:call-template> + </xsl:variable> + <a href="{$status-uri}" class="smpl"><xsl:value-of select="$base"/></a> + <xsl:text>-</xsl:text> + <a href="{$draft-uri}" class="smpl"><xsl:value-of select="$seq"/></a> + </xsl:when> + <xsl:when test="$base!=''"> + <a href="{$status-uri}" class="smpl"><xsl:value-of select="$docname"/></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$docname"/> + </xsl:otherwise> + </xsl:choose> + </div> + </xsl:otherwise> + </xsl:choose> + + <xsl:variable name="si" select="/rfc/front/seriesInfo[@name='Internet-Draft']"/> + <xsl:if test="$si and $si/@value!=$docname"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Inconsistent draft names in /rfc/@docName ('<xsl:value-of select="$docname"/>') and /rfc/seriesInfo ('<xsl:value-of select="$si/@value"/>').</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:call-template name="draft-name-legal"> + <xsl:with-param name="name" select="$docname"/> + </xsl:call-template> + </xsl:if> + </div> + </header> + + <!-- insert notice about update --> + <xsl:if test="$published-as-rfc"> + <p class="{$css-publishedasrfc}"> + <b>Note:</b> a later version of this document has been published as <a href="{$published-as-rfc/@href}"><xsl:value-of select="$published-as-rfc/@title"/></a>. 
+ </p> + </xsl:if> + + <!-- check for conforming ipr attribute --> + <xsl:choose> + <xsl:when test="not(/rfc/@ipr)"> + <xsl:if test="not($is-rfc) and $xml2rfc-private=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Either /rfc/@ipr or /rfc/@number is required</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="/rfc/@ipr = 'full2026'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativeWorks'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativeWorksNow'" /> + <xsl:when test="/rfc/@ipr = 'none'" /> + <xsl:when test="/rfc/@ipr = 'full3667'" /> + <xsl:when test="/rfc/@ipr = 'noModification3667'" /> + <xsl:when test="/rfc/@ipr = 'noDerivatives3667'" /> + <xsl:when test="/rfc/@ipr = 'full3978'" /> + <xsl:when test="/rfc/@ipr = 'noModification3978'" /> + <xsl:when test="/rfc/@ipr = 'noDerivatives3978'" /> + <xsl:when test="/rfc/@ipr = 'trust200811'" /> + <xsl:when test="/rfc/@ipr = 'noModificationTrust200811'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativesTrust200811'" /> + <xsl:when test="/rfc/@ipr = 'trust200902'" /> + <xsl:when test="/rfc/@ipr = 'noModificationTrust200902'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativesTrust200902'" /> + <xsl:when test="/rfc/@ipr = 'pre5378Trust200902'" /> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unknown value for /rfc/@ipr: ', /rfc/@ipr)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="'boilerplate'"/> + </xsl:call-template> + + <xsl:if test="not($abstract-first)"> + <xsl:if test="$xml2rfc-private=''"> + <xsl:call-template name="emit-ietf-preamble"> + <xsl:with-param name="notes" select="$notes-in-boilerplate|$edited-notes-in-boilerplate"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:apply-templates select="abstract"/> + + <xsl:if test="$notes-follow-abstract"> + <xsl:apply-templates select="$notes-not-in-boilerplate|$edited-notes-not-in-boilerplate" /> + </xsl:if> + + <xsl:if test="$abstract-first"> + <xsl:if test="$xml2rfc-private=''"> + <xsl:call-template name="emit-ietf-preamble"> + <xsl:with-param name="notes" select="$notes-in-boilerplate|$edited-notes-in-boilerplate"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:if test="not($notes-follow-abstract)"> + <xsl:apply-templates select="$notes-not-in-boilerplate|$edited-notes-not-in-boilerplate" /> + </xsl:if> + + <xsl:if test="$xml2rfc-toc='yes'"> + <xsl:apply-templates select="/" mode="toc" /> + </xsl:if> + +</xsl:template> + +<xsl:template name="string-diff"> + <xsl:param name="s1"/> + <xsl:param name="s2"/> + <xsl:param name="p"/> + + <xsl:choose> + <xsl:when test="$s1='' and $s2=''"><!-- done --></xsl:when> + <xsl:when test="$s1=''"> + <xsl:value-of select="concat('Extra characters at the end of 2nd string: ',$s2)"/> + </xsl:when> + <xsl:when test="$s2=''"> + <xsl:value-of select="concat('Extra characters at the end of 1st string: ',$s1)"/> + </xsl:when> + <xsl:when test="substring($s1,1,1)!=substring($s2,1,1)"> + <xsl:value-of select="concat('Strings differ at position ',string-length($p),', 1st string ends in: [[[',$s1,']]], 2nd string ends in: [[[',$s2,']]]')"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="string-diff"> + <xsl:with-param name="s1" select="substring($s1,2)"/> + <xsl:with-param name="s2" select="substring($s2,2)"/> + <xsl:with-param name="p" select="concat($p,substring($s1,1,1))"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose>
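+ <!-- example: comparing "status: ietf" with "status: IETF" reports a difference at position 8 together with both remaining tails; when one string is a strict prefix of the other, the extra tail of the longer string is reported -->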
+</xsl:template> + + +<xsl:template name="emit-ietf-preamble"> + <xsl:param name="notes"/> + + <!-- Get status info formatted as per RFC2629--> + <xsl:variable name="preamble"> + <xsl:for-each select="/rfc"> + <xsl:call-template name="insertPreamble"> + <xsl:with-param name="notes" select="$notes"/> + </xsl:call-template> + </xsl:for-each> + </xsl:variable> + + <!-- emit it --> + <xsl:choose> + <xsl:when test="function-available('exslt:node-set')"> + <!-- get document-supplied boilerplate --> + <xsl:variable name="userboiler"> + <xsl:apply-templates select="$src//rfc/front/boilerplate"/> + </xsl:variable> + <xsl:variable name="generated"> + <xsl:apply-templates select="exslt:node-set($preamble)"/> + </xsl:variable> + <xsl:copy-of select="$generated"/> + <!--<xsl:message>1: [[[<xsl:value-of select="normalize-space(string($userboiler))"/>]]]</xsl:message> + <xsl:message>2: [[[<xsl:value-of select="normalize-space(string($generated))"/>]]]</xsl:message>--> + <xsl:variable name="differ" select="$src//rfc/front/boilerplate and normalize-space(string($userboiler))!=normalize-space(string($generated))"/> + <xsl:if test="$differ"> + <xsl:variable name="diff"> + <xsl:call-template name="string-diff"> + <xsl:with-param name="s1" select="normalize-space(string($userboiler))"/> + <xsl:with-param name="s2" select="normalize-space(string($generated))"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('User-supplied boilerplate differs from auto-generated boilerplate (inserting auto-generated); ', $diff)"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$node-set-warning"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="iref"> + <xsl:variable name="anchor"><xsl:call-template name="compute-iref-anchor"/></xsl:variable> + <xsl:choose> + <xsl:when test="parent::figure"> + <div id="{$anchor}"/> + </xsl:when> + <xsl:when test="ancestor::t or ancestor::artwork or ancestor::sourcecode or ancestor::preamble or ancestor::postamble"> + <span id="{$anchor}"/> + </xsl:when> + <xsl:otherwise> + <div id="{$anchor}"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="compute-iref-anchor"> + <xsl:variable name="first" select="translate(substring(@item,1,1),$ucase,$lcase)"/> + <xsl:variable name="nkey" select="translate($first,$alnum,'')"/> + <xsl:choose> + <xsl:when test="count(.|$section-level-irefs)=count($section-level-irefs)"> + <xsl:for-each select=".."> + <xsl:value-of select="$anchor-pref"/>section.<xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="$nkey=''"> + <xsl:value-of select="$anchor-pref"/>iref.<xsl:value-of select="$first"/>.<xsl:number level="any" count="iref[starts-with(translate(@item,$ucase,$lcase),$first) and count(.|$section-level-irefs)!=count($section-level-irefs)]"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/>iref.<xsl:number level="any" count="iref[translate(substring(@item,1,1),$alnum,'')!='' and count(.|$section-level-irefs)!=count($section-level-irefs)]"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="compute-extref-anchor"> + <xsl:variable name="first" select="translate(substring(.,1,1),$ucase,$lcase)"/> + <xsl:variable name="nkey" select="translate($first,$lcase,'')"/> + <xsl:choose> + <xsl:when test="$nkey=''"> + <xsl:value-of 
select="$anchor-pref"/>extref.<xsl:value-of select="$first"/>.<xsl:number level="any" count="x:ref[starts-with(translate(.,$ucase,$lcase),$first)]"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/>extref.<xsl:number level="any" count="x:ref[translate(substring(.,1,1),concat($lcase,$ucase),'')='']"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- list templates depend on the list style --> + +<xsl:template name="list-empty"> + <ul class="empty"> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ul> +</xsl:template> + +<xsl:template name="list-format"> + <dl> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </dl> +</xsl:template> + +<xsl:template name="list-hanging"> + <xsl:variable name="compact"> + <xsl:call-template name="get-compact-setting"/> + </xsl:variable> + <!-- insert a hard space for nested lists so that indentation works ok --> + <xsl:if test="ancestor::list and normalize-space(preceding-sibling::text())=''"> + <xsl:text>&#160;</xsl:text> + </xsl:if> + <dl> + <xsl:if test="$compact='yes'"> + <xsl:attribute name="class">compact</xsl:attribute> + </xsl:if> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </dl> +</xsl:template> + +<xsl:template name="list-numbers"> + <ol> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ol> +</xsl:template> + +<xsl:template name="check-no-hangindent"> + <xsl:if test="@hangIndent"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="'hangIndent attribute not supported for this list style'"/> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="list-letters"> + <xsl:variable name="type"> + <xsl:choose> + <!-- lowercase for even-numbered nesting levels --> + <xsl:when test="0=(count(ancestor::list[@style='letters']) mod 2)">a</xsl:when> + <!-- uppercase otherwise --> + <xsl:otherwise>A</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <ol type="{$type}"> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ol> +</xsl:template> + +<xsl:template name="list-symbols"> + <ul> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ul> +</xsl:template> + +<xsl:template match="list"> + <xsl:variable name="style" select="ancestor-or-self::list[@style][1]/@style"/> + <xsl:call-template name="check-no-text-content"/> + <xsl:choose> + <xsl:when test="not($style) or $style='empty'"> + <xsl:call-template name="check-no-hangindent"/> + <xsl:call-template name="list-empty"/> + </xsl:when> + <xsl:when test="starts-with($style, 'format ')"> + <xsl:call-template name="list-format"/> + </xsl:when> + <xsl:when test="$style='hanging'"> + <xsl:call-template name="list-hanging"/> + </xsl:when> + <xsl:when test="$style='letters'"> + <xsl:call-template name="check-no-hangindent"/> + <xsl:call-template name="list-letters"/> + </xsl:when> + <xsl:when test="$style='numbers'"> + <xsl:call-template name="check-no-hangindent"/> + <xsl:call-template name="list-numbers"/> + </xsl:when> + <xsl:when test="$style='symbols'"> + <xsl:call-template name="check-no-hangindent"/> + <xsl:call-template name="list-symbols"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param 
name="msg" select="concat('Unsupported style attribute: ', $style)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + + + +<!-- v3 lists --> + +<xsl:template match="ol[string-length(@type)>1]"> + <xsl:variable name="start"> + <xsl:choose> + <xsl:when test="@group"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:when test="@start"> + <xsl:value-of select="@start"/> + </xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <div> + <xsl:call-template name="attach-paragraph-number-as-id"/> + <dl> + <xsl:call-template name="copy-anchor"/> + <xsl:for-each select="li"> + <xsl:variable name="label"> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="../@type"/> + <xsl:with-param name="pos" select="$start - 1 + position()"/> + </xsl:call-template> + </xsl:variable> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:value-of select="$label"/> + </dt> + <dd> + <xsl:apply-templates/> + </dd> + </xsl:for-each> + </dl> + </div> +</xsl:template> + +<xsl:template match="dl"> + <xsl:variable name="newl" select="@newline"/> + <xsl:variable name="spac" select="@spacing"/> + <xsl:variable name="class"> + <xsl:if test="$spac='compact'">compact </xsl:if> + <xsl:if test="$newl='true'">nohang </xsl:if> + </xsl:variable> + <div> + <xsl:if test="not(ancestor::list)"> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:if> + <dl> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="normalize-space($class)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($class)"/></xsl:attribute> + </xsl:if> + <xsl:for-each select="dt"> + <xsl:apply-templates select="."/> + <xsl:apply-templates select="following-sibling::dd[1]"/> + </xsl:for-each> + </dl> + </div> +</xsl:template> + +<xsl:template match="dt"> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </dt> +</xsl:template> + +<xsl:template match="dd"> + <dd> + <xsl:call-template name="copy-anchor"/> + <xsl:variable name="indent" select="../@indent"/> + <xsl:if test="number($indent)=$indent"> + <xsl:attribute name="style">margin-left: <xsl:value-of select="$indent div 2"/>em</xsl:attribute> + </xsl:if> + <xsl:variable name="block-level-children" select="artwork|aside|dl|figure|ol|sourcecode|t|table|ul"/> + <xsl:choose> + <xsl:when test="$block-level-children"> + <!-- TODO: improve error handling--> + <xsl:for-each select="$block-level-children"> + <xsl:choose> + <xsl:when test="self::t"> + <p> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </p> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> + <!-- add one nbsp for empty dd elements --> + <xsl:if test="normalize-space(.)=''">&#160;</xsl:if> + </dd> +</xsl:template> + +<!-- get value of "compact" mode, checking subcompact first, then compact --> +<xsl:template name="get-compact-setting"> + <xsl:variable name="t1"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="preceding::processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'subcompact'"/> + <xsl:with-param name="default" select="'?'"/> + <xsl:with-param name="duplicate-warning" select="'no'"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$t1='?'"> + <xsl:call-template 
name="parse-pis"> + <xsl:with-param name="nodes" select="preceding::processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'compact'"/> + <xsl:with-param name="default" select="'?'"/> + <xsl:with-param name="duplicate-warning" select="'no'"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$t1"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="ol-start"> + <xsl:param name="node"/> + <xsl:variable name="group" select="$node/@group"/> + <xsl:variable name="prec" select="$node/preceding::ol[@group=$group]"/> + <xsl:choose> + <xsl:when test="$node/@start"> + <xsl:value-of select="$node/@start"/> + </xsl:when> + <xsl:when test="$prec"> + <xsl:variable name="s"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="$prec[last()]"/> + </xsl:call-template> + </xsl:variable> + <xsl:value-of select="$s + count($prec[last()]/li)"/> + </xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="ol[not(@type) or string-length(@type)=1]"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:variable name="start"> + <xsl:choose> + <xsl:when test="@group"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:when test="@start"> + <xsl:value-of select="@start"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <div> + <xsl:if test="not(ancestor::list)"> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:if> + <ol> + <xsl:if test="$start!=''"> + <xsl:attribute name="start"><xsl:value-of select="$start"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:copy-of select="@type"/> + <xsl:apply-templates /> + </ol> + </div> +</xsl:template> + +<xsl:template match="ul"> + <div> + <xsl:call-template name="insertInsDelClass"/> + <xsl:if test="not(ancestor::list)"> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:if> + <xsl:choose> + <xsl:when test="not(li) and @x:when-empty"> + <p> + <xsl:call-template name="copy-anchor"/> + <xsl:value-of select="@x:when-empty"/> + </p> + </xsl:when> + <xsl:otherwise> + <ul> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="@empty='true'"> + <xsl:attribute name="class"> + <xsl:text>empty</xsl:text> + <xsl:if test="@bare='true'"> + <xsl:text> bare</xsl:text> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">support for "bare" is experimental, see https://trac.tools.ietf.org/tools/xml2rfc/trac/ticket/547 for more information</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="@bare and @bare!='true'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">the only valid value for "bare" is "true"</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:attribute> + </xsl:if> + <xsl:if test="@bare and not(@empty='true')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">"bare" attribute is ignored when "empty" is not "true"</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:apply-templates /> + </ul> + </xsl:otherwise> + </xsl:choose> + </div> +</xsl:template> + +<xsl:template match="li"> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="(parent::ol or parent::ul) and ../@indent and number(../@indent)&gt;4"> + <xsl:attribute name="style">padding-left: <xsl:value-of select="(../@indent div 2) - 2"/>em</xsl:attribute> + </xsl:if> + <xsl:choose> + 
<xsl:when test="artset|artwork|blockquote|dl|figure|ol|sourcecode|t|ul"> + <xsl:choose> + <xsl:when test="bcp14|cref|em|eref|iref|relref|strong|sub|sup|tt|xref|text()[normalize-space(.)!='']"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">unexpected content in &lt;li&gt;: can not mix block-level and phrase-level elements</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="bcp14|cref|em|eref|iref|relref|strong|sub|sup|tt|xref|text()[normalize-space(.)!='']"> + <xsl:choose> + <xsl:when test="artset|artwork|blockquote|dl|figure|ol|sourcecode|t|ul"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">unexpected content in &lt;li&gt;: can not mix phrase-level and block-level elements</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg">unexpected content in &lt;li&gt;</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="not(following-sibling::li)"> + <xsl:variable name="l"> + <xsl:for-each select=".."> + <xsl:call-template name="get-paragraph-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="xml2rfc-ext-paragraph-links='yes' and $l!=''"> + <a class='self' href='#{$anchor-pref}section.{$l}'>&#xb6;</a> + </xsl:if> + </xsl:if> + </li> +</xsl:template> + +<!-- same for t(ext) elements --> + +<xsl:template name="list-item-generic"> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"/> + </xsl:for-each> + <xsl:apply-templates /> + </li> +</xsl:template> + +<xsl:template name="list-item-hanging"> + <xsl:if test="@hangText!=''"> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:if test="count(preceding-sibling::t)=0"> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + <xsl:value-of select="@hangText" /> + </dt> + </xsl:if> + + <xsl:variable name="dd-content"> + <xsl:apply-templates/> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$dd-content!=''"> + <dd> + <xsl:call-template name="insertInsDelClass"/> + <!-- if hangIndent present, use 0.7 of the specified value (1em is the width of the "m" character --> + <xsl:if test="../@hangIndent"> + <xsl:attribute name="style">margin-left: <xsl:value-of select="format-number(../@hangIndent * 0.7,'#.#')"/>em</xsl:attribute> + </xsl:if> + <xsl:apply-templates /> + </dd> + </xsl:when> + <xsl:otherwise> + <dd>&#160;</dd> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="list-item-format"> + <xsl:variable name="list" select=".." 
/> + <xsl:variable name="format" select="substring-after(../@style,'format ')"/> + <xsl:variable name="pos"> + <xsl:choose> + <xsl:when test="$list/@counter"> + <xsl:number level="any" count="list[@counter=$list/@counter]/t"/> + </xsl:when> + <xsl:otherwise><xsl:value-of select="1 + count(preceding-sibling::t)"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="$format"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </dt> + <dd> + <xsl:apply-templates/> + </dd> +</xsl:template> + +<xsl:template match="list/t | list/ed:replace/ed:*/t"> + <xsl:variable name="style" select="ancestor::list[@style][1]/@style"/> + <xsl:choose> + <xsl:when test="not($style) or $style='empty' or $style='letters' or $style='numbers' or $style='symbols'"> + <xsl:if test="@hangText"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="'t/@hangText used on unstyled list'"/> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="list-item-generic"/> + </xsl:when> + <xsl:when test="starts-with($style, 'format ')"> + <xsl:call-template name="list-item-format"/> + </xsl:when> + <xsl:when test="$style='hanging'"> + <xsl:call-template name="list-item-hanging"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported style attribute: ', $style)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="list-lt-generic"> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="t" /> + </li> +</xsl:template> + +<xsl:template name="list-lt-format"> + <xsl:variable name="list" select=".." 
/> + <xsl:variable name="format" select="substring-after(../@style,'format ')" /> + <xsl:variable name="pos"> + <xsl:choose> + <xsl:when test="$list/@counter"> + <xsl:number level="any" count="list[@counter=$list/@counter]/*" /> + </xsl:when> + <xsl:otherwise><xsl:value-of select="position()"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="$format"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </dt> + <dd> + <xsl:apply-templates select="t" /> + </dd> +</xsl:template> + +<xsl:template name="list-lt-hanging"> + <xsl:if test="@hangText!=''"> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + <xsl:value-of select="@hangText" /> + </dt> + </xsl:if> + <dd> + <xsl:call-template name="insertInsDelClass"/> + <!-- if hangIndent present, use 0.7 of the specified value (1em is the width of the "m" character --> + <xsl:if test="../@hangIndent"> + <xsl:attribute name="style">margin-left: <xsl:value-of select="format-number(../@hangIndent * 0.7,'#.#')"/>em</xsl:attribute> + </xsl:if> + <xsl:apply-templates select="t" /> + </dd> +</xsl:template> + +<xsl:template match="list/x:lt"> + <xsl:variable name="style" select="ancestor::list[@style][1]/@style"/> + <xsl:choose> + <xsl:when test="$style='letters' or $style='numbers' or $style='symbols'"> + <xsl:call-template name="list-lt-generic"/> + </xsl:when> + <xsl:when test="starts-with($style, 'format ')"> + <xsl:call-template name="list-lt-format"/> + </xsl:when> + <xsl:when test="$style='hanging'"> + <xsl:call-template name="list-lt-hanging"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported style attribute: ', $style)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="expand-format-percent"> + <xsl:param name="format"/> + <xsl:param name="pos"/> + + <xsl:choose> + <xsl:when test="$format=''"><!-- done--></xsl:when> + <xsl:when test="substring($format,1,1)!='%' or string-length($format)=1"> + <xsl:value-of select="substring($format,1,1)"/> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="substring($format,2)"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="f" select="substring($format,2,1)"/> + <xsl:choose> + <xsl:when test="$f='%'">%</xsl:when> + <xsl:when test="$f='c'"><xsl:number value="$pos" format="a"/></xsl:when> + <xsl:when test="$f='C'"><xsl:number value="$pos" format="A"/></xsl:when> + <xsl:when test="$f='d'"><xsl:number value="$pos"/></xsl:when> + <xsl:when test="$f='i'"><xsl:number value="$pos" format="i"/></xsl:when> + <xsl:when test="$f='I'"><xsl:number value="$pos" format="I"/></xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported % format: ', $f)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + 
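<!-- example: format "R%d." with pos 3 expands to "R3."; "%%" yields a literal '%', and %c/%C/%i/%I number pos as a/A/i/I respectively --> +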
</xsl:choose> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="substring($format,3)"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template match="middle"> + <xsl:call-template name="check-no-text-content"/> + <xsl:apply-templates /> + <xsl:apply-templates select="../back//references"/> +</xsl:template> + +<xsl:template match="note"> + <xsl:call-template name="check-no-text-content"/> + <xsl:variable name="classes"> + <xsl:text>note</xsl:text> + <xsl:text> </xsl:text> + <xsl:if test="@removeInRFC='true'">rfcEditorRemove</xsl:if> + </xsl:variable> + <xsl:variable name="num"><xsl:number/></xsl:variable> + <section class="{normalize-space($classes)}"> + <xsl:call-template name="copy-anchor"/> + <h2 id="{$anchor-pref}note.{$num}" > + <xsl:call-template name="insertInsDelClass"/> + <a href="#{$anchor-pref}note.{$num}"> + <xsl:call-template name="insertTitle" /> + </a> + </h2> + <xsl:if test="@removeInRFC='true' and (not(t) or t[1]!=$note-removeInRFC)"> + <xsl:variable name="t"> + <t><xsl:value-of select="$note-removeInRFC"/></t> + </xsl:variable> + <xsl:variable name="link" select="concat($anchor-pref,'note.',$num,'.p.1')"/> + <div id="{$link}"> + <xsl:apply-templates mode="t-content" select="exslt:node-set($t)//text()"> + <xsl:with-param name="inherited-self-link" select="$link"/> + </xsl:apply-templates> + </div> + </xsl:if> + <xsl:apply-templates /> + </section> +</xsl:template> + +<xsl:template match="postamble"> + <xsl:if test="normalize-space(.) != ''"> + <p> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:apply-templates /> + </p> + </xsl:if> +</xsl:template> + +<xsl:template match="preamble"> + <xsl:if test="normalize-space(.) 
!= ''"> + <p> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:apply-templates /> + </p> + </xsl:if> +</xsl:template> + +<xsl:template name="computed-auto-target"> + <xsl:param name="bib" select="."/> + <xsl:param name="ref"/> + + <xsl:variable name="sec"> + <xsl:choose> + <xsl:when test="$ref and starts-with($ref/@x:rel,'#') and not($ref/@x:sec) and not($ref/@section)"> + <xsl:variable name="extdoc" select="document($bib/x:source/@href)"/> + <xsl:variable name="anch" select="substring-after($ref/@x:rel,'#')"/> + <xsl:for-each select="$extdoc//*[@anchor=$anch or x:anchor-alias/@value=$anch]"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="$ref and $ref/@section"> + <xsl:value-of select="$ref/@section"/> + </xsl:when> + <xsl:when test="$ref"> + <xsl:value-of select="$ref/@x:sec"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$ref and $bib/x:source/@href and $bib/x:source/@basename and $ref/@x:rel"> + <xsl:variable name="extdoc" select="document($bib/x:source/@href)"/> + <xsl:variable name="targetanchor"> + <xsl:variable name="anch" select="substring-after($ref/@x:rel,'#')"/> + <xsl:value-of select="($extdoc//*[@anchor=$anch or x:anchor-alias/@value=$anch])[1]/@anchor"/> + </xsl:variable> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension,'#',$targetanchor)"/> + </xsl:when> + <xsl:when test="$ref and $bib/x:source/@href and $bib/x:source/@basename and $sec!=''"> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension,'#')" /> + <xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$sec"/> + <!-- sanity check on target document --> + <xsl:variable name="d" select="document($bib/x:source/@href)"/> + <xsl:variable name="sections"> + <xsl:text> </xsl:text> + <xsl:apply-templates select="$d//rfc" mode="get-section-numbers"/> + <xsl:text> </xsl:text> + </xsl:variable> + <xsl:if test="not(contains($sections,concat(' ',$sec,' ')))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('apparently dangling reference to ',$sec,' of ',$bib/@anchor)"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="$ref and $bib/x:source/@href and $bib/x:source/@basename and $ref/@anchor"> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension,'#',$ref/@anchor)" /> + </xsl:when> + <!-- tools.ietf.org won't have the "-latest" draft --> + <xsl:when test="$bib/x:source/@href and $bib/x:source/@basename and substring($bib/x:source/@basename, (string-length($bib/x:source/@basename) - string-length('-latest')) + 1)='-latest'"> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension)" /> + </xsl:when> + <!-- TODO: this should handle the case where there's one BCP entry but + multiple RFC entries in a more useful way--> + <xsl:when test="$bib//seriesInfo/@name='RFC'"> + <xsl:variable name="rfcEntries" select="$bib//seriesInfo[@name='RFC']"/> + <xsl:if test="count($rfcEntries)!=1"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('seriesInfo/@name=RFC encountered multiple times for reference ',$bib/@anchor,', will generate link to first entry only')"/> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="$rfcEntries[1]/@value"/> + </xsl:call-template> + <xsl:if test="$ref and $sec!='' and 
$rfcUrlFragSection and $rfcUrlFragAppendix"> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:value-of select="concat('#',$rfcUrlFragAppendix,$sec)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('#',$rfcUrlFragSection,$sec)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:when> + <xsl:when test="$bib//seriesInfo/@name='Internet-Draft'"> + <xsl:variable name="draftName" select="$bib//seriesInfo[@name='Internet-Draft']/@value"/> + <xsl:variable name="endsWithLatest" select="substring($draftName, string-length($draftName) - string-length('-latest') + 1) = '-latest'"/> + <xsl:if test="not($endsWithLatest)"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$draftName"/> + <xsl:with-param name="ref" select="$bib"/> + </xsl:call-template> + <xsl:if test="$ref and $sec!='' and $internetDraftUrlFragSection and $internetDraftUrlFragAppendix"> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:value-of select="concat('#',$internetDraftUrlFragAppendix,$sec)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('#',$internetDraftUrlFragSection,$sec)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:if> + </xsl:when> + <xsl:when test="$bib//x:source/@href and document($bib//x:source/@href)/rfc/@number"> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="document($bib//x:source/@href)/rfc/@number"/> + </xsl:call-template> + <xsl:if test="$ref and $sec!='' and $rfcUrlFragSection and $rfcUrlFragAppendix"> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:value-of select="concat('#',$rfcUrlFragAppendix,$sec)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('#',$rfcUrlFragSection,$sec)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:when> + <xsl:when test="$bib//x:source/@href and document($bib//x:source/@href)/rfc/@docName"> + <xsl:variable name="draftName" select="document($bib//x:source/@href)/rfc/@docName"/> + <xsl:variable name="endsWithLatest" select="substring($draftName, string-length($draftName) - string-length('-latest') + 1) = '-latest'"/> + <xsl:if test="not($endsWithLatest)"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$draftName"/> + <xsl:with-param name="ref" select="$bib"/> + </xsl:call-template> + <xsl:if test="$ref and $sec!='' and $internetDraftUrlFragSection and $internetDraftUrlFragAppendix"> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:value-of select="concat('#',$internetDraftUrlFragAppendix,$sec)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('#',$internetDraftUrlFragSection,$sec)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:if> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<!-- generates a string with white-space separated section numbers --> +<xsl:template match="node()|@*" mode="get-section-numbers"> + <xsl:apply-templates select="*" mode="get-section-numbers"/> +</xsl:template> +<xsl:template match="section|references|appendix" mode="get-section-numbers"> + <xsl:call-template name="get-section-number"/> + <xsl:text> </xsl:text> + <xsl:apply-templates select="*" mode="get-section-numbers"/> +</xsl:template> + +<!-- titles as plain text --> +<xsl:template match="text()" mode="as-string"> + <xsl:value-of select="."/> +</xsl:template> +<xsl:template match="*" 
mode="as-string"> + <xsl:apply-templates select="node()" mode="as-string"/> +</xsl:template> +<xsl:template match="br" mode="as-string"> + <xsl:text> </xsl:text> +</xsl:template> + +<xsl:template name="get-title-as-string"> + <xsl:param name="node" select="."/> + <xsl:variable name="t"> + <xsl:for-each select="$node"> + <xsl:choose> + <xsl:when test="name"> + <xsl:apply-templates select="name/node()" mode="as-string"/> + </xsl:when> + <xsl:when test="@title"> + <xsl:value-of select="@title"/> + </xsl:when> + <xsl:when test="self::abstract">Abstract</xsl:when> + <xsl:when test="self::references">References</xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:for-each> + </xsl:variable> + <xsl:value-of select="normalize-space($t)"/> +</xsl:template> + +<xsl:template name="compute-section-number"> + <xsl:param name="bib"/> + <xsl:param name="ref"/> + + <xsl:variable name="anch" select="substring-after($ref/@x:rel,'#')"/> + + <xsl:choose> + <xsl:when test="$anch=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Not a fragment identifier: <xsl:value-of select="$ref/@x:rel"/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="extdoc" select="document($bib/x:source/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor=$anch or x:anchor-alias/@value=$anch]"/> + <xsl:if test="not($nodes)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$anch"/>' in <xsl:value-of select="$bib/@anchor"/> not found in source file '<xsl:value-of select="$bib/x:source/@href"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:variable name="number"> + <xsl:for-each select="$nodes"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="starts-with($number,$unnumbered)"> + <xsl:choose> + <xsl:when test="$nodes[1]/ancestor::back">A@</xsl:when> + <xsl:otherwise>S@</xsl:otherwise> + </xsl:choose> + <xsl:call-template name="get-title-as-string"> + <xsl:with-param name="node" select="$nodes[1]"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$number"/> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="computed-target"> + <xsl:param name="bib"/> + <xsl:param name="ref"/> + + <xsl:variable name="bibtarget"> + <xsl:choose> + <xsl:when test="starts-with($bib/@target,'http://www.rfc-editor.org/info/rfc') or starts-with($bib/@target,'https://www.rfc-editor.org/info/rfc') and $ref and ($ref/@x:sec or $ref/@x:rel or $ref/@section or $ref/@relative)"> + <!--ignored, use tools.ietf.org link instead --> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$bib/@target"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$bibtarget!=''"> + <xsl:if test="$ref and $ref/@x:sec"> + <xsl:choose> + <xsl:when test="$ref/@x:rel"> + <xsl:value-of select="concat($bib/@target,$ref/@x:rel)"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Can't generate section link for to <xsl:value-of select="$bib/@anchor"/>; no @x:rel specified</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:if test="$ref and $ref/@section"> + <xsl:choose> + <xsl:when test="$ref/@relative"> + <xsl:value-of select="concat($bib/@target,$ref/@relative)"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Can't 
generate section link to <xsl:value-of select="$bib/@anchor"/>; no @relative specified</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param name="bib" select="$bib"/> + <xsl:with-param name="ref" select="$ref"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="compute-doi"> + <xsl:param name="rfc"/> + <xsl:choose> + <xsl:when test="$rfc!=''"> + <xsl:value-of select="concat('10.17487/RFC', format-number($rfc,'#0000'))"/> + </xsl:when> + <!-- xref seems to be for BCP, not RFC --> + <xsl:when test=".//seriesInfo[@name='BCP'] and starts-with(@anchor, 'BCP')" /> + <xsl:when test=".//seriesInfo[@name='RFC'] and not(normalize-space((.//organization)[1])='RFC Errata') and not(starts-with(@target,'http://www.rfc-editor.org') or starts-with(@target,'https://www.rfc-editor.org'))"> + <xsl:variable name="t" select=".//seriesInfo[@name='RFC'][1]/@value"/> + <xsl:value-of select="concat('10.17487/RFC', format-number($t,'#0000'))"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<!-- processed elsewhere --> +<xsl:template match="displayreference"> + <xsl:variable name="t" select="@to"/> + <xsl:if test="//reference/@anchor=$t or count(//displayreference[@to=$t])!=1"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">displayreference <xsl:value-of select="$t"/> will create non-unique reference name.</xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="displayname-for-author"> + <xsl:param name="not-reversed"/> + + <xsl:variable name="surname"> + <xsl:call-template name="get-author-surname"/> + </xsl:variable> + + <xsl:variable name="initials"> + <xsl:call-template name="format-initials"/> + </xsl:variable> + <xsl:variable name="truncated-initials"> + <xsl:call-template name="truncate-initials"> + <xsl:with-param name="initials" select="$initials"/> + </xsl:call-template> + </xsl:variable> + + <!-- surname/initials is reversed for last author except when it's the only one --> + <xsl:choose> + <xsl:when test="$truncated-initials='' and $surname"> + <xsl:value-of select="$surname"/> + </xsl:when> + <xsl:when test="$not-reversed"> + <xsl:value-of select="concat($truncated-initials,' ',@surname)" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat($surname,', ',$truncated-initials)" /> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@asciiSurname!='' or @asciiInitials!=''"> + <xsl:text> (</xsl:text> + <xsl:variable name="i"> + <xsl:choose> + <xsl:when test="@asciiInitials!=''"> + <xsl:value-of select="@asciiInitials"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$truncated-initials"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="s"> + <xsl:choose> + <xsl:when test="@asciiSurname!=''"> + <xsl:value-of select="@asciiSurname"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$surname"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="$i=''"> + <xsl:value-of select="$s"/> + </xsl:when> + <xsl:when test="$not-reversed"> + <xsl:value-of select="concat($i,' ',$s)" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat($s,', ',$i)" /> + </xsl:otherwise> + </xsl:choose> + <xsl:text>)</xsl:text> + </xsl:if> + <xsl:if test="@role='editor'"> + <xsl:text>, Ed.</xsl:text> + </xsl:if> +</xsl:template> + +<xsl:template name="link-ref-title-to"> +
<xsl:choose> + <xsl:when test="starts-with(@target,'http://www.rfc-editor.org/info/rfc') or starts-with(@target,'https://www.rfc-editor.org/info/rfc')"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Ignoring @target <xsl:value-of select="@target"/> in link calculation</xsl:with-param> + </xsl:call-template> + <xsl:call-template name="computed-auto-target"/> + </xsl:when> + <xsl:when test=".//seriesInfo/@name='RFC' and (@target='http://www.rfc-editor.org' or @target='https://www.rfc-editor.org') and starts-with(front/title,'Errata ID ') and front/author/organization='RFC Errata'"> + <!-- check for erratum link --> + <xsl:variable name="eid" select="normalize-space(substring(front/title,string-length('Errata ID ')))"/> + <xsl:call-template name="compute-rfc-erratum-uri"> + <xsl:with-param name="eid" select="$eid"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="@target"> + <xsl:if test="normalize-space(@target)=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">invalid (empty) target attribute in reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="normalize-space(@target)" /> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="computed-auto-target"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="emit-series-info"> + <xsl:param name="multiple-rfcs" select="false()"/> + <xsl:param name="doi"/> + + <xsl:choose> + <xsl:when test="not(@name) and not(@value) and ./text()"> + <xsl:text>, </xsl:text> + <xsl:value-of select="."/> + </xsl:when> + <xsl:when test="@name='RFC' and $multiple-rfcs"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="@value"/> + </xsl:call-template> + </xsl:variable> + <xsl:text>, </xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$uri"/> + <xsl:with-param name="text"> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''"><xsl:text> </xsl:text><xsl:value-of select="@value" /></xsl:if> + </xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="@name='DOI'"> + <xsl:choose> + <xsl:when test="starts-with(@value,'10.17487/RFC') and $xml2rfc-ext-insert-doi='no'"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Removing DOI <xsl:value-of select="@value"/> from &lt;reference> element</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="uri"> + <xsl:call-template name="compute-doi-uri"> + <xsl:with-param name="doi" select="@value"/> + </xsl:call-template> + </xsl:variable> + <xsl:text>, </xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$uri"/> + <xsl:with-param name="text"> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''"><xsl:text> </xsl:text><xsl:value-of select="@value" /></xsl:if> + </xsl:with-param> + </xsl:call-template> + <xsl:if test="$doi!='' and $doi!=@value"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unexpected DOI for RFC, found <xsl:value-of select="@value"/>, expected <xsl:value-of select="$doi"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="@name='ISBN'"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-isbn-uri"> + <xsl:with-param name="isbn" select="@value"/> + </xsl:call-template> + </xsl:variable> + <xsl:text>, </xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param 
name="target" select="$uri"/> + <xsl:with-param name="text"> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''"><xsl:text> </xsl:text><xsl:value-of select="@value" /></xsl:if> + </xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="@name='Internet-Draft'"> + <xsl:variable name="basename"> + <xsl:call-template name="draft-base-name"> + <xsl:with-param name="name" select="@value"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="uri"> + <xsl:call-template name="compute-draft-status-uri"> + <xsl:with-param name="draftname" select="$basename"/> + </xsl:call-template> + </xsl:variable> + <xsl:text>, </xsl:text> + <xsl:choose> + <xsl:when test="number($rfcno) > 7375"> + <!-- special case in RFC formatting since 2015 --> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$uri"/> + <xsl:with-param name="text">Work in Progress</xsl:with-param> + </xsl:call-template> + <xsl:text>, </xsl:text> + <xsl:value-of select="@value" /> + </xsl:when> + <xsl:when test="/rfc/@version >= 3 and $pub-yearmonth >= 201910"> + <!-- https://tools.ietf.org/html/draft-flanagan-7322bis-04#section-4.8.6.3 --> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$uri"/> + <xsl:with-param name="text">Work in Progress</xsl:with-param> + </xsl:call-template> + <xsl:text>, Internet-Draft, </xsl:text> + <xsl:value-of select="@value" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''"><xsl:text> </xsl:text><xsl:value-of select="@value" /></xsl:if> + <xsl:if test="@name='Internet-Draft'"> + <xsl:text> (</xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$uri"/> + <xsl:with-param name="text">work in progress</xsl:with-param> + </xsl:call-template> + <xsl:text>)</xsl:text> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text> + <xsl:value-of select="@name"/> + <xsl:if test="@value!=''"><xsl:text> </xsl:text><xsl:value-of select="@value"/></xsl:if> + </xsl:otherwise> + </xsl:choose> + + <!-- check that BCP FYI STD RFC are in the right order --> + <xsl:if test="(@name='BCP' or @name='FYI' or @name='STD') and preceding-sibling::seriesInfo[@name='RFC']"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">RFC number preceding <xsl:value-of select="@name"/> number in reference '<xsl:value-of select="../@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="find-ref-in-artwork"> + <xsl:variable name="lookup" select="concat('[',@anchor,']')"/> + <xsl:variable name="aw" select="//artwork[contains(.,$lookup)]|//sourcecode[contains(.,$lookup)]"/> + <xsl:for-each select="$aw[1]"> + <xsl:text> (but found in </xsl:text> + <xsl:value-of select="local-name()"/> + <xsl:text> element</xsl:text> + <xsl:call-template name="lineno"/> + <xsl:text>, consider marking up the text content which is supported by this processor, see https://greenbytes.de/tech/webdav/rfc2629xslt/rfc2629xslt.html#extension.pis)</xsl:text> + </xsl:for-each> +</xsl:template> + +<xsl:template match="reference"> + <xsl:call-template name="check-no-text-content"/> + + <!-- check for reference to reference --> + <xsl:variable name="anchor" select="@anchor"/> + <xsl:choose> + <xsl:when test="not(@anchor)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">missing anchor attribute on reference, containing the text: <xsl:value-of select="normalize-space(.)"/></xsl:with-param> + 
</xsl:call-template> + </xsl:when> + <xsl:when test="not(ancestor::ed:del) and (ancestor::rfc and not(key('xref-item',$anchor)))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unused reference '<xsl:value-of select="@anchor"/>'<xsl:call-template name="find-ref-in-artwork"/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(ancestor::ed:del) and (not(ancestor::rfc) and not($src//xref[@target=$anchor]))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unused (included) reference '<xsl:value-of select="@anchor"/>'<xsl:call-template name="find-ref-in-artwork"/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <!-- check normative/informative --> + <xsl:variable name="t-r-is-normative" select="ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="r-is-normative" select="$t-r-is-normative/@x:nrm='true'"/> + <xsl:if test="$r-is-normative and not(ancestor::ed:del)"> + <xsl:variable name="tst"> + <xsl:for-each select="key('xref-item',$anchor)"> + <xsl:variable name="t-is-normative" select="ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="is-normative" select="$t-is-normative/@x:nrm='true'"/> + <xsl:if test="$is-normative">OK</xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$tst=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">all references to the normative reference '<xsl:value-of select="@anchor"/>' appear to be informative</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:call-template name="check-anchor"/> + + <dt id="{@anchor}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + <xsl:call-template name="reference-name"/> + </dt> + + <xsl:call-template name="insert-reference-body"/> + +</xsl:template> + +<xsl:template name="insert-reference-body"> + <xsl:param name="in-reference-group" select="false()"/> + + <xsl:variable name="front" select="front[1]|document(x:source/@href)/rfc/front[1]"/> + <xsl:if test="count($front)=0"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">&lt;front> element missing for '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="count($front)>1"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">&lt;front> can be omitted when &lt;x:source> is specified (for '<xsl:value-of select="@anchor"/>')</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="target"> + <xsl:call-template name="link-ref-title-to"/> + </xsl:variable> + + <dd> + <xsl:call-template name="insertInsDelClass"/> + <xsl:if test="$in-reference-group"> + <xsl:call-template name="copy-anchor"/> + </xsl:if> + + <xsl:for-each select="$front[1]/author"> + <xsl:choose> + <xsl:when test="@surname!='' or (@fullname!='' and normalize-space(@fullname)!=normalize-space(organization))"> + <xsl:variable name="displayname"> + <xsl:call-template name="displayname-for-author"> + <xsl:with-param name="not-reversed" select="position()=last() and position()!=1"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="address/email and $xml2rfc-linkmailto!='no'"> 
+ <a href="mailto:{address/email}"><xsl:value-of select="$displayname" /></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$displayname" /> + </xsl:otherwise> + </xsl:choose> + + <xsl:choose> + <xsl:when test="position()=last() - 1"> + <xsl:if test="last() &gt; 2">,</xsl:if> + <xsl:text> and </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="organization/text()"> + <xsl:choose> + <xsl:when test="address/uri"> + <a href="{address/uri}"><xsl:value-of select="organization" /></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="organization" /> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="organization/@ascii"> + <xsl:value-of select="concat(' (',normalize-space(organization/@ascii),')')"/> + </xsl:if> + <xsl:choose> + <xsl:when test="position()=last() - 1"> + <xsl:if test="last() &gt; 2">,</xsl:if> + <xsl:text> and </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> + </xsl:for-each> + + <xsl:variable name="quoted" select="not($front[1]/title/@x:quotes='false') and not(@quoteTitle='false')"/> + <xsl:variable name="title"> + <xsl:apply-templates select="$front[1]/title/node()" mode="get-text-content"/> + </xsl:variable> + + <xsl:if test="$quoted">&#8220;</xsl:if> + <xsl:choose> + <xsl:when test="string-length($target) &gt; 0"> + <a href="{$target}"><xsl:value-of select="$title"/></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$title"/> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="$quoted">&#8221;</xsl:if> + + <xsl:if test="$front[1]/title/@ascii!=''"> + <xsl:text> (</xsl:text> + <xsl:if test="$quoted">&#8220;</xsl:if> + <xsl:value-of select="normalize-space($front[1]/title/@ascii)" /> + <xsl:if test="$quoted">&#8221;</xsl:if> + <xsl:text>)</xsl:text> + </xsl:if> + + <xsl:variable name="si" select="seriesInfo|$front[1]/seriesInfo"/> + <xsl:if test="seriesInfo and $front[1]/seriesInfo"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">seriesInfo present both on reference and reference/front</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="doi"> + <xsl:choose> + <xsl:when test="$si"> + <xsl:call-template name="compute-doi"/> + </xsl:when> + <xsl:when test="document(x:source/@href)/rfc/@number"> + <xsl:call-template name="compute-doi"> + <xsl:with-param name="rfc" select="document(x:source/@href)/rfc/@number"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:for-each select="$si"> + <xsl:call-template name="emit-series-info"> + <xsl:with-param name="multiple-rfcs" select="count($si[@name='RFC']) > 1"/> + <xsl:with-param name="doi" select="$doi"/> + </xsl:call-template> + </xsl:for-each> + + <!-- fall back to x:source when needed --> + <xsl:if test="not($si) and x:source/@href"> + <xsl:variable name="derivedsi" myns:namespaceless-elements="xml2rfc"> + <xsl:if test="document(x:source/@href)/rfc/@docName"> + <seriesInfo name="Internet-Draft" value="{document(x:source/@href)/rfc/@docName}"/> + </xsl:if> + <xsl:if test="document(x:source/@href)/rfc/@number"> + <seriesInfo name="RFC" value="{document(x:source/@href)/rfc/@number}"/> + </xsl:if> + </xsl:variable> + <xsl:variable name="tsi" select="exslt:node-set($derivedsi)/seriesInfo"/> + <xsl:for-each select="$tsi"> + <xsl:call-template name="emit-series-info"/> + </xsl:for-each> + </xsl:if> + + <!-- Insert DOI for RFCs --> + 
<xsl:if test="$xml2rfc-ext-insert-doi='yes' and $doi!='' and not($si[@name='DOI'])"> + <xsl:text>, </xsl:text> + <xsl:variable name="uri"> + <xsl:call-template name="compute-doi-uri"> + <xsl:with-param name="doi" select="$doi"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}">DOI <xsl:value-of select="$doi"/></a> + </xsl:if> + + <!-- avoid hacks using seriesInfo when it's not really series information --> + <xsl:for-each select="x:prose|refcontent"> + <xsl:text>, </xsl:text> + <xsl:apply-templates/> + </xsl:for-each> + + <xsl:call-template name="insert-pub-date"> + <xsl:with-param name="front" select="$front[1]"/> + </xsl:call-template> + + <xsl:choose> + <xsl:when test="string-length(normalize-space(@target)) &gt; 0"> + <!-- hack: suppress specified target in reference group when it appears to be an info link to the RFC editor page --> + <xsl:if test="not($in-reference-group) or not(contains(@target,'www.rfc-editor.org/info/rfc'))"> + <xsl:text>, &lt;</xsl:text> + <a href="{normalize-space(@target)}"><xsl:value-of select="normalize-space(@target)"/></a> + <xsl:text>&gt;</xsl:text> + </xsl:if> + </xsl:when> + <xsl:when test="not($in-reference-group) and $xml2rfc-ext-link-rfc-to-info-page='yes' and $si[@name='BCP'] and starts-with(@anchor, 'BCP')"> + <xsl:text>, &lt;</xsl:text> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-info-uri"> + <xsl:with-param name="type" select="'bcp'"/> + <xsl:with-param name="no" select="$si[@name='BCP']/@value"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"><xsl:value-of select="$uri"/></a> + <xsl:text>&gt;</xsl:text> + </xsl:when> + <xsl:when test="not($in-reference-group) and $xml2rfc-ext-link-rfc-to-info-page='yes' and $si[@name='RFC']"> + <xsl:text>, &lt;</xsl:text> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-info-uri"> + <xsl:with-param name="type" select="'rfc'"/> + <xsl:with-param name="no" select="$si[@name='RFC']/@value"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"><xsl:value-of select="$uri"/></a> + <xsl:text>&gt;</xsl:text> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <xsl:text>.</xsl:text> + + <xsl:for-each select="annotation"> + <br /> + <xsl:apply-templates /> + </xsl:for-each> + </dd> + + <!-- sanity check on x:source/x:has --> + <xsl:for-each select="x:source/x:has"> + <xsl:variable name="doc" select="document(../@href)"/> + <xsl:variable name="anch" select="@anchor"/> + <xsl:variable name="targ" select="@target"/> + <xsl:if test="not(//*[@target=$anch])"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">x:has with anchor '<xsl:value-of select="$anch"/>' in <xsl:value-of select="../@href"/> is unused</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="@target"> + <xsl:if test="not($doc//*[@anchor=$targ]) and not($doc//x:anchor-alias/@value=$targ)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">x:has with target '<xsl:value-of select="$targ"/>' not defined in <xsl:value-of select="../@href"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:if test="not($doc//*[@anchor=$anch]) and not($doc//x:anchor-alias/@value=$anch)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">x:has with anchor '<xsl:value-of select="$anch"/>' not defined in <xsl:value-of select="../@href"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + +<xsl:template name="insert-pub-date"> + 
<xsl:param name="front"/> + + <xsl:if test="not($front/date) and not (/rfc/@version >= 3)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">&lt;date&gt; missing in reference '<xsl:value-of select="@anchor"/>' (note that it can be empty)</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:choose> + <xsl:when test="$front/date/@year != ''"> + <xsl:if test="normalize-space($front/date)!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">date element has both year attribute and text content: '<xsl:value-of select="$front/date"/>' in reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="string(number($front/date/@year)) = 'NaN'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">date/@year should be a number: '<xsl:value-of select="$front/date/@year"/>' in reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:text>, </xsl:text> + <xsl:if test="$front/date/@month!=''"> + <xsl:if test="front/date/@day!='' and front/date/@x:include-day='true'"> + <xsl:value-of select="front/date/@day"/> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:choose> + <xsl:when test="not(local-name($front/..)='reference') and string(number($front/date/@month)) != 'NaN'"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="number($front/date/@month)"/> + </xsl:call-template> + <xsl:text> </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$front/date/@month"/><xsl:text> </xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:value-of select="$front/date/@year" /> + </xsl:when> + <xsl:when test="document(x:source/@href)/rfc/front"> + <!-- is the date element maybe included and should be defaulted? 
--> + <xsl:value-of select="concat(', ',$xml2rfc-ext-pub-month,' ',$xml2rfc-ext-pub-year)"/> + </xsl:when> + <xsl:when test="normalize-space($front/date)!=''"> + <xsl:text>, </xsl:text> + <xsl:value-of select="normalize-space($front/date)"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template match="referencegroup"> + <xsl:call-template name="check-no-text-content"/> + + <!-- check for reference to reference --> + <xsl:variable name="anchor" select="@anchor"/> + <xsl:choose> + <xsl:when test="not(@anchor)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">missing anchor on reference: <xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(ancestor::ed:del) and (ancestor::rfc and not(key('xref-item',$anchor)))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unused reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(ancestor::ed:del) and (not(ancestor::rfc) and not($src//xref[@target=$anchor]))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unused (included) reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <xsl:call-template name="check-anchor"/> + + <dt id="{@anchor}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + <xsl:call-template name="reference-name"/> + </dt> + + <xsl:variable name="included" select="exslt:node-set($includeDirectives)/myns:include[@in=generate-id(current())]/reference"/> + <xsl:choose> + <xsl:when test="$xml2rfc-sortrefs='yes' and $xml2rfc-symrefs!='no'"> + <xsl:for-each select="reference|$included"> + <xsl:sort select="concat(/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor,.//ed:ins//reference/@anchor)" /> + <xsl:call-template name="insert-reference-body"> + <xsl:with-param name="in-reference-group" select="true()"/> + </xsl:call-template> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:for-each select="reference|$included"> + <xsl:call-template name="insert-reference-body"> + <xsl:with-param name="in-reference-group" select="true()"/> + </xsl:call-template> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> + + <xsl:if test="@target"> + <dd>&lt;<a href="{@target}"><xsl:value-of select="@target"/></a>></dd> + </xsl:if> +</xsl:template> + +<xsl:template match="references"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:variable name="refseccount" select="count(/rfc/back/references)+count(/rfc/back/ed:replace/ed:ins/references)"/> + + <xsl:choose> + <!-- handled in make-references --> + <xsl:when test="ancestor::references"/> + <!-- insert pseudo section when needed --> + <xsl:when test="not(preceding::references) and $refseccount!=1"> + <xsl:call-template name="insert-conditional-hrule"/> + <section id="{$anchor-pref}references"> + <xsl:call-template name="insert-conditional-pagebreak"/> + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-references-section-number"/> + </xsl:variable> + <h2 id="{$anchor-pref}section.{$sectionNumber}"> + <a 
href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$sectionNumber"/> + </xsl:call-template> + </a> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-refparent"/> + </h2> + <xsl:if test="$sectionNumber!=''"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="$sectionNumber"/> + </xsl:call-template> + </xsl:if> + <xsl:for-each select=".|following-sibling::references"> + <xsl:call-template name="make-references"> + <xsl:with-param name="nested" select="true()"/> + </xsl:call-template> + </xsl:for-each> + </section> + </xsl:when> + <xsl:when test="preceding::references"> + <!-- already processed --> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="make-references"> + <xsl:with-param name="nested" select="false()"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="make-references"> + <xsl:param name="nested"/> + + <xsl:variable name="name"> + <xsl:if test="ancestor::ed:del"> + <xsl:text>del-</xsl:text> + </xsl:if> + <xsl:number level="any"/> + </xsl:variable> + + <xsl:variable name="elemtype"> + <xsl:choose> + <xsl:when test="$nested and count(ancestor::references)&gt;=2">h4</xsl:when> + <xsl:when test="$nested">h3</xsl:when> + <xsl:otherwise>h2</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:if test="@title"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @title attribute and name child node present</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="render-name"> + <xsl:with-param name="n" select="name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(@title) or @title=''"> + <xsl:value-of select="$xml2rfc-refparent"/> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">neither @title attribute nor name child node present, choosing default of '<xsl:value-of select="$xml2rfc-refparent"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-section-number"/> + </xsl:variable> + + <xsl:variable name="anchorpostfix"> + <xsl:if test="$nested">.<xsl:value-of select="$name"/></xsl:if> + </xsl:variable> + + <section> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="$name='1'"> + <xsl:call-template name="insert-conditional-pagebreak"/> + </xsl:if> + <div id="{$anchor-pref}references{$anchorpostfix}"> + <xsl:element name="{$elemtype}"> + <xsl:attribute name="id"><xsl:value-of select="concat($anchor-pref,'section.',$sectionNumber)"/></xsl:attribute> + <a href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$sectionNumber"/> + </xsl:call-template> + </a> + <xsl:text> </xsl:text> + <xsl:copy-of select="$title"/> + </xsl:element> + <xsl:if test="$sectionNumber!=''"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="$sectionNumber"/> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="included" select="exslt:node-set($includeDirectives)/myns:include[@in=generate-id(current())]/*[self::reference or self::referencegroup]"/> + <xsl:variable name="refs" select="reference|referencegroup|ed:del|ed:ins|ed:replace|$included"/> + <xsl:choose> + <xsl:when 
test="references"> + <xsl:for-each select="references"> + <xsl:call-template name="make-references"> + <xsl:with-param name="nested" select="true()"/> + </xsl:call-template> + </xsl:for-each> + <xsl:if test="$refs"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Cannot mix &lt;references> elements with other child nodes such as <xsl:value-of select="local-name($refs[1])"/> (these will be ignored)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="$refs"> + <dl class="{$css-reference}"> + <xsl:choose> + <xsl:when test="$xml2rfc-sortrefs='yes' and $xml2rfc-symrefs!='no'"> + <xsl:apply-templates select="$refs"> + <xsl:sort select="concat(/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor,.//ed:ins//reference/@anchor)" /> + </xsl:apply-templates> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="$refs"/> + </xsl:otherwise> + </xsl:choose> + </dl> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </div> + </section> +</xsl:template> + +<xsl:template match="xi:include"> + <xsl:choose> + <xsl:when test="not(parent::references) and not(parent::referencegroup)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'Support for x:include is restricted to child elements of &lt;references> and &lt;referencegroup>'"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- handled elsewhere --> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- processed earlier --> +<xsl:template match="references/name"/> + +<xsl:template match="rfc"> + <xsl:call-template name="check-no-text-content"/> + <xsl:variable name="ignored"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="//processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'SANITYCHECK'"/> + </xsl:call-template> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="//processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'SANITYCHECK'"/> + </xsl:call-template> + </xsl:variable> + + <xsl:variable name="lang"> + <xsl:call-template name="get-lang" /> + </xsl:variable> + + <html lang="{$lang}"> + <head> + <title> + <xsl:if test="$rfcno!=''"> + <xsl:value-of select="concat('RFC ',$rfcno,' - ')"/> + </xsl:if> + <xsl:apply-templates select="front/title" mode="get-text-content" /> + </title> + <xsl:call-template name="insertScripts" /> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-css-resource!='' and function-available('unparsed-text')"> + <xsl:comment><xsl:value-of select="$xml2rfc-ext-css-resource"/></xsl:comment> + <style><xsl:value-of select="unparsed-text($xml2rfc-ext-css-resource,'UTF-8')"/></style> + <xsl:if test="$xml2rfc-ext-css-contents!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">xml2rfc-ext-css-contents ignored, as xml2rfc-ext-css-resource was specified as well</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="$xml2rfc-ext-css-contents!=''"> + <xsl:comment>Specified as xml2rfc-ext-css-contents</xsl:comment> + <style><xsl:value-of select="$xml2rfc-ext-css-contents"/></style> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$xml2rfc-ext-css-resource!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Support for css inclusion requires 'unparsed-text' function support (XSLT 2)</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="insertCss" /> + </xsl:otherwise> + </xsl:choose> + <!-- <link rel="alternate stylesheet" media="screen" title="Plain 
(typewriter)" href="rfc2629tty.css" /> --> + + <!-- link elements --> + <xsl:if test="$xml2rfc-toc='yes'"> + <link rel="Contents" href="#{$anchor-pref}toc" /> + </xsl:if> + <xsl:if test="$xml2rfc-authorship!='no'"> + <link rel="Author" href="#{$anchor-pref}authors" /> + </xsl:if> + <xsl:if test="$xml2rfc-private='' and not($src/rfc/@ipr='none')"> + <xsl:choose> + <xsl:when test="$no-copylong"> + <link rel="License" href="#{$anchor-pref}copyrightnotice" /> + </xsl:when> + <xsl:otherwise> + <link rel="License" href="#{$anchor-pref}copyright" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:if test="$has-index"> + <link rel="Index" href="#{$anchor-pref}index" /> + </xsl:if> + <xsl:apply-templates select="/" mode="links" /> + <xsl:for-each select="x:link|link"> + <link> + <xsl:choose> + <xsl:when test="self::x:link and @basename"> + <xsl:attribute name="href"> + <xsl:value-of select="concat(@basename,'.',$outputExtension)"/> + </xsl:attribute> + <xsl:copy-of select="@rel|@title" /> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="@*" /> + </xsl:otherwise> + </xsl:choose> + </link> + </xsl:for-each> + <xsl:if test="$is-rfc"> + <link rel="Alternate" title="Authoritative ASCII Version" href="http://www.ietf.org/rfc/rfc{$rfcno}.txt" /> + <link rel="Help" title="RFC-Editor's Status Page" href="{$rfc-info-link}" /> + <link rel="Help" title="Additional Information on tools.ietf.org" href="https://tools.ietf.org/html/rfc{$rfcno}"/> + </xsl:if> + + <!-- viewport --> + <meta name="viewport" content="initial-scale=1"/> + + <!-- generator --> + <xsl:if test="$xml2rfc-ext-include-generator!='no'"> + <xsl:variable name="gen"> + <xsl:call-template name="get-generator" /> + </xsl:variable> + <meta name="generator" content="{$gen}" /> + </xsl:if> + + <!-- keywords --> + <xsl:if test="front/keyword"> + <xsl:variable name="keyw"> + <xsl:call-template name="get-keywords" /> + </xsl:variable> + <meta name="keywords" content="{$keyw}" /> + </xsl:if> + + <xsl:if test="$xml2rfc-ext-support-rfc2731!='no'"> + <!-- Dublin Core Metadata --> + <link rel="schema.dcterms" href="http://purl.org/dc/terms/" /> + + <!-- DC creator, see RFC2731 --> + <xsl:for-each select="front/author"> + <xsl:variable name="initials"> + <xsl:call-template name="get-author-initials"/> + </xsl:variable> + <xsl:variable name="surname"> + <xsl:call-template name="get-author-surname"/> + </xsl:variable> + <xsl:variable name="disp"> + <xsl:if test="$surname!=''"> + <xsl:value-of select="$surname"/> + <xsl:if test="$initials!=''"> + <xsl:text>, </xsl:text> + <xsl:value-of select="$initials"/> + </xsl:if> + </xsl:if> + </xsl:variable> + <xsl:if test="normalize-space($disp)!=''"> + <meta name="dcterms.creator" content="{normalize-space($disp)}" /> + </xsl:if> + </xsl:for-each> + + <xsl:if test="$xml2rfc-private=''"> + <xsl:choose> + <xsl:when test="$is-rfc"> + <meta name="dcterms.identifier" content="urn:ietf:rfc:{$rfcno}" /> + </xsl:when> + <xsl:when test="@docName"> + <xsl:variable name="seq"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="@docName"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="number($seq)>=0"> + <meta name="dcterms.identifier" content="urn:ietf:id:{@docName}" /> + </xsl:if> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + <meta name="dcterms.issued"> + <xsl:attribute name="content"> + <xsl:value-of select="concat($xml2rfc-ext-pub-year,'-',$pub-month-numeric)"/> + <xsl:if test="$xml2rfc-ext-pub-day != '' and not($is-rfc)"> + <xsl:value-of 
select="concat('-',format-number($xml2rfc-ext-pub-day,'00'))"/> + </xsl:if> + </xsl:attribute> + </meta> + + <xsl:if test="@obsoletes!=''"> + <xsl:call-template name="rfclist-for-dcmeta"> + <xsl:with-param name="list" select="@obsoletes"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:if test="front/abstract"> + <meta name="dcterms.abstract" content="{normalize-space(front/abstract)}" /> + </xsl:if> + + <xsl:if test="$is-rfc"> + <meta name="dcterms.isPartOf" content="urn:issn:2070-1721" /> + </xsl:if> + + </xsl:if> + + <!-- this replicates dcterms.abstract, but is used by Google & friends --> + <xsl:if test="front/abstract"> + <meta name="description" content="{normalize-space(front/abstract)}" /> + </xsl:if> + </head> + + <xsl:call-template name="body" /> + </html> +</xsl:template> + +<xsl:template name="body"> + <body> + <!-- insert onload scripts, when required --> + <xsl:variable name="onload"> + <xsl:if test="$xml2rfc-ext-insert-metadata='yes' and $is-rfc">getMeta("<xsl:value-of select="$rfcno"/>","rfc.meta");</xsl:if> + <xsl:if test="$xml2rfc-ext-insert-metadata='yes' and /rfc/@docName"> + <xsl:if test="$is-submitted-draft">getMeta("<xsl:value-of select="$draft-basename"/>","<xsl:value-of select="$draft-seq"/>","rfc.meta");</xsl:if> + </xsl:if> + <xsl:if test="/rfc/x:feedback">initFeedback();</xsl:if> + <xsl:if test="$xml2rfc-ext-refresh-from!=''">RfcRefresh.initRefresh()</xsl:if> + </xsl:variable> + <xsl:if test="$onload!=''"> + <xsl:attribute name="onload"> + <xsl:value-of select="$onload"/> + </xsl:attribute> + </xsl:if> + + <xsl:call-template name="add-start-material" /> + + <!-- insert diagnostics --> + <xsl:call-template name="insert-diagnostics"/> + + <xsl:apply-templates select="front" /> + <xsl:apply-templates select="middle" /> + <xsl:call-template name="back" /> + + <xsl:call-template name="add-end-material" /> + </body> +</xsl:template> + +<xsl:template match="t"> + <xsl:param name="inherited-self-link"/> + + <xsl:variable name="textcontent" select="normalize-space(.)"/> + <xsl:variable name="endswith" select="substring($textcontent,string-length($textcontent))"/> + <xsl:variable name="keepwithnext" select="$endswith=':' or @keepWithNext='true'"/> + <xsl:variable name="keepwithprevious" select="@keepWithPrevious='true'"/> + + <xsl:variable name="stype"> + <xsl:choose> + <xsl:when test="ancestor::abstract">abstract</xsl:when> + <xsl:when test="ancestor::note">note</xsl:when> + <xsl:when test="ancestor::boilerplate">boilerplate</xsl:when> + <xsl:otherwise>section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:if test="preceding-sibling::section or preceding-sibling::appendix"> + <xsl:call-template name="inline-warning"> + <xsl:with-param name="msg">The paragraph below is misplaced; maybe a section is closed in the wrong place: </xsl:with-param> + <xsl:with-param name="msg2"><xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="class"> + <xsl:if test="$keepwithnext">avoidbreakafter</xsl:if> + <xsl:text> </xsl:text> + <xsl:if test="$keepwithprevious">avoidbreakbefore</xsl:if> + </xsl:variable> + + <div> + <xsl:if test="not(ancestor::list)"> + <xsl:call-template name="attach-paragraph-number-as-id"/> + </xsl:if> + <xsl:if test="normalize-space($class)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($class)"/></xsl:attribute> + </xsl:if> + <xsl:if test="@indent and number(@indent)&gt;0"> + <xsl:attribute name="style">padding-left: <xsl:value-of select="@indent div 
2"/>em</xsl:attribute> + </xsl:if> + <xsl:apply-templates mode="t-content" select="node()[1]"> + <xsl:with-param name="inherited-self-link" select="$inherited-self-link"/> + <xsl:with-param name="anchor" select="@anchor"/> + </xsl:apply-templates> + </div> +</xsl:template> + +<!-- for t-content, dispatch to default templates if it's block-level content --> +<xsl:template mode="t-content" match="list|figure|texttable"> + <!-- <xsl:comment>t-content block-level</xsl:comment> --> + <xsl:apply-templates select="." /> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content" /> +</xsl:template> + +<!-- ... otherwise group into p elements --> +<xsl:template mode="t-content" match="node()"> + <xsl:param name="inherited-self-link"/> + <xsl:param name="anchor"/> + + <xsl:variable name="p"> + <xsl:for-each select=".."> + <xsl:call-template name="get-paragraph-number" /> + </xsl:for-each> + </xsl:variable> + + <xsl:if test="not(self::text() and normalize-space(.)='' and not(following-sibling::node()))"> + <xsl:variable name="textcontent"> + <xsl:apply-templates mode="t-content2" select="." /> + </xsl:variable> + + <xsl:choose> + <!-- do not open a new p element if this is a whitespace-only text node and no siblings follow --> + <xsl:when test="normalize-space($textcontent)!=''"> + <p> + <xsl:if test="$anchor!=''"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:variable name="stype"> + <xsl:choose> + <xsl:when test="ancestor::abstract">abstract</xsl:when> + <xsl:when test="ancestor::note">note</xsl:when> + <xsl:when test="ancestor::boilerplate">boilerplate</xsl:when> + <xsl:otherwise>section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="anch"> + <xsl:if test="$p!='' and not(ancestor::li) and not(ancestor::x:lt) and not(preceding-sibling::node())"> + <xsl:value-of select="concat($anchor-pref,$stype,'.',$p)"/> + </xsl:if> + </xsl:variable> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:apply-templates mode="t-content2" select="." /> + <xsl:if test="$xml2rfc-ext-paragraph-links='yes'"> + <xsl:if test="$anch!=''"> + <a class='self' href='#{$anch}'>&#xb6;</a> + </xsl:if> + <xsl:if test="$inherited-self-link!=''"> + <a class='self' href='#{$inherited-self-link}'>&#xb6;</a> + </xsl:if> + </xsl:if> + </p> + </xsl:when> + <xsl:otherwise> + <!-- but we still need to emit non textual content, such as irefs --> + <xsl:apply-templates mode="t-content2" select="." /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:apply-templates mode="t-content" select="following-sibling::*[self::list or self::figure or self::texttable][1]" /> +</xsl:template> + +<xsl:template mode="t-content2" match="*"> + <xsl:apply-templates select="." /> + <xsl:if test="not(following-sibling::node()[1] [self::list or self::figure or self::texttable])"> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content2" /> + </xsl:if> +</xsl:template> + +<xsl:template mode="t-content2" match="text()"> + <xsl:apply-templates select="." /> + <xsl:if test="not(following-sibling::node()[1] [self::list or self::figure or self::texttable])"> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content2" /> + </xsl:if> +</xsl:template> + +<xsl:template mode="t-content2" match="comment()|processing-instruction()"> + <xsl:apply-templates select="." 
/> + <xsl:if test="not(following-sibling::node()[1] [self::list or self::figure or self::texttable])"> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content2" /> + </xsl:if> +</xsl:template> + +<xsl:template match="title"> + <xsl:variable name="t" select="normalize-space(.)"/> + <xsl:variable name="tlen" select="string-length($t)"/> + <xsl:variable name="alen" select="string-length(@abbrev)"/> + + <xsl:if test="@abbrev and $alen > 40"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">title/@abbrev too long (max 40 characters)</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$tlen > 40 and (not(@abbrev) or @abbrev='')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">title too long, should supply title/@abbrev attribute with less than 40 characters</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$tlen &lt;= 40 and @abbrev!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">title/@abbrev was specified despite the title being short enough (<xsl:value-of select="$tlen"/>)</xsl:with-param> + <xsl:with-param name="msg2">Title: '<xsl:value-of select="normalize-space($t)"/>', abbreviated title='<xsl:value-of select="@abbrev"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:apply-templates /> +</xsl:template> + +<xsl:template name="insertTitle"> + <xsl:choose> + <xsl:when test="@ed:old-title"> + <del> + <xsl:if test="ancestor-or-self::*[@ed:entered-by] and @ed:datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@ed:datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="@ed:old-title"/> + </del> + <ins> + <xsl:if test="ancestor-or-self::*[@ed:entered-by] and @ed:datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@ed:datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="@title"/> + </ins> + </xsl:when> + <xsl:when test="name"> + <xsl:if test="@title"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @title attribute and name child node present</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="render-name"> + <xsl:with-param name="n" select="name/node()"/> + <xsl:with-param name="strip-links" select="not(ancestor-or-self::figure)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- irefs that are section-level thus can use the section anchor --> +<xsl:variable name="section-level-irefs" select="//section/iref[count(preceding-sibling::*[not(self::iref) and not(self::x:anchor-alias) and not(self::name)])=0]"/> + +<!-- suppress xml2rfc preptool artefacts --> +<xsl:template match="section[author]"/> + +<xsl:template match="section|appendix"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:if test="self::appendix"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The "appendix" element is deprecated, use "section" inside "back" instead.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="sectionNumber"> + <xsl:choose> + <xsl:when test="ancestor::boilerplate"></xsl:when> + <xsl:otherwise><xsl:call-template name="get-section-number" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:if test="not(ancestor::section) and not(ancestor::boilerplate)"> + <xsl:call-template 
name="insert-conditional-hrule"/> + </xsl:if> + + <xsl:variable name="elemtype"> + <xsl:choose> + <xsl:when test="count(ancestor::section) &lt;= 3">h<xsl:value-of select="2 + count(ancestor::section)"/></xsl:when> + <xsl:otherwise>h6</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:if test="$xml2rfc-ext-insert-metadata='yes' and ($is-rfc or $is-submitted-draft) and @anchor='rfc.status'"> + <aside id="{$anchor-pref}meta" class="{$css-docstatus}"></aside> + </xsl:if> + + <xsl:variable name="classes"><xsl:if test="@removeInRFC='true'">rfcEditorRemove</xsl:if></xsl:variable> + + <section> + <xsl:call-template name="copy-anchor"/> + + <xsl:if test="normalize-space($classes)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($classes)"/></xsl:attribute> + </xsl:if> + + <xsl:element name="{$elemtype}"> + <xsl:if test="$sectionNumber!=''"> + <xsl:attribute name="id"><xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$sectionNumber"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="$sectionNumber='1' or $sectionNumber='A'"> + <!-- pagebreak, this the first section --> + <xsl:attribute name="class">np</xsl:attribute> + </xsl:when> + <xsl:when test="not(ancestor::section) and not(ancestor::boilerplate)"> + <xsl:call-template name="insert-conditional-pagebreak"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <xsl:call-template name="insertInsDelClass" /> + + <xsl:if test="$sectionNumber!='' and not(contains($sectionNumber,$unnumbered))"> + <a href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$sectionNumber"/> + <xsl:with-param name="appendixPrefix" select="true()"/> + </xsl:call-template> + </a> + <xsl:text>&#0160;</xsl:text> + </xsl:if> + + <!-- issue tracking? 
--> + <xsl:if test="@ed:resolves"> + <xsl:call-template name="insert-issue-pointer"/> + </xsl:if> + + <xsl:call-template name="check-anchor"/> + <xsl:variable name="anchor"> + <xsl:choose> + <xsl:when test="@anchor"><xsl:value-of select="@anchor"/></xsl:when> + <xsl:otherwise><xsl:call-template name="sluggy-anchor"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="name"> + <xsl:choose> + <xsl:when test="starts-with(@title,'Since ')"> + <xsl:value-of select="substring-after(@title,'Since ')"/> + </xsl:when> + <xsl:when test="starts-with(@title,'draft-')"> + <xsl:value-of select="@title"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="basename"> + <xsl:call-template name="draft-base-name"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="seq"> + <xsl:call-template name="draft-sequence-number"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:variable> + + <xsl:variable name="offset"> + <xsl:choose> + <xsl:when test="starts-with(@title,'Since ')">1</xsl:when> + <xsl:otherwise>0</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="smells-like-change-log" select="ancestor-or-self::section[@removeInRFC='true'] or ancestor::section[@title='Changes'] or ancestor::section[@title='Change Log']"/> + + <xsl:variable name="diff-uri"> + <xsl:if test="$smells-like-change-log and $basename!=''"> + <xsl:variable name="next" select="concat($basename,'-',format-number($offset + $seq,'00'))"/> + <xsl:choose> + <!-- check whether the "next" draft exists (is mentioned in a sibling section --> + <xsl:when test="../section[contains(@title,$next)]"> + <xsl:call-template name="compute-diff-uri"> + <xsl:with-param name="name" select="$next"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="starts-with(ancestor::rfc/@docName,$basename)"> + <xsl:call-template name="compute-latest-diff-uri"> + <xsl:with-param name="name" select="$basename"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:if> + </xsl:variable> + + <xsl:variable name="text-uri"> + <xsl:if test="$smells-like-change-log and $basename!=''"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$name"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$anchor!=''"> + <a href="#{$anchor}"><xsl:call-template name="insertTitle"/></a> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="insertTitle"/> + </xsl:otherwise> + </xsl:choose> + + <xsl:if test="$xml2rfc-ext-paragraph-links='yes' and $text-uri!=''"> + <xsl:text> </xsl:text> + <a class="self" href="{$text-uri}" title="plain text">&#x1f4c4;</a> + </xsl:if> + <xsl:if test="$xml2rfc-ext-paragraph-links='yes' and $diff-uri!=''"> + <xsl:text> </xsl:text> + <a class="self" href="{$diff-uri}" title="diffs">&#x1f50d;</a> + </xsl:if> + </xsl:element> + + <xsl:if test="$sectionNumber!=''"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="$sectionNumber"/> + </xsl:call-template> + </xsl:if> + + <xsl:if test="@removeInRFC='true' and (not(t) or t[1]!=$section-removeInRFC)"> + <xsl:variable name="t"> + <t><xsl:value-of select="$section-removeInRFC"/></t> + </xsl:variable> + <xsl:variable name="link" select="concat($anchor-pref,'section.',$sectionNumber,'.p.1')"/> + <div id="{$link}"> + <xsl:apply-templates mode="t-content" select="exslt:node-set($t)//text()"> + 
<xsl:with-param name="inherited-self-link" select="$link"/> + </xsl:apply-templates> + </div> + </xsl:if> + + <!-- continue with all child elements but the irefs processed above --> + <xsl:for-each select="*"> + <xsl:if test="count(.|$section-level-irefs)!=count($section-level-irefs)"> + <xsl:apply-templates select="."/> + </xsl:if> + </xsl:for-each> + </section> +</xsl:template> + +<!-- errata handling --> +<xsl:template name="insert-errata"> + <xsl:param name="section"/> + <xsl:variable name="es" select="$errata-parsed[section=$section or (not(section) and $section='1')]"/> + <xsl:if test="$es"> + <aside class="{$css-erratum}"> + <xsl:for-each select="$es"> + <xsl:sort select="@eid" data-type="number"/> + <div> + <xsl:variable name="tooltip"> + <xsl:value-of select="@reported-by"/> + <xsl:text>, </xsl:text> + <xsl:value-of select="@reported"/> + <xsl:if test="@type"> (<xsl:value-of select="@type"/>)</xsl:if> + </xsl:variable> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-erratum-uri"> + <xsl:with-param name="eid" select="@eid"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}" title="{$tooltip}">Erratum <xsl:value-of select="@eid"/></a> + <xsl:choose> + <xsl:when test="@status='Verified'"><xsl:text> </xsl:text><span title="verified">&#x2714;</span></xsl:when> + <xsl:when test="@status='Reported'"><xsl:text> </xsl:text><span title="reported">&#x2709;</span></xsl:when> + <xsl:when test="@status='Held for Document Update'"><xsl:text> </xsl:text><span title="held for update">&#x2700;</span></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </div> + </xsl:for-each> + </aside> + </xsl:if> +</xsl:template> + +<!-- already processed by insertTitle --> +<xsl:template match="note/name"/> +<xsl:template match="section/name"/> + +<xsl:template match="spanx[@style='emph' or not(@style)]|em"> + <em> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </em> +</xsl:template> + +<xsl:template match="spanx[@style='verb' or @style='vbare']|tt"> + <span class="tt"> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </span> +</xsl:template> + +<xsl:template match="spanx[@style='strong']|strong"> + <strong> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </strong> +</xsl:template> + +<xsl:template match="spanx[@style!='']" priority="0.1"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown spanx style attribute '<xsl:value-of select="@style"/>' ignored</xsl:with-param> + </xsl:call-template> + <span> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </span> +</xsl:template> + +<xsl:template name="insert-blank-lines"> + <xsl:param name="no"/> + <xsl:choose> + <xsl:when test="$no >= $xml2rfc-ext-vspace-pagebreak"> + <br/> + <!-- done; this probably was an attempt to generate a pagebreak --> + </xsl:when> + <xsl:when test="$no &lt;= 0"> + <br/> + <!-- done --> + </xsl:when> + <xsl:otherwise> + <br/> + <xsl:call-template name="insert-blank-lines"> + <xsl:with-param name="no" select="$no - 1"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="vspace[not(@blankLines)]"> + <br /> +</xsl:template> + +<xsl:template match="vspace"> + <xsl:call-template name="insert-blank-lines"> + <xsl:with-param name="no" select="@blankLines"/> + </xsl:call-template> +</xsl:template> + +<xsl:template match="br"> + <br/> +</xsl:template> + +<!-- keep the root for the case when we process XSLT-inline markup --> +<xsl:variable name="src" select="/" /> + 
+<xsl:template name="render-section-ref"> + <xsl:param name="from" /> + <xsl:param name="to" /> + + <xsl:variable name="refname"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-section-type"/> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="refnum"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-section-number" /> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$from/@format='counter'"> + <xsl:choose> + <xsl:when test="$to/self::abstract"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">xref to abstract with format='counter' not allowed</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$refnum"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$from/@format='title'"> + <xsl:choose> + <xsl:when test="$to/name"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="$to/name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$to/@title"> + <xsl:value-of select="normalize-space($to/@title)"/> + </xsl:when> + <xsl:when test="$to/self::abstract">Abstract</xsl:when> + <xsl:when test="$to/self::references">References</xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:when> + <xsl:when test="$from/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="starts-with($refnum,$unnumbered)"> + <xsl:value-of select="$refname"/> + <xsl:text> "</xsl:text> + <xsl:choose> + <xsl:when test="$to/name"> + <xsl:apply-templates select="$to/name/node()"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$to/@title"/> + </xsl:otherwise> + </xsl:choose> + <xsl:text>"</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat($refname,' ',$refnum))"/> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-section-xref-format"> + <xsl:param name="default"/> + <xsl:choose> + <xsl:when test="self::relref and (*|text())"> + <xsl:if test="@displayFormat"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@displayFormat is ignored on &lt;relref> with content</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="self::relref"> + <xsl:choose> + <xsl:when test="not(@displayFormat)">of</xsl:when> + <xsl:when test="@displayFormat='parens' or @displayFormat='of' or @displayFormat='comma' or @displayFormat='bare'"> + <xsl:value-of select="@displayFormat"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown format for @displayFormat: <xsl:value-of select="@displayFormat"/></xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::xref and @sectionFormat"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@sectionFormat is deprecated, use @x:fmt instead</xsl:with-param> + </xsl:call-template> + <xsl:if test="@x:fmt"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @x:fmt and @sectionFormat specified</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="@sectionFormat='of' or @sectionFormat='comma' or @sectionFormat='parens' or @sectionFormat='bare'"> + <xsl:value-of select="@sectionFormat"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown format for 
@sectionFormat</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::xref and @x:fmt"> + <xsl:choose> + <xsl:when test="@x:fmt='()'">parens</xsl:when> + <xsl:when test="@x:fmt='of'">of</xsl:when> + <xsl:when test="@x:fmt=','">comma</xsl:when> + <xsl:when test="@x:fmt='none'">none</xsl:when> + <xsl:when test="@x:fmt='sec'">section</xsl:when> + <xsl:when test="@x:fmt='number'">bare</xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown format for @x:fmt</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$default"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-section-xref-section"> + <xsl:choose> + <xsl:when test="@section"> + <xsl:value-of select="@section"/> + </xsl:when> + <xsl:when test="@x:sec"> + <xsl:value-of select="@x:sec"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template match="xref[*|text()]|relref[*|text()]"> + + <xsl:variable name="xref" select="."/> + + <xsl:variable name="target"> + <xsl:call-template name="get-target-anchor"/> + </xsl:variable> + <xsl:variable name="node" select="key('anchor-item',$target)" /> + <xsl:variable name="anchor"><xsl:value-of select="$anchor-pref"/>xref.<xsl:value-of select="$target"/>.<xsl:number level="any" count="xref[@target=$target]|relref[@target=$target]"/></xsl:variable> + + <xsl:if test="@target!=$target"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Target '<xsl:value-of select="@target"/>' rewritten to '<xsl:value-of select="$target"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="sfmt"> + <xsl:call-template name="get-section-xref-format"/> + </xsl:variable> + + <xsl:variable name="ssec"> + <xsl:call-template name="get-section-xref-section"/> + </xsl:variable> + + <xsl:variable name="href"> + <xsl:call-template name="computed-target"> + <xsl:with-param name="bib" select="$node"/> + <xsl:with-param name="ref" select="."/> + </xsl:call-template> + </xsl:variable> + + <xsl:choose> + <xsl:when test="self::relref and not($node/self::reference)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">relref/@target must be a reference</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="self::relref and $href=''"> + <xsl:apply-templates/> + </xsl:when> + <xsl:when test="self::relref"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$href"/> + <xsl:with-param name="id" select="$anchor"/> + <xsl:with-param name="child-nodes" select="*|text()"/> + <xsl:with-param name="index-item" select="$target"/> + <xsl:with-param name="index-subitem" select="$ssec"/> + </xsl:call-template> + </xsl:when> + + <!-- $sfmt='none': do not generate any links --> + <xsl:when test="$sfmt='none'"> + <xsl:choose> + <xsl:when test="$node/self::reference"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="id" select="$anchor"/> + <xsl:with-param name="citation-title" select="normalize-space($node/front/title)"/> + <xsl:with-param name="child-nodes" select="*|text()"/> + <xsl:with-param name="index-item" select="$target"/> + <xsl:with-param name="index-subitem" select="$ssec"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + <!-- Other $sfmt values than "none": unsupported --> + <xsl:when test="$sfmt!='' and 
$sfmt!='none'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('ignoring unknown xref section format extension: ',$sfmt)"/> + </xsl:call-template> + </xsl:when> + + <!-- Section links --> + <xsl:when test="$node/self::section or $node/self::appendix"> + <xsl:choose> + <xsl:when test="@format='none' or $xml2rfc-ext-xref-with-text-generate='nothing'"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="concat('#',$target)"/> + <xsl:with-param name="id"> + <xsl:if test="//iref[@x:for-anchor=$target] | //iref[@x:for-anchor='' and ../@anchor=$target]"><xsl:value-of select="$anchor"/></xsl:if> + </xsl:with-param> + <xsl:with-param name="child-nodes" select="*|text()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- index links to this xref --> + <xsl:variable name="ireftargets" select="key('iref-xanch',$target) | key('iref-xanch','')[../@anchor=$target]"/> + + <xsl:apply-templates/> + <xsl:text> (</xsl:text> + <xsl:call-template name="xref-to-section"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + <xsl:with-param name="id" select="$anchor"/> + <xsl:with-param name="irefs" select="$ireftargets"/> + </xsl:call-template> + <xsl:text>)</xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + <xsl:when test="$node/self::cref and $node/@display='false'"> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Comment ',$node/@anchor,' is hidden and thus can not be referenced')"/> + </xsl:call-template> + </xsl:for-each> + </xsl:when> + + <xsl:when test="$node/self::cref and $xml2rfc-comments='no'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">xref to cref, but comments aren't included in the output</xsl:with-param> + </xsl:call-template> + </xsl:when> + + <xsl:otherwise> + <!-- check normative/informative --> + <xsl:variable name="t-is-normative" select="ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="is-normative" select="$t-is-normative/@x:nrm='true'"/> + <xsl:if test="count($node)=1 and $is-normative"> + <xsl:variable name="t-r-is-normative" select="$node/ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="r-is-normative" select="$t-r-is-normative/@x:nrm='true'"/> + <xsl:if test="not($r-is-normative)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Potentially normative reference to ',$target,' not referenced normatively')"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="concat('#',$target)"/> + <xsl:with-param name="id"> + <xsl:if test="@format='none'"><xsl:value-of select="$anchor"/></xsl:if> + </xsl:with-param> + <xsl:with-param name="child-nodes" select="*|text()"/> + </xsl:call-template> + + <xsl:if test="not(@format='none' or $xml2rfc-ext-xref-with-text-generate='nothing')"> + <xsl:for-each select="$src/rfc/back/references//reference[@anchor=$target]"> + <xsl:text> </xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param name="citation-title" select="normalize-space(front/title)"/> + <xsl:with-param name="id" select="$anchor"/> + <xsl:with-param name="text"> + <xsl:call-template name="reference-name"/> + </xsl:with-param> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:key name="iref-xanch" match="iref[@x:for-anchor]" use="@x:for-anchor"/> + +<!-- xref to section or appendix --> 
+<xsl:template name="xref-to-section"> + <xsl:param name="from"/> + <xsl:param name="to"/> + <xsl:param name="id"/> + <xsl:param name="irefs"/> + + <a href="#{$from/@target}"> + <xsl:if test="$irefs"> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:attribute name="id"><xsl:value-of select="$id"/></xsl:attribute> + </xsl:if> + <xsl:attribute name="title"> + <xsl:call-template name="get-title-as-string"> + <xsl:with-param name="node" select="$to"/> + </xsl:call-template> + </xsl:attribute> + <xsl:call-template name="render-section-ref"> + <xsl:with-param name="from" select="$from"/> + <xsl:with-param name="to" select="$to"/> + </xsl:call-template> + </a> +</xsl:template> + +<!-- xref to figure --> +<xsl:template name="xref-to-figure-text"> + <xsl:param name="from"/> + <xsl:param name="to"/> + + <xsl:variable name="figcnt"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-figure-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$from/@format='counter'"> + <xsl:value-of select="$figcnt" /> + </xsl:when> + <xsl:when test="$from/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$from/@format='title'"> + <xsl:choose> + <xsl:when test="$to/name"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="$to/name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space($to/@title)" /> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat('Figure ',$figcnt))"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="xref-to-figure"> + <xsl:param name="from"/> + <xsl:param name="to"/> + + <xsl:variable name="title"> + <xsl:call-template name="get-title-as-string"> + <xsl:with-param name="node" select="$to"/> + </xsl:call-template> + </xsl:variable> + <a href="#{$from/@target}"> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"> + <xsl:value-of select="$title"/> + </xsl:attribute> + </xsl:if> + <xsl:call-template name="xref-to-figure-text"> + <xsl:with-param name="from" select="$from"/> + <xsl:with-param name="to" select="$to"/> + </xsl:call-template> + </a> +</xsl:template> + +<!-- xref to table --> +<xsl:template name="xref-to-table-text"> + <xsl:param name="from"/> + <xsl:param name="to"/> + + <xsl:variable name="tabcnt"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-table-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$from/@format='counter'"> + <xsl:value-of select="$tabcnt" /> + </xsl:when> + <xsl:when test="$from/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$from/@format='title'"> + <xsl:choose> + <xsl:when test="$to/self::table"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="$to/name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$to/@title" /> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat('Table ',$tabcnt))"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="xref-to-table"> + <xsl:param name="from"/> + <xsl:param name="to"/> + + <xsl:variable name="title"> + <xsl:call-template name="get-title-as-string"> + <xsl:with-param name="node" select="$to"/> + </xsl:call-template> + </xsl:variable> + <a href="#{$from/@target}"> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"> + <xsl:value-of 
select="$title"/> + </xsl:attribute> + </xsl:if> + <xsl:call-template name="xref-to-table-text"> + <xsl:with-param name="from" select="$from"/> + <xsl:with-param name="to" select="$to"/> + </xsl:call-template> + </a> +</xsl:template> + +<!-- xref to paragraph --> +<xsl:template name="xref-to-paragraph-text"> + <xsl:param name="from"/> + <xsl:param name="to"/> + + <xsl:variable name="tcnt"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="pparent" select="$to/.."/> + <xsl:variable name="listtype"> + <xsl:choose> + <xsl:when test="$pparent/self::list"> + <xsl:value-of select="$pparent/@style"/> + </xsl:when> + <xsl:when test="$pparent/self::dl">definition</xsl:when> + <xsl:when test="$pparent/self::ol[@type='a']">letters</xsl:when> + <xsl:when test="$pparent/self::ol[@type='A']">Letters</xsl:when> + <xsl:when test="$pparent/self::ol[@type='i']">rnumbers</xsl:when> + <xsl:when test="$pparent/self::ol[@type='I']">Rnumbers</xsl:when> + <xsl:when test="$pparent/self::ol[string-length(@type)>1]">format <xsl:value-of select="$pparent/self::ol/@type"/></xsl:when> + <xsl:when test="$pparent/self::ol">numbers</xsl:when> + <xsl:otherwise></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="s"> + <xsl:choose> + <xsl:when test="$pparent/self::ol and $pparent/@group"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="$pparent"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$pparent/self::list and $pparent/@counter"> + <xsl:for-each select="$pparent"> + <xsl:value-of select="1 + count(preceding::list[@counter=$pparent/@counter]/*)"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="$pparent/self::ol and $pparent/@start"> + <xsl:value-of select="$pparent/@start"/> + </xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="n"> + <xsl:for-each select="$to"> + <xsl:number/> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="format"> + <xsl:choose> + <xsl:when test="$listtype='letters'">a</xsl:when> + <xsl:when test="$listtype='Letters'">A</xsl:when> + <xsl:when test="$listtype='rnumbers'">i</xsl:when> + <xsl:when test="$listtype='Rnumbers'">I</xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="listindex"> + <xsl:choose> + <xsl:when test="starts-with($listtype,'format ')"> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="substring-after($listtype,'format ')"/> + <xsl:with-param name="pos" select="$n + $s - 1"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:number value="$n + $s - 1" format="{$format}"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="$from/@format='counter'"> + <xsl:choose> + <xsl:when test="$listtype!='' and $listindex!=''"> + <xsl:value-of select="$listindex"/> + </xsl:when> + <xsl:when test="$listtype!='' and $listindex=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Use of format=counter for unsupported list type ',$listtype)"/> + </xsl:call-template> + <xsl:value-of select="$tcnt"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$tcnt"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$from/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$from/@format='title'"> + <xsl:choose> + <xsl:when test="$to/self::dt"> + <xsl:apply-templates select="$to/node()"/> + 
</xsl:when> + <xsl:when test="$to/@hangText"> + <xsl:value-of select="normalize-space($to/@hangText)"/> + </xsl:when> + <xsl:when test="$to/@title"> + <xsl:value-of select="normalize-space($to/@title)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$to/@anchor"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="pn" select="normalize-space(substring-after($tcnt,'p.'))"/> + <xsl:text>Paragraph </xsl:text> + <xsl:choose> + <xsl:when test="$pn=''"> + <xsl:text>?</xsl:text> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('No paragraph number for link target ',$from/@target)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$pn"/></xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="xref-to-paragraph"> + <xsl:param name="from"/> + <xsl:param name="to"/> + <xsl:param name="anchor"/> + + <a href="#{$anchor}"> + <xsl:call-template name="xref-to-paragraph-text"> + <xsl:with-param name="from" select="$from"/> + <xsl:with-param name="to" select="$to"/> + </xsl:call-template> + </a> +</xsl:template> + +<!-- xref to comment --> +<xsl:template name="xref-to-comment"> + <xsl:param name="from"/> + <xsl:param name="to"/> + + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="concat('#',$from/@target)"/> + <xsl:with-param name="text"> + <xsl:variable name="name"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-comment-name" /> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$from/@format='counter'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">xref to cref with format='counter' not allowed</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$name" /> + </xsl:when> + <xsl:when test="$from/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$from/@format='title'"> + <xsl:value-of select="$to/@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat('Comment ',$name))"/> + </xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-link"> + <xsl:param name="target"/> + <xsl:param name="id"/> + <xsl:param name="title"/> + <xsl:param name="citation-title"/> + <xsl:param name="index-item"/> + <xsl:param name="index-subitem"/> + <xsl:param name="text"/> + <xsl:param name="child-nodes"/> + + <xsl:if test="$text!='' and $child-nodes"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">emit-link called both with text and child-nodes</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="element"> + <xsl:choose> + <xsl:when test="$target!=''">a</xsl:when> + <xsl:when test="$citation-title!=''">cite</xsl:when> + <xsl:when test="($id!='' and $xml2rfc-ext-include-references-in-index='yes') or $title!=''">span</xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$element!=''"> + <xsl:element name="{$element}"> + <xsl:if test="$target!=''"> + <xsl:attribute name="href"><xsl:value-of select="$target"/></xsl:attribute> + </xsl:if> + <xsl:if test="$element='cite' and $citation-title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$citation-title"/></xsl:attribute> + </xsl:if> + <xsl:if test="$id!='' and $xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of 
select="$id"/></xsl:attribute> + </xsl:if> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="$element!='cite' and $citation-title!=''"> + <cite title="{$citation-title}"> + <xsl:choose> + <xsl:when test="$child-nodes"> + <xsl:apply-templates select="$child-nodes"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$text"/> + </xsl:otherwise> + </xsl:choose> + </cite> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="$child-nodes"> + <xsl:apply-templates select="$child-nodes"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$text"/> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + </xsl:element> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="$child-nodes"> + <xsl:apply-templates select="$child-nodes"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$text"/> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- xref to reference --> +<xsl:template name="xref-to-reference"> + <xsl:param name="from"/> + <xsl:param name="to"/> + <xsl:param name="id"/> + + <xsl:variable name="front" select="$to/front[1]|document($to/x:source/@href)/rfc/front[1]"/> + + <xsl:variable name="is-xref" select="$from/self::xref"/> + + <xsl:variable name="sfmt"> + <xsl:for-each select="$from"> + <xsl:call-template name="get-section-xref-format"> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="ancestor::artwork or ancestor::sourcecode">comma</xsl:when> + <xsl:otherwise>of</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + </xsl:for-each> + </xsl:variable> + + <xsl:variable name="ssec"> + <xsl:for-each select="$from"> + <xsl:call-template name="get-section-xref-section"/> + </xsl:for-each> + </xsl:variable> + + <!-- check normative/informative --> + <xsl:variable name="t-is-normative" select="$from/ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="is-normative" select="$t-is-normative/@x:nrm='true'"/> + <xsl:if test="count($to)=1 and $is-normative"> + <xsl:variable name="t-r-is-normative" select="$to/ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="r-is-normative" select="$t-r-is-normative/@x:nrm='true'"/> + <xsl:if test="not($r-is-normative)"> + <xsl:for-each select="$from"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Potentially normative reference to ',$from/@target,' not referenced normatively')"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + </xsl:if> + + <xsl:variable name="href"> + <xsl:call-template name="computed-target"> + <xsl:with-param name="bib" select="$to"/> + <xsl:with-param name="ref" select="$from"/> + </xsl:call-template> + </xsl:variable> + + <xsl:variable name="tsec"> + <xsl:choose> + <xsl:when test="starts-with($from/@x:rel,'#') and $ssec=''"> + <xsl:call-template name="compute-section-number"> + <xsl:with-param name="bib" select="$to"/> + <xsl:with-param name="ref" select="$from"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$from/@x:rel and not(starts-with($from/@x:rel,'#')) and $ssec=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">x:rel attribute '<xsl:value-of select="$from/@x:rel"/>' in reference to <xsl:value-of select="$to/@anchor"/> is expected to start with '#'.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$ssec"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable 
name="sec"> + <xsl:choose> + <xsl:when test="contains($tsec,'@')">"<xsl:value-of select="substring-after($tsec,'@')"/>"</xsl:when> + <xsl:otherwise><xsl:value-of select="$tsec"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="secterm"> + <xsl:choose> + <!-- starts with letter or unnumbered? --> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')='' or starts-with($tsec,'A@')">Appendix</xsl:when> + <xsl:otherwise>Section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="starts-with($from/@x:rel,'#') and $ssec='' and $to/x:source/@href"> + <xsl:variable name="extdoc" select="document($to/x:source/@href)"/> + <xsl:variable name="anch" select="substring-after($from//@x:rel,'#')"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor=$anch or x:anchor-alias/@value=$anch]"/> + <xsl:if test="not($nodes)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="substring-after($from//@x:rel,'#')"/>' not found in <xsl:value-of select="$to/x:source/@href"/>.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="$nodes"> + <xsl:value-of select="@title"/> + </xsl:for-each> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> + </xsl:variable> + + <!-- + Formats: + + parens [XXXX] (Section SS) + comma [XXXX], Section SS + of Section SS of [XXXX] + sec Section SS + number SS + --> + + <xsl:if test="$sfmt!='' and not($sfmt='of' or $sfmt='section' or $sfmt='bare' or $sfmt='parens' or $sfmt='comma')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('unknown xref section format extension: ',$sfmt)"/> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$sec!=''"> + <xsl:choose> + <xsl:when test="$sfmt='of'"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$href"/> + <xsl:with-param name="text" select="concat($secterm,' ',$sec)"/> + <xsl:with-param name="title" select="$title"/> + </xsl:call-template> + <xsl:text> of </xsl:text> + </xsl:when> + <xsl:when test="$sfmt='section'"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$href"/> + <xsl:with-param name="text" select="concat($secterm,' ',$sec)"/> + <xsl:with-param name="title" select="$title"/> + <xsl:with-param name="id"> + <xsl:if test="$sfmt='section'"> + <xsl:value-of select="$id"/> + </xsl:if> + </xsl:with-param> + <xsl:with-param name="index-item" select="$from/@target"/> + <xsl:with-param name="index-subitem" select="$sec"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$sfmt='bare'"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$href"/> + <xsl:with-param name="text" select="$sec"/> + <xsl:with-param name="title" select="$title"/> + <xsl:with-param name="id" select="$id"/> + <xsl:with-param name="index-item" select="$from/@target"/> + <xsl:with-param name="index-subitem" select="$sec"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> + </xsl:if> + + <xsl:if test="$sec='' or ($sfmt!='section' and $sfmt!='bare')"> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="concat('#',$from/@target)"/> + <xsl:with-param name="text"> + <xsl:variable name="val"> + <xsl:call-template name="reference-name"> + <xsl:with-param name="node" select="$to" /> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$is-xref and $from/@format='none'"> + <!-- nothing to do here --> + </xsl:when> + <xsl:when 
test="$is-xref and $from/@format='counter'"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">xref to reference with format='counter' not allowed</xsl:with-param> + </xsl:call-template> + <!-- remove brackets --> + <xsl:value-of select="substring($val,2,string-length($val)-2)"/> + </xsl:when> + <xsl:when test="$is-xref and $from/@format='title'"> + <xsl:choose> + <xsl:when test="$to/self::referencegroup"> + <xsl:value-of select="$to/@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="$front[1]/title/node()" mode="get-text-content"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:if test="not($is-xref) and $from/@format"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@format attribute is undefined for relref</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="$val"/> + </xsl:otherwise> + </xsl:choose> + </xsl:with-param> + <xsl:with-param name="id" select="$id"/> + <xsl:with-param name="index-item" select="$from/@target"/> + <xsl:with-param name="index-subitem" select="$sec"/> + <xsl:with-param name="citation-title" select="normalize-space($front[1]/title)"/> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$sec!=''"> + <xsl:choose> + <xsl:when test="$sfmt='parens'"> + <xsl:text> (</xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$href"/> + <xsl:with-param name="text" select="concat($secterm,' ',$sec)"/> + <xsl:with-param name="title" select="$title"/> + </xsl:call-template> + <xsl:text>)</xsl:text> + </xsl:when> + <xsl:when test="$sfmt='comma'"> + <xsl:text>, </xsl:text> + <xsl:call-template name="emit-link"> + <xsl:with-param name="target" select="$href"/> + <xsl:with-param name="text" select="concat($secterm,' ',$sec)"/> + <xsl:with-param name="title" select="$title"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:if> + +</xsl:template> + +<xsl:template name="get-target-anchor"> + <xsl:variable name="xref" select="."/> + <xsl:for-each select="$src"> + <xsl:variable name="tn" select="key('anchor-item',$xref/@target)|exslt:node-set($includeDirectives)//reference[@anchor=$xref/@target]"/> + <xsl:for-each select="$src"> + <xsl:choose> + <xsl:when test="$tn/parent::artset and $tn/../@anchor"> + <xsl:value-of select="$tn/../@anchor"/> + </xsl:when> + <xsl:when test="$tn/parent::artset and $tn/../artwork/@anchor"> + <xsl:value-of select="$tn/../artwork[@anchor][1]/@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$xref/@target"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:for-each> +</xsl:template> + +<xsl:template match="xref[not(*|text())]|relref[not(*|text())]"> + + <xsl:variable name="xref" select="."/> + + <xsl:variable name="target"> + <xsl:call-template name="get-target-anchor"/> + </xsl:variable> + + <xsl:if test="@target!=$target"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Target '<xsl:value-of select="@target"/>' rewritten to '<xsl:value-of select="$target"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="anchor"><xsl:value-of select="$anchor-pref"/>xref.<xsl:value-of select="$target"/>.<xsl:number level="any" count="xref[@target=$target]|relref[@target=$target]"/></xsl:variable> + + <!-- ensure we have the right context, this <xref> may be processed from within the boilerplate --> + <xsl:for-each select="$src"> + + <xsl:variable name="node" 
select="key('anchor-item',$target)|exslt:node-set($includeDirectives)//*[self::reference or self::referencegroup][@anchor=$target]"/> + <xsl:if test="count($node)=0 and not($node/ancestor::ed:del)"> + <xsl:for-each select="$xref"> + <xsl:choose> + <xsl:when test="not($xref/@target)"> + <xsl:variable name="present"> + <xsl:for-each select="$xref/@*"> + <xsl:text> @</xsl:text> + <xsl:value-of select="local-name(.)"/> + </xsl:for-each> + </xsl:variable> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Undefined target: no @target attribute specified<xsl:if test="$present!=''"> (attributes found:<xsl:value-of select="$present"/>)</xsl:if></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Undefined target: '<xsl:value-of select="$xref/@target"/>'</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:if> + + <xsl:choose> + + <!-- Section links --> + <xsl:when test="$node/self::section or $node/self::appendix or $node/self::references or $node/self::abstract or $node/self::note"> + <!-- index links to this xref --> + <xsl:variable name="ireftargets" select="key('iref-xanch',$target) | key('iref-xanch','')[../@anchor=$target]"/> + + <xsl:call-template name="xref-to-section"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + <xsl:with-param name="id" select="$anchor"/> + <xsl:with-param name="irefs" select="$ireftargets"/> + </xsl:call-template> + </xsl:when> + + <!-- Figure links --> + <xsl:when test="$node/self::figure"> + <xsl:call-template name="xref-to-figure"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + </xsl:call-template> + </xsl:when> + + <!-- Table links --> + <xsl:when test="$node/self::texttable or $node/self::table"> + <xsl:call-template name="xref-to-table"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + </xsl:call-template> + </xsl:when> + + <!-- Paragraph links --> + <xsl:when test="$node/self::t or $node/self::aside or $node/self::x:note or $node/self::blockquote or $node/self::x:blockquote or $node/self::dl or $node/self::ol or $node/self::ul or $node/self::dd or $node/self::dt or $node/self::li or $node/self::artwork or $node/self::sourcecode or $node/self::artset"> + <xsl:call-template name="xref-to-paragraph"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + <xsl:with-param name="anchor" select="$target"/> + </xsl:call-template> + </xsl:when> + + <!-- Comment links --> + <xsl:when test="$node/self::cref"> + <xsl:choose> + <xsl:when test="$node/@display='false'"> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Comment ',$node/@anchor,' is hidden and thus can not be referenced')"/> + </xsl:call-template> + </xsl:for-each> + </xsl:when> + <xsl:when test="$xml2rfc-comments!='no'"> + <xsl:call-template name="xref-to-comment"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">xref to cref, but comments aren't included in the output</xsl:with-param> + </xsl:call-template> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + <!-- Reference links --> + <xsl:when test="$node/self::reference 
or $node/self::referencegroup"> + <xsl:call-template name="xref-to-reference"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + <xsl:with-param name="id" select="$anchor"/> + </xsl:call-template> + </xsl:when> + + <xsl:otherwise> + <xsl:if test="$node"> + <!-- make it the correct context --> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('xref to unknown element: ',name($node))"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + + +<!-- mark unmatched elements red --> + +<xsl:template match="*"> + <xsl:call-template name="error"> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="msg">no XSLT template for element '<xsl:value-of select="name()"/>'</xsl:with-param> + </xsl:call-template> + <span class="tt {$css-error}">&lt;<xsl:value-of select="name()" />&gt;</span> + <xsl:copy><xsl:apply-templates select="node()|@*" /></xsl:copy> + <span class="tt {$css-error}">&lt;/<xsl:value-of select="name()" />&gt;</span> +</xsl:template> + +<xsl:template match="/"> + <xsl:apply-templates select="*" mode="validate"/> + <xsl:apply-templates select="*" /> +</xsl:template> + +<!-- utility templates --> + +<xsl:template name="collectLeftHeaderColumn"> + <!-- default case --> + <xsl:if test="$xml2rfc-private=''"> + <xsl:if test="count(/rfc/front/workgroup)>1"> + <xsl:call-template name="error"> + <xsl:with-param name="inline">no</xsl:with-param> + <xsl:with-param name="msg">There are multiple /rfc/front/workgroup elements; ignoring all but the first</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="/rfc/front/workgroup"> + <xsl:variable name="v" select="normalize-space(.)"/> + <xsl:if test="translate($v, $ucase, $lcase)='internet engineering task force' or $v=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">'<xsl:value-of select="$v"/>' definitively is not the name of a Working Group</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + <xsl:choose> + <xsl:when test="$is-rfc and $header-format='2010' and $submissionType='independent'"> + <myns:item>Independent Submission</myns:item> + </xsl:when> + <xsl:when test="$is-rfc and $header-format='2010' and $submissionType='IETF'"> + <myns:item>Internet Engineering Task Force (IETF)</myns:item> + </xsl:when> + <xsl:when test="$is-rfc and $header-format='2010' and $submissionType='IRTF'"> + <myns:item>Internet Research Task Force (IRTF)</myns:item> + </xsl:when> + <xsl:when test="$is-rfc and $header-format='2010' and $submissionType='IAB'"> + <myns:item>Internet Architecture Board (IAB)</myns:item> + </xsl:when> + <xsl:when test="/rfc/front/workgroup and (not($is-rfc) or $rfcno='')"> + <xsl:choose> + <xsl:when test="starts-with(/rfc/@docName,'draft-ietf-') and $submissionType='IETF'"/> + <xsl:when test="starts-with(/rfc/@docName,'draft-irft-') and $submissionType='IRTF'"/> + <xsl:otherwise> + <xsl:call-template name="info"> + <xsl:with-param name="msg">The /rfc/front/workgroup should only be used for Working/Research Group drafts</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:for-each select="/rfc/front/workgroup"> + <xsl:variable name="v" select="normalize-space(.)"/> + <xsl:variable name="tmp" select="translate($v, $ucase, $lcase)"/> + <xsl:if test="contains($tmp,' research group') or contains($tmp,' working group')"> + <xsl:call-template name="info"> + 
<xsl:with-param name="msg">No need to include 'Working Group' or 'Research Group' postfix in /rfc/front/workgroup value '<xsl:value-of select="$v"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:variable name="h"> + <!-- when a single name, append WG/RG postfix automatically --> + <xsl:choose> + <xsl:when test="not(contains($v, ' ')) and starts-with(/rfc/@docName,'draft-ietf-') and $submissionType='IETF'"> + <xsl:value-of select="concat($v, ' Working Group')"/> + </xsl:when> + <xsl:when test="not(contains($v, ' ')) and starts-with(/rfc/@docName,'draft-irtf-') and $submissionType='IRTF'"> + <xsl:value-of select="concat($v, ' Research Group')"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$v"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <myns:item> + <xsl:value-of select="$h"/> + </myns:item> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:if test="starts-with(/rfc/@docName,'draft-ietf-') and not(/rfc/front/workgroup)"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">WG submissions should include a /rfc/front/workgroup element</xsl:with-param> + </xsl:call-template> + </xsl:if> + <myns:item>Network Working Group</myns:item> + </xsl:otherwise> + </xsl:choose> + <!-- check <area> value --> + <xsl:for-each select="/rfc/front/area"> + <xsl:variable name="area" select="normalize-space(.)"/> + <xsl:variable name="rallowed"> + <xsl:if test="$pub-yearmonth &lt; 201509"> + <ed:v>Applications</ed:v> + <ed:v>app</ed:v> + </xsl:if> + <xsl:if test="$pub-yearmonth &gt; 201505"> + <ed:v>Applications and Real-Time</ed:v> + <ed:v>art</ed:v> + </xsl:if> + <ed:v>General</ed:v> + <ed:v>gen</ed:v> + <ed:v>Internet</ed:v> + <ed:v>int</ed:v> + <ed:v>Operations and Management</ed:v> + <ed:v>ops</ed:v> + <xsl:if test="$pub-yearmonth &lt; 201509"> + <ed:v>Real-time Applications and Infrastructure</ed:v> + <ed:v>rai</ed:v> + </xsl:if> + <ed:v>Routing</ed:v> + <ed:v>rtg</ed:v> + <ed:v>Security</ed:v> + <ed:v>sec</ed:v> + <ed:v>Transport</ed:v> + <ed:v>tsv</ed:v> + </xsl:variable> + <xsl:variable name="allowed" select="exslt:node-set($rallowed)"/> + <xsl:choose> + <xsl:when test="$allowed/ed:v=$area"> + <!-- ok --> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unknown IETF area: "<xsl:value-of select="$area"/>" - should be one of: <xsl:for-each select="$allowed/ed:v"> + <xsl:text>"</xsl:text> + <xsl:value-of select="."/> + <xsl:text>"</xsl:text> + <xsl:if test="position()!=last()"> + <xsl:text>, </xsl:text> + </xsl:if> + </xsl:for-each> + <xsl:text> (as of the publication date of </xsl:text> + <xsl:value-of select="$pub-yearmonth"/> + <xsl:text>)</xsl:text> + </xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + <myns:item> + <xsl:choose> + <xsl:when test="/rfc/@ipr and not($is-rfc)">Internet-Draft</xsl:when> + <xsl:otherwise> + <xsl:text>Request for Comments: </xsl:text> + <xsl:value-of select="$rfcno"/> + </xsl:otherwise> + </xsl:choose> + </myns:item> + <xsl:if test="/rfc/@obsoletes!=''"> + <myns:item> + <xsl:text>Obsoletes: </xsl:text> + <xsl:call-template name="rfclist"> + <xsl:with-param name="list" select="normalize-space(/rfc/@obsoletes)" /> + </xsl:call-template> + <xsl:if test="not($is-rfc)"> (if approved)</xsl:if> + </myns:item> + </xsl:if> + <xsl:if test="/rfc/@seriesNo"> + <myns:item> + <xsl:choose> + <xsl:when test="/rfc/@category='bcp'"> + <xsl:text>BCP: </xsl:text> + <xsl:value-of select="/rfc/@seriesNo"/> + <xsl:for-each 
select="/rfc/front/seriesInfo[@name='BCP']"> + <xsl:if test="number(@value) != number(/rfc/@seriesNo)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">BCP number given in /rfc/front/seriesInfo (<xsl:value-of select="@value"/>) inconsistent with rfc element (<xsl:value-of select="/rfc/@seriesNo"/>)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + </xsl:when> + <xsl:when test="/rfc/@category='info'"> + <xsl:text>FYI: </xsl:text> + <xsl:value-of select="/rfc/@seriesNo"/> + <xsl:for-each select="/rfc/front/seriesInfo[@name='FYI']"> + <xsl:if test="number(@value) != number(/rfc/@seriesNo)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">FYI number given in /rfc/front/seriesInfo (<xsl:value-of select="@value"/>) inconsistent with rfc element (<xsl:value-of select="/rfc/@seriesNo"/>)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + </xsl:when> + <xsl:when test="/rfc/@category='std'"> + <xsl:text>STD: </xsl:text> + <xsl:value-of select="/rfc/@seriesNo"/> + <xsl:for-each select="/rfc/front/seriesInfo[@name='STD']"> + <xsl:if test="number(@value) != number(/rfc/@seriesNo)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">STD number given in /rfc/front/seriesInfo (<xsl:value-of select="@value"/>) inconsistent with rfc element (<xsl:value-of select="/rfc/@seriesNo"/>)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">There is no IETF document series called '<xsl:value-of select="/rfc/@category"/>'</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="concat(translate(/rfc/@category,$lcase,$ucase),': ',/rfc/@seriesNo)" /> + </xsl:otherwise> + </xsl:choose> + </myns:item> + </xsl:if> + <xsl:if test="/rfc/@updates!=''"> + <myns:item> + <xsl:text>Updates: </xsl:text> + <xsl:call-template name="rfclist"> + <xsl:with-param name="list" select="normalize-space(/rfc/@updates)" /> + </xsl:call-template> + <xsl:if test="not($is-rfc)"> (if approved)</xsl:if> + </myns:item> + </xsl:if> + <myns:item> + <xsl:choose> + <xsl:when test="$is-rfc"> + <xsl:text>Category: </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>Intended status: </xsl:text> + </xsl:otherwise> + </xsl:choose> + <xsl:call-template name="get-category-long" /> + </myns:item> + <xsl:if test="/rfc/@ipr and not($is-rfc)"> + <myns:item>Expires: <xsl:call-template name="expirydate" /></myns:item> + </xsl:if> + </xsl:if> + + <!-- private case --> + <xsl:if test="$xml2rfc-private!=''"> + <myns:item><xsl:value-of select="$xml2rfc-private" /></myns:item> + </xsl:if> + + <xsl:if test="$header-format='2010' and $is-rfc"> + <myns:item>ISSN: 2070-1721</myns:item> + </xsl:if> +</xsl:template> + +<!-- author name handling --> + +<xsl:template name="get-surname-from-fullname"> + <xsl:param name="s"/> + <xsl:variable name="n" select="normalize-space($s)"/> + <xsl:choose> + <xsl:when test="contains($n,' ')"> + <xsl:call-template name="get-surname-from-fullname"> + <xsl:with-param name="s" select="substring-after($n,' ')"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$n"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-author-surname"> + <xsl:variable name="s" select="normalize-space(@surname)"/> + <xsl:choose> + <xsl:when test="$s='' and normalize-space(@fullname)!=''"> + <xsl:variable name="computed"> + <xsl:call-template name="get-surname-from-fullname"> + <xsl:with-param 
name="s" select="@fullname"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="info"> + <xsl:with-param name="msg">author/@surname is missing for author with fullname '<xsl:value-of select="@fullname"/>', extracted as '<xsl:value-of select="normalize-space($computed)"/>'</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="normalize-space($computed)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$s"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-initials-from-fullname"> + <xsl:param name="s"/> + <xsl:variable name="n" select="normalize-space($s)"/> + <xsl:choose> + <xsl:when test="contains($n,' ')"> + <xsl:value-of select="substring($n,1,1)"/><xsl:text>. </xsl:text> + <xsl:call-template name="get-initials-from-fullname"> + <xsl:with-param name="s" select="substring-after($n,' ')"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-author-initials"> + <xsl:variable name="s" select="normalize-space(@initials)"/> + <xsl:choose> + <xsl:when test="$s='' and normalize-space(@fullname)!='' and normalize-space(@surname)=''"> + <xsl:variable name="computed"> + <xsl:call-template name="get-initials-from-fullname"> + <xsl:with-param name="s" select="@fullname"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="info"> + <xsl:with-param name="msg">author/@initials is missing for author with fullname '<xsl:value-of select="@fullname"/>', extracted as '<xsl:value-of select="normalize-space($computed)"/>'</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="normalize-space($computed)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$s"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="collectRightHeaderColumn"> + <xsl:for-each select="author"> + <xsl:variable name="surname"> + <xsl:call-template name="get-author-surname"/> + </xsl:variable> + <xsl:variable name="rawinitials"> + <xsl:call-template name="get-author-initials"/> + </xsl:variable> + <xsl:variable name="initials"> + <xsl:call-template name="format-initials"> + <xsl:with-param name="initials" select="$rawinitials"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="truncated-initials"> + <xsl:call-template name="truncate-initials"> + <xsl:with-param name="initials" select="$initials"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$surname!=''"> + <myns:item> + <xsl:value-of select="$truncated-initials"/> + <xsl:if test="$truncated-initials!=''"> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:value-of select="$surname" /> + <xsl:if test="@asciiInitials!='' or @asciiSurname!=''"> + <xsl:text> (</xsl:text> + <xsl:value-of select="@asciiInitials"/> + <xsl:if test="@asciiInitials!='' and @asciiSurname!=''"> </xsl:if> + <xsl:value-of select="@asciiSurname"/> + <xsl:text>)</xsl:text> + </xsl:if> + <xsl:if test="@role"> + <xsl:choose> + <xsl:when test="@role='editor'"> + <xsl:text>, Editor</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text><xsl:value-of select="@role" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </myns:item> + </xsl:if> + <xsl:variable name="org"> + <xsl:choose> + <xsl:when test="organization/@showOnFrontPage='false'"/> + <xsl:when test="organization/@abbrev"><xsl:value-of select="organization/@abbrev"/></xsl:when> + <xsl:otherwise><xsl:value-of select="organization"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="orgOfFollowing"> + <xsl:choose> + <xsl:when 
test="following-sibling::*[1]/organization/@showOnFrontPage='false'"/> + <xsl:when test="following-sibling::*[1]/organization/@abbrev"><xsl:value-of select="following-sibling::*[1]/organization/@abbrev" /></xsl:when> + <xsl:otherwise><xsl:value-of select="following-sibling::*/organization" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="$org != $orgOfFollowing and $org != ''"> + <myns:item> + <xsl:value-of select="$org"/> + <xsl:if test="organization/@ascii"> + <xsl:value-of select="concat(' (',organization/@ascii,')')"/> + </xsl:if> + </myns:item> + </xsl:if> + </xsl:for-each> + <myns:item> + <xsl:if test="$xml2rfc-ext-pub-month!=''"> + <xsl:if test="$xml2rfc-ext-pub-day!='' and /rfc/front/date/@x:include-day='true' and $is-rfc"> + <xsl:value-of select="number($xml2rfc-ext-pub-day)" /> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:value-of select="$xml2rfc-ext-pub-month" /> + <xsl:if test="$xml2rfc-ext-pub-day!='' and /rfc/@ipr and not($is-rfc)"> + <xsl:text> </xsl:text> + <xsl:value-of select="number($xml2rfc-ext-pub-day)" /> + <xsl:text>,</xsl:text> + </xsl:if> + </xsl:if> + <xsl:if test="$xml2rfc-ext-pub-month='' and $rfcno!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'month missing but is required for RFCs'"/> + </xsl:call-template> + </xsl:if> + <xsl:if test="$xml2rfc-ext-pub-day='' and /rfc/@docName and $rfcno='' and not(substring(/rfc/@docName, string-length(/rfc/@docName) - string-length('-latest') + 1) = '-latest')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('/rfc/front/date/@day appears to be missing for a historic draft dated ', $pub-yearmonth)"/> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="concat(' ',$xml2rfc-ext-pub-year)" /> + </myns:item> +</xsl:template> + + +<xsl:template name="emitheader"> + <xsl:param name="lc" /> + <xsl:param name="rc" /> + + <tbody> + <xsl:for-each select="$lc/myns:item | $rc/myns:item"> + <xsl:variable name="pos" select="position()" /> + <xsl:if test="$pos &lt; count($lc/myns:item) + 1 or $pos &lt; count($rc/myns:item) + 1"> + <tr> + <td class="{$css-left}"><xsl:call-template name="copynodes"><xsl:with-param name="nodes" select="$lc/myns:item[$pos]/node()" /></xsl:call-template></td> + <td class="{$css-right}"><xsl:call-template name="copynodes"><xsl:with-param name="nodes" select="$rc/myns:item[$pos]/node()" /></xsl:call-template></td> + </tr> + </xsl:if> + </xsl:for-each> + </tbody> +</xsl:template> + +<!-- convenience template that avoids copying namespace nodes we don't want --> +<xsl:template name="copynodes"> + <xsl:param name="nodes" /> + <xsl:for-each select="$nodes"> + <xsl:choose> + <xsl:when test="namespace-uri()='http://www.w3.org/1999/xhtml'"> + <xsl:element name="{name()}" namespace="{namespace-uri()}"> + <xsl:copy-of select="@*|node()" /> + </xsl:element> + </xsl:when> + <xsl:when test="self::*"> + <xsl:element name="{name()}"> + <xsl:copy-of select="@*|node()" /> + </xsl:element> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="." /> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + + +<xsl:template name="expirydate"> + <xsl:param name="in-prose"/> + <xsl:choose> + <xsl:when test="number($xml2rfc-ext-pub-day) >= 1"> + <!-- have day of month? 
--> + <xsl:if test="$in-prose"> + <xsl:text>on </xsl:text> + </xsl:if> + <xsl:call-template name="normalize-date"> + <xsl:with-param name="year" select="$xml2rfc-ext-pub-year"/> + <xsl:with-param name="month" select="$pub-month-numeric"/> + <xsl:with-param name="day" select="$xml2rfc-ext-pub-day + 185"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$in-prose"> + <xsl:text>in </xsl:text> + </xsl:if> + <xsl:variable name="month"> + <xsl:call-template name="get-month-as-num"> + <xsl:with-param name="month" select="$xml2rfc-ext-pub-month"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="string(number($month))!='NaN' and number($month) &gt; 0 and number($month) &lt; 7"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="number($month) + 6"/> + </xsl:call-template> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-ext-pub-year" /> + </xsl:when> + <xsl:when test="string(number($month))!='NaN' and number($month) &gt; 6 and number($month) &lt; 13"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="number($month) - 6"/> + </xsl:call-template> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-ext-pub-year + 1" /> + </xsl:when> + <xsl:otherwise>WRONG SYNTAX FOR MONTH</xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="normalize-date"> + <xsl:param name="year"/> + <xsl:param name="month"/> + <xsl:param name="day"/> + + <xsl:variable name="isleap" select="(($year mod 4) = 0 and ($year mod 100 != 0)) or ($year mod 400) = 0" /> + + <!--<xsl:message> + <xsl:value-of select="concat($year,' ',$month,' ',$day)"/> + </xsl:message>--> + + <xsl:variable name="dim"> + <xsl:choose> + <xsl:when test="$month=1 or $month=3 or $month=5 or $month=7 or $month=8 or $month=10 or $month=12">31</xsl:when> + <xsl:when test="$month=2 and $isleap">29</xsl:when> + <xsl:when test="$month=2 and not($isleap)">28</xsl:when> + <xsl:otherwise>30</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$day > $dim and $month=12"> + <xsl:call-template name="normalize-date"> + <xsl:with-param name="year" select="$year + 1"/> + <xsl:with-param name="month" select="1"/> + <xsl:with-param name="day" select="$day - $dim"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$day > $dim"> + <xsl:call-template name="normalize-date"> + <xsl:with-param name="year" select="$year"/> + <xsl:with-param name="month" select="$month + 1"/> + <xsl:with-param name="day" select="$day - $dim"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$month"/> + </xsl:call-template> + <xsl:value-of select="concat(' ',$day,', ',$year)"/> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="get-month-as-num"> + <xsl:param name="month" /> + <xsl:choose> + <xsl:when test="$month='January'">01</xsl:when> + <xsl:when test="$month='February'">02</xsl:when> + <xsl:when test="$month='March'">03</xsl:when> + <xsl:when test="$month='April'">04</xsl:when> + <xsl:when test="$month='May'">05</xsl:when> + <xsl:when test="$month='June'">06</xsl:when> + <xsl:when test="$month='July'">07</xsl:when> + <xsl:when test="$month='August'">08</xsl:when> + <xsl:when test="$month='September'">09</xsl:when> + <xsl:when test="$month='October'">10</xsl:when> + <xsl:when test="$month='November'">11</xsl:when> + <xsl:when 
test="$month='December'">12</xsl:when> + <xsl:otherwise>WRONG SYNTAX FOR MONTH</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-month-as-name"> + <xsl:param name="month"/> + <xsl:choose> + <xsl:when test="$month=1">January</xsl:when> + <xsl:when test="$month=2">February</xsl:when> + <xsl:when test="$month=3">March</xsl:when> + <xsl:when test="$month=4">April</xsl:when> + <xsl:when test="$month=5">May</xsl:when> + <xsl:when test="$month=6">June</xsl:when> + <xsl:when test="$month=7">July</xsl:when> + <xsl:when test="$month=8">August</xsl:when> + <xsl:when test="$month=9">September</xsl:when> + <xsl:when test="$month=10">October</xsl:when> + <xsl:when test="$month=11">November</xsl:when> + <xsl:when test="$month=12">December</xsl:when> + <xsl:otherwise>WRONG SYNTAX FOR MONTH</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- produce back section with author information --> +<xsl:template name="get-authors-section-title"> + <xsl:choose> + <xsl:when test="count(/rfc/front/author)=1">Author's Address</xsl:when> + <xsl:otherwise>Authors' Addresses</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-authors-section-number"> + <xsl:if test="/*/x:assign-section-number[@builtin-target='authors']"> + <xsl:value-of select="/*/x:assign-section-number[@builtin-target='authors']/@number"/> + </xsl:if> +</xsl:template> + +<xsl:template name="insertAuthors"> + + <xsl:variable name="number"> + <xsl:call-template name="get-authors-section-number"/> + </xsl:variable> + + <xsl:if test="$number!='suppress' and $xml2rfc-authorship!='no'"> + <xsl:call-template name="insert-conditional-hrule"/> + + <section id="{$anchor-pref}authors" class="avoidbreakinside"> + <xsl:call-template name="insert-conditional-pagebreak"/> + <h2> + <xsl:if test="$number != ''"> + <a href="#{$anchor-pref}section.{$number}" id="{$anchor-pref}section.{$number}"><xsl:value-of select="$number"/>.</a> + <xsl:text> </xsl:text> + </xsl:if> + <a href="#{$anchor-pref}authors"><xsl:call-template name="get-authors-section-title"/></a> + </h2> + + <xsl:apply-templates select="/rfc/front/author" /> + </section> + </xsl:if> +</xsl:template> + + + +<!-- insert copyright statement --> + +<xsl:template name="insertCopyright" myns:namespaceless-elements="xml2rfc"> + +<boilerplate> + <xsl:if test="not($no-copylong)"> + <section title="Full Copyright Statement" anchor="{$anchor-pref}copyright" x:fixed-section-number="3"> + <xsl:choose> + <xsl:when test="$ipr-rfc3667"> + <t> + <xsl:choose> + <xsl:when test="$ipr-rfc4748"> + Copyright &#169; The IETF Trust (<xsl:value-of select="$xml2rfc-ext-pub-year" />). + </xsl:when> + <xsl:otherwise> + Copyright &#169; The Internet Society (<xsl:value-of select="$xml2rfc-ext-pub-year" />). + </xsl:otherwise> + </xsl:choose> + </t> + <t> + This document is subject to the rights, licenses and restrictions + contained in BCP 78<xsl:if test="$submissionType='independent'"> and at <eref target="http://www.rfc-editor.org/copyright.html">http://www.rfc-editor.org/copyright.html</eref></xsl:if>, and except as set forth therein, the authors + retain all their rights. 
+ </t> + <t> + This document and the information contained herein are provided + on an &#8220;AS IS&#8221; basis and THE CONTRIBUTOR, + THE ORGANIZATION HE/SHE REPRESENTS OR IS SPONSORED BY (IF ANY), + THE INTERNET SOCIETY<xsl:if test="$ipr-rfc4748">, THE IETF TRUST</xsl:if> + AND THE INTERNET ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, + EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE + INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED + WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + </t> + </xsl:when> + <xsl:otherwise> + <!-- <http://tools.ietf.org/html/rfc2026#section-10.4> --> + <t> + Copyright &#169; The Internet Society (<xsl:value-of select="$xml2rfc-ext-pub-year" />). All Rights Reserved. + </t> + <t> + This document and translations of it may be copied and furnished to + others, and derivative works that comment on or otherwise explain it + or assist in its implementation may be prepared, copied, published and + distributed, in whole or in part, without restriction of any kind, + provided that the above copyright notice and this paragraph are + included on all such copies and derivative works. However, this + document itself may not be modified in any way, such as by removing + the copyright notice or references to the Internet Society or other + Internet organizations, except as needed for the purpose of + developing Internet standards in which case the procedures for + copyrights defined in the Internet Standards process must be + followed, or as required to translate it into languages other than + English. + </t> + <t> + The limited permissions granted above are perpetual and will not be + revoked by the Internet Society or its successors or assigns. + </t> + <t> + This document and the information contained herein is provided on an + &#8220;AS IS&#8221; basis and THE INTERNET SOCIETY AND THE INTERNET ENGINEERING + TASK FORCE DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE INFORMATION + HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED WARRANTIES OF + MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + </t> + </xsl:otherwise> + </xsl:choose> + </section> + + <section title="Intellectual Property" anchor="{$anchor-pref}ipr" x:fixed-section-number="4"> + <xsl:choose> + <xsl:when test="$ipr-rfc3667"> + <t> + The IETF takes no position regarding the validity or scope of any + Intellectual Property Rights or other rights that might be claimed to + pertain to the implementation or use of the technology described in + this document or the extent to which any license under such rights + might or might not be available; nor does it represent that it has + made any independent effort to identify any such rights. Information + on the procedures with respect to rights in RFC documents + can be found in BCP 78 and BCP 79. + </t> + <t> + Copies of IPR disclosures made to the IETF Secretariat and any + assurances of licenses to be made available, or the result of an + attempt made to obtain a general license or permission for the use + of such proprietary rights by implementers or users of this + specification can be obtained from the IETF on-line IPR repository + at <eref target="http://www.ietf.org/ipr">http://www.ietf.org/ipr</eref>. 
+ </t> + <t> + The IETF invites any interested party to bring to its attention any + copyrights, patents or patent applications, or other proprietary + rights that may cover technology that may be required to implement + this standard. Please address the information to the IETF at + <eref target="mailto:ietf-ipr@ietf.org">ietf-ipr@ietf.org</eref>. + </t> + </xsl:when> + <xsl:otherwise> + <t> + The IETF takes no position regarding the validity or scope of + any intellectual property or other rights that might be claimed + to pertain to the implementation or use of the technology + described in this document or the extent to which any license + under such rights might or might not be available; neither does + it represent that it has made any effort to identify any such + rights. Information on the IETF's procedures with respect to + rights in standards-track and standards-related documentation + can be found in BCP-11. Copies of claims of rights made + available for publication and any assurances of licenses to + be made available, or the result of an attempt made + to obtain a general license or permission for the use of such + proprietary rights by implementors or users of this + specification can be obtained from the IETF Secretariat. + </t> + <t> + The IETF invites any interested party to bring to its + attention any copyrights, patents or patent applications, or + other proprietary rights which may cover technology that may be + required to practice this standard. Please address the + information to the IETF Executive Director. + </t> + <xsl:if test="$xml2rfc-iprnotified='yes'"> + <t> + The IETF has been notified of intellectual property rights + claimed in regard to some or all of the specification contained + in this document. For more information consult the online list + of claimed rights. + </t> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </section> + + <xsl:choose> + <xsl:when test="$no-funding"/> + <xsl:when test="$funding1 and $is-rfc"> + <section x:fixed-section-number="5"> + <xsl:attribute name="title"> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Acknowledgement</xsl:when> + <xsl:otherwise>Acknowledgment</xsl:otherwise> + </xsl:choose> + </xsl:attribute> + <t> + Funding for the RFC Editor function is provided by the IETF + Administrative Support Activity (IASA). + </t> + </section> + </xsl:when> + <xsl:when test="$funding0 and $is-rfc"> + <section x:fixed-section-number="5"> + <xsl:attribute name="title"> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Acknowledgement</xsl:when> + <xsl:otherwise>Acknowledgment</xsl:otherwise> + </xsl:choose> + </xsl:attribute> + <t> + Funding for the RFC Editor function is currently provided by + the Internet Society. + </t> + </section> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:if> +</boilerplate> +</xsl:template> + +<!-- optional scripts --> +<xsl:template name="insertScripts"> +<xsl:if test="$xml2rfc-ext-refresh-from!=''"> +<script> +var RfcRefresh = {}; +RfcRefresh.NS_XHTML = "http://www.w3.org/1999/xhtml"; +RfcRefresh.NS_MOZERR = "http://www.mozilla.org/newlayout/xml/parsererror.xml"; +RfcRefresh.lastTxt = ""; +RfcRefresh.lastEtag = ""; +RfcRefresh.xslt = null; +RfcRefresh.xmlsource = "<xsl:value-of select='$xml2rfc-ext-refresh-from'/>"; +RfcRefresh.xsltsource = "<xsl:value-of select='$xml2rfc-ext-refresh-xslt'/>"; +RfcRefresh.interval = "<xsl:value-of select='number($xml2rfc-ext-refresh-interval)'/>"; + +RfcRefresh.getXSLT = function() { + if (! 
window.XSLTProcessor) { + var err = document.createElement("pre"); + err.className = "refreshbrowsererror <xsl:value-of select="$css-noprint"/>"; + var msg = "This browser does not support the window.XSLTProcessor functionality."; + err.appendChild(document.createTextNode(msg)); + RfcRefresh.showMessage("refreshxmlerror", err); + } + else { + try { + var xhr = new XMLHttpRequest(); + xhr.open("GET", RfcRefresh.xsltsource, true); + xhr.onload = function (e) { + if (xhr.readyState === 4) { + RfcRefresh.xslt = new XSLTProcessor(); + RfcRefresh.xslt.importStylesheet(xhr.responseXML); + } + } + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); + } + catch (e) { + var err = document.createElement("pre"); + err.className = "refreshbrowsererror <xsl:value-of select="$css-noprint"/>"; + var msg = "Failed to load XSLT code from &lt;" + RfcRefresh.xsltsource + "&gt;.\n"; + msg += "Your browser might not support loading from a file: URI.\n"; + msg += "Error details: " + e; + err.appendChild(document.createTextNode(msg)); + RfcRefresh.showMessage("refreshxmlerror", err); + } + } +} + +RfcRefresh.findAndUpdate = function(olddoc, elem) { + var changed = ""; + var children = elem.childNodes; + for (var i = 0; i != children.length; i++) { + var n = children[i]; + if (n.nodeType == 1) { + var c = RfcRefresh.findAndUpdate(olddoc, n); + if (changed == '') { + changed = c; + } + var id = n.id; + if (id != "") { + var old = olddoc.getElementById(id); + var newtext = n.innerHTML; + if (!old) { + console.debug("new " + id); + } else { + var oldtext = old.innerHTML; + if (oldtext != newtext) { + console.debug("updating " + id); + old.innerHTML = n.innerHTML; + if (changed == '') { + changed = id; + } + } + } + } + } + } + return changed; +} + +RfcRefresh.findDifferences = function(olddoc, newdoc) { + var changed = RfcRefresh.findAndUpdate(olddoc, newdoc.documentElement); + if (changed != "") { + console.debug("changed: " + changed); + document.location = "#" + changed; + } + // final check for changes; if those were not processed earlier, + // we refresh the whole document + var oldtext = olddoc.documentElement.getElementsByTagName("body")[0].innerHTML; + var newtext = newdoc.documentElement.getElementsByTagName("body")[0].innerHTML; + if (oldtext != newtext) { + console.debug("full refresh: " + newtext); + olddoc.documentElement.innerHTML = newdoc.documentElement.innerHTML; + } +} + +RfcRefresh.getNodeText = function(elem) { + var result = ""; + var children = elem.childNodes; + for (var i = 0; i != children.length; i++) { + if (children[i].nodeType == 3) { + result += children[i].nodeValue; + } + } + return result; +} + +RfcRefresh.getParserError = function(dom) { + // FIREFOX + if ("parsererror" == dom.documentElement.nodeName &amp;&amp; RfcRefresh.NS_MOZERR == dom.documentElement.namespaceURI) { + var errmsg = new Object(); + errmsg.msg = ""; + errmsg.src = ""; + var children = dom.documentElement.childNodes; + for (var i = 0; i != children.length; i++) { + if (children[i].nodeType == 3) { + errmsg.msg += children[i].nodeValue; + } else if (children[i].nodeName == "sourcetext") { + errmsg.src = RfcRefresh.getNodeText(children[i]); + } + } + return errmsg; + } + + var list = dom.getElementsByTagNameNS(RfcRefresh.NS_XHTML, "parsererror"); + if (list.length != 0) { + // Webkit + var errmsg = new Object(); + errmsg.msg = "XML parse error"; + list = dom.getElementsByTagNameNS(RfcRefresh.NS_XHTML, "div"); + if (list.length != 0) { + errmsg.msg = 
RfcRefresh.getNodeText(list[0]); + } + return errmsg; + } + + + return null; +} + +RfcRefresh.showMessage = function(cls, node) { + // remove previous message + var list = document.getElementsByClassName(cls); + if (list.length != 0) { + list[0].parentNode.removeChild(list[0]); + } + document.body.appendChild(node); +} + +RfcRefresh.refresh = function(txt) { + if (txt != RfcRefresh.lastTxt) { + RfcRefresh.lastTxt = txt; + // try to parse + var parser = new DOMParser(); + var dom = parser.parseFromString(txt, "text/xml"); + var errmsg = RfcRefresh.getParserError(dom); + + if (errmsg != null) { + var err = document.createElement("pre"); + err.className = "refreshxmlerror <xsl:value-of select="$css-noprint"/>"; + err.appendChild(document.createTextNode(errmsg.msg)); + if (errmsg.src != null) { + err.appendChild(document.createElement("hr")); + err.appendChild(document.createTextNode(errmsg.src)); + } + RfcRefresh.showMessage("refreshxmlerror", err); + } else { + // find new refresh + var children = dom.childNodes; + for (var i = 0; i != children.length; i++) { + if (children[i].nodeType == 7 &amp;&amp; children[i].target == "rfc-ext") { + var s = "&lt;foo " + children[i].data + "/>"; + var sd = parser.parseFromString(s, "text/xml"); + var refresh = sd.documentElement.getAttribute("refresh-interval"); + if (refresh != null &amp;&amp; refresh != "") { + refresh = parseInt(refresh, 10); + if (RfcRefresh.interval != refresh) { + if (Number.isNaN(refresh) || refresh &lt; 5) { + console.debug("refresh requested to be: " + refresh + " - ignored, using 5 instead."); + RfcRefresh.interval = 5; + } else { + RfcRefresh.interval = refresh; + console.debug("refresh changed to: " + refresh); + } + } + } + } + } + + var html = RfcRefresh.xslt.transformToDocument(dom); + RfcRefresh.findDifferences(document, html); + } + } +} + +RfcRefresh.initRefresh = function() { + RfcRefresh.getXSLT(); + + window.setTimeout(function(){ + if (RfcRefresh.xslt != null) { + var xhr = new XMLHttpRequest(); + xhr.open("GET", RfcRefresh.xmlsource, true); + if (RfcRefresh.lastEtag != "") { + xhr.setRequestHeader("If-None-Match", RfcRefresh.lastEtag); + } + xhr.onload = function (e) { + if (xhr.readyState === 4) { + console.debug(xhr.status + " " + xhr.statusText); + if (xhr.status != 304) { + RfcRefresh.refresh(xhr.responseText); + } + RfcRefresh.lastEtag = xhr.getResponseHeader("ETag"); + } + } + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); + setTimeout(arguments.callee, RfcRefresh.interval * 1000); + } + }, RfcRefresh.interval * 1000); +} +</script> +</xsl:if> +<xsl:if test="/rfc/x:feedback"> +<script> +var buttonsAdded = false; + +function initFeedback() { + var fb = document.createElement("div"); + fb.className = "<xsl:value-of select="concat($css-feedback,' ',$css-noprint)"/>"; + fb.setAttribute("onclick", "feedback();"); + fb.appendChild(document.createTextNode("feedback")); + + document.body.appendChild(fb); +} + +function feedback() { + toggleButtonsToElementsByName("h2"); + toggleButtonsToElementsByName("h3"); + toggleButtonsToElementsByName("h4"); + toggleButtonsToElementsByName("h5"); + + buttonsAdded = !buttonsAdded; +} + +function toggleButtonsToElementsByName(name) { + var list = document.getElementsByTagName(name); + for (var i = 0; i &lt; list.length; i++) { + toggleButton(list.item(i)); + } +} + +function toggleButton(node) { + if (! 
buttonsAdded) { + + // docname + var template = "<xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="/rfc/x:feedback/@template"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> +</xsl:call-template>"; + + var id = node.getAttribute("id"); + // try also parent + if (id == null || id == "") { + var id = node.parentNode.getAttribute("id"); + } + // better id available? + var titlelinks = node.getElementsByTagName("a"); + for (var i = 0; i &lt; titlelinks.length; i++) { + var tl = titlelinks.item(i); + if (tl.getAttribute("id")) { + id = tl.getAttribute("id"); + } + } + + // ref + var ref = window.location.toString(); + var hash = ref.indexOf("#"); + if (hash != -1) { + ref = ref.substring(0, hash); + } + if (id != null &amp;&amp; id != "") { + ref += "#" + id; + } + + // docname + var docname = "<xsl:value-of select="/rfc/@docName"/>"; + + // section + var section = node.textContent; + section = section.replace("\u00a0", " ").trim(); + + // build URI from template + var uri = template.replace("{docname}", encodeURIComponent(docname)); + uri = uri.replace("{section}", encodeURIComponent(section)); + uri = uri.replace("{ref}", encodeURIComponent(ref)); + + var button = document.createElement("a"); + button.className = "<xsl:value-of select="concat($css-fbbutton,' ',$css-noprint)"/>"; + button.setAttribute("href", uri); + button.appendChild(document.createTextNode("send feedback")); + node.appendChild(button); + } + else { + var buttons = node.getElementsByTagName("a"); + for (var i = 0; i &lt; buttons.length; i++) { + var b = buttons.item(i); + if (b.className == "<xsl:value-of select="concat($css-fbbutton,' ',$css-noprint)"/>") { + node.removeChild(b); + } + } + } +}</script></xsl:if> +<xsl:if test="$xml2rfc-ext-insert-metadata='yes' and ($is-rfc or $is-submitted-draft)"><script> +<xsl:if test="$rfcno!=''"> +function getMeta(rfcno, container) { + var xhr = new XMLHttpRequest(); + xhr.open("GET", "https://www.rfc-editor.org/rfc/rfc" + rfcno + ".json", true); + xhr.onload = function (e) { + if (xhr.readyState === 4) { + if (xhr.status === 200) { + var data = JSON.parse(xhr.response); + + var cont = document.getElementById(container); + // empty the container + while (cont.firstChild) { + cont.removeChild(myNode.firstChild); + } + + var c = data.status; + if (c) { + var bld = newElementWithText("b", c); + cont.appendChild(bld); + } else { + cont.appendChild(newElementWithText("i", "(document status unknown)")); + } + + c = data.updated_by; + if (c &amp;&amp; c.length > 0 &amp;&amp; c[0] !== null &amp;&amp; c[0].length > 0) { + cont.appendChild(newElement("br")); + cont.appendChild(newText("Updated by: ")); + appendRfcLinks(cont, c); + } + + c = data.obsoleted_by; + if (c &amp;&amp; c.length > 0 &amp;&amp; c[0] !== null &amp;&amp; c[0].length > 0) { + cont.appendChild(newElement("br")); + cont.appendChild(newText("Obsoleted by: ")); + appendRfcLinks(cont, c); + } + + c = data.errata_url; + if (c) { + cont.appendChild(newElement("br")); + var link = newElementWithText("a", "errata"); + link.setAttribute("href", c); + var errata = newElementWithText("i", "This document has "); + errata.appendChild(link); + errata.appendChild(newText(".")); + cont.appendChild(errata); + } + + cont.style.display = "block"; + } else { + console.error(xhr.statusText); + } + } + }; + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); +} +function appendRfcLinks(parent, updates) { + 
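+  // builds each RFC URI from the $xml2rfc-ext-rfc-uri template ("{rfc}" replaced by the number) and appends comma-separated links for the entries in "updates"; entries not starting with "rfc" are appended as plain text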
var template = "<xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-uri"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> +</xsl:call-template>"; + for (var i = 0; i &lt; updates.length; i++) { + var rfc = updates[i].trim().toLowerCase(); + if (rfc.substring(0, 3) == "rfc") { + var no = parseInt(rfc.substring(3), 10); + + var link = newElement("a"); + link.setAttribute("href", template.replace("{rfc}", no)); + link.appendChild(newText(no)); + parent.appendChild(link); + } else { + parent.appendChild(newText(rfc)); + } + if (i != updates.length - 1) { + parent.appendChild(newText(", ")); + } + } +}</xsl:if><xsl:if test="$is-submitted-draft"> +function getMeta(docname, revision, container) { + var xhr = new XMLHttpRequest(); + xhr.open("GET", "https://datatracker.ietf.org/doc/" + docname + "/doc.json", true); + xhr.onload = function (e) { + if (xhr.readyState === 4) { + if (xhr.status === 200) { + var data = JSON.parse(xhr.response); + + var cont = document.getElementById(container); + // empty the container + while (cont.firstChild) { + cont.removeChild(myNode.firstChild); + } + + if (data.rev) { + cont.style.display = "block"; + var bld = newElementWithText("b", "Internet Draft Status"); + cont.appendChild(bld); + cont.appendChild(newElement("br")); + if (data.rev == revision) { + var rev = newElementWithText("i", "This is the latest submitted version."); + cont.appendChild(rev); + } else { + var rev = newElementWithText("i", "This is not the current version:"); + cont.appendChild(rev); + cont.appendChild(newElement("br")); + var dat = ""; + if (data.time) { + dat = ", submitted on " + data.time.substring(0,10); + } + rev = newElementWithText("i", "please see version " + data.rev + dat + "."); + cont.appendChild(rev); + } + } + } else { + console.error(xhr.statusText); + } + } + }; + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); +}</xsl:if> + +// DOM helpers +function newElement(name) { + return document.createElement(name); +} +function newElementWithText(name, txt) { + var e = document.createElement(name); + e.appendChild(newText(txt)); + return e; +} +function newText(text) { + return document.createTextNode(text); +} +</script> +</xsl:if> +<script> +function anchorRewrite() { +<xsl:text> map = { </xsl:text> + <xsl:for-each select="//x:anchor-alias"> + <xsl:text>"</xsl:text> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="@value"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> + </xsl:call-template> + <xsl:text>": "</xsl:text> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="ancestor::*[@anchor][1]/@anchor"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> + </xsl:call-template> + <xsl:text>"</xsl:text> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> +<xsl:text>};</xsl:text> + if (window.location.hash.length >= 1) { + var fragid = window.location.hash.substr(1); + if (fragid) { + if (! document.getElementById(fragid)) { + var prefix = "<xsl:value-of select="$anchor-pref"/>"; + var mapped = map[fragid]; + if (mapped) { + window.location.hash = mapped; + } else if (fragid.indexOf("section-") == 0) { + window.location.hash = prefix + "section." 
+ fragid.substring(8); + } else if (fragid.indexOf("appendix-") == 0) { + window.location.hash = prefix + "section." + fragid.substring(9); + } else if (fragid.indexOf("s-") == 0) { + var postfix = fragid.substring(2); + if (postfix.startsWith("abstract")) { + window.location.hash = prefix + postfix; + } else if (postfix.startsWith("note-")) { + window.location.hash = prefix + "note." + postfix.substring(5); + } else { + window.location.hash = prefix + "section." + postfix; + } + } else if (fragid.indexOf("p-") == 0) { + var r = fragid.substring(2); + var p = r.indexOf("-"); + if (p >= 0) { + window.location.hash = prefix + "section." + r.substring(0, p) + ".p." + r.substring(p + 1); + } + } + } + } + } +} +window.addEventListener('hashchange', anchorRewrite); +window.addEventListener('DOMContentLoaded', anchorRewrite); +</script><xsl:if test="$prettyprint-script!=''"> +<script src="{$prettyprint-script}"/></xsl:if><xsl:if test="contains($prettyprint-script,'prettify') and (//artwork[contains(@type,'abnf')] or //sourcecode[contains(@type,'abnf')])"> +<script><![CDATA[try { +PR['registerLangHandler']( + PR['createSimpleLexer']( + [ + // comment + [PR['PR_COMMENT'], /^;[^\x00-\x1f]*/, null, ";"], + ], + [ + // string literals + [PR['PR_STRING'], /^(\%s|\%i)?"[^"\x00-\x1f]*"/, null], + // binary literals + [PR['PR_LITERAL'], /^\%b[01]+((-[01]+)|(\.[01]+)*)/, null], + // decimal literals + [PR['PR_LITERAL'], /^\%d[0-9]+((-[0-9]+)|(\.[0-9]+)*)/, null], + // hex literals + [PR['PR_LITERAL'], /^(\%x[A-Za-z0-9]+((-[A-Za-z0-9]+)|(\.[A-Za-z0-9]+)*))/, null], + // prose rule + [PR['PR_NOCODE'], /^<[^>\x00-\x1f]*>/, null], + // rule name + [PR['PR_TYPE'], /^([A-Za-z][A-Za-z0-9-]*)/, null], + [PR['PR_PUNCTUATION'], /^[=\(\)\*\/\[\]#]/, null], + ]), + ['ietf_abnf']); +} catch(e){}]]> +</script> +</xsl:if></xsl:template> + +<!-- insert CSS style info --> + +<xsl:template name="insertCss"> +<style title="rfc2629.xslt"> +<xsl:value-of select="$xml2rfc-ext-webfonts"/> +:root { + --col-bg: white; + --col-bg-error: red; + --col-bg-highlight: yellow; + --col-bg-highligh2: lime; + --col-bg-light: gray; + --col-bg-pre: lightyellow; + --col-bg-pre1: #f8f8f8; + --col-bg-pre2: #f0f0f0; + --col-bg-th: #e9e9e9; + --col-bg-tr: #f5f5f5; + --col-fg: black; + --col-fg-del: red; + --col-fg-error: red; + --col-fg-ins: green; + --col-fg-light: gray; + --col-fg-link: blue; + --col-fg-title: green; +} +a { + color: var(--col-fg-link); + text-decoration: none; +} +a.smpl { + color: var(--col-fg); +} +a:hover { + text-decoration: underline; +} +a:active { + text-decoration: underline; +} +address { + margin-top: 1em; + margin-left: 2em; + font-style: normal; +}<xsl:if test="//x:blockquote|//blockquote"> +blockquote { + border-style: solid; + border-color: var(--col-fg-light); + border-width: 0 0 0 .25em; + font-style: italic; + padding-left: 0.5em; +}</xsl:if> +body {<xsl:if test="$xml2rfc-background!=''"> + background: url(<xsl:value-of select="$xml2rfc-background" />) var(--col-bg) left top;</xsl:if> + background-color: var(--col-bg); + color: var(--col-fg); + font-family: <xsl:value-of select="$xml2rfc-ext-ff-body"/>; + font-size: 16px; + line-height: 1.5; + margin: 10px 0px 10px 10px; +}<xsl:if test="$parsedMaxwidth!=''"> +@media screen and (min-width: <xsl:value-of select="number($parsedMaxwidth + 40)"/>px) { + body { + margin: 10px auto; + max-width: <xsl:value-of select="$parsedMaxwidth"/>px; + } +}</xsl:if> +samp, span.tt, code, pre { + font-family: <xsl:value-of select="$xml2rfc-ext-ff-pre"/>; +}<xsl:if 
test="//xhtml:p"> +br.p { + line-height: 150%; +}</xsl:if> +cite { + font-style: normal; +}<xsl:if test="//x:note|//aside"> +aside { + margin-left: 2em; +}</xsl:if> +dl { + margin-left: 2em; +} +dl > dt { + float: left; + margin-right: 1em; +} +dl.nohang > dt { + float: none; +} +dl > dd { + margin-bottom: .5em; +} +dl.compact > dd { + margin-bottom: .0em; +} +dl > dd > dl { + margin-top: 0.5em; +} +ul.empty {<!-- spacing between two entries in definition lists --> + list-style-type: none; +} +<xsl:if test="//ul[@bare='true']">ul.bare { + margin-left: -2em; +} +</xsl:if>ul.empty li { + margin-top: .5em; +} +dl p { + margin-left: 0em; +} +dl.<xsl:value-of select="$css-reference"/> > dt { + font-weight: bold; +} +dl.<xsl:value-of select="$css-reference"/> > dd { + margin-left: <xsl:choose><xsl:when test="$xml2rfc-symrefs='no'">3.5</xsl:when><xsl:otherwise>6</xsl:otherwise></xsl:choose>em; +} +h1 { + color: var(--col-fg-title); + font-size: 150%; + font-weight: bold; + text-align: center; + margin-top: 36pt; + margin-bottom: 0pt; +} +h2 { + font-size: 130%; + page-break-after: avoid; +} +h2.np { + page-break-before: always; +} +h3 { + font-size: 120%; + page-break-after: avoid; +} +h4 { + font-size: 110%; + page-break-after: avoid; +} +h5, h6 { + font-size: 100%; + page-break-after: avoid; +} +h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { + color: var(--col-fg); +} +img { + margin-left: 3em; +} +ol { + margin-left: 2em; +} +li ol { + margin-left: 0em; +} +ol p { + margin-left: 0em; +}<xsl:if test="//xhtml:q"> +q { + font-style: italic; +}</xsl:if> +p { + margin-left: 2em; +} +pre { + font-size: 90%; + margin-left: 3em; + background-color: var(--col-bg-pre); + padding: .25em; + page-break-inside: avoid; +}<xsl:if test="//artwork[@x:is-code-component='yes']|//sourcecode[@markers='true']"><!-- support "<CODE BEGINS>" and "<CODE ENDS>" markers--> +pre.ccmarker { + background-color: var(--col-bg); + color: var(--col-fg-light); +} +pre.ccmarker > span { + font-size: small; +} +pre.cct { + margin-bottom: -1em; +} +pre.ccb { + margin-top: -1em; +}</xsl:if> +pre.text2 { + border-style: dotted; + border-width: 1px; + background-color: var(--col-bg-pre2); +} +pre.inline { + background-color: var(--col-bg); + padding: 0em; + page-break-inside: auto;<xsl:if test="$prettyprint-script!=''"> + border: none !important;</xsl:if> +} +pre.text { + border-style: dotted; + border-width: 1px; + background-color: var(--col-bg-pre1); +} +pre.drawing { + border-style: solid; + border-width: 1px; + background-color: var(--col-bg-pre1); + padding: 2em; +}<xsl:if test="//x:q"> +q { + font-style: italic; +}</xsl:if> +<xsl:if test="//x:sup|//sup"> +sup { + font-size: 60%; +}</xsl:if><xsl:if test="//x:sub|//sub"> +sub { + font-size: 60%; +}</xsl:if> +table { + margin-left: 2em; +}<xsl:if test="//texttable|//table"> +div.<xsl:value-of select="$css-tt"/> { + margin-left: 2em; +} +table.<xsl:value-of select="$css-tt"/> { + border-collapse: collapse; + border-color: var(--col-fg-light); + border-spacing: 0; + vertical-align: top; + } +table.<xsl:value-of select="$css-tt"/> th { + border-color: var(--col-fg-light); + padding: 3px; +} +table.<xsl:value-of select="$css-tt"/> td { + border-color: var(--col-fg-light); + padding: 3px; +} +table.all { + border-style: solid; + border-width: 2px; +} +table.full { + border-style: solid; + border-width: 2px; +} +table.<xsl:value-of select="$css-tt"/> td { + vertical-align: top; +} +table.all td { + border-style: solid; + border-width: 1px; +} +table.full td { + border-style: none solid; + 
border-width: 1px; +} +table.<xsl:value-of select="$css-tt"/> th { + vertical-align: top; +} +table.all th { + border-style: solid; + border-width: 1px; +} +table.full th { + border-style: solid; + border-width: 1px 1px 2px 1px; +} +table.<xsl:value-of select="$css-tleft"/> { + margin-right: auto; +} +table.<xsl:value-of select="$css-tright"/> { + margin-left: auto; +} +table.<xsl:value-of select="$css-tcenter"/> { + margin-left: auto; + margin-right: auto; +} +caption { + caption-side: bottom; + font-weight: bold; + font-size: 80%; + margin-top: .5em; +} +<xsl:if test="//@x:caption-side"> +caption.caption-top { + caption-side: top; +} +</xsl:if> +<xsl:if test="//table"> +table.v3 tr { + vertical-align: top; +} +table.v3 th { + background-color: var(--col-bg-th); + vertical-align: top; + padding: 0.25em 0.5em; +} +table.v3 td { + padding: 0.25em 0.5em; +} +table.v3 tr:nth-child(2n) > td { + background-color: var(--col-bg-tr); + vertical-align: top; +} +tr p { + margin-left: 0em; +} +tr pre { + margin-left: 1em; +} +tr ol { + margin-left: 1em; +} +tr ul { + margin-left: 1em; +} +tr dl { + margin-left: 1em; +} +</xsl:if> +</xsl:if> +table.<xsl:value-of select="$css-header"/> { + border-spacing: 1px; + width: 95%; + font-size: 90%;<xsl:if test="not(contains($styles,' header-bw '))"> + color: var(--col-bg);</xsl:if> +} +td.top { + vertical-align: top; +} +td.topnowrap { + vertical-align: top; + white-space: nowrap; +} +table.<xsl:value-of select="$css-header"/> td { + vertical-align: top;<xsl:if test="not(contains($styles,' header-bw '))"> + background-color: var(--col-bg-light);</xsl:if> + width: 50%; +}<xsl:if test="/rfc/@obsoletes | /rfc/@updates"> +table.<xsl:value-of select="$css-header"/> a { + color: <xsl:choose><xsl:when test="not(contains($styles,' header-bw '))">var(--col-bg)</xsl:when><xsl:otherwise>var(--col-fg)</xsl:otherwise></xsl:choose>; +}</xsl:if> +ul.toc, ul.toc ul { + list-style: none; + margin-left: 1.5em; + padding-left: 0em; +} +ul.toc li { + line-height: 150%; + font-weight: bold; + margin-left: 0em; +} +ul.toc li li { + line-height: normal; + font-weight: normal; + font-size: 90%; + margin-left: 0em; +} +li.excluded { + font-size: 0%; +} +ul { + margin-left: 2em; +} +li ul { + margin-left: 0em; +} +ul p { + margin-left: 0em; +} +.filename, h1, h2, h3, h4 { + font-family: <xsl:value-of select="$xml2rfc-ext-ff-title"/>; +} +<xsl:if test="$has-index">ul.ind, ul.ind ul { + list-style: none; + margin-left: 1.5em; + padding-left: 0em; + page-break-before: avoid; +} +ul.ind li { + font-weight: bold; + line-height: 200%; + margin-left: 0em; +} +ul.ind li li { + font-weight: normal; + line-height: 150%; + margin-left: 0em; +}</xsl:if><xsl:if test="//svg:svg"> +@namespace svg url(http://www.w3.org/2000/svg); +svg|svg { + margin-left: 3em; +} +svg { + margin-left: 3em; +}</xsl:if> +.avoidbreakinside { + page-break-inside: avoid; +} +.avoidbreakafter { + page-break-after: avoid; +} +<xsl:if test="//t/@keepWithPrevious">.avoidbreakbefore { + page-break-before: avoid; +} +</xsl:if><xsl:if test="//*[@removeInRFC='true']">section.rfcEditorRemove > div:first-of-type { + font-style: italic; +}</xsl:if><xsl:if test="//x:bcp14|//bcp14">.bcp14 { + font-style: normal; + text-transform: lowercase; + font-variant: small-caps; +}</xsl:if><xsl:if test="//x:blockquote|//blockquote"> +blockquote > * .bcp14 { + font-style: italic; +}</xsl:if> +.comment { + background-color: var(--col-bg-highlight); +}<xsl:if test="$xml2rfc-editing='yes'"> +.editingmark { + background-color: 
var(--col-bg-highlight); +}</xsl:if> +.<xsl:value-of select="$css-center"/> { + text-align: center; +} +.<xsl:value-of select="$css-error"/> { + color: var(--col-fg-error); + font-style: italic; + font-weight: bold; +} +.figure { + font-weight: bold; + text-align: center; + font-size: 80%; +} +.filename { + font-size: 112%; + font-weight: bold; + line-height: 21pt; + text-align: center; + margin-top: 0pt; +} +.fn { + font-weight: bold; +} +.<xsl:value-of select="$css-left"/> { + text-align: left; +} +.<xsl:value-of select="$css-right"/> { + text-align: right; +} +.warning { + font-size: 130%; + background-color: var(--col-bg-highlight); +}<xsl:if test="$xml2rfc-ext-paragraph-links='yes'"> +.self { + color: var(--col-fg-light); + margin-left: .3em; + text-decoration: none; + visibility: hidden; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +} +.self:hover { + text-decoration: none; +} +h1:hover > a.self, h2:hover > a.self, h3:hover > a.self, li:hover > a.self, p:hover > a.self { + visibility: visible; +}</xsl:if><xsl:if test="$has-edits">del { + color: var(--col-fg-del); + text-decoration: line-through; +} +.del { + color: var(--col-fg-del); + text-decoration: line-through; +} +ins { + color: var(--col-fg-ins); + text-decoration: underline; +} +.ins { + color: var(--col-fg-ins); + text-decoration: underline; +} +div.issuepointer { + float: left; +}</xsl:if><xsl:if test="//ed:issue"> +table.openissue { + background-color: var(--col-bg-highlight); + border-width: thin; + border-style: solid; + border-color: var(--col-fg); +} +table.closedissue { + background-color: var(--col-bg); + border-width: thin; + border-style: solid; + border-color: var(--col-fg-light); + color: var(--col-fg-light); +} +thead th { + text-align: left; +} +.bg-issue { + border: solid; + border-width: 1px; + font-size: 66%; +} +.closed-issue { + border: solid; + border-width: thin; + background-color: var(--col-bg-highlight2); + font-size: smaller; + font-weight: bold; +} +.open-issue { + border: solid; + border-width: thin; + background-color: var(--col-bg-error); + font-size: smaller; + font-weight: bold; +} +.editor-issue { + border: solid; + border-width: thin; + background-color: var(--col-bg-highlight); + font-size: smaller; + font-weight: bold; +}</xsl:if><xsl:if test="$xml2rfc-ext-refresh-from!=''">.refreshxmlerror { + position: fixed; + top: 1%; + right: 1%; + padding: 5px 5px; + color: var(--col-bg-highlight); + background: var(--col-fg); +} +.refreshbrowsererror { + position: fixed; + top: 1%; + left: 1%; + padding: 5px 5px; + color: var(--col-bg-error); + background: var(--col-fg); +}</xsl:if><xsl:if test="/rfc/x:feedback">.<xsl:value-of select="$css-feedback"/> { + position: fixed; + bottom: 1%; + right: 1%; + padding: 3px 5px; + color: var(--col-bg); + border-radius: 5px; + background: #006400; + border: 1px solid silver; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +} +.<xsl:value-of select="$css-fbbutton"/> { + margin-left: 1em; + color: #303030; + font-size: small; + font-weight: normal; + background: #d0d000; + padding: 1px 4px; + border: 1px solid silver; + border-radius: 5px; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +}</xsl:if><xsl:if test="$xml2rfc-ext-justification='always'"> +dd, li, p { + text-align: justify; +}</xsl:if><xsl:if test="$xml2rfc-ext-insert-metadata='yes' and ($is-rfc or $is-submitted-draft)"> +.<xsl:value-of 
select="$css-docstatus"/> { + border: 1px solid var(--col-fg); + display: none; + float: right; + margin: 2em; + padding: 1em; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +}</xsl:if><xsl:if test="$errata-parsed"> +.<xsl:value-of select="$css-erratum"/> { + border: 1px solid orangered; + border-left: 0.75em solid orangered; + float: right; + padding: 0.5em; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +}<xsl:if test="$parsedMaxwidth!=''"> +@media screen and (min-width: <xsl:value-of select="number($parsedMaxwidth + 350)"/>px) { + .<xsl:value-of select="$css-erratum"/> { + margin-right: -150px; + } +}</xsl:if></xsl:if><xsl:if test="$published-as-rfc"> +.<xsl:value-of select="$css-publishedasrfc"/> { + background-color: var(--col-bg-highlight); + color: var(--col-fg); + font-size: 115%; + text-align: center; +}</xsl:if><xsl:if test="$prettyprint-class='prettyprint' and contains($prettyprint-script,'prettify') and not(contains($prettyprint-script,'skin='))"> + pre.prettyprint .pln { color: #000; } + pre.prettyprint .str, pre.prettyprint .atv { color: #080; } + pre.prettyprint .kwd, pre.prettyprint .tag { color: #008; } + pre.prettyprint .com { color: #800; } + pre.prettyprint .typ, pre.prettyprint .atn, pre.prettyprint .dec, pre.prettyprint .var { color: #606; } + pre.prettyprint .lit { color: #066; } + pre.prettyprint .pun, pre.prettyprint .opn, pre.prettyprint .clo { color: #660; } +</xsl:if> + +@media screen { + pre.text, pre.text2, pre.drawing { + width: 69ch; + } +} + +@media print { + .<xsl:value-of select="$css-noprint"/> { + display: none; + } + + a { + color: black; + text-decoration: none; + } + + table.<xsl:value-of select="$css-header"/> { + width: 90%; + } + + td.<xsl:value-of select="$css-header"/> { + width: 50%; + color: black; + background-color: white; + vertical-align: top; + font-size: 110%; + } + + ul.toc a:last-child::after { + content: leader('.') target-counter(attr(href), page); + } + + ul.ind li li a {<!-- links in the leaf nodes of the index should go to page numbers --> + content: target-counter(attr(href), page); + } + + .print2col { + column-count: 2; + -moz-column-count: 2;<!-- for Firefox --> + column-fill: auto;<!-- for PrinceXML --> + } +<xsl:if test="$xml2rfc-ext-justification='print'"> + dd, li, p { + text-align: justify; + } +</xsl:if>} +@page<xsl:if test="$xml2rfc-ext-duplex='yes'">:right</xsl:if> { + font-family: <xsl:value-of select="$xml2rfc-ext-ff-body"/>; + @top-left { + content: "<xsl:call-template name="get-header-left"/>"; + } + @top-right { + content: "<xsl:call-template name="get-header-right"/>"; + } + @top-center { + content: "<xsl:call-template name="get-header-center"/>"; + } + @bottom-left { + content: "<xsl:call-template name="get-author-summary"/>"; + } + @bottom-center { + content: "<xsl:call-template name="get-bottom-center"/>"; + } + @bottom-right { + content: "[Page " counter(page) "]"; + } +}<xsl:if test="$xml2rfc-ext-duplex='yes'"> +@page:left { + font-family: <xsl:value-of select="$xml2rfc-ext-ff-body"/>; + @top-left { + content: "<xsl:call-template name="get-header-right"/>"; + } + @top-right { + content: "<xsl:call-template name="get-header-left"/>"; + } + @top-center { + content: "<xsl:call-template name="get-header-center"/>"; + } + @bottom-left { + content: "[Page " counter(page) "]"; + } + @bottom-center { + content: "<xsl:call-template name="get-bottom-center"/>"; + } + @bottom-right { + content: 
"<xsl:call-template name="get-author-summary"/>"; + } +} +</xsl:if> +@page:first { + @top-left { + content: normal; + } + @top-right { + content: normal; + } + @top-center { + content: normal; + } +} +<xsl:if test="$xml2rfc-ext-dark-mode!='no'"> +@media (prefers-color-scheme: dark) { + :root { + --col-bg: black; + --col-bg-error: red; + --col-bg-highlight: #9e9e20; + --col-bg-highligh2: lime; + --col-bg-light: gray; + --col-bg-pre: #202000; + --col-bg-pre1: #080808; + --col-bg-pre2: #101010; + --col-bg-th: #303030; + --col-bg-tr: #202020; + --col-fg: white; + --col-fg-del: red; + --col-fg-error: red; + --col-fg-ins: green; + --col-fg-light: gray; + --col-fg-link: lightblue; + --col-fg-title: green; + } + + pre.prettyprint .pln { color: #fff; } + pre.prettyprint .str, pre.prettyprint .atv { color: #8f8; } + pre.prettyprint .kwd, pre.prettyprint .tag { color: #88f; } + pre.prettyprint .com { color: #f88; } + pre.prettyprint .typ, pre.prettyprint .atn, pre.prettyprint .dec, pre.prettyprint .var { color: #f8f; } + pre.prettyprint .lit { color: #8ff; } + pre.prettyprint .pun, pre.prettyprint .opn, pre.prettyprint .clo { color: #ff8; } +} +</xsl:if> +</style> +</xsl:template> + + +<!-- generate the index section --> + +<xsl:template name="insertSingleIref"> + <xsl:choose> + <xsl:when test="@ed:xref"> + <!-- special index generator mode --> + <xsl:text>[</xsl:text> + <a href="#{@ed:xref}"><xsl:value-of select="@ed:xref"/></a> + <xsl:text>, </xsl:text> + <a> + <xsl:variable name="htmluri" select="//reference[@anchor=current()/@ed:xref]/format[@type='HTML']/@target"/> + <xsl:if test="$htmluri"> + <xsl:attribute name="href"><xsl:value-of select="concat($htmluri,'#',@ed:frag)"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="@primary='true'"><b><xsl:value-of select="@ed:label" /></b></xsl:when> + <xsl:otherwise><xsl:value-of select="@ed:label" /></xsl:otherwise> + </xsl:choose> + </a> + <xsl:text>]</xsl:text> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="_n"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + <xsl:variable name="n"> + <xsl:choose> + <xsl:when test="$_n!=''"> + <xsl:value-of select="$_n"/> + </xsl:when> + <xsl:otherwise>&#167;</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="backlink"> + <xsl:choose> + <xsl:when test="self::xref"> + <xsl:variable name="target" select="@target"/> + <xsl:comment>workaround for Saxon 9.1 bug; force evaluation of: <xsl:value-of select="$target"/></xsl:comment> + <xsl:variable name="no"><xsl:number level="any" count="xref[@target=$target]"/></xsl:variable> + <xsl:text>#</xsl:text> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>xref.</xsl:text> + <xsl:value-of select="@target"/> + <xsl:text>.</xsl:text> + <xsl:value-of select="$no"/> + </xsl:when> + <xsl:when test="self::iref"> + <xsl:text>#</xsl:text> + <xsl:call-template name="compute-iref-anchor"/> + </xsl:when> + <xsl:when test="self::x:ref"> + <xsl:text>#</xsl:text> + <xsl:call-template name="compute-extref-anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:message>Unsupported element type for insertSingleIref</xsl:message> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <a href="{$backlink}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:choose> + <xsl:when test="@primary='true'"><b><xsl:value-of select="$n"/></b></xsl:when> + <xsl:otherwise><xsl:value-of select="$n"/></xsl:otherwise> + </xsl:choose> + </a> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:otherwise> 
+ </xsl:choose> +</xsl:template> + +<xsl:template name="insertSingleXref"> + <xsl:variable name="_n"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + <xsl:variable name="n"> + <xsl:choose> + <xsl:when test="$_n!=''"> + <xsl:value-of select="$_n"/> + </xsl:when> + <xsl:otherwise>&#167;</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="self::reference"> + <a href="#{@anchor}"> + <xsl:call-template name="insertInsDelClass"/> + <b><xsl:value-of select="$n"/></b> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="target" select="@target"/> + <xsl:variable name="backlink">#<xsl:value-of select="$anchor-pref"/>xref.<xsl:value-of select="$target"/>.<xsl:number level="any" count="xref[@target=$target]|relref[@target=$target]"/></xsl:variable> + <a href="{$backlink}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:value-of select="$n"/> + </a> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="position()!=last()">, </xsl:if> +</xsl:template> + +<!-- generate navigation links to index subsections --> +<xsl:template name="insert-index-navigation"> + <p class="{$css-noprint}"> + + <xsl:for-each select="//iref | //reference[not(starts-with(@anchor,'deleted-'))]"> + + <xsl:sort select="translate(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),$lcase,$ucase)" /> + + <xsl:variable name="letter" select="translate(substring(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),1,1),$lcase,$ucase)"/> + + <!-- first of character and character? --> + <xsl:if test="generate-id(.) = generate-id(key('index-first-letter',$letter)[1]) and translate($letter,$alnum,'')=''"> + <xsl:variable name="showit" select="$xml2rfc-ext-include-references-in-index='yes' or self::iref"/> + + <xsl:if test="$showit"> + <a href="#{$anchor-pref}index.{$letter}"> + <xsl:value-of select="$letter" /> + </a> + <xsl:text> </xsl:text> + </xsl:if> + </xsl:if> + </xsl:for-each> + </p> +</xsl:template> + +<xsl:template name="format-section-ref"> + <xsl:param name="number"/> + <xsl:choose> + <xsl:when test="translate(substring($number,1,1),$ucase,'')=''"> + <xsl:text>Appendix </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>Section </xsl:text> + </xsl:otherwise> + </xsl:choose> + <xsl:value-of select="$number"/> +</xsl:template> + +<xsl:template name="insert-index-item"> + <xsl:param name="in-artwork"/> + <xsl:param name="irefs"/> + <xsl:param name="xrefs"/> + <xsl:param name="extrefs"/> + + <xsl:choose> + <xsl:when test="$in-artwork"> + <span class="tt"><xsl:value-of select="@item" /></span> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@item" /> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;&#160;</xsl:text> + + <xsl:for-each select="$irefs|$xrefs|$extrefs"> + <xsl:call-template name="insertSingleIref" /> + </xsl:for-each> +</xsl:template> + +<xsl:template name="insert-index-subitem"> + <xsl:param name="in-artwork"/> + <xsl:param name="irefs"/> + <xsl:param name="xrefs"/> + <xsl:param name="extrefs"/> + + <li> + <xsl:choose> + <xsl:when test="$in-artwork"> + <span class="tt"><xsl:value-of select="@subitem" /></span> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@subitem" /> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;&#160;</xsl:text> + + <xsl:for-each select="$irefs|$xrefs|$extrefs"> + <xsl:call-template name="insertSingleIref" /> + </xsl:for-each> + </li> +</xsl:template> + + +<xsl:variable name="item-wrapper-element">li</xsl:variable> +<xsl:attribute-set 
name="item-wrapper-element"/> +<xsl:variable name="subitems-wrapper-element">ul</xsl:variable> + +<xsl:template name="insert-index-regular-iref"> + + <xsl:if test="generate-id(.) = generate-id(key('index-item',concat(@item,@anchor))[1])"> + <xsl:variable name="item" select="@item"/> + <xsl:variable name="in-artwork" select="key('index-item',$item)[@primary='true' and ancestor::artwork]"/> + + <xsl:element name="{$item-wrapper-element}" use-attribute-sets="item-wrapper-element"> + <xsl:variable name="irefs3" select="key('index-item',@item)[not(@subitem) or @subitem='']"/> + <xsl:variable name="xrefs3" select="key('xref-item',$irefs3[@x:for-anchor='']/../@anchor) | key('xref-item',$irefs3/@x:for-anchor)"/> + <xsl:variable name="extrefs3" select="key('extref-item',$irefs3[@x:for-anchor='']/../@anchor) | key('extref-item',$irefs3/@x:for-anchor)"/> + + <xsl:call-template name="insert-index-item"> + <xsl:with-param name="in-artwork" select="key('index-item',@item)[@primary='true' and (ancestor::artwork or ancestor::sourcecode)]"/> + <xsl:with-param name="irefs" select="$irefs3"/> + <xsl:with-param name="xrefs" select="$xrefs3"/> + <xsl:with-param name="extrefs" select="$extrefs3"/> + </xsl:call-template> + + <xsl:variable name="s2" select="key('index-item',@item)[@subitem!='']"/> + <xsl:if test="$s2"> + <xsl:element name="{$subitems-wrapper-element}"> + <xsl:for-each select="$s2"> + <xsl:sort select="translate(@subitem,$lcase,$ucase)" /> + + <xsl:if test="generate-id(.) = generate-id(key('index-item-subitem',concat(@item,'..',@subitem))[1])"> + + <xsl:variable name="irefs4" select="key('index-item-subitem',concat(@item,'..',@subitem))"/> + <xsl:variable name="xrefs4" select="key('xref-item',$irefs4[@x:for-anchor='']/../@anchor) | key('xref-item',$irefs4/@x:for-anchor)"/> + <xsl:variable name="extrefs4" select="key('extref-item',$irefs4[@x:for-anchor='']/../@anchor) | key('extref-item',$irefs4/@x:for-anchor)"/> + + <xsl:call-template name="insert-index-subitem"> + <xsl:with-param name="in-artwork" select="key('index-item-subitem',concat(@item,'..',@subitem))[@primary='true' and (ancestor::artwork or ancestor::sourcecode)]"/> + <xsl:with-param name="irefs" select="$irefs4"/> + <xsl:with-param name="xrefs" select="$xrefs4"/> + <xsl:with-param name="extrefs" select="$extrefs4"/> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + </xsl:element> + </xsl:if> + </xsl:element> + </xsl:if> +</xsl:template> + +<!-- generate the index section --> + +<xsl:template name="insertIndex"> + + <xsl:call-template name="insert-conditional-hrule"/> + + <section id="{$anchor-pref}index"> + <xsl:call-template name="insert-conditional-pagebreak"/> + <h2> + <a href="#{$anchor-pref}index">Index</a> + </h2> + + <xsl:call-template name="insert-index-navigation"/> + + <!-- for each index subsection --> + <div class="print2col"> + <ul class="ind"> + <xsl:for-each select="//iref | //reference[not(starts-with(@anchor,'deleted-'))]"> + <xsl:sort select="translate(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),$lcase,$ucase)" /> + <xsl:variable name="letter" select="translate(substring(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),1,1),$lcase,$ucase)"/> + + <xsl:variable name="showit" select="$xml2rfc-ext-include-references-in-index='yes' or self::iref"/> + <xsl:if test="$showit and generate-id(.) 
= generate-id(key('index-first-letter',$letter)[1])"> + <li> + + <!-- make letters and digits stand out --> + <xsl:choose> + <xsl:when test="translate($letter,concat($lcase,$ucase,'0123456789'),'')=''"> + <a id="{$anchor-pref}index.{$letter}" href="#{$anchor-pref}index.{$letter}"> + <b><xsl:value-of select="$letter" /></b> + </a> + </xsl:when> + <xsl:otherwise> + <b><xsl:value-of select="$letter" /></b> + </xsl:otherwise> + </xsl:choose> + + <ul> + <xsl:for-each select="key('index-first-letter',$letter)"> + + <xsl:sort select="translate(concat(@item,@anchor),$lcase,$ucase)" /> + + <xsl:choose> + <xsl:when test="self::reference"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes' and not(starts-with(@anchor,'deleted-'))"> + <li> + <xsl:variable name="val"> + <xsl:call-template name="reference-name"/> + </xsl:variable> + <em> + <xsl:value-of select="substring($val,2,string-length($val)-2)"/> + </em> + <xsl:text>&#160;&#160;</xsl:text> + + <xsl:variable name="rs" select="key('xref-item',current()/@anchor) | . | key('anchor-item',concat('deleted-',current()/@anchor))"/> + + <xsl:for-each select="$rs"> + <xsl:call-template name="insertSingleXref" /> + </xsl:for-each> + + <xsl:variable name="rs2" select="$rs[@x:sec|@section]"/> + + <xsl:if test="$rs2"> + <ul> + <xsl:for-each select="$rs2"> + <xsl:sort select="substring-before(concat(@x:sec,@section,'.'),'.')" data-type="number"/> + <xsl:sort select="substring(concat(@x:sec,@section),2+string-length(substring-before(concat(@x:sec,@section),'.')))" data-type="number"/> + + <xsl:if test="generate-id(.) = generate-id(key('index-xref-by-sec',concat(@target,'..',@x:sec,@section))[1])"> + <li> + <em> + <xsl:call-template name="format-section-ref"> + <xsl:with-param name="number" select="concat(@x:sec,@section)"/> + </xsl:call-template> + </em> + <xsl:text>&#160;&#160;</xsl:text> + <xsl:for-each select="key('index-xref-by-sec',concat(@target,'..',@x:sec,@section))"> + <xsl:call-template name="insertSingleXref" /> + </xsl:for-each> + </li> + </xsl:if> + </xsl:for-each> + </ul> + </xsl:if> + + <xsl:if test="current()/x:source/@href"> + <xsl:variable name="rs3" select="$rs[not(@x:sec) and @x:rel]"/> + <xsl:variable name="doc" select="document(current()/x:source/@href)"/> + <xsl:if test="$rs3"> + <ul> + <xsl:for-each select="$rs3"> + <xsl:sort select="count($doc//*[@anchor and following::*/@anchor=substring-after(current()/@x:rel,'#')])" order="ascending" data-type="number"/> + <xsl:if test="generate-id(.) 
= generate-id(key('index-xref-by-anchor',concat(@target,'..',@x:rel))[1])"> + <xsl:variable name="sec"> + <xsl:for-each select="$doc//*[@anchor=substring-after(current()/@x:rel,'#')]"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$sec!=''"> + <li> + <em> + <xsl:choose> + <xsl:when test="starts-with($sec,$unnumbered)"> + <xsl:for-each select="$doc//*[@anchor=substring-after(current()/@x:rel,'#')]"> + <xsl:call-template name="get-title-as-string"/> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="format-section-ref"> + <xsl:with-param name="number" select="$sec"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </em> + <xsl:text>&#160;&#160;</xsl:text> + <xsl:for-each select="key('index-xref-by-anchor',concat(@target,'..',@x:rel))"> + <xsl:call-template name="insertSingleXref" /> + </xsl:for-each> + </li> + </xsl:if> + </xsl:if> + </xsl:for-each> + </ul> + </xsl:if> + </xsl:if> + </li> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="insert-index-regular-iref"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </ul> + </li> + </xsl:if> + + </xsl:for-each> + </ul> + </div> + </section> +</xsl:template> + +<xsl:template name="insertPreamble" myns:namespaceless-elements="xml2rfc"> + + <xsl:param name="notes"/> + +<boilerplate> + <!-- TLP4, Section 6.c.iii --> + <xsl:variable name="pre5378EscapeClause"> + This document may contain material from IETF Documents or IETF Contributions published or + made publicly available before November 10, 2008. The person(s) controlling the copyright in + some of this material may not have granted the IETF Trust the right to allow modifications of such + material outside the IETF Standards Process. Without obtaining an adequate license from the + person(s) controlling the copyright in such materials, this document may not be modified outside + the IETF Standards Process, and derivative works of it may not be created outside the IETF + Standards Process, except to format it for publication as an RFC or to translate it into languages + other than English. + </xsl:variable> + + <!-- TLP1, Section 6.c.i --> + <xsl:variable name="noModificationTrust200811Clause"> + This document may not be modified, and derivative works of it may not be + created, except to format it for publication as an RFC and to translate it + into languages other than English. + </xsl:variable> + + <!-- TLP2..4, Section 6.c.i --> + <xsl:variable name="noModificationTrust200902Clause"> + This document may not be modified, and derivative works of it may not be + created, except to format it for publication as an RFC or to translate it + into languages other than English.<!-- "and" changes to "or" --> + </xsl:variable> + + <!-- TLP1..4, Section 6.c.ii --> + <xsl:variable name="noDerivativesTrust200___Clause"> + This document may not be modified, and derivative works of it may not be + created, and it may not be published except as an Internet-Draft. + </xsl:variable> + + <section anchor="{$anchor-pref}status"> + <name> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes' or $src/rfc/@version >= 3">Status of This Memo</xsl:when> + <xsl:otherwise>Status of this Memo</xsl:otherwise> + </xsl:choose> + </name> + + <xsl:choose> + <xsl:when test="@ipr and not($is-rfc)"> + <t> + <xsl:choose> + + <!-- RFC2026 --> + <xsl:when test="@ipr = 'full2026'"> + This document is an Internet-Draft and is + in full conformance with all provisions of Section 10 of RFC2026. 
+ </xsl:when> + <xsl:when test="@ipr = 'noDerivativeWorks2026'"> + This document is an Internet-Draft and is + in full conformance with all provisions of Section 10 of RFC2026 + except that the right to produce derivative works is not granted. + </xsl:when> + <xsl:when test="@ipr = 'noDerivativeWorksNow'"> + This document is an Internet-Draft and is + in full conformance with all provisions of Section 10 of RFC2026 + except that the right to produce derivative works is not granted. + (If this document becomes part of an IETF working group activity, + then it will be brought into full compliance with Section 10 of RFC2026.) + </xsl:when> + <xsl:when test="@ipr = 'none'"> + This document is an Internet-Draft and is + NOT offered in accordance with Section 10 of RFC2026, + and the author does not provide the IETF with any rights other + than to publish as an Internet-Draft. + </xsl:when> + + <!-- RFC3667 --> + <xsl:when test="@ipr = 'full3667'"> + This document is an Internet-Draft and is subject to all provisions + of section 3 of RFC 3667. By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she become aware will be disclosed, in accordance with + RFC 3668. + </xsl:when> + <xsl:when test="@ipr = 'noModification3667'"> + This document is an Internet-Draft and is subject to all provisions + of section 3 of RFC 3667. By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she become aware will be disclosed, in accordance with + RFC 3668. This document may not be modified, and derivative works of + it may not be created, except to publish it as an RFC and to + translate it into languages other than English<xsl:if test="@iprExtract">, + other than to extract <xref target="{@iprExtract}"/> as-is + for separate use</xsl:if>. + </xsl:when> + <xsl:when test="@ipr = 'noDerivatives3667'"> + This document is an Internet-Draft and is subject to all provisions + of section 3 of RFC 3667 except for the right to produce derivative + works. By submitting this Internet-Draft, each author represents + that any applicable patent or other IPR claims of which he or she + is aware have been or will be disclosed, and any of which he or she + become aware will be disclosed, in accordance with RFC 3668. This + document may not be modified, and derivative works of it may + not be created<xsl:if test="@iprExtract">, other than to extract + <xref target="{@iprExtract}"/> as-is for separate use</xsl:if>. + </xsl:when> + + <!-- RFC3978 --> + <xsl:when test="@ipr = 'full3978'"> + By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she becomes aware will be disclosed, in accordance with + Section 6 of BCP 79. + </xsl:when> + <xsl:when test="@ipr = 'noModification3978'"> + By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she becomes aware will be disclosed, in accordance with + Section 6 of BCP 79. 
This document may not be modified, and derivative works of + it may not be created, except to publish it as an RFC and to + translate it into languages other than English<xsl:if test="@iprExtract">, + other than to extract <xref target="{@iprExtract}"/> as-is + for separate use</xsl:if>. + </xsl:when> + <xsl:when test="@ipr = 'noDerivatives3978'"> + By submitting this Internet-Draft, each author represents + that any applicable patent or other IPR claims of which he or she + is aware have been or will be disclosed, and any of which he or she + becomes aware will be disclosed, in accordance with Section 6 of BCP 79. This + document may not be modified, and derivative works of it may + not be created<xsl:if test="@iprExtract">, other than to extract + <xref target="{@iprExtract}"/> as-is for separate use</xsl:if>. + </xsl:when> + + <!-- as of Jan 2010, TLP 4.0 --> + <xsl:when test="$ipr-2010-01 and (@ipr = 'trust200902' + or @ipr = 'noModificationTrust200902' + or @ipr = 'noDerivativesTrust200902' + or @ipr = 'pre5378Trust200902')"> + This Internet-Draft is submitted in full conformance with + the provisions of BCP 78 and BCP 79. + </xsl:when> + + <!-- as of Nov 2008, Feb 2009 and Sep 2009 --> + <xsl:when test="@ipr = 'trust200811' + or @ipr = 'noModificationTrust200811' + or @ipr = 'noDerivativesTrust200811' + or @ipr = 'trust200902' + or @ipr = 'noModificationTrust200902' + or @ipr = 'noDerivativesTrust200902' + or @ipr = 'pre5378Trust200902'"> + This Internet-Draft is submitted to IETF in full conformance with + the provisions of BCP 78 and BCP 79. + </xsl:when> + <xsl:otherwise> + CONFORMANCE UNDEFINED. + </xsl:otherwise> + </xsl:choose> + + <!-- warn about iprExtract without effect --> + <xsl:if test="@iprExtract and (@ipr != 'noModification3667' and @ipr != 'noDerivatives3667' and @ipr != 'noModification3978' and @ipr != 'noDerivatives3978')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('/rfc/@iprExtract does not have any effect for /rfc/@ipr=',@ipr)"/> + </xsl:call-template> + </xsl:if> + + <!-- restrictions --> + <xsl:choose> + <xsl:when test="@ipr = 'noModificationTrust200811'"> + <xsl:value-of select="$noModificationTrust200811Clause"/> + </xsl:when> + <xsl:when test="@ipr = 'noDerivativesTrust200811'"> + <xsl:value-of select="$noDerivativesTrust200___Clause"/> + </xsl:when> + <xsl:when test="@ipr = 'noModificationTrust200902'"> + <xsl:value-of select="$noModificationTrust200902Clause"/> + </xsl:when> + <xsl:when test="@ipr = 'noDerivativesTrust200902'"> + <xsl:value-of select="$noDerivativesTrust200___Clause"/> + </xsl:when> + <!-- escape clause moved to Copyright Notice as of 2009-11 --> + <xsl:when test="@ipr = 'pre5378Trust200902' and $pub-yearmonth &lt; 200911"> + <xsl:value-of select="$pre5378EscapeClause"/> + </xsl:when> + + <xsl:otherwise /> + </xsl:choose> + </t> + <xsl:choose> + <xsl:when test="$id-boilerplate='2010'"> + <xsl:variable name="current-uri">http<xsl:if test="$rfc-boilerplate-use-https">s</xsl:if>://datatracker.ietf.org/drafts/current/</xsl:variable> + <t> + Internet-Drafts are working documents of the Internet Engineering + Task Force (IETF). Note that other groups may also distribute + working documents as Internet-Drafts. The list of current + Internet-Drafts is at <eref target="{$current-uri}"><xsl:value-of select="$current-uri"/></eref>. + </t> + </xsl:when> + <xsl:otherwise> + <t> + Internet-Drafts are working documents of the Internet Engineering + Task Force (IETF), its areas, and its working groups. 
+ Note that other groups may also distribute working documents as + Internet-Drafts. + </t> + </xsl:otherwise> + </xsl:choose> + <t> + Internet-Drafts are draft documents valid for a maximum of six months + and may be updated, replaced, or obsoleted by other documents at any time. + It is inappropriate to use Internet-Drafts as reference material or to cite + them other than as &#8220;work in progress&#8221;. + </t> + <xsl:if test="$id-boilerplate=''"> + <t> + The list of current Internet-Drafts can be accessed at + <eref target='http://www.ietf.org/ietf/1id-abstracts.txt'>http://www.ietf.org/ietf/1id-abstracts.txt</eref>. + </t> + <t> + The list of Internet-Draft Shadow Directories can be accessed at + <eref target='http://www.ietf.org/shadow.html'>http://www.ietf.org/shadow.html</eref>. + </t> + </xsl:if> + <t> + This Internet-Draft will expire <xsl:call-template name="expirydate"><xsl:with-param name="in-prose" select="true()"/></xsl:call-template>. + </t> + </xsl:when> + + <xsl:when test="@category='bcp' and $rfc-boilerplate='2010'"> + <t> + This memo documents an Internet Best Current Practice. + </t> + </xsl:when> + <xsl:when test="@category='bcp'"> + <t> + This document specifies an Internet Best Current Practices for the Internet + Community, and requests discussion and suggestions for improvements. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="@category='exp' and $rfc-boilerplate='2010'"> + <t> + This document is not an Internet Standards Track specification; it is + published for examination, experimental implementation, and evaluation. + </t> + </xsl:when> + <xsl:when test="@category='exp'"> + <t> + This memo defines an Experimental Protocol for the Internet community. + It does not specify an Internet standard of any kind. + Discussion and suggestions for improvement are requested. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="@category='historic' and $rfc-boilerplate='2010'"> + <t> + This document is not an Internet Standards Track specification; it is + published for the historical record. + </t> + </xsl:when> + <xsl:when test="@category='historic'"> + <t> + This memo describes a historic protocol for the Internet community. + It does not specify an Internet standard of any kind. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="@category='std' and $rfc-boilerplate='2010'"> + <t> + This is an Internet Standards Track document. + </t> + </xsl:when> + <xsl:when test="@category='std'"> + <t> + This document specifies an Internet standards track protocol for the Internet + community, and requests discussion and suggestions for improvements. + Please refer to the current edition of the &#8220;Internet Official Protocol + Standards&#8221; (STD 1) for the standardization state and status of this + protocol. Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="(@category='info' or not(@category)) and $rfc-boilerplate='2010'"> + <t> + This document is not an Internet Standards Track specification; it is + published for informational purposes. + </t> + </xsl:when> + <xsl:when test="@category='info' or not(@category)"> + <t> + This memo provides information for the Internet community. + It does not specify an Internet standard of any kind. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:otherwise> + <t> + UNSUPPORTED CATEGORY. 
+ </t> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported value for /rfc/@category: ', @category)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <!-- 2nd and 3rd paragraph --> + <xsl:if test="$rfc-boilerplate='2010' and $is-rfc"> + <t> + <xsl:if test="@category='exp'"> + This document defines an Experimental Protocol for the Internet + community. + </xsl:if> + <xsl:if test="@category='historic'"> + This document defines a Historic Document for the Internet community. + </xsl:if> + <xsl:choose> + <xsl:when test="$submissionType='IETF'"> + This document is a product of the Internet Engineering Task Force + (IETF). + <xsl:choose> + <xsl:when test="$consensus='yes'"> + It represents the consensus of the IETF community. It has + received public review and has been approved for publication by + the Internet Engineering Steering Group (IESG). + </xsl:when> + <xsl:otherwise> + It has been approved for publication by the Internet Engineering + Steering Group (IESG). + <!-- sanity check of $consensus --> + <xsl:if test="@category='std' or @category='bcp'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'IETF BCPs and Standards Track documents require IETF consensus, check values of @category and @consensus!'"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$submissionType='IAB'"> + This document is a product of the Internet Architecture Board (IAB) + and represents information that the IAB has deemed valuable to + provide for permanent record. + <xsl:if test="$consensus='yes'"> + It represents the consensus of the Internet Architecture Board (IAB). + </xsl:if> + </xsl:when> + <xsl:when test="$submissionType='IRTF'"> + This document is a product of the Internet Research Task Force (IRTF). + The IRTF publishes the results of Internet-related research and + development activities. These results might not be suitable for + deployment. + <xsl:choose> + <xsl:when test="$consensus='yes' and front/workgroup[1]!=''"> + This RFC represents the consensus of the + <xsl:value-of select="front/workgroup[1]"/> Research Group of the Internet + Research Task Force (IRTF). + </xsl:when> + <xsl:when test="$consensus='no' and front/workgroup[1]!=''"> + This RFC represents the individual opinion(s) of one or more + members of the <xsl:value-of select="front/workgroup[1]"/> Research Group of the + Internet Research Task Force (IRTF). + </xsl:when> + <xsl:otherwise> + <!-- no research group --> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$submissionType='independent'"> + This is a contribution to the RFC Series, independently of any other + RFC stream. The RFC Editor has chosen to publish this document at + its discretion and makes no statement about its value for + implementation or deployment. 
+ </xsl:when> + <xsl:otherwise> + <!-- will contain error message already --> + <xsl:value-of select="$submissionType"/> + </xsl:otherwise> + </xsl:choose> + <xsl:variable name="candidates"> + <!-- see https://www.rfc-editor.org/errata/eid5248 --> + <xsl:choose> + <xsl:when test="$pub-yearmonth &lt; 201802">a candidate</xsl:when> + <xsl:otherwise>candidates</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="$submissionType='IETF'"> + <xsl:choose> + <xsl:when test="@category='bcp'"> + Further information on BCPs is available in <xsl:copy-of select="$hab-reference"/>. + </xsl:when> + <xsl:when test="@category='std'"> + Further information on Internet Standards is available in <xsl:copy-of select="$hab-reference"/>. + </xsl:when> + <xsl:otherwise> + Not all documents approved by the IESG are <xsl:value-of select="$candidates"/> for any + level of Internet Standard; see <xsl:copy-of select="$hab-reference"/>. + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="approver"> + <xsl:choose> + <xsl:when test="$submissionType='IAB'">IAB</xsl:when> + <xsl:when test="$submissionType='IRTF'">IRSG</xsl:when> + <xsl:otherwise>RFC Editor</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + Documents approved for publication by the + <xsl:value-of select="$approver"/> are not <xsl:value-of select="$candidates"/> for any level + of Internet Standard; see <xsl:copy-of select="$hab-reference"/>. + </xsl:otherwise> + </xsl:choose> + </t> + <t> + Information about the current status of this document, any errata, and + how to provide feedback on it may be obtained at + <eref target="{$rfc-info-link}"><xsl:value-of select="$rfc-info-link"/></eref>. + </t> + </xsl:if> + + </section> + + <!-- some notes might go here; see http://www.rfc-editor.org/rfc-style-guide/rfc-style --> + <xsl:copy-of select="$notes"/> + + <xsl:choose> + <xsl:when test="$src/rfc/@ipr='none'"/> + <xsl:when test="$ipr-2008-11"> + <section anchor="{$anchor-pref}copyrightnotice"> + <name>Copyright Notice</name> + <t> + Copyright (c) <xsl:value-of select="$xml2rfc-ext-pub-year" /> IETF Trust and the persons identified + as the document authors. All rights reserved. + </t> + <xsl:choose> + <xsl:when test="$ipr-2010-01"> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents (<eref target="{$trust-license-info-link}"><xsl:value-of select="$trust-license-info-link"/></eref>) + in effect on the date of publication of this document. Please + review these documents carefully, as they describe your rights + and restrictions with respect to this document. + <xsl:if test="$submissionType='IETF'"> + Code Components extracted from this document must include + Simplified BSD License text as described in Section 4.e of the + Trust Legal Provisions and are provided without warranty as + described in the Simplified BSD License. + </xsl:if> + </t> + </xsl:when> + <xsl:when test="$ipr-2009-09"> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>) + in effect on the date of publication of this document. Please + review these documents carefully, as they describe your rights + and restrictions with respect to this document. 
Code Components + extracted from this document must include Simplified BSD License + text as described in Section 4.e of the Trust Legal Provisions + and are provided without warranty as described in the BSD License. + </t> + </xsl:when> + <xsl:when test="$ipr-2009-02"> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents in effect on the date of + publication of this document + (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>). + Please review these documents carefully, as they describe your rights and restrictions with + respect to this document. + </t> + </xsl:when> + <xsl:otherwise> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents + (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>) in effect on the date of + publication of this document. Please review these documents + carefully, as they describe your rights and restrictions with respect + to this document. + </t> + </xsl:otherwise> + </xsl:choose> + + <!-- add warning for incompatible IPR attribute on RFCs --> + <xsl:variable name="stds-rfc-compatible-ipr" + select="@ipr='pre5378Trust200902' or @ipr='trust200902' or @ipr='trust200811' or @ipr='full3978' or @ipr='full3667' or @ipr='full2026'"/> + + <xsl:variable name="rfc-compatible-ipr" + select="$stds-rfc-compatible-ipr or @ipr='noModificationTrust200902' or @ipr='noDerivativesTrust200902' or @ipr='noModificationTrust200811' or @ipr='noDerivativesTrust200811'"/> + <!-- TODO: may want to add more historic variants --> + + <xsl:variable name="is-stds-track" + select="$submissionType='IETF' and @category='std'"/> + + <xsl:variable name="status-diags"> + <xsl:choose> + <xsl:when test="$is-stds-track and $is-rfc and @ipr and not($stds-rfc-compatible-ipr)"> + <xsl:value-of select="concat('The /rfc/@ipr attribute value of ',@ipr,' is not allowed on standards-track RFCs.')"/> + </xsl:when> + <xsl:when test="$is-rfc and @ipr and not($rfc-compatible-ipr)"> + <xsl:value-of select="concat('The /rfc/@ipr attribute value of ',@ipr,' is not allowed on RFCs.')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$status-diags!=''"> + <t> + <spanx><xsl:value-of select="$status-diags"/></spanx> + </t> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$status-diags"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="($is-rfc or $pub-yearmonth >= 200911) and @ipr = 'pre5378Trust200902'"> + <!-- special case: RFC5378 escape applies to RFCs as well --> + <!-- for IDs historically in Status Of This Memo, over here starting 2009-11 --> + <t> + <xsl:value-of select="$pre5378EscapeClause"/> + </t> + </xsl:when> + <xsl:when test="not($is-rfc)"> + <!-- not an RFC, handled elsewhere --> + </xsl:when> + <xsl:when test="not(@ipr)"> + <!-- no IPR value; done --> + </xsl:when> + <xsl:when test="@ipr='trust200902' or @ipr='trust200811' or @ipr='full3978' or @ipr='full3667' or @ipr='full2026'"> + <!-- default IPR, allowed here --> + </xsl:when> + <xsl:when test="@ipr='noModificationTrust200811'"> + <t> + <xsl:value-of select="$noModificationTrust200811Clause"/> + </t> + </xsl:when> + <xsl:when test="@ipr='noModificationTrust200902'"> + <t> + <xsl:value-of select="$noModificationTrust200902Clause"/> + </t> + </xsl:when> + <xsl:when test="@ipr='noDerivativesTrust200902' or 
@ipr='noDerivativesTrust200811'"> + <t> + <xsl:value-of select="$noDerivativesTrust200___Clause"/> + </t> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="msg" select="concat('unexpected value of /rfc/@ipr for this type of document: ',@ipr)"/> + <t> + <spanx><xsl:value-of select="$msg"/></spanx> + </t> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + </section> + </xsl:when> + <xsl:when test="$ipr-2007-08"> + <!-- no copyright notice --> + </xsl:when> + <xsl:when test="$ipr-rfc4748"> + <section anchor="{$anchor-pref}copyrightnotice"> + <name>Copyright Notice</name> + <t> + Copyright &#169; The IETF Trust (<xsl:value-of select="$xml2rfc-ext-pub-year" />). All Rights Reserved. + </t> + </section> + </xsl:when> + <xsl:otherwise> + <section anchor="{$anchor-pref}copyrightnotice"> + <name>Copyright Notice</name> + <t> + Copyright &#169; The Internet Society (<xsl:value-of select="$xml2rfc-ext-pub-year" />). All Rights Reserved. + </t> + </section> + </xsl:otherwise> + </xsl:choose> +</boilerplate> + +</xsl:template> + +<!-- TOC generation --> + +<xsl:template match="/" mode="toc"> + <hr class="{$css-noprint}"/> + + <nav id="{$anchor-pref}toc"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="'toc'"/> + </xsl:call-template> + + <h2 class="np"> <!-- this pagebreak occurs always --> + <a href="#{$anchor-pref}toc">Table of Contents</a> + </h2> + + <ul class="toc"> + <xsl:apply-templates mode="toc" /> + </ul> + + <xsl:call-template name="insertTocAppendix" /> + </nav> +</xsl:template> + +<xsl:template name="insert-toc-line"> + <xsl:param name="number" /> + <xsl:param name="target" /> + <xsl:param name="title" /> + <xsl:param name="name" /> + <xsl:param name="tocparam" /> + <xsl:param name="oldtitle" /> + <xsl:param name="waschanged" /> + + <xsl:variable name="depth"> + <!-- count the dots --> + <xsl:choose> + <xsl:when test="starts-with($number,$unnumbered)"> + <xsl:value-of select="string-length(translate(substring-after($number,$unnumbered),'.ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890&#167;','.'))"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="string-length(translate($number,'.ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890&#167;','.'))"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <!-- handle tocdepth parameter --> + <xsl:choose> + <xsl:when test="(not($tocparam) or $tocparam='' or $tocparam='default') and $depth >= $parsedTocDepth"> + <!-- dropped entry because excluded --> + <xsl:attribute name="class">excluded</xsl:attribute> + </xsl:when> + <xsl:when test="$tocparam='exclude'"> + <!-- dropped entry because excluded --> + <xsl:attribute name="class">excluded</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="starts-with($number,'del-')"> + <del> + <xsl:value-of select="$number" /> + <a href="#{$target}"><xsl:value-of select="$title"/></a> + </del> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$number != '' and not(contains($number,$unnumbered))"> + <a href="#{$anchor-pref}section.{$number}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$number"/> + <xsl:with-param name="appendixPrefix" select="true()"/> + </xsl:call-template> + </a> + <xsl:text>&#160;&#160;&#160;</xsl:text> + </xsl:if> + <a href="#{$target}"> + <xsl:choose> + <xsl:when test="$waschanged!=''"> + <ins><xsl:value-of select="$title"/></ins> + <del><xsl:value-of 
select="$oldtitle"/></del> + </xsl:when> + <xsl:when test="$name"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="$name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$title"/> + </xsl:otherwise> + </xsl:choose> + </a> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="back-toc"> + + <xsl:if test="//cref and $xml2rfc-comments='yes' and $xml2rfc-inline!='yes'"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'comments')"/> + <xsl:with-param name="title" select="'Editorial Comments'"/> + </xsl:call-template> + </li> + </xsl:if> + + <xsl:if test="$xml2rfc-ext-authors-section='before-appendices'"> + <xsl:apply-templates select="/rfc/front" mode="toc" /> + </xsl:if> + <xsl:apply-templates select="back/*[not(self::references)]" mode="toc" /> + + <!-- insert the index if index entries exist --> + <xsl:if test="$has-index"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'index')"/> + <xsl:with-param name="title" select="'Index'"/> + </xsl:call-template> + </li> + </xsl:if> + + <xsl:if test="$xml2rfc-ext-authors-section='end'"> + <xsl:apply-templates select="/rfc/front" mode="toc" /> + </xsl:if> + + <!-- copyright statements --> + <xsl:if test="$xml2rfc-private='' and not($no-copylong)"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'ipr')"/> + <xsl:with-param name="title" select="'Intellectual Property and Copyright Statements'"/> + </xsl:call-template> + </li> + </xsl:if> + +</xsl:template> + +<xsl:template match="front" mode="toc"> + + <xsl:variable name="authors-title"> + <xsl:call-template name="get-authors-section-title"/> + </xsl:variable> + <xsl:variable name="authors-number"> + <xsl:call-template name="get-authors-section-number"/> + </xsl:variable> + + <xsl:if test="$authors-number!='suppress' and $xml2rfc-authorship!='no'"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'authors')"/> + <xsl:with-param name="title" select="$authors-title"/> + <xsl:with-param name="number" select="$authors-number"/> + </xsl:call-template> + </li> + </xsl:if> + +</xsl:template> + +<xsl:template name="references-toc"> + + <!-- distinguish two cases: (a) single references element (process + as toplevel section; (b) multiple references sections (add one toplevel + container with subsection) --> + + <xsl:variable name="refsecs" select="/rfc/back/references|/rfc/back/ed:replace/ed:ins/references"/> + + <xsl:choose> + <xsl:when test="count($refsecs) = 0"> + <!-- nop --> + </xsl:when> + <xsl:when test="count($refsecs) = 1"> + <xsl:for-each select="$refsecs"> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="@title!=''"><xsl:value-of select="@title" /></xsl:when> + <xsl:otherwise><xsl:value-of select="$xml2rfc-refparent"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number"> + <xsl:call-template name="get-references-section-number"/> + </xsl:with-param> + <xsl:with-param name="target" select="concat($anchor-pref,'references')"/> + <xsl:with-param name="title" select="$title"/> + <xsl:with-param name="name" select="name"/> + </xsl:call-template> + + <xsl:if test="references"> + <ul> + <xsl:for-each select="references"> + 
<xsl:call-template name="references-toc-entry"/> + </xsl:for-each> + </ul> + </xsl:if> + </li> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <li> + <!-- insert pseudo container --> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number"> + <xsl:call-template name="get-references-section-number"/> + </xsl:with-param> + <xsl:with-param name="target" select="concat($anchor-pref,'references')"/> + <xsl:with-param name="title" select="$xml2rfc-refparent"/> + </xsl:call-template> + + <ul> + <!-- ...with subsections... --> + <xsl:for-each select="$refsecs"> + <xsl:call-template name="references-toc-entry"/> + </xsl:for-each> + </ul> + </li> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="references-toc-entry"> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="@title!=''"><xsl:value-of select="@title" /></xsl:when> + <xsl:otherwise><xsl:value-of select="$xml2rfc-refparent"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + + <xsl:variable name="num"> + <xsl:number level="any"/> + </xsl:variable> + + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number" select="$sectionNumber"/> + <xsl:with-param name="target" select="concat($anchor-pref,'references','.',$num)"/> + <xsl:with-param name="title" select="$title"/> + <xsl:with-param name="name" select="name"/> + </xsl:call-template> + + <xsl:if test="references"> + <ul> + <xsl:for-each select="references"> + <xsl:call-template name="references-toc-entry"/> + </xsl:for-each> + </ul> + </xsl:if> + </li> +</xsl:template> + +<!-- suppress xml2rfc preptool artefacts --> +<xsl:template match="section[author]" mode="toc"/> + +<xsl:template match="section|appendix" mode="toc"> + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + + <xsl:variable name="target"> + <xsl:choose> + <xsl:when test="@anchor"><xsl:value-of select="@anchor" /></xsl:when> + <xsl:otherwise><xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$sectionNumber" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <!-- obtain content, just to check whether we need to recurse at all --> + <xsl:variable name="content"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number" select="$sectionNumber"/> + <xsl:with-param name="target" select="$target"/> + <xsl:with-param name="title" select="@title"/> + <xsl:with-param name="name" select="name"/> + <xsl:with-param name="tocparam" select="@toc"/> + <xsl:with-param name="oldtitle" select="@ed:old-title"/> + <xsl:with-param name="waschanged" select="@ed:resolves"/> + </xsl:call-template> + + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </li> + </xsl:variable> + + <xsl:if test="$content!=''"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number" select="$sectionNumber"/> + <xsl:with-param name="target" select="$target"/> + <xsl:with-param name="title" select="@title"/> + <xsl:with-param name="name" select="name"/> + <xsl:with-param name="tocparam" select="@toc"/> + <xsl:with-param name="oldtitle" select="@ed:old-title"/> + <xsl:with-param name="waschanged" select="@ed:resolves"/> + </xsl:call-template> + + <!-- obtain nested content, just to check whether we need to recurse at all --> + <xsl:variable name="nested-content"> + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </xsl:variable> + + <!-- only recurse if 
we need to (do not produce useless list container) --> + <xsl:if test="$nested-content!=''"> + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </xsl:if> + </li> + </xsl:if> +</xsl:template> + +<xsl:template match="middle" mode="toc"> + <xsl:apply-templates mode="toc" /> + <xsl:call-template name="references-toc" /> +</xsl:template> + +<xsl:template match="rfc" mode="toc"> + <xsl:apply-templates select="middle" mode="toc" /> + <xsl:call-template name="back-toc" /> +</xsl:template> + +<xsl:template match="ed:del|ed:ins|ed:replace" mode="toc"> + <xsl:apply-templates mode="toc" /> +</xsl:template> + +<xsl:template match="*|text()" mode="toc" /> + + +<xsl:template name="insertTocAppendix"> + + <xsl:if test="//figure[@title!='' or @anchor!='' or name]"> + <ul class="toc"> + <li> + <xsl:text>Figures</xsl:text> + <ul> + <xsl:for-each select="//figure[@title!='' or @anchor!='' or name]"> + <xsl:variable name="n"><xsl:call-template name="get-figure-number"/></xsl:variable> + <xsl:variable name="title"> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Figure </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="@title!='' or name">: </xsl:if> + </xsl:if> + <xsl:choose> + <xsl:when test="name"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(@title)" /> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'figure.',$n)" /> + <xsl:with-param name="title" select="$title" /> + </xsl:call-template> + </li> + </xsl:for-each> + </ul> + </li> + </ul> + </xsl:if> + + <!-- experimental --> + <xsl:if test="//ed:issue"> + <xsl:call-template name="insertIssuesList" /> + </xsl:if> + +</xsl:template> + +<xsl:template name="reference-name"> + <xsl:param name="node" select="."/> + + <xsl:for-each select="$node"> + <xsl:choose> + <xsl:when test="$xml2rfc-symrefs!='no' and ancestor::ed:del"> + <xsl:variable name="unprefixed" select="substring-after(@anchor,'deleted-')"/> + <xsl:choose> + <xsl:when test="$unprefixed!=''"> + <xsl:value-of select="concat('[',$unprefixed,']')"/> + </xsl:when> + <xsl:otherwise> + <xsl:if test="count(//reference[@anchor=current()/@anchor])!=1"> + <xsl:message>Deleted duplicate anchors should have the prefix "deleted-": <xsl:value-of select="@anchor"/></xsl:message> + </xsl:if> + <xsl:value-of select="concat('[',@anchor,']')"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$xml2rfc-symrefs!='no'"> + <xsl:text>[</xsl:text> + <xsl:choose> + <xsl:when test="$src/rfc/back/displayreference[@target=current()/@anchor]"> + <xsl:value-of select="$src/rfc/back/displayreference[@target=current()/@anchor]/@to"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@anchor"/> + </xsl:otherwise> + </xsl:choose> + <xsl:text>]</xsl:text> + </xsl:when> + <xsl:when test="ancestor::ed:del"> + <xsl:text>[del]</xsl:text> + </xsl:when> + <xsl:otherwise>[<xsl:number level="any" count="reference[not(ancestor::ed:del)]"/>]</xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + + + +<xsl:template name="replace-substring"> + <xsl:param name="string" /> + <xsl:param name="replace" /> + <xsl:param name="by" /> + + <xsl:choose> + <xsl:when test="contains($string,$replace)"> + <xsl:value-of select="concat(substring-before($string, $replace),$by)" /> + <xsl:call-template name="replace-substring"> + <xsl:with-param 
name="string" select="substring-after($string,$replace)" /> + <xsl:with-param name="replace" select="$replace" /> + <xsl:with-param name="by" select="$by" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$string" /></xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="rfc-or-id-link"> + <xsl:param name="name" /> + + <xsl:choose> + <xsl:when test="starts-with($name,'draft-')"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$name"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"><xsl:value-of select="$name"/></a> + <xsl:call-template name="check-front-matter-ref"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="string(number($name))=$name"> + <xsl:variable name="uri"> + <xsl:variable name="refs" select="exslt:node-set($includeDirectives)//reference|/rfc/back/references//reference"/> + <xsl:variable name="ref" select="$refs[not(starts-with(front/title,'Erratum ID')) and seriesInfo[@name='RFC' and @value=$name]]"/> + <xsl:choose> + <xsl:when test="$ref"> + <xsl:value-of select="concat('#',$ref/@anchor)"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="$name"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <a href="{$uri}"><xsl:value-of select="$name"/></a> + <xsl:call-template name="check-front-matter-ref"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$name"/> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('In metadata obsoletes/updates, RFC number of draft name is expected - found: ',$name)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="rfclist"> + <xsl:param name="list" /> + <xsl:choose> + <xsl:when test="contains($list,',')"> + <xsl:variable name="rfcNo" select="substring-before($list,',')" /> + <xsl:call-template name="rfc-or-id-link"> + <xsl:with-param name="name" select="$rfcNo"/> + </xsl:call-template> + <xsl:text>, </xsl:text> + <xsl:call-template name="rfclist"> + <xsl:with-param name="list" select="normalize-space(substring-after($list,','))" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="rfcNo" select="$list" /> + <xsl:call-template name="rfc-or-id-link"> + <xsl:with-param name="name" select="$rfcNo"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="check-front-matter-ref"> + <xsl:param name="name"/> + <xsl:variable name="refs" select="exslt:node-set($includeDirectives)//reference|/rfc/back/references//reference"/> + <xsl:choose> + <xsl:when test="starts-with($name,'draft-')"> + <xsl:if test="not($refs//seriesInfo[@name='Internet-Draft' and @value=$name])"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('front matter mentions I-D ',$name,' for which there is no reference element')"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="ref" select="$refs[not(starts-with(front/title,'Erratum ID')) and seriesInfo[@name='RFC' and @value=$name]]"/> + <xsl:if test="not($ref)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('front matter mentions RFC ',$name,' for which there is no reference element')"/> + </xsl:call-template> + </xsl:if> + 
</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="check-anchor"> + <xsl:if test="@anchor and @anchor!=''"> + <!-- check validity of anchor name --> + <xsl:variable name="t" select="@anchor"/> + <xsl:variable name="tstart" select="substring($t,1,1)"/> + + <!-- we only check for disallowed ASCII characters for now --> + <xsl:variable name="not-namestartchars">&#9;&#10;&#13;&#32;!"#$%&amp;'()*+,-./0123456789;&lt;=&gt;?@[\]^`[|}~</xsl:variable> + + <xsl:if test="$tstart!=translate($tstart,$not-namestartchars,'')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('anchor &quot;',$t,'&quot; can not start with character &quot;',$tstart,'&quot;')"/> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="check-anchor-non-start"> + <xsl:with-param name="f" select="$t"/> + <xsl:with-param name="t" select="$t"/> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="check-anchor-non-start"> + <xsl:param name="f"/> + <xsl:param name="t"/> + + <xsl:variable name="not-namechars">&#9;&#10;&#13;&#32;!"#$%&amp;'()*+,/;&lt;=&gt;?@[\]^`[|}~</xsl:variable> + + <xsl:choose> + <xsl:when test="$t=''"> + <!-- Done --> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="s" select="substring($t,1,1)"/> + <xsl:choose> + <xsl:when test="$s!=translate($s,$not-namechars,'')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('anchor &quot;',$f,'&quot; contains invalid character &quot;',$s,'&quot; at position ',string-length($f) - string-length($t))"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="check-anchor-non-start"> + <xsl:with-param name="f" select="$f"/> + <xsl:with-param name="t" select="substring($t,2)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="sluggy-anchor"> + <xsl:if test="self::section and (not(@anchor) or @anchor='')"> + <xsl:variable name="fr">ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789.'"()+-_ :!%,/@=&lt;&gt;*&#8212;&#8232;</xsl:variable> + <xsl:variable name="to">abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz0123456789.__----_--.-------.--</xsl:variable> + <xsl:variable name="canslug" select="translate(normalize-space(concat(@title,name)),$fr,'')=''"/> + <xsl:if test="$canslug"> + <xsl:variable name="slug" select="translate(normalize-space(concat(@title,name)),$fr,$to)"/> + <xsl:variable name="conflicts" select="//section[not(@anchor) and $slug=translate(normalize-space(concat(@title,name)),$fr,$to)]"/> + <xsl:choose> + <xsl:when test="count($conflicts)>1"> + <xsl:variable name="c" select="preceding::*[not(@anchor) and $slug=translate(normalize-space(concat(@title,name)),$fr,$to)]"/> + <xsl:value-of select="concat('n-',$slug,'_',(1+count($c)))"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('n-',$slug)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:if> +</xsl:template> + +<xsl:template name="copy-anchor"> + <xsl:call-template name="check-anchor"/> + <xsl:choose> + <xsl:when test="@anchor and @anchor!=''"> + <xsl:attribute name="id"><xsl:value-of select="@anchor"/></xsl:attribute> + </xsl:when> + <xsl:when test="self::section"> + <xsl:variable name="slug"> + <xsl:call-template name="sluggy-anchor"/> + </xsl:variable> + <xsl:if test="$slug!=''"> + <xsl:attribute name="id"><xsl:value-of select="$slug"/></xsl:attribute> + </xsl:if> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template 
name="rfclist-for-dcmeta"> + <xsl:param name="list" /> + <xsl:choose> + <xsl:when test="contains($list,',')"> + <xsl:variable name="rfcNo" select="substring-before($list,',')" /> + <meta name="dct.replaces" content="urn:ietf:rfc:{$rfcNo}" /> + <xsl:call-template name="rfclist-for-dcmeta"> + <xsl:with-param name="list" select="normalize-space(substring-after($list,','))" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="rfcNo" select="$list" /> + <meta name="dct.replaces" content="urn:ietf:rfc:{$rfcNo}" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-paragraph-number"> + <xsl:choose> + <!-- inside artset --> + <xsl:when test="parent::artset"> + <xsl:for-each select=".."> + <xsl:call-template name="get-paragraph-number"/> + </xsl:for-each> + </xsl:when> + + <!-- no numbering inside certain containers --> + <xsl:when test="ancestor::dl or ancestor::figure or ancestor::ol or ancestor::ul or ancestor::ed:del or ancestor::ed:ins"/> + + <xsl:when test="parent::blockquote or parent::x:blockquote"> + <!-- boilerplate --> + <xsl:for-each select="parent::blockquote|parent::x:blockquote"><xsl:call-template name="get-paragraph-number" />.</xsl:for-each> + <xsl:number count="artset|artwork|aside|blockquote|dl|ol|sourcecode|t|ul|x:blockquote|x:note"/> + </xsl:when> + + <xsl:when test="parent::aside or parent::x:note"> + <!-- boilerplate --> + <xsl:for-each select="parent::aside|parent::x:note"><xsl:call-template name="get-paragraph-number" />.</xsl:for-each> + <xsl:number count="artset|artwork|aside|blockquote|dl|ol|sourcecode|t|ul|x:blockquote|x:note"/> + </xsl:when> + + <xsl:when test="ancestor::section"> + <!-- get section number of ancestor section element, then add t number --> + <xsl:for-each select="ancestor::section[1]"><xsl:call-template name="get-section-number" />.p.</xsl:for-each> + <xsl:variable name="b"><xsl:number count="artset|artwork|aside|blockquote|dl|ol|sourcecode|t|ul|x:blockquote|x:note"/></xsl:variable> + <xsl:choose> + <xsl:when test="parent::section and ../@removeInRFC='true' and ../t[1]!=$section-removeInRFC"> + <xsl:value-of select="1 + $b"/> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$b"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + + <xsl:when test="ancestor::note"> + <!-- get section number of ancestor note element, then add t number --> + <xsl:for-each select="ancestor::note[1]"><xsl:call-template name="get-section-number" />.p.</xsl:for-each> + <xsl:variable name="b"><xsl:number count="artset|artwork|aside|blockquote|dl|ol|sourcecode|t|ul|x:blockquote|x:note"/></xsl:variable> + <xsl:choose> + <xsl:when test="parent::note and ../@removeInRFC='true' and ../t[1]!=$note-removeInRFC"> + <xsl:value-of select="1 + $b"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$b"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + <!-- abstract --> + <xsl:when test="ancestor::abstract"> + <xsl:text>p.</xsl:text> + <xsl:number count="t|x:blockquote|blockquote|x:note|aside|ul|dl|ol|artwork|artset|sourcecode"/> + </xsl:when> + + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template name="attach-paragraph-number-as-id"> + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number"/> + </xsl:variable> + <xsl:variable name="container"> + <xsl:choose> + <xsl:when test="ancestor::abstract">abstract</xsl:when> + <xsl:when test="ancestor::note">note</xsl:when> + <xsl:when test="ancestor::boilerplate">boilerplate</xsl:when> + <xsl:otherwise>section</xsl:otherwise> + </xsl:choose> + 
</xsl:variable> + <xsl:if test="$p!='' and not(ancestor::list)"> + <xsl:attribute name="id"><xsl:value-of select="concat($anchor-pref,$container,'.',$p)"/></xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template name="editingMark"> + <xsl:if test="$xml2rfc-editing='yes' and ancestor::rfc"> + <sup class="editingmark"><span><xsl:number level="any" count="postamble|preamble|t"/></span>&#0160;</sup> + </xsl:if> +</xsl:template> + +<!-- internal ref support --> +<xsl:key name="anchor-item-alias" match="//*[@anchor and (x:anchor-alias/@value or ed:replace/ed:ins/x:anchor-alias)]" use="x:anchor-alias/@value | ed:replace/ed:ins/x:anchor-alias/@value"/> + +<xsl:template match="x:ref"> + <xsl:variable name="val" select="normalize-space(.)"/> + <xsl:variable name="target" select="key('anchor-item',$val) | key('anchor-item-alias',$val) | //reference/x:source[x:defines=$val]"/> + <xsl:if test="count($target)>1"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">internal link target for '<xsl:value-of select="."/>' is ambiguous; picking first.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="$target[1]/@anchor"> + <a href="#{$target[1]/@anchor}" class="smpl"> + <xsl:call-template name="copy-anchor"/> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:if test="//iref[@x:for-anchor=$val] | //iref[@x:for-anchor='' and ../@anchor=$val]"> + <xsl:attribute name="id"><xsl:call-template name="compute-extref-anchor"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="."/> + </a> + </xsl:when> + <xsl:when test="$target[1]/self::x:source"> + <xsl:variable name="extdoc" select="document($target[1]/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor and (x:anchor-alias/@value=$val)]"/> + <xsl:choose> + <xsl:when test="not($nodes)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' not found in source file '<xsl:value-of select="$target[1]/@href"/>'.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="."/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="t"> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param name="bib" select="$target[1]/.."/> + <xsl:with-param name="ref" select="$nodes[1]"/> + </xsl:call-template> + </xsl:variable> + <a href="{$t}" class="smpl"> + <xsl:value-of select="."/> + </a> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="//x:source"> + <xsl:variable name="ref" select="."/> + <xsl:variable name="out"> + <!-- try referenced documents one by one --> + <xsl:for-each select="//reference[x:source]"> + <xsl:variable name="extdoc" select="document(x:source/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor and (x:anchor-alias/@value=$val)]"/> + <xsl:choose> + <xsl:when test="not($nodes)"> + <xsl:call-template name="trace"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' not found in source file '<xsl:value-of select="x:source/@href"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' found in source file '<xsl:value-of select="x:source/@href"/>'.</xsl:with-param> + </xsl:call-template> + <xsl:variable name="t"> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param name="ref" select="$nodes[1]"/> + </xsl:call-template> + </xsl:variable> + <a href="{$t}" class="smpl"> + <xsl:value-of select="$ref"/> + </a> + </xsl:otherwise> 
+ </xsl:choose> + </xsl:for-each> + </xsl:variable> + <xsl:copy-of select="$out"/> + <xsl:variable name="plainout" select="normalize-space($out)"/> + <xsl:if test="string-length($plainout)=0"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' not found anywhere in references.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$val"/> + </xsl:if> + <xsl:if test="string-length($plainout)!=string-length($val)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Multiple targets found for anchor '<xsl:value-of select="$val"/>' - need to disambiguate.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">internal link target for '<xsl:value-of select="."/>' does not exist.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- Nothing to do here --> +<xsl:template match="x:anchor-alias" /> + +<!-- Quotes --> +<xsl:template match="x:q"> + <q> + <xsl:copy-of select="@cite"/> + <xsl:apply-templates/> + </q> +</xsl:template> + +<!-- Notes --> +<xsl:template match="x:note|aside"> + <xsl:call-template name="check-no-text-content"/> + + <div> + <xsl:call-template name="attach-paragraph-number-as-id"/> + <aside> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </aside> + </div> +</xsl:template> + +<xsl:template match="x:bcp14|bcp14"> + <!-- check valid BCP14 keywords, then emphasize them --> + <xsl:variable name="c" select="normalize-space(translate(.,'&#160;',' '))"/> + <xsl:choose> + <xsl:when test="$c='MUST' or $c='REQUIRED' or $c='SHALL' or $c='MUST NOT' + or $c='SHALL NOT' or $c='SHOULD' or $c='RECOMMENDED' or $c='SHOULD NOT' + or $c='NOT RECOMMENDED' or $c='MAY' or $c='OPTIONAL'"> + <em class="bcp14"><xsl:value-of select="."/></em> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unknown BCP14 keyword: ',$c)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:blockquote|blockquote"> + <div> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:call-template name="attach-paragraph-number-as-id"/> + <blockquote> + <xsl:call-template name="copy-anchor"/> + <xsl:copy-of select="@cite"/> + <xsl:choose> + <xsl:when test="t|ul|ol|dl|artwork|figure|sourcecode"> + <xsl:apply-templates/> + </xsl:when> + <xsl:otherwise> + <p> + <xsl:apply-templates/> + </p> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@quotedFrom"> + <cite> + <xsl:text>&#8212; </xsl:text> + <xsl:choose> + <xsl:when test="@cite"><a href="{@cite}"><xsl:value-of select="@quotedFrom"/></a></xsl:when> + <xsl:otherwise><xsl:value-of select="@quotedFrom"/></xsl:otherwise> + </xsl:choose> + </cite> + </xsl:if> + </blockquote> + </div> +</xsl:template> + +<!-- Definitions --> +<xsl:template match="x:dfn"> + <dfn> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </dfn> +</xsl:template> + +<!-- headings --> +<xsl:template match="x:h"> + <b> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </b> +</xsl:template> + +<!-- superscripts --> +<xsl:template match="x:sup|sup"> + <sup> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </sup> +</xsl:template> + +<!-- subscripts --> +<xsl:template match="sub"> + <sub> + <xsl:call-template 
name="copy-anchor"/> + <xsl:apply-templates/> + </sub> +</xsl:template> + +<!-- bold --> +<xsl:template match="x:highlight"> + <b> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </b> +</xsl:template> + +<!-- measuring lengths --> +<xsl:template match="x:length-of"> + <xsl:variable name="target" select="//*[@anchor=current()/@target]"/> + <xsl:if test="count($target)!=1"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('@target ',@target,' defined ',count($target),' times.')"/> + </xsl:call-template> + </xsl:if> + <xsl:variable name="content"> + <xsl:apply-templates select="$target"/> + </xsl:variable> + <xsl:variable name="lineends" select="string-length($content) - string-length(translate($content,'&#10;',''))"/> + <xsl:variable name="indents"> + <xsl:choose> + <xsl:when test="@indented"> + <xsl:value-of select="number(@indented) * $lineends"/> + </xsl:when> + <xsl:otherwise>0</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="string-length($content) + $lineends - $indents"/> +</xsl:template> + +<!-- Almost Nop --> +<xsl:template match="x:span"> + <xsl:choose> + <xsl:when test="@x:lang and $prettyprint-class!=''"> + <code class="{$prettyprint-class}"> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </code> + </xsl:when> + <xsl:otherwise> + <span> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </span> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:parse-xml"> + <xsl:apply-templates/> + + <xsl:if test="function-available('exslt:node-set')"> + <xsl:variable name="cleaned"> + <xsl:apply-templates mode="cleanup-edits"/> + </xsl:variable> + <xsl:if test="$xml2rfc-ext-trace-parse-xml='yes'"> + <xsl:call-template name="trace"> + <xsl:with-param name="msg" select="concat('Parsing XML: ', $cleaned)"/> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="function-available('myns:parseXml')" use-when="function-available('myns:parseXml')"> + <xsl:if test="myns:parseXml(concat($cleaned,''))!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Parse error in XML: ', myns:parseXml(concat($cleaned,'')))"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="function-available('saxon:parse')" use-when="function-available('saxon:parse')"> + <xsl:variable name="parsed" select="saxon:parse(concat($cleaned,''))"/> + <xsl:if test="$parsed='foo'"> + <xsl:comment>should not get here</xsl:comment> + </xsl:if> + </xsl:when> + <xsl:when test="false()"></xsl:when> + <xsl:otherwise></xsl:otherwise> + </xsl:choose> + </xsl:if> +</xsl:template> + +<!-- inlined RDF support --> +<xsl:template match="rdf:Description"> + <!-- ignore --> +</xsl:template> + +<!-- cleanup for ins/del --> + +<xsl:template match="comment()|@*" mode="cleanup-edits"><xsl:copy/></xsl:template> + +<xsl:template match="text()" mode="cleanup-edits"><xsl:copy/></xsl:template> + +<xsl:template match="/" mode="cleanup-edits"> + <xsl:copy><xsl:apply-templates select="node()" mode="cleanup-edits" /></xsl:copy> +</xsl:template> + +<xsl:template match="ed:del" mode="cleanup-edits"/> + +<xsl:template match="ed:replace" mode="cleanup-edits"> + <xsl:apply-templates mode="cleanup-edits"/> +</xsl:template> + +<xsl:template match="ed:ins" mode="cleanup-edits"> + <xsl:apply-templates mode="cleanup-edits"/> +</xsl:template> + + +<!-- ABNF support --> +<xsl:template name="to-abnf-char-sequence"> + <xsl:param name="chars"/> + + <xsl:variable 
name="asciistring">&#160; !"#$%&amp;'()*+,-./<xsl:value-of select="$digits"/>:;&lt;=>?@<xsl:value-of select="$ucase"/>[\]^_`<xsl:value-of select="$lcase"/>{|}~&#127;</xsl:variable> + <xsl:variable name="hex">0123456789ABCDEF</xsl:variable> + + <xsl:variable name="c" select="substring($chars,1,1)"/> + <xsl:variable name="r" select="substring($chars,2)"/> + <xsl:variable name="pos" select="string-length(substring-before($asciistring,$c))"/> + + <xsl:choose> + <xsl:when test="$pos >= 1"> + <xsl:variable name="ascii" select="$pos + 31"/> + <xsl:variable name="h" select="floor($ascii div 16)"/> + <xsl:variable name="l" select="floor($ascii mod 16)"/> + <xsl:value-of select="concat(substring($hex,1 + $h,1),substring($hex,1 + $l,1))"/> + </xsl:when> + <xsl:otherwise> + <xsl:text>??</xsl:text> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('unexpected character in ABNF char sequence: ',substring($chars,1,1))" /> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <xsl:if test="$r!=''"> + <xsl:text>.</xsl:text> + <xsl:call-template name="to-abnf-char-sequence"> + <xsl:with-param name="chars" select="$r"/> + </xsl:call-template> + </xsl:if> + +</xsl:template> + +<xsl:template match="x:abnf-char-sequence"> + <xsl:choose> + <xsl:when test="substring(.,1,1) != '&quot;' or substring(.,string-length(.),1) != '&quot;'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'contents of x:abnf-char-sequence needs to be quoted.'" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:text>%x</xsl:text> + <xsl:call-template name="to-abnf-char-sequence"> + <xsl:with-param name="chars" select="substring(.,2,string-length(.)-2)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- box drawing --> + +<!-- nop for alignment --> +<xsl:template match="x:x"/> + +<!-- box (top) --> +<xsl:template match="x:bt"> + <xsl:text>&#x250c;</xsl:text> + <xsl:value-of select="translate(substring(.,2,string-length(.)-2),'-','&#x2500;')"/> + <xsl:text>&#x2510;</xsl:text> +</xsl:template> + +<!-- box (center) --> +<xsl:template match="x:bc"> + <xsl:variable name="first" select="substring(.,1)"/> + <xsl:variable name="content" select="substring(.,2,string-length(.)-2)"/> + <xsl:variable name="is-delimiter" select="translate($content,'-','')=''"/> + + <xsl:choose> + <xsl:when test="$is-delimiter"> + <xsl:text>&#x251c;</xsl:text> + <xsl:value-of select="translate($content,'-','&#x2500;')"/> + <xsl:text>&#x2524;</xsl:text> + </xsl:when> + <xsl:when test="*"> + <xsl:for-each select="*|text()"> + <xsl:choose> + <xsl:when test="position()=1"> + <xsl:text>&#x2502;</xsl:text> + <xsl:value-of select="substring(.,2)"/> + </xsl:when> + <xsl:when test="position()=last()"> + <xsl:value-of select="substring(.,1,string-length(.)-1)"/> + <xsl:text>&#x2502;</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:text>&#x2502;</xsl:text> + <xsl:value-of select="$content"/> + <xsl:text>&#x2502;</xsl:text> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<!-- box (bottom) --> +<xsl:template match="x:bb"> + <xsl:text>&#x2514;</xsl:text> + <xsl:value-of select="translate(substring(.,2,string-length(.)-2),'-','&#x2500;')"/> + <xsl:text>&#x2518;</xsl:text> +</xsl:template> + +<!-- author handling extensions --> +<xsl:template match="x:include-author"> + <xsl:for-each select="/*/front/author[@anchor=current()/@target]"> + 
<xsl:apply-templates select="."/> + </xsl:for-each> +</xsl:template> + +<!-- boilerplate --> +<xsl:template match="boilerplate"> + <xsl:apply-templates/> +</xsl:template> + +<!-- experimental annotation support --> + +<xsl:template match="ed:issueref"> + <xsl:choose> + <xsl:when test=".=//ed:issue/@name"> + <a href="#{$anchor-pref}issue.{.}"> + <xsl:apply-templates/> + </a> + </xsl:when> + <xsl:when test="@href"> + <a href="{@href}" id="{$anchor-pref}issue.{.}"> + <xsl:apply-templates/> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Dangling ed:issueref: <xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="ed:issue"> + <xsl:variable name="class"> + <xsl:choose> + <xsl:when test="@status='closed'">closedissue</xsl:when> + <xsl:otherwise>openissue</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <table class="{$class}"> + <tr> + <td colspan="3"> + <a id="{$anchor-pref}issue.{@name}"> + <xsl:choose> + <xsl:when test="@status='closed'"> + <xsl:attribute name="class">closed-issue</xsl:attribute> + </xsl:when> + <xsl:when test="@status='editor'"> + <xsl:attribute name="class">editor-issue</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="class">open-issue</xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;I&#160;</xsl:text> + </a> + <xsl:text>&#160;</xsl:text> + <xsl:choose> + <xsl:when test="@href"> + <em><a href="{@href}"><xsl:value-of select="@name" /></a></em> + </xsl:when> + <xsl:when test="@alternate-href"> + <em>[<a href="{@alternate-href}">alternate link</a>]</em> + </xsl:when> + <xsl:otherwise> + <em><xsl:value-of select="@name" /></em> + </xsl:otherwise> + </xsl:choose> + &#0160; + (type: <xsl:value-of select="@type"/>, status: <xsl:value-of select="@status"/>) + </td> + </tr> + + <xsl:apply-templates select="ed:item"/> + <xsl:apply-templates select="ed:resolution"/> + + <xsl:variable name="changes" select="//*[@ed:resolves=current()/@name or ed:resolves=current()/@name]" /> + <xsl:if test="$changes"> + <tr> + <td class="top" colspan="3"> + Associated changes in this document: + <xsl:variable name="issue" select="@name"/> + <xsl:for-each select="$changes"> + <a href="#{$anchor-pref}change.{$issue}.{position()}"> + <xsl:variable name="label"> + <xsl:call-template name="get-section-number"/> + </xsl:variable> + <xsl:choose> + <xsl:when test="$label!=''"><xsl:value-of select="$label"/></xsl:when> + <xsl:otherwise>&lt;<xsl:value-of select="concat('#',$anchor-pref,'change.',$issue,'.',position())"/>&gt;</xsl:otherwise> + </xsl:choose> + </a> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> + <xsl:text>.</xsl:text> + </td> + </tr> + </xsl:if> + </table> + +</xsl:template> + +<xsl:template match="ed:item"> + <tr> + <td class="top"> + <xsl:if test="@entered-by"> + <a href="mailto:{@entered-by}?subject={/rfc/@docName},%20{../@name}"> + <i><xsl:value-of select="@entered-by"/></i> + </a> + </xsl:if> + </td> + <td class="topnowrap"> + <xsl:value-of select="@date"/> + </td> + <td class="top"> + <xsl:apply-templates select="node()" mode="issuehtml"/> + </td> + </tr> +</xsl:template> + +<xsl:template match="ed:resolution"> + <tr> + <td class="top"> + <xsl:if test="@entered-by"> + <a href="mailto:{@entered-by}?subject={/rfc/@docName},%20{../@name}"><i><xsl:value-of select="@entered-by"/></i></a> + </xsl:if> + </td> + <td class="topnowrap"> + <xsl:value-of 
select="@datetime"/> + </td> + <td class="top"> + <em>Resolution:</em> + <xsl:apply-templates select="node()" mode="issuehtml"/> + </td> + </tr> +</xsl:template> + +<xsl:template match="ed:annotation"> + <em> + <xsl:apply-templates/> + </em> +</xsl:template> + +<!-- special templates for handling XHTML in issues --> +<xsl:template match="text()" mode="issuehtml"> + <xsl:value-of select="."/> +</xsl:template> + +<xsl:template match="*|@*" mode="issuehtml"> + <xsl:message terminate="yes">Unexpected node in issue HTML: <xsl:value-of select="name(.)"/></xsl:message> +</xsl:template> + +<xsl:template match="xhtml:a|xhtml:b|xhtml:br|xhtml:cite|xhtml:del|xhtml:em|xhtml:i|xhtml:ins|xhtml:q|xhtml:pre|xhtml:tt" mode="issuehtml"> + <xsl:element name="{local-name()}"> + <xsl:apply-templates select="@*|node()" mode="issuehtml"/> + </xsl:element> +</xsl:template> + +<xsl:template match="xhtml:p" mode="issuehtml"> + <xsl:apply-templates select="node()" mode="issuehtml"/> + <br class="p"/> +</xsl:template> + +<xsl:template match="xhtml:a/@href|xhtml:q/@cite" mode="issuehtml"> + <xsl:attribute name="{local-name(.)}"> + <xsl:value-of select="."/> + </xsl:attribute> +</xsl:template> + +<xsl:template match="ed:issueref" mode="issuehtml"> + <xsl:apply-templates select="."/> +</xsl:template> + +<xsl:template match="ed:eref" mode="issuehtml"> + <xsl:text>&lt;</xsl:text> + <a href="{.}"><xsl:value-of select="."/></a> + <xsl:text>&gt;</xsl:text> +</xsl:template> + +<xsl:template name="insertIssuesList"> + + <h2 id="{$anchor-pref}issues-list" ><a href="#{$anchor-pref}issues-list">Issues list</a></h2> + <table> + <thead> + <tr> + <th>Id</th> + <th>Type</th> + <th>Status</th> + <th>Date</th> + <th>Raised By</th> + </tr> + </thead> + <tbody> + <xsl:for-each select="//ed:issue"> + <xsl:sort select="@status" /> + <xsl:sort select="@name" /> + <tr> + <td><a href="#{$anchor-pref}issue.{@name}"><xsl:value-of select="@name" /></a></td> + <td><xsl:value-of select="@type" /></td> + <td><xsl:value-of select="@status" /></td> + <td><xsl:value-of select="ed:item[1]/@date" /></td> + <td><a href="mailto:{ed:item[1]/@entered-by}?subject={/rfc/@docName},%20{@name}"><xsl:value-of select="ed:item[1]/@entered-by" /></a></td> + </tr> + </xsl:for-each> + </tbody> + </table> + +</xsl:template> + +<xsl:template name="insert-diagnostics"> + + <!-- check anchor names --> + + <xsl:variable name="badAnchors" select="//*[starts-with(@anchor,$anchor-pref)]" /> + <xsl:if test="$badAnchors"> + <xsl:variable name="text"> + <xsl:text>The following anchor names may collide with internally generated anchors because of their prefix "</xsl:text> + <xsl:value-of select="$anchor-pref" /> + <xsl:text>": </xsl:text> + <xsl:for-each select="$badAnchors"> + <xsl:value-of select="@anchor"/> + <xsl:call-template name="lineno"/> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:call-template name="warning"> + <xsl:with-param name="msg"><xsl:value-of select="normalize-space($text)"/></xsl:with-param> + <xsl:with-param name="lineno" select="false()"/> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="badV3Anchors" select="//*[substring(@anchor,2,1)='-' and translate(substring(@anchor,1,1),$lcase,'')='']" /> + <xsl:if test="$badV3Anchors"> + <xsl:variable name="text"> + <xsl:text>The following anchor names may collide with internally generated anchors in XML2RFCV3 mode because: </xsl:text> + <xsl:for-each select="$badV3Anchors"> + <xsl:value-of select="@anchor"/> + <xsl:call-template name="lineno"/> + <xsl:if 
test="position()!=last()">, </xsl:if> +      </xsl:for-each> +    </xsl:variable> +    <xsl:call-template name="warning"> +      <xsl:with-param name="msg"><xsl:value-of select="normalize-space($text)"/></xsl:with-param> +      <xsl:with-param name="lineno" select="false()"/> +    </xsl:call-template> +  </xsl:if> + +  <xsl:variable name="all-refs" select="/rfc/back/references/reference|exslt:node-set($includeDirectives)//reference|exslt:node-set($sourcedReferences)//reference"/> + +  <!-- check ABNF syntax references --> +  <xsl:if test="//artwork[@type='abnf2616' or @type='abnf7230']|//sourcecode[@type='abnf2616' or @type='abnf7230']"> +    <xsl:if test="not($all-refs//seriesInfo[@name='RFC' and (@value='2068' or @value='2616' or @value='7230')]) and not($all-refs//seriesInfo[@name='Internet-Draft' and (starts-with(@value, 'draft-ietf-httpbis-p1-messaging-') or starts-with(@value, 'draft-ietf-httpbis-semantics-'))])"> +      <!-- check for draft-ietf-httpbis-p1-messaging- is for backwards compat --> +      <xsl:call-template name="warning"> +        <xsl:with-param name="msg">document uses HTTP-style ABNF syntax, but doesn't reference RFC 2068, RFC 2616, or RFC 7230.</xsl:with-param> +      </xsl:call-template> +    </xsl:if> +  </xsl:if> +  <xsl:if test="//artwork[@type='abnf']|//sourcecode[@type='abnf']"> +    <xsl:if test="not($all-refs//seriesInfo[@name='RFC' and (@value='2234' or @value='4234' or @value='5234')])"> +      <xsl:call-template name="warning"> +        <xsl:with-param name="msg">document uses ABNF syntax, but doesn't reference RFC 2234, 4234 or 5234.</xsl:with-param> +      </xsl:call-template> +    </xsl:if> +  </xsl:if> + +  <!-- check IDs --> +  <xsl:variable name="badTargets" select="//xref[not(ancestor::toc)][not(@target=//@anchor) and not(@target=exslt:node-set($includeDirectives)//@anchor) and not(ancestor::ed:del)]" /> +  <xsl:if test="$badTargets"> +    <xsl:variable name="text"> +      <xsl:text>The following target names do not exist: </xsl:text> +      <xsl:for-each select="$badTargets"> +        <xsl:value-of select="@target"/> +        <xsl:if test="not(@target)">(@target attribute missing)</xsl:if> +        <xsl:call-template name="lineno"/> +        <xsl:if test="position()!=last()"> +          <xsl:text>, </xsl:text> +        </xsl:if> +      </xsl:for-each> +    </xsl:variable> +    <xsl:call-template name="warning"> +      <xsl:with-param name="msg"><xsl:value-of select="$text"/></xsl:with-param> +    </xsl:call-template> +  </xsl:if> + + +</xsl:template> + +<!-- special change mark support, not supported by RFC2629 yet --> + +<xsl:template match="@ed:*" /> + +<xsl:template match="ed:del"> +  <xsl:call-template name="insert-issue-pointer"/> +  <del> +    <xsl:copy-of select="@*[namespace-uri()='']"/> +    <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> +      <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> +    </xsl:if> +    <xsl:apply-templates /> +  </del> +</xsl:template> + +<xsl:template match="ed:ins"> +  <xsl:call-template name="insert-issue-pointer"/> +  <ins> +    <xsl:copy-of select="@*[namespace-uri()='']"/> +    <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> +      <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> +    </xsl:if> +    <xsl:apply-templates /> +  </ins> +</xsl:template> + +<xsl:template name="insert-issue-pointer"> +  <xsl:param name="deleted-anchor"/> +  <xsl:variable name="change" select="."/> +  <xsl:for-each select="@ed:resolves|ed:resolves"> +    <xsl:variable 
name="resolves" select="."/> + <!-- need the right context node for proper numbering --> + <xsl:variable name="count"><xsl:for-each select=".."><xsl:number level="any" count="*[@ed:resolves=$resolves or ed:resolves=$resolves]" /></xsl:for-each></xsl:variable> + <xsl:variable name="total" select="count(//*[@ed:resolves=$resolves or ed:resolves=$resolves])" /> + <xsl:variable name="id"> + <xsl:value-of select="$anchor-pref"/>change.<xsl:value-of select="$resolves"/>.<xsl:value-of select="$count" /> + </xsl:variable> + <xsl:choose> + <!-- block level? --> + <xsl:when test="not(ancestor::t) and not(ancestor::title) and not(ancestor::figure) and not($change/@ed:old-title)"> + <div class="issuepointer {$css-noprint}"> + <xsl:if test="not($deleted-anchor)"> + <xsl:attribute name="id"><xsl:value-of select="$id"/></xsl:attribute> + </xsl:if> + <xsl:if test="$count > 1"> + <a class="bg-issue" title="previous change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count - 1}">&#x2191;</a> + </xsl:if> + <a class="open-issue" href="#{$anchor-pref}issue.{$resolves}" title="resolves: {$resolves}"> + <xsl:choose> + <xsl:when test="//ed:issue[@name=$resolves and @status='closed']"> + <xsl:attribute name="class">closed-issue</xsl:attribute> + </xsl:when> + <xsl:when test="//ed:issue[@name=$resolves and @status='editor']"> + <xsl:attribute name="class">editor-issue</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="class">open-issue</xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;I&#160;</xsl:text> + </a> + <xsl:if test="$count &lt; $total"> + <a class="bg-issue" title="next change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count + 1}">&#x2193;</a> + </xsl:if> + <xsl:text>&#160;</xsl:text> + </div> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$count > 1"> + <a class="bg-issue" title="previous change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count - 1}">&#x2191;</a> + </xsl:if> + <a title="resolves: {$resolves}" href="#{$anchor-pref}issue.{$resolves}"> + <xsl:if test="not($deleted-anchor)"> + <xsl:attribute name="id"><xsl:value-of select="$id"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="//ed:issue[@name=$resolves and @status='closed']"> + <xsl:attribute name="class">closed-issue <xsl:value-of select="$css-noprint"/></xsl:attribute> + </xsl:when> + <xsl:when test="//ed:issue[@name=$resolves and @status='editor']"> + <xsl:attribute name="class">editor-issue <xsl:value-of select="$css-noprint"/></xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="class">open-issue <xsl:value-of select="$css-noprint"/></xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;I&#160;</xsl:text> + </a> + <xsl:if test="$count &lt; $total"> + <a class="bg-issue" title="next change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count + 1}">&#x2193;</a> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + +<xsl:template match="ed:replace"> + <!-- we need to special-case things like lists and tables --> + <xsl:choose> + <xsl:when test="parent::list"> + <xsl:apply-templates select="ed:del/node()" /> + <xsl:apply-templates select="ed:ins/node()" /> + </xsl:when> + <xsl:when test="parent::references"> + <xsl:apply-templates select="ed:del/node()" /> + <xsl:apply-templates select="ed:ins/node()" /> + </xsl:when> + <xsl:otherwise> + <xsl:if test="@cite"> + <a class="editor-issue" href="{@cite}" target="_blank" title="see {@cite}"> + 
<xsl:text>&#160;i&#160;</xsl:text> +        </a> +      </xsl:if> +      <xsl:call-template name="insert-issue-pointer"/> +      <xsl:if test="ed:del"> +        <del> +          <xsl:copy-of select="@*[namespace-uri()='']"/> +          <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> +            <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> +          </xsl:if> +          <xsl:apply-templates select="ed:del/node()" /> +        </del> +      </xsl:if> +      <xsl:if test="ed:ins"> +        <ins> +          <xsl:copy-of select="@*[namespace-uri()='']"/> +          <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> +            <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> +          </xsl:if> +          <xsl:apply-templates select="ed:ins/node()" /> +        </ins> +      </xsl:if> +    </xsl:otherwise> +  </xsl:choose> +</xsl:template> + +<!-- convenience template for helping Mozilla (pre/ins inheritance problem) --> +<xsl:template name="insertInsDelClass"> +  <xsl:if test="ancestor::ed:del"> +    <xsl:attribute name="class">del</xsl:attribute> +  </xsl:if> +  <xsl:if test="ancestor::ed:ins"> +    <xsl:attribute name="class">ins</xsl:attribute> +  </xsl:if> +</xsl:template> + + +<xsl:template name="sectionnumberAndEdits"> +  <xsl:choose> +    <xsl:when test="ancestor::ed:del"> +      <xsl:text>del-</xsl:text> +      <xsl:number count="ed:del//section" level="any"/> +    </xsl:when> +    <xsl:when test="@x:fixed-section-number and @x:fixed-section-number!=''"> +      <xsl:value-of select="@x:fixed-section-number"/> +    </xsl:when> +    <xsl:when test="(@x:fixed-section-number and @x:fixed-section-number='') or @numbered='false'"> +      <xsl:value-of select="$unnumbered"/> +      <xsl:number count="section[@x:fixed-section-number='' or @numbered='false']" level="any"/> +    </xsl:when> +    <xsl:when test="self::section and parent::ed:ins and local-name(../..)='replace'"> +      <xsl:for-each select="../.."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> +      <xsl:for-each select=".."> +        <xsl:if test="parent::ed:replace"> +          <xsl:for-each select=".."> +            <xsl:if test="parent::section">.</xsl:if> +            <xsl:variable name="cnt" select="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> +            <xsl:choose> +              <xsl:when test="ancestor::back and not(ancestor::section)"><xsl:number format="A" value="$cnt"/></xsl:when> +              <xsl:otherwise><xsl:value-of select="$cnt"/></xsl:otherwise> +            </xsl:choose> +          </xsl:for-each> +        </xsl:if> +      </xsl:for-each> +    </xsl:when> +    <xsl:when test="self::section[parent::ed:ins]"> +      <xsl:for-each select="../.."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> +      <xsl:for-each select=".."> +        <xsl:if test="parent::section">.</xsl:if><xsl:value-of select="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> +      </xsl:for-each> +    </xsl:when> +    <xsl:when test="self::section"> +      <xsl:for-each select=".."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> +      <xsl:if test="parent::section">.</xsl:if> +      <xsl:choose> +        <xsl:when test="parent::back"> +          <xsl:number format="A" value="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> +        </xsl:when> +        <xsl:otherwise> +          <xsl:number value="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> + 
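<!-- Note: the count above includes preceding sibling sections as well as sections contributed by preceding ed:ins and ed:replace/ed:ins markup, so sections inserted through editorial markup take part in the numbering. --> +         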
</xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::references"> + <xsl:choose> + <xsl:when test="count(/*/back/references)+count(/*/back/ed:replace/ed:ins/references)=1"><xsl:call-template name="get-references-section-number"/></xsl:when> + <xsl:otherwise><xsl:call-template name="get-references-section-number"/>.<xsl:number level="any"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::middle or self::back"><!-- done --></xsl:when> + <xsl:otherwise> + <!-- go up one level --> + <xsl:for-each select=".."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- utilities for warnings --> + +<xsl:template name="trace"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="inline" select="'no'"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">TRACE</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="$inline"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="inline-warning"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">WARNING</xsl:with-param> + <xsl:with-param name="dlevel">3</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="'yes'"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="warning"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">WARNING</xsl:with-param> + <xsl:with-param name="dlevel">3</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="info"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">INFO</xsl:with-param> + <xsl:with-param name="dlevel">2</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="error"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="inline"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">ERROR</xsl:with-param> + <xsl:with-param name="dlevel">4</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="$inline"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-message-inline"> + <xsl:param name="message"/> + <xsl:choose> + <xsl:when test="ancestor::t or ancestor-or-self::seriesInfo"> + <span class="{$css-error}"><xsl:value-of select="$message"/></span> + </xsl:when> + <xsl:otherwise> + <div 
class="{$css-error}"><xsl:value-of select="$message"/></div> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="emit-message"> + <xsl:param name="level">DEBUG</xsl:param> + <xsl:param name="dlevel">0</xsl:param> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="inline"/> + <xsl:param name="lineno" select="true()"/> + <xsl:if test="$dlevel >= $log-level"> + <xsl:variable name="message"><xsl:value-of select="$level"/>: <xsl:value-of select="$msg"/><xsl:if test="$msg2!=''"> - <xsl:value-of select="$msg2"/></xsl:if><xsl:if test="$lineno"><xsl:call-template name="lineno"/></xsl:if></xsl:variable> + <xsl:choose> + <xsl:when test="$inline!='no'"> + <xsl:call-template name="emit-message-inline"> + <xsl:with-param name="message" select="$message"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- this fails when the message contains characters not encodable in the output encoding --> + <!-- <xsl:comment><xsl:value-of select="$message"/></xsl:comment> --> + </xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="$dlevel >= $abort-log-level"> + <xsl:message terminate="yes"><xsl:value-of select="$message"/></xsl:message> + </xsl:when> + <xsl:otherwise> + <xsl:message><xsl:value-of select="$message"/></xsl:message> + </xsl:otherwise> + </xsl:choose> + </xsl:if> +</xsl:template> + +<!-- table formatting --> + +<xsl:template match="table"> + <div class="{$css-tt}"> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="iref"/> + <xsl:variable name="style"> + <xsl:text>v3 </xsl:text> + <xsl:choose> + <xsl:when test="@align='left'"><xsl:value-of select="$css-tleft"/></xsl:when> + <xsl:when test="@align='right'"><xsl:value-of select="$css-tright"/></xsl:when> + <xsl:when test="@align='center' or not(@align) or @align=''"><xsl:value-of select="$css-tcenter"/></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <table class="{$style}"> + <xsl:variable name="n"><xsl:call-template name="get-table-number"/></xsl:variable> + <caption> + <xsl:text>Table </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="name"> + <xsl:text>: </xsl:text> + <xsl:apply-templates select="name/node()"/> + </xsl:if> + </caption> + <xsl:apply-templates select="*[not(self::iref)]"/> + </table> + </div> +</xsl:template> + +<xsl:template match="table/name"/> + +<xsl:template match="tbody"> + <tbody> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </tbody> +</xsl:template> + +<xsl:template match="tfoot"> + <tfoot> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </tfoot> +</xsl:template> + +<xsl:template match="thead"> + <thead> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </thead> +</xsl:template> + +<xsl:template match="tr"> + <tr> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </tr> +</xsl:template> + +<xsl:template name="t-alignment"> + <xsl:attribute name="class"> + <xsl:choose> + <xsl:when test="@align='left' or not(@align) or @align=''"><xsl:value-of select="$css-left"/></xsl:when> + <xsl:when test="@align='right'"><xsl:value-of select="$css-right"/></xsl:when> + <xsl:when test="@align='center'"><xsl:value-of select="$css-center"/></xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unknown align attribute: <xsl:value-of select="@align"/></xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:attribute> +</xsl:template> + 
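+<!-- Example: a v3 cell such as <td align="center" colspan="2"> comes out of the templates below as an HTML td carrying the class selected by t-alignment above ($css-center in this case) plus the copied @colspan; the cell's child nodes are processed as usual. -->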
+<xsl:template match="td"> + <td> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="t-alignment"/> + <xsl:copy-of select="@colspan|@rowspan"/> + <xsl:apply-templates select="node()"/> + </td> +</xsl:template> + +<xsl:template match="th"> + <th> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="t-alignment"/> + <xsl:copy-of select="@colspan|@rowspan"/> + <xsl:apply-templates select="node()"/> + </th> +</xsl:template> + +<xsl:template match="texttable"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:variable name="anch"> + <xsl:call-template name="get-table-anchor"/> + </xsl:variable> + + <div id="{$anch}" class="{$css-tt}"> + + <xsl:if test="@anchor!=''"> + <div id="{@anchor}"/> + </xsl:if> + <xsl:apply-templates select="preamble" /> + + <xsl:variable name="style"> + <xsl:value-of select="$css-tt"/> + <xsl:text> </xsl:text> + <xsl:choose> + <xsl:when test="@style!=''"> + <xsl:value-of select="@style"/> + </xsl:when> + <xsl:otherwise>full</xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="@align='left'"><xsl:text> </xsl:text><xsl:value-of select="$css-tleft"/></xsl:when> + <xsl:when test="@align='right'"><xsl:text> </xsl:text><xsl:value-of select="$css-tright"/></xsl:when> + <xsl:when test="@align='center' or not(@align) or @align=''"><xsl:text> </xsl:text><xsl:value-of select="$css-tcenter"/></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <table class="{$style}"> + <xsl:if test="(@title!='') or (@anchor!='' and not(@suppress-title='true'))"> + <xsl:variable name="n"><xsl:call-template name="get-table-number"/></xsl:variable> + <caption> + <xsl:if test="@x:caption-side='top'"> + <xsl:attribute name="class">caption-top</xsl:attribute> + </xsl:if> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Table </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="@title!=''">: </xsl:if> + </xsl:if> + <xsl:if test="@title!=''"> + <xsl:value-of select="@title" /> + </xsl:if> + </caption> + </xsl:if> + + <xsl:if test="ttcol!=''"> + <!-- skip header when all column titles are empty --> + <thead> + <tr> + <xsl:apply-templates select="ttcol" /> + </tr> + </thead> + </xsl:if> + <tbody> + <xsl:variable name="columns" select="count(ttcol)" /> + <xsl:variable name="fields" select="c | ed:replace/ed:ins/c | ed:replace/ed:del/c" /> + <xsl:for-each select="$fields[$columns=1 or (position() mod $columns) = 1]"> + <tr> + <xsl:for-each select=". 
| following-sibling::c[position() &lt; $columns]"> + <td> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="pos" select="position()" /> + <xsl:variable name="col" select="../ttcol[position() = $pos]" /> + <xsl:choose> + <xsl:when test="$col/@align='right' or $col/@align='center'"> + <xsl:attribute name="class"><xsl:value-of select="$col/@align"/></xsl:attribute> + </xsl:when> + <xsl:when test="$col/@align='left' or not($col/@align)"> + <xsl:attribute name="class"><xsl:value-of select="$css-left"/></xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unknown align attribute on ttcol: <xsl:value-of select="$col/@align"/></xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:apply-templates select="node()" /> + </td> + </xsl:for-each> + </tr> + </xsl:for-each> + </tbody> + </table> + <xsl:apply-templates select="postamble" /> + </div> + +</xsl:template> + +<xsl:template match="ttcol"> + <th> + + <xsl:choose> + <xsl:when test="@align='right' or @align='center' or @align='left'"> + <xsl:attribute name="class"><xsl:value-of select="@align"/></xsl:attribute> + </xsl:when> + <xsl:when test="not(@align)"> + <!-- that's the default, nothing to do here --> + </xsl:when> + <xsl:otherwise> + <xsl:message>Unknown align attribute on ttcol: <xsl:value-of select="@align"/></xsl:message> + </xsl:otherwise> + </xsl:choose> + + <xsl:if test="@width"> + <xsl:attribute name="style">width: <xsl:value-of select="@width" />;</xsl:attribute> + </xsl:if> + + <xsl:apply-templates /> + </th> +</xsl:template> + +<!-- cref support --> + +<xsl:template name="get-comment-name"> + <xsl:choose> + <xsl:when test="@anchor"> + <xsl:value-of select="@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>comment.</xsl:text> + <xsl:number count="cref[not(@anchor)]" level="any"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="cref[@display='false']"> + <!-- hidden --> +</xsl:template> + +<xsl:template match="cref[not(@display) or @display!='false']"> + <xsl:if test="$xml2rfc-comments!='no'"> + <xsl:variable name="cid"> + <xsl:call-template name="get-comment-name"/> + </xsl:variable> + + <span class="comment"> + <xsl:choose> + <xsl:when test="$xml2rfc-inline='yes'"> + <xsl:attribute name="id"> + <xsl:value-of select="$cid"/> + </xsl:attribute> + <xsl:text>[</xsl:text> + <xsl:if test="@anchor or (not(/rfc/@version) or /rfc/@version &lt; 3)"> + <a href="#{$cid}" class="smpl"> + <xsl:value-of select="$cid"/> + </a> + <xsl:text>: </xsl:text> + </xsl:if> + <xsl:apply-templates select="text()|eref|xref"/> + <xsl:if test="@source"> --<xsl:value-of select="@source"/></xsl:if> + <xsl:text>]</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="title"> + <xsl:if test="@source"><xsl:value-of select="@source"/>: </xsl:if> + <xsl:variable name="content"> + <xsl:apply-templates select="text()|eref|xref"/> + </xsl:variable> + <xsl:value-of select="$content"/> + </xsl:attribute> + <xsl:text>[</xsl:text> + <a href="#{$cid}"> + <xsl:value-of select="$cid"/> + </a> + <xsl:text>]</xsl:text> + </xsl:otherwise> + </xsl:choose> + </span> + </xsl:if> +</xsl:template> + +<xsl:template name="insertComments"> + + <xsl:call-template name="insert-conditional-hrule"/> + + <h2> + <xsl:call-template name="insert-conditional-pagebreak"/> + <a id="{$anchor-pref}comments" href="#{$anchor-pref}comments">Editorial Comments</a> + </h2> + + 
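<!-- One entry per visible cref: the term is the comment id (its @anchor, or an auto-generated "comment.N" name), the definition is the comment text followed by an optional source attribution. --> +  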
<dl> + <xsl:for-each select="//cref[not(@display) or @display!='false']"> + <xsl:variable name="cid"> + <xsl:choose> + <xsl:when test="@anchor"> + <xsl:value-of select="@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>comment.</xsl:text> + <xsl:number count="cref[not(@anchor)]" level="any"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <dt id="{$cid}"> + [<xsl:value-of select="$cid"/>] + </dt> + <dd> + <xsl:apply-templates select="node()"/> + <xsl:if test="@source"> --<xsl:value-of select="@source"/></xsl:if> + </dd> + </xsl:for-each> + </dl> +</xsl:template> + + +<!-- Chapter Link Generation --> + +<xsl:template match="*" mode="links"><xsl:apply-templates mode="links"/></xsl:template> +<xsl:template match="text()" mode="links" /> + +<xsl:template match="/*/middle//section[not(ancestor::section)]" mode="links"> + <xsl:variable name="sectionNumber"><xsl:call-template name="get-section-number" /></xsl:variable> + <xsl:variable name="title"> + <xsl:if test="$sectionNumber!='' and not(contains($sectionNumber,$unnumbered))"> + <xsl:value-of select="$sectionNumber"/> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <link rel="Chapter" title="{$title}" href="#{$anchor-pref}section.{$sectionNumber}"/> + <xsl:apply-templates mode="links" /> +</xsl:template> + +<xsl:template match="/*/back//section[not(ancestor::section)]" mode="links"> + <xsl:variable name="sectionNumber"><xsl:call-template name="get-section-number" /></xsl:variable> + <xsl:variable name="title"> + <xsl:if test="$sectionNumber!='' and not(contains($sectionNumber,$unnumbered))"> + <xsl:value-of select="$sectionNumber"/> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <link rel="Appendix" title="{$title}" href="#{$anchor-pref}section.{$sectionNumber}"/> + <xsl:apply-templates mode="links" /> +</xsl:template> + +<xsl:template match="/*/back/references[position()=1]" mode="links"> + <xsl:variable name="sectionNumber"><xsl:call-template name="get-references-section-number" /></xsl:variable> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="@title and count(/*/back/references)=1"> + <xsl:call-template name="get-references-section-number"/> + <xsl:text> </xsl:text> + <xsl:value-of select="@title"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-references-section-number"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-refparent"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <link rel="Chapter" title="{$title}" href="#{$anchor-pref}section.{$sectionNumber}"/> +</xsl:template> + +<!-- convenience templates --> + +<xsl:template name="get-author-summary"> + <xsl:choose> + <xsl:when test="count(/rfc/front/author)=1"> + <xsl:value-of select="/rfc/front/author[1]/@surname" /> + </xsl:when> + <xsl:when test="count(/rfc/front/author)=2"> + <xsl:value-of select="concat(/rfc/front/author[1]/@surname,' &amp; ',/rfc/front/author[2]/@surname)" /> + </xsl:when> + 
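<!-- Three or more authors collapse to "Surname, et al." in the fallback below. --> +    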
<xsl:otherwise> + <xsl:value-of select="concat(/rfc/front/author[1]/@surname,', et al.')" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-bottom-center"> + <xsl:choose> + <xsl:when test="/rfc/@docName"> + <!-- for IDs, use the expiry date --> + <xsl:text>Expires </xsl:text><xsl:call-template name="expirydate" /> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-category-long"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-category-long"> + <xsl:choose> + <xsl:when test="$xml2rfc-footer!=''"><xsl:value-of select="$xml2rfc-footer" /></xsl:when> + <xsl:when test="$xml2rfc-private!=''"/> <!-- private draft, footer not set --> + <xsl:when test="/rfc/@category='bcp'">Best Current Practice</xsl:when> + <xsl:when test="/rfc/@category='historic'">Historic</xsl:when> + <xsl:when test="/rfc/@category='info' or not(/rfc/@category)">Informational</xsl:when> + <xsl:when test="/rfc/@category='std'">Standards Track</xsl:when> + <xsl:when test="/rfc/@category='exp'">Experimental</xsl:when> + <xsl:otherwise>(category unknown)</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-header-center"> + <xsl:choose> + <xsl:when test="string-length(/rfc/front/title/@abbrev) &gt; 0"> + <xsl:value-of select="/rfc/front/title/@abbrev" /> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="/rfc/front/title" mode="get-text-content" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-header-left"> + <xsl:choose> + <xsl:when test="$xml2rfc-header!=''"><xsl:value-of select="$xml2rfc-header" /></xsl:when> + <xsl:when test="$xml2rfc-private!=''"/> <!-- private draft, header not set --> + <xsl:when test="/rfc/@ipr and not($is-rfc)">Internet-Draft</xsl:when> + <xsl:otherwise>RFC <xsl:value-of select="$rfcno"/></xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-generator"> + <xsl:variable name="gen"> + <xsl:text>http://greenbytes.de/tech/webdav/rfc2629.xslt, </xsl:text> + <!-- when RCS keyword substitution in place, add version info --> + <xsl:if test="contains('$Revision: 1.1328 $',':')"> + <xsl:value-of select="concat('Revision ',normalize-space(translate(substring-after('$Revision: 1.1328 $', 'Revision: '),'$','')),', ')" /> + </xsl:if> + <xsl:if test="contains('$Date: 2020/09/24 11:34:24 $',':')"> + <xsl:value-of select="concat(normalize-space(translate(substring-after('$Date: 2020/09/24 11:34:24 $', 'Date: '),'$','')),', ')" /> + </xsl:if> + <xsl:variable name="product" select="normalize-space(concat(system-property('xsl:product-name'),' ',system-property('xsl:product-version')))"/> + <xsl:if test="$product!=''"> + <xsl:value-of select="concat('XSLT processor: ',$product,', ')"/> + </xsl:if> + <xsl:value-of select="concat('XSLT vendor: ',system-property('xsl:vendor'),' ',system-property('xsl:vendor-url'))" /> + </xsl:variable> + <xsl:variable name="via"> + <xsl:variable name="c1" select="/comment()[starts-with(normalize-space(.),'generated by ')]"/> + <xsl:variable name="mmark-lookup">name="GENERATOR" content=</xsl:variable> + <xsl:variable name="c2" select="/comment()[starts-with(normalize-space(.),$mmark-lookup)]"/> + <xsl:choose> + <xsl:when test="$c1"> + <xsl:value-of select="substring-after(normalize-space($c1),'generated by ')"/> + </xsl:when> + <xsl:when test="$c2"> + <xsl:variable name="remove">"</xsl:variable> + <xsl:value-of select="translate(substring-after(normalize-space($c2),$mmark-lookup),$remove,'')"/> + </xsl:when> + <xsl:otherwise/> + 
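<!-- $via is derived from a top-level comment emitted by the generating tool: either a "generated by ..." comment or an mmark GENERATOR meta comment. --> +    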
</xsl:choose> + </xsl:variable> + <xsl:value-of select="$gen" /> + <xsl:if test="$via!=''"> + <xsl:text>, via: </xsl:text> + <xsl:value-of select="$via"/> + </xsl:if> +</xsl:template> + +<xsl:template name="get-header-right"> + <xsl:if test="$xml2rfc-ext-pub-day!='' and /rfc/front/date/@x:include-day='true' and $is-rfc"> + <xsl:value-of select="number($xml2rfc-ext-pub-day)" /> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:value-of select="concat($xml2rfc-ext-pub-month, ' ', $xml2rfc-ext-pub-year)" /> +</xsl:template> + +<xsl:template name="get-keywords"> + <xsl:for-each select="/rfc/front/keyword"> + <xsl:if test="contains(.,',')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">keyword element appears to contain a comma-separated list, split into multiple elements instead.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="normalize-space(.)" /> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> +</xsl:template> + +<!-- get language from context node. nearest ancestor or return the default of "en" --> +<xsl:template name="get-lang"> + <xsl:choose> + <xsl:when test="ancestor-or-self::*[@xml:lang]"><xsl:value-of select="ancestor-or-self::*/@xml:lang" /></xsl:when> + <xsl:otherwise>en</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-section-number"> + <xsl:variable name="anchor" select="@anchor"/> + <xsl:choose> + <xsl:when test="self::note"> + <xsl:number count="note"/> + </xsl:when> + <xsl:when test="@x:fixed-section-number and @x:fixed-section-number!=''"> + <xsl:value-of select="@x:fixed-section-number"/> + </xsl:when> + <xsl:when test="(@x:fixed-section-number and @x:fixed-section-number='') or ancestor-or-self::*/@numbered='false'"> + <xsl:value-of select="$unnumbered"/> + <xsl:number count="section[@x:fixed-section-number='' or ancestor-or-self::*/@numbered='false']" level="any"/> + <!-- checks --> + <xsl:if test="@numbered='false'"> + <xsl:if test="ancestor::section or ancestor::section"> + <xsl:call-template name="error"> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="msg">Only top-level sections can be unnumbered</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="following-sibling::section[not(@numbered) or @numbered!='false']"> + <xsl:call-template name="error"> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="msg">Unnumbered section is followed by numbered sections</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="ancestor::middle and ../../back/references"> + <xsl:call-template name="error"> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="msg">Unnumbered section is followed by (numbered) references section</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:if> + </xsl:when> + <xsl:when test="$has-edits or ancestor::*/@x:fixed-section-number"> + <xsl:call-template name="sectionnumberAndEdits" /> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="self::references and not(parent::references)"> + <xsl:choose> + <xsl:when test="count(/*/back/references)=1"> + <xsl:call-template name="get-references-section-number"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-references-section-number"/>.<xsl:number count="references"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::references and parent::references"> + <xsl:for-each select=".."><xsl:call-template name="get-section-number"/></xsl:for-each>.<xsl:number count="references"/> + 
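<!-- Nested <references> sections get the enclosing references number plus a dotted index, e.g. 8.1, 8.2 (the actual numbers depend on the document). --> +      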
</xsl:when> + <xsl:when test="self::reference"> + <xsl:for-each select="parent::references"> + <xsl:choose> + <xsl:when test="count(/*/back/references)=1"> + <xsl:call-template name="get-references-section-number"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-references-section-number"/>.<xsl:number count="references"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:when test="ancestor::reference"> + <xsl:for-each select="ancestor::reference"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="ancestor::back"><xsl:number count="section|appendix" level="multiple" format="A.1.1.1.1.1.1.1" /></xsl:when> + <xsl:when test="self::appendix"><xsl:number count="appendix" level="multiple" format="A.1.1.1.1.1.1.1" /></xsl:when> + <xsl:otherwise><xsl:number count="section" level="multiple"/></xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- get the section number for the references section --> +<xsl:template name="get-references-section-number"> + <xsl:value-of select="count(/rfc/middle/section[not(@numbered) or @numbered!='false']) + count(/rfc/middle/ed:replace/ed:ins/section[not(@numbered) or @numbered!='false']) + 1"/> +</xsl:template> + +<xsl:template name="emit-section-number"> + <xsl:param name="no"/> + <xsl:param name="appendixPrefix" select="false()"/> + <xsl:if test="$appendixPrefix and translate($no,$ucase,'')=''">Appendix </xsl:if> + <xsl:value-of select="$no"/><xsl:if test="not(contains($no,'.')) or $xml2rfc-ext-sec-no-trailing-dots!='no'">.</xsl:if> +</xsl:template> + +<xsl:template name="get-section-type"> + <xsl:choose> + <xsl:when test="self::abstract">Abstract</xsl:when> + <xsl:when test="self::note">Note</xsl:when> + <xsl:when test="ancestor::back and not(self::references)">Appendix</xsl:when> + <xsl:otherwise>Section</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-table-number"> + <xsl:choose> + <xsl:when test="self::table or @anchor!=''"> + <xsl:number level="any" count="texttable[@anchor!='']|table" /> + </xsl:when> + <xsl:otherwise> + <xsl:text>u.</xsl:text> + <xsl:number level="any" count="texttable[not(@anchor) or @anchor='']" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-table-anchor"> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>table.</xsl:text> + <xsl:call-template name="get-table-number"/> +</xsl:template> + +<xsl:template name="get-figure-number"> + <xsl:choose> + <xsl:when test="@anchor!='' or @title or name"> + <xsl:number level="any" count="figure[@anchor!='' or @title or name]" /> + </xsl:when> + <xsl:otherwise> + <xsl:text>u.</xsl:text> + <xsl:number level="any" count="figure[(not(@anchor) or @anchor='') and not(@title) and not(name)]" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-figure-anchor"> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>figure.</xsl:text> + <xsl:call-template name="get-figure-number"/> +</xsl:template> + +<!-- reformat contents of author/@initials --> +<xsl:template name="format-initials"> + <xsl:param name="initials" select="@initials"/> + + <xsl:variable name="computed-initials"> + <xsl:choose> + <xsl:when test="normalize-space($initials)!=''"> + <xsl:value-of select="$initials"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-author-initials"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="normalized" select="normalize-space($computed-initials)"/> + + 
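<!-- Illustrative behaviour of the normalization below: "J" becomes "J." and "J..F." becomes "J.F." (via t-format-initials); when the result differs from @initials a warning is emitted. --> +  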
<xsl:choose> + <xsl:when test="$normalized=''"> + <!-- nothing to do --> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="r"> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="remainder" select="$normalized"/> + </xsl:call-template> + </xsl:variable> + + <xsl:if test="$r!=@initials"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@initials '<xsl:value-of select="@initials"/>': did you mean '<xsl:value-of select="$r"/>'?</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:value-of select="$r"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="t-format-initials"> + <xsl:param name="have"/> + <xsl:param name="remainder"/> + + <xsl:variable name="first" select="substring($remainder,1,1)"/> + <xsl:variable name="prev" select="substring($have,string-length($have))"/> + +<!--<xsl:message> +have: <xsl:value-of select="$have"/> +remainder: <xsl:value-of select="$remainder"/> +first: <xsl:value-of select="$first"/> +prev: <xsl:value-of select="$prev"/> +</xsl:message>--> + + <xsl:choose> + <xsl:when test="$remainder='' and $prev!='.'"> + <xsl:value-of select="concat($have,'.')"/> + </xsl:when> + <xsl:when test="$remainder=''"> + <xsl:value-of select="$have"/> + </xsl:when> + <xsl:when test="$prev='.' and $first='.'"> + <!-- repeating dots --> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="have" select="$have"/> + <xsl:with-param name="remainder" select="substring($remainder,2)"/> + </xsl:call-template> + </xsl:when> + <!-- missing dot before '-' --> +<!-- <xsl:when test="$prev!='.' and $first='-'"> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="have" select="concat($have,'.-')"/> + <xsl:with-param name="remainder" select="substring($remainder,2)"/> + </xsl:call-template> + </xsl:when>--> + <!-- missing space after '.' --> +<!-- <xsl:when test="$prev='.' 
and $first!=' '"> +      <xsl:call-template name="t-format-initials"> +        <xsl:with-param name="have" select="concat($have,' ',$first)"/> +        <xsl:with-param name="remainder" select="substring($remainder,2)"/> +      </xsl:call-template> +    </xsl:when>--> +    <xsl:otherwise> +      <xsl:call-template name="t-format-initials"> +        <xsl:with-param name="have" select="concat($have,$first)"/> +        <xsl:with-param name="remainder" select="substring($remainder,2)"/> +      </xsl:call-template> +    </xsl:otherwise> +  </xsl:choose> + +</xsl:template> + +<xsl:template name="truncate-initials"> +  <xsl:param name="initials"/> +  <xsl:variable name="local-multiple-initials"> +    <xsl:call-template name="parse-pis"> +      <xsl:with-param name="nodes" select="../../processing-instruction('rfc')|../processing-instruction('rfc')|./processing-instruction('rfc')"/> +      <xsl:with-param name="attr" select="'multiple-initials'"/> +    </xsl:call-template> +  </xsl:variable> +  <xsl:variable name="use-multiple-initials"> +    <xsl:choose> +      <xsl:when test="$local-multiple-initials!=''"> +        <xsl:value-of select="$local-multiple-initials"/> +      </xsl:when> +      <xsl:otherwise> +        <xsl:value-of select="$xml2rfc-multiple-initials"/> +      </xsl:otherwise> +    </xsl:choose> +  </xsl:variable> +  <xsl:choose> +    <xsl:when test="normalize-space($initials)=''"/> +    <xsl:when test="$use-multiple-initials='yes'"> +      <xsl:value-of select="$initials"/> +    </xsl:when> +    <xsl:otherwise> +      <xsl:value-of select="concat(substring-before($initials,'.'),'.')"/> +    </xsl:otherwise> +  </xsl:choose> +</xsl:template> + +<!-- see https://chromium-i18n.appspot.com/ssl-address --> +<countries xmlns="mailto:julian.reschke@greenbytes.de?subject=rfc2629.xslt"> +  <c c2="AR" c3="ARG" sn="Argentina" fmt="%A%n%Z %C%n%S"/> +  <c c2="AU" c3="AUS" sn="Australia" fmt="%A%n%C %S %Z"/> +  <c c2="AT" c3="AUT" sn="Austria" fmt="%A%n%Z %C"/> +  <c c2="BE" c3="BEL" sn="Belgium" fmt="%A%n%Z %C"/> +  <c c2="BR" c3="BRA" sn="Brazil" fmt="%A%n%D%n%C-%S%n%Z"/> +  <c c2="CA" c3="CAN" sn="Canada" fmt="%A%n%C %S %Z"/> +  <c c2="CL" c3="CHL" sn="Chile" fmt="%A%n%Z %C%n%S"/> +  <c c2="CN" c3="CHN" sn="China" fmt="%Z%n%S%C%D%n%A"/> +  <c c2="HR" c3="HRV" sn="Croatia" fmt="%A%n%Z %C" postprefix="HR-"/> +  <c c2="CZ" c3="CZE" sn="Czechia" fmt="%A%n%Z %C"/> +  <c c2="DK" c3="DNK" sn="Denmark" fmt="%A%n%Z %C"/> +  <c c2="DE" c3="DEU" sn="Germany" fmt="%A%n%Z %C"/> +  <c c2="GR" c3="GRC" sn="Greece" fmt="%A%n%Z %C"/> +  <c c2="FI" c3="FIN" sn="Finland" fmt="%A%n%Z %C" postprefix="FI-"/> +  <c c2="FR" c3="FRA" sn="France" fmt="%A%n%Z %C"/> +  <c c2="HU" c3="HUN" sn="Hungary" fmt="%C%n%A%n%Z"/> +  <c c2="IN" c3="IND" sn="India" fmt="%A%n%C %Z%n%S"/> +  <c c2="IE" c3="IRL" sn="Ireland" fmt="%A%n%D%n%C%n%S %Z"/> +  <c c2="IL" c3="ISR" sn="Israel" fmt="%A%n%C %Z"/> +  <c c2="IT" c3="ITA" sn="Italy" fmt="%A%n%Z %C %S"/> +  <c c2="JP" c3="JPN" sn="Japan" fmt="%Z%n%S%n%A" postprefix="&#12306;"/> +  <c c2="KR" c3="KOR" sn="Korea (the Republic of)" fmt="%A%n%C, %S %Z"/> +  <c c2="LU" c3="LUX" sn="Luxembourg" fmt="%A%n%Z %C" postprefix="L-"/> +  <c c2="MU" c3="MUS" sn="Mauritius" fmt="%A%n%Z%n%C"/> +  <c c2="MX" c3="MEX" sn="Mexico" fmt="%A%n%D%n%Z %C, %S"/> +  <c c2="NL" c3="NLD" sn="Netherlands" fmt="%A%n%Z %C"/> +  <c c2="NZ" c3="NZL" sn="New Zealand" fmt="%A%n%D%n%C %Z"/> +  <c c2="NO" c3="NOR" sn="Norway" fmt="%A%n%Z %C"/> +  <c c2="PL" c3="POL" sn="Poland" fmt="%A%n%Z %C"/> +  <c c2="PT" c3="PRT" sn="Portugal" fmt="%A%n%Z %C"/> +  <c c2="RO" c3="ROU" sn="Romania" fmt="%A%n%Z %C"/> +  <c c2="RU" c3="RUS" sn="Russian Federation" fmt="%A%n%C%n%S%n%Z"/> +  <c c2="SG" c3="SGP" 
sn="Singapore" fmt="%A%n%Z" postprefix="SINGAPORE "/> + <c c2="SK" c3="SVK" sn="Slovakia" fmt="%A%n%Z %C"/> + <c c2="SI" c3="SVN" sn="Slovenia" fmt="%A%n%Z %C" postprefix="SI-"/> + <c c2="ES" c3="ESP" sn="Spain" fmt="%A%n%Z %C %S"/> + <c c2="SE" c3="SWE" sn="Sweden" fmt="%A%n%Z %C" postprefix="SE-"/> + <c c2="CH" c3="CHE" sn="Switzerland" fmt="%A%n%Z %C" postprefix="CH-"/> + <c c2="TH" c3="THA" sn="Thailand" fmt="%A%n%D %C%n%S %Z"/> + <c c2="TR" c3="TUR" sn="Turkey" fmt="%A%n%Z %C/%S"/> + <c c2="GB" c3="GBR" sn="United Kingdom of Great Britain and Northern Ireland" alias1="UK" fmt="%A%n%C%n%Z"/> + <c c2="US" c3="USA" sn="United States of America" fmt="%A%n%C, %S %Z"/> + <c c2="UY" c3="URY" sn="Uruguay" fmt="%A%n%Z %C %S"/> +</countries> + +<xsl:template name="get-country-format"> + <xsl:param name="country"/> + <xsl:variable name="countries" select="document('')/*/myns:countries/myns:c"/> + + <xsl:variable name="short" select="translate(normalize-space(translate($country,'.','')),$lcase,$ucase)"/> + + <xsl:choose> + <xsl:when test="$countries[@sn=$country]"> + <!-- all good --> + <xsl:value-of select="$countries[@sn=$country]/@fmt"/> + </xsl:when> + <xsl:when test="$short=''"> + <!-- already warned --> + </xsl:when> + <xsl:when test="not($countries/@sn=$country) and ($countries/@c3=$short)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">'<xsl:value-of select="$country"/>' is not an ISO country short name, maybe you meant '<xsl:value-of select="$countries[@c3=$short]/@sn"/>'?</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not($countries/@sn=$country) and ($countries/@c2=$short)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">'<xsl:value-of select="$country"/>' is not an ISO country short name, maybe you meant '<xsl:value-of select="$countries[@c2=$short]/@sn"/>'?</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not($countries/@sn=$country) and ($countries/@alias1=$short)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">'<xsl:value-of select="$country"/>' is not an ISO country short name, maybe you meant '<xsl:value-of select="$countries[@alias1=$short]/@sn"/>'?</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="$countries[starts-with(translate(@sn,$lcase,$ucase),$short)]"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">'<xsl:value-of select="$country"/>' is not an ISO country short name, maybe you meant '<xsl:value-of select="$countries[starts-with(translate(@sn,$lcase,$ucase),$short)][1]/@sn"/>'? 
(lookup of short names: https://www.iso.org/obp/ui/)</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">ISO country short name '<xsl:value-of select="$country"/>' unknown (lookup of short names: https://www.iso.org/obp/ui/)</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-country-postprefix"> + <xsl:param name="country"/> + <xsl:variable name="countries" select="document('')/*/myns:countries/myns:c"/> + <xsl:value-of select="$countries[@sn=$country]/@postprefix"/> +</xsl:template> + +<xsl:template name="extract-normalized"> + <xsl:param name="node" select="."/> + <xsl:param name="ascii" select="false()"/> + + <xsl:variable name="name" select="local-name($node)"/> + + <xsl:variable name="n"> + <xsl:choose> + <xsl:when test="$ascii and $node/@ascii!=''"> + <xsl:value-of select="$node/@ascii"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$node"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="text" select="normalize-space($n)"/> + <xsl:if test="string-length($n) != string-length($text)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">excessive whitespace in <xsl:value-of select="$name"/>: '<xsl:value-of select="$n"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$text=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">missing text in <xsl:value-of select="$name"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:value-of select="$text"/> +</xsl:template> + +<!-- checking for email element --> +<xsl:template name="extract-email"> + <xsl:variable name="email" select="normalize-space(.)"/> + <xsl:if test="contains($email,' ')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">whitespace in email address: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="email2"> + <xsl:choose> + <xsl:when test="starts-with($email,'mailto:')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">email should not include URI scheme: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="substring($email, 1 + string-length('mailto:'))"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$email"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="$email2"/> +</xsl:template> + +<!-- checking for uri element --> +<xsl:template name="extract-uri"> + <xsl:variable name="uri" select="normalize-space(.)"/> + <xsl:if test="string-length(.) 
!= string-length($uri) or contains($uri,' ')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">excessive whitespace in URI: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="$uri=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">URI is empty</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:value-of select="$uri"/> +</xsl:template> + +<xsl:template name="insert-conditional-pagebreak"> + <xsl:if test="$xml2rfc-compact!='yes'"> + <xsl:attribute name="class">np</xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template name="insert-conditional-hrule"> + <xsl:if test="$xml2rfc-compact!='yes'"> + <hr class="{$css-noprint}" /> + </xsl:if> +</xsl:template> + +<!-- get text content from marked-up text --> + +<xsl:template match="text()" mode="get-text-content"> + <xsl:value-of select="normalize-space(.)"/> +</xsl:template> + +<xsl:template match="br" mode="get-text-content"> + <xsl:text> </xsl:text> +</xsl:template> + +<xsl:template match="*" mode="get-text-content"> + <xsl:apply-templates mode="get-text-content"/> +</xsl:template> + +<xsl:template match="ed:del" mode="get-text-content"> +</xsl:template> + +<!-- parsing of processing instructions --> +<xsl:template name="parse-pis"> + <xsl:param name="nodes"/> + <xsl:param name="attr"/> + <xsl:param name="sep"/> + <xsl:param name="ret"/> + <xsl:param name="default"/> + <xsl:param name="duplicate-warning" select="'yes'"/> + + <xsl:choose> + <xsl:when test="count($nodes)=0"> + <xsl:choose> + <xsl:when test="$ret!=''"> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$default"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="ret2"> + <xsl:for-each select="$nodes[1]"> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="."/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$ret"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + </xsl:for-each> + </xsl:variable> + + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$nodes[position()!=1]"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$ret2"/> + <xsl:with-param name="default" select="$default"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="parse-one-pi"> + <xsl:param name="str"/> + <xsl:param name="attr"/> + <xsl:param name="sep"/> + <xsl:param name="ret"/> + <xsl:param name="duplicate-warning"/> + + <xsl:variable name="str2"> + <xsl:call-template name="eat-leading-whitespace"> + <xsl:with-param name="str" select="$str"/> + </xsl:call-template> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$str2=''"> + <!-- done --> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="attrname" select="substring-before($str2,'=')"/> + + <xsl:choose> + <xsl:when test="$attrname=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">bad PI syntax: <xsl:value-of select="$str2"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="remainder" select="substring($str2,2+string-length($attrname))"/> + <xsl:choose> + <xsl:when 
test="string-length($remainder) &lt; 2"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">bad PI value syntax: <xsl:value-of select="$remainder"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="rem"> + <xsl:call-template name="eat-leading-whitespace"> + <xsl:with-param name="str" select="$remainder"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="qchars">&apos;&quot;</xsl:variable> + <xsl:variable name="qchar" select="substring($rem,1,1)"/> + <xsl:variable name="rem2" select="substring($rem,2)"/> + <xsl:choose> + <xsl:when test="not(contains($qchars,$qchar))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">pseudo-attribute value needs to be quoted: <xsl:value-of select="$rem"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:when test="not(contains($rem2,$qchar))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unmatched quote in: <xsl:value-of select="$rem2"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="value" select="substring-before($rem2,$qchar)"/> + + <!-- check pseudo-attribute names --> + <xsl:if test="name()='rfc-ext' and $attr='SANITYCHECK'"> + <xsl:choose> + <xsl:when test="$attrname='abort-on'"/> + <xsl:when test="$attrname='allow-markup-in-artwork'"/> + <xsl:when test="$attrname='authors-section'"/> + <xsl:when test="$attrname='check-artwork-width'"/> + <xsl:when test="$attrname='css-contents'"/> + <xsl:when test="$attrname='css-resource'"/> + <xsl:when test="$attrname='duplex'"/> + <xsl:when test="$attrname='html-pretty-print'"/> + <xsl:when test="$attrname='include-index'"/> + <xsl:when test="$attrname='include-references-in-index'"/> + <xsl:when test="$attrname='internet-draft-uri'"/> + <xsl:when test="$attrname='justification'"/> + <xsl:when test="$attrname='log-level'"/> + <xsl:when test="$attrname='paragraph-links'"/> + <xsl:when test="$attrname='parse-xml-in-artwork'"/> + <xsl:when test="$attrname='refresh-from'"/> + <xsl:when test="$attrname='refresh-interval'"/> + <xsl:when test="$attrname='refresh-xslt'"/> + <xsl:when test="$attrname='rfc-uri'"/> + <xsl:when test="$attrname='sec-no-trailing-dots'"/> + <xsl:when test="$attrname='trace-parse-xml'"/> + <xsl:when test="$attrname='ucd-file'"/> + <xsl:when test="$attrname='use-system-time'"/> + <xsl:when test="$attrname='vspace-pagebreak'"/> + <xsl:when test="$attrname='xml2rfc-backend'"/> + <xsl:when test="$attrname='xref-with-text-generate'"/> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unsupported rfc-ext pseudo-attribute '<xsl:value-of select="$attrname"/>'</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:if test="name()='rfc' and $attr='SANITYCHECK'"> + <xsl:choose> + <xsl:when test="$attrname='authorship'"/> + <xsl:when test="$attrname='comments'"/> + <xsl:when test="$attrname='compact'"/> + <xsl:when test="$attrname='docmapping'"> + <xsl:if test="$value!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">the rfc docmapping pseudo-attribute with values other than 'yes' in not supported by this processor.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="$attrname='editing'"/> + <xsl:when test="$attrname='footer'"/> + <xsl:when test="$attrname='header'"/> + <xsl:when test="$attrname='include'"> + 
<xsl:choose> + <xsl:when test="not(parent::references)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">the rfc include pseudo-attribute (unless a child node of &lt;references&gt;) is not supported by this processor, see http://greenbytes.de/tech/webdav/rfc2629xslt/rfc2629xslt.html#examples.internalsubset for alternative syntax.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">the rfc include pseudo-attribute is only partially supported by this processor, see http://greenbytes.de/tech/webdav/rfc2629xslt/rfc2629xslt.html#examples.internalsubset for alternative syntax.</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$attrname='inline'"/> + <xsl:when test="$attrname='iprnotified'"/> + <xsl:when test="$attrname='linefile'"/> + <xsl:when test="$attrname='linkmailto'"/> + <xsl:when test="$attrname='multiple-initials'"/> + <xsl:when test="$attrname='private'"/> + <xsl:when test="$attrname='rfcedstyle'"/> + <xsl:when test="$attrname='sortrefs'"/> + <xsl:when test="$attrname='subcompact'"/> + <xsl:when test="$attrname='strict'"/> + <xsl:when test="$attrname='symrefs'"/> + <xsl:when test="$attrname='toc'"/> + <xsl:when test="$attrname='tocdepth'"/> + <xsl:when test="$attrname='tocindent'"> + <xsl:if test="$value!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">the rfc tocindent pseudo-attribute with values other than 'yes' in not supported by this processor.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="info"> + <xsl:with-param name="msg">unsupported rfc pseudo-attribute '<xsl:value-of select="$attrname"/>'</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:choose> + <xsl:when test="$attrname != $attr"> + <!-- pseudo-attr does not match, continue --> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="substring($rem2, 2 + string-length($value))"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$ret"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$sep='' and $ret!=''"> + <!-- pseudo-attr does match, but we only want one value --> + <xsl:if test="$ret != $value and $duplicate-warning='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">duplicate pseudo-attribute <xsl:value-of select="$attr"/>, overwriting value <xsl:value-of select="$ret"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="substring($rem2, 2 + string-length($value))"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$value"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- pseudo-attr does match --> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="substring($rem2, 2 + string-length($value))"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + <xsl:with-param name="ret"> + <xsl:choose> + <xsl:when test="$ret!=''"> + <xsl:value-of 
select="concat($ret,$sep,$value)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$value"/> + </xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + + </xsl:choose> + + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="eat-leading-whitespace"> + <xsl:param name="str"/> + + <xsl:choose> + <xsl:when test="$str=''"> + </xsl:when> + <xsl:when test="translate(substring($str,1,1),' &#10;&#13;&#9;',' ')=' '"> + <xsl:call-template name="eat-leading-whitespace"> + <xsl:with-param name="str" select="substring($str,2)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$str"/> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<!-- diag support --> +<xsl:template name="lineno"> + <xsl:if test="function-available('saxon-old:line-number')" use-when="function-available('saxon-old:line-number')"> + <xsl:if test="saxon-old:line-number() > 0"> + <xsl:text> (at line </xsl:text> + <xsl:value-of select="saxon-old:line-number()"/> + <xsl:if test="function-available('saxon-old:systemId')"> + <xsl:variable name="rootsys"> + <xsl:for-each select="/*"> + <xsl:value-of select="saxon-old:systemId()"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$rootsys != saxon-old:systemId()"> + <xsl:text> of </xsl:text> + <xsl:value-of select="saxon-old:systemId()"/> + </xsl:if> + </xsl:if> + <xsl:text>)</xsl:text> + </xsl:if> + </xsl:if> + <xsl:if test="function-available('saxon:line-number')" use-when="function-available('saxon:line-number')"> + <xsl:if test="saxon:line-number() > 0"> + <xsl:text> (at line </xsl:text> + <xsl:value-of select="saxon:line-number()"/> + <xsl:if test="function-available('saxon:systemId')"> + <xsl:variable name="rootsys"> + <xsl:for-each select="/*"> + <xsl:value-of select="saxon:systemId()"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$rootsys != saxon:systemId()"> + <xsl:text> of </xsl:text> + <xsl:value-of select="saxon:systemId()"/> + </xsl:if> + </xsl:if> + <xsl:text>)</xsl:text> + </xsl:if> + </xsl:if> +</xsl:template> + +<!-- define exslt:node-set for msxml --> +<msxsl:script language="JScript" implements-prefix="exslt"> + this['node-set'] = function (x) { + return x; + } +</msxsl:script> + +<!-- date handling --> + +<msxsl:script language="JScript" implements-prefix="date"> + function twodigits(s) { + return s &lt; 10 ? "0" + s : s; + } + + this['date-time'] = function (x) { + var now = new Date(); + var offs = now.getTimezoneOffset(); + return now.getFullYear() + "-" + + twodigits(1 + now.getMonth()) + "-" + + twodigits(now.getDate()) + "T" + + twodigits(now.getHours()) + ":" + + twodigits(now.getMinutes()) + ":" + + twodigits(now.getSeconds()) + + (offs >= 0 ? 
"-" : "+") + + twodigits(Math.abs(offs) / 60) + ":" + + twodigits(Math.abs(offs) % 60); + } +</msxsl:script> + +<xsl:variable name="current-year"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-use-system-time='no'"/> + <xsl:when test="function-available('date:date-time')" use-when="function-available('date:date-time')"> + <xsl:value-of select="substring-before(date:date-time(),'-')"/> + </xsl:when> + <xsl:when test="function-available('current-date')"> + <xsl:value-of select="substring-before(string(current-date()),'-')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="current-month"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-use-system-time='no'"/> + <xsl:when test="function-available('date:date-time')" use-when="function-available('date:date-time')"> + <xsl:value-of select="substring-before(substring-after(date:date-time(),'-'),'-')"/> + </xsl:when> + <xsl:when test="function-available('current-date')"> + <xsl:value-of select="substring-before(substring-after(string(current-date()),'-'),'-')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="current-day"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-use-system-time='no'"/> + <xsl:when test="function-available('date:date-time')" use-when="function-available('date:date-time')"> + <xsl:value-of select="substring-after(substring-after(substring-before(date:date-time(),'T'),'-'),'-')"/> + </xsl:when> + <xsl:when test="function-available('current-dateTime')"> + <xsl:value-of select="substring-after(substring-after(substring-before(string(current-dateTime()),'T'),'-'),'-')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="may-default-dates"> + <xsl:choose> + <xsl:when test="$current-year!='' and $current-month!='' and $current-day!=''"> + <xsl:variable name="year-specified" select="/rfc/front/date/@year and /rfc/front/date/@year!=''"/> + <xsl:variable name="month-specified" select="/rfc/front/date/@month and /rfc/front/date/@month!=''"/> + <xsl:variable name="day-specified" select="/rfc/front/date/@day and /rfc/front/date/@day!=''"/> + <xsl:variable name="system-month"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$current-month"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$year-specified and /rfc/front/date/@year!=$current-year">Specified year <xsl:value-of select="/rfc/front/date/@year"/> does not match system date (<xsl:value-of select="$current-year"/>)</xsl:when> + <xsl:when test="$month-specified and /rfc/front/date/@month!=$system-month">Specified month <xsl:value-of select="/rfc/front/date/@month"/> does not match system date (<xsl:value-of select="$system-month"/>)</xsl:when> + <xsl:when test="$day-specified and /rfc/front/date/@day!=$current-day">Specified day does not match system date</xsl:when> + <xsl:when test="not($year-specified) and ($month-specified or $day-specified)">Can't default year when month or day is specified</xsl:when> + <xsl:when test="not($month-specified) and $day-specified">Can't default month when day is specified</xsl:when> + <xsl:otherwise>yes</xsl:otherwise> + </xsl:choose> + </xsl:when> + <!-- may, but won't --> + <xsl:otherwise>yes</xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<xsl:param name="xml2rfc-ext-pub-year"> + <xsl:choose> + <xsl:when test="/rfc/front/date/@year and /rfc/front/date/@year!=''"> + <xsl:value-of select="/rfc/front/date/@year"/> + </xsl:when> + <xsl:when test="$current-year!='' and 
$may-default-dates='yes'"> + <xsl:value-of select="$current-year"/> + </xsl:when> + <xsl:when test="$current-year!='' and $may-default-dates!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="$may-default-dates"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'/rfc/front/date/@year missing (and XSLT processor cannot compute the system date)'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<xsl:param name="xml2rfc-ext-pub-month"> + <xsl:choose> + <xsl:when test="/rfc/front/date/@month and /rfc/front/date/@month!=''"> + <xsl:variable name="m" select="/rfc/front/date/@month"/> + <xsl:choose> + <xsl:when test="string(number($m))!='NaN' and number($m) &gt; 0 and number($m) &lt; 13"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$m"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$m"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$current-month!='' and $may-default-dates='yes'"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$current-month"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$current-month!='' and $may-default-dates!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="$may-default-dates"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'/rfc/front/date/@month missing (and XSLT processor cannot compute the system date)'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<xsl:param name="pub-month-numeric"> + <xsl:call-template name="get-month-as-num"> + <xsl:with-param name="month" select="$xml2rfc-ext-pub-month" /> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-pub-day"> + <xsl:choose> + <xsl:when test="/rfc/front/date/@day and /rfc/front/date/@day!=''"> + <xsl:value-of select="/rfc/front/date/@day"/> + </xsl:when> + <xsl:when test="$current-day!='' and $may-default-dates='yes'"> + <xsl:value-of select="$current-day"/> + </xsl:when> + <xsl:otherwise /> <!-- harmless, we just don't have it --> + </xsl:choose> +</xsl:param> + +<xsl:param name="pub-yearmonth"> + <!-- year or 0000 --> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-pub-year!=''"> + <xsl:value-of select="format-number($xml2rfc-ext-pub-year,'0000')"/> + </xsl:when> + <xsl:otherwise>0000</xsl:otherwise> + </xsl:choose> + <!-- month or 00 --> + <xsl:choose> + <xsl:when test="number($pub-month-numeric) &gt; 0"> + <xsl:value-of select="format-number($pub-month-numeric,'00')"/> + </xsl:when> + <xsl:otherwise>00</xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- <u> element --> +<xsl:template name="convert-u-compact-remainder"> + <xsl:param name="f"/> + <xsl:choose> + <xsl:when test="contains($f,'-')"> + <xsl:text>{</xsl:text> + <xsl:value-of select="substring-before($f,'-')"/> + <xsl:text>}, </xsl:text> + <xsl:call-template name="convert-u-compact-remainder"> + <xsl:with-param name="f" select="substring-after($f,'-')"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:text>{</xsl:text> + <xsl:value-of select="$f"/> + <xsl:text>}</xsl:text> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="convert-u-compact"> + <xsl:param name="f"/> + <xsl:choose> + <xsl:when test="contains($f,'-')"> + <xsl:text>{</xsl:text> + <xsl:value-of 
select="substring-before($f,'-')"/> + <xsl:text>}</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>{</xsl:text> + <xsl:value-of select="$f"/> + <xsl:text>}</xsl:text> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="contains($f,'-')"> + <xsl:text> (</xsl:text> + <xsl:call-template name="convert-u-compact-remainder"> + <xsl:with-param name="f" select="substring-after($f,'-')"/> + </xsl:call-template> + <xsl:text>)</xsl:text> + </xsl:if> +</xsl:template> + +<xsl:template name="u-hex2"> + <xsl:param name="n"/> + <xsl:variable name="hex">0123456789ABCDEF</xsl:variable> + + <xsl:if test="$n &gt;= 16"> + <xsl:call-template name="u-hex2"> + <xsl:with-param name="n" select="floor($n div 16)"/> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="substring($hex, 1 + ($n mod 16), 1)"/> +</xsl:template> + +<xsl:template name="u-hex"> + <xsl:param name="n"/> + <xsl:variable name="t"> + <xsl:call-template name="u-hex2"> + <xsl:with-param name="n" select="$n"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="string-length($t) &lt; 4"> + <xsl:value-of select="substring('0000',1,4-string-length($t))"/> + </xsl:if> + <xsl:value-of select="$t"/> +</xsl:template> + +<xsl:template name="u-expanded-name"> + <xsl:param name="lit"/> + <xsl:choose> + <xsl:when test="string-length($lit)=0"></xsl:when> + <xsl:otherwise> + <xsl:call-template name="name-of-first-char"> + <xsl:with-param name="lit" select="$lit"/> + </xsl:call-template> + <xsl:if test="string-length($lit)!=1"> + <xsl:text>, </xsl:text> + <xsl:call-template name="u-expanded-name"> + <xsl:with-param name="lit" select="substring($lit,2)"/> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="name-of-first-char"> + <xsl:param name="lit"/> + <xsl:variable name="c" select="substring($lit,1,1)"/> + <xsl:variable name="map" select="//x:u-map/x:c[@c=$c][@d]"/> + <xsl:choose> + <xsl:when test="$map"> + <xsl:value-of select="$map/@d"/> + </xsl:when> + <xsl:when test="$xml2rfc-ext-ucd-file!='' and document($xml2rfc-ext-ucd-file)/x:u-map/x:c[@c=$c]"> + <xsl:value-of select="document($xml2rfc-ext-ucd-file)/x:u-map/x:c[@c=$c]/@d"/> + </xsl:when> + <xsl:otherwise> + <xsl:text>???</xsl:text> + <xsl:call-template name="error"> + <xsl:with-param name="msg"> + <xsl:text>'name' expansion for &lt;u>: character '</xsl:text> + <xsl:value-of select="$c"/> + <xsl:text>' requires local definition using x:u-map or local UCD mapping file </xsl:text> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-ucd-file=''">which can be specified using the 'ucd-file' directive</xsl:when> + <xsl:otherwise>'<xsl:value-of select="$xml2rfc-ext-ucd-file"/>'</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="u-expanded-num"> + <xsl:param name="lit"/> + <xsl:choose> + <xsl:when test="string-length($lit)=0"></xsl:when> + <xsl:otherwise> + <xsl:text>U+</xsl:text> + <xsl:variable name="n"> + <xsl:call-template name="codepoint-of-first-char"> + <xsl:with-param name="lit" select="$lit"/> + </xsl:call-template> + </xsl:variable> + <xsl:call-template name="u-hex"> + <xsl:with-param name="n" select="$n"/> + </xsl:call-template> + <xsl:if test="string-length($lit)!=1"> + <xsl:text> </xsl:text> + <xsl:call-template name="u-expanded-num"> + <xsl:with-param name="lit" select="substring($lit,2)"/> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="codepoint-of-first-char"> + 
<xsl:param name="lit"/> + <xsl:variable name="c" select="substring($lit,1,1)"/> + <xsl:choose> + <xsl:when test="not(function-available('string-to-codepoints'))"> + <xsl:variable name="map" select="//x:u-map/x:c[@c=$c]"/> + <xsl:variable name="asciistring"> !"#$%&amp;'()*+,-./<xsl:value-of select="$digits"/>:;&lt;=>?@<xsl:value-of select="$ucase"/>[\]^_`<xsl:value-of select="$lcase"/>{|}~&#127;</xsl:variable> + <xsl:choose> + <xsl:when test="contains($asciistring,$c)"> + <xsl:value-of select="32 + string-length(substring-before($asciistring,$c))"/> + </xsl:when> + <xsl:when test="$map"> + <xsl:value-of select="number($map/@n)"/> + </xsl:when> + <xsl:when test="$xml2rfc-ext-ucd-file!='' and document($xml2rfc-ext-ucd-file)/x:u-map/x:c[@c=substring($lit,1,1)]"> + <xsl:value-of select="document($xml2rfc-ext-ucd-file)/x:u-map/x:c[@c=substring($lit,1,1)]/@n"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg">'<xsl:value-of select="$xml2rfc-ext-ucd-file"/>' + <xsl:text>'num' expansion for &lt;u>: character '</xsl:text> + <xsl:value-of select="$c"/> + <xsl:text>' requires XSLT 2, local definition using x:u-map, or local UCD mapping file </xsl:text> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-ucd-file=''">which can be specified using the 'ucd-file' directive</xsl:when> + <xsl:otherwise>'<xsl:value-of select="$xml2rfc-ext-ucd-file"/>'</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + <xsl:value-of select="number(65533)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="number(string-to-codepoints($c))"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="u-expanded"> + <xsl:param name="format"/> + <xsl:param name="lit"/> + <xsl:param name="ascii"/> + + <xsl:choose> + <xsl:when test="starts-with($format,'{') and contains($format,'}')"> + <xsl:variable name="tok" select="substring(substring-before($format,'}'),2)"/> + <xsl:choose> + <xsl:when test="$tok='lit'"> + <xsl:text>"</xsl:text> + <xsl:value-of select="$lit"/> + <xsl:text>"</xsl:text> + </xsl:when> + <xsl:when test="$tok='ascii'"> + <xsl:text>"</xsl:text> + <xsl:value-of select="$ascii"/> + <xsl:text>"</xsl:text> + </xsl:when> + <xsl:when test="$tok='name'"> + <xsl:call-template name="u-expanded-name"> + <xsl:with-param name="lit" select="$lit"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$tok='num'"> + <xsl:call-template name="u-expanded-num"> + <xsl:with-param name="lit" select="$lit"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown expansion for &lt;u>: <xsl:value-of select="$tok"/></xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:call-template name="u-expanded"> + <xsl:with-param name="format" select="substring-after($format,'}')"/> + <xsl:with-param name="lit" select="$lit"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="starts-with($format,'{')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Broken format string for &lt;u>: <xsl:value-of select="$format"/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(contains($format,'{'))"> + <xsl:value-of select="$format"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="s" select="substring-before($format,'{')"/> + <xsl:value-of select="$s"/> + <xsl:call-template name="u-expanded"> + <xsl:with-param name="format" select="substring($format, 1+string-length($s))"/> + 
<xsl:with-param name="lit" select="$lit"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="emit-u"> + <xsl:variable name="format"> + <xsl:choose> + <xsl:when test="@format!=''"> + <xsl:value-of select="@format"/> + </xsl:when> + <xsl:otherwise>lit-name-num</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="f"> + <xsl:choose> + <xsl:when test="translate($format,concat($lcase,'-'),'')=''"> + <!-- compact notation --> + <xsl:call-template name="convert-u-compact"> + <xsl:with-param name="f" select="$format"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$format"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:call-template name="u-expanded"> + <xsl:with-param name="format" select="$f"/> + <xsl:with-param name="lit" select="."/> + <xsl:with-param name="ascii" select="@ascii"/> + </xsl:call-template> +</xsl:template> + +<xsl:template match="u"> + <xsl:call-template name="emit-u"/> +</xsl:template> + +<xsl:template match="x:u-map"/> + +<!-- simple validation support --> + +<xsl:template match="*" mode="validate"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="@*" mode="validate"/> + +<xsl:template name="validation-error"> + <xsl:param name="additionalDiagnostics"/> + <xsl:variable name="pname"> + <xsl:if test="namespace-uri(..)!=''"> + <xsl:value-of select="concat('{',namespace-uri(..),'}')"/> + </xsl:if> + <xsl:value-of select="local-name(..)"/> + </xsl:variable> + <xsl:variable name="cname"> + <xsl:if test="namespace-uri(.)!=''"> + <xsl:value-of select="concat('{',namespace-uri(.),'}')"/> + </xsl:if> + <xsl:value-of select="local-name(.)"/> + </xsl:variable> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat($cname,' not allowed inside ',$pname,$additionalDiagnostics)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> +</xsl:template> + +<!-- artwork/sourcecode element --> +<xsl:template match="blockquote/artwork | figure/artwork | figure/ed:replace/ed:*/artwork | section/artwork | li/artwork | dd/artwork | artset/artwork" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="blockquote/sourcecode | figure/sourcecode | figure/ed:replace/ed:*/sourcecode | section/sourcecode | li/sourcecode | dd/sourcecode | td/sourcecode" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="artwork|sourcecode" mode="validate"> + <xsl:call-template name="validation-error"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- author element --> +<xsl:template match="front/author" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="author" mode="validate"> + <xsl:call-template name="validation-error"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- li element --> +<xsl:template match="ol/li | ul/li" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="li" mode="validate"> + <xsl:call-template name="validation-error"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- list element --> +<xsl:template match="t/list | t/ed:replace/ed:*/list" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> 
+</xsl:template> +<xsl:template match="list" mode="validate"> + <xsl:call-template name="validation-error"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- dl element --> +<xsl:template match="abstract/dl | aside/dl | blockquote/dl | dd/dl | li/dl | note/dl | section/dl | td/dl | th/dl" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="dl" mode="validate"> + <xsl:call-template name="validation-error"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- t element --> +<xsl:template match="abstract/t | abstract/ed:replace/ed:*/t | + list/t | list/ed:replace/ed:*/t | + note/t | note/ed:replace/ed:*/t | + section/t | section/ed:replace/ed:*/t | + blockquote/t | + x:blockquote/t | x:blockquote/ed:replace/ed:*/t | + x:note/t | x:note/ed:replace/ed:*/t | + aside/t | + td/t | th/t | + x:lt/t | li/t | x:lt/ed:replace/ed:*/t | dd/t" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="t" mode="validate"> + <xsl:call-template name="validation-error"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- xref element --> +<xsl:template match="abstract//xref" mode="validate"> + <xsl:call-template name="validation-error"> + <xsl:with-param name="additionalDiagnostics"> (inside &lt;abstract>)</xsl:with-param> + </xsl:call-template> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<xsl:template name="check-no-text-content"> + <xsl:for-each select="text()"> + <xsl:if test="normalize-space(.)!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">No text content allowed inside &lt;<xsl:value-of select="local-name(..)"/>&gt;, but found: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> +</xsl:template> + +<xsl:template name="render-name"> + <xsl:param name="n"/> + <xsl:param name="strip-links" select="true()"/> + <xsl:variable name="t"> + <xsl:apply-templates select="$n"/> + </xsl:variable> + <xsl:choose> + <xsl:when test="not($strip-links)"> + <xsl:copy-of select="exslt:node-set($t)"/> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="exslt:node-set($t)" mode="strip-links"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="render-name-ref"> + <xsl:param name="n"/> + <xsl:variable name="t"> + <xsl:call-template name="render-name"> + <xsl:with-param name="n" select="$n"/> + </xsl:call-template> + </xsl:variable> + <xsl:apply-templates select="exslt:node-set($t)" mode="strip-ids-and-linebreaks"/> +</xsl:template> + +<!-- clean up links from HTML --> +<xsl:template match="comment()|@*" mode="strip-links"><xsl:copy/></xsl:template> +<xsl:template match="text()" mode="strip-links"><xsl:copy/></xsl:template> +<xsl:template match="*" mode="strip-links"> + <xsl:element name="{local-name()}"> + <xsl:apply-templates select="@*|node()" mode="strip-links" /> + </xsl:element> +</xsl:template> +<xsl:template match="a|xhtml:a" mode="strip-links" xmlns:xhtml="http://www.w3.org/1999/xhtml"> + <xsl:choose> + <xsl:when test="@id"> + <span id="{@id}"> + <xsl:apply-templates select="node()" mode="strip-links" /> + </span> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="node()" mode="strip-links" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="node()|@*" mode="strip-ids-and-linebreaks"> + <xsl:copy> + <xsl:apply-templates 
select="node()|@*" mode="strip-ids-and-linebreaks" /> + </xsl:copy> +</xsl:template> +<xsl:template match="xhtml:br" mode="strip-ids-and-linebreaks"> + <xsl:text> </xsl:text> +</xsl:template> +<xsl:template match="@id" mode="strip-ids-and-linebreaks"/> + + +<!-- customization: these templates can be overridden in an XSLT that imports from this one --> +<xsl:template name="add-start-material"/> +<xsl:template name="add-end-material"/> + +</xsl:transform> diff --git a/test/fixtures/cache-tests/spec/lib/rfc2629-other.ent b/test/fixtures/cache-tests/spec/lib/rfc2629-other.ent new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/rfc2629-other.ent @@ -0,0 +1,61 @@ +<!-- rfc2629-other.ent + + Character entity set for RFC-2629 source xml documents. + There is no need to invoke this directly from + the source document itself; just invoke the DTD. + + These are unknown to (X)HTML, so any converter to these + formats must either substitute these entities + or invoke this file from the output file. + + Conversion to txt or nroff format will replace + these entities by the parenthesized text. + + Typical invocation: + <!ENTITY % rfc2629-other + PUBLIC "-//IETF//ENTITIES Other for RFC 2629//EN" + "http://xml.resource.org/authoring/rfc2629-other.ent"> + %rfc2629-other; +--> + +<!-- Magical --> +<!-- rfc.number (automatically expanded to content + of number="..." attribute + of <rfc> element, or to "XXXX") --> + +<!-- ASCII --> +<!ENTITY excl "&#33;"><!-- U+0021 EXCLAMATION MARK ("!") --> +<!ENTITY num "&#35;"><!-- U+0023 NUMBER SIGN ("#") --> +<!ENTITY dollar "&#36;"><!-- U+0024 DOLLAR SIGN ("$") --> +<!ENTITY percnt "&#37;"><!-- U+0025 PERCENT SIGN ("%") --> +<!ENTITY lpar "&#40;"><!-- U+0028 LEFT PARENTHESIS ("(") --> +<!ENTITY rpar "&#41;"><!-- U+0029 RIGHT PARENTHESIS (")") --> +<!ENTITY ast "&#42;"><!-- U+002A ASTERISK ("*") --> +<!ENTITY plus "&#43;"><!-- U+002B PLUS SIGN ("+") --> +<!ENTITY comma "&#44;"><!-- U+002C COMMA (",") --> +<!ENTITY hyphen "&#45;"><!-- U+002D HYPHEN-MINUS ("-") --> +<!ENTITY period "&#46;"><!-- U+002E FULL STOP (".") --> +<!ENTITY sol "&#47;"><!-- U+002F SOLIDUS ("/") --> +<!ENTITY colon "&#58;"><!-- U+003A COLON (":") --> +<!ENTITY semi "&#59;"><!-- U+003B SEMICOLON (";") --> +<!ENTITY equals "&#61;"><!-- U+003D EQUALS SIGN ("=") --> +<!ENTITY quest "&#63;"><!-- U+003F QUESTION MARK ("?") --> +<!ENTITY commat "&#64;"><!-- U+0040 COMMERCIAL AT ("@") --> +<!ENTITY lsqb "&#91;"><!-- U+005B LEFT SQUARE BRACKET ("[") --> +<!ENTITY bsol "&#92;"><!-- U+005C REVERSE SOLIDUS ("\\") --> +<!ENTITY rsqb "&#93;"><!-- U+005D RIGHT SQUARE BRACKET ("]") --> +<!ENTITY circ "&#94;"><!-- U+005E CIRCUMFLEX ACCENT ("^") --> +<!ENTITY lowbar "&#95;"><!-- U+005F LOW LINE ("_") --> +<!ENTITY grave "&#96;"><!-- U+0060 GRAVE ACCENT ("`") --> +<!ENTITY lcub "&#123;"><!-- U+007B LEFT CURLY BRACKET ("{") --> +<!ENTITY verbar "&#124;"><!-- U+007C VERTICAL LINE ("|") --> +<!ENTITY rcub "&#125;"><!-- U+007D RIGHT CURLY BRACKET ("}") --> + +<!-- Useful Unicode --> +<!ENTITY Zcaron "&#381;"><!-- U+017D LATIN CAPITAL LETTER Z WITH CARON ("Z") --> +<!ENTITY zcaron "&#382;"><!-- U+017E LATIN SMALL LETTER Z WITH CARON ("z") --> +<!ENTITY dash "&#8208;"><!-- U+2010 HYPHEN ("-") --> +<!ENTITY nbhy "&#8209;"><!-- U+2011 NON-BREAKING HYPHEN (special "-") --> +<!ENTITY wj "&#8288;"><!-- U+2060 WORD JOINER (special "") --> + +<!-- EOF --> diff --git a/test/fixtures/cache-tests/spec/lib/rfc2629-xhtml.ent b/test/fixtures/cache-tests/spec/lib/rfc2629-xhtml.ent new file mode 100755 --- /dev/null 
+++ b/test/fixtures/cache-tests/spec/lib/rfc2629-xhtml.ent @@ -0,0 +1,165 @@ +<!-- rfc2629-xhtml.ent + + Character entity set for RFC-2629 source xml documents. + There is no need to invoke this directly from + the source document itself; just invoke the DTD. + + These are known to (X)HTML, so any converter to these + formats can just leave them as is without having + to invoke this file from the output file. + + Conversion to txt or nroff format will replace + these entities by the parenthesized text. + + Typical invocation: + <!ENTITY % rfc2629-xhtml + PUBLIC "-//IETF//ENTITIES XHTML subset for RFC 2629//EN" + "http://xml.resource.org/authoring/rfc2629-xhtml.ent"> + %rfc2629-xhtml; +--> + +<!-- All of ISO Latin 1 --> +<!ENTITY nbsp "&#160;"><!-- U+00A0 NO-BREAK SPACE (special " ") --> +<!ENTITY iexcl "&#161;"><!-- U+00A1 INVERTED EXCLAMATION MARK ("!") --> +<!ENTITY cent "&#162;"><!-- U+00A2 CENT SIGN ("[cents]") --> +<!ENTITY pound "&#163;"><!-- U+00A3 POUND SIGN ("GBP") --> +<!ENTITY curren "&#164;"><!-- U+00A4 CURRENCY SIGN ("[currency units]") --> +<!ENTITY yen "&#165;"><!-- U+00A5 YEN SIGN ("JPY") --> +<!ENTITY brvbar "&#166;"><!-- U+00A6 BROKEN BAR ("|") --> +<!ENTITY sect "&#167;"><!-- U+00A7 SECTION SIGN ("S.") --> +<!ENTITY uml "&#168;"><!-- U+00A8 DIAERESIS ('"') --> +<!ENTITY copy "&#169;"><!-- U+00A9 COPYRIGHT SIGN ("(C)") --> +<!ENTITY ordf "&#170;"><!-- U+00AA FEMININE ORDINAL INDICATOR ("a") --> +<!ENTITY laquo "&#171;"><!-- U+00AB LEFT-POINTING DOUBLE ANGLE QUOTATION MARK ("<<") --> +<!ENTITY not "&#172;"><!-- U+00AC NOT SIGN ("[not]") --> +<!ENTITY shy "&#173;"><!-- U+00AD SOFT HYPHEN (ignored "") --> +<!ENTITY reg "&#174;"><!-- U+00AE REGISTERED SIGN ("(R)") --> +<!ENTITY macr "&#175;"><!-- U+00AF MACRON ("_") --> +<!ENTITY deg "&#176;"><!-- U+00B0 DEGREE SIGN ("o") --> +<!ENTITY plusmn "&#177;"><!-- U+00B1 PLUS-MINUS SIGN ("+/-") --> +<!ENTITY sup2 "&#178;"><!-- U+00B2 SUPERSCRIPT TWO ("^2") --> +<!ENTITY sup3 "&#179;"><!-- U+00B3 SUPERSCRIPT THREE ("^3") --> +<!ENTITY acute "&#180;"><!-- U+00B4 ACUTE ACCENT ("'") --> +<!ENTITY micro "&#181;"><!-- U+00B5 MICRO SIGN ("[micro]") --> +<!ENTITY para "&#182;"><!-- U+00B6 PILCROW SIGN ("P.") --> +<!ENTITY middot "&#183;"><!-- U+00B7 MIDDLE DOT (".") --> +<!ENTITY cedil "&#184;"><!-- U+00B8 CEDILLA (",") --> +<!ENTITY sup1 "&#185;"><!-- U+00B9 SUPERSCRIPT ONE ("^1") --> +<!ENTITY ordm "&#186;"><!-- U+00BA MASCULINE ORDINAL INDICATOR ("o") --> +<!ENTITY raquo "&#187;"><!-- U+00BB RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK (">>") --> +<!ENTITY frac14 "&#188;"><!-- U+00BC VULGAR FRACTION ONE QUARTER ("1/4") --> +<!ENTITY frac12 "&#189;"><!-- U+00BD VULGAR FRACTION ONE HALF ("1/2") --> +<!ENTITY frac34 "&#190;"><!-- U+00BE VULGAR FRACTION THREE QUARTERS ("3/4") --> +<!ENTITY iquest "&#191;"><!-- U+00BF INVERTED QUESTION MARK ("?") --> +<!ENTITY Agrave "&#192;"><!-- U+00C0 LATIN CAPITAL LETTER A WITH GRAVE ("A") --> +<!ENTITY Aacute "&#193;"><!-- U+00C1 LATIN CAPITAL LETTER A WITH ACUTE ("A") --> +<!ENTITY Acirc "&#194;"><!-- U+00C2 LATIN CAPITAL LETTER A WITH CIRCUMFLEX ("A") --> +<!ENTITY Atilde "&#195;"><!-- U+00C3 LATIN CAPITAL LETTER A WITH TILDE ("A") --> +<!ENTITY Auml "&#196;"><!-- U+00C4 LATIN CAPITAL LETTER A WITH DIAERESIS ("Ae") --> +<!ENTITY Aring "&#197;"><!-- U+00C5 LATIN CAPITAL LETTER A WITH RING ABOVE ("Ae") --> +<!ENTITY AElig "&#198;"><!-- U+00C6 LATIN CAPITAL LETTER AE ("AE") --> +<!ENTITY Ccedil "&#199;"><!-- U+00C7 LATIN CAPITAL LETTER C WITH CEDILLA ("C") --> +<!ENTITY Egrave "&#200;"><!-- U+00C8 LATIN 
CAPITAL LETTER E WITH GRAVE ("E") --> +<!ENTITY Eacute "&#201;"><!-- U+00C9 LATIN CAPITAL LETTER E WITH ACUTE ("E") --> +<!ENTITY Ecirc "&#202;"><!-- U+00CA LATIN CAPITAL LETTER E WITH CIRCUMFLEX ("E") --> +<!ENTITY Euml "&#203;"><!-- U+00CB LATIN CAPITAL LETTER E WITH DIAERESIS ("E") --> +<!ENTITY Igrave "&#204;"><!-- U+00CC LATIN CAPITAL LETTER I WITH GRAVE ("I") --> +<!ENTITY Iacute "&#205;"><!-- U+00CD LATIN CAPITAL LETTER I WITH ACUTE ("I") --> +<!ENTITY Icirc "&#206;"><!-- U+00CE LATIN CAPITAL LETTER I WITH CIRCUMFLEX ("I") --> +<!ENTITY Iuml "&#207;"><!-- U+00CF LATIN CAPITAL LETTER I WITH DIAERESIS ("I") --> +<!ENTITY ETH "&#208;"><!-- U+00D0 LATIN CAPITAL LETTER ETH ("[ETH]") --> +<!ENTITY Ntilde "&#209;"><!-- U+00D1 LATIN CAPITAL LETTER N WITH TILDE ("N") --> +<!ENTITY Ograve "&#210;"><!-- U+00D2 LATIN CAPITAL LETTER O WITH GRAVE ("O") --> +<!ENTITY Oacute "&#211;"><!-- U+00D3 LATIN CAPITAL LETTER O WITH ACUTE ("O") --> +<!ENTITY Ocirc "&#212;"><!-- U+00D4 LATIN CAPITAL LETTER O WITH CIRCUMFLEX ("O") --> +<!ENTITY Otilde "&#213;"><!-- U+00D5 LATIN CAPITAL LETTER O WITH TILDE ("O") --> +<!ENTITY Ouml "&#214;"><!-- U+00D6 LATIN CAPITAL LETTER O WITH DIAERESIS ("Oe") --> +<!ENTITY times "&#215;"><!-- U+00D7 MULTIPLICATION SIGN ("x") --> +<!ENTITY Oslash "&#216;"><!-- U+00D8 LATIN CAPITAL LETTER O WITH STROKE ("Oe") --> +<!ENTITY Ugrave "&#217;"><!-- U+00D9 LATIN CAPITAL LETTER U WITH GRAVE ("U") --> +<!ENTITY Uacute "&#218;"><!-- U+00DA LATIN CAPITAL LETTER U WITH ACUTE ("U") --> +<!ENTITY Ucirc "&#219;"><!-- U+00DB LATIN CAPITAL LETTER U WITH CIRCUMFLEX ("U") --> +<!ENTITY Uuml "&#220;"><!-- U+00DC LATIN CAPITAL LETTER U WITH DIAERESIS ("Ue") --> +<!ENTITY Yacute "&#221;"><!-- U+00DD LATIN CAPITAL LETTER Y WITH ACUTE ("Y") --> +<!ENTITY THORN "&#222;"><!-- U+00DE LATIN CAPITAL LETTER THORN ("[THORN]") --> +<!ENTITY szlig "&#223;"><!-- U+00DF LATIN SMALL LETTER SHARP S ("ss") --> +<!ENTITY agrave "&#224;"><!-- U+00E0 LATIN SMALL LETTER A WITH GRAVE ("a") --> +<!ENTITY aacute "&#225;"><!-- U+00E1 LATIN SMALL LETTER A WITH ACUTE ("a") --> +<!ENTITY acirc "&#226;"><!-- U+00E2 LATIN SMALL LETTER A WITH CIRCUMFLEX ("a") --> +<!ENTITY atilde "&#227;"><!-- U+00E3 LATIN SMALL LETTER A WITH TILDE ("a") --> +<!ENTITY auml "&#228;"><!-- U+00E4 LATIN SMALL LETTER A WITH DIAERESIS ("ae") --> +<!ENTITY aring "&#229;"><!-- U+00E5 LATIN SMALL LETTER A WITH RING ABOVE ("ae") --> +<!ENTITY aelig "&#230;"><!-- U+00E6 LATIN SMALL LETTER AE ("ae") --> +<!ENTITY ccedil "&#231;"><!-- U+00E7 LATIN SMALL LETTER C WITH CEDILLA ("c") --> +<!ENTITY egrave "&#232;"><!-- U+00E8 LATIN SMALL LETTER E WITH GRAVE ("e") --> +<!ENTITY eacute "&#233;"><!-- U+00E9 LATIN SMALL LETTER E WITH ACUTE ("e") --> +<!ENTITY ecirc "&#234;"><!-- U+00EA LATIN SMALL LETTER E WITH CIRCUMFLEX ("e") --> +<!ENTITY euml "&#235;"><!-- U+00EB LATIN SMALL LETTER E WITH DIAERESIS ("e") --> +<!ENTITY igrave "&#236;"><!-- U+00EC LATIN SMALL LETTER I WITH GRAVE ("i") --> +<!ENTITY iacute "&#237;"><!-- U+00ED LATIN SMALL LETTER I WITH ACUTE ("i") --> +<!ENTITY icirc "&#238;"><!-- U+00EE LATIN SMALL LETTER I WITH CIRCUMFLEX ("i") --> +<!ENTITY iuml "&#239;"><!-- U+00EF LATIN SMALL LETTER I WITH DIAERESIS ("i") --> +<!ENTITY eth "&#240;"><!-- U+00F0 LATIN SMALL LETTER ETH ("[eth]") --> +<!ENTITY ntilde "&#241;"><!-- U+00F1 LATIN SMALL LETTER N WITH TILDE ("n") --> +<!ENTITY ograve "&#242;"><!-- U+00F2 LATIN SMALL LETTER O WITH GRAVE ("o") --> +<!ENTITY oacute "&#243;"><!-- U+00F3 LATIN SMALL LETTER O WITH ACUTE ("o") --> +<!ENTITY ocirc 
"&#244;"><!-- U+00F4 LATIN SMALL LETTER O WITH CIRCUMFLEX ("o") --> +<!ENTITY otilde "&#245;"><!-- U+00F5 LATIN SMALL LETTER O WITH TILDE ("o") --> +<!ENTITY ouml "&#246;"><!-- U+00F6 LATIN SMALL LETTER O WITH DIAERESIS ("oe") --> +<!ENTITY divide "&#247;"><!-- U+00F7 DIVISION SIGN ("/") --> +<!ENTITY oslash "&#248;"><!-- U+00F8 LATIN SMALL LETTER O WITH STROKE ("oe") --> +<!ENTITY ugrave "&#249;"><!-- U+00F9 LATIN SMALL LETTER U WITH GRAVE ("u") --> +<!ENTITY uacute "&#250;"><!-- U+00FA LATIN SMALL LETTER U WITH ACUTE ("u") --> +<!ENTITY ucirc "&#251;"><!-- U+00FB LATIN SMALL LETTER U WITH CIRCUMFLEX ("u") --> +<!ENTITY uuml "&#252;"><!-- U+00FC LATIN SMALL LETTER U WITH DIAERESIS ("ue") --> +<!ENTITY yacute "&#253;"><!-- U+00FD LATIN SMALL LETTER Y WITH ACUTE ("y") --> +<!ENTITY thorn "&#254;"><!-- U+00FE LATIN SMALL LETTER THORN ("[thorn]") --> +<!ENTITY yuml "&#255;"><!-- U+00FF LATIN SMALL LETTER Y WITH DIAERESIS ("y") --> + +<!-- Some of ISO Latin 9 and 10 --> +<!ENTITY OElig "&#338;"><!-- U+0152 LATIN CAPITAL LIGATURE OE ("OE") --> +<!ENTITY oelig "&#339;"><!-- U+0153 LATIN SMALL LIGATURE OE ("oe") --> +<!ENTITY Scaron "&#352;"><!-- U+0160 LATIN CAPITAL LETTER S WITH CARON ("S") --> +<!ENTITY scaron "&#353;"><!-- U+0161 LATIN SMALL LETTER S WITH CARON ("s") --> +<!ENTITY Yuml "&#376;"><!-- U+0178 LATIN CAPITAL LETTER Y WITH DIAERESIS ("Y") --> + +<!-- Other Unicode (including some characters from the windows-1252 repertoire) --> +<!ENTITY fnof "&#402;"><!-- U+0192 LATIN SMALL LETTER F WITH HOOK ("f") --> +<!ENTITY tilde "&#732;"><!-- U+02DC SMALL TILDE ("~") --> +<!ENTITY ensp "&#8194;"><!-- U+2002 EN SPACE (" ") --> +<!ENTITY emsp "&#8195;"><!-- U+2003 EM SPACE (" ") --> +<!ENTITY thinsp "&#8201;"><!-- U+2009 THIN SPACE (" ") --> +<!ENTITY ndash "&#8211;"><!-- U+2013 EN DASH ("-") --> +<!ENTITY mdash "&#8212;"><!-- U+2014 EM DASH ("-\u002D") --> +<!ENTITY lsquo "&#8216;"><!-- U+2018 LEFT SINGLE QUOTATION MARK ("'") --> +<!ENTITY rsquo "&#8217;"><!-- U+2019 RIGHT SINGLE QUOTATION MARK ("'") --> +<!ENTITY sbquo "&#8218;"><!-- U+201A SINGLE LOW-9 QUOTATION MARK ("'") --> +<!ENTITY ldquo "&#8220;"><!-- U+201C LEFT DOUBLE QUOTATION MARK ('"') --> +<!ENTITY rdquo "&#8221;"><!-- U+201D RIGHT DOUBLE QUOTATION MARK ('"') --> +<!ENTITY bdquo "&#8222;"><!-- U+201E DOUBLE LOW-9 QUOTATION MARK ('"') --> +<!ENTITY dagger "&#8224;"><!-- U+2020 DAGGER ("*!*") --> +<!ENTITY Dagger "&#8225;"><!-- U+2021 DOUBLE DAGGER ("*!!*") --> +<!ENTITY bull "&#8226;"><!-- U+2022 BULLET ("o") --> +<!ENTITY hellip "&#8230;"><!-- U+2026 HORIZONTAL ELLIPSIS ("...") --> +<!ENTITY permil "&#8242;"><!-- U+2030 PER MILLE SIGN ("[/1000]") --> +<!ENTITY prime "&#8242;"><!-- U+2032 PRIME ("'") --> +<!ENTITY Prime "&#8243;"><!-- U+2033 DOUBLE PRIME ('"') --> +<!ENTITY lsaquo "&#8249;"><!-- U+2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK ("<") --> +<!ENTITY rsaquo "&#8250;"><!-- U+203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK (">") --> +<!ENTITY frasl "&#8260;"><!-- U+2044 FRACTION SLASH ("/") --> +<!ENTITY euro "&#8364;"><!-- U+20AC EURO SIGN ("EUR") --> +<!ENTITY trade "&#8482;"><!-- U+2122 TRADE MARK SIGN ("[TM]") --> +<!ENTITY larr "&#8592;"><!-- U+2190 LEFTWARDS ARROW ("<-\u002D") --> +<!ENTITY rarr "&#8594;"><!-- U+2192 RIGHTWARDS ARROW ("\u002D->") --> +<!ENTITY harr "&#8596;"><!-- U+2194 LEFT RIGHT ARROW ("<->") --> +<!ENTITY lArr "&#8656;"><!-- U+21D0 LEFTWARDS DOUBLE ARROW ("<==") --> +<!ENTITY rArr "&#8658;"><!-- U+21D2 RIGHTWARDS DOUBLE ARROW ("==>") --> +<!ENTITY hArr "&#8660;"><!-- U+21D4 LEFT RIGHT DOUBLE 
ARROW ("<=>") --> +<!ENTITY minus "&#8722;"><!-- U+2212 MINUS SIGN ("-") --> +<!ENTITY lowast "&#8727;"><!-- U+2217 ASTERISK OPERATOR ("*") --> +<!ENTITY le "&#8804;"><!-- U+2264 LESS-THAN OR EQUAL TO ("<=") --> +<!ENTITY ge "&#8805;"><!-- U+2265 GREATER-THAN OR EQUAL TO (">=") --> +<!ENTITY lang "&#9001;"><!-- U+2329 LEFT-POINTING ANGLE BRACKET ("<") --> +<!ENTITY rang "&#9002;"><!-- U+232A RIGHT-POINTING ANGLE BRACKET (">") --> + +<!-- EOF --> diff --git a/test/fixtures/cache-tests/spec/lib/rfc2629.dtd b/test/fixtures/cache-tests/spec/lib/rfc2629.dtd new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/rfc2629.dtd @@ -0,0 +1,312 @@ +<!-- + revised DTD for the RFC document series, draft of 2009-10-06 + --> + + +<!-- + Typical invocation: + <!DOCTYPE rfc PUBLIC "-//IETF//DTD RFC 2629//EN" + "http://xml.resource.org/authoring/rfc2629.dtd" [ + ... dtd subset ... + ]> + or + <!DOCTYPE rfc SYSTEM "rfc2629.dtd" [ + ... dtd subset ... + ]> + --> + + +<!-- + Contents + + Character entities + + DTD data types + + The top-level + + Front matter + + The Body + + Back matter + --> + + +<!-- + Character entities + --> + + +<!ENTITY % rfc2629-xhtml + PUBLIC "-//IETF//ENTITIES XHTML subset for RFC 2629//EN" + "rfc2629-xhtml.ent"> +%rfc2629-xhtml; + +<!ENTITY % rfc2629-other + PUBLIC "-//IETF//ENTITIES Other for RFC 2629//EN" + "rfc2629-other.ent"> +%rfc2629-other; + + +<!-- + DTD data types: + + entity description + ====== =============================================== + NUMBER [0-9]+ + NUMBERS a comma-separated list of NUMBER + + DAY the day of the month, e.g., "1" + MONTH the month of the year, e.g., "January" + YEAR a four-digit year, e.g., "1999" + + URI e.g., "http://invisible.net/" + + ATEXT/CTEXT printable ASCII text (no line-terminators) + + TEXT character data + --> + + +<!ENTITY % NUMBER "CDATA"> +<!ENTITY % NUMBERS "CDATA"> + +<!ENTITY % DAY "CDATA"> +<!ENTITY % MONTH "CDATA"> +<!ENTITY % YEAR "CDATA"> + +<!ENTITY % URI "CDATA"> + +<!ENTITY % ATEXT "CDATA"> +<!ENTITY % CTEXT "#PCDATA"> + +<!ENTITY % TEXT "#PCDATA"> + +<!ENTITY rfc.number "XXXX"> + + +<!-- + The top-level + --> + + +<!-- + attributes for the "rfc" element are supplied by the RFC + editor. when preparing drafts, authors should leave them blank. + + the "seriesNo" attribute is used if the category is, e.g., BCP. + --> +<!ELEMENT rfc (front,middle,back?)> +<!ATTLIST rfc + number %NUMBER; #IMPLIED + obsoletes %NUMBERS; "" + updates %NUMBERS; "" + category (std|bcp|info|exp|historic) + #IMPLIED + consensus (no|yes) #IMPLIED + seriesNo %NUMBER; #IMPLIED + ipr (full2026|noDerivativeWorks2026|none + |full3667|noModification3667|noDerivatives3667 + |full3978|noModification3978|noDerivatives3978 + |trust200811|noModificationTrust200811|noDerivativesTrust200811 + |trust200902|noModificationTrust200902|noDerivativesTrust200902 + |pre5378Trust200902) + #IMPLIED + iprExtract IDREF #IMPLIED + submissionType + (IETF|IAB|IRTF|independent) "IETF" + docName %ATEXT; #IMPLIED + xml:lang %ATEXT; "en"> + + +<!-- + Front matter + --> + + +<!ELEMENT front (title,author+,date,area*,workgroup*,keyword*, + abstract?,note*)> + +<!-- the "abbrev" attribute is used for headers, etc. 
--> +<!ELEMENT title (%CTEXT;)> +<!ATTLIST title + abbrev %ATEXT; #IMPLIED> + +<!ELEMENT author (organization?,address?)> +<!ATTLIST author + initials %ATEXT; #IMPLIED + surname %ATEXT; #IMPLIED + fullname %ATEXT; #IMPLIED + role (editor) #IMPLIED> + +<!ELEMENT organization + (%CTEXT;)> +<!ATTLIST organization + abbrev %ATEXT; #IMPLIED> + +<!ELEMENT address (postal?,phone?,facsimile?,email?,uri?)> + +<!-- this content model should be more strict: + at most one of each the city, region, code, and country + elements may be present --> +<!ELEMENT postal (street+,(city|region|code|country)*)> +<!ELEMENT street (%CTEXT;)> +<!ELEMENT city (%CTEXT;)> +<!ELEMENT region (%CTEXT;)> +<!ELEMENT code (%CTEXT;)> +<!ELEMENT country (%CTEXT;)> +<!ELEMENT phone (%CTEXT;)> +<!ELEMENT facsimile (%CTEXT;)> +<!ELEMENT email (%CTEXT;)> +<!ELEMENT uri (%CTEXT;)> + +<!ELEMENT date EMPTY> +<!ATTLIST date + day %DAY; #IMPLIED + month %MONTH; #IMPLIED + year %YEAR; #IMPLIED> + +<!-- meta-data... --> +<!ELEMENT area (%CTEXT;)> +<!ELEMENT workgroup (%CTEXT;)> +<!ELEMENT keyword (%CTEXT;)> + +<!ELEMENT abstract (t)+> +<!ELEMENT note (t)+> +<!ATTLIST note + title %ATEXT; #REQUIRED> + + +<!-- + The body + --> + + +<!ELEMENT middle (section+)> + +<!ELEMENT section ((t|figure|texttable|iref)*,section*)> +<!ATTLIST section + anchor ID #IMPLIED + title %ATEXT; #REQUIRED + toc (include|exclude|default) + "default"> + +<!-- use of <appendix/> is deprecated... +<!ELEMENT appendix ((t|figure|texttable|iref)*,appendix*)> +<!ATTLIST appendix + anchor ID #IMPLIED + title %ATEXT; #REQUIRED + toc (include|exclude|default) + "default"> + --> + +<!-- use of <figure/> is deprecated... --> +<!ELEMENT t (%TEXT;|list|figure|xref|eref|iref|cref|spanx|vspace)*> +<!ATTLIST t + anchor ID #IMPLIED + hangText %ATEXT; #IMPLIED> + +<!-- the value of the style attribute is inherited from the closest + parent --> +<!ELEMENT list (t+)> +<!ATTLIST list + style %ATEXT; #IMPLIED + hangIndent %NUMBER; #IMPLIED + counter %ATEXT; #IMPLIED> + +<!ELEMENT xref (%CTEXT;)> +<!ATTLIST xref + target IDREF #REQUIRED + pageno (true|false) "false" + format (counter|title|none|default) + "default"> + +<!ELEMENT eref (%CTEXT;)> +<!ATTLIST eref + target %URI; #REQUIRED> + +<!ELEMENT iref EMPTY> +<!ATTLIST iref + item %ATEXT; #REQUIRED + subitem %ATEXT; "" + primary (true|false) "false"> + +<!ELEMENT cref (%CTEXT;)> +<!ATTLIST cref + anchor ID #IMPLIED + source %ATEXT; #IMPLIED> + +<!ELEMENT spanx (%CTEXT;)> +<!ATTLIST spanx + xml:space (default|preserve) "preserve" + style %ATEXT; "emph"> + +<!ELEMENT vspace EMPTY> +<!ATTLIST vspace + blankLines %NUMBER; "0"> + +<!ELEMENT figure (iref*,preamble?,artwork,postamble?)> +<!ATTLIST figure + anchor ID #IMPLIED + title %ATEXT; "" + suppress-title (true|false) "false" + src %URI; #IMPLIED + align (left|center|right) "left" + alt %ATEXT; "" + width %ATEXT; "" + height %ATEXT; ""> + +<!ELEMENT preamble (%TEXT;|xref|eref|iref|cref|spanx)*> +<!ELEMENT artwork (%TEXT;)*> +<!ATTLIST artwork + xml:space (default|preserve) "preserve" + name %ATEXT; "" + type %ATEXT; "" + src %URI; #IMPLIED + align (left|center|right) "left" + alt %ATEXT; "" + width %ATEXT; "" + height %ATEXT; ""> + +<!ELEMENT postamble (%TEXT;|xref|eref|iref|cref|spanx)*> + +<!ELEMENT texttable (preamble?,ttcol+,c*,postamble?)> +<!ATTLIST texttable + anchor ID #IMPLIED + title %ATEXT; "" + suppress-title (true|false) "false" + align (left|center|right) "center" + style (all|none|headers|full) "full"> +<!ELEMENT ttcol (%CTEXT;)> +<!ATTLIST ttcol + width %ATEXT; 
#IMPLIED + align (left|center|right) "left"> +<!ELEMENT c (%TEXT;|xref|eref|iref|cref|spanx)*> + + +<!-- + Back matter + --> + + +<!-- sections, if present, are appendices --> +<!ELEMENT back (references*,section*)> + +<!ELEMENT references (reference+)> +<!ATTLIST references + title %ATEXT; "References"> +<!ELEMENT reference (front,seriesInfo*,format*,annotation*)> +<!ATTLIST reference + anchor ID #IMPLIED + target %URI; #IMPLIED> +<!ELEMENT seriesInfo EMPTY> +<!ATTLIST seriesInfo + name %ATEXT; #REQUIRED + value %ATEXT; #REQUIRED> +<!ELEMENT format EMPTY> +<!ATTLIST format + target %URI; #IMPLIED + type %ATEXT; #REQUIRED + octets %NUMBER; #IMPLIED> +<!ELEMENT annotation (%TEXT;|xref|eref|iref|cref|spanx)*> diff --git a/test/fixtures/cache-tests/spec/lib/rfc2629.xslt b/test/fixtures/cache-tests/spec/lib/rfc2629.xslt new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/rfc2629.xslt @@ -0,0 +1,10226 @@ +<!-- + XSLT transformation from RFC2629 XML format to HTML + + Copyright (c) 2006-2017, Julian Reschke (julian.reschke@greenbytes.de) + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Julian Reschke nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. 
+--> + +<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" + version="2.0" + + xmlns:date="http://exslt.org/dates-and-times" + xmlns:ed="http://greenbytes.de/2002/rfcedit" + xmlns:exslt="http://exslt.org/common" + xmlns:msxsl="urn:schemas-microsoft-com:xslt" + xmlns:myns="mailto:julian.reschke@greenbytes.de?subject=rcf2629.xslt" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:saxon="http://saxon.sf.net/" + xmlns:saxon-old="http://icl.com/saxon" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns:x="http://purl.org/net/xml2rfc/ext" + xmlns:xi="http://www.w3.org/2001/XInclude" + xmlns:xhtml="http://www.w3.org/1999/xhtml" + + exclude-result-prefixes="date ed exslt msxsl myns rdf saxon saxon-old svg x xi xhtml" + > + +<xsl:strip-space elements="abstract address author back figure front list middle note postal reference references rfc section table tbody thead tr texttable"/> + +<xsl:output method="html" encoding="utf-8" doctype-system="about:legacy-compat" indent="no"/> + +<!-- PIs outside the root element, or inside the root element but before <front> --> +<xsl:variable name="global-std-pis" select="/processing-instruction('rfc') | /*/processing-instruction('rfc')[following-sibling::front]"/> + +<!-- rfc authorship PI --> + +<xsl:param name="xml2rfc-authorship"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'authorship'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc comments PI --> + +<xsl:param name="xml2rfc-comments"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'comments'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc compact PI --> + +<xsl:param name="xml2rfc-compact"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'compact'"/> + <xsl:with-param name="default" select="$xml2rfc-rfcedstyle"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc footer PI --> + +<xsl:param name="xml2rfc-footer"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'footer'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc header PI --> + +<xsl:param name="xml2rfc-header"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'header'"/> + </xsl:call-template> +</xsl:param> + +<!-- rfc inline PI --> + +<xsl:param name="xml2rfc-inline"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'inline'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- include a table of contents if a processing instruction <?rfc?> + exists with contents toc="yes". 
Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-toc"> + <xsl:variable name="default"> + <xsl:choose> + <xsl:when test="/rfc/@version >= 3">yes</xsl:when> + <xsl:otherwise>no</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="/rfc/@tocInclude='false'">no</xsl:when> + <xsl:when test="/rfc/@tocInclude='true'">yes</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'toc'"/> + <xsl:with-param name="default" select="$default"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- optional tocdepth--> + +<xsl:param name="xml2rfc-tocdepth"> + <xsl:choose> + <xsl:when test="/rfc/@tocDepth"> + <xsl:value-of select="/rfc/@tocDepth"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'tocdepth'"/> + <xsl:with-param name="default" select="'3'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<xsl:variable name="parsedTocDepth"> + <xsl:choose> + <xsl:when test="$xml2rfc-tocdepth='1'">1</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='2'">2</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='3'">3</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='4'">4</xsl:when> + <xsl:when test="$xml2rfc-tocdepth='5'">5</xsl:when> + <xsl:otherwise>99</xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<!-- suppress top block if a processing instruction <?rfc?> + exists with contents tocblock="no". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-topblock"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'topblock'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- Format to the RFC Editor's taste --> + +<xsl:param name="xml2rfc-rfcedstyle"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'rfcedstyle'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- the name of an automatically inserted references section --> + +<xsl:param name="xml2rfc-refparent"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'refparent'"/> + <xsl:with-param name="default" select="'References'"/> + </xsl:call-template> +</xsl:param> + +<!-- use symbolic reference names instead of numeric ones unless a processing instruction <?rfc?> + exists with contents symrefs="no". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-symrefs"> + <xsl:choose> + <xsl:when test="/rfc/@symRefs='false'">no</xsl:when> + <xsl:when test="/rfc/@symRefs='true'">yes</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'symrefs'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- sort references if a processing instruction <?rfc?> + exists with contents sortrefs="yes". 
Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-sortrefs"> + <xsl:choose> + <xsl:when test="/rfc/@sortRefs='true'">yes</xsl:when> + <xsl:when test="/rfc/@sortRefs='false'">no</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'sortrefs'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- insert editing marks if a processing instruction <?rfc?> + exists with contents editing="yes". Can be overridden by an XSLT parameter --> + +<xsl:param name="xml2rfc-editing"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'editing'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- make it a private paper --> + +<xsl:param name="xml2rfc-private"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'private'"/> + </xsl:call-template> +</xsl:param> + +<!-- background image? --> + +<xsl:param name="xml2rfc-background"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'background'"/> + </xsl:call-template> +</xsl:param> + +<!-- CSS max page width --> + +<xsl:param name="xml2rfc-ext-maxwidth"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'maxwidth'"/> + <xsl:with-param name="default" select="'1000'"/> + </xsl:call-template> +</xsl:param> + +<xsl:variable name="parsedMaxwidth"> + <xsl:choose> + <xsl:when test="string(number($xml2rfc-ext-maxwidth)) != 'NaN'"> + <xsl:value-of select="$xml2rfc-ext-maxwidth"/> + </xsl:when> + <xsl:when test="$xml2rfc-ext-maxwidth='none'"></xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Unsupported value of rfc-ext maxwidth PI: ', $xml2rfc-ext-maxwidth)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<!-- CSS styles --> + +<xsl:param name="xml2rfc-ext-styles">fft-sans-serif ffb-serif ff-cleartype</xsl:param> + +<xsl:variable name="styles" select="concat(' ',normalize-space($xml2rfc-ext-styles),' ')"/> + +<xsl:param name="xml2rfc-ext-ff-body"> + <xsl:variable name="t"> + <xsl:if test="contains($styles,' ff-noto ')"> + <xsl:if test="contains($styles,' ffb-serif ')"> + 'Noto Serif', + </xsl:if> + <xsl:if test="contains($styles,' ffb-sans-serif ')"> + 'Noto Sans', + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' ff-cleartype ')"> + <xsl:if test="contains($styles,' ffb-serif ')"> + cambria, georgia, + </xsl:if> + <xsl:if test="contains($styles,' ffb-sans-serif ')"> + candara, calibri, + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' ffb-sans-serif ')"> + segoe, optima, arial, sans-serif, + </xsl:if> + serif + </xsl:variable> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="normalize-space($t)"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-ff-title"> + <xsl:variable name="t"> + <xsl:if test="contains($styles,' ff-noto ')"> + <xsl:if test="contains($styles,' fft-serif ')"> + 'Noto Serif', + </xsl:if> + <xsl:if test="contains($styles,' fft-sans-serif ')"> + 'Noto Sans', + </xsl:if> + </xsl:if> + <xsl:if 
test="contains($styles,' ff-cleartype ')"> + <xsl:if test="contains($styles,' fft-serif ')"> + cambria, georgia, + </xsl:if> + <xsl:if test="contains($styles,' fft-sans-serif ')"> + candara, calibri, + </xsl:if> + </xsl:if> + <xsl:if test="contains($styles,' fft-serif ')"> + serif, + </xsl:if> + <xsl:if test="contains($styles,' fft-sans-serif ')"> + segoe, optima, arial, + </xsl:if> + sans-serif + </xsl:variable> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="normalize-space($t)"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-ff-pre"> + <xsl:variable name="t"> + <xsl:if test="contains($styles,' ff-noto ')"> + 'Roboto Mono', + </xsl:if> + <xsl:if test="contains($styles,' ff-cleartype ')"> + consolas, monaco, + </xsl:if> + monospace + </xsl:variable> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="normalize-space($t)"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-webfonts"> + <xsl:if test="contains($styles,' ff-noto ')"> + <xsl:if test="contains($styles,' ffb-sans-serif ') or contains($styles,' fft-sans-serif ')"> + <xsl:text>@import url('https://fonts.googleapis.com/css?family=Noto+Sans:r,b,i,bi');&#10;</xsl:text> + </xsl:if> + <xsl:if test="contains($styles,' ffb-serif ') or contains($styles,' fft-serif ')"> + <xsl:text>@import url('https://fonts.googleapis.com/css?family=Noto+Serif:r,b,i,bi');&#10;</xsl:text> + </xsl:if> + <xsl:text>@import url('https://fonts.googleapis.com/css?family=Roboto+Mono:r,b,i,bi');&#10;</xsl:text> + </xsl:if> +</xsl:param> + +<xsl:template name="ff-list"> + <xsl:param name="s"/> + <xsl:choose> + <xsl:when test="not(contains($s,','))"> + <xsl:value-of select="normalize-space($s)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(substring-before($s,','))"/> + <xsl:text>, </xsl:text> + <xsl:call-template name="ff-list"> + <xsl:with-param name="s" select="substring-after($s,',')"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- include PI --> + +<xsl:template name="getIncludes"> + <xsl:param name="nodes"/> + <xsl:for-each select="$nodes"> + <xsl:variable name="include"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="."/> + <xsl:with-param name="attr" select="'include'"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="doc"> + <xsl:choose> + <xsl:when test="$include=''"/> + <xsl:when test="substring($include, string-length($include) - 3) != '.xml'"> + <xsl:copy-of select="document(concat($include,'.xml'))"/> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="document($include)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="count(exslt:node-set($doc)) = 1"> + <myns:include from="{$include}" in="{generate-id(..)}"> + <xsl:copy-of select="$doc"/> + </myns:include> + </xsl:if> + </xsl:for-each> +</xsl:template> + +<xsl:template name="getXIncludes"> + <xsl:param name="nodes"/> + <xsl:for-each select="$nodes"> + <xsl:choose> + <xsl:when test="(@parse and @parse!='xml') or @xpointer"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'Unsupported attributes on x:include element'"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="doc"> + <xsl:copy-of select="document(@href)"/> + </xsl:variable> + <xsl:if test="count(exslt:node-set($doc)) = 1"> + <myns:include from="{@href}" in="{generate-id(..)}"> + <xsl:copy-of select="$doc"/> + </myns:include> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + 
</xsl:for-each> +</xsl:template> + +<xsl:variable name="includeDirectives"> + <xsl:call-template name="getIncludes"> + <xsl:with-param name="nodes" select="/rfc/back/references/processing-instruction('rfc')"/> + </xsl:call-template> + <xsl:call-template name="getXIncludes"> + <xsl:with-param name="nodes" select="/rfc/back/references/xi:include"/> + </xsl:call-template> +</xsl:variable> + +<!-- prettyprinting --> + +<xsl:param name="xml2rfc-ext-html-pretty-print"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'html-pretty-print'"/> + </xsl:call-template> +</xsl:param> + +<xsl:variable name="prettyprint-class"> + <xsl:if test="$xml2rfc-ext-html-pretty-print"> + <xsl:value-of select="substring-before(normalize-space($xml2rfc-ext-html-pretty-print),' ')"/> + </xsl:if> +</xsl:variable> + +<xsl:variable name="prettyprint-script"> + <xsl:if test="$xml2rfc-ext-html-pretty-print"> + <xsl:value-of select="substring-after(normalize-space($xml2rfc-ext-html-pretty-print),' ')"/> + </xsl:if> +</xsl:variable> + +<!-- external resource containing errata, as generated by parse-errata.xslt --> +<xsl:param name="xml2rfc-ext-errata"/> +<xsl:variable name="errata-parsed" select="document($xml2rfc-ext-errata)//erratum[@status!='Rejected']"/> + +<!-- "remove in RFC phrases" --> +<xsl:variable name="note-removeInRFC">This note is to be removed before publishing as an RFC.</xsl:variable> +<xsl:variable name="section-removeInRFC">This section is to be removed before publishing as an RFC.</xsl:variable> + + +<!-- CSS class name remapping --> + +<xsl:param name="xml2rfc-ext-css-map"/> + +<xsl:template name="generate-css-class"> + <xsl:param name="name"/> + <xsl:variable name="cssmap" select="document($xml2rfc-ext-css-map)"/> + <xsl:variable name="entry" select="$cssmap/*/map[@from=$name]"/> + <xsl:choose> + <xsl:when test="$entry"> + <xsl:value-of select="$entry/@css"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$name"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- WORK IN PROGRESS; ONLY A FEW CLASSES SUPPORTED FOR NOW --> +<xsl:variable name="css-artwork"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'artwork'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-art-svg"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'art-svg'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-docstatus"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'docstatus'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-center"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'center'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-erratum"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'erratum'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-error"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'error'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-fbbutton"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'fbbutton'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-feedback"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'feedback'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-header"><xsl:call-template 
name="generate-css-class"><xsl:with-param name="name" select="'header'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-left"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'left'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-noprint"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'noprint'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-publishedasrfc"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'publishedasrfc'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-reference"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'reference'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-right"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'right'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tcenter"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tcenter'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tleft"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tleft'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tright"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tright'"/></xsl:call-template></xsl:variable> +<xsl:variable name="css-tt"><xsl:call-template name="generate-css-class"><xsl:with-param name="name" select="'tt'"/></xsl:call-template></xsl:variable> + + +<!-- RFC-Editor site linking --> + +<xsl:param name="xml2rfc-ext-link-rfc-to-info-page"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'link-rfc-to-info-page'"/> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201503">yes</xsl:when> + <xsl:otherwise>no</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> +</xsl:param> + +<!-- DOI insertion --> + +<xsl:param name="xml2rfc-ext-insert-doi"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'insert-doi'"/> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201505">yes</xsl:when> + <xsl:otherwise>no</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> +</xsl:param> + +<!-- initials handling? --> + +<xsl:param name="xml2rfc-multiple-initials"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'multiple-initials'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- paragraph links? 
--> + +<xsl:param name="xml2rfc-ext-paragraph-links"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'paragraph-links'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for XML parsing in artwork --> + +<xsl:param name="xml2rfc-ext-parse-xml-in-artwork"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'parse-xml-in-artwork'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-trace-parse-xml"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'trace-parse-xml'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for excluding the index --> + +<xsl:param name="xml2rfc-ext-include-index"> + <xsl:choose> + <xsl:when test="/rfc/@indexInclude='false'">no</xsl:when> + <xsl:when test="/rfc/@indexInclude='true'">yes</xsl:when> + <xsl:otherwise> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'include-index'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- extension for inserting RFC metadata --> + +<xsl:param name="xml2rfc-ext-insert-metadata"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'insert-metadata'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for excluding DCMI properties in meta tag (RFC2731) --> + +<xsl:param name="xml2rfc-ext-support-rfc2731"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'support-rfc2731'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for specifying the value for <vspace> after which it's taken as a page break --> + +<xsl:param name="xml2rfc-ext-vspace-pagebreak"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'vspace-pagebreak'"/> + <xsl:with-param name="default" select="'100'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for allowing markup inside artwork --> + +<xsl:param name="xml2rfc-ext-allow-markup-in-artwork"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'allow-markup-in-artwork'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- extension for including references into index --> + +<xsl:param name="xml2rfc-ext-include-references-in-index"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'include-references-in-index'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- position of author's section --> + +<xsl:param 
name="xml2rfc-ext-authors-section"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'authors-section'"/> + <xsl:with-param name="default" select="'end'"/> + </xsl:call-template> +</xsl:param> + +<!-- justification? --> + +<xsl:param name="xml2rfc-ext-justification"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'justification'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- switch for doublesided layout --> + +<xsl:param name="xml2rfc-ext-duplex"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'duplex'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- trailing dots in section numbers --> + +<xsl:param name="xml2rfc-ext-sec-no-trailing-dots"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'sec-no-trailing-dots'"/> + </xsl:call-template> +</xsl:param> + +<!-- check artwork width? --> + +<xsl:param name="xml2rfc-ext-check-artwork-width"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'check-artwork-width'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- choose whether or not to do mailto links --> + +<xsl:param name="xml2rfc-linkmailto"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'linkmailto'"/> + <xsl:with-param name="default" select="'yes'"/> + </xsl:call-template> +</xsl:param> + +<!-- iprnotified switch --> + +<xsl:param name="xml2rfc-iprnotified"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$global-std-pis"/> + <xsl:with-param name="attr" select="'iprnotified'"/> + <xsl:with-param name="default" select="'no'"/> + </xsl:call-template> +</xsl:param> + +<!-- URL templates for RFCs and Internet Drafts. --> + +<!-- Reference the authoritative ASCII versions +<xsl:param name="rfcUrlPrefix" select="'http://www.ietf.org/rfc/rfc'" /> +<xsl:param name="rfcUrlPostfix" select="'.txt'" /> +--> +<!-- Reference the marked up versions over on https://tools.ietf.org/html. 
--> +<xsl:param name="rfcUrlFragSection" select="'section-'" /> +<xsl:param name="rfcUrlFragAppendix" select="'appendix-'" /> +<xsl:param name="internetDraftUrlFragSection" select="'section-'" /> +<xsl:param name="internetDraftUrlFragAppendix" select="'appendix-'" /> + +<!--templates for URI calculation --> + +<xsl:param name="xml2rfc-ext-isbn-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'isbn-uri'"/> + <xsl:with-param name="default">https://www.worldcat.org/search?q=isbn:{isbn}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-isbn-uri"> + <xsl:param name="isbn"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-isbn-uri"/> + <xsl:with-param name="replace" select="'{isbn}'"/> + <xsl:with-param name="by" select="translate($isbn,'-','')"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-uri'"/> + <xsl:with-param name="default">https://tools.ietf.org/html/rfc{rfc}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-rfc-uri"> + <xsl:param name="rfc"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-uri"/> + <xsl:with-param name="replace" select="'{rfc}'"/> + <xsl:with-param name="by" select="$rfc"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-internet-draft-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'internet-draft-uri'"/> + <xsl:with-param name="default">https://tools.ietf.org/html/{internet-draft}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-internet-draft-uri"> + <xsl:param name="internet-draft"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-internet-draft-uri"/> + <xsl:with-param name="replace" select="'{internet-draft}'"/> + <xsl:with-param name="by" select="$internet-draft"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-doi-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'doi-uri'"/> + <xsl:with-param name="default">http://dx.doi.org/{doi}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-doi-uri"> + <xsl:param name="doi"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-doi-uri"/> + <xsl:with-param name="replace" select="'{doi}'"/> + <xsl:with-param name="by" select="$doi"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-erratum-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-erratum-uri'"/> + <xsl:with-param name="default">https://www.rfc-editor.org/errata/eid{eid}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<xsl:template name="compute-rfc-erratum-uri"> + <xsl:param name="eid"/> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-erratum-uri"/> + 
<xsl:with-param name="replace" select="'{eid}'"/> + <xsl:with-param name="by" select="$eid"/> + </xsl:call-template> +</xsl:template> + +<xsl:param name="xml2rfc-ext-rfc-errata-uri"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'rfc-errata-uri'"/> + <xsl:with-param name="default">https://www.rfc-editor.org/errata_search.php?rfc={rfc}</xsl:with-param> + </xsl:call-template> +</xsl:param> + +<!-- the format we're producing --> +<xsl:param name="outputExtension" select="'html'"/> + +<!-- source for autorefresh --> +<xsl:param name="xml2rfc-ext-refresh-from"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'refresh-from'"/> + <xsl:with-param name="default" select="''"/> + </xsl:call-template> +</xsl:param> + +<!-- XSLT for autorefresh --> +<xsl:param name="xml2rfc-ext-refresh-xslt"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'refresh-xslt'"/> + <xsl:with-param name="default" select="'rfc2629.xslt'"/> + </xsl:call-template> +</xsl:param> + +<!-- interval for autorefresh --> +<xsl:param name="xml2rfc-ext-refresh-interval"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="/processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'refresh-interval'"/> + <xsl:with-param name="default" select="10"/> + </xsl:call-template> +</xsl:param> + +<!-- warning re: absent node-set ext. function --> +<xsl:variable name="node-set-warning"> + This stylesheet requires either an XSLT-1.0 processor with node-set() + extension function, or an XSLT-2.0 processor. Therefore, parts of the + document couldn't be displayed. 
+</xsl:variable> + +<!-- character translation tables --> +<xsl:variable name="lcase" select="'abcdefghijklmnopqrstuvwxyz'" /> +<xsl:variable name="ucase" select="'ABCDEFGHIJKLMNOPQRSTUVWXYZ'" /> +<xsl:variable name="digits" select="'0123456789'" /> +<xsl:variable name="alpha" select="concat($lcase,$ucase)"/> +<xsl:variable name="alnum" select="concat($alpha,$digits)"/> + +<!-- build help keys for indices --> +<xsl:key name="index-first-letter" + match="iref|reference" + use="translate(substring(concat(@anchor,@item),1,1),'abcdefghijklmnopqrstuvwxyz','ABCDEFGHIJKLMNOPQRSTUVWXYZ')" /> + +<xsl:key name="index-item" + match="iref" + use="@item" /> + +<xsl:key name="index-item-subitem" + match="iref" + use="concat(@item,'..',@subitem)" /> + +<xsl:key name="index-xref-by-sec" + match="xref[@x:sec|@section]" + use="concat(@target,'..',@x:sec,@section)" /> + +<xsl:key name="index-xref-by-anchor" + match="xref[@x:rel|@relative]" + use="concat(@target,'..',@x:rel,@relative)" /> + +<xsl:key name="anchor-item" + match="//*[@anchor]" + use="@anchor"/> + +<xsl:key name="xref-item" + match="//xref" + use="@target"/> + +<xsl:key name="extref-item" + match="//x:ref" + use="."/> + +<!-- prefix for automatically generated anchors --> +<xsl:variable name="anchor-pref" select="'rfc.'" /> + +<!-- IPR version switch --> +<xsl:variable name="ipr-rfc3667" select="( + /rfc/@number &gt; 3708) or + not( + (/rfc/@ipr = 'full2026') or + (/rfc/@ipr = 'noDerivativeWorks2026') or + (/rfc/@ipr = 'noDerivativeWorksNow') or + (/rfc/@ipr = 'none') or + (/rfc/@ipr = '') or + not(/rfc/@ipr) + )" /> + +<xsl:variable name="rfcno" select="/rfc/@number"/> + +<xsl:variable name="submissionType"> + <xsl:choose> + <xsl:when test="/rfc/@submissionType='IETF' or not(/rfc/@submissionType) or /rfc/submissionType=''">IETF</xsl:when> + <xsl:when test="/rfc/@submissionType='IAB' or /rfc/@submissionType='IRTF' or /rfc/@submissionType='independent'"> + <xsl:value-of select="/rfc/@submissionType"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('(UNSUPPORTED SUBMISSION TYPE: ',/rfc/@submissionType,')')"/> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported value for /rfc/@submissionType: ', /rfc/@submissionType)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <!-- sanity check on @consensus --> + <xsl:if test="/rfc/@consensus and /rfc/@submissionType='independent'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('/rfc/@consensus meaningless with a /rfc/@submissionType value of ', /rfc/@submissionType)"/> + </xsl:call-template> + </xsl:if> +</xsl:variable> + +<xsl:variable name="consensus"> + <xsl:choose> + <xsl:when test="/rfc/@consensus='yes' or /rfc/@consensus='true' or not(/rfc/@consensus)">yes</xsl:when> + <xsl:when test="/rfc/@consensus='no' or /rfc/@consensus='false'">no</xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('(UNSUPPORTED VALUE FOR CONSENSUS: ',/rfc/@consensus,')')"/> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported value for /rfc/@consensus: ', /rfc/@consensus)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<!-- Header format as defined in RFC 5741, and deployed end of Dec 2009 --> +<xsl:variable name="header-format"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201001 or + ($rfcno=5741 or $rfcno=5742 or $rfcno=5743)" + 
>2010</xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="rfc-boilerplate"> + <xsl:choose> + <!-- RFC boilerplate as defined in RFC 5741, and deployed end of Dec 2009 --> + <xsl:when test="$pub-yearmonth >= 201001 or + ($rfcno=5741 or $rfcno=5742 or $rfcno=5743)" + >2010</xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<!-- the reference to the latest and greatest headers-and-boilerplates document --> +<xsl:variable name="hab-reference"> + <xsl:choose> + <xsl:when test="$pub-yearmonth >= 201606 or ($rfcno=7846 or $rfcno=7865 or $rfcno=7866 or $rfcno=7873 or $rfcno=7879 or $rfcno=7892)">Section 2 of RFC 7841</xsl:when> + <xsl:otherwise>Section 2 of RFC 5741</xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="id-boilerplate"> + <xsl:choose> + <!-- ID boilerplate approved by IESG on Jan 14 2010--> + <xsl:when test="$pub-yearmonth >= 201004" + >2010</xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="ipr-rfc4748" select="( + $ipr-rfc3667 and + ( $rfcno &gt;= 4715 and ( $rfcno != 4718 and $rfcno != 4735 and $rfcno != 4749 )) + or + ( $rfcno=4578 or $rfcno=4582 or $rfcno=4583 or $rfcno=4628 or $rfcno=4629 or $rfcno=4639 or $rfcno=4651 or $rfcno=4682 or $rfcno=4684 or $rfcno=4695 or $rfcno=4696 ) + or + ( not(/rfc/@number) and $pub-yearmonth >= 200611) + )" /> + +<xsl:variable name="ipr-2007-08" select="( + $ipr-rfc4748 and + ( + ($rfcno &gt; 5000 + and $rfcno != 5020 + and $rfcno != 5021 + and $rfcno != 5034 + and $rfcno != 5052 + and $rfcno != 5065 + and $rfcno != 5094) or + ($xml2rfc-ext-pub-year >= 2008) or + (not(/rfc/@number) and $pub-yearmonth >= 200709) + ) + )" /> + +<xsl:variable name="ipr-2008-11" select="( + /rfc/@number and $pub-yearmonth >= 200811 + ) + or + ( + /rfc/@ipr = 'trust200811' or + /rfc/@ipr = 'noModificationTrust200811' or + /rfc/@ipr = 'noDerivativesTrust200902' or + /rfc/@ipr = 'trust200902' or + /rfc/@ipr = 'noModificationTrust200902' or + /rfc/@ipr = 'noDerivativesTrust200902' or + /rfc/@ipr = 'pre5378Trust200902' + )" /> + +<xsl:variable name="ipr-2009-02" select="( + $ipr-2008-11 and $pub-yearmonth >= 200902 + )" /> + +<!-- this makes the Sep 2009 TLP text depend on the publication date to be >= 2009-11 + for IDs, and around 2009-09 for RFCs--> +<xsl:variable name="ipr-2009-09" select="( + ( not(/rfc/@number) and $pub-yearmonth >= 200911 ) + or + ( + /rfc/@number and $pub-yearmonth >= 200909 and + $rfcno!=5582 and $rfcno!=5621 and $rfcno!=5632 and $rfcno!=5645 and $rfcno!=5646 and $rfcno!=5681 + ) + )" /> + +<!-- this makes the Jan 2010 TLP text depend on the publication date to be >= 2010-04 + for IDs, and around 2010-01 for RFCs--> +<xsl:variable name="ipr-2010-01" select="( + ( not(/rfc/@number) and $pub-yearmonth >= 201004 ) + or + ( + /rfc/@number and ($pub-yearmonth >= 201001 or + $rfcno=5741 or $rfcno=5742 or $rfcno=5743) + ) + )" /> + +<!-- see http://mailman.rfc-editor.org/pipermail/rfc-interest/2009-June/001373.html --> +<!-- for IDs, implement the change as 2009-11 --> +<xsl:variable name="abstract-first" select="( + (/rfc/@number and $pub-yearmonth >= 200907) + or + (not(/rfc/@number) and $pub-yearmonth >= 200911) + )" /> + +<!-- RFC 7322 changed the placement of notes --> +<xsl:variable name="notes-follow-abstract" select="( + (/rfc/@number and /rfc/@number >= 7200) + or + ($pub-yearmonth >= 201409) + )" /> + +<!-- funding switch --> +<xsl:variable name="funding0" select="( + $rfcno &gt; 2499) or + (not(/rfc/@number) and /rfc/@docName and 
$xml2rfc-ext-pub-year &gt;= 1999 + )" /> + +<xsl:variable name="funding1" select="( + $rfcno &gt; 4320) or + (not(/rfc/@number) and /rfc/@docName and $xml2rfc-ext-pub-year &gt;= 2006 + )" /> + +<xsl:variable name="no-funding" select="$ipr-2007-08"/> + +<xsl:variable name="no-copylong" select="$ipr-2008-11"/> + +<!-- will document have an index --> +<xsl:variable name="has-index" select="(//iref or (//xref and $xml2rfc-ext-include-references-in-index='yes')) and $xml2rfc-ext-include-index!='no'" /> + +<!-- does the document contain edits? --> +<xsl:variable name="has-edits" select="//ed:ins | //ed:del | //ed:replace" /> + +<!-- does the document have a published-as-rfc link? --> +<xsl:variable name="published-as-rfc" select="/*/x:link[@rel='Alternate' and starts-with(@title,'RFC')]"/> + + +<xsl:template match="text()[not(ancestor::artwork) and not(ancestor::sourcecode)]"> + <xsl:variable name="ws" select="'&#9;&#10;&#13;&#32;'"/> + <xsl:variable name="starts-with-ws" select="'' = translate(substring(.,1,1),$ws,'')"/> + <xsl:variable name="ends-with-ws" select="'' = translate(substring(.,string-length(.),1),$ws,'')"/> + <xsl:variable name="normalized" select="normalize-space(.)"/> + <!--<xsl:message> Orig: "<xsl:value-of select="."/>"</xsl:message> + <xsl:message>Start: "<xsl:value-of select="$starts-with-ws"/>"</xsl:message> + <xsl:message> End: "<xsl:value-of select="$ends-with-ws"/>"</xsl:message> --> + <xsl:if test="$starts-with-ws and (preceding-sibling::node() | parent::ed:ins | parent::ed:del)"> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:value-of select="$normalized"/> + <xsl:if test="$ends-with-ws and $normalized!='' and (following-sibling::node() | parent::ed:ins | parent::ed:del)"> + <xsl:text> </xsl:text> + </xsl:if> +</xsl:template> + + +<xsl:template match="abstract"> + <xsl:call-template name="check-no-text-content"/> + <section id="{$anchor-pref}abstract"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="'abstract'"/> + </xsl:call-template> + <h2><a href="#{$anchor-pref}abstract">Abstract</a></h2> + <xsl:apply-templates /> + </section> +</xsl:template> + +<msxsl:script language="JScript" implements-prefix="myns"> + function parseXml(str) { + try { + var doc = new ActiveXObject("MSXML2.DOMDocument"); + doc.async = false; + if (doc.loadXML(str)) { + return ""; + } + else { + return doc.parseError.reason + "\n" + doc.parseError.srcText + " (" + doc.parseError.line + "/" + doc.parseError.linepos + ")"; + } + } + catch(e) { + return ""; + } + } +</msxsl:script> + +<xsl:template name="add-artwork-class"> + <xsl:variable name="v"> + <xsl:choose> + <xsl:when test="@type='abnf' or @type='abnf2045' or @type='abnf2616' or @type='abnf7230' or @type='application/xml-dtd' or @type='inline' or @type='application/relax-ng-compact-syntax'">inline</xsl:when> + <xsl:when test="starts-with(@type,'message/http') and contains(@type,'msgtype=&quot;request&quot;')">text2</xsl:when> + <xsl:when test="starts-with(@type,'message/http')">text</xsl:when> + <xsl:when test="starts-with(@type,'drawing')">drawing</xsl:when> + <xsl:when test="starts-with(@type,'text/plain') or @type='example' or @type='code' or @type='application/xml-dtd' or @type='application/json'">text</xsl:when> + <xsl:otherwise/> + </xsl:choose> + <xsl:if test="@x:lang and $prettyprint-class!=''"> + <xsl:value-of select="concat(' ',$prettyprint-class)"/> + <xsl:if test="@x:lang!=''"> + <xsl:value-of select="concat(' lang-',@x:lang)"/> + </xsl:if> + </xsl:if> + </xsl:variable> + <xsl:if 
test="normalize-space($v)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($v)"/></xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template name="insert-begin-code"> + <xsl:if test="@x:is-code-component='yes'"> + <pre class="ccmarker cct"><span>&lt;CODE BEGINS></span></pre> + </xsl:if> +</xsl:template> + +<xsl:template name="insert-end-code"> + <xsl:if test="@x:is-code-component='yes'"> + <pre class="ccmarker ccb"><span>&lt;CODE ENDS></span></pre> + </xsl:if> +</xsl:template> + +<xsl:template match="artwork|sourcecode"> + <xsl:if test="not(ancestor::ed:del) and $xml2rfc-ext-parse-xml-in-artwork='yes' and function-available('myns:parseXml')" use-when="function-available('myns:parseXml')"> + <xsl:if test="contains(.,'&lt;?xml')"> + <xsl:variable name="body" select="substring-after(substring-after(.,'&lt;?xml'),'?>')" /> + <xsl:if test="$body!='' and myns:parseXml($body)!=''"> + <table style="background-color: red; border-width: thin; border-style: solid; border-color: black;"> + <tr><td> + XML PARSE ERROR; parsed the body below: + <pre> + <xsl:value-of select="$body"/> + </pre> + resulting in: + <pre> + <xsl:value-of select="myns:parseXml($body)" /> + </pre> + </td></tr></table> + </xsl:if> + </xsl:if> + <xsl:if test="@ed:parse-xml-after"> + <xsl:if test="myns:parseXml(string(.))!=''"> + <table style="background-color: red; border-width: thin; border-style: solid; border-color: black;"> + <tr><td> + XML PARSE ERROR: + <pre><xsl:value-of select="myns:parseXml(string(.))" /></pre> + </td></tr></table> + </xsl:if> + </xsl:if> + </xsl:if> + <xsl:if test="contains(.,'&#9;')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'artwork contains HTAB character'"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:if> + <xsl:variable name="display"> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-allow-markup-in-artwork='yes'"> + <xsl:apply-templates/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="@align='right'"> + <div style="display:table; margin-left: auto; margin-right: 0em;"> + <xsl:call-template name="insert-begin-code"/> + <pre style="margin-left: 0em;"> + <xsl:call-template name="add-artwork-class"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:copy-of select="$display"/> + </pre> + <xsl:call-template name="insert-end-code"/> + </div> + </xsl:when> + <xsl:when test="@align='center'"> + <div style="display:table; margin-left: auto; margin-right: auto;"> + <xsl:call-template name="insert-begin-code"/> + <pre style="margin-left: 0em;"> + <xsl:call-template name="add-artwork-class"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:copy-of select="$display"/> + </pre> + <xsl:call-template name="insert-end-code"/> + </div> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="insert-begin-code"/> + <pre> + <xsl:call-template name="add-artwork-class"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:copy-of select="$display"/> + </pre> + <xsl:call-template name="insert-end-code"/> + </xsl:otherwise> + </xsl:choose> + <xsl:call-template name="check-artwork-width"> + <xsl:with-param name="content"><xsl:apply-templates/></xsl:with-param> + <xsl:with-param name="indent"><xsl:value-of select="string-length(@x:indent-with)"/></xsl:with-param> + </xsl:call-template> +</xsl:template> + +<!-- special case for first text node in artwork --> +<xsl:template match="artwork/text()[1]"> + <xsl:choose> + 
<xsl:when test="starts-with(.,'&#10;')"> + <!-- reduce leading whitespace --> + <xsl:value-of select="substring(.,2)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + + +<xsl:template name="check-artwork-width"> + <xsl:param name="content"/> + <xsl:param name="indent"/> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-check-artwork-width='no'"> + <!-- skip check --> + </xsl:when> + <xsl:when test="not(contains($content,'&#10;'))"> + <xsl:if test="string-length($content) > 69 + number($indent)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">artwork line too long: '<xsl:value-of select="$content"/>' (<xsl:value-of select="string-length($content)"/> characters)</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="start" select="substring-before($content,'&#10;')"/> + <xsl:variable name="end" select="substring-after($content,'&#10;')"/> + <xsl:variable name="max"> + <xsl:choose> + <xsl:when test="$indent!=''"><xsl:value-of select="69 + $indent"/></xsl:when> + <xsl:otherwise>69</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="string-length($start) > $max"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">artwork line too long: '<xsl:value-of select="$start"/>' (<xsl:value-of select="string-length($start)"/> characters)</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="check-artwork-width"> + <xsl:with-param name="content" select="$end"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="artwork[@src and starts-with(@type,'image/') or @type='svg']|artwork[svg:svg]"> + <xsl:variable name="class"> + <xsl:value-of select="$css-artwork"/> + <xsl:text> </xsl:text> + <xsl:if test="svg:svg"> + <xsl:value-of select="$css-art-svg"/> + </xsl:if> + <xsl:choose> + <xsl:when test="@align='center'"><xsl:text> </xsl:text><xsl:value-of select="$css-center"/></xsl:when> + <xsl:when test="@align='right'"><xsl:text> </xsl:text><xsl:value-of select="$css-right"/></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <div class="{normalize-space($class)}"> + <xsl:choose> + <xsl:when test="svg:svg"> + <xsl:copy-of select="svg:svg"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="alt"> + <xsl:choose> + <xsl:when test="@alt!=''"> + <xsl:value-of select="@alt"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <img src="{@src}"> + <xsl:if test="normalize-space($alt)!=''"> + <xsl:attribute name="alt"><xsl:value-of select="$alt"/></xsl:attribute> + </xsl:if> + <xsl:if test="@width and @width!=''"> + <xsl:copy-of select="@width"/> + </xsl:if> + <xsl:if test="@height and @height!=''"> + <xsl:copy-of select="@height"/> + </xsl:if> + </img> + </xsl:otherwise> + </xsl:choose> + </div> +</xsl:template> + +<xsl:template match="author|x:contributor"> + <xsl:call-template name="check-no-text-content"/> + + <address> + <xsl:call-template name="emit-author"/> + + <xsl:if test="@asciiFullname!='' or organization/@ascii!='' or postal/*/@ascii"> + <br/><br/> + <em>Additional contact information:</em> + <br/> + <xsl:call-template name="emit-author"> + <xsl:with-param name="ascii" select="false()"/> + </xsl:call-template> + </xsl:if> + </address> +</xsl:template> + +<xsl:template name="emit-author"> + <xsl:param name="ascii" select="true()"/> + <b> + <xsl:choose> + <xsl:when test="@asciiFullname!='' and 
$ascii"> + <xsl:value-of select="@asciiFullname" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@fullname" /> + </xsl:otherwise> + </xsl:choose> + </b> + <xsl:if test="@role"> + <xsl:text> (</xsl:text> + <xsl:value-of select="@role" /> + <xsl:text>)</xsl:text> + </xsl:if> + <!-- annotation support for Martin "uuml" Duerst --> + <xsl:if test="@x:annotation"> + <xsl:text> </xsl:text> + <i><xsl:value-of select="@x:annotation"/></i> + </xsl:if> + + <xsl:if test="normalize-space(concat(organization,organization/@ascii)) != ''"> + <br/> + <xsl:choose> + <xsl:when test="organization/@ascii!='' and $ascii"> + <xsl:value-of select="organization/@ascii" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="organization" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:if test="address/postal"> + <xsl:for-each select="address/postal/street"> + <xsl:variable name="street"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="name" select="'street'"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$street!=''"> + <br/> + <xsl:value-of select="$street"/> + </xsl:if> + </xsl:for-each> + <xsl:for-each select="address/postal/postalLine"> + <xsl:variable name="line"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="name" select="'postalLine'"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$line!=''"> + <br/> + <xsl:value-of select="$line"/> + </xsl:if> + </xsl:for-each> + <xsl:if test="address/postal/city|address/postal/region|address/postal/code"> + <br/> + <xsl:variable name="city"> + <xsl:if test="address/postal/city"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="address/postal/city"/> + <xsl:with-param name="name" select="'address/postal/city'"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="region"> + <xsl:if test="address/postal/region"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="address/postal/region"/> + <xsl:with-param name="name" select="'address/postal/region'"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + <xsl:variable name="code"> + <xsl:if test="address/postal/code"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="address/postal/code"/> + <xsl:with-param name="name" select="'address/postal/code'"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:if> + </xsl:variable> + + <xsl:if test="$city!=''"> + <xsl:value-of select="$city"/> + </xsl:if> + + <xsl:variable name="region-and-code"> + <xsl:value-of select="$region"/> + <xsl:if test="$region!='' and $code!=''"> + <xsl:text>&#160;</xsl:text> + </xsl:if> + <xsl:value-of select="$code"/> + </xsl:variable> + + <xsl:if test="$region-and-code!=''"> + <xsl:if test="$city!=''"> + <xsl:text>, </xsl:text> + </xsl:if> + <xsl:value-of select="$region-and-code"/> + </xsl:if> + </xsl:if> + <xsl:if test="address/postal/country"> + <xsl:variable name="country"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="address/postal/country"/> + <xsl:with-param name="name" select="'address/postal/country'"/> + <xsl:with-param name="ascii" select="$ascii"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$country!=''"> + <br/> + <xsl:value-of 
select="$country"/> + </xsl:if> + </xsl:if> + </xsl:if> + <xsl:if test="address/phone"> + <xsl:variable name="phone"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="address/phone"/> + <xsl:with-param name="name" select="'address/phone'"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$phone!=''"> + <br/> + <xsl:text>Phone: </xsl:text> + <a href="tel:{translate($phone,' ','')}"><xsl:value-of select="$phone" /></a> + </xsl:if> + </xsl:if> + <xsl:if test="address/facsimile"> + <xsl:variable name="facsimile"> + <xsl:call-template name="extract-normalized"> + <xsl:with-param name="node" select="address/facsimile"/> + <xsl:with-param name="name" select="'address/facsimile'"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="$facsimile!=''"> + <br/> + <xsl:text>Fax: </xsl:text> + <a href="fax:{translate($facsimile,' ','')}"><xsl:value-of select="$facsimile" /></a> + </xsl:if> + </xsl:if> + <xsl:for-each select="address/email"> + <xsl:variable name="email"> + <xsl:call-template name="extract-email"/> + </xsl:variable> + + <br/> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Email: </xsl:when> + <xsl:otherwise>EMail: </xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="$xml2rfc-linkmailto!='no'"> + <a href="mailto:{$email}"><xsl:value-of select="$email" /></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$email" /> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + <xsl:for-each select="address/uri"> + <xsl:variable name="uri"> + <xsl:call-template name="extract-uri"/> + </xsl:variable> + <xsl:if test="$uri!=''"> + <br/> + <xsl:text>URI: </xsl:text> + <a href="{$uri}"><xsl:value-of select="$uri" /></a> + <xsl:if test="@x:annotation"> + <xsl:text> </xsl:text> + <i><xsl:value-of select="@x:annotation"/></i> + </xsl:if> + </xsl:if> + </xsl:for-each> +</xsl:template> + +<!-- this is a named template because <back> may be absent --> +<xsl:template name="back"> + <xsl:call-template name="check-no-text-content"/> + + <!-- add editorial comments --> + <xsl:if test="//cref and $xml2rfc-comments='yes' and $xml2rfc-inline!='yes'"> + <xsl:call-template name="insertComments" /> + </xsl:if> + + <!-- next, add information about the document's authors --> + <xsl:if test="$xml2rfc-ext-authors-section='before-appendices'"> + <xsl:call-template name="insertAuthors" /> + </xsl:if> + + <!-- add all other top-level sections under <back> --> + <xsl:apply-templates select="back/*[not(self::references) and not(self::ed:replace and .//references)]" /> + + <!-- insert the index if index entries exist --> + <!-- note it always comes before the authors section --> + <xsl:if test="$has-index"> + <xsl:call-template name="insertIndex" /> + </xsl:if> + + <!-- Authors section is the absolute last thing, except for copyright stuff --> + <xsl:if test="$xml2rfc-ext-authors-section='end'"> + <xsl:call-template name="insertAuthors" /> + </xsl:if> + + <xsl:if test="$xml2rfc-private=''"> + <!-- copyright statements --> + <xsl:variable name="copyright"> + <xsl:call-template name="insertCopyright" /> + </xsl:variable> + + <!-- emit it --> + <xsl:choose> + <xsl:when test="function-available('exslt:node-set')"> + <xsl:apply-templates select="exslt:node-set($copyright)" /> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$node-set-warning"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + +</xsl:template> + +<xsl:template match="eref[node()]"> + <a 
href="{@target}"><xsl:apply-templates /></a> +</xsl:template> + +<xsl:template match="eref[not(node())]"> + <xsl:text>&lt;</xsl:text> + <a href="{@target}"><xsl:value-of select="@target" /></a> + <xsl:text>&gt;</xsl:text> +</xsl:template> + +<xsl:template match="figure"> + <xsl:call-template name="check-no-text-content"/> + <!-- warn about the attributes that we do not support --> + <xsl:for-each select="@*[local-name()!='title' and local-name()!='suppress-title' and local-name()!='anchor' and local-name()!='pn' and normalize-space(.)!='']"> + <xsl:if test="local-name(.)!='align' or normalize-space(.)!='left'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('unsupported attribute ',local-name(.),' on figure element')"/> + </xsl:call-template> + </xsl:if> + </xsl:for-each> + <xsl:variable name="anch-container"> + <xsl:choose> + <xsl:when test="ancestor::t">span</xsl:when> + <xsl:otherwise>div</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="@anchor!=''"> + <xsl:call-template name="check-anchor"/> + <xsl:element name="{$anch-container}"> + <xsl:attribute name="id"><xsl:value-of select="@anchor"/></xsl:attribute> + </xsl:element> + </xsl:if> + <xsl:variable name="anch"> + <xsl:call-template name="get-figure-anchor"/> + </xsl:variable> + <xsl:element name="{$anch-container}"> + <xsl:attribute name="id"><xsl:value-of select="$anch"/></xsl:attribute> + <xsl:apply-templates select="*[not(self::name)]"/> + </xsl:element> + <xsl:if test="(@title!='' or name) or (@anchor!='' and not(@suppress-title='true'))"> + <xsl:variable name="n"><xsl:call-template name="get-figure-number"/></xsl:variable> + <p class="figure"> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Figure </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="@title!='' or name">: </xsl:if> + </xsl:if> + <xsl:call-template name="insertTitle"/> + </p> + </xsl:if> +</xsl:template> + +<xsl:variable name="all-notes" select="/rfc/front/note"/> +<xsl:variable name="all-edited-notes" select="/rfc/front/ed:replace[.//note]"/> +<xsl:variable name="notes-not-in-boilerplate" select="$all-notes[@title!='IESG Note' or $xml2rfc-private!='' or $notes-follow-abstract]"/> +<xsl:variable name="edited-notes-not-in-boilerplate" select="$all-edited-notes[.//note/@title!='IESG Note' or $xml2rfc-private!='' or $notes-follow-abstract]"/> +<xsl:variable name="notes-in-boilerplate" select="$all-notes[not(@title!='IESG Note' or $xml2rfc-private!='' or $notes-follow-abstract)]"/> +<xsl:variable name="edited-notes-in-boilerplate" select="$all-edited-notes[not(.//note/@title!='IESG Note' or $xml2rfc-private!='' or $notes-follow-abstract)]"/> + +<xsl:template match="front"> + <xsl:call-template name="check-no-text-content"/> + <header> + <xsl:if test="$xml2rfc-topblock!='no'"> + <!-- collect information for left column --> + <xsl:variable name="leftColumn"> + <xsl:call-template name="collectLeftHeaderColumn" /> + </xsl:variable> + <!-- collect information for right column --> + <xsl:variable name="rightColumn"> + <xsl:call-template name="collectRightHeaderColumn" /> + </xsl:variable> + <!-- insert the collected information --> + <table class="{$css-header}" id="{$anchor-pref}headerblock"> + <xsl:choose> + <xsl:when test="function-available('exslt:node-set')"> + <xsl:call-template name="emitheader"> + <xsl:with-param name="lc" select="exslt:node-set($leftColumn)" /> + <xsl:with-param name="rc" select="exslt:node-set($rightColumn)" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template 
name="error"> + <xsl:with-param name="msg" select="$node-set-warning"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </table> + </xsl:if> + + <div id="{$anchor-pref}title"> + <!-- main title --> + <h1><xsl:apply-templates select="title"/></h1> + <xsl:if test="/rfc/@docName"> + <xsl:variable name="docname" select="/rfc/@docName"/> + <xsl:choose> + <xsl:when test="$rfcno!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' is ignored because an RFC number is specified as well.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <div class="filename"><xsl:value-of select="$docname"/></div> + </xsl:otherwise> + </xsl:choose> + + <xsl:variable name="docname-noext"> + <xsl:choose> + <xsl:when test="contains($docname,'.')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' should contain the base name, not the filename (thus no file extension).</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="substring-before($docname,'.')"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$docname"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <!-- more name checks --> + <xsl:variable name="offending" select="translate($docname,concat($lcase,$digits,'-.'),'')"/> + <xsl:if test="$offending != ''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' should not contain the character '<xsl:value-of select="substring($offending,1,1)"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="contains($docname,'--')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' should not contain the character sequence '--'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="not(starts-with($docname,'draft-'))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' should start with 'draft-'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <!-- sequence number --> + <xsl:variable name="seq"> + <xsl:choose> + <xsl:when test="substring($docname-noext,string-length($docname-noext) + 1 - string-length('-latest'))='-latest'">latest</xsl:when> + <xsl:when test="substring($docname-noext,string-length($docname-noext) - 2, 1)='-'"><xsl:value-of select="substring($docname-noext,string-length($docname-noext)-1)"/></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:if test="$seq='' or ($seq!='latest' and translate($seq,$digits,'')!='')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>' should end with a two-digit sequence number or 'latest'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="string-length($docname)-string-length($seq) > 50"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The @docName attribute '<xsl:value-of select="$docname"/>', excluding sequence number, should have less than 50 characters.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + </xsl:if> + </div> + </header> + + <!-- insert notice about update --> + <xsl:if test="$published-as-rfc"> + <p class="{$css-publishedasrfc}"> + <b>Note:</b> a later version of this document has been published as <a href="{$published-as-rfc/@href}"><xsl:value-of 
select="$published-as-rfc/@title"/></a>. + </p> + </xsl:if> + + <!-- check for conforming ipr attribute --> + <xsl:choose> + <xsl:when test="not(/rfc/@ipr)"> + <xsl:if test="not(/rfc/@number) and $xml2rfc-private=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Either /rfc/@ipr or /rfc/@number is required</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="/rfc/@ipr = 'full2026'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativeWorks'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativeWorksNow'" /> + <xsl:when test="/rfc/@ipr = 'none'" /> + <xsl:when test="/rfc/@ipr = 'full3667'" /> + <xsl:when test="/rfc/@ipr = 'noModification3667'" /> + <xsl:when test="/rfc/@ipr = 'noDerivatives3667'" /> + <xsl:when test="/rfc/@ipr = 'full3978'" /> + <xsl:when test="/rfc/@ipr = 'noModification3978'" /> + <xsl:when test="/rfc/@ipr = 'noDerivatives3978'" /> + <xsl:when test="/rfc/@ipr = 'trust200811'" /> + <xsl:when test="/rfc/@ipr = 'noModificationTrust200811'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativesTrust200811'" /> + <xsl:when test="/rfc/@ipr = 'trust200902'" /> + <xsl:when test="/rfc/@ipr = 'noModificationTrust200902'" /> + <xsl:when test="/rfc/@ipr = 'noDerivativesTrust200902'" /> + <xsl:when test="/rfc/@ipr = 'pre5378Trust200902'" /> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unknown value for /rfc/@ipr: ', /rfc/@ipr)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="'boilerplate'"/> + </xsl:call-template> + + <xsl:if test="not($abstract-first)"> + <xsl:if test="$xml2rfc-private=''"> + <xsl:call-template name="emit-ietf-preamble"> + <xsl:with-param name="notes" select="$notes-in-boilerplate|$edited-notes-in-boilerplate"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:apply-templates select="abstract" /> + <xsl:if test="$notes-follow-abstract"> + <xsl:apply-templates select="$notes-not-in-boilerplate|$edited-notes-not-in-boilerplate" /> + </xsl:if> + + <xsl:if test="$abstract-first"> + <xsl:if test="$xml2rfc-private=''"> + <xsl:call-template name="emit-ietf-preamble"> + <xsl:with-param name="notes" select="$notes-in-boilerplate|$edited-notes-in-boilerplate"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:if test="not($notes-follow-abstract)"> + <xsl:apply-templates select="$notes-not-in-boilerplate|$edited-notes-not-in-boilerplate" /> + </xsl:if> + + <xsl:if test="$xml2rfc-toc='yes'"> + <xsl:apply-templates select="/" mode="toc" /> + <xsl:call-template name="insertTocAppendix" /> + </xsl:if> + +</xsl:template> + +<xsl:template name="emit-ietf-preamble"> + <xsl:param name="notes"/> + + <!-- Get status info formatted as per RFC2629--> + <xsl:variable name="preamble"> + <xsl:for-each select="/rfc"> + <xsl:call-template name="insertPreamble"> + <xsl:with-param name="notes" select="$notes"/> + </xsl:call-template> + </xsl:for-each> + </xsl:variable> + + <!-- get document-supplied boilerplate --> + <xsl:variable name="userboiler" select="/rfc/front/boilerplate"/> + + <!-- emit it --> + <xsl:choose> + <xsl:when test="function-available('exslt:node-set')"> + <xsl:variable name="differ" select="$userboiler and translate(normalize-space(string($userboiler)),' ','')!=translate(normalize-space(string($preamble)),' ','')"/> + <!--<xsl:if test="$differ"> + <xsl:message>1: <xsl:value-of select="normalize-space(string($userboiler))"/></xsl:message> + <xsl:message>2: <xsl:value-of 
select="normalize-space(string($preamble))"/></xsl:message> + </xsl:if>--> + <xsl:apply-templates select="exslt:node-set($preamble)" /> + <xsl:if test="$differ"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'user-supplied boilerplate differs from auto-generated boilerplate (inserting auto-generated)'"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$node-set-warning"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="iref"> + <xsl:variable name="anchor"><xsl:call-template name="compute-iref-anchor"/></xsl:variable> + <xsl:choose> + <xsl:when test="parent::figure"> + <div id="{$anchor}"/> + </xsl:when> + <xsl:when test="ancestor::t or ancestor::artwork or ancestor::preamble or ancestor::postamble"> + <span id="{$anchor}"/> + </xsl:when> + <xsl:otherwise> + <div id="{$anchor}"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="compute-iref-anchor"> + <xsl:variable name="first" select="translate(substring(@item,1,1),$ucase,$lcase)"/> + <xsl:variable name="nkey" select="translate($first,$alnum,'')"/> + <xsl:choose> + <xsl:when test="count(.|$section-level-irefs)=count($section-level-irefs)"> + <xsl:for-each select=".."> + <xsl:value-of select="$anchor-pref"/>section.<xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="$nkey=''"> + <xsl:value-of select="$anchor-pref"/>iref.<xsl:value-of select="$first"/>.<xsl:number level="any" count="iref[starts-with(translate(@item,$ucase,$lcase),$first) and count(.|$section-level-irefs)!=count($section-level-irefs)]"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/>iref.<xsl:number level="any" count="iref[translate(substring(@item,1,1),$alnum,'')!='' and count(.|$section-level-irefs)!=count($section-level-irefs)]"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="compute-extref-anchor"> + <xsl:variable name="first" select="translate(substring(.,1,1),$ucase,$lcase)"/> + <xsl:variable name="nkey" select="translate($first,$lcase,'')"/> + <xsl:choose> + <xsl:when test="$nkey=''"> + <xsl:value-of select="$anchor-pref"/>extref.<xsl:value-of select="$first"/>.<xsl:number level="any" count="x:ref[starts-with(translate(.,$ucase,$lcase),$first)]"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/>extref.<xsl:number level="any" count="x:ref[translate(substring(.,1,1),concat($lcase,$ucase),'')='']"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- list templates depend on the list style --> + +<xsl:template match="list[@style='empty' or (not(@style) and not(ancestor::list[@style]) or (not(@style) and ancestor::list[@style='empty']))]"> + <xsl:call-template name="check-no-text-content"/> + <ul class="empty"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ul> +</xsl:template> + +<xsl:template match="ol[string-length(@type)>1]"> + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + <xsl:variable name="start"> + <xsl:choose> + <xsl:when test="@group"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:when test="@start"> + <xsl:value-of select="@start"/> + </xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="$p!='' and not(ancestor::list) and 
not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <div id="{$anchor-pref}section.{$p}"> + <dl> + <xsl:call-template name="copy-anchor"/> + <xsl:for-each select="li"> + <xsl:variable name="label"> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="../@type"/> + <xsl:with-param name="pos" select="$start - 1 + position()"/> + </xsl:call-template> + </xsl:variable> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:value-of select="$label"/> + </dt> + <dd> + <xsl:apply-templates/> + </dd> + </xsl:for-each> + </dl> + </div> + </xsl:when> + <xsl:otherwise> + <dl> + <xsl:call-template name="copy-anchor"/> + <xsl:for-each select="li"> + <xsl:variable name="label"> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="../@type"/> + <xsl:with-param name="pos" select="$start - 1 + position()"/> + </xsl:call-template> + </xsl:variable> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:value-of select="$label"/> + </dt> + <dd> + <xsl:apply-templates/> + </dd> + </xsl:for-each> + </dl> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="dl"> + <xsl:variable name="hang" select="@hanging"/> + <xsl:variable name="spac" select="@spacing"/> + <xsl:variable name="class"> + <xsl:if test="$spac='compact'">compact </xsl:if> + <xsl:if test="$hang='false'">nohang </xsl:if> + </xsl:variable> + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + <div> + <xsl:if test="$p!='' and not(ancestor::list) and not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <xsl:attribute name="id"><xsl:value-of select="concat($anchor-pref,'section.',$p)"/></xsl:attribute> + </xsl:if> + <dl> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="normalize-space($class)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($class)"/></xsl:attribute> + </xsl:if> + <xsl:for-each select="dt"> + <xsl:apply-templates select="."/> + <xsl:apply-templates select="following-sibling::dd[1]"/> + </xsl:for-each> + </dl> + </div> +</xsl:template> + +<xsl:template match="dt"> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </dt> +</xsl:template> + +<xsl:template match="dd"> + <dd> + <xsl:variable name="block-level-children" select="t | dl"/> + <xsl:choose> + <xsl:when test="$block-level-children"> + <!-- TODO: improve error handling--> + <xsl:for-each select="$block-level-children"> + <xsl:choose> + <xsl:when test="self::t"> + <p> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </p> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> + </dd> +</xsl:template> + +<xsl:template match="list[starts-with(@style,'format ')]"> + <xsl:call-template name="check-no-text-content"/> + <dl> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </dl> +</xsl:template> + +<!-- get value of "compact" mode, checking subcompact first, then compact --> +<xsl:template name="get-compact-setting"> + <xsl:variable name="t1"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="preceding::processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'subcompact'"/> + <xsl:with-param name="default" select="'?'"/> + <xsl:with-param name="duplicate-warning" select="'no'"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when 
test="$t1='?'"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="preceding::processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'compact'"/> + <xsl:with-param name="default" select="'?'"/> + <xsl:with-param name="duplicate-warning" select="'no'"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$t1"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="list[@style='hanging']"> + <xsl:call-template name="check-no-text-content"/> + <xsl:variable name="compact"> + <xsl:call-template name="get-compact-setting"/> + </xsl:variable> + <!-- insert a hard space for nested lists so that indentation works ok --> + <xsl:if test="ancestor::list and normalize-space(preceding-sibling::text())=''"> + <xsl:text>&#160;</xsl:text> + </xsl:if> + <dl> + <xsl:if test="$compact='yes'"> + <xsl:attribute name="class">compact</xsl:attribute> + </xsl:if> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </dl> +</xsl:template> + +<xsl:template match="list[@style='numbers' or (not(@style) and ancestor::list[@style='numbers'])]"> + <xsl:call-template name="check-no-text-content"/> + <ol> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ol> +</xsl:template> + +<xsl:template name="ol-start"> + <xsl:param name="node"/> + <xsl:variable name="group" select="$node/@group"/> + <xsl:variable name="prec" select="$node/preceding::ol[@group=$group]"/> + <xsl:choose> + <xsl:when test="$node/@start"> + <xsl:value-of select="$node/@start"/> + </xsl:when> + <xsl:when test="$prec"> + <xsl:variable name="s"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="$prec[last()]"/> + </xsl:call-template> + </xsl:variable> + <xsl:value-of select="$s + count($prec[last()]/li)"/> + </xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="ol[not(@type) or string-length(@type)=1]"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + <xsl:variable name="start"> + <xsl:choose> + <xsl:when test="@group"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:when test="@start"> + <xsl:value-of select="@start"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + <div> + <xsl:if test="$p!='' and not(ancestor::list) and not(ancestor::ul) and not(ancestor::dl) and not(ancestor::ol) and not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <xsl:attribute name="id"><xsl:value-of select="concat($anchor-pref,'section.',$p)"/></xsl:attribute> + </xsl:if> + <ol> + <xsl:if test="$start!=''"> + <xsl:attribute name="start"><xsl:value-of select="$start"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:copy-of select="@type"/> + <xsl:apply-templates /> + </ol> + </div> +</xsl:template> + +<xsl:template match="ul"> + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + <div> + <xsl:call-template name="insertInsDelClass"/> + <xsl:if test="$p!='' and not(ancestor::list) and not(ancestor::ul) and not(ancestor::dl) and not(ancestor::ol) and not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <xsl:attribute name="id"><xsl:value-of 
select="concat($anchor-pref,'section.',$p)"/></xsl:attribute> + </xsl:if> + <ul> + <xsl:call-template name="copy-anchor"/> + <xsl:if test="@empty='true'"> + <xsl:attribute name="class">empty</xsl:attribute> + </xsl:if> + <xsl:apply-templates /> + </ul> + </div> +</xsl:template> + +<xsl:template match="li"> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + <xsl:if test="not(following-sibling::li)"> + <xsl:variable name="l"> + <xsl:for-each select=".."> + <xsl:call-template name="get-paragraph-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$l!=''"> + <a class='self' href='#{$anchor-pref}section.{$l}'>&#xb6;</a> + </xsl:if> + </xsl:if> + </li> +</xsl:template> + +<xsl:template match="list[@style='letters' or (not(@style) and ancestor::list[@style='letters'])]"> + <xsl:call-template name="check-no-text-content"/> + <xsl:variable name="style"> + <xsl:choose> + <!-- lowercase for even-numbered nesting levels --> + <xsl:when test="0=(count(ancestor::list[@style='letters']) mod 2)">la</xsl:when> + <!-- uppercase otherwise --> + <xsl:otherwise>ua</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <ol class="{$style}"> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ol> +</xsl:template> + +<xsl:template match="list[@style='symbols' or (not(@style) and ancestor::list[@style='symbols'])]"> + <xsl:call-template name="check-no-text-content"/> + <ul> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </ul> +</xsl:template> + + +<!-- same for t(ext) elements --> + +<xsl:template match="list[@style='empty' or not(@style)]/t | list[@style='empty' or not(@style)]/ed:replace/ed:*/t"> + <xsl:if test="@hangText"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="'t/@hangText used on unstyled list'"/> + </xsl:call-template> + </xsl:if> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:apply-templates /> + </li> +</xsl:template> + +<xsl:template match="list[@style='numbers' or @style='symbols' or @style='letters']/x:lt"> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="t" /> + </li> +</xsl:template> + +<xsl:template match="list[@style='numbers' or @style='symbols' or @style='letters']/t | list[@style='numbers' or @style='symbols' or @style='letters']/ed:replace/ed:*/t"> + <xsl:if test="@hangText"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="'t/@hangText used on non-hanging list'"/> + </xsl:call-template> + </xsl:if> + <li> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"/> + </xsl:for-each> + <xsl:apply-templates /> + </li> +</xsl:template> + +<xsl:template match="list[@style='hanging']/x:lt"> + <xsl:if test="@hangText!=''"> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + <xsl:value-of select="@hangText" /> + </dt> + </xsl:if> + <dd> + <xsl:call-template 
name="insertInsDelClass"/> + <!-- if hangIndent present, use 0.7 of the specified value (1em is the width of the "m" character --> + <xsl:if test="../@hangIndent"> + <xsl:attribute name="style">margin-left: <xsl:value-of select="format-number(../@hangIndent * 0.7,'#.#')"/>em</xsl:attribute> + </xsl:if> + <xsl:apply-templates select="t" /> + </dd> +</xsl:template> + +<xsl:template match="list[@style='hanging']/t | list[@style='hanging']/ed:replace/ed:*/t"> + <xsl:if test="@hangText!=''"> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:if test="count(preceding-sibling::t)=0"> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + <xsl:value-of select="@hangText" /> + </dt> + </xsl:if> + + <xsl:variable name="dd-content"> + <xsl:apply-templates/> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$dd-content!=''"> + <dd> + <xsl:call-template name="insertInsDelClass"/> + <!-- if hangIndent present, use 0.7 of the specified value (1em is the width of the "m" character --> + <xsl:if test="../@hangIndent"> + <xsl:attribute name="style">margin-left: <xsl:value-of select="format-number(../@hangIndent * 0.7,'#.#')"/>em</xsl:attribute> + </xsl:if> + <xsl:apply-templates /> + </dd> + </xsl:when> + <xsl:otherwise> + <dd>&#160;</dd> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="list[starts-with(@style,'format ')]/t"> + <xsl:variable name="list" select=".." 
/> + <xsl:variable name="format" select="substring-after(../@style,'format ')" /> + <xsl:variable name="pos"> + <xsl:choose> + <xsl:when test="$list/@counter"> + <xsl:number level="any" count="list[@counter=$list/@counter or (not(@counter) and @style=concat('format ',$list/@counter))]/t" /> + </xsl:when> + <xsl:otherwise> + <xsl:number level="any" count="list[concat('format ',@counter)=$list/@style or (not(@counter) and @style=$list/@style)]/t" /> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <dt> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="$format"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </dt> + <dd> + <xsl:apply-templates /> + </dd> +</xsl:template> + +<xsl:template name="expand-format-percent"> + <xsl:param name="format"/> + <xsl:param name="pos"/> + + <xsl:choose> + <xsl:when test="$format=''"><!-- done--></xsl:when> + <xsl:when test="substring($format,1,1)!='%' or string-length($format)=1"> + <xsl:value-of select="substring($format,1,1)"/> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="substring($format,2)"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="f" select="substring($format,2,1)"/> + <xsl:choose> + <xsl:when test="$f='%'">%</xsl:when> + <xsl:when test="$f='c'"><xsl:number value="$pos" format="a"/></xsl:when> + <xsl:when test="$f='C'"><xsl:number value="$pos" format="A"/></xsl:when> + <xsl:when test="$f='d'"><xsl:number value="$pos"/></xsl:when> + <xsl:when test="$f='i'"><xsl:number value="$pos" format="i"/></xsl:when> + <xsl:when test="$f='I'"><xsl:number value="$pos" format="I"/></xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported % format: ', $f)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="substring($format,3)"/> + <xsl:with-param name="pos" select="$pos"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template match="middle"> + <xsl:apply-templates /> + <xsl:apply-templates select="../back//references"/> +</xsl:template> + +<xsl:template match="note"> + <xsl:call-template name="check-no-text-content"/> + <xsl:variable name="classes"> + <xsl:text>note</xsl:text> + <xsl:text> </xsl:text> + <xsl:if test="@removeInRFC='true'">rfcEditorRemove</xsl:if> + </xsl:variable> + <xsl:variable name="num"><xsl:number/></xsl:variable> + <section id="{$anchor-pref}note.{$num}" class="{normalize-space($classes)}"> + <h2> + <xsl:call-template name="insertInsDelClass"/> + <a href="#{$anchor-pref}note.{$num}"> + <xsl:call-template name="insertTitle" /> + </a> + </h2> + <xsl:if test="@removeInRFC='true' and t[1]!=$note-removeInRFC"> + <xsl:variable name="t"> + <t><xsl:value-of select="$note-removeInRFC"/></t> + </xsl:variable> + <xsl:variable name="link" select="concat($anchor-pref,'note.',$num,'.p.1')"/> + <div id="{$link}"> + <xsl:apply-templates mode="t-content" select="exslt:node-set($t)//text()"> + <xsl:with-param name="inherited-self-link" select="$link"/> + </xsl:apply-templates> + </div> + </xsl:if> + <xsl:apply-templates /> + </section> +</xsl:template> + +<xsl:template match="postamble"> + <xsl:if test="normalize-space(.) 
!= ''"> + <p> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:apply-templates /> + </p> + </xsl:if> +</xsl:template> + +<xsl:template match="preamble"> + <xsl:if test="normalize-space(.) != ''"> + <p> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:apply-templates /> + </p> + </xsl:if> +</xsl:template> + +<xsl:template name="computed-auto-target"> + <xsl:param name="bib"/> + <xsl:param name="ref"/> + + <xsl:variable name="sec"> + <xsl:choose> + <xsl:when test="$ref and starts-with($ref/@x:rel,'#') and not($ref/@x:sec) and not($ref/@section)"> + <xsl:variable name="extdoc" select="document($bib/x:source/@href)"/> + <xsl:for-each select="$extdoc//*[@anchor=substring-after($ref/@x:rel,'#')]"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="$ref and $ref/@section"> + <xsl:value-of select="$ref/@section"/> + </xsl:when> + <xsl:when test="$ref"> + <xsl:value-of select="$ref/@x:sec"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$ref and $bib/x:source/@href and $bib/x:source/@basename and $ref/@x:rel"> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension,$ref/@x:rel)" /> + </xsl:when> + <xsl:when test="$ref and $bib/x:source/@href and $bib/x:source/@basename and $ref/@anchor"> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension,'#',$ref/@anchor)" /> + </xsl:when> + <!-- tools.ietf.org won't have the "-latest" draft --> + <xsl:when test="$bib//seriesInfo/@name='Internet-Draft' and $bib/x:source/@href and $bib/x:source/@basename and substring($bib/x:source/@basename, (string-length($bib/x:source/@basename) - string-length('-latest')) + 1)='-latest'"> + <xsl:value-of select="concat($bib/x:source/@basename,'.',$outputExtension)" /> + </xsl:when> + <!-- TODO: this should handle the case where there's one BCP entry but + multiple RFC entries in a more useful way--> + <xsl:when test="$bib//seriesInfo/@name='RFC'"> + <xsl:variable name="rfcEntries" select="$bib//seriesInfo[@name='RFC']"/> + <xsl:if test="count($rfcEntries)!=1"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('seriesInfo/@name=RFC encountered multiple times for reference ',$bib/@anchor,', will generate link to first entry only')"/> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="$rfcEntries[1]/@value"/> + </xsl:call-template> + <xsl:if test="$ref and $sec!='' and $rfcUrlFragSection and $rfcUrlFragAppendix"> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:value-of select="concat('#',$rfcUrlFragAppendix,$sec)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('#',$rfcUrlFragSection,$sec)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:when> + <xsl:when test="$bib//seriesInfo/@name='Internet-Draft'"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$bib//seriesInfo[@name='Internet-Draft']/@value"/> + </xsl:call-template> + <xsl:if test="$ref and $sec!='' and $internetDraftUrlFragSection and $internetDraftUrlFragAppendix"> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:value-of select="concat('#',$internetDraftUrlFragAppendix,$sec)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of 
select="concat('#',$internetDraftUrlFragSection,$sec)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> +</xsl:template> + +<xsl:template name="compute-section-number"> + <xsl:param name="bib"/> + <xsl:param name="ref"/> + + <xsl:variable name="anch" select="substring-after($ref/@x:rel,'#')"/> + + <xsl:choose> + <xsl:when test="$anch=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Not a fragment identifier: <xsl:value-of select="$ref/@x:rel"/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="extdoc" select="document($bib/x:source/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor=$anch]"/> + <xsl:if test="not($nodes)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$anch"/>' in <xsl:value-of select="$bib/@anchor"/> not found in source file '<xsl:value-of select="$bib/x:source/@href"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="$nodes"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="computed-target"> + <xsl:param name="bib"/> + <xsl:param name="ref"/> + + <xsl:variable name="bibtarget"> + <xsl:choose> + <xsl:when test="starts-with($bib/@target,'http://www.rfc-editor.org/info/rfc') or starts-with($bib/@target,'https://www.rfc-editor.org/info/rfc') and $ref and ($ref/@x:sec or $ref/@x:rel or $ref/@section or $ref/@relative)"> + <!--ignored, use tools.ietf.org link instead --> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$bib/@target"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$bibtarget!=''"> + <xsl:if test="$ref and $ref/@x:sec"> + <xsl:choose> + <xsl:when test="$ref/@x:rel"> + <xsl:value-of select="concat($bib/@target,$ref/@x:rel)"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Can't generate section link for to <xsl:value-of select="$bib/@anchor"/>; no @x:rel specified</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:if test="$ref and $ref/@section"> + <xsl:choose> + <xsl:when test="$ref/@relative"> + <xsl:value-of select="concat($bib/@target,$ref/@relative)"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Can't generate section link for to <xsl:value-of select="$bib/@anchor"/>; no @relative specified</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param name="bib" select="$bib"/> + <xsl:with-param name="ref" select="$ref"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="compute-doi"> + <xsl:choose> + <!-- xref seems to be for BCP, not RFC --> + <xsl:when test=".//seriesInfo[@name='BCP'] and starts-with(@anchor, 'BCP')" /> + <xsl:when test=".//seriesInfo[@name='RFC'] and not(.//organization='RFC Errata') and not(@target='http://www.rfc-editor.org' or @target='https://www.rfc-editor.org')"> + <xsl:variable name="rfc" select=".//seriesInfo[@name='RFC'][1]/@value"/> + <xsl:value-of select="concat('10.17487/RFC', format-number($rfc,'#0000'))"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<!-- processed elsewhere --> +<xsl:template match="displayreference"> + <xsl:variable name="t" 
select="@to"/> + <xsl:if test="//reference/@anchor=$t or count(//displayreference[@to=$t])!=1"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">displayreference <xsl:value-of select="$t"/> will create non-unique reference name.</xsl:with-param> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="displayname-for-author"> + <xsl:param name="not-reversed"/> + + <xsl:variable name="initials"> + <xsl:call-template name="format-initials"/> + </xsl:variable> + <xsl:variable name="truncated-initials"> + <xsl:call-template name="truncate-initials"> + <xsl:with-param name="initials" select="$initials"/> + </xsl:call-template> + </xsl:variable> + + <!-- surname/initials is reversed for last author except when it's the only one --> + <xsl:choose> + <xsl:when test="$truncated-initials='' and @surname"> + <xsl:value-of select="@surname"/> + </xsl:when> + <xsl:when test="position()=last() and position()!=1"> + <xsl:value-of select="concat($truncated-initials,' ',@surname)" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat(@surname,', ',$truncated-initials)" /> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@asciiSurname!='' or @asciiInitials!=''"> + <xsl:text> (</xsl:text> + <xsl:variable name="i"> + <xsl:choose> + <xsl:when test="@asciiInitials!=''"> + <xsl:value-of select="@asciiInitials"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$truncated-initials"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="s"> + <xsl:choose> + <xsl:when test="@asciiSurname!=''"> + <xsl:value-of select="@asciiSurname"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@surname"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="$i=''"> + <xsl:value-of select="$s"/> + </xsl:when> + <xsl:when test="$not-reversed"> + <xsl:value-of select="concat($i,' ',$s)" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat($s,', ',$i)" /> + </xsl:otherwise> + </xsl:choose> + <xsl:text>)</xsl:text> + </xsl:if> + <xsl:if test="@role='editor'"> + <xsl:text>, Ed.</xsl:text> + </xsl:if> +</xsl:template> + +<xsl:template name="link-ref-title-to"> + <xsl:choose> + <xsl:when test="starts-with(@target,'http://www.rfc-editor.org/info/rfc') or starts-with(@target,'https://www.rfc-editor.org/info/rfc')"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">Ignoring @target <xsl:value-of select="@target"/> in link calculation</xsl:with-param> + </xsl:call-template> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param name="bib" select="."/> + </xsl:call-template> + </xsl:when> + <xsl:when test=".//seriesInfo/@name='RFC' and (@target='http://www.rfc-editor.org' or @target='https://www.rfc-editor.org') and starts-with(front/title,'Errata ID ') and front/author/organization='RFC Errata'"> + <!-- check for erratum link --> + <xsl:variable name="eid" select="normalize-space(substring(front/title,string-length('Errata ID ')))"/> + <xsl:call-template name="compute-rfc-erratum-uri"> + <xsl:with-param name="eid" select="$eid"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="@target"> + <xsl:if test="normalize-space(@target)=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">invalid (empty) target attribute in reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="normalize-space(@target)" /> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param 
name="bib" select="."/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="reference"> + <xsl:call-template name="check-no-text-content"/> + + <!-- check for reference to reference --> + <xsl:variable name="anchor" select="@anchor"/> + <xsl:choose> + <xsl:when test="not(@anchor)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">missing anchor on reference: <xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(ancestor::ed:del) and (ancestor::rfc and not(key('xref-item',$anchor)))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unused reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(ancestor::ed:del) and (not(ancestor::rfc) and not($src//xref[@target=$anchor]))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unused (included) reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <!-- check normative/informative --> + <xsl:variable name="t-r-is-normative" select="ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="r-is-normative" select="$t-r-is-normative/@x:nrm='true'"/> + <xsl:if test="$r-is-normative and not(ancestor::ed:del)"> + <xsl:variable name="tst"> + <xsl:for-each select="key('xref-item',$anchor)"> + <xsl:variable name="t-is-normative" select="ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="is-normative" select="$t-is-normative/@x:nrm='true'"/> + <xsl:if test="$is-normative">OK</xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$tst=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">all references to the normative reference '<xsl:value-of select="@anchor"/>' appear to be informative</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:call-template name="check-anchor"/> + + <xsl:variable name="target"> + <xsl:call-template name="link-ref-title-to"/> + </xsl:variable> + + <dt id="{@anchor}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="del-node" select="ancestor::ed:del"/> + <xsl:variable name="rep-node" select="ancestor::ed:replace"/> + <xsl:variable name="deleted" select="$del-node and ($rep-node/ed:ins)"/> + <xsl:for-each select="../.."> + <xsl:call-template name="insert-issue-pointer"> + <xsl:with-param name="deleted-anchor" select="$deleted"/> + </xsl:call-template> + </xsl:for-each> + <xsl:call-template name="reference-name"/> + </dt> + + <dd> + <xsl:call-template name="insertInsDelClass"/> + <xsl:for-each select="front/author"> + <xsl:choose> + <xsl:when test="@surname and @surname!=''"> + <xsl:variable name="displayname"> + <xsl:call-template name="displayname-for-author"> + <xsl:with-param name="not-reversed" select="position()=last() and position()!=1"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="address/email and $xml2rfc-linkmailto!='no'"> + <a href="mailto:{address/email}"><xsl:value-of select="$displayname" /></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$displayname" /> + </xsl:otherwise> + </xsl:choose> + + <xsl:choose> + <xsl:when test="position()=last() - 1"> + <xsl:if test="last() &gt; 2">,</xsl:if> + <xsl:text> and </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="organization/text()"> + <xsl:choose> + <xsl:when test="address/uri"> + <a 
href="{address/uri}"><xsl:value-of select="organization" /></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="organization" /> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="organization/@ascii"> + <xsl:value-of select="concat(' (',normalize-space(organization/@ascii),')')"/> + </xsl:if> + <xsl:choose> + <xsl:when test="position()=last() - 1"> + <xsl:if test="last() &gt; 2">,</xsl:if> + <xsl:text> and </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> + </xsl:for-each> + + <xsl:variable name="quoted" select="not(front/title/@x:quotes='false') and not(@quoteTitle='false')"/> + <xsl:if test="$quoted">&#8220;</xsl:if> + <xsl:choose> + <xsl:when test="string-length($target) &gt; 0"> + <a href="{$target}"><xsl:value-of select="normalize-space(front/title)" /></a> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(front/title)" /> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="$quoted">&#8221;</xsl:if> + + <xsl:if test="front/title/@ascii!=''"> + <xsl:text> (</xsl:text> + <xsl:if test="$quoted">&#8220;</xsl:if> + <xsl:value-of select="normalize-space(front/title/@ascii)" /> + <xsl:if test="$quoted">&#8221;</xsl:if> + <xsl:text>)</xsl:text> + </xsl:if> + + <xsl:variable name="si" select="seriesInfo|front/seriesInfo"/> + <xsl:if test="seriesInfo and front/seriesInfo"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">seriesInfo present both on reference and reference/front</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="rfcs" select="count($si[@name='RFC'])"/> + + <xsl:variable name="doi"> + <xsl:call-template name="compute-doi"/> + </xsl:variable> + + <xsl:for-each select="$si"> + <xsl:text>, </xsl:text> + <xsl:choose> + <xsl:when test="not(@name) and not(@value) and ./text()"><xsl:value-of select="." 
/></xsl:when> + <xsl:when test="@name='RFC' and $rfcs > 1"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="@value"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''">&#0160;<xsl:value-of select="@value" /></xsl:if> + </a> + </xsl:when> + <xsl:when test="@name='DOI'"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-doi-uri"> + <xsl:with-param name="doi" select="@value"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''">&#0160;<xsl:value-of select="@value" /></xsl:if> + </a> + <xsl:if test="$doi!='' and $doi!=@value"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unexpected DOI for RFC, found <xsl:value-of select="@value"/>, expected <xsl:value-of select="$doi"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="@name='ISBN'"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-isbn-uri"> + <xsl:with-param name="isbn" select="@value"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''">&#0160;<xsl:value-of select="@value" /></xsl:if> + </a> + </xsl:when> + <xsl:when test="@name='Internet-Draft' and $rfcno > 7375"> + <!-- special case in RFC formatting since 2015 --> + <xsl:text>Work in Progress, </xsl:text> + <xsl:value-of select="@value" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@name" /> + <xsl:if test="@value!=''">&#0160;<xsl:value-of select="@value" /></xsl:if> + <xsl:if test="translate(@name,$ucase,$lcase)='internet-draft'"> (work in progress)</xsl:if> + </xsl:otherwise> + </xsl:choose> + + <!-- check that BCP FYI STD RFC are in the right order --> + <xsl:if test="(@name='BCP' or @name='FYI' or @name='STD') and preceding-sibling::seriesInfo[@name='RFC']"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">RFC number preceding <xsl:value-of select="@name"/> number in reference '<xsl:value-of select="../@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + + </xsl:for-each> + + <!-- Insert DOI for RFCs --> + <xsl:if test="$xml2rfc-ext-insert-doi='yes' and $doi!='' and not($si[@name='DOI'])"> + <xsl:text>, </xsl:text> + <xsl:variable name="uri"> + <xsl:call-template name="compute-doi-uri"> + <xsl:with-param name="doi" select="$doi"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}">DOI&#160;<xsl:value-of select="$doi"/></a> + </xsl:if> + + <!-- avoid hacks using seriesInfo when it's not really series information --> + <xsl:for-each select="x:prose|refcontent"> + <xsl:text>, </xsl:text> + <xsl:apply-templates/> + </xsl:for-each> + + <xsl:if test="not(front/date)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">&lt;date&gt; missing in reference '<xsl:value-of select="@anchor"/>' (note that it can be empty)</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="front/date/@year != ''"> + <xsl:if test="string(number(front/date/@year)) = 'NaN'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">date/@year should be a number: '<xsl:value-of select="front/date/@year"/>' in reference '<xsl:value-of select="@anchor"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:text>, </xsl:text> + <xsl:if test="front/date/@month!=''"><xsl:value-of select="front/date/@month" />&#0160;</xsl:if> + <xsl:value-of select="front/date/@year" /> + 
</xsl:if> + + <xsl:choose> + <xsl:when test="string-length(normalize-space(@target)) &gt; 0"> + <xsl:text>, &lt;</xsl:text> + <a href="{normalize-space(@target)}"><xsl:value-of select="normalize-space(@target)"/></a> + <xsl:text>&gt;</xsl:text> + </xsl:when> + <xsl:when test="$xml2rfc-ext-link-rfc-to-info-page='yes' and $si[@name='BCP'] and starts-with(@anchor, 'BCP')"> + <xsl:text>, &lt;</xsl:text> + <xsl:variable name="uri" select="concat('http://www.rfc-editor.org/info/bcp',$si[@name='BCP']/@value)"/> + <a href="{$uri}"><xsl:value-of select="$uri"/></a> + <xsl:text>&gt;</xsl:text> + </xsl:when> + <xsl:when test="$xml2rfc-ext-link-rfc-to-info-page='yes' and $si[@name='RFC']"> + <xsl:text>, &lt;</xsl:text> + <xsl:variable name="uri" select="concat('http://www.rfc-editor.org/info/rfc',$si[@name='RFC']/@value)"/> + <a href="{$uri}"><xsl:value-of select="$uri"/></a> + <xsl:text>&gt;</xsl:text> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <xsl:text>.</xsl:text> + + <xsl:for-each select="annotation"> + <br /> + <xsl:apply-templates /> + </xsl:for-each> + + </dd> + +</xsl:template> + +<xsl:template match="references"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:variable name="refseccount" select="count(/rfc/back/references)+count(/rfc/back/ed:replace/ed:ins/references)"/> + + <xsl:choose> + <!-- insert pseudo section when needed --> + <xsl:when test="not(preceding::references) and $refseccount!=1"> + <xsl:call-template name="insert-conditional-hrule"/> + <section id="{$anchor-pref}references"> + <xsl:call-template name="insert-conditional-pagebreak"/> + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-references-section-number"/> + </xsl:variable> + <xsl:if test="$sectionNumber!=''"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="$sectionNumber"/> + </xsl:call-template> + </xsl:if> + <h2 id="{$anchor-pref}section.{$sectionNumber}"> + <a href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$sectionNumber"/> + </xsl:call-template> + </a> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-refparent"/> + </h2> + <xsl:for-each select=".|following-sibling::references"> + <xsl:call-template name="make-references"> + <xsl:with-param name="nested" select="true()"/> + </xsl:call-template> + </xsl:for-each> + </section> + </xsl:when> + <xsl:when test="preceding::references"> + <!-- already processed --> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="make-references"> + <xsl:with-param name="nested" select="false()"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="make-references"> + <xsl:param name="nested"/> + + <xsl:variable name="name"> + <xsl:if test="ancestor::ed:del"> + <xsl:text>del-</xsl:text> + </xsl:if> + <xsl:number level="any"/> + </xsl:variable> + + <xsl:variable name="elemtype"> + <xsl:choose> + <xsl:when test="$nested">h3</xsl:when> + <xsl:otherwise>h2</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="name"> + <xsl:if test="@title"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @title attribute and name child node present</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="render-name"> + <xsl:with-param name="n" select="name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="not(@title) or @title=''"><xsl:value-of 
select="$xml2rfc-refparent"/></xsl:when> + <xsl:otherwise><xsl:value-of select="@title"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-section-number"/> + </xsl:variable> + + <xsl:variable name="anchorpostfix"> + <xsl:if test="$nested">.<xsl:value-of select="$name"/></xsl:if> + </xsl:variable> + + <section id="{$anchor-pref}references{$anchorpostfix}"> + <xsl:if test="$name='1'"> + <xsl:call-template name="insert-conditional-pagebreak"/> + </xsl:if> + <xsl:if test="$sectionNumber!=''"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="$sectionNumber"/> + </xsl:call-template> + </xsl:if> + <xsl:element name="{$elemtype}"> + <xsl:attribute name="id"><xsl:value-of select="concat($anchor-pref,'section.',$sectionNumber)"/></xsl:attribute> + <a href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$sectionNumber"/> + </xsl:call-template> + </a> + <xsl:text> </xsl:text> + <xsl:copy-of select="$title"/> + </xsl:element> + + <xsl:variable name="included" select="exslt:node-set($includeDirectives)/myns:include[@in=generate-id(current())]/reference"/> + <dl class="{$css-reference}"> + <xsl:choose> + <xsl:when test="$xml2rfc-sortrefs='yes' and $xml2rfc-symrefs!='no'"> + <xsl:apply-templates select="*|$included"> + <xsl:sort select="concat(/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor,.//ed:ins//reference/@anchor)" /> + </xsl:apply-templates> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="*|$included"/> + </xsl:otherwise> + </xsl:choose> + </dl> + </section> +</xsl:template> + +<xsl:template match="xi:include"> + <xsl:choose> + <xsl:when test="not(parent::references)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'Support for x:include is restricted to child elements of &lt;references>'"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- handled elsewhere --> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- processed earlier --> +<xsl:template match="references/name"/> + +<xsl:template match="rfc"> + <xsl:call-template name="check-no-text-content"/> + <xsl:variable name="ignored"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="//processing-instruction('rfc-ext')"/> + <xsl:with-param name="attr" select="'SANITYCHECK'"/> + </xsl:call-template> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="//processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'SANITYCHECK'"/> + </xsl:call-template> + </xsl:variable> + + <xsl:variable name="lang"> + <xsl:call-template name="get-lang" /> + </xsl:variable> + + <html lang="{$lang}"> + <head> + <title> + <xsl:apply-templates select="front/title" mode="get-text-content" /> + </title> + <xsl:call-template name="insertScripts" /> + <xsl:call-template name="insertCss" /> + <!-- <link rel="alternate stylesheet" type="text/css" media="screen" title="Plain (typewriter)" href="rfc2629tty.css" /> --> + + <!-- link elements --> + <xsl:if test="$xml2rfc-toc='yes'"> + <link rel="Contents" href="#{$anchor-pref}toc" /> + </xsl:if> + <xsl:if test="$xml2rfc-authorship!='no'"> + <link rel="Author" href="#{$anchor-pref}authors" /> + </xsl:if> + <xsl:if test="$xml2rfc-private=''"> + <xsl:choose> + <xsl:when test="$no-copylong"> + <link rel="Copyright" href="#{$anchor-pref}copyrightnotice" /> + </xsl:when> + <xsl:otherwise> + <link 
rel="Copyright" href="#{$anchor-pref}copyright" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + <xsl:if test="$has-index"> + <link rel="Index" href="#{$anchor-pref}index" /> + </xsl:if> + <xsl:apply-templates select="/" mode="links" /> + <xsl:for-each select="x:link|link"> + <link> + <xsl:choose> + <xsl:when test="self::x:link and @basename"> + <xsl:attribute name="href"> + <xsl:value-of select="concat(@basename,'.',$outputExtension)"/> + </xsl:attribute> + <xsl:copy-of select="@rel|@title" /> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="@*" /> + </xsl:otherwise> + </xsl:choose> + </link> + </xsl:for-each> + <xsl:if test="@number"> + <link rel="Alternate" title="Authoritative ASCII Version" href="http://www.ietf.org/rfc/rfc{@number}.txt" /> + <link rel="Help" title="RFC-Editor's Status Page" href="http://www.rfc-editor.org/info/rfc{@number}" /> + <link rel="Help" title="Additional Information on tools.ietf.org" href="https://tools.ietf.org/html/rfc{@number}"/> + </xsl:if> + + <!-- generator --> + <xsl:variable name="gen"> + <xsl:call-template name="get-generator" /> + </xsl:variable> + <meta name="generator" content="{$gen}" /> + + <!-- keywords --> + <xsl:if test="front/keyword"> + <xsl:variable name="keyw"> + <xsl:call-template name="get-keywords" /> + </xsl:variable> + <meta name="keywords" content="{$keyw}" /> + </xsl:if> + + <xsl:if test="$xml2rfc-ext-support-rfc2731!='no'"> + <!-- Dublin Core Metadata --> + <link rel="schema.dcterms" href="http://purl.org/dc/terms/" /> + + <!-- DC creator, see RFC2731 --> + <xsl:for-each select="front/author"> + <xsl:variable name="initials"> + <xsl:call-template name="format-initials"/> + </xsl:variable> + <meta name="dcterms.creator" content="{concat(@surname,', ',$initials)}" /> + </xsl:for-each> + + <xsl:if test="$xml2rfc-private=''"> + <xsl:choose> + <xsl:when test="@number"> + <meta name="dcterms.identifier" content="urn:ietf:rfc:{@number}" /> + </xsl:when> + <xsl:when test="@docName"> + <meta name="dcterms.identifier" content="urn:ietf:id:{@docName}" /> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + <meta name="dcterms.issued"> + <xsl:attribute name="content"> + <xsl:value-of select="concat($xml2rfc-ext-pub-year,'-',$pub-month-numeric)"/> + <xsl:if test="$xml2rfc-ext-pub-day != '' and not(@number)"> + <xsl:value-of select="concat('-',format-number($xml2rfc-ext-pub-day,'00'))"/> + </xsl:if> + </xsl:attribute> + </meta> + + <xsl:if test="@obsoletes!=''"> + <xsl:call-template name="rfclist-for-dcmeta"> + <xsl:with-param name="list" select="@obsoletes"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <xsl:if test="front/abstract"> + <meta name="dcterms.abstract" content="{normalize-space(front/abstract)}" /> + </xsl:if> + + <xsl:if test="@number"> + <meta name="dcterms.isPartOf" content="urn:issn:2070-1721" /> + </xsl:if> + + </xsl:if> + + <!-- this replicates dcterms.abstract, but is used by Google & friends --> + <xsl:if test="front/abstract"> + <meta name="description" content="{normalize-space(front/abstract)}" /> + </xsl:if> + </head> + + <xsl:call-template name="body" /> + </html> +</xsl:template> + +<xsl:template name="body"> + <body> + <!-- insert onload scripts, when required --> + <xsl:variable name="onload"> + <xsl:if test="$xml2rfc-ext-insert-metadata='yes' and /rfc/@number">getMeta(<xsl:value-of select="/rfc/@number"/>,"rfc.meta");</xsl:if> + <xsl:if test="/rfc/x:feedback">initFeedback();</xsl:if> + <xsl:if test="$xml2rfc-ext-refresh-from!=''">RfcRefresh.initRefresh()</xsl:if> + </xsl:variable> + <xsl:if 
test="$onload!=''"> + <xsl:attribute name="onload"> + <xsl:value-of select="$onload"/> + </xsl:attribute> + </xsl:if> + + <xsl:call-template name="add-start-material" /> + + <!-- insert diagnostics --> + <xsl:call-template name="insert-diagnostics"/> + + <xsl:apply-templates select="front" /> + <xsl:apply-templates select="middle" /> + <xsl:call-template name="back" /> + + <xsl:call-template name="add-end-material" /> + </body> +</xsl:template> + +<xsl:template match="t"> + <xsl:param name="inherited-self-link"/> + + <xsl:variable name="textcontent" select="normalize-space(.)"/> + <xsl:variable name="endswith" select="substring($textcontent,string-length($textcontent))"/> + <xsl:variable name="keepwithnext" select="$endswith=':'"/> + + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + + <xsl:variable name="stype"> + <xsl:choose> + <xsl:when test="ancestor::abstract">abstract</xsl:when> + <xsl:when test="ancestor::note">note</xsl:when> + <xsl:when test="ancestor::boilerplate">boilerplate</xsl:when> + <xsl:otherwise>section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:if test="preceding-sibling::section or preceding-sibling::appendix"> + <xsl:call-template name="inline-warning"> + <xsl:with-param name="msg">The paragraph below is misplaced; maybe a section is closed in the wrong place: </xsl:with-param> + <xsl:with-param name="msg2"><xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + </xsl:if> + + <div> + <xsl:if test="$p!='' and not(ancestor::list) and not(ancestor::ol) and not(ancestor::ul) and not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <xsl:attribute name="id"><xsl:value-of select="concat($anchor-pref,$stype,'.',$p)"/></xsl:attribute> + </xsl:if> + <xsl:if test="$keepwithnext"> + <xsl:attribute name="class">avoidbreakafter</xsl:attribute> + </xsl:if> + <xsl:apply-templates mode="t-content" select="node()[1]"> + <xsl:with-param name="inherited-self-link" select="$inherited-self-link"/> + <xsl:with-param name="anchor" select="@anchor"/> + </xsl:apply-templates> + </div> +</xsl:template> + +<!-- for t-content, dispatch to default templates if it's block-level content --> +<xsl:template mode="t-content" match="list|figure|texttable"> + <!-- <xsl:comment>t-content block-level</xsl:comment> --> + <xsl:apply-templates select="." /> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content" /> +</xsl:template> + +<!-- ... otherwise group into p elements --> +<xsl:template mode="t-content" match="*|node()"> + <xsl:param name="inherited-self-link"/> + <xsl:param name="anchor"/> + + <xsl:variable name="p"> + <xsl:choose> + <xsl:when test="self::text()"> + <xsl:for-each select=".."> + <xsl:call-template name="get-paragraph-number" /> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-paragraph-number" /> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <!-- do not open a new p element if this is a whitespace-only text node and no siblings follow --> + <xsl:if test="not(self::text() and normalize-space(.)='' and not(following-sibling::node()))"> + <xsl:variable name="textcontent"> + <xsl:apply-templates mode="t-content2" select="." 
/> + </xsl:variable> + + <xsl:if test="normalize-space($textcontent)!=''"> + <p> + <xsl:if test="$anchor!=''"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:variable name="stype"> + <xsl:choose> + <xsl:when test="ancestor::abstract">abstract</xsl:when> + <xsl:when test="ancestor::note">note</xsl:when> + <xsl:when test="ancestor::boilerplate">boilerplate</xsl:when> + <xsl:otherwise>section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="anch"> + <xsl:if test="$p!='' and not(ancestor::ed:del) and not(ancestor::ed:ins) and not(ancestor::li) and not(ancestor::x:lt) and not(preceding-sibling::node())"> + <xsl:value-of select="concat($anchor-pref,$stype,'.',$p)"/> + </xsl:if> + </xsl:variable> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:apply-templates mode="t-content2" select="." /> + <xsl:if test="$xml2rfc-ext-paragraph-links='yes'"> + <xsl:if test="$anch!=''"> + <a class='self' href='#{$anch}'>&#xb6;</a> + </xsl:if> + <xsl:if test="$inherited-self-link!=''"> + <a class='self' href='#{$inherited-self-link}'>&#xb6;</a> + </xsl:if> + </xsl:if> + </p> + </xsl:if> + </xsl:if> + <xsl:apply-templates mode="t-content" select="following-sibling::*[self::list or self::figure or self::texttable][1]" /> +</xsl:template> + +<xsl:template mode="t-content2" match="*"> + <xsl:apply-templates select="." /> + <xsl:if test="not(following-sibling::node()[1] [self::list or self::figure or self::texttable])"> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content2" /> + </xsl:if> +</xsl:template> + +<xsl:template mode="t-content2" match="text()"> + <xsl:apply-templates select="." /> + <xsl:if test="not(following-sibling::node()[1] [self::list or self::figure or self::texttable])"> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content2" /> + </xsl:if> +</xsl:template> + +<xsl:template mode="t-content2" match="comment()|processing-instruction()"> + <xsl:apply-templates select="." 
/> + <xsl:if test="not(following-sibling::node()[1] [self::list or self::figure or self::texttable])"> + <xsl:apply-templates select="following-sibling::node()[1]" mode="t-content2" /> + </xsl:if> +</xsl:template> + +<xsl:template match="title"> + <xsl:variable name="t" select="normalize-space(.)"/> + <xsl:variable name="tlen" select="string-length($t)"/> + <xsl:variable name="alen" select="string-length(@abbrev)"/> + + <xsl:if test="@abbrev and $alen > 40"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">title/@abbrev too long (max 40 characters)</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$tlen > 40 and (not(@abbrev) or @abbrev='')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">title too long, should supply title/@abbrev attribute with less than 40 characters</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$tlen &lt;= 40 and @abbrev!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">title/@abbrev was specified despite the title being short enough (<xsl:value-of select="$tlen"/>)</xsl:with-param> + <xsl:with-param name="msg2">Title: '<xsl:value-of select="normalize-space($t)"/>', abbreviated title='<xsl:value-of select="@abbrev"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:apply-templates /> +</xsl:template> + +<xsl:template name="insertTitle"> + <xsl:choose> + <xsl:when test="@ed:old-title"> + <del> + <xsl:if test="ancestor-or-self::*[@ed:entered-by] and @ed:datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@ed:datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="@ed:old-title"/> + </del> + <ins> + <xsl:if test="ancestor-or-self::*[@ed:entered-by] and @ed:datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@ed:datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="@title"/> + </ins> + </xsl:when> + <xsl:when test="name"> + <xsl:if test="@title"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @title attribute and name child node present</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="render-name"> + <xsl:with-param name="n" select="name/node()"/> + <xsl:with-param name="strip-links" select="not(ancestor-or-self::figure)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- irefs that are section-level thus can use the section anchor --> +<xsl:variable name="section-level-irefs" select="//section/iref[count(preceding-sibling::*[not(self::iref) and not(self::x:anchor-alias) and not(self::name)])=0]"/> + +<xsl:template match="section|appendix"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:if test="self::appendix"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">The "appendix" element is deprecated, use "section" inside "back" instead.</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="sectionNumber"> + <xsl:choose> + <xsl:when test="ancestor::boilerplate"></xsl:when> + <xsl:otherwise><xsl:call-template name="get-section-number" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:if test="not(ancestor::section) and not(ancestor::boilerplate)"> + <xsl:call-template name="insert-conditional-hrule"/> + </xsl:if> + + <xsl:variable name="elemtype"> + <xsl:choose> + <xsl:when 
test="count(ancestor::section) &lt;= 3">h<xsl:value-of select="2 + count(ancestor::section)"/></xsl:when> + <xsl:otherwise>h6</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:if test="$xml2rfc-ext-insert-metadata='yes' and $rfcno!='' and @anchor='rfc.status'"> + <aside id="{$anchor-pref}meta" class="{$css-docstatus}"></aside> + </xsl:if> + + <xsl:variable name="classes"><xsl:if test="@removeInRFC='true'">rfcEditorRemove</xsl:if></xsl:variable> + + <section> + <xsl:call-template name="copy-anchor"/> + + <xsl:if test="normalize-space($classes)!=''"> + <xsl:attribute name="class"><xsl:value-of select="normalize-space($classes)"/></xsl:attribute> + </xsl:if> + + <xsl:if test="$sectionNumber!=''"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="$sectionNumber"/> + </xsl:call-template> + </xsl:if> + + <xsl:element name="{$elemtype}"> + <xsl:if test="$sectionNumber!=''"> + <xsl:attribute name="id"><xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$sectionNumber"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="$sectionNumber='1' or $sectionNumber='A'"> + <!-- pagebreak, this the first section --> + <xsl:attribute name="class">np</xsl:attribute> + </xsl:when> + <xsl:when test="not(ancestor::section) and not(ancestor::boilerplate)"> + <xsl:call-template name="insert-conditional-pagebreak"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + + <xsl:call-template name="insertInsDelClass" /> + + <xsl:if test="$sectionNumber!='' and not(contains($sectionNumber,'unnumbered-'))"> + <a href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$sectionNumber"/> + </xsl:call-template> + </a> + <xsl:text>&#0160;</xsl:text> + </xsl:if> + + <!-- issue tracking? 
--> + <xsl:if test="@ed:resolves"> + <xsl:call-template name="insert-issue-pointer"/> + </xsl:if> + + <xsl:call-template name="check-anchor"/> + <xsl:variable name="anchor"> + <xsl:choose> + <xsl:when test="@anchor"><xsl:value-of select="@anchor"/></xsl:when> + <xsl:otherwise><xsl:call-template name="sluggy-anchor"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$anchor!=''"> + <a href="#{$anchor}"><xsl:call-template name="insertTitle"/></a> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="insertTitle"/> + </xsl:otherwise> + </xsl:choose> + </xsl:element> + + <xsl:if test="@removeInRFC='true' and t[1]!=$section-removeInRFC"> + <xsl:variable name="t"> + <t><xsl:value-of select="$section-removeInRFC"/></t> + </xsl:variable> + <xsl:variable name="link" select="concat($anchor-pref,'section.',$sectionNumber,'.p.1')"/> + <div id="{$link}"> + <xsl:apply-templates mode="t-content" select="exslt:node-set($t)//text()"> + <xsl:with-param name="inherited-self-link" select="$link"/> + </xsl:apply-templates> + </div> + </xsl:if> + + <!-- continue with all child elements but the irefs processed above --> + <xsl:for-each select="*"> + <xsl:if test="count(.|$section-level-irefs)!=count($section-level-irefs)"> + <xsl:apply-templates select="."/> + </xsl:if> + </xsl:for-each> + </section> +</xsl:template> + +<!-- errata handling --> +<xsl:template name="insert-errata"> + <xsl:param name="section"/> + <xsl:variable name="es" select="$errata-parsed[section=$section or (not(section) and $section='1')]"/> + <xsl:if test="$es"> + <aside class="{$css-erratum}"> + <xsl:for-each select="$es"> + <xsl:sort select="@eid" data-type="number"/> + <div> + <xsl:variable name="tooltip"> + <xsl:value-of select="@reported-by"/> + <xsl:text>, </xsl:text> + <xsl:value-of select="@reported"/> + <xsl:if test="@type"> (<xsl:value-of select="@type"/>)</xsl:if> + </xsl:variable> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-erratum-uri"> + <xsl:with-param name="eid" select="@eid"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}" title="{$tooltip}">Erratum <xsl:value-of select="@eid"/></a> + <xsl:choose> + <xsl:when test="@status='Verified'"><xsl:text> </xsl:text><span title="verified">&#x2714;</span></xsl:when> + <xsl:when test="@status='Reported'"><xsl:text> </xsl:text><span title="reported">&#x2709;</span></xsl:when> + <xsl:when test="@status='Held for Document Update'"><xsl:text> </xsl:text><span title="held for update">&#x2700;</span></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </div> + </xsl:for-each> + </aside> + </xsl:if> +</xsl:template> + +<!-- already processed by insertTitle --> +<xsl:template match="note/name"/> +<xsl:template match="section/name"/> + +<xsl:template match="spanx[@style='emph' or not(@style)]|em"> + <em> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </em> +</xsl:template> + +<xsl:template match="spanx[@style='verb' or @style='vbare']|tt"> + <span class="tt"> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </span> +</xsl:template> + +<xsl:template match="spanx[@style='strong']|strong"> + <strong> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates /> + </strong> +</xsl:template> + +<xsl:template match="spanx[@style!='']" priority="0.1"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown spanx style attribute '<xsl:value-of select="@style"/>' ignored</xsl:with-param> + </xsl:call-template> + <span> + <xsl:call-template name="copy-anchor"/> + 
<xsl:apply-templates /> + </span> +</xsl:template> + +<xsl:template name="insert-blank-lines"> + <xsl:param name="no"/> + <xsl:choose> + <xsl:when test="$no >= $xml2rfc-ext-vspace-pagebreak"> + <br/> + <!-- done; this probably was an attempt to generate a pagebreak --> + </xsl:when> + <xsl:when test="$no &lt;= 0"> + <br/> + <!-- done --> + </xsl:when> + <xsl:otherwise> + <br/> + <xsl:call-template name="insert-blank-lines"> + <xsl:with-param name="no" select="$no - 1"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="vspace[not(@blankLines)]"> + <br /> +</xsl:template> + +<xsl:template match="vspace"> + <xsl:call-template name="insert-blank-lines"> + <xsl:with-param name="no" select="@blankLines"/> + </xsl:call-template> +</xsl:template> + +<!-- keep the root for the case when we process XSLT-inline markup --> +<xsl:variable name="src" select="/" /> + +<xsl:template name="render-section-ref"> + <xsl:param name="from" /> + <xsl:param name="to" /> + + <xsl:variable name="refname"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-section-type"> + <xsl:with-param name="prec" select="$from/preceding-sibling::node()[1]" /> + </xsl:call-template> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="refnum"> + <xsl:for-each select="$to"> + <xsl:call-template name="get-section-number" /> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="$to/name"> + <xsl:value-of select="$to/name"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$to/@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:attribute name="title"> + <xsl:value-of select="$title" /> + </xsl:attribute> + <xsl:choose> + <xsl:when test="$from/@format='counter'"> + <xsl:value-of select="$refnum"/> + </xsl:when> + <xsl:when test="$from/@format='title'"> + <xsl:choose> + <xsl:when test="$to/name"> + <xsl:apply-templates select="$to/name/node()"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$to/@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$from/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat($refname,'&#160;',$refnum))"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-section-xref-format"> + <xsl:param name="default"/> + <xsl:choose> + <xsl:when test="@sectionFormat"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@sectionFormat is deprecated, use @x:fmt instead</xsl:with-param> + </xsl:call-template> + <xsl:if test="@x:fmt"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @x:fmt and @sectionFormat specified</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="@sectionFormat='parens' or @sectionFormat='of' or @sectionFormat='comma' or @sectionFormat='section' or @sectionFormat='number-only'"> + <xsl:value-of select="@sectionFormat"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown format for @sectionFormat</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="@x:fmt"> + <xsl:choose> + <xsl:when test="@x:fmt='()'">parens</xsl:when> + <xsl:when test="@x:fmt='of'">of</xsl:when> + <xsl:when test="@x:fmt=','">comma</xsl:when> + <xsl:when test="@x:fmt='none'">none</xsl:when> + <xsl:when test="@x:fmt='sec'">section</xsl:when> + <xsl:when test="@x:fmt='number'">number-only</xsl:when> + 
<xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unknown format for @x:fmt</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$default"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-section-xref-section"> + <xsl:choose> + <xsl:when test="@section"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@section is deprecated, use @x:sec instead</xsl:with-param> + </xsl:call-template> + <xsl:if test="@x:sec"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">both @x:sec and @section specified</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="@section"/> + </xsl:when> + <xsl:when test="@x:sec"> + <xsl:value-of select="@x:sec"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template match="xref[node()]"> + + <xsl:variable name="target" select="@target" /> + <xsl:variable name="node" select="key('anchor-item',$target)" /> + <xsl:variable name="anchor"><xsl:value-of select="$anchor-pref"/>xref.<xsl:value-of select="@target"/>.<xsl:number level="any" count="xref[@target=$target]"/></xsl:variable> + + <xsl:variable name="sfmt"> + <xsl:call-template name="get-section-xref-format"/> + </xsl:variable> + + <xsl:variable name="ssec"> + <xsl:call-template name="get-section-xref-section"/> + </xsl:variable> + + <xsl:choose> + <!-- $sfmt='none': do not generate any links --> + <xsl:when test="$sfmt='none'"> + <xsl:choose> + <xsl:when test="$node/self::reference"> + <cite title="{normalize-space($node/front/title)}"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:if test="//iref[@x:for-anchor=$target] | //iref[@x:for-anchor='' and ../@anchor=$target]"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates/> + </cite> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + <!-- Other $sfmt values than "none": unsupported --> + <xsl:when test="$sfmt!='' and $sfmt!='none'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('unknown xref section format extension: ',$sfmt)"/> + </xsl:call-template> + </xsl:when> + + <!-- Section links --> + <xsl:when test="$node/self::section or $node/self::appendix"> + <xsl:choose> + <xsl:when test="@format='none'"> + <a href="#{@target}"> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:if test="//iref[@x:for-anchor=$target] | //iref[@x:for-anchor='' and ../@anchor=$target]"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates/> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates/> + <xsl:text> (</xsl:text> + <a href="#{@target}"> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:if test="//iref[@x:for-anchor=$target] | //iref[@x:for-anchor='' and ../@anchor=$target]"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="render-section-ref"> + <xsl:with-param name="from" select="."/> + <xsl:with-param name="to" select="$node"/> + </xsl:call-template> + </a> + <xsl:text>)</xsl:text> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + 
<xsl:when test="$node/self::cref and $xml2rfc-comments='no'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">xref to cref, but comments aren't included in the output</xsl:with-param> + </xsl:call-template> + </xsl:when> + + <xsl:otherwise> + <!-- check normative/informative --> + <xsl:variable name="t-is-normative" select="ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="is-normative" select="$t-is-normative/@x:nrm='true'"/> + <xsl:if test="count($node)=1 and $is-normative"> + <xsl:variable name="t-r-is-normative" select="$node/ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="r-is-normative" select="$t-r-is-normative/@x:nrm='true'"/> + <xsl:if test="not($r-is-normative)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Potentially normative reference to ',@target,' not referenced normatively')"/> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <a href="#{$target}"> + <xsl:if test="@format='none'"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + </xsl:if> + + <xsl:apply-templates /> + </a> + <xsl:if test="not(@format='none')"> + <xsl:for-each select="$src/rfc/back/references//reference[@anchor=$target]"> + <xsl:text> </xsl:text> + <cite title="{normalize-space(front/title)}"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="reference-name"/> + </cite> + </xsl:for-each> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:key name="iref-xanch" match="iref[@x:for-anchor]" use="@x:for-anchor"/> + +<xsl:template match="xref[not(node())]"> + + <xsl:variable name="xref" select="."/> + <xsl:variable name="anchor"><xsl:value-of select="$anchor-pref"/>xref.<xsl:value-of select="$xref/@target"/>.<xsl:number level="any" count="xref[@target=$xref/@target]"/></xsl:variable> + + <xsl:variable name="sfmt"> + <xsl:call-template name="get-section-xref-format"> + <xsl:with-param name="default"> + <xsl:choose> + <xsl:when test="ancestor::artwork">comma</xsl:when> + <xsl:otherwise>of</xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + </xsl:variable> + + <xsl:variable name="ssec"> + <xsl:call-template name="get-section-xref-section"/> + </xsl:variable> + + <!-- ensure we have the right context, this <xref> may be processed from within the boilerplate --> + <xsl:for-each select="$src"> + + <xsl:variable name="node" select="key('anchor-item',$xref/@target)|exslt:node-set($includeDirectives)//reference[@anchor=$xref/@target]"/> + <xsl:if test="count($node)=0 and not($node/ancestor::ed:del)"> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Undefined target: ',$xref/@target)"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + + <xsl:choose> + + <!-- Section links --> + <xsl:when test="$node/self::section or $node/self::appendix"> + <a href="#{$xref/@target}"> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:if test="key('iref-xanch',$xref/@target) | key('iref-xanch','')[../@anchor=$xref/@target]"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:call-template name="render-section-ref"> + <xsl:with-param name="from" select="$xref"/> + <xsl:with-param name="to" select="$node"/> + </xsl:call-template> + </a> + </xsl:when> + + 
<!-- Figure links --> + <xsl:when test="$node/self::figure"> + <a href="#{$xref/@target}"> + <xsl:variable name="figcnt"> + <xsl:for-each select="$node"> + <xsl:call-template name="get-figure-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$xref/@format='counter'"> + <xsl:value-of select="$figcnt" /> + </xsl:when> + <xsl:when test="$xref/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$xref/@format='title'"> + <xsl:value-of select="$node/@title" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat('Figure&#160;',$figcnt))"/> + </xsl:otherwise> + </xsl:choose> + </a> + </xsl:when> + + <!-- Table links --> + <xsl:when test="$node/self::texttable or $node/self::table"> + <a href="#{$xref/@target}"> + <xsl:variable name="tabcnt"> + <xsl:for-each select="$node"> + <xsl:call-template name="get-table-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$xref/@format='counter'"> + <xsl:value-of select="$tabcnt" /> + </xsl:when> + <xsl:when test="$xref/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$xref/@format='title'"> + <xsl:choose> + <xsl:when test="$node/self::table"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="$node/name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$node/@title" /> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat('Table&#160;',$tabcnt))"/> + </xsl:otherwise> + </xsl:choose> + </a> + </xsl:when> + + <!-- Paragraph links --> + <xsl:when test="$node/self::t or $node/self::aside or $node/self::blockquote or $node/self::dl or $node/self::ol or $node/self::ul or $node/self::dt or $node/self::li"> + <a href="#{$xref/@target}"> + <xsl:variable name="tcnt"> + <xsl:for-each select="$node"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="pparent" select="$node/.."/> + <xsl:variable name="listtype"> + <xsl:choose> + <xsl:when test="$pparent/self::list"> + <xsl:value-of select="$pparent/@style"/> + </xsl:when> + <xsl:when test="$pparent/self::dl">definition</xsl:when> + <xsl:when test="$pparent/self::ol[@type='a']">letters</xsl:when> + <xsl:when test="$pparent/self::ol[@type='A']">Letters</xsl:when> + <xsl:when test="$pparent/self::ol[@type='i']">rnumbers</xsl:when> + <xsl:when test="$pparent/self::ol[@type='I']">Rnumbers</xsl:when> + <xsl:when test="$pparent/self::ol[string-length(@type)>1]">format <xsl:value-of select="$pparent/self::ol/@type"/></xsl:when> + <xsl:when test="$pparent/self::ol">numbers</xsl:when> + <xsl:otherwise></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="s"> + <xsl:choose> + <xsl:when test="$pparent/@group"> + <xsl:call-template name="ol-start"> + <xsl:with-param name="node" select="$pparent"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$pparent/@start"> + <xsl:value-of select="$pparent/@start"/> + </xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="n"> + <xsl:for-each select="$node"> + <xsl:number/> + </xsl:for-each> + </xsl:variable> + <xsl:variable name="format"> + <xsl:choose> + <xsl:when test="$listtype='letters'">a</xsl:when> + <xsl:when test="$listtype='Letters'">A</xsl:when> + <xsl:when test="$listtype='rnumbers'">i</xsl:when> + <xsl:when test="$listtype='Rnumbers'">I</xsl:when> + <xsl:otherwise>1</xsl:otherwise> + </xsl:choose> + 
</xsl:variable> + <xsl:variable name="listindex"> + <xsl:choose> + <xsl:when test="starts-with($listtype,'format ')"> + <xsl:call-template name="expand-format-percent"> + <xsl:with-param name="format" select="substring-after($listtype,'format ')"/> + <xsl:with-param name="pos" select="$n + $s - 1"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:number value="$n + $s - 1" format="{$format}"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="$xref/@format='counter'"> + <xsl:choose> + <xsl:when test="$listtype!='' and $listindex!=''"> + <xsl:value-of select="$listindex"/> + </xsl:when> + <xsl:when test="$listtype!='' and $listindex=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('Use of format=counter for unsupported list type ',$listtype)"/> + </xsl:call-template> + <xsl:value-of select="$tcnt"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$tcnt"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$xref/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$xref/@format='title'"> + <xsl:choose> + <xsl:when test="$node/self::dt"> + <xsl:apply-templates select="$node/node()"/> + </xsl:when> + <xsl:when test="$node/@hangText"> + <xsl:value-of select="$node/@hangText"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$node/@title" /> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="pn" select="normalize-space(substring-after($tcnt,'p.'))"/> + <xsl:text>Paragraph&#160;</xsl:text> + <xsl:choose> + <xsl:when test="$pn=''"> + <xsl:text>?</xsl:text> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('No paragraph number for link target ',$xref/@target)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$pn"/></xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + </a> + </xsl:when> + + <!-- Comment links --> + <xsl:when test="$node/self::cref"> + <xsl:choose> + <xsl:when test="$xml2rfc-comments!='no'"> + <a href="#{$xref/@target}"> + <xsl:variable name="name"> + <xsl:for-each select="$node"> + <xsl:call-template name="get-comment-name" /> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="$xref/@format='counter'"> + <xsl:value-of select="$name" /> + </xsl:when> + <xsl:when test="$xref/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:when test="$xref/@format='title'"> + <xsl:value-of select="$node/@title" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="normalize-space(concat('Comment&#160;',$name))"/> + </xsl:otherwise> + </xsl:choose> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">xref to cref, but comments aren't included in the output</xsl:with-param> + </xsl:call-template> + </xsl:for-each> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + + <!-- Reference links --> + <xsl:when test="$node/self::reference"> + + <!-- check normative/informative --> + <xsl:variable name="t-is-normative" select="$xref/ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="is-normative" select="$t-is-normative/@x:nrm='true'"/> + <xsl:if test="count($node)=1 and $is-normative"> + <xsl:variable name="t-r-is-normative" select="$node/ancestor-or-self::*[@x:nrm][1]"/> + <xsl:variable name="r-is-normative" select="$t-r-is-normative/@x:nrm='true'"/> + <xsl:if test="not($r-is-normative)"> + <xsl:for-each select="$xref"> + <xsl:call-template 
name="warning"> + <xsl:with-param name="msg" select="concat('Potentially normative reference to ',$xref/@target,' not referenced normatively')"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + </xsl:if> + + <xsl:variable name="href"> + <xsl:call-template name="computed-target"> + <xsl:with-param name="bib" select="$node"/> + <xsl:with-param name="ref" select="$xref"/> + </xsl:call-template> + </xsl:variable> + + <xsl:variable name="sec"> + <xsl:choose> + <xsl:when test="starts-with($xref/@x:rel,'#') and $ssec=''"> + <xsl:call-template name="compute-section-number"> + <xsl:with-param name="bib" select="$node"/> + <xsl:with-param name="ref" select="$xref"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$xref/@x:rel and not(starts-with($xref/@x:rel,'#')) and $ssec=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">x:rel attribute '<xsl:value-of select="$xref/@x:rel"/>' in reference to <xsl:value-of select="$node/@anchor"/> is expected to start with '#'.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$ssec"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="secterm"> + <xsl:choose> + <!-- starts with letter? --> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''">Appendix</xsl:when> + <xsl:otherwise>Section</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="starts-with($xref/@x:rel,'#') and $ssec='' and $node/x:source/@href"> + <xsl:variable name="extdoc" select="document($node/x:source/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor=substring-after($xref//@x:rel,'#')]"/> + <xsl:if test="not($nodes)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="substring-after($xref//@x:rel,'#')"/>' not found in <xsl:value-of select="$node/x:source/@href"/>.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:for-each select="$nodes"> + <xsl:value-of select="@title"/> + </xsl:for-each> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> + </xsl:variable> + + <!-- + Formats: + + parens [XXXX] (Section SS) + comma [XXXX], Section SS + of Section SS of [XXXX] + sec Section SS + number SS + --> + + <xsl:if test="$sfmt!='' and not($sfmt='of' or $sfmt='section' or $sfmt='number-only' or $sfmt='parens' or $sfmt='comma')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('unknown xref section format extension: ',$sfmt)"/> + </xsl:call-template> + </xsl:if> + + <xsl:if test="$sec!=''"> + + <xsl:choose> + <xsl:when test="$sfmt='of' or $sfmt='section'"> + <xsl:choose> + <xsl:when test="$href!=''"> + <a href="{$href}"> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + </xsl:if> + <xsl:if test="$sfmt='section' and $xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="$secterm"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$sec"/> + </a> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$secterm"/><xsl:text> </xsl:text><xsl:value-of select="$sec"/></xsl:otherwise> + </xsl:choose> + <xsl:if test="$sfmt='of'"> + <xsl:text> of </xsl:text> + </xsl:if> + </xsl:when> + <xsl:when test="$sfmt='number-only'"> + <xsl:choose> + <xsl:when test="$href!=''"> + <a href="{$href}"> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of 
select="$title"/></xsl:attribute> + </xsl:if> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="$sec"/> + </a> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$sec"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise /> + </xsl:choose> + </xsl:if> + + <xsl:if test="$sec='' or ($sfmt!='section' and $sfmt!='number-only')"> + <xsl:choose> + <xsl:when test="$xref/@format='none'"> + <!-- Nothing to do --> + </xsl:when> + <xsl:otherwise> + <a href="#{$xref/@target}"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes'"> + <xsl:attribute name="id"><xsl:value-of select="$anchor"/></xsl:attribute> + </xsl:if> + <cite title="{normalize-space($node/front/title)}"> + <xsl:variable name="val"> + <xsl:call-template name="reference-name"> + <xsl:with-param name="node" select="$node" /> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$xref/@format='counter'"> + <!-- remove brackets --> + <xsl:value-of select="substring($val,2,string-length($val)-2)"/> + </xsl:when> + <xsl:when test="$xref/@format='title'"> + <xsl:value-of select="$node/front/title"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$val"/> + </xsl:otherwise> + </xsl:choose> + </cite> + </a> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:if test="$sec!=''"> + <xsl:choose> + <xsl:when test="$sfmt='parens'"> + <xsl:text> (</xsl:text> + <xsl:choose> + <xsl:when test="$href!=''"> + <a href="{$href}"><xsl:value-of select="$secterm"/><xsl:text> </xsl:text><xsl:value-of select="$sec"/></a> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$secterm"/><xsl:text> </xsl:text><xsl:value-of select="$sec"/></xsl:otherwise> + </xsl:choose> + <xsl:text>)</xsl:text> + </xsl:when> + <xsl:when test="$sfmt='comma'"> + <xsl:text>, </xsl:text> + <xsl:choose> + <xsl:when test="$href!=''"> + <a href="{$href}"> + <xsl:if test="$title!=''"> + <xsl:attribute name="title"><xsl:value-of select="$title"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="$secterm"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$sec"/> + </a> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$secterm"/><xsl:text> </xsl:text><xsl:value-of select="$sec"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:if> + </xsl:when> + + <xsl:otherwise> + <xsl:if test="$node"> + <!-- make it the correct context --> + <xsl:for-each select="$xref"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('xref to unknown element: ',name($node))"/> + </xsl:call-template> + </xsl:for-each> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + + +<!-- mark unmatched elements red --> + +<xsl:template match="*"> + <xsl:call-template name="error"> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="msg">no XSLT template for element '<xsl:value-of select="name()"/>'</xsl:with-param> + </xsl:call-template> + <span class="tt {$css-error}">&lt;<xsl:value-of select="name()" />&gt;</span> + <xsl:copy><xsl:apply-templates select="node()|@*" /></xsl:copy> + <span class="tt {$css-error}">&lt;/<xsl:value-of select="name()" />&gt;</span> +</xsl:template> + +<xsl:template match="/"> + <xsl:apply-templates select="*" mode="validate"/> + <xsl:apply-templates select="*" /> +</xsl:template> + +<!-- utility templates --> + +<xsl:template name="collectLeftHeaderColumn"> + <!-- default case --> + <xsl:if 
test="$xml2rfc-private=''"> + <xsl:choose> + <xsl:when test="/rfc/@number and $header-format='2010' and $submissionType='independent'"> + <myns:item>Independent Submission</myns:item> + </xsl:when> + <xsl:when test="/rfc/@number and $header-format='2010' and $submissionType='IETF'"> + <myns:item>Internet Engineering Task Force (IETF)</myns:item> + </xsl:when> + <xsl:when test="/rfc/@number and $header-format='2010' and $submissionType='IRTF'"> + <myns:item>Internet Research Task Force (IRTF)</myns:item> + </xsl:when> + <xsl:when test="/rfc/@number and $header-format='2010' and $submissionType='IAB'"> + <myns:item>Internet Architecture Board (IAB)</myns:item> + </xsl:when> + <xsl:when test="/rfc/front/workgroup and (not(/rfc/@number) or /rfc/@number='')"> + <xsl:choose> + <xsl:when test="starts-with(/rfc/@docName,'draft-ietf-') and $submissionType='IETF'"/> + <xsl:when test="starts-with(/rfc/@docName,'draft-irft-') and $submissionType='IRTF'"/> + <xsl:otherwise> + <xsl:call-template name="info"> + <xsl:with-param name="msg">The /rfc/front/workgroup should only be used for Working/Research Group drafts</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:for-each select="/rfc/front/workgroup"> + <xsl:variable name="v" select="normalize-space(.)"/> + <xsl:variable name="tmp" select="translate($v, $ucase, $lcase)"/> + <xsl:if test="contains($tmp,' research group') or contains($tmp,' working group')"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">No need to include 'Working Group' or 'Research Group' postfix in /rfc/front/workgroup value '<xsl:value-of select="$v"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:variable name="h"> + <!-- when a single name, append WG/RG postfix automatically --> + <xsl:choose> + <xsl:when test="not(contains($v, ' ')) and starts-with(/rfc/@docName,'draft-ietf-') and $submissionType='IETF'"> + <xsl:value-of select="concat($v, ' Working Group')"/> + </xsl:when> + <xsl:when test="not(contains($v, ' ')) and starts-with(/rfc/@docName,'draft-irtf-') and $submissionType='IRTF'"> + <xsl:value-of select="concat($v, ' Research Group')"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$v"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <myns:item> + <xsl:value-of select="$h"/> + </myns:item> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:if test="starts-with(/rfc/@docName,'draft-ietf-') and not(/rfc/front/workgroup)"> + <xsl:call-template name="info"> + <xsl:with-param name="msg">WG submissions should include a /rfc/front/workgroup element</xsl:with-param> + </xsl:call-template> + </xsl:if> + <myns:item>Network Working Group</myns:item> + </xsl:otherwise> + </xsl:choose> + <!-- check <area> value --> + <xsl:for-each select="/rfc/front/area"> + <xsl:variable name="area" select="normalize-space(.)"/> + <xsl:variable name="rallowed"> + <xsl:if test="$pub-yearmonth &lt; 201509"> + <ed:v>Applications</ed:v> + <ed:v>app</ed:v> + </xsl:if> + <xsl:if test="$pub-yearmonth &gt; 201505"> + <ed:v>Applications and Real-Time</ed:v> + <ed:v>art</ed:v> + </xsl:if> + <ed:v>General</ed:v> + <ed:v>gen</ed:v> + <ed:v>Internet</ed:v> + <ed:v>int</ed:v> + <ed:v>Operations and Management</ed:v> + <ed:v>ops</ed:v> + <xsl:if test="$pub-yearmonth &lt; 201509"> + <ed:v>Real-time Applications and Infrastructure</ed:v> + <ed:v>rai</ed:v> + </xsl:if> + <ed:v>Routing</ed:v> + <ed:v>rtg</ed:v> + <ed:v>Security</ed:v> + <ed:v>sec</ed:v> + <ed:v>Transport</ed:v> + <ed:v>tsv</ed:v> + </xsl:variable> + <xsl:variable 
name="allowed" select="exslt:node-set($rallowed)"/> + <xsl:choose> + <xsl:when test="$allowed/ed:v=$area"> + <!-- ok --> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unknown IETF area: "<xsl:value-of select="$area"/>" - should be one of: <xsl:for-each select="$allowed/ed:v"> + <xsl:text>"</xsl:text> + <xsl:value-of select="."/> + <xsl:text>"</xsl:text> + <xsl:if test="position()!=last()"> + <xsl:text>, </xsl:text> + </xsl:if> + </xsl:for-each> + <xsl:text> (as of the publication date of </xsl:text> + <xsl:value-of select="$pub-yearmonth"/> + <xsl:text>)</xsl:text> + </xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + <myns:item> + <xsl:choose> + <xsl:when test="/rfc/@ipr and not(/rfc/@number)">Internet-Draft</xsl:when> + <xsl:otherwise>Request for Comments: <xsl:value-of select="/rfc/@number"/></xsl:otherwise> + </xsl:choose> + </myns:item> + <xsl:if test="/rfc/@obsoletes!=''"> + <myns:item> + <xsl:text>Obsoletes: </xsl:text> + <xsl:call-template name="rfclist"> + <xsl:with-param name="list" select="normalize-space(/rfc/@obsoletes)" /> + </xsl:call-template> + <xsl:if test="not(/rfc/@number)"> (if approved)</xsl:if> + </myns:item> + </xsl:if> + <xsl:if test="/rfc/@seriesNo"> + <myns:item> + <xsl:choose> + <xsl:when test="/rfc/@category='bcp'">BCP: <xsl:value-of select="/rfc/@seriesNo" /></xsl:when> + <xsl:when test="/rfc/@category='info'">FYI: <xsl:value-of select="/rfc/@seriesNo" /></xsl:when> + <xsl:when test="/rfc/@category='std'">STD: <xsl:value-of select="/rfc/@seriesNo" /></xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">There is no IETF document series called '<xsl:value-of select="/rfc/@category"/>'</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="concat(translate(/rfc/@category,$lcase,$ucase),': ',/rfc/@seriesNo)" /> + </xsl:otherwise> + </xsl:choose> + </myns:item> + </xsl:if> + <xsl:if test="/rfc/@updates!=''"> + <myns:item> + <xsl:text>Updates: </xsl:text> + <xsl:call-template name="rfclist"> + <xsl:with-param name="list" select="normalize-space(/rfc/@updates)" /> + </xsl:call-template> + <xsl:if test="not(/rfc/@number)"> (if approved)</xsl:if> + </myns:item> + </xsl:if> + <myns:item> + <xsl:choose> + <xsl:when test="/rfc/@number"> + <xsl:text>Category: </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>Intended status: </xsl:text> + </xsl:otherwise> + </xsl:choose> + <xsl:call-template name="get-category-long" /> + </myns:item> + <xsl:if test="/rfc/@ipr and not(/rfc/@number)"> + <myns:item>Expires: <xsl:call-template name="expirydate" /></myns:item> + </xsl:if> + </xsl:if> + + <!-- private case --> + <xsl:if test="$xml2rfc-private!=''"> + <myns:item><xsl:value-of select="$xml2rfc-private" /></myns:item> + </xsl:if> + + <xsl:if test="$header-format='2010' and /rfc/@number"> + <myns:item>ISSN: 2070-1721</myns:item> + </xsl:if> +</xsl:template> + +<xsl:template name="collectRightHeaderColumn"> + <xsl:for-each select="author"> + <xsl:variable name="initials"> + <xsl:call-template name="format-initials"/> + </xsl:variable> + <xsl:variable name="truncated-initials"> + <xsl:call-template name="truncate-initials"> + <xsl:with-param name="initials" select="$initials"/> + </xsl:call-template> + </xsl:variable> + <xsl:if test="@surname"> + <myns:item> + <xsl:value-of select="$truncated-initials"/> + <xsl:if test="$truncated-initials!=''"> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:value-of select="@surname" /> + <xsl:if 
test="@asciiInitials!='' or @asciiSurname!=''"> + <xsl:text> (</xsl:text> + <xsl:value-of select="@asciiInitials"/> + <xsl:if test="@asciiInitials!='' and @asciiSurname!=''"> </xsl:if> + <xsl:value-of select="@asciiSurname"/> + <xsl:text>)</xsl:text> + </xsl:if> + <xsl:if test="@role"> + <xsl:choose> + <xsl:when test="@role='editor'"> + <xsl:text>, Editor</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>, </xsl:text><xsl:value-of select="@role" /> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </myns:item> + </xsl:if> + <xsl:variable name="org"> + <xsl:choose> + <xsl:when test="organization/@abbrev"><xsl:value-of select="organization/@abbrev" /></xsl:when> + <xsl:otherwise><xsl:value-of select="organization" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="orgOfFollowing"> + <xsl:choose> + <xsl:when test="following-sibling::*[1]/organization/@abbrev"><xsl:value-of select="following-sibling::*[1]/organization/@abbrev" /></xsl:when> + <xsl:otherwise><xsl:value-of select="following-sibling::*/organization" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:if test="$org != $orgOfFollowing and $org != ''"> + <myns:item> + <xsl:value-of select="$org"/> + <xsl:if test="organization/@ascii"> + <xsl:value-of select="concat(' (',organization/@ascii,')')"/> + </xsl:if> + </myns:item> + </xsl:if> + </xsl:for-each> + <myns:item> + <xsl:if test="$xml2rfc-ext-pub-month!=''"> + <xsl:value-of select="$xml2rfc-ext-pub-month" /> + <xsl:if test="$xml2rfc-ext-pub-day!='' and /rfc/@ipr and not(/rfc/@number)"> + <xsl:text> </xsl:text> + <xsl:value-of select="number($xml2rfc-ext-pub-day)" /> + <xsl:text>,</xsl:text> + </xsl:if> + </xsl:if> + <xsl:if test="$xml2rfc-ext-pub-month='' and $rfcno!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'month missing but is required for RFCs'"/> + </xsl:call-template> + </xsl:if> + <xsl:if test="$xml2rfc-ext-pub-day='' and /rfc/@docName and not(substring(/rfc/@docName, string-length(/rfc/@docName) - string-length('-latest') + 1) = '-latest')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('/rfc/front/date/@day appears to be missing for a historic draft dated ', $pub-yearmonth)"/> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="concat(' ',$xml2rfc-ext-pub-year)" /> + </myns:item> +</xsl:template> + + +<xsl:template name="emitheader"> + <xsl:param name="lc" /> + <xsl:param name="rc" /> + + <tbody> + <xsl:for-each select="$lc/myns:item | $rc/myns:item"> + <xsl:variable name="pos" select="position()" /> + <xsl:if test="$pos &lt; count($lc/myns:item) + 1 or $pos &lt; count($rc/myns:item) + 1"> + <tr> + <td class="{$css-left}"><xsl:call-template name="copynodes"><xsl:with-param name="nodes" select="$lc/myns:item[$pos]/node()" /></xsl:call-template></td> + <td class="{$css-right}"><xsl:call-template name="copynodes"><xsl:with-param name="nodes" select="$rc/myns:item[$pos]/node()" /></xsl:call-template></td> + </tr> + </xsl:if> + </xsl:for-each> + </tbody> +</xsl:template> + +<!-- convenience template that avoids copying namespace nodes we don't want --> +<xsl:template name="copynodes"> + <xsl:param name="nodes" /> + <xsl:for-each select="$nodes"> + <xsl:choose> + <xsl:when test="namespace-uri()='http://www.w3.org/1999/xhtml'"> + <xsl:element name="{name()}" namespace="{namespace-uri()}"> + <xsl:copy-of select="@*|node()" /> + </xsl:element> + </xsl:when> + <xsl:when test="self::*"> + <xsl:element name="{name()}"> + <xsl:copy-of select="@*|node()" /> + </xsl:element> 
+ </xsl:when> + <!-- workaround for opera, remove when Opera > 9.0.x comes out --> + <xsl:when test="self::text()"> + <xsl:value-of select="."/> + </xsl:when> + <xsl:otherwise> + <xsl:copy-of select="." /> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + + +<xsl:template name="expirydate"> + <xsl:param name="in-prose"/> + <xsl:choose> + <xsl:when test="number($xml2rfc-ext-pub-day) >= 1"> + <!-- have day of month? --> + <xsl:if test="$in-prose"> + <xsl:text>on </xsl:text> + </xsl:if> + <xsl:call-template name="normalize-date"> + <xsl:with-param name="year" select="$xml2rfc-ext-pub-year"/> + <xsl:with-param name="month" select="$pub-month-numeric"/> + <xsl:with-param name="day" select="$xml2rfc-ext-pub-day + 185"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$in-prose"> + <xsl:text>in </xsl:text> + </xsl:if> + <xsl:variable name="month"> + <xsl:call-template name="get-month-as-num"> + <xsl:with-param name="month" select="$xml2rfc-ext-pub-month"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="string(number($month))!='NaN' and number($month) &gt; 0 and number($month) &lt; 7"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="number($month) + 6"/> + </xsl:call-template> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-ext-pub-year" /> + </xsl:when> + <xsl:when test="string(number($month))!='NaN' and number($month) &gt; 6 and number($month) &lt; 13"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="number($month) - 6"/> + </xsl:call-template> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-ext-pub-year + 1" /> + </xsl:when> + <xsl:otherwise>WRONG SYNTAX FOR MONTH</xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="normalize-date"> + <xsl:param name="year"/> + <xsl:param name="month"/> + <xsl:param name="day"/> + + <xsl:variable name="isleap" select="(($year mod 4) = 0 and ($year mod 100 != 0)) or ($year mod 400) = 0" /> + + <!--<xsl:message> + <xsl:value-of select="concat($year,' ',$month,' ',$day)"/> + </xsl:message>--> + + <xsl:variable name="dim"> + <xsl:choose> + <xsl:when test="$month=1 or $month=3 or $month=5 or $month=7 or $month=8 or $month=10 or $month=12">31</xsl:when> + <xsl:when test="$month=2 and $isleap">29</xsl:when> + <xsl:when test="$month=2 and not($isleap)">28</xsl:when> + <xsl:otherwise>30</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$day > $dim and $month=12"> + <xsl:call-template name="normalize-date"> + <xsl:with-param name="year" select="$year + 1"/> + <xsl:with-param name="month" select="1"/> + <xsl:with-param name="day" select="$day - $dim"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$day > $dim"> + <xsl:call-template name="normalize-date"> + <xsl:with-param name="year" select="$year"/> + <xsl:with-param name="month" select="$month + 1"/> + <xsl:with-param name="day" select="$day - $dim"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$month"/> + </xsl:call-template> + <xsl:value-of select="concat(' ',$day,', ',$year)"/> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="get-month-as-num"> + <xsl:param name="month" /> + <xsl:choose> + <xsl:when test="$month='January'">01</xsl:when> + <xsl:when test="$month='February'">02</xsl:when> + <xsl:when test="$month='March'">03</xsl:when> 
+ <xsl:when test="$month='April'">04</xsl:when> + <xsl:when test="$month='May'">05</xsl:when> + <xsl:when test="$month='June'">06</xsl:when> + <xsl:when test="$month='July'">07</xsl:when> + <xsl:when test="$month='August'">08</xsl:when> + <xsl:when test="$month='September'">09</xsl:when> + <xsl:when test="$month='October'">10</xsl:when> + <xsl:when test="$month='November'">11</xsl:when> + <xsl:when test="$month='December'">12</xsl:when> + <xsl:otherwise>WRONG SYNTAX FOR MONTH</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-month-as-name"> + <xsl:param name="month"/> + <xsl:choose> + <xsl:when test="$month=1">January</xsl:when> + <xsl:when test="$month=2">February</xsl:when> + <xsl:when test="$month=3">March</xsl:when> + <xsl:when test="$month=4">April</xsl:when> + <xsl:when test="$month=5">May</xsl:when> + <xsl:when test="$month=6">June</xsl:when> + <xsl:when test="$month=7">July</xsl:when> + <xsl:when test="$month=8">August</xsl:when> + <xsl:when test="$month=9">September</xsl:when> + <xsl:when test="$month=10">October</xsl:when> + <xsl:when test="$month=11">November</xsl:when> + <xsl:when test="$month=12">December</xsl:when> + <xsl:otherwise>WRONG SYNTAX FOR MONTH</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- produce back section with author information --> +<xsl:template name="get-authors-section-title"> + <xsl:choose> + <xsl:when test="count(/rfc/front/author)=1">Author's Address</xsl:when> + <xsl:otherwise>Authors' Addresses</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-authors-section-number"> + <xsl:if test="/*/x:assign-section-number[@builtin-target='authors']"> + <xsl:value-of select="/*/x:assign-section-number[@builtin-target='authors']/@number"/> + </xsl:if> +</xsl:template> + +<xsl:template name="insertAuthors"> + + <xsl:variable name="number"> + <xsl:call-template name="get-authors-section-number"/> + </xsl:variable> + + <xsl:if test="$number!='suppress' and $xml2rfc-authorship!='no'"> + <xsl:call-template name="insert-conditional-hrule"/> + + <section id="{$anchor-pref}authors" class="avoidbreakinside"> + <xsl:call-template name="insert-conditional-pagebreak"/> + <h2> + <xsl:if test="$number != ''"> + <a href="#{$anchor-pref}section.{$number}" id="{$anchor-pref}section.{$number}"><xsl:value-of select="$number"/>.</a> + <xsl:text> </xsl:text> + </xsl:if> + <a href="#{$anchor-pref}authors"><xsl:call-template name="get-authors-section-title"/></a> + </h2> + + <xsl:apply-templates select="/rfc/front/author" /> + </section> + </xsl:if> +</xsl:template> + + + +<!-- insert copyright statement --> + +<xsl:template name="insertCopyright" myns:namespaceless-elements="xml2rfc"> + +<boilerplate> + <xsl:if test="not($no-copylong)"> + <section title="Full Copyright Statement" anchor="{$anchor-pref}copyright" x:fixed-section-number="3"> + <xsl:choose> + <xsl:when test="$ipr-rfc3667"> + <t> + <xsl:choose> + <xsl:when test="$ipr-rfc4748"> + Copyright &#169; The IETF Trust (<xsl:value-of select="$xml2rfc-ext-pub-year" />). + </xsl:when> + <xsl:otherwise> + Copyright &#169; The Internet Society (<xsl:value-of select="$xml2rfc-ext-pub-year" />). + </xsl:otherwise> + </xsl:choose> + </t> + <t> + This document is subject to the rights, licenses and restrictions + contained in BCP 78<xsl:if test="$submissionType='independent'"> and at <eref target="http://www.rfc-editor.org/copyright.html">http://www.rfc-editor.org/copyright.html</eref></xsl:if>, and except as set forth therein, the authors + retain all their rights. 
+ </t> + <t> + This document and the information contained herein are provided + on an &#8220;AS IS&#8221; basis and THE CONTRIBUTOR, + THE ORGANIZATION HE/SHE REPRESENTS OR IS SPONSORED BY (IF ANY), + THE INTERNET SOCIETY<xsl:if test="$ipr-rfc4748">, THE IETF TRUST</xsl:if> + AND THE INTERNET ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, + EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE + INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED + WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + </t> + </xsl:when> + <xsl:otherwise> + <!-- <http://tools.ietf.org/html/rfc2026#section-10.4> --> + <t> + Copyright &#169; The Internet Society (<xsl:value-of select="$xml2rfc-ext-pub-year" />). All Rights Reserved. + </t> + <t> + This document and translations of it may be copied and furnished to + others, and derivative works that comment on or otherwise explain it + or assist in its implementation may be prepared, copied, published and + distributed, in whole or in part, without restriction of any kind, + provided that the above copyright notice and this paragraph are + included on all such copies and derivative works. However, this + document itself may not be modified in any way, such as by removing + the copyright notice or references to the Internet Society or other + Internet organizations, except as needed for the purpose of + developing Internet standards in which case the procedures for + copyrights defined in the Internet Standards process must be + followed, or as required to translate it into languages other than + English. + </t> + <t> + The limited permissions granted above are perpetual and will not be + revoked by the Internet Society or its successors or assigns. + </t> + <t> + This document and the information contained herein is provided on an + &#8220;AS IS&#8221; basis and THE INTERNET SOCIETY AND THE INTERNET ENGINEERING + TASK FORCE DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING + BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF THE INFORMATION + HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED WARRANTIES OF + MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + </t> + </xsl:otherwise> + </xsl:choose> + </section> + + <section title="Intellectual Property" anchor="{$anchor-pref}ipr" x:fixed-section-number="4"> + <xsl:choose> + <xsl:when test="$ipr-rfc3667"> + <t> + The IETF takes no position regarding the validity or scope of any + Intellectual Property Rights or other rights that might be claimed to + pertain to the implementation or use of the technology described in + this document or the extent to which any license under such rights + might or might not be available; nor does it represent that it has + made any independent effort to identify any such rights. Information + on the procedures with respect to rights in RFC documents + can be found in BCP 78 and BCP 79. + </t> + <t> + Copies of IPR disclosures made to the IETF Secretariat and any + assurances of licenses to be made available, or the result of an + attempt made to obtain a general license or permission for the use + of such proprietary rights by implementers or users of this + specification can be obtained from the IETF on-line IPR repository + at <eref target="http://www.ietf.org/ipr">http://www.ietf.org/ipr</eref>. 
+ </t> + <t> + The IETF invites any interested party to bring to its attention any + copyrights, patents or patent applications, or other proprietary + rights that may cover technology that may be required to implement + this standard. Please address the information to the IETF at + <eref target="mailto:ietf-ipr@ietf.org">ietf-ipr@ietf.org</eref>. + </t> + </xsl:when> + <xsl:otherwise> + <t> + The IETF takes no position regarding the validity or scope of + any intellectual property or other rights that might be claimed + to pertain to the implementation or use of the technology + described in this document or the extent to which any license + under such rights might or might not be available; neither does + it represent that it has made any effort to identify any such + rights. Information on the IETF's procedures with respect to + rights in standards-track and standards-related documentation + can be found in BCP-11. Copies of claims of rights made + available for publication and any assurances of licenses to + be made available, or the result of an attempt made + to obtain a general license or permission for the use of such + proprietary rights by implementors or users of this + specification can be obtained from the IETF Secretariat. + </t> + <t> + The IETF invites any interested party to bring to its + attention any copyrights, patents or patent applications, or + other proprietary rights which may cover technology that may be + required to practice this standard. Please address the + information to the IETF Executive Director. + </t> + <xsl:if test="$xml2rfc-iprnotified='yes'"> + <t> + The IETF has been notified of intellectual property rights + claimed in regard to some or all of the specification contained + in this document. For more information consult the online list + of claimed rights. + </t> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </section> + + <xsl:choose> + <xsl:when test="$no-funding"/> + <xsl:when test="$funding1 and /rfc/@number"> + <section x:fixed-section-number="5"> + <xsl:attribute name="title"> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Acknowledgement</xsl:when> + <xsl:otherwise>Acknowledgment</xsl:otherwise> + </xsl:choose> + </xsl:attribute> + <t> + Funding for the RFC Editor function is provided by the IETF + Administrative Support Activity (IASA). + </t> + </section> + </xsl:when> + <xsl:when test="$funding0 and /rfc/@number"> + <section x:fixed-section-number="5"> + <xsl:attribute name="title"> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Acknowledgement</xsl:when> + <xsl:otherwise>Acknowledgment</xsl:otherwise> + </xsl:choose> + </xsl:attribute> + <t> + Funding for the RFC Editor function is currently provided by + the Internet Society. + </t> + </section> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:if> +</boilerplate> +</xsl:template> + +<!-- optional scripts --> +<xsl:template name="insertScripts"> +<xsl:if test="$xml2rfc-ext-refresh-from!=''"> +<script type="application/javascript"> +var RfcRefresh = {}; +RfcRefresh.NS_XHTML = "http://www.w3.org/1999/xhtml"; +RfcRefresh.NS_MOZERR = "http://www.mozilla.org/newlayout/xml/parsererror.xml"; +RfcRefresh.lastTxt = ""; +RfcRefresh.lastEtag = ""; +RfcRefresh.xslt = null; +RfcRefresh.xmlsource = "<xsl:value-of select='$xml2rfc-ext-refresh-from'/>"; +RfcRefresh.xsltsource = "<xsl:value-of select='$xml2rfc-ext-refresh-xslt'/>"; +RfcRefresh.interval = "<xsl:value-of select='number($xml2rfc-ext-refresh-interval)'/>"; + +RfcRefresh.getXSLT = function() { + if (! 
window.XSLTProcessor) { + var err = document.createElement("pre"); + err.className = "refreshbrowsererror <xsl:value-of select="$css-noprint"/>"; + var msg = "This browser does not support the window.XSLTProcessor functionality."; + err.appendChild(document.createTextNode(msg)); + RfcRefresh.showMessage("refreshxmlerror", err); + } + else { + try { + var xhr = new XMLHttpRequest(); + xhr.open("GET", RfcRefresh.xsltsource, true); + xhr.onload = function (e) { + if (xhr.readyState === 4) { + RfcRefresh.xslt = new XSLTProcessor(); + RfcRefresh.xslt.importStylesheet(xhr.responseXML); + } + } + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); + } + catch (e) { + var err = document.createElement("pre"); + err.className = "refreshbrowsererror <xsl:value-of select="$css-noprint"/>"; + var msg = "Failed to load XSLT code from &lt;" + RfcRefresh.xsltsource + "&gt;.\n"; + msg += "Your browser might not support loading from a file: URI.\n"; + msg += "Error details: " + e; + err.appendChild(document.createTextNode(msg)); + RfcRefresh.showMessage("refreshxmlerror", err); + } + } +} + +RfcRefresh.findAndUpdate = function(olddoc, elem) { + var changed = ""; + var children = elem.childNodes; + for (var i = 0; i != children.length; i++) { + var n = children[i]; + if (n.nodeType == 1) { + var c = RfcRefresh.findAndUpdate(olddoc, n); + if (changed == '') { + changed = c; + } + var id = n.id; + if (id != "") { + var old = olddoc.getElementById(id); + var newtext = n.innerHTML; + if (!old) { + console.debug("new " + id); + } else { + var oldtext = old.innerHTML; + if (oldtext != newtext) { + console.debug("updating " + id); + old.innerHTML = n.innerHTML; + if (changed == '') { + changed = id; + } + } + } + } + } + } + return changed; +} + +RfcRefresh.findDifferences = function(olddoc, newdoc) { + var changed = RfcRefresh.findAndUpdate(olddoc, newdoc.documentElement); + if (changed != "") { + console.debug("changed: " + changed); + document.location = "#" + changed; + } + // final check for changes; if those were not processed earlier, + // we refresh the whole document + var oldtext = olddoc.documentElement.getElementsByTagName("body")[0].innerHTML; + var newtext = newdoc.documentElement.getElementsByTagName("body")[0].innerHTML; + if (oldtext != newtext) { + console.debug("full refresh: " + newtext); + olddoc.documentElement.innerHTML = newdoc.documentElement.innerHTML; + } +} + +RfcRefresh.getNodeText = function(elem) { + var result = ""; + var children = elem.childNodes; + for (var i = 0; i != children.length; i++) { + if (children[i].nodeType == 3) { + result += children[i].nodeValue; + } + } + return result; +} + +RfcRefresh.getParserError = function(dom) { + // FIREFOX + if ("parsererror" == dom.documentElement.nodeName &amp;&amp; RfcRefresh.NS_MOZERR == dom.documentElement.namespaceURI) { + var errmsg = new Object(); + errmsg.msg = ""; + errmsg.src = ""; + var children = dom.documentElement.childNodes; + for (var i = 0; i != children.length; i++) { + if (children[i].nodeType == 3) { + errmsg.msg += children[i].nodeValue; + } else if (children[i].nodeName == "sourcetext") { + errmsg.src = RfcRefresh.getNodeText(children[i]); + } + } + return errmsg; + } + + var list = dom.getElementsByTagNameNS(RfcRefresh.NS_XHTML, "parsererror"); + if (list.length != 0) { + // Webkit + var errmsg = new Object(); + errmsg.msg = "XML parse error"; + list = dom.getElementsByTagNameNS(RfcRefresh.NS_XHTML, "div"); + if (list.length != 0) { + errmsg.msg = 
RfcRefresh.getNodeText(list[0]); + } + return errmsg; + } + + + return null; +} + +RfcRefresh.showMessage = function(cls, node) { + // remove previous message + var list = document.getElementsByClassName(cls); + if (list.length != 0) { + list[0].parentNode.removeChild(list[0]); + } + document.body.appendChild(node); +} + +RfcRefresh.refresh = function(txt) { + if (txt != RfcRefresh.lastTxt) { + RfcRefresh.lastTxt = txt; + // try to parse + var parser = new DOMParser(); + var dom = parser.parseFromString(txt, "text/xml"); + var errmsg = RfcRefresh.getParserError(dom); + + if (errmsg != null) { + var err = document.createElement("pre"); + err.className = "refreshxmlerror <xsl:value-of select="$css-noprint"/>"; + err.appendChild(document.createTextNode(errmsg.msg)); + if (errmsg.src != null) { + err.appendChild(document.createElement("hr")); + err.appendChild(document.createTextNode(errmsg.src)); + } + RfcRefresh.showMessage("refreshxmlerror", err); + } else { + // find new refresh + var children = dom.childNodes; + for (var i = 0; i != children.length; i++) { + if (children[i].nodeType == 7 &amp;&amp; children[i].target == "rfc-ext") { + var s = "&lt;foo " + children[i].data + "/>"; + var sd = parser.parseFromString(s, "text/xml"); + var refresh = sd.documentElement.getAttribute("refresh-interval"); + if (refresh != null &amp;&amp; refresh != "") { + refresh = parseInt(refresh, 10); + if (RfcRefresh.interval != refresh) { + if (Number.isNaN(refresh) || refresh &lt; 5) { + console.debug("refresh requested to be: " + refresh + " - ignored, using 5 instead."); + RfcRefresh.interval = 5; + } else { + RfcRefresh.interval = refresh; + console.debug("refresh changed to: " + refresh); + } + } + } + } + } + + var html = RfcRefresh.xslt.transformToDocument(dom); + RfcRefresh.findDifferences(document, html); + } + } +} + +RfcRefresh.initRefresh = function() { + RfcRefresh.getXSLT(); + + window.setTimeout(function(){ + if (RfcRefresh.xslt != null) { + var xhr = new XMLHttpRequest(); + xhr.open("GET", RfcRefresh.xmlsource, true); + if (RfcRefresh.lastEtag != "") { + xhr.setRequestHeader("If-None-Match", RfcRefresh.lastEtag); + } + xhr.onload = function (e) { + if (xhr.readyState === 4) { + console.debug(xhr.status + " " + xhr.statusText); + if (xhr.status != 304) { + RfcRefresh.refresh(xhr.responseText); + } + RfcRefresh.lastEtag = xhr.getResponseHeader("ETag"); + } + } + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); + setTimeout(arguments.callee, RfcRefresh.interval * 1000); + } + }, RfcRefresh.interval * 1000); +} +</script> +</xsl:if> +<xsl:if test="/rfc/x:feedback"> +<script type="application/javascript"> +var buttonsAdded = false; + +function initFeedback() { + var fb = document.createElement("div"); + fb.className = "<xsl:value-of select="concat($css-feedback,' ',$css-noprint)"/>"; + fb.setAttribute("onclick", "feedback();"); + fb.appendChild(document.createTextNode("feedback")); + + document.body.appendChild(fb); +} + +function feedback() { + toggleButtonsToElementsByName("h2"); + toggleButtonsToElementsByName("h3"); + toggleButtonsToElementsByName("h4"); + toggleButtonsToElementsByName("h5"); + + buttonsAdded = !buttonsAdded; +} + +function toggleButtonsToElementsByName(name) { + var list = document.getElementsByTagName(name); + for (var i = 0; i &lt; list.length; i++) { + toggleButton(list.item(i)); + } +} + +function toggleButton(node) { + if (! 
buttonsAdded) { + + // docname + var template = "<xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="/rfc/x:feedback/@template"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> +</xsl:call-template>"; + + var id = node.getAttribute("id"); + // try also parent + if (id == null || id == "") { + var id = node.parentNode.getAttribute("id"); + } + // better id available? + var titlelinks = node.getElementsByTagName("a"); + for (var i = 0; i &lt; titlelinks.length; i++) { + var tl = titlelinks.item(i); + if (tl.getAttribute("id")) { + id = tl.getAttribute("id"); + } + } + + // ref + var ref = window.location.toString(); + var hash = ref.indexOf("#"); + if (hash != -1) { + ref = ref.substring(0, hash); + } + if (id != null &amp;&amp; id != "") { + ref += "#" + id; + } + + // docname + var docname = "<xsl:value-of select="/rfc/@docName"/>"; + + // section + var section = node.textContent; + section = section.replace("\u00a0", " ").trim(); + + // build URI from template + var uri = template.replace("{docname}", encodeURIComponent(docname)); + uri = uri.replace("{section}", encodeURIComponent(section)); + uri = uri.replace("{ref}", encodeURIComponent(ref)); + + var button = document.createElement("a"); + button.className = "<xsl:value-of select="concat($css-fbbutton,' ',$css-noprint)"/>"; + button.setAttribute("href", uri); + button.appendChild(document.createTextNode("send feedback")); + node.appendChild(button); + } + else { + var buttons = node.getElementsByTagName("a"); + for (var i = 0; i &lt; buttons.length; i++) { + var b = buttons.item(i); + if (b.className == "<xsl:value-of select="concat($css-fbbutton,' ',$css-noprint)"/>") { + node.removeChild(b); + } + } + } +}</script> +</xsl:if> +<xsl:if test="$xml2rfc-ext-insert-metadata='yes' and $rfcno!=''"> +<script type="application/javascript"> +function getMeta(rfcno, container) { + + var xhr = new XMLHttpRequest(); + xhr.open("GET", "https://tools.ietf.org/draft/rfc" + rfcno + "/state.xml", true); + xhr.onload = function (e) { + if (xhr.readyState === 4) { + if (xhr.status === 200) { + var doc = xhr.responseXML; + var info = getChildByName(doc.documentElement, "info"); + + var cont = document.getElementById(container); + // empty the container before repopulating it + while (cont.firstChild) { + cont.removeChild(cont.firstChild); + } + + var c = getChildByName(info, "stdstatus"); + if (c !== null) { + var bld = newElementWithText("b", c.textContent); + cont.appendChild(bld); + } + + c = getChildByName(info, "updatedby"); + if (c !== null) { + cont.appendChild(newElement("br")); + cont.appendChild(newText("Updated by: ")); + appendRfcLinks(cont, c.textContent); + } + + c = getChildByName(info, "obsoletedby"); + if (c !== null) { + cont.appendChild(newElement("br")); + cont.appendChild(newText("Obsoleted by: ")); + appendRfcLinks(cont, c.textContent); + } + + c = getChildByName(info, "errata"); + if (c !== null) { + var template = "<xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-errata-uri"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> + </xsl:call-template>"; + + cont.appendChild(newElement("br")); + var link = newElementWithText("a", "errata"); + link.setAttribute("href", template.replace("{rfc}", rfcno)); + var errata = newElementWithText("i", "This document has "); + errata.appendChild(link); + errata.appendChild(newText(".")); + cont.appendChild(errata); + } + + 
cont.style.display = "block"; + } else { + console.error(xhr.statusText); + } + } + }; + xhr.onerror = function (e) { + console.error(xhr.status + " " + xhr.statusText); + }; + xhr.send(null); +} + +// DOM helpers +function newElement(name) { + return document.createElement(name); +} +function newElementWithText(name, txt) { + var e = document.createElement(name); + e.appendChild(newText(txt)); + return e; +} +function newText(text) { + return document.createTextNode(text); +} + +function getChildByName(parent, name) { + if (parent === null) { + return null; + } + else { + for (var c = parent.firstChild; c !== null; c = c.nextSibling) { + if (name == c.nodeName) { + return c; + } + } + return null; + } +} + +function appendRfcLinks(parent, text) { + var template = "<xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="$xml2rfc-ext-rfc-uri"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> +</xsl:call-template>"; + var updates = text.split(","); + for (var i = 0; i &lt; updates.length; i++) { + var rfc = updates[i].trim(); + if (rfc.substring(0, 3) == "rfc") { + var no = rfc.substring(3); + var link = newElement("a"); + link.setAttribute("href", template.replace("{rfc}", no)); + link.appendChild(newText(no)); + parent.appendChild(link); + } else { + parent.appendChild(newText(rfc)); + } + if (i != updates.length - 1) { + parent.appendChild(newText(", ")); + } + } +} +</script> +</xsl:if> +<script type="application/javascript"> +function anchorRewrite() { +<xsl:text> map = { </xsl:text> + <xsl:for-each select="//x:anchor-alias"> + <xsl:text>"</xsl:text> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="@value"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> + </xsl:call-template> + <xsl:text>": "</xsl:text> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="ancestor::*[@anchor][1]/@anchor"/> + <xsl:with-param name="replace">"</xsl:with-param> + <xsl:with-param name="by">\"</xsl:with-param> + </xsl:call-template> + <xsl:text>"</xsl:text> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> +<xsl:text>};</xsl:text> + if (window.location.hash.length >= 1) { + var fragid = window.location.hash.substr(1); + if (fragid) { + if (! document.getElementById(fragid)) { + var prefix = "<xsl:value-of select="$anchor-pref"/>"; + var mapped = map[fragid]; + if (mapped) { + window.location.hash = mapped; + } else if (fragid.indexOf("section-") == 0) { + window.location.hash = prefix + "section." + fragid.substring(8); + } else if (fragid.indexOf("appendix-") == 0) { + window.location.hash = prefix + "section." + fragid.substring(9); + } else if (fragid.indexOf("s-") == 0) { + var postfix = fragid.substring(2); + if (postfix.startsWith("abstract")) { + window.location.hash = prefix + postfix; + } else if (postfix.startsWith("note-")) { + window.location.hash = prefix + "note." + postfix.substring(5); + } else { + window.location.hash = prefix + "section." + postfix; + } + } else if (fragid.indexOf("p-") == 0) { + var r = fragid.substring(2); + var p = r.indexOf("-"); + if (p >= 0) { + window.location.hash = prefix + "section." + r.substring(0, p) + ".p." 
+ r.substring(p + 1); + } + } + } + } + } +} +window.addEventListener('hashchange', anchorRewrite); +window.addEventListener('DOMContentLoaded', anchorRewrite); +</script><xsl:if test="$prettyprint-script!=''"> +<script src="{$prettyprint-script}"/></xsl:if> +</xsl:template> + +<!-- insert CSS style info --> + +<xsl:template name="insertCss"> +<style type="text/css" title="rfc2629.xslt"> +<xsl:value-of select="$xml2rfc-ext-webfonts"/> +a { + text-decoration: none; +} +a.smpl { + color: black; +} +a:hover { + text-decoration: underline; +} +a:active { + text-decoration: underline; +} +address { + margin-top: 1em; + margin-left: 2em; + font-style: normal; +}<xsl:if test="//x:blockquote|//blockquote"> +blockquote { + border-style: solid; + border-color: gray; + border-width: 0 0 0 .25em; + font-style: italic; + padding-left: 0.5em; +}</xsl:if> +body {<xsl:if test="$xml2rfc-background!=''"> + background: url(<xsl:value-of select="$xml2rfc-background" />) #ffffff left top;</xsl:if> + color: black; + font-family: <xsl:value-of select="$xml2rfc-ext-ff-body"/>; + font-size: 12pt; + margin: 10px 0px 10px 10px; +}<xsl:if test="$parsedMaxwidth!=''"> +@media screen and (min-width: <xsl:value-of select="number($parsedMaxwidth + 40)"/>px) { + body { + margin: 10px auto; + max-width: <xsl:value-of select="$parsedMaxwidth"/>px; + } +}</xsl:if> +samp, span.tt, code, pre { + font-family: <xsl:value-of select="$xml2rfc-ext-ff-pre"/>; +}<xsl:if test="//xhtml:p"> +br.p { + line-height: 150%; +}</xsl:if> +cite { + font-style: normal; +}<xsl:if test="//x:note|//aside"> +aside { + margin-left: 2em; +}</xsl:if> +dl { + margin-left: 2em; +} +dl > dt { + float: left; + margin-right: 1em; +} +dl.nohang > dt { + float: none; +} +dl > dd { + margin-bottom: .5em; +} +dl.compact > dd { + margin-bottom: .0em; +} +dl > dd > dl { + margin-top: 0.5em; +} +ul.empty {<!-- spacing between two entries in definition lists --> + list-style-type: none; +} +ul.empty li { + margin-top: .5em; +} +dl p { + margin-left: 0em; +} +dl.<xsl:value-of select="$css-reference"/> > dt { + font-weight: bold; +} +dl.<xsl:value-of select="$css-reference"/> > dd { + margin-left: <xsl:choose><xsl:when test="$xml2rfc-symrefs='no'">3.5</xsl:when><xsl:otherwise>6</xsl:otherwise></xsl:choose>em; +} +h1 { + color: green; + font-size: 150%; + line-height: 18pt; + font-weight: bold; + text-align: center; + margin-top: 36pt; + margin-bottom: 0pt; +} +h2 { + font-size: 130%; + line-height: 21pt; + page-break-after: avoid; +} +h2.np { + page-break-before: always; +} +h3 { + font-size: 120%; + line-height: 15pt; + page-break-after: avoid; +} +h4 { + font-size: 110%; + page-break-after: avoid; +} +h5, h6 { + page-break-after: avoid; +} +h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { + color: black; +} +img { + margin-left: 3em; +} +li { + margin-left: 2em; +} +ol { + margin-left: 2em; +} +ol.la { + list-style-type: lower-alpha; +} +ol.ua { + list-style-type: upper-alpha; +} +ol p { + margin-left: 0em; +}<xsl:if test="//xhtml:q"> +q { + font-style: italic; +}</xsl:if> +p { + margin-left: 2em; +} +pre { + font-size: 11pt; + margin-left: 3em; + background-color: lightyellow; + padding: .25em; + page-break-inside: avoid; +}<xsl:if test="//artwork[@x:is-code-component='yes']"><!-- support "<CODE BEGINS>" and "<CODE ENDS>" markers--> +pre.ccmarker { + background-color: white; + color: gray; +} +pre.ccmarker > span { + font-size: small; +} +pre.cct { + margin-bottom: -1em; +} +pre.ccb { + margin-top: -1em; +}</xsl:if> +pre.text2 { + border-style: dotted; + border-width: 1px; + 
background-color: #f0f0f0; +} +pre.inline { + background-color: white; + padding: 0em; + page-break-inside: auto;<xsl:if test="$prettyprint-script!=''"> + border: none !important;</xsl:if> +} +pre.text { + border-style: dotted; + border-width: 1px; + background-color: #f8f8f8; +} +pre.drawing { + border-style: solid; + border-width: 1px; + background-color: #f8f8f8; + padding: 2em; +}<xsl:if test="//x:q"> +q { + font-style: italic; +}</xsl:if> +<xsl:if test="//x:sup|sup"> +sup { + font-size: 60%; +}</xsl:if><xsl:if test="sub"> +sub { + font-size: 60%; +}</xsl:if> +table { + margin-left: 2em; +}<xsl:if test="//texttable|//table"> +div.<xsl:value-of select="$css-tt"/> { + margin-left: 2em; +} +table.<xsl:value-of select="$css-tt"/> { + border-collapse: collapse; + border-color: gray; + border-spacing: 0; + vertical-align: top; + } +table.<xsl:value-of select="$css-tt"/> th { + border-color: gray; + padding: 3px; +} +table.<xsl:value-of select="$css-tt"/> td { + border-color: gray; + padding: 3px; +} +table.all { + border-style: solid; + border-width: 2px; +} +table.full { + border-style: solid; + border-width: 2px; +} +table.<xsl:value-of select="$css-tt"/> td { + vertical-align: top; +} +table.all td { + border-style: solid; + border-width: 1px; +} +table.full td { + border-style: none solid; + border-width: 1px; +} +table.<xsl:value-of select="$css-tt"/> th { + vertical-align: top; +} +table.all th { + border-style: solid; + border-width: 1px; +} +table.full th { + border-style: solid; + border-width: 1px 1px 2px 1px; +} +table.headers th { + border-style: none none solid none; + border-width: 2px; +} +table.<xsl:value-of select="$css-tleft"/> { + margin-right: auto; +} +table.<xsl:value-of select="$css-tright"/> { + margin-left: auto; +} +table.<xsl:value-of select="$css-tcenter"/> { + margin-left: auto; + margin-right: auto; +} +caption { + caption-side: bottom; + font-weight: bold; + font-size: 10pt; + margin-top: .5em; +} +<xsl:if test="//table"> +table:not([class]) th { + background-color: #e9e9e9; +} +table:not([class]) tr:nth-child(2n) > td { + background-color: #f5f5f5; +} +tr p { + margin-left: 0em; +} +tr pre { + margin-left: 1em; +} +tr ol { + margin-left: 1em; +} +tr ul { + margin-left: 1em; +} +tr dl { + margin-left: 1em; +} +</xsl:if> +</xsl:if> +table.<xsl:value-of select="$css-header"/> { + border-spacing: 1px; + width: 95%; + font-size: 11pt; + color: white; +} +td.top { + vertical-align: top; +} +td.topnowrap { + vertical-align: top; + white-space: nowrap; +} +table.<xsl:value-of select="$css-header"/> td { + background-color: gray; + width: 50%; +}<xsl:if test="/rfc/@obsoletes | /rfc/@updates"> +table.<xsl:value-of select="$css-header"/> a { + color: white; +}</xsl:if> +ul.toc, ul.toc ul { + list-style: none; + margin-left: 1.5em; + padding-left: 0em; +} +ul.toc li { + line-height: 150%; + font-weight: bold; + margin-left: 0em; +} +ul.toc li li { + line-height: normal; + font-weight: normal; + font-size: 11pt; + margin-left: 0em; +} +li.excluded { + font-size: 0pt; +} +ul p { + margin-left: 0em; +} +.filename, h1, h2, h3, h4 { + font-family: <xsl:value-of select="$xml2rfc-ext-ff-title"/>; +} +<xsl:if test="$has-index">ul.ind, ul.ind ul { + list-style: none; + margin-left: 1.5em; + padding-left: 0em; + page-break-before: avoid; +} +ul.ind li { + font-weight: bold; + line-height: 200%; + margin-left: 0em; +} +ul.ind li li { + font-weight: normal; + line-height: 150%; + margin-left: 0em; +}<xsl:if test="//svg:svg"> +@namespace svg url(http://www.w3.org/2000/svg); +svg|svg { 
+ margin-left: 3em; +} +svg { + margin-left: 3em; +}</xsl:if> +.avoidbreakinside { + page-break-inside: avoid; +} +.avoidbreakafter { + page-break-after: avoid; +} +</xsl:if><xsl:if test="//*[@removeInRFC='true']">.rfcEditorRemove div:first-of-type { + font-style: italic; +}</xsl:if><xsl:if test="//x:bcp14|//bcp14">.bcp14 { + font-style: normal; + text-transform: lowercase; + font-variant: small-caps; +}</xsl:if><xsl:if test="//x:blockquote|//blockquote"> +blockquote > * .bcp14 { + font-style: italic; +}</xsl:if> +.comment { + background-color: yellow; +}<xsl:if test="$xml2rfc-editing='yes'"> +.editingmark { + background-color: khaki; +}</xsl:if> +.<xsl:value-of select="$css-center"/> { + text-align: center; +} +.<xsl:value-of select="$css-error"/> { + color: red; + font-style: italic; + font-weight: bold; +} +.figure { + font-weight: bold; + text-align: center; + font-size: 10pt; +} +.filename { + color: #333333; + font-size: 112%; + font-weight: bold; + line-height: 21pt; + text-align: center; + margin-top: 0pt; +} +.fn { + font-weight: bold; +} +.<xsl:value-of select="$css-left"/> { + text-align: left; +} +.<xsl:value-of select="$css-right"/> { + text-align: right; +} +.warning { + font-size: 130%; + background-color: yellow; +}<xsl:if test="$xml2rfc-ext-paragraph-links='yes'"> +.self { + color: #999999; + margin-left: .3em; + text-decoration: none; + visibility: hidden; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +} +.self:hover { + text-decoration: none; +} +li:hover > a.self, p:hover > a.self { + visibility: visible; +}</xsl:if><xsl:if test="$has-edits">del { + color: red; + text-decoration: line-through; +} +.del { + color: red; + text-decoration: line-through; +} +ins { + color: green; + text-decoration: underline; +} +.ins { + color: green; + text-decoration: underline; +} +div.issuepointer { + float: left; +}</xsl:if><xsl:if test="//ed:issue"> +table.openissue { + background-color: khaki; + border-width: thin; + border-style: solid; + border-color: black; +} +table.closedissue { + background-color: white; + border-width: thin; + border-style: solid; + border-color: gray; + color: gray; +} +thead th { + text-align: left; +} +.bg-issue { + border: solid; + border-width: 1px; + font-size: 8pt; +} +.closed-issue { + border: solid; + border-width: thin; + background-color: lime; + font-size: smaller; + font-weight: bold; +} +.open-issue { + border: solid; + border-width: thin; + background-color: red; + font-size: smaller; + font-weight: bold; +} +.editor-issue { + border: solid; + border-width: thin; + background-color: yellow; + font-size: smaller; + font-weight: bold; +}</xsl:if><xsl:if test="$xml2rfc-ext-refresh-from!=''">.refreshxmlerror { + position: fixed; + top: 1%; + right: 1%; + padding: 5px 5px; + color: yellow; + background: black; +} +.refreshbrowsererror { + position: fixed; + top: 1%; + left: 1%; + padding: 5px 5px; + color: red; + background: black; +}</xsl:if><xsl:if test="/rfc/x:feedback">.<xsl:value-of select="$css-feedback"/> { + position: fixed; + bottom: 1%; + right: 1%; + padding: 3px 5px; + color: white; + border-radius: 5px; + background: #006400; + border: 1px solid silver; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +} +.<xsl:value-of select="$css-fbbutton"/> { + margin-left: 1em; + color: #303030; + font-size: small; + font-weight: normal; + background: #d0d000; + padding: 1px 4px; + border: 1px solid silver; + border-radius: 5px; + 
-webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +}</xsl:if><xsl:if test="$xml2rfc-ext-justification='always'"> +dd, li, p { + text-align: justify; +}</xsl:if><xsl:if test="$xml2rfc-ext-insert-metadata='yes' and $rfcno!=''"> +.<xsl:value-of select="$css-docstatus"/> { + border: 1px solid black; + display: none; + float: right; + margin: 2em; + padding: 1em; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +}</xsl:if><xsl:if test="$errata-parsed"> +.<xsl:value-of select="$css-erratum"/> { + border: 1px solid orangered; + border-left: 0.75em solid orangered; + float: right; + padding: 0.5em; + -webkit-user-select: none;<!-- not std CSS yet--> + -moz-user-select: none; + -ms-user-select: none; +}<xsl:if test="$parsedMaxwidth!=''"> +@media screen and (min-width: <xsl:value-of select="number($parsedMaxwidth + 350)"/>px) { + .<xsl:value-of select="$css-erratum"/> { + margin-right: -150px; + } +}</xsl:if></xsl:if><xsl:if test="$published-as-rfc"> +.<xsl:value-of select="$css-publishedasrfc"/> { + background-color: yellow; + color: green; + font-size: 14pt; + text-align: center; +}</xsl:if> + +@media screen { + pre.text, pre.text2 { + width: 69em; + } +} + +@media print { + .<xsl:value-of select="$css-noprint"/> { + display: none; + } + + a { + color: black; + text-decoration: none; + } + + table.<xsl:value-of select="$css-header"/> { + width: 90%; + } + + td.<xsl:value-of select="$css-header"/> { + width: 50%; + color: black; + background-color: white; + vertical-align: top; + font-size: 110%; + } + + ul.toc a:last-child::after { + content: leader('.') target-counter(attr(href), page); + } + + ul.ind li li a {<!-- links in the leaf nodes of the index should go to page numbers --> + content: target-counter(attr(href), page); + } + + pre { + font-size: 10pt; + } + + .print2col { + column-count: 2; + -moz-column-count: 2;<!-- for Firefox --> + column-fill: auto;<!-- for PrinceXML --> + } +<xsl:if test="$xml2rfc-ext-justification='print'"> + dd, li, p { + text-align: justify; + } +</xsl:if>} +@page<xsl:if test="$xml2rfc-ext-duplex='yes'">:right</xsl:if> { + @top-left { + content: "<xsl:call-template name="get-header-left"/>"; + } + @top-right { + content: "<xsl:call-template name="get-header-right"/>"; + } + @top-center { + content: "<xsl:call-template name="get-header-center"/>"; + } + @bottom-left { + content: "<xsl:call-template name="get-author-summary"/>"; + } + @bottom-center { + content: "<xsl:call-template name="get-bottom-center"/>"; + } + @bottom-right { + content: "[Page " counter(page) "]"; + } +}<xsl:if test="$xml2rfc-ext-duplex='yes'"> +@page:left { + @top-left { + content: "<xsl:call-template name="get-header-right"/>"; + } + @top-right { + content: "<xsl:call-template name="get-header-left"/>"; + } + @top-center { + content: "<xsl:call-template name="get-header-center"/>"; + } + @bottom-left { + content: "[Page " counter(page) "]"; + } + @bottom-center { + content: "<xsl:call-template name="get-bottom-center"/>"; + } + @bottom-right { + content: "<xsl:call-template name="get-author-summary"/>"; + } +} +</xsl:if> +@page:first { + @top-left { + content: normal; + } + @top-right { + content: normal; + } + @top-center { + content: normal; + } +} +</style> +</xsl:template> + + +<!-- generate the index section --> + +<xsl:template name="insertSingleIref"> + <xsl:choose> + <xsl:when test="@ed:xref"> + <!-- special index generator mode --> + <xsl:text>[</xsl:text> + <a 
href="#{@ed:xref}"><xsl:value-of select="@ed:xref"/></a> + <xsl:text>, </xsl:text> + <a> + <xsl:variable name="htmluri" select="//reference[@anchor=current()/@ed:xref]/format[@type='HTML']/@target"/> + <xsl:if test="$htmluri"> + <xsl:attribute name="href"><xsl:value-of select="concat($htmluri,'#',@ed:frag)"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="@primary='true'"><b><xsl:value-of select="@ed:label" /></b></xsl:when> + <xsl:otherwise><xsl:value-of select="@ed:label" /></xsl:otherwise> + </xsl:choose> + </a> + <xsl:text>]</xsl:text> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="_n"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + <xsl:variable name="n"> + <xsl:choose> + <xsl:when test="$_n!=''"> + <xsl:value-of select="$_n"/> + </xsl:when> + <xsl:otherwise>&#167;</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:variable name="backlink"> + <xsl:choose> + <xsl:when test="self::xref"> + <xsl:variable name="target" select="@target"/> + <xsl:comment>workaround for Saxon 9.1 bug; force evaluation of: <xsl:value-of select="$target"/></xsl:comment> + <xsl:variable name="no"><xsl:number level="any" count="xref[@target=$target]"/></xsl:variable> + <xsl:text>#</xsl:text> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>xref.</xsl:text> + <xsl:value-of select="@target"/> + <xsl:text>.</xsl:text> + <xsl:value-of select="$no"/> + </xsl:when> + <xsl:when test="self::iref"> + <xsl:text>#</xsl:text> + <xsl:call-template name="compute-iref-anchor"/> + </xsl:when> + <xsl:when test="self::x:ref"> + <xsl:text>#</xsl:text> + <xsl:call-template name="compute-extref-anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:message>Unsupported element type for insertSingleIref</xsl:message> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <a href="{$backlink}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:choose> + <xsl:when test="@primary='true'"><b><xsl:value-of select="$n"/></b></xsl:when> + <xsl:otherwise><xsl:value-of select="$n"/></xsl:otherwise> + </xsl:choose> + </a> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="insertSingleXref"> + <xsl:variable name="_n"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + <xsl:variable name="n"> + <xsl:choose> + <xsl:when test="$_n!=''"> + <xsl:value-of select="$_n"/> + </xsl:when> + <xsl:otherwise>&#167;</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="self::reference"> + <a href="#{@anchor}"> + <xsl:call-template name="insertInsDelClass"/> + <b><xsl:value-of select="$n"/></b> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="target" select="@target"/> + <xsl:variable name="backlink">#<xsl:value-of select="$anchor-pref"/>xref.<xsl:value-of select="$target"/>.<xsl:number level="any" count="xref[@target=$target]"/></xsl:variable> + <a href="{$backlink}"> + <xsl:call-template name="insertInsDelClass"/> + <xsl:value-of select="$n"/> + </a> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="position()!=last()">, </xsl:if> +</xsl:template> + +<xsl:template name="insertIndex"> + + <xsl:call-template name="insert-conditional-hrule"/> + + <section id="{$anchor-pref}index"> + <xsl:call-template name="insert-conditional-pagebreak"/> + <h2> + <a href="#{$anchor-pref}index">Index</a> + </h2> + + <!-- generate navigation links to index subsections --> + <p class="{$css-noprint}"> + <xsl:variable name="irefs" 
select="//iref[generate-id(.) = generate-id(key('index-first-letter',translate(substring(@item,1,1),$lcase,$ucase))[1])]"/> + <xsl:variable name="xrefs" select="//reference[not(starts-with(@anchor,'deleted-'))][generate-id(.) = generate-id(key('index-first-letter',translate(substring(concat(/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),1,1),$lcase,$ucase))[1])]"/> + + <xsl:for-each select="$irefs | $xrefs"> + + <xsl:sort select="translate(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),$lcase,$ucase)" /> + + <xsl:variable name="letter" select="translate(substring(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),1,1),$lcase,$ucase)"/> + + <!-- character? --> + <xsl:if test="translate($letter,concat($lcase,$ucase,'0123456789'),'')=''"> + + <xsl:variable name="showit" select="$xml2rfc-ext-include-references-in-index='yes' or $irefs[starts-with(translate(@item,$lcase,$ucase),$letter)]"/> + + <xsl:if test="$showit"> + <a href="#{$anchor-pref}index.{$letter}"> + <xsl:value-of select="$letter" /> + </a> + <xsl:text> </xsl:text> + </xsl:if> + </xsl:if> + </xsl:for-each> + </p> + + <!-- for each index subsection --> + <div class="print2col"> + <ul class="ind"> + <xsl:variable name="irefs2" select="//iref[generate-id(.) = generate-id(key('index-first-letter',translate(substring(@item,1,1),$lcase,$ucase))[1])]"/> + <xsl:variable name="xrefs2" select="//reference[not(starts-with(@anchor,'deleted-'))][generate-id(.) = generate-id(key('index-first-letter',translate(substring(concat(/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),1,1),$lcase,$ucase))[1])]"/> + + <xsl:for-each select="$irefs2 | $xrefs2"> + <xsl:sort select="translate(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),$lcase,$ucase)" /> + <xsl:variable name="letter" select="translate(substring(concat(@item,/rfc/back/displayreference[@target=current()/@anchor]/@to,@anchor),1,1),$lcase,$ucase)"/> + + <xsl:variable name="showit" select="$xml2rfc-ext-include-references-in-index='yes' or $irefs2[starts-with(translate(@item,$lcase,$ucase),$letter)]"/> + + <xsl:if test="$showit"> + <li> + + <!-- make letters and digits stand out --> + <xsl:choose> + <xsl:when test="translate($letter,concat($lcase,$ucase,'0123456789'),'')=''"> + <a id="{$anchor-pref}index.{$letter}" href="#{$anchor-pref}index.{$letter}"> + <b><xsl:value-of select="$letter" /></b> + </a> + </xsl:when> + <xsl:otherwise> + <b><xsl:value-of select="$letter" /></b> + </xsl:otherwise> + </xsl:choose> + + <ul> + <xsl:for-each select="key('index-first-letter',translate(substring(concat(@item,@anchor),1,1),$lcase,$ucase))"> + + <xsl:sort select="translate(concat(@item,@anchor),$lcase,$ucase)" /> + + <xsl:choose> + <xsl:when test="self::reference"> + <xsl:if test="$xml2rfc-ext-include-references-in-index='yes' and not(starts-with(@anchor,'deleted-'))"> + <li> + <xsl:variable name="val"> + <xsl:call-template name="reference-name"/> + </xsl:variable> + <em> + <xsl:value-of select="substring($val,2,string-length($val)-2)"/> + </em> + <xsl:text>&#160;&#160;</xsl:text> + + <xsl:variable name="rs" select="key('xref-item',current()/@anchor) | . 
| key('anchor-item',concat('deleted-',current()/@anchor))"/> + + <xsl:for-each select="$rs"> + <xsl:call-template name="insertSingleXref" /> + </xsl:for-each> + + <xsl:variable name="rs2" select="$rs[@x:sec|@section]"/> + + <xsl:if test="$rs2"> + <ul> + <xsl:for-each select="$rs2"> + <xsl:sort select="substring-before(concat(@x:sec,@section,'.'),'.')" data-type="number"/> + <xsl:sort select="substring(concat(@x:sec,@section),2+string-length(substring-before(concat(@x:sec,@section),'.')))" data-type="number"/> + + <xsl:if test="generate-id(.) = generate-id(key('index-xref-by-sec',concat(@target,'..',@x:sec,@section))[1])"> + <li> + <em> + <xsl:choose> + <xsl:when test="translate(substring(concat(@x:sec,@section),1,1),$ucase,'')=''"> + <xsl:text>Appendix </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>Section </xsl:text> + </xsl:otherwise> + </xsl:choose> + <xsl:value-of select="@x:sec|@section"/> + </em> + <xsl:text>&#160;&#160;</xsl:text> + <xsl:for-each select="key('index-xref-by-sec',concat(@target,'..',@x:sec,@section))"> + <xsl:call-template name="insertSingleXref" /> + </xsl:for-each> + </li> + </xsl:if> + </xsl:for-each> + </ul> + </xsl:if> + + <xsl:if test="current()/x:source/@href"> + <xsl:variable name="rs3" select="$rs[not(@x:sec) and @x:rel]"/> + <xsl:variable name="doc" select="document(current()/x:source/@href)"/> + <xsl:if test="$rs3"> + <ul> + <xsl:for-each select="$rs3"> + <xsl:sort select="count($doc//*[@anchor and following::*/@anchor=substring-after(current()/@x:rel,'#')])" order="ascending" data-type="number"/> + <xsl:if test="generate-id(.) = generate-id(key('index-xref-by-anchor',concat(@target,'..',@x:rel))[1])"> + <li> + <em> + <xsl:variable name="sec"> + <xsl:for-each select="$doc//*[@anchor=substring-after(current()/@x:rel,'#')]"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:variable> + <xsl:choose> + <xsl:when test="translate(substring($sec,1,1),$ucase,'')=''"> + <xsl:text>Appendix </xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:text>Section </xsl:text> + </xsl:otherwise> + </xsl:choose> + <xsl:value-of select="$sec"/> + </em> + <xsl:text>&#160;&#160;</xsl:text> + <xsl:for-each select="key('index-xref-by-anchor',concat(@target,'..',@x:rel))"> + <xsl:call-template name="insertSingleXref" /> + </xsl:for-each> + </li> + </xsl:if> + </xsl:for-each> + </ul> + </xsl:if> + </xsl:if> + </li> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <!-- regular iref --> + <xsl:if test="generate-id(.) 
= generate-id(key('index-item',concat(@item,@anchor))[1])"> + <xsl:variable name="item" select="@item"/> + <xsl:variable name="in-artwork" select="key('index-item',$item)[@primary='true' and ancestor::artwork]"/> + + <li> + <xsl:choose> + <xsl:when test="$in-artwork"> + <span class="tt"><xsl:value-of select="@item" /></span> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@item" /> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;&#160;</xsl:text> + + <xsl:variable name="irefs3" select="key('index-item',@item)[not(@subitem) or @subitem='']"/> + <xsl:variable name="xrefs3" select="key('xref-item',$irefs3[@x:for-anchor='']/../@anchor) | key('xref-item',$irefs3/@x:for-anchor)"/> + <xsl:variable name="extrefs3" select="key('extref-item',$irefs3[@x:for-anchor='']/../@anchor) | key('extref-item',$irefs3/@x:for-anchor)"/> + + <xsl:for-each select="$irefs3|$xrefs3|$extrefs3"> + <!-- <xsl:sort select="translate(@item,$lcase,$ucase)" /> --> + <xsl:call-template name="insertSingleIref" /> + </xsl:for-each> + + <xsl:variable name="s2" select="key('index-item',@item)[@subitem and @subitem!='']"/> + <xsl:if test="$s2"> + <ul> + <xsl:for-each select="$s2"> + <xsl:sort select="translate(@subitem,$lcase,$ucase)" /> + + <xsl:if test="generate-id(.) = generate-id(key('index-item-subitem',concat(@item,'..',@subitem))[1])"> + + <xsl:variable name="in-artwork2" select="key('index-item-subitem',concat(@item,'..',@subitem))[@primary='true' and ancestor::artwork]" /> + + <li> + + <xsl:choose> + <xsl:when test="$in-artwork2"> + <span class="tt"><xsl:value-of select="@subitem" /></span> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@subitem" /> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;&#160;</xsl:text> + + <xsl:variable name="irefs4" select="key('index-item-subitem',concat(@item,'..',@subitem))"/> + <xsl:variable name="xrefs4" select="key('xref-item',$irefs4[@x:for-anchor='']/../@anchor) | key('xref-item',$irefs4/@x:for-anchor)"/> + <xsl:variable name="extrefs4" select="key('extref-item',$irefs4[@x:for-anchor='']/../@anchor) | key('extref-item',$irefs4/@x:for-anchor)"/> + + <xsl:for-each select="$irefs4|$xrefs4|$extrefs4"> + <!--<xsl:sort select="translate(@item,$lcase,$ucase)" />--> + <xsl:call-template name="insertSingleIref" /> + </xsl:for-each> + + </li> + </xsl:if> + </xsl:for-each> + </ul> + </xsl:if> + </li> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + + + </xsl:for-each> + </ul> + </li> + </xsl:if> + + </xsl:for-each> + </ul> + </div> + </section> +</xsl:template> + +<xsl:template name="insertPreamble" myns:namespaceless-elements="xml2rfc"> + + <xsl:param name="notes"/> + +<boilerplate> + <!-- TLP4, Section 6.c.iii --> + <xsl:variable name="pre5378EscapeClause"> + This document may contain material from IETF Documents or IETF Contributions published or + made publicly available before November 10, 2008. The person(s) controlling the copyright in + some of this material may not have granted the IETF Trust the right to allow modifications of such + material outside the IETF Standards Process. Without obtaining an adequate license from the + person(s) controlling the copyright in such materials, this document may not be modified outside + the IETF Standards Process, and derivative works of it may not be created outside the IETF + Standards Process, except to format it for publication as an RFC or to translate it into languages + other than English. 
+ </xsl:variable> + + <!-- TLP1, Section 6.c.i --> + <xsl:variable name="noModificationTrust200811Clause"> + This document may not be modified, and derivative works of it may not be + created, except to format it for publication as an RFC and to translate it + into languages other than English. + </xsl:variable> + + <!-- TLP2..4, Section 6.c.i --> + <xsl:variable name="noModificationTrust200902Clause"> + This document may not be modified, and derivative works of it may not be + created, except to format it for publication as an RFC or to translate it + into languages other than English.<!-- "and" changes to "or" --> + </xsl:variable> + + <!-- TLP1..4, Section 6.c.ii --> + <xsl:variable name="noDerivativesTrust200___Clause"> + This document may not be modified, and derivative works of it may not be + created, and it may not be published except as an Internet-Draft. + </xsl:variable> + + <section anchor="{$anchor-pref}status"> + <name> + <xsl:choose> + <xsl:when test="$xml2rfc-rfcedstyle='yes'">Status of This Memo</xsl:when> + <xsl:otherwise>Status of this Memo</xsl:otherwise> + </xsl:choose> + </name> + + <xsl:choose> + <xsl:when test="@ipr and not(@number)"> + <t> + <xsl:choose> + + <!-- RFC2026 --> + <xsl:when test="@ipr = 'full2026'"> + This document is an Internet-Draft and is + in full conformance with all provisions of Section 10 of RFC2026. + </xsl:when> + <xsl:when test="@ipr = 'noDerivativeWorks2026'"> + This document is an Internet-Draft and is + in full conformance with all provisions of Section 10 of RFC2026 + except that the right to produce derivative works is not granted. + </xsl:when> + <xsl:when test="@ipr = 'noDerivativeWorksNow'"> + This document is an Internet-Draft and is + in full conformance with all provisions of Section 10 of RFC2026 + except that the right to produce derivative works is not granted. + (If this document becomes part of an IETF working group activity, + then it will be brought into full compliance with Section 10 of RFC2026.) + </xsl:when> + <xsl:when test="@ipr = 'none'"> + This document is an Internet-Draft and is + NOT offered in accordance with Section 10 of RFC2026, + and the author does not provide the IETF with any rights other + than to publish as an Internet-Draft. + </xsl:when> + + <!-- RFC3667 --> + <xsl:when test="@ipr = 'full3667'"> + This document is an Internet-Draft and is subject to all provisions + of section 3 of RFC 3667. By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she become aware will be disclosed, in accordance with + RFC 3668. + </xsl:when> + <xsl:when test="@ipr = 'noModification3667'"> + This document is an Internet-Draft and is subject to all provisions + of section 3 of RFC 3667. By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she become aware will be disclosed, in accordance with + RFC 3668. This document may not be modified, and derivative works of + it may not be created, except to publish it as an RFC and to + translate it into languages other than English<xsl:if test="@iprExtract">, + other than to extract <xref target="{@iprExtract}"/> as-is + for separate use</xsl:if>. 
+ </xsl:when> + <xsl:when test="@ipr = 'noDerivatives3667'"> + This document is an Internet-Draft and is subject to all provisions + of section 3 of RFC 3667 except for the right to produce derivative + works. By submitting this Internet-Draft, each author represents + that any applicable patent or other IPR claims of which he or she + is aware have been or will be disclosed, and any of which he or she + become aware will be disclosed, in accordance with RFC 3668. This + document may not be modified, and derivative works of it may + not be created<xsl:if test="@iprExtract">, other than to extract + <xref target="{@iprExtract}"/> as-is for separate use</xsl:if>. + </xsl:when> + + <!-- RFC3978 --> + <xsl:when test="@ipr = 'full3978'"> + By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she becomes aware will be disclosed, in accordance with + Section 6 of BCP 79. + </xsl:when> + <xsl:when test="@ipr = 'noModification3978'"> + By submitting this Internet-Draft, each + author represents that any applicable patent or other IPR claims of + which he or she is aware have been or will be disclosed, and any of + which he or she becomes aware will be disclosed, in accordance with + Section 6 of BCP 79. This document may not be modified, and derivative works of + it may not be created, except to publish it as an RFC and to + translate it into languages other than English<xsl:if test="@iprExtract">, + other than to extract <xref target="{@iprExtract}"/> as-is + for separate use</xsl:if>. + </xsl:when> + <xsl:when test="@ipr = 'noDerivatives3978'"> + By submitting this Internet-Draft, each author represents + that any applicable patent or other IPR claims of which he or she + is aware have been or will be disclosed, and any of which he or she + becomes aware will be disclosed, in accordance with Section 6 of BCP 79. This + document may not be modified, and derivative works of it may + not be created<xsl:if test="@iprExtract">, other than to extract + <xref target="{@iprExtract}"/> as-is for separate use</xsl:if>. + </xsl:when> + + <!-- as of Jan 2010, TLP 4.0 --> + <xsl:when test="$ipr-2010-01 and (@ipr = 'trust200902' + or @ipr = 'noModificationTrust200902' + or @ipr = 'noDerivativesTrust200902' + or @ipr = 'pre5378Trust200902')"> + This Internet-Draft is submitted in full conformance with + the provisions of BCP 78 and BCP 79. + </xsl:when> + + <!-- as of Nov 2008, Feb 2009 and Sep 2009 --> + <xsl:when test="@ipr = 'trust200811' + or @ipr = 'noModificationTrust200811' + or @ipr = 'noDerivativesTrust200811' + or @ipr = 'trust200902' + or @ipr = 'noModificationTrust200902' + or @ipr = 'noDerivativesTrust200902' + or @ipr = 'pre5378Trust200902'"> + This Internet-Draft is submitted to IETF in full conformance with + the provisions of BCP 78 and BCP 79. + </xsl:when> + <xsl:otherwise> + CONFORMANCE UNDEFINED. 
+ </xsl:otherwise> + </xsl:choose> + + <!-- warn about iprExtract without effect --> + <xsl:if test="@iprExtract and (@ipr != 'noModification3667' and @ipr != 'noDerivatives3667' and @ipr != 'noModification3978' and @ipr != 'noDerivatives3978')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('/rfc/@iprExtract does not have any effect for /rfc/@ipr=',@ipr)"/> + </xsl:call-template> + </xsl:if> + + <!-- restrictions --> + <xsl:choose> + <xsl:when test="@ipr = 'noModificationTrust200811'"> + <xsl:value-of select="$noModificationTrust200811Clause"/> + </xsl:when> + <xsl:when test="@ipr = 'noDerivativesTrust200811'"> + <xsl:value-of select="$noDerivativesTrust200___Clause"/> + </xsl:when> + <xsl:when test="@ipr = 'noModificationTrust200902'"> + <xsl:value-of select="$noModificationTrust200902Clause"/> + </xsl:when> + <xsl:when test="@ipr = 'noDerivativesTrust200902'"> + <xsl:value-of select="$noDerivativesTrust200___Clause"/> + </xsl:when> + <!-- escape clause moved to Copyright Notice as of 2009-11 --> + <xsl:when test="@ipr = 'pre5378Trust200902' and $pub-yearmonth &lt; 200911"> + <xsl:value-of select="$pre5378EscapeClause"/> + </xsl:when> + + <xsl:otherwise /> + </xsl:choose> + </t> + <xsl:choose> + <xsl:when test="$id-boilerplate='2010'"> + <t> + Internet-Drafts are working documents of the Internet Engineering + Task Force (IETF). Note that other groups may also distribute + working documents as Internet-Drafts. The list of current + Internet-Drafts is at <eref target='http://datatracker.ietf.org/drafts/current/'>http://datatracker.ietf.org/drafts/current/</eref>. + </t> + </xsl:when> + <xsl:otherwise> + <t> + Internet-Drafts are working documents of the Internet Engineering + Task Force (IETF), its areas, and its working groups. + Note that other groups may also distribute working documents as + Internet-Drafts. + </t> + </xsl:otherwise> + </xsl:choose> + <t> + Internet-Drafts are draft documents valid for a maximum of six months + and may be updated, replaced, or obsoleted by other documents at any time. + It is inappropriate to use Internet-Drafts as reference material or to cite + them other than as &#8220;work in progress&#8221;. + </t> + <xsl:if test="$id-boilerplate=''"> + <t> + The list of current Internet-Drafts can be accessed at + <eref target='http://www.ietf.org/ietf/1id-abstracts.txt'>http://www.ietf.org/ietf/1id-abstracts.txt</eref>. + </t> + <t> + The list of Internet-Draft Shadow Directories can be accessed at + <eref target='http://www.ietf.org/shadow.html'>http://www.ietf.org/shadow.html</eref>. + </t> + </xsl:if> + <t> + This Internet-Draft will expire <xsl:call-template name="expirydate"><xsl:with-param name="in-prose" select="true()"/></xsl:call-template>. + </t> + </xsl:when> + + <xsl:when test="@category='bcp' and $rfc-boilerplate='2010'"> + <t> + This memo documents an Internet Best Current Practice. + </t> + </xsl:when> + <xsl:when test="@category='bcp'"> + <t> + This document specifies an Internet Best Current Practices for the Internet + Community, and requests discussion and suggestions for improvements. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="@category='exp' and $rfc-boilerplate='2010'"> + <t> + This document is not an Internet Standards Track specification; it is + published for examination, experimental implementation, and evaluation. + </t> + </xsl:when> + <xsl:when test="@category='exp'"> + <t> + This memo defines an Experimental Protocol for the Internet community. 
+ It does not specify an Internet standard of any kind. + Discussion and suggestions for improvement are requested. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="@category='historic' and $rfc-boilerplate='2010'"> + <t> + This document is not an Internet Standards Track specification; it is + published for the historical record. + </t> + </xsl:when> + <xsl:when test="@category='historic'"> + <t> + This memo describes a historic protocol for the Internet community. + It does not specify an Internet standard of any kind. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="@category='std' and $rfc-boilerplate='2010'"> + <t> + This is an Internet Standards Track document. + </t> + </xsl:when> + <xsl:when test="@category='std'"> + <t> + This document specifies an Internet standards track protocol for the Internet + community, and requests discussion and suggestions for improvements. + Please refer to the current edition of the &#8220;Internet Official Protocol + Standards&#8221; (STD 1) for the standardization state and status of this + protocol. Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:when test="(@category='info' or not(@category)) and $rfc-boilerplate='2010'"> + <t> + This document is not an Internet Standards Track specification; it is + published for informational purposes. + </t> + </xsl:when> + <xsl:when test="@category='info' or not(@category)"> + <t> + This memo provides information for the Internet community. + It does not specify an Internet standard of any kind. + Distribution of this memo is unlimited. + </t> + </xsl:when> + <xsl:otherwise> + <t> + UNSUPPORTED CATEGORY. + </t> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unsupported value for /rfc/@category: ', @category)"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <!-- 2nd and 3rd paragraph --> + <xsl:if test="$rfc-boilerplate='2010' and @number"> + <t> + <xsl:if test="@category='exp'"> + This document defines an Experimental Protocol for the Internet + community. + </xsl:if> + <xsl:if test="@category='historic'"> + This document defines a Historic Document for the Internet community. + </xsl:if> + <xsl:choose> + <xsl:when test="$submissionType='IETF'"> + This document is a product of the Internet Engineering Task Force + (IETF). + <xsl:choose> + <xsl:when test="$consensus='yes'"> + It represents the consensus of the IETF community. It has + received public review and has been approved for publication by + the Internet Engineering Steering Group (IESG). + </xsl:when> + <xsl:otherwise> + It has been approved for publication by the Internet Engineering + Steering Group (IESG). + <!-- sanity check of $consensus --> + <xsl:if test="@category='std' or @category='bcp'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'IETF BCPs and Standards Track documents require IETF consensus, check values of @category and @consensus!'"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$submissionType='IAB'"> + This document is a product of the Internet Architecture Board (IAB) + and represents information that the IAB has deemed valuable to + provide for permanent record. + <xsl:if test="$consensus='yes'"> + It represents the consensus of the Internet Architecture Board (IAB). 
+ </xsl:if> + </xsl:when> + <xsl:when test="$submissionType='IRTF'"> + This document is a product of the Internet Research Task Force (IRTF). + The IRTF publishes the results of Internet-related research and + development activities. These results might not be suitable for + deployment. + <xsl:choose> + <xsl:when test="$consensus='yes' and front/workgroup!=''"> + This RFC represents the consensus of the + <xsl:value-of select="front/workgroup"/> Research Group of the Internet + Research Task Force (IRTF). + </xsl:when> + <xsl:when test="$consensus='no' and front/workgroup!=''"> + This RFC represents the individual opinion(s) of one or more + members of the <xsl:value-of select="front/workgroup"/> Research Group of the + Internet Research Task Force (IRTF). + </xsl:when> + <xsl:otherwise> + <!-- no research group --> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$submissionType='independent'"> + This is a contribution to the RFC Series, independently of any other + RFC stream. The RFC Editor has chosen to publish this document at + its discretion and makes no statement about its value for + implementation or deployment. + </xsl:when> + <xsl:otherwise> + <!-- will contain error message already --> + <xsl:value-of select="$submissionType"/> + </xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="$submissionType='IETF'"> + <xsl:choose> + <xsl:when test="@category='bcp'"> + Further information on BCPs is available in <xsl:value-of select="$hab-reference"/>. + </xsl:when> + <xsl:when test="@category='std'"> + Further information on Internet Standards is available in <xsl:value-of select="$hab-reference"/>. + </xsl:when> + <xsl:otherwise> + Not all documents approved by the IESG are a candidate for any + level of Internet Standard; see <xsl:value-of select="$hab-reference"/>. + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="approver"> + <xsl:choose> + <xsl:when test="$submissionType='IAB'">IAB</xsl:when> + <xsl:when test="$submissionType='IRTF'">IRSG</xsl:when> + <xsl:otherwise>RFC Editor</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + Documents approved for publication by the + <xsl:value-of select="$approver"/> are not a candidate for any level + of Internet Standard; see <xsl:value-of select="$hab-reference"/>. + </xsl:otherwise> + </xsl:choose> + </t> + <t> + Information about the current status of this document, any errata, and + how to provide feedback on it may be obtained at + <eref target="http://www.rfc-editor.org/info/rfc{@number}">http://www.rfc-editor.org/info/rfc<xsl:value-of select="@number"/></eref>. + </t> + </xsl:if> + + </section> + + <!-- some notes might go here; see http://www.rfc-editor.org/rfc-style-guide/rfc-style --> + <xsl:copy-of select="$notes"/> + + <xsl:choose> + <xsl:when test="$ipr-2008-11"> + <section anchor="{$anchor-pref}copyrightnotice"> + <name>Copyright Notice</name> + <t> + Copyright &#169; <xsl:value-of select="$xml2rfc-ext-pub-year" /> IETF Trust and the persons identified + as the document authors. All rights reserved. + </t> + <xsl:choose> + <xsl:when test="$ipr-2010-01"> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>) + in effect on the date of publication of this document. Please + review these documents carefully, as they describe your rights + and restrictions with respect to this document. 
+ <xsl:if test="$submissionType='IETF'"> + Code Components extracted from this document must include + Simplified BSD License text as described in Section 4.e of the + Trust Legal Provisions and are provided without warranty as + described in the Simplified BSD License. + </xsl:if> + </t> + </xsl:when> + <xsl:when test="$ipr-2009-09"> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>) + in effect on the date of publication of this document. Please + review these documents carefully, as they describe your rights + and restrictions with respect to this document. Code Components + extracted from this document must include Simplified BSD License + text as described in Section 4.e of the Trust Legal Provisions + and are provided without warranty as described in the BSD License. + </t> + </xsl:when> + <xsl:when test="$ipr-2009-02"> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents in effect on the date of + publication of this document + (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>). + Please review these documents carefully, as they describe your rights and restrictions with + respect to this document. + </t> + </xsl:when> + <xsl:otherwise> + <t> + This document is subject to BCP 78 and the IETF Trust's Legal + Provisions Relating to IETF Documents + (<eref target="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</eref>) in effect on the date of + publication of this document. Please review these documents + carefully, as they describe your rights and restrictions with respect + to this document. 
+ </t> + </xsl:otherwise> + </xsl:choose> + + <!-- add warning for incompatible IPR attribute on RFCs --> + <xsl:variable name="stds-rfc-compatible-ipr" + select="@ipr='pre5378Trust200902' or @ipr='trust200902' or @ipr='trust200811' or @ipr='full3978' or @ipr='full3667' or @ipr='full2026'"/> + + <xsl:variable name="rfc-compatible-ipr" + select="$stds-rfc-compatible-ipr or @ipr='noModificationTrust200902' or @ipr='noDerivativesTrust200902' or @ipr='noModificationTrust200811' or @ipr='noDerivativesTrust200811'"/> + <!-- TODO: may want to add more historic variants --> + + <xsl:variable name="is-stds-track" + select="$submissionType='IETF' and @category='std'"/> + + <xsl:variable name="status-diags"> + <xsl:choose> + <xsl:when test="$is-stds-track and @number and @ipr and not($stds-rfc-compatible-ipr)"> + <xsl:value-of select="concat('The /rfc/@ipr attribute value of ',@ipr,' is not allowed on standards-track RFCs.')"/> + </xsl:when> + <xsl:when test="@number and @ipr and not($rfc-compatible-ipr)"> + <xsl:value-of select="concat('The /rfc/@ipr attribute value of ',@ipr,' is not allowed on RFCs.')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$status-diags!=''"> + <t> + <spanx><xsl:value-of select="$status-diags"/></spanx> + </t> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$status-diags"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="(@number or $pub-yearmonth >= 200911) and @ipr = 'pre5378Trust200902'"> + <!-- special case: RFC5378 escape applies to RFCs as well --> + <!-- for IDs historically in Status Of This Memo, over here starting 2009-11 --> + <t> + <xsl:value-of select="$pre5378EscapeClause"/> + </t> + </xsl:when> + <xsl:when test="not(@number)"> + <!-- not an RFC, handled elsewhere --> + </xsl:when> + <xsl:when test="not(@ipr)"> + <!-- no IPR value; done --> + </xsl:when> + <xsl:when test="@ipr='trust200902' or @ipr='trust200811' or @ipr='full3978' or @ipr='full3667' or @ipr='full2026'"> + <!-- default IPR, allowed here --> + </xsl:when> + <xsl:when test="@ipr='noModificationTrust200811'"> + <t> + <xsl:value-of select="$noModificationTrust200811Clause"/> + </t> + </xsl:when> + <xsl:when test="@ipr='noModificationTrust200902'"> + <t> + <xsl:value-of select="$noModificationTrust200902Clause"/> + </t> + </xsl:when> + <xsl:when test="@ipr='noDerivativesTrust200902' or @ipr='noDerivativesTrust200811'"> + <t> + <xsl:value-of select="$noDerivativesTrust200___Clause"/> + </t> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="msg" select="concat('unexpected value of /rfc/@ipr for this type of document: ',@ipr)"/> + <t> + <spanx><xsl:value-of select="$msg"/></spanx> + </t> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="inline" select="'no'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + </section> + </xsl:when> + <xsl:when test="$ipr-2007-08"> + <!-- no copyright notice --> + </xsl:when> + <xsl:when test="$ipr-rfc4748"> + <section anchor="{$anchor-pref}copyrightnotice"> + <name>Copyright Notice</name> + <t> + Copyright &#169; The IETF Trust (<xsl:value-of select="$xml2rfc-ext-pub-year" />). All Rights Reserved. + </t> + </section> + </xsl:when> + <xsl:otherwise> + <section anchor="{$anchor-pref}copyrightnotice"> + <name>Copyright Notice</name> + <t> + Copyright &#169; The Internet Society (<xsl:value-of select="$xml2rfc-ext-pub-year" />). All Rights Reserved. 
+ </t> + </section> + </xsl:otherwise> + </xsl:choose> +</boilerplate> + +</xsl:template> + +<!-- TOC generation --> + +<xsl:template match="/" mode="toc"> + <hr class="{$css-noprint}"/> + + <nav id="{$anchor-pref}toc"> + <xsl:call-template name="insert-errata"> + <xsl:with-param name="section" select="'toc'"/> + </xsl:call-template> + + <h2 class="np"> <!-- this pagebreak occurs always --> + <a href="#{$anchor-pref}toc">Table of Contents</a> + </h2> + + <ul class="toc"> + <xsl:apply-templates mode="toc" /> + </ul> + </nav> +</xsl:template> + +<xsl:template name="insert-toc-line"> + <xsl:param name="number" /> + <xsl:param name="target" /> + <xsl:param name="title" /> + <xsl:param name="name" /> + <xsl:param name="tocparam" /> + <xsl:param name="oldtitle" /> + <xsl:param name="waschanged" /> + + <xsl:variable name="depth"> + <!-- count the dots --> + <xsl:choose> + <xsl:when test="starts-with($number,'unnumbered-')"> + <xsl:value-of select="string-length(translate(substring-after($number,'unnumbered-'),'.ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890&#167;','.'))"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="string-length(translate($number,'.ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890&#167;','.'))"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <!-- handle tocdepth parameter --> + <xsl:choose> + <xsl:when test="(not($tocparam) or $tocparam='' or $tocparam='default') and $depth >= $parsedTocDepth"> + <!-- dropped entry because excluded --> + <xsl:attribute name="class">excluded</xsl:attribute> + </xsl:when> + <xsl:when test="$tocparam='exclude'"> + <!-- dropped entry because excluded --> + <xsl:attribute name="class">excluded</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="starts-with($number,'del-')"> + <del> + <xsl:value-of select="$number" /> + <a href="#{$target}"><xsl:value-of select="$title"/></a> + </del> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$number != '' and not(contains($number,'unnumbered-'))"> + <a href="#{$anchor-pref}section.{$number}"> + <xsl:call-template name="emit-section-number"> + <xsl:with-param name="no" select="$number"/> + </xsl:call-template> + </a> + <xsl:text>&#160;&#160;&#160;</xsl:text> + </xsl:if> + <a href="#{$target}"> + <xsl:choose> + <xsl:when test="$waschanged!=''"> + <ins><xsl:value-of select="$title"/></ins> + <del><xsl:value-of select="$oldtitle"/></del> + </xsl:when> + <xsl:when test="$name"> + <xsl:call-template name="render-name-ref"> + <xsl:with-param name="n" select="$name/node()"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$title"/> + </xsl:otherwise> + </xsl:choose> + </a> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="back-toc"> + + <xsl:if test="//cref and $xml2rfc-comments='yes' and $xml2rfc-inline!='yes'"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'comments')"/> + <xsl:with-param name="title" select="'Editorial Comments'"/> + </xsl:call-template> + </li> + </xsl:if> + + <xsl:if test="$xml2rfc-ext-authors-section='before-appendices'"> + <xsl:apply-templates select="/rfc/front" mode="toc" /> + </xsl:if> + <xsl:apply-templates select="back/*[not(self::references)]" mode="toc" /> + + <!-- insert the index if index entries exist --> + <xsl:if test="$has-index"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'index')"/> + <xsl:with-param name="title" 
select="'Index'"/> + </xsl:call-template> + </li> + </xsl:if> + + <xsl:if test="$xml2rfc-ext-authors-section='end'"> + <xsl:apply-templates select="/rfc/front" mode="toc" /> + </xsl:if> + + <!-- copyright statements --> + <xsl:if test="$xml2rfc-private='' and not($no-copylong)"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'ipr')"/> + <xsl:with-param name="title" select="'Intellectual Property and Copyright Statements'"/> + </xsl:call-template> + </li> + </xsl:if> + +</xsl:template> + +<xsl:template match="front" mode="toc"> + + <xsl:variable name="authors-title"> + <xsl:call-template name="get-authors-section-title"/> + </xsl:variable> + <xsl:variable name="authors-number"> + <xsl:call-template name="get-authors-section-number"/> + </xsl:variable> + + <xsl:if test="$authors-number!='suppress' and $xml2rfc-authorship!='no'"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'authors')"/> + <xsl:with-param name="title" select="$authors-title"/> + <xsl:with-param name="number" select="$authors-number"/> + </xsl:call-template> + </li> + </xsl:if> + +</xsl:template> + +<xsl:template name="references-toc"> + + <!-- distinguish two cases: (a) single references element (process + as toplevel section; (b) multiple references sections (add one toplevel + container with subsection) --> + + <xsl:variable name="refsecs" select="/rfc/back/references|/rfc/back/ed:replace/ed:ins/references"/> + + <xsl:choose> + <xsl:when test="count($refsecs) = 0"> + <!-- nop --> + </xsl:when> + <xsl:when test="count($refsecs) = 1"> + <xsl:for-each select="$refsecs"> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="@title!=''"><xsl:value-of select="@title" /></xsl:when> + <xsl:otherwise><xsl:value-of select="$xml2rfc-refparent"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number"> + <xsl:call-template name="get-references-section-number"/> + </xsl:with-param> + <xsl:with-param name="target" select="concat($anchor-pref,'references')"/> + <xsl:with-param name="title" select="$title"/> + <xsl:with-param name="name" select="name"/> + </xsl:call-template> + </li> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <li> + <!-- insert pseudo container --> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number"> + <xsl:call-template name="get-references-section-number"/> + </xsl:with-param> + <xsl:with-param name="target" select="concat($anchor-pref,'references')"/> + <xsl:with-param name="title" select="$xml2rfc-refparent"/> + </xsl:call-template> + + <ul> + <!-- ...with subsections... 
--> + <xsl:for-each select="$refsecs"> + <xsl:variable name="title"> + <xsl:choose> + <xsl:when test="@title!=''"><xsl:value-of select="@title" /></xsl:when> + <xsl:otherwise><xsl:value-of select="$xml2rfc-refparent"/></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + + <xsl:variable name="num"> + <xsl:number level="any"/> + </xsl:variable> + + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number" select="$sectionNumber"/> + <xsl:with-param name="target" select="concat($anchor-pref,'references','.',$num)"/> + <xsl:with-param name="title" select="$title"/> + <xsl:with-param name="name" select="name"/> + </xsl:call-template> + </li> + </xsl:for-each> + </ul> + </li> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="section|appendix" mode="toc"> + <xsl:variable name="sectionNumber"> + <xsl:call-template name="get-section-number" /> + </xsl:variable> + + <xsl:variable name="target"> + <xsl:choose> + <xsl:when test="@anchor"><xsl:value-of select="@anchor" /></xsl:when> + <xsl:otherwise><xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$sectionNumber" /></xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <!-- obtain content, just to check whether we need to recurse at all --> + <xsl:variable name="content"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number" select="$sectionNumber"/> + <xsl:with-param name="target" select="$target"/> + <xsl:with-param name="title" select="@title"/> + <xsl:with-param name="name" select="name"/> + <xsl:with-param name="tocparam" select="@toc"/> + <xsl:with-param name="oldtitle" select="@ed:old-title"/> + <xsl:with-param name="waschanged" select="@ed:resolves"/> + </xsl:call-template> + + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </li> + </xsl:variable> + + <xsl:if test="$content!=''"> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="number" select="$sectionNumber"/> + <xsl:with-param name="target" select="$target"/> + <xsl:with-param name="title" select="@title"/> + <xsl:with-param name="name" select="name"/> + <xsl:with-param name="tocparam" select="@toc"/> + <xsl:with-param name="oldtitle" select="@ed:old-title"/> + <xsl:with-param name="waschanged" select="@ed:resolves"/> + </xsl:call-template> + + <!-- obtain nested content, just to check whether we need to recurse at all --> + <xsl:variable name="nested-content"> + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </xsl:variable> + + <!-- only recurse if we need to (do not produce useless list container) --> + <xsl:if test="$nested-content!=''"> + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </xsl:if> + </li> + </xsl:if> +</xsl:template> + +<xsl:template match="middle" mode="toc"> + <xsl:apply-templates mode="toc" /> + <xsl:call-template name="references-toc" /> +</xsl:template> + +<xsl:template match="rfc" mode="toc"> + <xsl:apply-templates select="middle" mode="toc" /> + <xsl:call-template name="back-toc" /> +</xsl:template> + +<xsl:template match="ed:del|ed:ins|ed:replace" mode="toc"> + <xsl:apply-templates mode="toc" /> +</xsl:template> + +<xsl:template match="*|text()" mode="toc" /> + + +<xsl:template name="insertTocAppendix"> + + <xsl:if test="//figure[@title!='' or @anchor!='']"> + <ul class="toc"> + <li>Figures + <ul> + <xsl:for-each select="//figure[@title!='' or @anchor!='']"> + <xsl:variable name="n"><xsl:call-template 
name="get-figure-number"/></xsl:variable> + <xsl:variable name="title"> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Figure </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="@title!=''">: </xsl:if> + </xsl:if> + <xsl:if test="@title"><xsl:value-of select="@title"/></xsl:if> + </xsl:variable> + <li> + <xsl:call-template name="insert-toc-line"> + <xsl:with-param name="target" select="concat($anchor-pref,'figure.',$n)" /> + <xsl:with-param name="title" select="$title" /> + </xsl:call-template> + </li> + </xsl:for-each> + </ul> + </li> + </ul> + </xsl:if> + + <!-- experimental --> + <xsl:if test="//ed:issue"> + <xsl:call-template name="insertIssuesList" /> + </xsl:if> + +</xsl:template> + +<xsl:template name="reference-name"> + <xsl:param name="node" select="."/> + + <xsl:for-each select="$node"> + <xsl:choose> + <xsl:when test="$xml2rfc-symrefs!='no' and ancestor::ed:del"> + <xsl:variable name="unprefixed" select="substring-after(@anchor,'deleted-')"/> + <xsl:choose> + <xsl:when test="$unprefixed!=''"> + <xsl:value-of select="concat('[',$unprefixed,']')"/> + </xsl:when> + <xsl:otherwise> + <xsl:if test="count(//reference[@anchor=current()/@anchor])!=1"> + <xsl:message>Deleted duplicate anchors should have the prefix "deleted-": <xsl:value-of select="@anchor"/></xsl:message> + </xsl:if> + <xsl:value-of select="concat('[',@anchor,']')"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$xml2rfc-symrefs!='no'"> + <xsl:text>[</xsl:text> + <xsl:choose> + <xsl:when test="/rfc/back/displayreference[@target=current()/@anchor]"> + <xsl:value-of select="/rfc/back/displayreference[@target=current()/@anchor]/@to"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@anchor"/> + </xsl:otherwise> + </xsl:choose> + <xsl:text>]</xsl:text> + </xsl:when> + <xsl:when test="ancestor::ed:del"> + <xsl:text>[del]</xsl:text> + </xsl:when> + <xsl:otherwise>[<xsl:number level="any" count="reference[not(ancestor::ed:del)]"/>]</xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + + + +<xsl:template name="replace-substring"> + <xsl:param name="string" /> + <xsl:param name="replace" /> + <xsl:param name="by" /> + + <xsl:choose> + <xsl:when test="contains($string,$replace)"> + <xsl:value-of select="concat(substring-before($string, $replace),$by)" /> + <xsl:call-template name="replace-substring"> + <xsl:with-param name="string" select="substring-after($string,$replace)" /> + <xsl:with-param name="replace" select="$replace" /> + <xsl:with-param name="by" select="$by" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$string" /></xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="rfc-or-id-link"> + <xsl:param name="name" /> + + <xsl:choose> + <xsl:when test="starts-with($name,'draft-')"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-internet-draft-uri"> + <xsl:with-param name="internet-draft" select="$name"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"><xsl:value-of select="$name"/></a> + <xsl:call-template name="check-front-matter-ref"> + <xsl:with-param name="name" select="$name"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="string(number($name))=$name"> + <xsl:variable name="uri"> + <xsl:call-template name="compute-rfc-uri"> + <xsl:with-param name="rfc" select="$name"/> + </xsl:call-template> + </xsl:variable> + <a href="{$uri}"><xsl:value-of select="$name"/></a> + <xsl:call-template name="check-front-matter-ref"> + <xsl:with-param name="name" select="$name"/> + 
</xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$name"/> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('In metadata obsoletes/updates, RFC number of draft name is expected - found: ',$name)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="rfclist"> + <xsl:param name="list" /> + <xsl:choose> + <xsl:when test="contains($list,',')"> + <xsl:variable name="rfcNo" select="substring-before($list,',')" /> + <xsl:call-template name="rfc-or-id-link"> + <xsl:with-param name="name" select="$rfcNo"/> + </xsl:call-template> + <xsl:text>, </xsl:text> + <xsl:call-template name="rfclist"> + <xsl:with-param name="list" select="normalize-space(substring-after($list,','))" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="rfcNo" select="$list" /> + <xsl:call-template name="rfc-or-id-link"> + <xsl:with-param name="name" select="$rfcNo"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="check-front-matter-ref"> + <xsl:param name="name"/> + <xsl:choose> + <xsl:when test="starts-with($name,'draft-')"> + <xsl:if test="not(//references//reference//seriesInfo[@name='Internet-Draft' and @value=$name])"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('front matter mentions I-D ',$name,' for which there is no reference element')"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:if test="not(//references//reference//seriesInfo[@name='RFC' and @value=$name])"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat('front matter mentions RFC ',$name,' for which there is no reference element')"/> + </xsl:call-template> + </xsl:if> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="check-anchor"> + <xsl:if test="@anchor and @anchor!=''"> + <!-- check validity of anchor name --> + <xsl:variable name="t" select="@anchor"/> + <xsl:variable name="tstart" select="substring($t,1,1)"/> + + <!-- we only check for disallowed ASCII characters for now --> + <xsl:variable name="not-namestartchars">&#9;&#10;&#13;&#32;!"#$%&amp;'()*+,-./0123456789;&lt;=&gt;?@[\]^`[|}~</xsl:variable> + + <xsl:if test="$tstart!=translate($tstart,$not-namestartchars,'')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('anchor &quot;',$t,'&quot; can not start with character &quot;',$tstart,'&quot;')"/> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="check-anchor-non-start"> + <xsl:with-param name="f" select="$t"/> + <xsl:with-param name="t" select="$t"/> + </xsl:call-template> + </xsl:if> +</xsl:template> + +<xsl:template name="check-anchor-non-start"> + <xsl:param name="f"/> + <xsl:param name="t"/> + + <xsl:variable name="not-namechars">&#9;&#10;&#13;&#32;!"#$%&amp;'()*+,/;&lt;=&gt;?@[\]^`[|}~</xsl:variable> + + <xsl:choose> + <xsl:when test="$t=''"> + <!-- Done --> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="s" select="substring($t,1,1)"/> + <xsl:choose> + <xsl:when test="$s!=translate($s,$not-namechars,'')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('anchor &quot;',$f,'&quot; contains invalid character &quot;',$s,'&quot; at position ',string-length($f) - string-length($t))"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="check-anchor-non-start"> + <xsl:with-param name="f" select="$f"/> + <xsl:with-param name="t" 
select="substring($t,2)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="sluggy-anchor"> + <xsl:if test="self::section and (not(@anchor) or @anchor='')"> + <xsl:variable name="fr">ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789.()-_ :%,/=@&lt;&gt;</xsl:variable> + <xsl:variable name="to">abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz0123456789.---_---------</xsl:variable> + <xsl:variable name="canslug" select="translate(normalize-space(concat(@title,name)),$fr,'')=''"/> + <xsl:if test="$canslug"> + <xsl:variable name="slug" select="translate(normalize-space(concat(@title,name)),$fr,$to)"/> + <xsl:variable name="conflicts" select="//section[not(@anchor) and $slug=translate(normalize-space(concat(@title,name)),$fr,$to)]"/> + <xsl:choose> + <xsl:when test="count($conflicts)>1"> + <xsl:variable name="c" select="preceding::*[not(@anchor) and $slug=translate(normalize-space(concat(@title,name)),$fr,$to)]"/> + <xsl:value-of select="concat('n-',$slug,'_',(1+count($c)))"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat('n-',$slug)"/> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + </xsl:if> +</xsl:template> + +<xsl:template name="copy-anchor"> + <xsl:call-template name="check-anchor"/> + <xsl:choose> + <xsl:when test="@anchor and @anchor!=''"> + <xsl:attribute name="id"><xsl:value-of select="@anchor"/></xsl:attribute> + </xsl:when> + <xsl:when test="self::section"> + <xsl:variable name="slug"> + <xsl:call-template name="sluggy-anchor"/> + </xsl:variable> + <xsl:if test="$slug!=''"> + <xsl:attribute name="id"><xsl:value-of select="$slug"/></xsl:attribute> + </xsl:if> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:template> + +<xsl:template name="rfclist-for-dcmeta"> + <xsl:param name="list" /> + <xsl:choose> + <xsl:when test="contains($list,',')"> + <xsl:variable name="rfcNo" select="substring-before($list,',')" /> + <meta name="dct.replaces" content="urn:ietf:rfc:{$rfcNo}" /> + <xsl:call-template name="rfclist-for-dcmeta"> + <xsl:with-param name="list" select="normalize-space(substring-after($list,','))" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="rfcNo" select="$list" /> + <meta name="dct.replaces" content="urn:ietf:rfc:{$rfcNo}" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-paragraph-number"> + <!-- no paragraph numbers in certain containers --> + <xsl:if test="not(ancestor::ul) and not(ancestor::dl) and not(ancestor::ol)"> + + <xsl:choose> + <xsl:when test="parent::blockquote or parent::x:blockquote"> + <!-- boilerplate --> + <xsl:for-each select="parent::blockquote|parent::x:blockquote"><xsl:call-template name="get-paragraph-number" />.</xsl:for-each> + <xsl:number count="t|x:blockquote|blockquote|x:note|aside|ul|dl|ol"/> + </xsl:when> + + <xsl:when test="parent::aside or parent::x:note"> + <!-- boilerplate --> + <xsl:for-each select="parent::aside|parent::x:note"><xsl:call-template name="get-paragraph-number" />.</xsl:for-each> + <xsl:number count="t|x:blockquote|blockquote|x:note|aside|ul|dl|ol"/> + </xsl:when> + + <xsl:when test="ancestor::section"> + <!-- get section number of ancestor section element, then add t number --> + <xsl:for-each select="ancestor::section[1]"><xsl:call-template name="get-section-number" />.p.</xsl:for-each> + <xsl:variable name="b"><xsl:number count="t|x:blockquote|blockquote|x:note|aside|ul|dl|ol"/></xsl:variable> + <xsl:choose> + <xsl:when 
test="parent::section and ../@removeInRFC='true' and ../t[1]!=$section-removeInRFC"> + <xsl:value-of select="1 + $b"/> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$b"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + + <xsl:when test="ancestor::note"> + <!-- or get section number of ancestor note element, then add t number --> + <xsl:for-each select="ancestor::note[1]"><xsl:call-template name="get-section-number" />.p.</xsl:for-each> + <xsl:variable name="b"><xsl:number count="t|x:blockquote|blockquote|x:note|aside|ul|dl|ol"/></xsl:variable> + <xsl:choose> + <xsl:when test="parent::note and ../@removeInRFC='true' and ../t[1]!=$note-removeInRFC"> + <xsl:value-of select="1 + $b"/> + </xsl:when> + <xsl:otherwise><xsl:value-of select="$b"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + + <!-- abstract --> + <xsl:when test="ancestor::abstract"> + <xsl:text>p.</xsl:text> + <xsl:number count="t|x:blockquote|blockquote|x:note|aside|ul|dl|ol"/> + </xsl:when> + + <xsl:otherwise/> + </xsl:choose> + </xsl:if> +</xsl:template> + +<xsl:template name="editingMark"> + <xsl:if test="$xml2rfc-editing='yes' and ancestor::rfc"> + <sup class="editingmark"><span><xsl:number level="any" count="postamble|preamble|t"/></span>&#0160;</sup> + </xsl:if> +</xsl:template> + +<!-- internal ref support --> +<xsl:key name="anchor-item-alias" match="//*[@anchor and (x:anchor-alias/@value or ed:replace/ed:ins/x:anchor-alias)]" use="x:anchor-alias/@value | ed:replace/ed:ins/x:anchor-alias/@value"/> + +<xsl:template match="x:ref"> + <xsl:variable name="val" select="normalize-space(.)"/> + <xsl:variable name="target" select="key('anchor-item',$val) | key('anchor-item-alias',$val) | //reference/x:source[x:defines=$val]"/> + <xsl:if test="count($target)>1"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">internal link target for '<xsl:value-of select="."/>' is ambiguous; picking first.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="$target[1]/@anchor"> + <a href="#{$target[1]/@anchor}" class="smpl"> + <xsl:call-template name="copy-anchor"/> + <!-- insert id when a backlink to this xref is needed in the index --> + <xsl:if test="//iref[@x:for-anchor=$val] | //iref[@x:for-anchor='' and ../@anchor=$val]"> + <xsl:attribute name="id"><xsl:call-template name="compute-extref-anchor"/></xsl:attribute> + </xsl:if> + <xsl:value-of select="."/> + </a> + </xsl:when> + <xsl:when test="$target[1]/self::x:source"> + <xsl:variable name="extdoc" select="document($target[1]/@href)"/> + <xsl:variable name="nodes" select="$extdoc//*[@anchor and (x:anchor-alias/@value=$val)]"/> + <xsl:if test="not($nodes)"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Anchor '<xsl:value-of select="$val"/>' not found in source file '<xsl:value-of select="$target[1]/@href"/>'.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:variable name="t"> + <xsl:call-template name="computed-auto-target"> + <xsl:with-param name="bib" select="$target[1]/.."/> + <xsl:with-param name="ref" select="$nodes[1]"/> + </xsl:call-template> + </xsl:variable> + <a href="{$t}" class="smpl"> + <xsl:value-of select="."/> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">internal link target for '<xsl:value-of select="."/>' does not exist.</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="."/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- Nothing to do here --> +<xsl:template match="x:anchor-alias" /> + +<!-- Quotes --> 
+<xsl:template match="x:q"> + <q> + <xsl:copy-of select="@cite"/> + <xsl:apply-templates/> + </q> +</xsl:template> + +<!-- Notes --> +<xsl:template match="x:note|aside"> + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + + <div> + <xsl:if test="$p!='' and not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <xsl:attribute name="id"><xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$p"/></xsl:attribute> + </xsl:if> + <aside> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </aside> + </div> +</xsl:template> + +<xsl:template match="x:bcp14|bcp14"> + <!-- check valid BCP14 keywords, then emphasize them --> + <xsl:variable name="c" select="normalize-space(.)"/> + <xsl:choose> + <xsl:when test="$c='MUST' or $c='REQUIRED' or $c='SHALL'"> + <em class="bcp14"><xsl:value-of select="."/></em> + </xsl:when> + <xsl:when test="$c='MUST NOT' or $c='SHALL NOT'"> + <em class="bcp14"><xsl:value-of select="."/></em> + </xsl:when> + <xsl:when test="$c='SHOULD' or $c='RECOMMENDED'"> + <em class="bcp14"><xsl:value-of select="."/></em> + </xsl:when> + <xsl:when test="$c='SHOULD NOT' or $c='NOT RECOMMENDED'"> + <em class="bcp14"><xsl:value-of select="."/></em> + </xsl:when> + <xsl:when test="$c='MAY' or $c='OPTIONAL'"> + <em class="bcp14"><xsl:value-of select="."/></em> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="."/> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Unknown BCP14 keyword: ',.)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:blockquote|blockquote"> + <xsl:variable name="p"> + <xsl:call-template name="get-paragraph-number" /> + </xsl:variable> + <div> + <xsl:call-template name="insertInsDelClass"/> + <xsl:call-template name="editingMark" /> + <xsl:if test="string-length($p) &gt; 0 and not(ancestor::ed:del) and not(ancestor::ed:ins)"> + <xsl:attribute name="id"><xsl:value-of select="$anchor-pref"/>section.<xsl:value-of select="$p"/></xsl:attribute> + </xsl:if> + <blockquote> + <xsl:call-template name="copy-anchor"/> + <xsl:copy-of select="@cite"/> + <xsl:choose> + <xsl:when test="t|ul|ol|dl|artwork|figure|sourcecode"> + <xsl:apply-templates/> + </xsl:when> + <xsl:otherwise> + <p> + <xsl:apply-templates/> + </p> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="@quotedFrom"> + <cite> + <xsl:text>&#8212; </xsl:text> + <xsl:choose> + <xsl:when test="@cite"><a href="{@cite}"><xsl:value-of select="@quotedFrom"/></a></xsl:when> + <xsl:otherwise><xsl:value-of select="@quotedFrom"/></xsl:otherwise> + </xsl:choose> + </cite> + </xsl:if> + </blockquote> + </div> +</xsl:template> + +<!-- Definitions --> +<xsl:template match="x:dfn"> + <dfn> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </dfn> +</xsl:template> + +<!-- headings --> +<xsl:template match="x:h"> + <b> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </b> +</xsl:template> + +<!-- superscripts --> +<xsl:template match="x:sup|sup"> + <sup> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </sup> +</xsl:template> + +<!-- subscripts --> +<xsl:template match="sub"> + <sub> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </sub> +</xsl:template> + +<!-- bold --> +<xsl:template match="x:highlight"> + <b> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </b> +</xsl:template> + +<!-- measuring lengths --> +<xsl:template match="x:length-of"> + <xsl:variable 
name="target" select="//*[@anchor=current()/@target]"/> + <xsl:if test="count($target)!=1"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('@target ',@target,' defined ',count($target),' times.')"/> + </xsl:call-template> + </xsl:if> + <xsl:variable name="content"> + <xsl:apply-templates select="$target"/> + </xsl:variable> + <xsl:variable name="lineends" select="string-length($content) - string-length(translate($content,'&#10;',''))"/> + <xsl:variable name="indents"> + <xsl:choose> + <xsl:when test="@indented"> + <xsl:value-of select="number(@indented) * $lineends"/> + </xsl:when> + <xsl:otherwise>0</xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="string-length($content) + $lineends - $indents"/> +</xsl:template> + +<!-- Almost Nop --> +<xsl:template match="x:span"> + <xsl:choose> + <xsl:when test="@x:lang and $prettyprint-class!=''"> + <code class="{$prettyprint-class}"> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </code> + </xsl:when> + <xsl:otherwise> + <span> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates/> + </span> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="x:parse-xml"> + <xsl:apply-templates/> + + <xsl:if test="function-available('exslt:node-set')"> + <xsl:variable name="cleaned"> + <xsl:apply-templates mode="cleanup-edits"/> + </xsl:variable> + <xsl:if test="$xml2rfc-ext-trace-parse-xml='yes'"> + <xsl:call-template name="trace"> + <xsl:with-param name="msg" select="concat('Parsing XML: ', $cleaned)"/> + </xsl:call-template> + </xsl:if> + <xsl:choose> + <xsl:when test="function-available('myns:parseXml')" use-when="function-available('myns:parseXml')"> + <xsl:if test="myns:parseXml(concat($cleaned,''))!=''"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('Parse error in XML: ', myns:parseXml(concat($cleaned,'')))"/> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="function-available('saxon:parse')" use-when="function-available('saxon:parse')"> + <xsl:variable name="parsed" select="saxon:parse(concat($cleaned,''))"/> + <xsl:if test="$parsed='foo'"> + <xsl:comment>should not get here</xsl:comment> + </xsl:if> + </xsl:when> + <xsl:when test="false()"></xsl:when> + <xsl:otherwise></xsl:otherwise> + </xsl:choose> + </xsl:if> +</xsl:template> + +<!-- inlined RDF support --> +<xsl:template match="rdf:Description"> + <!-- ignore --> +</xsl:template> + +<!-- cleanup for ins/del --> + +<xsl:template match="comment()|@*" mode="cleanup-edits"><xsl:copy/></xsl:template> + +<xsl:template match="text()" mode="cleanup-edits"><xsl:copy/></xsl:template> + +<xsl:template match="/" mode="cleanup-edits"> + <xsl:copy><xsl:apply-templates select="node()" mode="cleanup-edits" /></xsl:copy> +</xsl:template> + +<xsl:template match="ed:del" mode="cleanup-edits"/> + +<xsl:template match="ed:replace" mode="cleanup-edits"> + <xsl:apply-templates mode="cleanup-edits"/> +</xsl:template> + +<xsl:template match="ed:ins" mode="cleanup-edits"> + <xsl:apply-templates mode="cleanup-edits"/> +</xsl:template> + + +<!-- ABNF support --> +<xsl:template name="to-abnf-char-sequence"> + <xsl:param name="chars"/> + + <xsl:variable name="asciistring">&#160; !"#$%&amp;'()*+,-./<xsl:value-of select="$digits"/>:;&lt;=>?@<xsl:value-of select="$ucase"/>[\]^_`<xsl:value-of select="$lcase"/>{|}~&#127;</xsl:variable> + <xsl:variable name="hex">0123456789ABCDEF</xsl:variable> + + <xsl:variable name="c" select="substring($chars,1,1)"/> + <xsl:variable 
name="r" select="substring($chars,2)"/> + <xsl:variable name="pos" select="string-length(substring-before($asciistring,$c))"/> + + <xsl:choose> + <xsl:when test="$pos >= 1"> + <xsl:variable name="ascii" select="$pos + 31"/> + <xsl:variable name="h" select="floor($ascii div 16)"/> + <xsl:variable name="l" select="floor($ascii mod 16)"/> + <xsl:value-of select="concat(substring($hex,1 + $h,1),substring($hex,1 + $l,1))"/> + </xsl:when> + <xsl:otherwise> + <xsl:text>??</xsl:text> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="concat('unexpected character in ABNF char sequence: ',substring($chars,1,1))" /> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + + <xsl:if test="$r!=''"> + <xsl:text>.</xsl:text> + <xsl:call-template name="to-abnf-char-sequence"> + <xsl:with-param name="chars" select="$r"/> + </xsl:call-template> + </xsl:if> + +</xsl:template> + +<xsl:template match="x:abnf-char-sequence"> + <xsl:choose> + <xsl:when test="substring(.,1,1) != '&quot;' or substring(.,string-length(.),1) != '&quot;'"> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'contents of x:abnf-char-sequence needs to be quoted.'" /> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:text>%x</xsl:text> + <xsl:call-template name="to-abnf-char-sequence"> + <xsl:with-param name="chars" select="substring(.,2,string-length(.)-2)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- box drawing --> + +<!-- nop for alignment --> +<xsl:template match="x:x"/> + +<!-- box (top) --> +<xsl:template match="x:bt"> + <xsl:text>&#x250c;</xsl:text> + <xsl:value-of select="translate(substring(.,2,string-length(.)-2),'-','&#x2500;')"/> + <xsl:text>&#x2510;</xsl:text> +</xsl:template> + +<!-- box (center) --> +<xsl:template match="x:bc"> + <xsl:variable name="first" select="substring(.,1)"/> + <xsl:variable name="content" select="substring(.,2,string-length(.)-2)"/> + <xsl:variable name="is-delimiter" select="translate($content,'-','')=''"/> + + <xsl:choose> + <xsl:when test="$is-delimiter"> + <xsl:text>&#x251c;</xsl:text> + <xsl:value-of select="translate($content,'-','&#x2500;')"/> + <xsl:text>&#x2524;</xsl:text> + </xsl:when> + <xsl:when test="*"> + <xsl:for-each select="node()"> + <xsl:choose> + <xsl:when test="position()=1"> + <xsl:text>&#x2502;</xsl:text> + <xsl:value-of select="substring(.,2)"/> + </xsl:when> + <xsl:when test="position()=last()"> + <xsl:value-of select="substring(.,1,string-length(.)-1)"/> + <xsl:text>&#x2502;</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="."/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:otherwise> + <xsl:text>&#x2502;</xsl:text> + <xsl:value-of select="$content"/> + <xsl:text>&#x2502;</xsl:text> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<!-- box (bottom) --> +<xsl:template match="x:bb"> + <xsl:text>&#x2514;</xsl:text> + <xsl:value-of select="translate(substring(.,2,string-length(.)-2),'-','&#x2500;')"/> + <xsl:text>&#x2518;</xsl:text> +</xsl:template> + +<!-- author handling extensions --> +<xsl:template match="x:include-author"> + <xsl:for-each select="/*/front/author[@anchor=current()/@target]"> + <xsl:apply-templates select="."/> + </xsl:for-each> +</xsl:template> + +<!-- boilerplate --> +<xsl:template match="boilerplate"> + <xsl:apply-templates/> +</xsl:template> + +<!-- experimental annotation support --> + +<xsl:template match="ed:issueref"> + <xsl:choose> + <xsl:when test=".=//ed:issue/@name"> + <a 
href="#{$anchor-pref}issue.{.}"> + <xsl:apply-templates/> + </a> + </xsl:when> + <xsl:when test="@href"> + <a href="{@href}" id="{$anchor-pref}issue.{.}"> + <xsl:apply-templates/> + </a> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Dangling ed:issueref: <xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + <xsl:apply-templates/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="ed:issue"> + <xsl:variable name="class"> + <xsl:choose> + <xsl:when test="@status='closed'">closedissue</xsl:when> + <xsl:otherwise>openissue</xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <table class="{$class}"> + <tr> + <td colspan="3"> + <a id="{$anchor-pref}issue.{@name}"> + <xsl:choose> + <xsl:when test="@status='closed'"> + <xsl:attribute name="class">closed-issue</xsl:attribute> + </xsl:when> + <xsl:when test="@status='editor'"> + <xsl:attribute name="class">editor-issue</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="class">open-issue</xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;I&#160;</xsl:text> + </a> + <xsl:text>&#160;</xsl:text> + <xsl:choose> + <xsl:when test="@href"> + <em><a href="{@href}"><xsl:value-of select="@name" /></a></em> + </xsl:when> + <xsl:when test="@alternate-href"> + <em>[<a href="{@alternate-href}">alternate link</a>]</em> + </xsl:when> + <xsl:otherwise> + <em><xsl:value-of select="@name" /></em> + </xsl:otherwise> + </xsl:choose> + &#0160; + (type: <xsl:value-of select="@type"/>, status: <xsl:value-of select="@status"/>) + </td> + </tr> + + <xsl:apply-templates select="ed:item"/> + <xsl:apply-templates select="ed:resolution"/> + + <xsl:variable name="changes" select="//*[@ed:resolves=current()/@name or ed:resolves=current()/@name]" /> + <xsl:if test="$changes"> + <tr> + <td class="top" colspan="3"> + Associated changes in this document: + <xsl:variable name="issue" select="@name"/> + <xsl:for-each select="$changes"> + <a href="#{$anchor-pref}change.{$issue}.{position()}"> + <xsl:variable name="label"> + <xsl:call-template name="get-section-number"/> + </xsl:variable> + <xsl:choose> + <xsl:when test="$label!=''"><xsl:value-of select="$label"/></xsl:when> + <xsl:otherwise>&lt;<xsl:value-of select="concat('#',$anchor-pref,'change.',$issue,'.',position())"/>&gt;</xsl:otherwise> + </xsl:choose> + </a> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> + <xsl:text>.</xsl:text> + </td> + </tr> + </xsl:if> + </table> + +</xsl:template> + +<xsl:template match="ed:item"> + <tr> + <td class="top"> + <xsl:if test="@entered-by"> + <a href="mailto:{@entered-by}?subject={/rfc/@docName},%20{../@name}"> + <i><xsl:value-of select="@entered-by"/></i> + </a> + </xsl:if> + </td> + <td class="topnowrap"> + <xsl:value-of select="@date"/> + </td> + <td class="top"> + <xsl:apply-templates select="node()" mode="issuehtml"/> + </td> + </tr> +</xsl:template> + +<xsl:template match="ed:resolution"> + <tr> + <td class="top"> + <xsl:if test="@entered-by"> + <a href="mailto:{@entered-by}?subject={/rfc/@docName},%20{../@name}"><i><xsl:value-of select="@entered-by"/></i></a> + </xsl:if> + </td> + <td class="topnowrap"> + <xsl:value-of select="@datetime"/> + </td> + <td class="top"> + <em>Resolution:</em> + <xsl:apply-templates select="node()" mode="issuehtml"/> + </td> + </tr> +</xsl:template> + +<xsl:template match="ed:annotation"> + <em> + <xsl:apply-templates/> + </em> +</xsl:template> + +<!-- special templates for handling XHTML in issues --> 
+<xsl:template match="text()" mode="issuehtml"> + <xsl:value-of select="."/> +</xsl:template> + +<xsl:template match="*|@*" mode="issuehtml"> + <xsl:message terminate="yes">Unexpected node in issue HTML: <xsl:value-of select="name(.)"/></xsl:message> +</xsl:template> + +<xsl:template match="xhtml:a|xhtml:b|xhtml:br|xhtml:cite|xhtml:del|xhtml:em|xhtml:i|xhtml:ins|xhtml:q|xhtml:pre|xhtml:tt" mode="issuehtml"> + <xsl:element name="{local-name()}"> + <xsl:apply-templates select="@*|node()" mode="issuehtml"/> + </xsl:element> +</xsl:template> + +<xsl:template match="xhtml:p" mode="issuehtml"> + <xsl:apply-templates select="node()" mode="issuehtml"/> + <br class="p"/> +</xsl:template> + +<xsl:template match="xhtml:a/@href|xhtml:q/@cite" mode="issuehtml"> + <xsl:attribute name="{local-name(.)}"> + <xsl:value-of select="."/> + </xsl:attribute> +</xsl:template> + +<xsl:template match="ed:issueref" mode="issuehtml"> + <xsl:apply-templates select="."/> +</xsl:template> + +<xsl:template match="ed:eref" mode="issuehtml"> + <xsl:text>&lt;</xsl:text> + <a href="{.}"><xsl:value-of select="."/></a> + <xsl:text>&gt;</xsl:text> +</xsl:template> + +<xsl:template name="insertIssuesList"> + + <h2 id="{$anchor-pref}issues-list" ><a href="#{$anchor-pref}issues-list">Issues list</a></h2> + <table> + <thead> + <tr> + <th>Id</th> + <th>Type</th> + <th>Status</th> + <th>Date</th> + <th>Raised By</th> + </tr> + </thead> + <tbody> + <xsl:for-each select="//ed:issue"> + <xsl:sort select="@status" /> + <xsl:sort select="@name" /> + <tr> + <td><a href="#{$anchor-pref}issue.{@name}"><xsl:value-of select="@name" /></a></td> + <td><xsl:value-of select="@type" /></td> + <td><xsl:value-of select="@status" /></td> + <td><xsl:value-of select="ed:item[1]/@date" /></td> + <td><a href="mailto:{ed:item[1]/@entered-by}?subject={/rfc/@docName},%20{@name}"><xsl:value-of select="ed:item[1]/@entered-by" /></a></td> + </tr> + </xsl:for-each> + </tbody> + </table> + +</xsl:template> + +<xsl:template name="insert-diagnostics"> + + <!-- check anchor names --> + + <xsl:variable name="badAnchors" select="//*[starts-with(@anchor,$anchor-pref)]" /> + <xsl:if test="$badAnchors"> + <xsl:variable name="text"> + <xsl:text>The following anchor names may collide with internally generated anchors because of their prefix "</xsl:text> + <xsl:value-of select="$anchor-pref" /> + <xsl:text>": </xsl:text> + <xsl:for-each select="$badAnchors"> + <xsl:value-of select="@anchor"/> + <xsl:call-template name="lineno"/> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:call-template name="warning"> + <xsl:with-param name="msg"><xsl:value-of select="normalize-space($text)"/></xsl:with-param> + <xsl:with-param name="lineno" select="false()"/> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="badV3Anchors" select="//*[substring(@anchor,2,1)='-' and translate(substring(@anchor,1,1),$lcase,'')='']" /> + <xsl:if test="$badV3Anchors"> + <xsl:variable name="text"> + <xsl:text>The following anchor names may collide with internally generated anchors in XML2RFCV3 mode because: </xsl:text> + <xsl:for-each select="$badV3Anchors"> + <xsl:value-of select="@anchor"/> + <xsl:call-template name="lineno"/> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:call-template name="warning"> + <xsl:with-param name="msg"><xsl:value-of select="normalize-space($text)"/></xsl:with-param> + <xsl:with-param name="lineno" select="false()"/> + </xsl:call-template> + </xsl:if> + + <!-- check ABNF syntax 
references --> + <xsl:if test="//artwork[@type='abnf2616' or @type='abnf7230']"> + <xsl:if test="not(//reference//seriesInfo[@name='RFC' and (@value='2068' or @value='2616' or @value='7230')]) and not(//reference//seriesInfo[@name='Internet-Draft' and (starts-with(@value, 'draft-ietf-httpbis-p1-messaging-'))])"> + <!-- check for draft-ietf-httpbis-p1-messaging- is for backwards compat --> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">document uses HTTP-style ABNF syntax, but doesn't reference RFC 2068, RFC 2616, or RFC 7230.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:if> + <xsl:if test="//artwork[@type='abnf']"> + <xsl:if test="not(//reference//seriesInfo[@name='RFC' and (@value='2234' or @value='4234' or @value='5234')])"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">document uses ABNF syntax, but doesn't reference RFC 2234, 4234 or 5234.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:if> + + <!-- check IDs --> + <xsl:variable name="badTargets" select="//xref[not(@target=//@anchor) and not(@target=exslt:node-set($includeDirectives)//@anchor) and not(ancestor::ed:del)]" /> + <xsl:if test="$badTargets"> + <xsl:variable name="text"> + <xsl:text>The following target names do not exist: </xsl:text> + <xsl:for-each select="$badTargets"> + <xsl:value-of select="@target"/> + <xsl:if test="not(@target)">(@target attribute missing)</xsl:if> + <xsl:call-template name="lineno"/> + <xsl:if test="position()!=last()"> + <xsl:text>, </xsl:text> + </xsl:if> + </xsl:for-each> + </xsl:variable> + <xsl:call-template name="warning"> + <xsl:with-param name="msg"><xsl:value-of select="$text"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + + +</xsl:template> + +<!-- special change mark support, not supported by RFC2629 yet --> + +<xsl:template match="@ed:*" /> + +<xsl:template match="ed:del"> + <xsl:call-template name="insert-issue-pointer"/> + <del> + <xsl:copy-of select="@*[namespace-uri()='']"/> + <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates /> + </del> +</xsl:template> + +<xsl:template match="ed:ins"> + <xsl:call-template name="insert-issue-pointer"/> + <ins> + <xsl:copy-of select="@*[namespace-uri()='']"/> + <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates /> + </ins> +</xsl:template> + +<xsl:template name="insert-issue-pointer"> + <xsl:param name="deleted-anchor"/> + <xsl:variable name="change" select="."/> + <xsl:for-each select="@ed:resolves|ed:resolves"> + <xsl:variable name="resolves" select="."/> + <!-- need the right context node for proper numbering --> + <xsl:variable name="count"><xsl:for-each select=".."><xsl:number level="any" count="*[@ed:resolves=$resolves or ed:resolves=$resolves]" /></xsl:for-each></xsl:variable> + <xsl:variable name="total" select="count(//*[@ed:resolves=$resolves or ed:resolves=$resolves])" /> + <xsl:variable name="id"> + <xsl:value-of select="$anchor-pref"/>change.<xsl:value-of select="$resolves"/>.<xsl:value-of select="$count" /> + </xsl:variable> + <xsl:choose> + <!-- block level? 
--> + <xsl:when test="not(ancestor::t) and not(ancestor::title) and not(ancestor::figure) and not($change/@ed:old-title)"> + <div class="issuepointer {$css-noprint}"> + <xsl:if test="not($deleted-anchor)"> + <xsl:attribute name="id"><xsl:value-of select="$id"/></xsl:attribute> + </xsl:if> + <xsl:if test="$count > 1"> + <a class="bg-issue" title="previous change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count - 1}">&#x2191;</a> + </xsl:if> + <a class="open-issue" href="#{$anchor-pref}issue.{$resolves}" title="resolves: {$resolves}"> + <xsl:choose> + <xsl:when test="//ed:issue[@name=$resolves and @status='closed']"> + <xsl:attribute name="class">closed-issue</xsl:attribute> + </xsl:when> + <xsl:when test="//ed:issue[@name=$resolves and @status='editor']"> + <xsl:attribute name="class">editor-issue</xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="class">open-issue</xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;I&#160;</xsl:text> + </a> + <xsl:if test="$count &lt; $total"> + <a class="bg-issue" title="next change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count + 1}">&#x2193;</a> + </xsl:if> + <xsl:text>&#160;</xsl:text> + </div> + </xsl:when> + <xsl:otherwise> + <xsl:if test="$count > 1"> + <a class="bg-issue" title="previous change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count - 1}">&#x2191;</a> + </xsl:if> + <a title="resolves: {$resolves}" href="#{$anchor-pref}issue.{$resolves}"> + <xsl:if test="not($deleted-anchor)"> + <xsl:attribute name="id"><xsl:value-of select="$id"/></xsl:attribute> + </xsl:if> + <xsl:choose> + <xsl:when test="//ed:issue[@name=$resolves and @status='closed']"> + <xsl:attribute name="class">closed-issue <xsl:value-of select="$css-noprint"/></xsl:attribute> + </xsl:when> + <xsl:when test="//ed:issue[@name=$resolves and @status='editor']"> + <xsl:attribute name="class">editor-issue <xsl:value-of select="$css-noprint"/></xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="class">open-issue <xsl:value-of select="$css-noprint"/></xsl:attribute> + </xsl:otherwise> + </xsl:choose> + <xsl:text>&#160;I&#160;</xsl:text> + </a> + <xsl:if test="$count &lt; $total"> + <a class="bg-issue" title="next change for {$resolves}" href="#{$anchor-pref}change.{$resolves}.{$count + 1}">&#x2193;</a> + </xsl:if> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> +</xsl:template> + +<xsl:template match="ed:replace"> + <!-- we need to special-case things like lists and tables --> + <xsl:choose> + <xsl:when test="parent::list"> + <xsl:apply-templates select="ed:del/node()" /> + <xsl:apply-templates select="ed:ins/node()" /> + </xsl:when> + <xsl:when test="parent::references"> + <xsl:apply-templates select="ed:del/node()" /> + <xsl:apply-templates select="ed:ins/node()" /> + </xsl:when> + <xsl:otherwise> + <xsl:if test="@cite"> + <a class="editor-issue" href="{@cite}" target="_blank" title="see {@cite}"> + <xsl:text>&#160;i&#160;</xsl:text> + </a> + </xsl:if> + <xsl:call-template name="insert-issue-pointer"/> + <xsl:if test="ed:del"> + <del> + <xsl:copy-of select="@*[namespace-uri()='']"/> + <xsl:if test="not(@title) and ancestor-or-self::xsl:template[@ed:entered-by] and @datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates select="ed:del/node()" /> + </del> + </xsl:if> + <xsl:if test="ed:ins"> + <ins> + <xsl:copy-of 
select="@*[namespace-uri()='']"/> + <xsl:if test="not(@title) and ancestor-or-self::*[@ed:entered-by] and @datetime"> + <xsl:attribute name="title"><xsl:value-of select="concat(@datetime,', ',ancestor-or-self::*[@ed:entered-by][1]/@ed:entered-by)"/></xsl:attribute> + </xsl:if> + <xsl:apply-templates select="ed:ins/node()" /> + </ins> + </xsl:if> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- convenience template for helping Mozilla (pre/ins inheritance problem) --> +<xsl:template name="insertInsDelClass"> + <xsl:if test="ancestor::ed:del"> + <xsl:attribute name="class">del</xsl:attribute> + </xsl:if> + <xsl:if test="ancestor::ed:ins"> + <xsl:attribute name="class">ins</xsl:attribute> + </xsl:if> +</xsl:template> + + +<xsl:template name="sectionnumberAndEdits"> + <xsl:choose> + <xsl:when test="ancestor::ed:del"> + <xsl:text>del-</xsl:text> + <xsl:number count="ed:del//section" level="any"/> + </xsl:when> + <xsl:when test="@x:fixed-section-number and @x:fixed-section-number!=''"> + <xsl:value-of select="@x:fixed-section-number"/> + </xsl:when> + <xsl:when test="(@x:fixed-section-number and @x:fixed-section-number='') or @numbered='false'"> + <xsl:text>unnumbered-</xsl:text> + <xsl:number count="section[@x:fixed-section-number='' or @numbered='false']" level="any"/> + </xsl:when> + <xsl:when test="self::section and parent::ed:ins and local-name(../..)='replace'"> + <xsl:for-each select="../.."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> + <xsl:for-each select=".."> + <xsl:if test="parent::ed:replace"> + <xsl:for-each select=".."> + <xsl:if test="parent::section">.</xsl:if> + <xsl:variable name="cnt" select="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> + <xsl:choose> + <xsl:when test="ancestor::back and not(ancestor::section)"><xsl:number format="A" value="$cnt"/></xsl:when> + <xsl:otherwise><xsl:value-of select="$cnt"/></xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:if> + </xsl:for-each> + </xsl:when> + <xsl:when test="self::section[parent::ed:ins]"> + <xsl:for-each select="../.."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> + <xsl:for-each select=".."> + <xsl:if test="parent::section">.</xsl:if><xsl:value-of select="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> + </xsl:for-each> + </xsl:when> + <xsl:when test="self::section"> + <xsl:for-each select=".."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> + <xsl:if test="parent::section">.</xsl:if> + <xsl:choose> + <xsl:when test="parent::back"> + <xsl:number format="A" value="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> + </xsl:when> + <xsl:otherwise> + <xsl:number value="1+count(preceding-sibling::section|preceding-sibling::ed:ins/section|preceding-sibling::ed:replace/ed:ins/section)" /> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::references"> + <xsl:choose> + <xsl:when test="count(/*/back/references)+count(/*/back/ed:replace/ed:ins/references)=1"><xsl:call-template name="get-references-section-number"/></xsl:when> + <xsl:otherwise><xsl:call-template name="get-references-section-number"/>.<xsl:number level="any"/></xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::middle or self::back"><!-- done --></xsl:when> + <xsl:otherwise> + <!-- go up one level --> + <xsl:for-each 
select=".."><xsl:call-template name="sectionnumberAndEdits" /></xsl:for-each> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- utilities for warnings --> + +<xsl:template name="trace"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="inline"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">TRACE</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="$inline"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="inline-warning"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">WARNING</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="'yes'"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="warning"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">WARNING</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="info"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">INFO</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="'no'"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="error"> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="inline"/> + <xsl:param name="lineno" select="true()"/> + <xsl:call-template name="emit-message"> + <xsl:with-param name="level">ERROR</xsl:with-param> + <xsl:with-param name="msg" select="$msg"/> + <xsl:with-param name="msg2" select="$msg2"/> + <xsl:with-param name="inline" select="$inline"/> + <xsl:with-param name="lineno" select="$lineno"/> + </xsl:call-template> +</xsl:template> + +<xsl:template name="emit-message"> + <xsl:param name="level"/> + <xsl:param name="msg"/> + <xsl:param name="msg2"/> + <xsl:param name="inline"/> + <xsl:param name="lineno" select="true()"/> + <xsl:variable name="message"><xsl:value-of select="$level"/>: <xsl:value-of select="$msg"/><xsl:if test="$msg2!=''"> - <xsl:value-of select="$msg2"/></xsl:if><xsl:if test="$lineno"><xsl:call-template name="lineno"/></xsl:if></xsl:variable> + <xsl:choose> + <xsl:when test="$inline!='no'"> + <xsl:choose> + <xsl:when test="ancestor::t"> + <span class="{$css-error}"><xsl:value-of select="$message"/></span> + </xsl:when> + <xsl:otherwise> + <div class="{$css-error}"><xsl:value-of select="$message"/></div> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <!-- this fails when the message contains characters not encodable in the output encoding --> + <!-- <xsl:comment><xsl:value-of select="$message"/></xsl:comment> --> + </xsl:otherwise> + </xsl:choose> + <xsl:message><xsl:value-of 
select="$message"/></xsl:message> +</xsl:template> + +<!-- table formatting --> + +<xsl:template match="table"> + <div> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="iref"/> + <table> + <xsl:if test="name or @anchor!=''"> + <xsl:variable name="n"><xsl:call-template name="get-table-number"/></xsl:variable> + <caption> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Table </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="name">: </xsl:if> + </xsl:if> + <xsl:if test="name"> + <xsl:apply-templates select="name/node()"/> + </xsl:if> + </caption> + </xsl:if> + <xsl:apply-templates select="*[not(self::iref)]"/> + </table> + </div> +</xsl:template> + +<xsl:template match="table/name"/> + +<xsl:template match="tbody"> + <tbody> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </tbody> +</xsl:template> + +<xsl:template match="tfoot"> + <tfoot> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </tfoot> +</xsl:template> + +<xsl:template match="thead"> + <thead> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </thead> +</xsl:template> + +<xsl:template match="tr"> + <tr> + <xsl:call-template name="copy-anchor"/> + <xsl:apply-templates select="*"/> + </tr> +</xsl:template> + +<xsl:template name="t-alignment"> + <xsl:if test="@align and @align!=''"> + <xsl:attribute name="class"> + <xsl:choose> + <xsl:when test="@align='left'"><xsl:value-of select="$css-left"/></xsl:when> + <xsl:when test="@align='right'"><xsl:value-of select="$css-right"/></xsl:when> + <xsl:when test="@align='center'">center</xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unknown align attribute: <xsl:value-of select="@align"/></xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template match="td"> + <td> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="t-alignment"/> + <xsl:copy-of select="@colspan|@rowspan"/> + <xsl:apply-templates select="node()"/> + </td> +</xsl:template> + +<xsl:template match="th"> + <th> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="t-alignment"/> + <xsl:copy-of select="@colspan|@rowspan"/> + <xsl:apply-templates select="node()"/> + </th> +</xsl:template> + +<xsl:template match="td/br|th/br"> + <br/> +</xsl:template> + +<xsl:template match="texttable"> + <xsl:call-template name="check-no-text-content"/> + + <xsl:variable name="anch"> + <xsl:call-template name="get-table-anchor"/> + </xsl:variable> + + <div id="{$anch}" class="{$css-tt}"> + + <xsl:if test="@anchor!=''"> + <div id="{@anchor}"/> + </xsl:if> + <xsl:apply-templates select="preamble" /> + + <xsl:variable name="style"> + <xsl:value-of select="$css-tt"/> + <xsl:text> </xsl:text> + <xsl:choose> + <xsl:when test="@style!=''"> + <xsl:value-of select="@style"/> + </xsl:when> + <xsl:otherwise>full</xsl:otherwise> + </xsl:choose> + <xsl:choose> + <xsl:when test="@align='left'"><xsl:text> </xsl:text><xsl:value-of select="$css-tleft"/></xsl:when> + <xsl:when test="@align='right'"><xsl:text> </xsl:text><xsl:value-of select="$css-tright"/></xsl:when> + <xsl:when test="@align='center' or not(@align) or @align=''"><xsl:text> </xsl:text><xsl:value-of select="$css-tcenter"/></xsl:when> + <xsl:otherwise/> + </xsl:choose> + </xsl:variable> + + <table class="{$style}"> + <xsl:if test="(@title!='') or (@anchor!='' and not(@suppress-title='true'))"> + <xsl:variable 
name="n"><xsl:call-template name="get-table-number"/></xsl:variable> + <caption> + <xsl:if test="not(starts-with($n,'u'))"> + <xsl:text>Table </xsl:text> + <xsl:value-of select="$n"/> + <xsl:if test="@title!=''">: </xsl:if> + </xsl:if> + <xsl:if test="@title!=''"> + <xsl:value-of select="@title" /> + </xsl:if> + </caption> + </xsl:if> + + <xsl:if test="ttcol!=''"> + <!-- skip header when all column titles are empty --> + <thead> + <tr> + <xsl:apply-templates select="ttcol" /> + </tr> + </thead> + </xsl:if> + <tbody> + <xsl:variable name="columns" select="count(ttcol)" /> + <xsl:variable name="fields" select="c | ed:replace/ed:ins/c | ed:replace/ed:del/c" /> + <xsl:for-each select="$fields[$columns=1 or (position() mod $columns) = 1]"> + <tr> + <xsl:for-each select=". | following-sibling::c[position() &lt; $columns]"> + <td> + <xsl:call-template name="copy-anchor"/> + <xsl:call-template name="insertInsDelClass"/> + <xsl:variable name="pos" select="position()" /> + <xsl:variable name="col" select="../ttcol[position() = $pos]" /> + <xsl:choose> + <xsl:when test="$col/@align='right' or $col/@align='center'"> + <xsl:attribute name="class"><xsl:value-of select="$col/@align"/></xsl:attribute> + </xsl:when> + <xsl:when test="$col/@align='left' or not($col/@align)"> + <xsl:attribute name="class"><xsl:value-of select="$css-left"/></xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">Unknown align attribute on ttcol: <xsl:value-of select="$col/@align"/></xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + <xsl:apply-templates select="node()" /> + </td> + </xsl:for-each> + </tr> + </xsl:for-each> + </tbody> + </table> + <xsl:apply-templates select="postamble" /> + </div> + +</xsl:template> + +<xsl:template match="ttcol"> + <th> + + <xsl:choose> + <xsl:when test="@align='right' or @align='center' or @align='left'"> + <xsl:attribute name="class"><xsl:value-of select="@align"/></xsl:attribute> + </xsl:when> + <xsl:when test="not(@align)"> + <!-- that's the default, nothing to do here --> + </xsl:when> + <xsl:otherwise> + <xsl:message>Unknown align attribute on ttcol: <xsl:value-of select="@align"/></xsl:message> + </xsl:otherwise> + </xsl:choose> + + <xsl:if test="@width"> + <xsl:attribute name="style">width: <xsl:value-of select="@width" />;</xsl:attribute> + </xsl:if> + + <xsl:apply-templates /> + </th> +</xsl:template> + +<!-- cref support --> + +<xsl:template name="get-comment-name"> + <xsl:choose> + <xsl:when test="@anchor"> + <xsl:value-of select="@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>comment.</xsl:text> + <xsl:number count="cref[not(@anchor)]" level="any"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template match="cref"> + <xsl:if test="$xml2rfc-comments!='no'"> + <xsl:variable name="cid"> + <xsl:call-template name="get-comment-name"/> + </xsl:variable> + + <span class="comment"> + <xsl:choose> + <xsl:when test="$xml2rfc-inline='yes'"> + <xsl:attribute name="id"> + <xsl:value-of select="$cid"/> + </xsl:attribute> + <xsl:text>[</xsl:text> + <a href="#{$cid}" class="smpl"> + <xsl:value-of select="$cid"/> + </a> + <xsl:text>: </xsl:text> + <xsl:apply-templates select="text()|eref|xref"/> + <xsl:if test="@source"> --<xsl:value-of select="@source"/></xsl:if> + <xsl:text>]</xsl:text> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="title"> + <xsl:if test="@source"><xsl:value-of select="@source"/>: </xsl:if> + <xsl:variable name="content"> + 
<xsl:apply-templates select="text()|eref|xref"/> + </xsl:variable> + <xsl:value-of select="$content"/> + </xsl:attribute> + <xsl:text>[</xsl:text> + <a href="#{$cid}"> + <xsl:value-of select="$cid"/> + </a> + <xsl:text>]</xsl:text> + </xsl:otherwise> + </xsl:choose> + </span> + </xsl:if> +</xsl:template> + +<xsl:template name="insertComments"> + + <xsl:call-template name="insert-conditional-hrule"/> + + <h2> + <xsl:call-template name="insert-conditional-pagebreak"/> + <a id="{$anchor-pref}comments" href="#{$anchor-pref}comments">Editorial Comments</a> + </h2> + + <dl> + <xsl:for-each select="//cref"> + <xsl:variable name="cid"> + <xsl:choose> + <xsl:when test="@anchor"> + <xsl:value-of select="@anchor"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>comment.</xsl:text> + <xsl:number count="cref[not(@anchor)]" level="any"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <dt id="{$cid}"> + [<xsl:value-of select="$cid"/>] + </dt> + <dd> + <xsl:apply-templates select="node()"/> + <xsl:if test="@source"> --<xsl:value-of select="@source"/></xsl:if> + </dd> + </xsl:for-each> + </dl> +</xsl:template> + + +<!-- Chapter Link Generation --> + +<xsl:template match="*" mode="links"><xsl:apply-templates mode="links"/></xsl:template> +<xsl:template match="text()" mode="links" /> + +<xsl:template match="/*/middle//section[not(ancestor::section)]" mode="links"> + <xsl:variable name="sectionNumber"><xsl:call-template name="get-section-number" /></xsl:variable> + <xsl:variable name="title"> + <xsl:if test="$sectionNumber!='' and not(contains($sectionNumber,'unnumbered-'))"> + <xsl:value-of select="$sectionNumber"/> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <link rel="Chapter" title="{$title}" href="#{$anchor-pref}section.{$sectionNumber}"/> + <xsl:apply-templates mode="links" /> +</xsl:template> + +<xsl:template match="/*/back//section[not(ancestor::section)]" mode="links"> + <xsl:variable name="sectionNumber"><xsl:call-template name="get-section-number" /></xsl:variable> + <xsl:variable name="title"> + <xsl:if test="$sectionNumber!='' and not(contains($sectionNumber,'unnumbered-'))"> + <xsl:value-of select="$sectionNumber"/> + <xsl:text> </xsl:text> + </xsl:if> + <xsl:choose> + <xsl:when test="name"> + <xsl:variable name="hold"> + <xsl:apply-templates select="name/node()"/> + </xsl:variable> + <xsl:value-of select="normalize-space($hold)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="@title"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <link rel="Appendix" title="{$title}" href="#{$anchor-pref}section.{$sectionNumber}"/> + <xsl:apply-templates mode="links" /> +</xsl:template> + +<xsl:template match="/*/back/references[position()=1]" mode="links"> + <xsl:variable name="sectionNumber"><xsl:call-template name="get-references-section-number" /></xsl:variable> + <link rel="Chapter" href="#{$anchor-pref}section.{$sectionNumber}"> + <xsl:choose> + <xsl:when test="@title and count(/*/back/references)=1"> + <xsl:attribute name="title"> + <xsl:call-template name="get-references-section-number"/> + <xsl:text> </xsl:text> + <xsl:value-of select="@title"/> + </xsl:attribute> + </xsl:when> + <xsl:otherwise> + <xsl:attribute name="title"> + <xsl:call-template 
name="get-references-section-number"/> + <xsl:text> </xsl:text> + <xsl:value-of select="$xml2rfc-refparent"/> + </xsl:attribute> + </xsl:otherwise> + </xsl:choose> + </link> +</xsl:template> + +<!-- convenience templates --> + +<xsl:template name="get-author-summary"> + <xsl:choose> + <xsl:when test="count(/rfc/front/author)=1"> + <xsl:value-of select="/rfc/front/author[1]/@surname" /> + </xsl:when> + <xsl:when test="count(/rfc/front/author)=2"> + <xsl:value-of select="concat(/rfc/front/author[1]/@surname,' &amp; ',/rfc/front/author[2]/@surname)" /> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat(/rfc/front/author[1]/@surname,', et al.')" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-bottom-center"> + <xsl:choose> + <xsl:when test="/rfc/@docName"> + <!-- for IDs, use the expiry date --> + <xsl:text>Expires </xsl:text><xsl:call-template name="expirydate" /> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-category-long"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-category-long"> + <xsl:choose> + <xsl:when test="$xml2rfc-footer!=''"><xsl:value-of select="$xml2rfc-footer" /></xsl:when> + <xsl:when test="$xml2rfc-private!=''"/> <!-- private draft, footer not set --> + <xsl:when test="/rfc/@category='bcp'">Best Current Practice</xsl:when> + <xsl:when test="/rfc/@category='historic'">Historic</xsl:when> + <xsl:when test="/rfc/@category='info' or not(/rfc/@category)">Informational</xsl:when> + <xsl:when test="/rfc/@category='std'">Standards Track</xsl:when> + <xsl:when test="/rfc/@category='exp'">Experimental</xsl:when> + <xsl:otherwise>(category unknown)</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-header-center"> + <xsl:choose> + <xsl:when test="string-length(/rfc/front/title/@abbrev) &gt; 0"> + <xsl:value-of select="/rfc/front/title/@abbrev" /> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="/rfc/front/title" mode="get-text-content" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-header-left"> + <xsl:choose> + <xsl:when test="$xml2rfc-header!=''"><xsl:value-of select="$xml2rfc-header" /></xsl:when> + <xsl:when test="$xml2rfc-private!=''"/> <!-- private draft, header not set --> + <xsl:when test="/rfc/@ipr and not(/rfc/@number)">Internet-Draft</xsl:when> + <xsl:otherwise>RFC <xsl:value-of select="/rfc/@number"/></xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-generator"> + <xsl:variable name="gen"> + <xsl:text>http://greenbytes.de/tech/webdav/rfc2629.xslt, </xsl:text> + <!-- when RCS keyword substitution in place, add version info --> + <xsl:if test="contains('$Revision: 1.912 $',':')"> + <xsl:value-of select="concat('Revision ',normalize-space(translate(substring-after('$Revision: 1.912 $', 'Revision: '),'$','')),', ')" /> + </xsl:if> + <xsl:if test="contains('$Date: 2017/05/31 14:04:25 $',':')"> + <xsl:value-of select="concat(normalize-space(translate(substring-after('$Date: 2017/05/31 14:04:25 $', 'Date: '),'$','')),', ')" /> + </xsl:if> + <xsl:value-of select="concat('XSLT vendor: ',system-property('xsl:vendor'),' ',system-property('xsl:vendor-url'))" /> + </xsl:variable> + <xsl:value-of select="$gen" /> +</xsl:template> + +<xsl:template name="get-header-right"> + <xsl:value-of select="concat($xml2rfc-ext-pub-month, ' ', $xml2rfc-ext-pub-year)" /> +</xsl:template> + +<xsl:template name="get-keywords"> + <xsl:for-each select="/rfc/front/keyword"> + <xsl:if test="contains(.,',')"> + 
<xsl:call-template name="warning"> + <xsl:with-param name="msg">keyword element appears to contain a comma-separated list, split into multiple elements instead.</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="normalize-space(.)" /> + <xsl:if test="position()!=last()">, </xsl:if> + </xsl:for-each> +</xsl:template> + +<!-- get language from context node. nearest ancestor or return the default of "en" --> +<xsl:template name="get-lang"> + <xsl:choose> + <xsl:when test="ancestor-or-self::*[@xml:lang]"><xsl:value-of select="ancestor-or-self::*/@xml:lang" /></xsl:when> + <xsl:otherwise>en</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-section-number"> + <xsl:variable name="anchor" select="@anchor"/> + <xsl:choose> + <xsl:when test="self::note"> + <xsl:number count="note"/> + </xsl:when> + <xsl:when test="@x:fixed-section-number and @x:fixed-section-number!=''"> + <xsl:value-of select="@x:fixed-section-number"/> + </xsl:when> + <xsl:when test="(@x:fixed-section-number and @x:fixed-section-number='') or @numbered='false'"> + <xsl:text>unnumbered-</xsl:text> + <xsl:number count="section[@x:fixed-section-number='' or @numbered='false']" level="any"/> + <!-- checks --> + <xsl:if test="@numbered='false'"> + <xsl:if test="ancestor::section or ancestor::section"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Only top-level sections can be unnumbered</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="following-sibling::section[not(@numbered) or @numberer!='false'] or following-sibling::references"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">Unnumbered section is followed by numbered sections</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:if> + </xsl:when> + <xsl:when test="$has-edits or ancestor::*/@x:fixed-section-number"> + <xsl:call-template name="sectionnumberAndEdits" /> + </xsl:when> + <xsl:otherwise> + <xsl:choose> + <xsl:when test="self::references"> + <xsl:choose> + <xsl:when test="count(/*/back/references)=1"> + <xsl:call-template name="get-references-section-number"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-references-section-number"/>.<xsl:number count="references"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="self::reference"> + <xsl:for-each select="parent::references"> + <xsl:choose> + <xsl:when test="count(/*/back/references)=1"> + <xsl:call-template name="get-references-section-number"/> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="get-references-section-number"/>.<xsl:number count="references"/> + </xsl:otherwise> + </xsl:choose> + </xsl:for-each> + </xsl:when> + <xsl:when test="ancestor::reference"> + <xsl:for-each select="ancestor::reference"> + <xsl:call-template name="get-section-number"/> + </xsl:for-each> + </xsl:when> + <xsl:when test="ancestor::back"><xsl:number count="section|appendix" level="multiple" format="A.1.1.1.1.1.1.1" /></xsl:when> + <xsl:when test="self::appendix"><xsl:number count="appendix" level="multiple" format="A.1.1.1.1.1.1.1" /></xsl:when> + <xsl:otherwise><xsl:number count="section" level="multiple"/></xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<!-- get the section number for the references section --> +<xsl:template name="get-references-section-number"> + <xsl:value-of select="count(/rfc/middle/section) + count(/rfc/middle/ed:replace/ed:ins/section) + 1"/> +</xsl:template> + +<xsl:template name="emit-section-number"> + <xsl:param 
name="no"/> + <xsl:value-of select="$no"/><xsl:if test="not(contains($no,'.')) or $xml2rfc-ext-sec-no-trailing-dots!='no'">.</xsl:if> +</xsl:template> + +<xsl:template name="get-section-type"> + <xsl:param name="prec" /> <!-- TODO: check this, it's unused --> + <xsl:choose> + <xsl:when test="ancestor::back">Appendix</xsl:when> + <xsl:otherwise>Section</xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-table-number"> + <xsl:choose> + <xsl:when test="@anchor!=''"> + <xsl:number level="any" count="texttable[@anchor!='']|table[@anchor!='']" /> + </xsl:when> + <xsl:otherwise> + <xsl:text>u.</xsl:text> + <xsl:number level="any" count="texttable[not(@anchor) or @anchor='']|table[not(@anchor) or @anchor='']" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-table-anchor"> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>table.</xsl:text> + <xsl:call-template name="get-table-number"/> +</xsl:template> + +<xsl:template name="get-figure-number"> + <xsl:choose> + <xsl:when test="@anchor!=''"> + <xsl:number level="any" count="figure[@anchor!='']" /> + </xsl:when> + <xsl:otherwise> + <xsl:text>u.</xsl:text> + <xsl:number level="any" count="figure[not(@anchor) or @anchor='']" /> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="get-figure-anchor"> + <xsl:value-of select="$anchor-pref"/> + <xsl:text>figure.</xsl:text> + <xsl:call-template name="get-figure-number"/> +</xsl:template> + +<!-- reformat contents of author/@initials --> +<xsl:template name="format-initials"> + <xsl:variable name="normalized" select="normalize-space(@initials)"/> + + <xsl:choose> + <xsl:when test="$normalized=''"> + <!-- nothing to do --> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="r"> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="remainder" select="$normalized"/> + </xsl:call-template> + </xsl:variable> + + <xsl:if test="$r!=@initials"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">@initials '<xsl:value-of select="@initials"/>': did you mean '<xsl:value-of select="$r"/>'?</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:value-of select="$r"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="t-format-initials"> + <xsl:param name="have"/> + <xsl:param name="remainder"/> + + <xsl:variable name="first" select="substring($remainder,1,1)"/> + <xsl:variable name="prev" select="substring($have,string-length($have))"/> + +<!--<xsl:message> +have: <xsl:value-of select="$have"/> +remainder: <xsl:value-of select="$remainder"/> +first: <xsl:value-of select="$first"/> +prev: <xsl:value-of select="$prev"/> +</xsl:message>--> + + <xsl:choose> + <xsl:when test="$remainder='' and $prev!='.'"> + <xsl:value-of select="concat($have,'.')"/> + </xsl:when> + <xsl:when test="$remainder=''"> + <xsl:value-of select="$have"/> + </xsl:when> + <xsl:when test="$prev='.' and $first='.'"> + <!-- repeating dots --> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="have" select="$have"/> + <xsl:with-param name="remainder" select="substring($remainder,2)"/> + </xsl:call-template> + </xsl:when> + <!-- missing dot before '-' --> +<!-- <xsl:when test="$prev!='.' and $first='-'"> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="have" select="concat($have,'.-')"/> + <xsl:with-param name="remainder" select="substring($remainder,2)"/> + </xsl:call-template> + </xsl:when>--> + <!-- missing space after '.' --> +<!-- <xsl:when test="$prev='.' 
and $first!=' '"> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="have" select="concat($have,' ',$first)"/> + <xsl:with-param name="remainder" select="substring($remainder,2)"/> + </xsl:call-template> + </xsl:when>--> + <xsl:otherwise> + <xsl:call-template name="t-format-initials"> + <xsl:with-param name="have" select="concat($have,$first)"/> + <xsl:with-param name="remainder" select="substring($remainder,2)"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="truncate-initials"> + <xsl:param name="initials"/> + <xsl:variable name="local-multiple-initials"> + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="../../processing-instruction('rfc')|../processing-instruction('rfc')|./processing-instruction('rfc')"/> + <xsl:with-param name="attr" select="'multiple-initials'"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="use-multiple-initials"> + <xsl:choose> + <xsl:when test="$local-multiple-initials!=''"> + <xsl:value-of select="$local-multiple-initials"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$xml2rfc-multiple-initials"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:choose> + <xsl:when test="normalize-space($initials)=''"/> + <xsl:when test="$use-multiple-initials='yes'"> + <xsl:value-of select="$initials"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="concat(substring-before($initials,'.'),'.')"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="extract-normalized"> + <xsl:param name="node" select="."/> + <xsl:param name="name"/> + <xsl:param name="ascii" select="false()"/> + + <xsl:variable name="n"> + <xsl:choose> + <xsl:when test="$ascii and $node/@ascii!=''"> + <xsl:value-of select="$node/@ascii"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$node"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + + <xsl:variable name="text" select="normalize-space($n)"/> + <xsl:if test="string-length($n) != string-length($text)"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">excessive whitespace in <xsl:value-of select="$name"/>: '<xsl:value-of select="$n"/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="$text=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">missing text in <xsl:value-of select="$name"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:value-of select="$text"/> +</xsl:template> + +<!-- checking for email element --> +<xsl:template name="extract-email"> + <xsl:variable name="email" select="normalize-space(.)"/> + <xsl:if test="contains($email,' ')"> + <xsl:call-template name="error"> + <xsl:with-param name="msg">whitespace in email address: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:variable name="email2"> + <xsl:choose> + <xsl:when test="starts-with($email,'mailto:')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">email should not include URI scheme: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + <xsl:value-of select="substring($email, 1 + string-length('mailto:'))"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$email"/> + </xsl:otherwise> + </xsl:choose> + </xsl:variable> + <xsl:value-of select="$email2"/> +</xsl:template> + +<!-- checking for uri element --> +<xsl:template name="extract-uri"> + <xsl:variable name="uri" select="normalize-space(.)"/> + <xsl:if test="string-length(.) 
!= string-length($uri) or contains($uri,' ')"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">excessive whitespace in URI: '<xsl:value-of select="."/>'</xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:if test="$uri=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">URI is empty</xsl:with-param> + </xsl:call-template> + </xsl:if> + + <xsl:value-of select="$uri"/> +</xsl:template> + +<xsl:template name="insert-conditional-pagebreak"> + <xsl:if test="$xml2rfc-compact!='yes'"> + <xsl:attribute name="class">np</xsl:attribute> + </xsl:if> +</xsl:template> + +<xsl:template name="insert-conditional-hrule"> + <xsl:if test="$xml2rfc-compact!='yes'"> + <hr class="{$css-noprint}" /> + </xsl:if> +</xsl:template> + +<!-- get text content from marked-up text --> + +<xsl:template match="text()" mode="get-text-content"> + <xsl:value-of select="."/> +</xsl:template> + +<xsl:template match="*" mode="get-text-content"> + <xsl:apply-templates mode="get-text-content"/> +</xsl:template> + +<xsl:template match="ed:del" mode="get-text-content"> +</xsl:template> + +<!-- parsing of processing instructions --> +<xsl:template name="parse-pis"> + <xsl:param name="nodes"/> + <xsl:param name="attr"/> + <xsl:param name="sep"/> + <xsl:param name="ret"/> + <xsl:param name="default"/> + <xsl:param name="duplicate-warning" select="'yes'"/> + + <xsl:choose> + <xsl:when test="count($nodes)=0"> + <xsl:choose> + <xsl:when test="$ret!=''"> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$default"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="ret2"> + <xsl:for-each select="$nodes[1]"> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="."/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$ret"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + </xsl:for-each> + </xsl:variable> + + <xsl:call-template name="parse-pis"> + <xsl:with-param name="nodes" select="$nodes[position()!=1]"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$ret2"/> + <xsl:with-param name="default" select="$default"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="parse-one-pi"> + <xsl:param name="str"/> + <xsl:param name="attr"/> + <xsl:param name="sep"/> + <xsl:param name="ret"/> + <xsl:param name="duplicate-warning"/> + + <xsl:variable name="str2"> + <xsl:call-template name="eat-leading-whitespace"> + <xsl:with-param name="str" select="$str"/> + </xsl:call-template> + </xsl:variable> + + <xsl:choose> + <xsl:when test="$str2=''"> + <!-- done --> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="attrname" select="substring-before($str2,'=')"/> + + <xsl:choose> + <xsl:when test="$attrname=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">bad PI syntax: <xsl:value-of select="$str2"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="remainder" select="substring($str2,2+string-length($attrname))"/> + <xsl:choose> + <xsl:when test="string-length($remainder) &lt; 2"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">bad PI value 
syntax: <xsl:value-of select="$remainder"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="rem"> + <xsl:call-template name="eat-leading-whitespace"> + <xsl:with-param name="str" select="$remainder"/> + </xsl:call-template> + </xsl:variable> + <xsl:variable name="qchars">&apos;&quot;</xsl:variable> + <xsl:variable name="qchar" select="substring($rem,1,1)"/> + <xsl:variable name="rem2" select="substring($rem,2)"/> + <xsl:choose> + <xsl:when test="not(contains($qchars,$qchar))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">pseudo-attribute value needs to be quoted: <xsl:value-of select="$rem"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:when test="not(contains($rem2,$qchar))"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unmatched quote in: <xsl:value-of select="$rem2"/></xsl:with-param> + </xsl:call-template> + <xsl:value-of select="$ret"/> + </xsl:when> + <xsl:otherwise> + <xsl:variable name="value" select="substring-before($rem2,$qchar)"/> + + <!-- check pseudo-attribute names --> + <xsl:if test="name()='rfc-ext' and $attr='SANITYCHECK'"> + <xsl:choose> + <xsl:when test="$attrname='allow-markup-in-artwork'"/> + <xsl:when test="$attrname='authors-section'"/> + <xsl:when test="$attrname='check-artwork-width'"/> + <xsl:when test="$attrname='duplex'"/> + <xsl:when test="$attrname='html-pretty-print'"/> + <xsl:when test="$attrname='include-index'"/> + <xsl:when test="$attrname='include-references-in-index'"/> + <xsl:when test="$attrname='justification'"/> + <xsl:when test="$attrname='paragraph-links'"/> + <xsl:when test="$attrname='parse-xml-in-artwork'"/> + <xsl:when test="$attrname='refresh-from'"/> + <xsl:when test="$attrname='refresh-interval'"/> + <xsl:when test="$attrname='refresh-xslt'"/> + <xsl:when test="$attrname='sec-no-trailing-dots'"/> + <xsl:when test="$attrname='trace-parse-xml'"/> + <xsl:when test="$attrname='vspace-pagebreak'"/> + <xsl:when test="$attrname='xml2rfc-backend'"/> + <xsl:otherwise> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">unsupported rfc-ext pseudo-attribute '<xsl:value-of select="$attrname"/>'</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:if test="name()='rfc' and $attr='SANITYCHECK'"> + <xsl:choose> + <xsl:when test="$attrname='authorship'"/> + <xsl:when test="$attrname='comments'"/> + <xsl:when test="$attrname='compact'"/> + <xsl:when test="$attrname='docmapping'"> + <xsl:if test="$value!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">the rfc docmapping pseudo-attribute with values other than 'yes' in not supported by this processor.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:when test="$attrname='editing'"/> + <xsl:when test="$attrname='footer'"/> + <xsl:when test="$attrname='header'"/> + <xsl:when test="$attrname='include'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">the rfc include pseudo-attribute is only partially supported by this processor, see http://greenbytes.de/tech/webdav/rfc2629xslt/rfc2629xslt.html#examples.internalsubset for alternative syntax.</xsl:with-param> + </xsl:call-template> + </xsl:when> + <xsl:when test="$attrname='inline'"/> + <xsl:when test="$attrname='iprnotified'"/> + <xsl:when test="$attrname='linefile'"/> + <xsl:when test="$attrname='linkmailto'"/> + <xsl:when test="$attrname='multiple-initials'"/> + <xsl:when 
test="$attrname='private'"/> + <xsl:when test="$attrname='rfcedstyle'"/> + <xsl:when test="$attrname='sortrefs'"/> + <xsl:when test="$attrname='subcompact'"/> + <xsl:when test="$attrname='strict'"/> + <xsl:when test="$attrname='symrefs'"/> + <xsl:when test="$attrname='toc'"/> + <xsl:when test="$attrname='tocdepth'"/> + <xsl:when test="$attrname='tocindent'"> + <xsl:if test="$value!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">the rfc tocindent pseudo-attribute with values other than 'yes' in not supported by this processor.</xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="info"> + <xsl:with-param name="msg">unsupported rfc pseudo-attribute '<xsl:value-of select="$attrname"/>'</xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> + </xsl:if> + + <xsl:choose> + <xsl:when test="$attrname != $attr"> + <!-- pseudo-attr does not match, continue --> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="substring($rem2, 2 + string-length($value))"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$ret"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$sep='' and $ret!=''"> + <!-- pseudo-attr does match, but we only want one value --> + <xsl:if test="$ret != $value and $duplicate-warning='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">duplicate pseudo-attribute <xsl:value-of select="$attr"/>, overwriting value <xsl:value-of select="$ret"/></xsl:with-param> + </xsl:call-template> + </xsl:if> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="substring($rem2, 2 + string-length($value))"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="ret" select="$value"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <!-- pseudo-attr does match --> + <xsl:call-template name="parse-one-pi"> + <xsl:with-param name="str" select="substring($rem2, 2 + string-length($value))"/> + <xsl:with-param name="attr" select="$attr"/> + <xsl:with-param name="sep" select="$sep"/> + <xsl:with-param name="duplicate-warning" select="$duplicate-warning"/> + <xsl:with-param name="ret"> + <xsl:choose> + <xsl:when test="$ret!=''"> + <xsl:value-of select="concat($ret,$sep,$value)"/> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$value"/> + </xsl:otherwise> + </xsl:choose> + </xsl:with-param> + </xsl:call-template> + </xsl:otherwise> + + </xsl:choose> + + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<xsl:template name="eat-leading-whitespace"> + <xsl:param name="str"/> + + <xsl:choose> + <xsl:when test="$str=''"> + </xsl:when> + <xsl:when test="translate(substring($str,1,1),' &#10;&#13;&#9;',' ')=' '"> + <xsl:call-template name="eat-leading-whitespace"> + <xsl:with-param name="str" select="substring($str,2)"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$str"/> + </xsl:otherwise> + </xsl:choose> + +</xsl:template> + +<!-- diag support --> +<xsl:template name="lineno"> + <xsl:if test="function-available('saxon-old:line-number')" use-when="function-available('saxon-old:line-number')"> + 
<xsl:if test="saxon-old:line-number() > 0"> + <xsl:text> (at line </xsl:text> + <xsl:value-of select="saxon-old:line-number()"/> + <xsl:if test="function-available('saxon-old:systemId')"> + <xsl:variable name="rootsys"> + <xsl:for-each select="/*"> + <xsl:value-of select="saxon-old:systemId()"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$rootsys != saxon-old:systemId()"> + <xsl:text> of </xsl:text> + <xsl:value-of select="saxon-old:systemId()"/> + </xsl:if> + </xsl:if> + <xsl:text>)</xsl:text> + </xsl:if> + </xsl:if> + <xsl:if test="function-available('saxon:line-number')" use-when="function-available('saxon:line-number')"> + <xsl:if test="saxon:line-number() > 0"> + <xsl:text> (at line </xsl:text> + <xsl:value-of select="saxon:line-number()"/> + <xsl:if test="function-available('saxon:systemId')"> + <xsl:variable name="rootsys"> + <xsl:for-each select="/*"> + <xsl:value-of select="saxon:systemId()"/> + </xsl:for-each> + </xsl:variable> + <xsl:if test="$rootsys != saxon:systemId()"> + <xsl:text> of </xsl:text> + <xsl:value-of select="saxon:systemId()"/> + </xsl:if> + </xsl:if> + <xsl:text>)</xsl:text> + </xsl:if> + </xsl:if> +</xsl:template> + +<!-- define exslt:node-set for msxml --> +<msxsl:script language="JScript" implements-prefix="exslt"> + this['node-set'] = function (x) { + return x; + } +</msxsl:script> + +<!-- date handling --> + +<msxsl:script language="JScript" implements-prefix="date"> + function twodigits(s) { + return s &lt; 10 ? "0" + s : s; + } + + this['date-time'] = function (x) { + var now = new Date(); + var offs = now.getTimezoneOffset(); + return now.getFullYear() + "-" + + twodigits(1 + now.getMonth()) + "-" + + twodigits(now.getDate()) + "T" + + twodigits(now.getHours()) + ":" + + twodigits(now.getMinutes()) + ":" + + twodigits(now.getSeconds()) + + (offs >= 0 ? 
"-" : "+") + + twodigits(Math.abs(offs) / 60) + ":" + + twodigits(Math.abs(offs) % 60); + } +</msxsl:script> + +<xsl:variable name="current-year"> + <xsl:choose> + <xsl:when test="function-available('date:date-time')" use-when="function-available('date:date-time')"> + <xsl:value-of select="substring-before(date:date-time(),'-')"/> + </xsl:when> + <xsl:when test="function-available('current-date')"> + <xsl:value-of select="substring-before(string(current-date()),'-')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="current-month"> + <xsl:choose> + <xsl:when test="function-available('date:date-time')" use-when="function-available('date:date-time')"> + <xsl:value-of select="substring-before(substring-after(date:date-time(),'-'),'-')"/> + </xsl:when> + <xsl:when test="function-available('current-date')"> + <xsl:value-of select="substring-before(substring-after(string(current-date()),'-'),'-')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="current-day"> + <xsl:choose> + <xsl:when test="function-available('date:date-time')" use-when="function-available('date:date-time')"> + <xsl:value-of select="substring-after(substring-after(substring-before(date:date-time(),'T'),'-'),'-')"/> + </xsl:when> + <xsl:when test="function-available('current-dateTime')"> + <xsl:value-of select="substring-after(substring-after(substring-before(string(current-dateTime()),'T'),'-'),'-')"/> + </xsl:when> + <xsl:otherwise/> + </xsl:choose> +</xsl:variable> + +<xsl:variable name="may-default-dates"> + <xsl:choose> + <xsl:when test="$current-year!='' and $current-month!='' and $current-day!=''"> + <xsl:variable name="year-specified" select="/rfc/front/date/@year and /rfc/front/date/@year!=''"/> + <xsl:variable name="month-specified" select="/rfc/front/date/@month and /rfc/front/date/@month!=''"/> + <xsl:variable name="day-specified" select="/rfc/front/date/@day and /rfc/front/date/@day!=''"/> + <xsl:variable name="system-month"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$current-month"/> + </xsl:call-template> + </xsl:variable> + <xsl:choose> + <xsl:when test="$year-specified and /rfc/front/date/@year!=$current-year">Specified year <xsl:value-of select="/rfc/front/date/@year"/> does not match system date (<xsl:value-of select="$current-year"/>)</xsl:when> + <xsl:when test="$month-specified and /rfc/front/date/@month!=$system-month">Specified month <xsl:value-of select="/rfc/front/date/@month"/> does not match system date (<xsl:value-of select="$system-month"/>)</xsl:when> + <xsl:when test="$day-specified and /rfc/front/date/@day!=$current-day">Specified day does not match system date</xsl:when> + <xsl:when test="not($year-specified) and ($month-specified or $day-specified)">Can't default year when month or day is specified</xsl:when> + <xsl:when test="not($month-specified) and $day-specified">Can't default month when day is specified</xsl:when> + <xsl:otherwise>yes</xsl:otherwise> + </xsl:choose> + </xsl:when> + <!-- may, but won't --> + <xsl:otherwise>yes</xsl:otherwise> + </xsl:choose> +</xsl:variable> + +<xsl:param name="xml2rfc-ext-pub-year"> + <xsl:choose> + <xsl:when test="/rfc/front/date/@year and /rfc/front/date/@year!=''"> + <xsl:value-of select="/rfc/front/date/@year"/> + </xsl:when> + <xsl:when test="$current-year!='' and $may-default-dates='yes'"> + <xsl:value-of select="$current-year"/> + </xsl:when> + <xsl:when test="$current-year!='' and $may-default-dates!='yes'"> + 
<xsl:call-template name="warning"> + <xsl:with-param name="msg" select="$may-default-dates"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'/rfc/front/date/@year missing (and XSLT processor cannot compute the system date)'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<xsl:param name="xml2rfc-ext-pub-month"> + <xsl:choose> + <xsl:when test="/rfc/front/date/@month and /rfc/front/date/@month!=''"> + <xsl:variable name="m" select="/rfc/front/date/@month"/> + <xsl:choose> + <xsl:when test="string(number($m))!='NaN' and number($m) &gt; 0 and number($m) &lt; 13"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$m"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:value-of select="$m"/> + </xsl:otherwise> + </xsl:choose> + </xsl:when> + <xsl:when test="$current-month!='' and $may-default-dates='yes'"> + <xsl:call-template name="get-month-as-name"> + <xsl:with-param name="month" select="$current-month"/> + </xsl:call-template> + </xsl:when> + <xsl:when test="$current-month!='' and $may-default-dates!='yes'"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="$may-default-dates"/> + </xsl:call-template> + </xsl:when> + <xsl:otherwise> + <xsl:call-template name="error"> + <xsl:with-param name="msg" select="'/rfc/front/date/@month missing (and XSLT processor cannot compute the system date)'"/> + </xsl:call-template> + </xsl:otherwise> + </xsl:choose> +</xsl:param> + +<xsl:param name="pub-month-numeric"> + <xsl:call-template name="get-month-as-num"> + <xsl:with-param name="month" select="$xml2rfc-ext-pub-month" /> + </xsl:call-template> +</xsl:param> + +<xsl:param name="xml2rfc-ext-pub-day"> + <xsl:choose> + <xsl:when test="/rfc/front/date/@day and /rfc/front/date/@day!=''"> + <xsl:value-of select="/rfc/front/date/@day"/> + </xsl:when> + <xsl:when test="$current-day!='' and $may-default-dates='yes'"> + <xsl:value-of select="$current-day"/> + </xsl:when> + <xsl:otherwise /> <!-- harmless, we just don't have it --> + </xsl:choose> +</xsl:param> + +<xsl:param name="pub-yearmonth"> + <!-- year or 0000 --> + <xsl:choose> + <xsl:when test="$xml2rfc-ext-pub-year!=''"> + <xsl:value-of select="format-number($xml2rfc-ext-pub-year,'0000')"/> + </xsl:when> + <xsl:otherwise>0000</xsl:otherwise> + </xsl:choose> + <!-- month or 00 --> + <xsl:choose> + <xsl:when test="number($pub-month-numeric) &gt; 0"> + <xsl:value-of select="format-number($pub-month-numeric,'00')"/> + </xsl:when> + <xsl:otherwise>00</xsl:otherwise> + </xsl:choose> +</xsl:param> + +<!-- simple validation support --> + +<xsl:template match="*" mode="validate"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="@*" mode="validate"/> + +<xsl:template name="warninvalid"> + <xsl:param name="additionalDiagnostics"/> + <xsl:variable name="pname"> + <xsl:if test="namespace-uri(..)!=''"> + <xsl:value-of select="concat('{',namespace-uri(..),'}')"/> + </xsl:if> + <xsl:value-of select="local-name(..)"/> + </xsl:variable> + <xsl:variable name="cname"> + <xsl:if test="namespace-uri(.)!=''"> + <xsl:value-of select="concat('{',namespace-uri(.),'}')"/> + </xsl:if> + <xsl:value-of select="local-name(.)"/> + </xsl:variable> + <xsl:call-template name="warning"> + <xsl:with-param name="msg" select="concat($cname,' not allowed inside ',$pname,$additionalDiagnostics)"/> + </xsl:call-template> +</xsl:template> + +<!-- figure element --> 
+<xsl:template match="figure/artwork | figure/ed:replace/ed:*/artwork" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="artwork" mode="validate"> + <xsl:call-template name="warninvalid"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- li element --> +<xsl:template match="ol/li | ul/li" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="li" mode="validate"> + <xsl:call-template name="warninvalid"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- list element --> +<xsl:template match="t/list | t/ed:replace/ed:*/list" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="list" mode="validate"> + <xsl:call-template name="warninvalid"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- t element --> +<xsl:template match="abstract/t | abstract/ed:replace/ed:*/t | + list/t | list/ed:replace/ed:*/t | + note/t | note/ed:replace/ed:*/t | + section/t | section/ed:replace/ed:*/t | + blockquote/t | + x:blockquote/t | x:blockquote/ed:replace/ed:*/t | + x:note/t | x:note/ed:replace/ed:*/t | + aside/t | + td/t | th/t | + x:lt/t | li/t | x:lt/ed:replace/ed:*/t | dd/t" mode="validate" priority="9"> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> +<xsl:template match="t" mode="validate"> + <xsl:call-template name="warninvalid"/> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<!-- xref element --> +<xsl:template match="abstract//xref" mode="validate"> + <xsl:call-template name="warninvalid"> + <xsl:with-param name="additionalDiagnostics"> (inside &lt;abstract>)</xsl:with-param> + </xsl:call-template> + <xsl:apply-templates select="@*|*" mode="validate"/> +</xsl:template> + +<xsl:template name="check-no-text-content"> + <xsl:for-each select="text()"> + <xsl:if test="normalize-space(.)!=''"> + <xsl:call-template name="warning"> + <xsl:with-param name="msg">No text content allowed inside &lt;<xsl:value-of select="local-name(..)"/>&gt;, but found: <xsl:value-of select="."/></xsl:with-param> + </xsl:call-template> + </xsl:if> + </xsl:for-each> +</xsl:template> + +<xsl:template name="render-name"> + <xsl:param name="n"/> + <xsl:param name="strip-links" select="true()"/> + <xsl:variable name="t"> + <xsl:apply-templates select="$n"/> + </xsl:variable> + <xsl:choose> + <xsl:when test="not($strip-links)"> + <xsl:copy-of select="exslt:node-set($t)"/> + </xsl:when> + <xsl:otherwise> + <xsl:apply-templates select="exslt:node-set($t)" mode="strip-links"/> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +<xsl:template name="render-name-ref"> + <xsl:param name="n"/> + <xsl:variable name="t"> + <xsl:call-template name="render-name"> + <xsl:with-param name="n" select="$n"/> + </xsl:call-template> + </xsl:variable> + <xsl:apply-templates select="exslt:node-set($t)" mode="strip-ids"/> +</xsl:template> + +<!-- clean up links from HTML --> +<xsl:template match="comment()|@*" mode="strip-links"><xsl:copy/></xsl:template> +<xsl:template match="text()" mode="strip-links"><xsl:copy/></xsl:template> +<xsl:template match="*" mode="strip-links"> + <xsl:element name="{local-name()}"> + <xsl:apply-templates select="@*|node()" mode="strip-links" /> + </xsl:element> +</xsl:template> +<xsl:template match="a|xhtml:a" mode="strip-links" xmlns:xhtml="http://www.w3.org/1999/xhtml"> 
+ <xsl:apply-templates select="node()" mode="strip-links" /> +</xsl:template> +<xsl:template match="node()|@*" mode="strip-ids"> + <xsl:copy> + <xsl:apply-templates select="node()|@*" mode="strip-ids" /> + </xsl:copy> +</xsl:template> +<xsl:template match="@id" mode="strip-ids"/> + + +<!-- customization: these templates can be overridden in an XSLT that imports from this one --> +<xsl:template name="add-start-material"/> +<xsl:template name="add-end-material"/> + +</xsl:transform> diff --git a/test/fixtures/cache-tests/spec/lib/rfcbootstrap.xslt b/test/fixtures/cache-tests/spec/lib/rfcbootstrap.xslt new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/rfcbootstrap.xslt @@ -0,0 +1,335 @@ +<!-- + XSLT transformation from RFC2629 XML format to Bootstrap-ised HTML + + Copyright (c) 2014-2015, Mark Nottingham (mnot@mnot.net) + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Mark Nottingham nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. + + + Based upon Julian Reschke's rfc2629.xslt: + + Copyright (c) 2006-2014, Julian Reschke (julian.reschke@greenbytes.de) + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Julian Reschke nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +--> + +<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" + version="2.0" + + xmlns:date="http://exslt.org/dates-and-times" + xmlns:ed="http://greenbytes.de/2002/rfcedit" + xmlns:exslt="http://exslt.org/common" + xmlns:msxsl="urn:schemas-microsoft-com:xslt" + xmlns:myns="mailto:julian.reschke@greenbytes.de?subject=rcf2629.xslt" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:saxon="http://saxon.sf.net/" + xmlns:saxon-old="http://icl.com/saxon" + xmlns:x="http://purl.org/net/xml2rfc/ext" + xmlns:xhtml="http://www.w3.org/1999/xhtml" + + exclude-result-prefixes="date ed exslt msxsl myns rdf saxon saxon-old x xhtml" + > + + <xsl:import href="rfc2629.xslt"/> + + <xsl:strip-space elements="abstract author back figure front list middle note postal reference references rfc section texttable"/> + + <xsl:output method="html" encoding="utf-8" indent="no" doctype-system="about:legacy-compat" /> + + <!-- CSS mapping --> + <xsl:param name="xml2rfc-ext-css-map">cssmap.xml</xsl:param> + + <!-- Library URLs --> + <xsl:param name="bootstrapCssUrl" + select="'https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css'" /> + <xsl:param name="bootstrapJsUrl" + select="'https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/js/bootstrap.min.js'" /> + <xsl:param name="jqueryJsUrl" + select="'https://code.jquery.com/jquery-3.3.1.slim.min.js'" /> + + <!-- navbar --> + <xsl:param name="navbar" select="''" /> + + <!-- site URLs --> + <xsl:param name="siteCssUrl" select="''" /> + <xsl:param name="siteJsUrl" select="''" /> + + <!-- page URL --> + <xsl:param name="pageUrl" select="''" /> + + <!-- page image URL --> + <xsl:param name="pageImageUrl" select="''" /> + + <!-- site name --> + <xsl:param name="siteName" select="''" /> + + <!-- optional banner --> + <xsl:param name="banner" select="''" /> + + <!-- disable built-in ToC --> + <xsl:variable name="xml2rfc-toc">yes</xsl:variable> + + <xsl:variable name="toc-ul-class" select="'nav-sublist list-unstyled'" /> + + <xsl:template name="body"> + <body> + <xsl:variable name="onload"> + <xsl:if test="$xml2rfc-ext-insert-metadata='yes' and /rfc/@number">getMeta(<xsl:value-of select="/rfc/@number"/>,"rfc.meta");</xsl:if> + <xsl:if test="/rfc/x:feedback">initFeedback();</xsl:if> + <xsl:if test="$xml2rfc-ext-refresh-from!=''">RfcRefresh.initRefresh()</xsl:if> + </xsl:variable> + <xsl:if test="$onload!=''"> + <xsl:attribute name="onload"> + <xsl:value-of select="$onload"/> + </xsl:attribute> + </xsl:if> + + <xsl:if test="$navbar!=''"> + <xsl:copy-of select="document($navbar)"/> + </xsl:if> + + <!-- insert diagnostics --> + <xsl:call-template name="insert-diagnostics"/> + + <div class="container" id="top"> + <div class="row"> + <div class="col-lg-4 order-last d-none d-lg-block" id="sidebar" role="navigation"> + <div class="navbar"> + <div class="navbar-brand"> + <a href="#top"> + <xsl:choose> + <xsl:when test="/rfc/@number"><strong>RFC </strong><xsl:value-of select="/rfc/@number"/></xsl:when> + </xsl:choose> + </a> + 
</div> + <br clear="all"/> + <div class=""> + <xsl:apply-templates select="/" mode="toc"> + <xsl:with-param name="extra"></xsl:with-param> + </xsl:apply-templates> + </div> + </div> + </div> + <div class="col-lg-8 order-first main" role="main"> + <xsl:apply-templates select="front" /> + <xsl:apply-templates select="middle" /> + <xsl:call-template name="back" /> + </div> + </div> + </div> + <xsl:if test="$siteJsUrl!=''"> + <script src="{$siteJsUrl}" type="module"></script> + </xsl:if> + </body> + </xsl:template> + + + <xsl:template name="insertCss"> + <meta name="viewport" content="width=device-width, initial-scale=1"/> + <meta property="og:type" content="article" /> + <xsl:choose> + <xsl:when test="$rfcno!=''"> + <meta property="og:title" content="{concat('RFC', $rfcno)}" /> + <xsl:element name="meta"> + <xsl:attribute name="property">og:description</xsl:attribute> + <xsl:attribute name="content"> + <xsl:apply-templates select="front/title" mode="get-text-content" /> + </xsl:attribute> + </xsl:element> + </xsl:when> + <xsl:otherwise> + <xsl:element name="meta"> + <xsl:attribute name="property">og:title</xsl:attribute> + <xsl:attribute name="content"> + <xsl:apply-templates select="front/title" mode="get-text-content" /> + </xsl:attribute> + </xsl:element> + <meta property="og:description" content="{normalize-space(front/abstract)}" /> + </xsl:otherwise> + </xsl:choose> + <xsl:if test="$pageUrl!=''"> + <meta property="og:url" content="{$pageUrl}" /> + </xsl:if> + <xsl:if test="$siteName!=''"> + <meta property="og:site_name" content="{$siteName}" /> + </xsl:if> + <xsl:if test="$pageImageUrl!=''"> + <meta property="og:image" content="{$pageImageUrl}" /> + </xsl:if> + <link rel="stylesheet" type="text/css" href="{$bootstrapCssUrl}" /> + <style type="text/css"> + body { + padding-top: 80px; + padding-bottom: 80px; + position: relative; + } + .table.header th, .table.header td { + border-top: none; + padding: 0; + } + #sidebar { + margin-top: -10px; + height: 90%; + overflow-y: auto; + font-size: 90%; + } + #rfc\.meta { + width: 40%; + float: right + } + .toc ul { + list-style: none; + } + .filename { + color: rgb(119, 119, 119); + font-size: 23px; + font-weight: normal; + height: auto; + line-height: 23px; + } + dl { + margin-left: 1em; + } + dl.dl-horizontal: { + margin-left: 0; + } + dl > dt { + float: left; + margin-right: 1em; + } + dl.nohang > dt { + float: none; + } + dl > dd { + margin-bottom: .5em; + } + dl.compact > dd { + margin-bottom: 0em; + } + dl > dd > dl { + margin-top: 0.5em; + margin-bottom: 0em; + } + ul.empty {<!-- spacing between two entries in definition lists --> + list-style-type: none; + } + ul.empty li { + margin-top: .5em; + } + td.reference { + padding-right: 1em; + vertical-align: top; + } + .feedback { + position: fixed; + bottom: 5px; + right: 5px; + } + .fbbutton { + margin-left: 5px; + } + h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { + color: rgb(51, 51, 51); + } + span.tt { + font: 11pt consolas, monospace; + font-size-adjust: none; + } + div.banner { + background-color: #fee; + border: 2px solid #633; + padding: 8px 12px; + margin-bottom: 10px; + } + div.banner p { + font-size: 1.2em; + } + </style> + <xsl:if test="$siteCssUrl!=''"> + <link rel="stylesheet" type="text/css" href="{$siteCssUrl}" /> + </xsl:if> + </xsl:template> + + <xsl:template match="abstract"> + <xsl:call-template name="check-no-text-content"/> + <hr/> + <h2 id="{$anchor-pref}abstract"><a href="#{$anchor-pref}abstract">Abstract</a></h2> + <div class="lead"> + <xsl:apply-templates /> + </div> + 
<xsl:if test="$banner!=''"> + <div class="banner"> + <p><xsl:value-of select="$banner"/></p> + </div> + </xsl:if> + + </xsl:template> + + <xsl:template match="/" mode="toc"> + <xsl:param name="extra">d-lg-none</xsl:param> + <div class="toc {$extra}"> + <ul> + <xsl:apply-templates mode="toc" /> + </ul> + </div> + </xsl:template> + + <xsl:template name="insertTocAppendix"> + </xsl:template> + + <xsl:template name="get-generator"> + <xsl:variable name="gen"> + <xsl:text>https://github.com/mnot/RFCBootstrap </xsl:text> + <xsl:value-of select="concat('XSLT vendor: ',system-property('xsl:vendor'),' ',system-property('xsl:vendor-url'))" /> + </xsl:variable> + <xsl:value-of select="$gen" /> + </xsl:template> + + +</xsl:transform> diff --git a/test/fixtures/cache-tests/spec/lib/xreffer.xslt b/test/fixtures/cache-tests/spec/lib/xreffer.xslt new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/lib/xreffer.xslt @@ -0,0 +1,32 @@ +<xsl:transform xmlns:xsl="http://www.w3.org/1999/XSL/Transform" + xmlns:x="http://purl.org/net/xml2rfc/ext" + version="1.0" +> + +<xsl:output encoding="UTF-8" /> + +<!-- rules for identity transformations --> + +<xsl:template match="node()|@*"><xsl:copy><xsl:apply-templates select="node()|@*" /></xsl:copy></xsl:template> + +<xsl:template match="/"> + <xsl:copy><xsl:apply-templates select="node()" /></xsl:copy> +</xsl:template> + +<xsl:template match="xref"> + <xsl:variable name="t" select="@target"/> + <xsl:variable name="n" select="//*[@anchor=$t]"/> + <xsl:choose> + <xsl:when test="$n/self::x:has and $n/@target"> + <xref target="{$n/../../@anchor}" x:rel="#{$n/@target}"/> + </xsl:when> + <xsl:when test="$n/self::x:has"> + <xref target="{$n/../../@anchor}" x:rel="#{$t}"/> + </xsl:when> + <xsl:otherwise> + <xsl:copy><xsl:apply-templates select="@*|node()" /></xsl:copy> + </xsl:otherwise> + </xsl:choose> +</xsl:template> + +</xsl:transform> \ No newline at end of file diff --git a/test/fixtures/cache-tests/spec/rfc9110.xml b/test/fixtures/cache-tests/spec/rfc9110.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/rfc9110.xml @@ -0,0 +1,13578 @@ +<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type='text/xsl' href='lib/myxml2rfc.xslt'?> +<?rfc compact="yes"?> +<?rfc subcompact="no" ?> +<?rfc linkmailto="no" ?> +<?rfc editing="no" ?> +<?rfc comments="yes"?> +<?rfc inline="yes"?> +<?rfc rfcedstyle="yes"?> +<?rfc-ext allow-markup-in-artwork="yes" ?> +<?rfc-ext map-dfn="none" ?> +<?rfc-ext html-pretty-print="prettyprint https://cdn.rawgit.com/google/code-prettify/master/loader/run_prettify.js"?> +<?rfc-ext include-references-in-index="yes" ?> +<?rfc-ext xml2rfc-backend="202007"?> +<rfc xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:x="http://purl.org/net/xml2rfc/ext" obsoletes="2818, 7230, 7231, 7232, 7233, 7235, 7538, 7615, 7694" updates="3864" category="std" x:maturity-level="internet" ipr="pre5378Trust200902" number="9110" seriesNo="97" version="3" tocDepth="4" sortRefs="true"> +<front> + + <title>HTTP Semantics</title> + + <author fullname="Roy T. Fielding" initials="R." surname="Fielding" role="editor"> + <organization>Adobe</organization> + <address> + <postal> + <postalLine>345 Park Ave</postalLine> + <postalLine>San Jose, CA 95110</postalLine> + <country>United States of America</country> + </postal> + <email>fielding@gbiv.com</email> + <uri>https://roy.gbiv.com/</uri> + </address> + </author> + + <author fullname="Mark Nottingham" initials="M." 
surname="Nottingham" role="editor"> + <organization>Fastly</organization> + <address> + <postal> + <postalLine>Prahran</postalLine> + <country>Australia</country> + </postal> + <email>mnot@mnot.net</email> + <uri>https://www.mnot.net/</uri> + </address> + </author> + + <author fullname="Julian Reschke" initials="J." surname="Reschke" role="editor"> + <organization abbrev="greenbytes">greenbytes GmbH</organization> + <address> + <postal> + <postalLine>Hafenweg 16</postalLine> + <postalLine>48155 Münster</postalLine> + <country>Germany</country> + </postal> + <email>julian.reschke@greenbytes.de</email> + <uri>https://greenbytes.de/tech/webdav/</uri> + </address> + </author> + + <date year="2022" month="June"/> + + <area>Applications and Real-Time</area> + <workgroup>HTTP Working Group</workgroup> + + <keyword>Hypertext Transfer Protocol</keyword> + <keyword>HTTP</keyword> + <keyword>HTTP semantics</keyword> + <keyword>HTTP content</keyword> + <keyword>HTTP method</keyword> + <keyword>HTTP status code</keyword> + +<abstract> +<t> + The Hypertext Transfer Protocol (HTTP) is a stateless application-level + protocol for distributed, collaborative, hypertext information systems. + This document describes the overall architecture of HTTP, establishes common + terminology, and defines aspects of the protocol that are shared by all + versions. In this definition are core protocol elements, extensibility + mechanisms, and the "http" and "https" Uniform Resource Identifier (URI) + schemes. +</t> +<t> + This document updates RFC 3864 and + obsoletes RFCs 2818, 7231, 7232, 7233, + 7235, 7538, 7615, 7694, and portions of 7230. +</t> +</abstract> +</front> + +<middle> +<section title="Introduction" anchor="introduction"> +<section title="Purpose" anchor="purpose"> +<t> + The Hypertext Transfer Protocol (HTTP) is a family of stateless, + application-level, request/response protocols that share a generic interface, + extensible semantics, and self-descriptive messages to enable flexible + interaction with network-based hypertext information systems. +</t> +<t> + HTTP hides the details of how a service is implemented by presenting a + uniform interface to clients that is independent of the types of resources + provided. Likewise, servers do not need to be aware of each client's + purpose: a request can be considered in isolation rather than being + associated with a specific type of client or a predetermined sequence of + application steps. This allows general-purpose implementations to be used + effectively in many different contexts, reduces interaction complexity, and + enables independent evolution over time. +</t> +<t> + HTTP is also designed for use as an intermediation protocol, wherein + proxies and gateways can translate non-HTTP information systems into a + more generic interface. +</t> +<t> + One consequence of this flexibility is that the protocol cannot be + defined in terms of what occurs behind the interface. Instead, we + are limited to defining the syntax of communication, the intent + of received communication, and the expected behavior of recipients. + If the communication is considered in isolation, then successful + actions ought to be reflected in corresponding changes to the + observable interface provided by servers. However, since multiple + clients might act in parallel and perhaps at cross-purposes, we + cannot require that such changes be observable beyond the scope + of a single response. 
+</t> +</section> + +<section title="History and Evolution" anchor="history.and.evolution"> +<t> + HTTP has been the primary information transfer protocol for the World Wide + Web since its introduction in 1990. It began as a trivial mechanism for + low-latency requests, with a single method (GET) to request transfer of a + presumed hypertext document identified by a given pathname. + As the Web grew, HTTP was extended to enclose requests and responses within + messages, transfer arbitrary data formats using MIME-like media types, and + route requests through intermediaries. These protocols were eventually + defined as HTTP/0.9 and HTTP/1.0 (see <xref target="HTTP10"/>). +</t> +<t> + HTTP/1.1 was designed to refine the protocol's features while retaining + compatibility with the existing text-based messaging syntax, improving + its interoperability, scalability, and robustness across the Internet. + This included length-based data delimiters for both fixed and dynamic + (chunked) content, a consistent framework for content negotiation, + opaque validators for conditional requests, cache controls for better + cache consistency, range requests for partial updates, and default + persistent connections. HTTP/1.1 was introduced in 1995 and published on + the Standards Track in 1997 <xref target="RFC2068"/>, revised in + 1999 <xref target="RFC2616"/>, and revised again in 2014 + (<xref target="RFC7230"/> through <xref target="RFC7235"/>). +</t> +<t> + HTTP/2 (<xref target="HTTP2"/>) introduced a multiplexed session layer + on top of the existing TLS and TCP protocols for exchanging concurrent + HTTP messages with efficient field compression and server push. + HTTP/3 (<xref target="HTTP3"/>) provides greater independence for concurrent + messages by using QUIC as a secure multiplexed transport over UDP instead of + TCP. +</t> +<t> + All three major versions of HTTP rely on the semantics defined by + this document. They have not obsoleted each other because each one has + specific benefits and limitations depending on the context of use. + Implementations are expected to choose the most appropriate transport and + messaging syntax for their particular context. +</t> +<t> + This revision of HTTP separates the definition of semantics (this document) + and caching (<xref target="CACHING"/>) from the current HTTP/1.1 messaging + syntax (<xref target="HTTP11"/>) to allow each major protocol version + to progress independently while referring to the same core semantics. +</t> +</section> + +<section title="Core Semantics" anchor="core.semantics"> +<t> + HTTP provides a uniform interface for interacting with a resource + (<xref target="resources"/>) — regardless of its type, nature, or + implementation — by sending messages that manipulate or transfer + representations (<xref target="representations"/>). +</t> +<t> + Each message is either a request or a response. A client constructs request + messages that communicate its intentions and routes those messages toward + an identified origin server. A server listens for requests, parses each + message received, interprets the message semantics in relation to the + identified target resource, and responds to that request with one or more + response messages. The client examines received responses to see if its + intentions were carried out, determining what to do next based on the + status codes and content received. 
+</t> +<t> + HTTP semantics include the intentions defined by each request method + (<xref target="methods"/>), extensions to those semantics that might be + described in request header fields, + status codes that describe the response (<xref target="status.codes"/>), and + other control data and resource metadata that might be given in response + fields. +</t> +<t><iref item="content negotiation"/> + Semantics also include representation metadata that describe how + content is intended to be interpreted by a recipient, request header + fields that might influence content selection, and the various selection + algorithms that are collectively referred to as + <x:dfn>content negotiation</x:dfn> (<xref target="content.negotiation"/>). +</t> +</section> + +<section title="Specifications Obsoleted by This Document" anchor="specifications.obsoleted.by.this.document"> +<table align="left"> + <thead> + <tr> + <th>Title</th> + <th>Reference</th> + <th>See</th> + </tr> + </thead> + <tbody> + <tr> + <td>HTTP Over TLS</td> + <td><xref target="RFC2818"/></td> + <td><xref target="changes.from.rfc.2818" format="counter"/></td> + </tr> + <tr> + <td>HTTP/1.1 Message Syntax and Routing [*]</td> + <td><xref target="RFC7230"/></td> + <td><xref target="changes.from.rfc.7230" format="counter"/></td> + </tr> + <tr> + <td>HTTP/1.1 Semantics and Content</td> + <td><xref target="RFC7231"/></td> + <td><xref target="changes.from.rfc.7231" format="counter"/></td> + </tr> + <tr> + <td>HTTP/1.1 Conditional Requests</td> + <td><xref target="RFC7232"/></td> + <td><xref target="changes.from.rfc.7232" format="counter"/></td> + </tr> + <tr> + <td>HTTP/1.1 Range Requests</td> + <td><xref target="RFC7233"/></td> + <td><xref target="changes.from.rfc.7233" format="counter"/></td> + </tr> + <tr> + <td>HTTP/1.1 Authentication</td> + <td><xref target="RFC7235"/></td> + <td><xref target="changes.from.rfc.7235" format="counter"/></td> + </tr> + <tr> + <td>HTTP Status Code 308 (Permanent Redirect)</td> + <td><xref target="RFC7538"/></td> + <td><xref target="changes.from.rfc.7538" format="counter"/></td> + </tr> + <tr> + <td>HTTP Authentication-Info and Proxy-Authentication-Info + Response Header Fields</td> + <td><xref target="RFC7615"/></td> + <td><xref target="changes.from.rfc.7615" format="counter"/></td> + </tr> + <tr> + <td>HTTP Client-Initiated Content-Encoding</td> + <td><xref target="RFC7694"/></td> + <td><xref target="changes.from.rfc.7694" format="counter"/></td> + </tr> + </tbody> +</table> +<t> + [*] This document only obsoletes the portions of + <xref target="RFC7230" format="none">RFC 7230</xref> that are independent of + the HTTP/1.1 messaging syntax and connection management; the remaining + bits of <xref target="RFC7230" format="none">RFC 7230</xref> are + obsoleted by "HTTP/1.1" <xref target="HTTP11"/>. 
+</t> +</section> +</section> + +<section title="Conformance" anchor="conformance"> + +<section title="Syntax Notation" anchor="notation"> +<iref primary="true" item="Grammar" subitem="ALPHA"/> +<iref primary="true" item="Grammar" subitem="CR"/> +<iref primary="true" item="Grammar" subitem="CRLF"/> +<iref primary="true" item="Grammar" subitem="CTL"/> +<iref primary="true" item="Grammar" subitem="DIGIT"/> +<iref primary="true" item="Grammar" subitem="DQUOTE"/> +<iref primary="true" item="Grammar" subitem="HEXDIG"/> +<iref primary="true" item="Grammar" subitem="HTAB"/> +<iref primary="true" item="Grammar" subitem="LF"/> +<iref primary="true" item="Grammar" subitem="OCTET"/> +<iref primary="true" item="Grammar" subitem="SP"/> +<iref primary="true" item="Grammar" subitem="VCHAR"/> +<t> + This specification uses the Augmented Backus-Naur Form (ABNF) notation of + <xref target="RFC5234"/>, extended with the notation for case-sensitivity + in strings defined in <xref target="RFC7405"/>. +</t> +<t> + It also uses a list extension, defined in <xref target="abnf.extension"/>, + that allows for compact definition of comma-separated lists using a "#" + operator (similar to how the "*" operator indicates repetition). <xref target="collected.abnf"/> shows the collected grammar with all list + operators expanded to standard ABNF notation. +</t> +<t> + As a convention, ABNF rule names prefixed with "obs-" denote + obsolete grammar rules that appear for historical reasons. +</t> +<t anchor="core.rules"> + <x:anchor-alias value="ALPHA"/> + <x:anchor-alias value="CR"/> + <x:anchor-alias value="CRLF"/> + <x:anchor-alias value="CTL"/> + <x:anchor-alias value="DIGIT"/> + <x:anchor-alias value="DQUOTE"/> + <x:anchor-alias value="HEXDIG"/> + <x:anchor-alias value="HTAB"/> + <x:anchor-alias value="LF"/> + <x:anchor-alias value="OCTET"/> + <x:anchor-alias value="SP"/> + <x:anchor-alias value="VCHAR"/> + The following core rules are included by + reference, as defined in <xref target="RFC5234" x:fmt="of" x:sec="B.1"/>: + ALPHA (letters), CR (carriage return), CRLF (CR LF), CTL (controls), + DIGIT (decimal 0-9), DQUOTE (double quote), + HEXDIG (hexadecimal 0-9/A-F/a-f), HTAB (horizontal tab), LF (line feed), + OCTET (any 8-bit sequence of data), SP (space), and + VCHAR (any visible US-ASCII character). +</t> +<t> + <xref target="fields.components"/> defines some generic syntactic + components for field values. +</t> +<t> + This specification uses the terms + "character", + "character encoding scheme", + "charset", and + "protocol element" + as they are defined in <xref target="RFC6365"/>. +</t> +</section> + +<section title="Requirements Notation" anchor="requirements.notation"> +<t> + The key words "<bcp14>MUST</bcp14>", "<bcp14>MUST NOT</bcp14>", "<bcp14>REQUIRED</bcp14>", "<bcp14>SHALL</bcp14>", "<bcp14>SHALL NOT</bcp14>", + "<bcp14>SHOULD</bcp14>", "<bcp14>SHOULD NOT</bcp14>", "<bcp14>RECOMMENDED</bcp14>", "<bcp14>NOT RECOMMENDED</bcp14>", + "<bcp14>MAY</bcp14>", and "<bcp14>OPTIONAL</bcp14>" in this document are to be interpreted as + described in BCP 14 <xref target="RFC2119"/> <xref target="RFC8174"/> when, and only when, they + appear in all capitals, as shown here. +</t> +<t> + This specification targets conformance criteria according to the role of + a participant in HTTP communication. Hence, requirements are placed + on senders, recipients, clients, servers, user agents, intermediaries, + origin servers, proxies, gateways, or caches, depending on what behavior + is being constrained by the requirement. 
Additional requirements + are placed on implementations, resource owners, and protocol element + registrations when they apply beyond the scope of a single communication. +</t> +<t> + The verb "generate" is used instead of "send" where a requirement applies + only to implementations that create the protocol element, rather than an + implementation that forwards a received element downstream. +</t> +<t> + An implementation is considered conformant if it complies with all of the + requirements associated with the roles it partakes in HTTP. +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> generate protocol elements that do not match the grammar + defined by the corresponding ABNF rules. + Within a given message, a sender <bcp14>MUST NOT</bcp14> generate protocol elements or + syntax alternatives that are only allowed to be generated by participants in + other roles (i.e., a role that the sender does not have for that message). +</t> +<t> + Conformance to HTTP includes both conformance to the particular messaging + syntax of the protocol version in use and conformance to the semantics of + protocol elements sent. For example, a client that claims conformance to + HTTP/1.1 but fails to recognize the features required of HTTP/1.1 + recipients will fail to interoperate with servers that adjust their + responses in accordance with those claims. + Features that reflect user choices, such as content negotiation and + user-selected extensions, can impact application behavior beyond the + protocol stream; sending protocol elements that inaccurately reflect a + user's choices will confuse the user and inhibit choice. +</t> +<t> + When an implementation fails semantic conformance, recipients of that + implementation's messages will eventually develop workarounds to adjust + their behavior accordingly. A recipient <bcp14>MAY</bcp14> employ such workarounds while + remaining conformant to this protocol if the workarounds are limited to the + implementations at fault. For example, servers often scan portions of the + User-Agent field value, and user agents often scan the Server field value, + to adjust their own behavior with respect to known bugs or poorly chosen + defaults. +</t> +</section> + +<section title="Length Requirements" anchor="length.requirements"> +<t> + A recipient <bcp14>SHOULD</bcp14> parse a received protocol element defensively, with + only marginal expectations that the element will conform to its ABNF + grammar and fit within a reasonable buffer size. +</t> +<t> + HTTP does not have specific length limitations for many of its protocol + elements because the lengths that might be appropriate will vary widely, + depending on the deployment context and purpose of the implementation. + Hence, interoperability between senders and recipients depends on shared + expectations regarding what is a reasonable length for each protocol + element. Furthermore, what is commonly understood to be a reasonable length + for some protocol elements has changed over the course of the past three + decades of HTTP use and is expected to continue changing in the future. +</t> +<t> + At a minimum, a recipient <bcp14>MUST</bcp14> be able to parse and process protocol + element lengths that are at least as long as the values that it generates + for those same protocol elements in other messages. For example, an origin + server that publishes very long URI references to its own resources needs + to be able to parse and process those same references when received as a + target URI. 
+</t> +<t> + Many received protocol elements are only parsed to the extent necessary to + identify and forward that element downstream. For example, an intermediary + might parse a received field into its field name and field value components, + but then forward the field without further parsing inside the field value. +</t> +</section> + +<section title="Error Handling" anchor="error.handling"> +<t> + A recipient <bcp14>MUST</bcp14> interpret a received protocol element according to the + semantics defined for it by this specification, including extensions to + this specification, unless the recipient has determined (through experience + or configuration) that the sender incorrectly implements what is implied by + those semantics. + For example, an origin server might disregard the contents of a received + <x:ref>Accept-Encoding</x:ref> header field if inspection of the + <x:ref>User-Agent</x:ref> header field indicates a specific implementation + version that is known to fail on receipt of certain content codings. +</t> +<t> + Unless noted otherwise, a recipient <bcp14>MAY</bcp14> attempt to recover a usable + protocol element from an invalid construct. HTTP does not define + specific error handling mechanisms except when they have a direct impact + on security, since different applications of the protocol require + different error handling strategies. For example, a Web browser might + wish to transparently recover from a response where the + <x:ref>Location</x:ref> header field doesn't parse according to the ABNF, + whereas a systems control client might consider any form of error recovery + to be dangerous. +</t> +<t> + Some requests can be automatically retried by a client in the event of + an underlying connection failure, as described in + <xref target="idempotent.methods"/>. +</t> +</section> + +<section title="Protocol Version" anchor="protocol.version"> +<t> + HTTP's version number consists of two decimal digits separated by a "." + (period or decimal point). The first digit (major version) indicates the + messaging syntax, whereas the second digit (minor version) + indicates the highest minor version within that major version to which the + sender is conformant (able to understand for future communication). +</t> +<t> + While HTTP's core semantics don't change between protocol versions, their + expression "on the wire" can change, and so the + HTTP version number changes when incompatible changes are made to the wire + format. Additionally, HTTP allows incremental, backwards-compatible + changes to be made to the protocol without changing its version through + the use of defined extension points (<xref target="extending"/>). +</t> +<t> + The protocol version as a whole indicates the sender's conformance with + the set of requirements laid out in that version's corresponding + specification(s). + For example, the version "HTTP/1.1" is defined by the combined + specifications of this document, "HTTP Caching" <xref target="CACHING"/>, + and "HTTP/1.1" <xref target="HTTP11"/>. +</t> +<t> + HTTP's major version number is incremented when an incompatible message + syntax is introduced. The minor number is incremented when changes made to + the protocol have the effect of adding to the message semantics or + implying additional capabilities of the sender. 
+</t> +<t> + The minor version advertises the sender's communication capabilities even + when the sender is only using a backwards-compatible subset of the + protocol, thereby letting the recipient know that more advanced features + can be used in response (by servers) or in future requests (by clients). +</t> +<t> + When a major version of HTTP does not define any minor versions, the minor + version "0" is implied. The "0" is used when referring to that protocol + within elements that require a minor version identifier. +</t> +</section> +</section> + +<section title="Terminology and Core Concepts" anchor="terminology"> +<t> + HTTP was created for the World Wide Web (WWW) architecture + and has evolved over time to support the scalability needs of a worldwide + hypertext system. Much of that architecture is reflected in the terminology + used to define HTTP. +</t> + +<section title="Resources" anchor="resources"> + <iref primary="true" item="resource"/> + <x:anchor-alias value="resource"/> +<t> + The target of an HTTP request is called a <x:dfn>resource</x:dfn>. + HTTP does not limit the nature of a resource; it merely + defines an interface that might be used to interact with resources. + Most resources are identified by a Uniform Resource Identifier (URI), as + described in <xref target="uri"/>. +</t> +<t> + One design goal of HTTP is to separate resource identification from + request semantics, which is made possible by vesting the request + semantics in the request method (<xref target="methods"/>) and a few + request-modifying header fields. + A resource cannot treat a request in a manner inconsistent with the + semantics of the method of the request. For example, though the URI of a + resource might imply semantics that are not safe, a client can expect the + resource to avoid actions that are unsafe when processing a request with a + safe method (see <xref target="safe.methods"/>). +</t> +<t> + HTTP relies upon the Uniform Resource Identifier (URI) + standard <xref target="URI"/> to indicate the target resource + (<xref target="target.resource"/>) and relationships between resources. +</t> +</section> + +<section title="Representations" anchor="representations"> +<iref primary="true" item="representation"/> +<x:anchor-alias value="representation"/> +<t> + A <x:dfn>representation</x:dfn> is information + that is intended to reflect a past, current, or desired state of a given + resource, in a format that can be readily communicated via the protocol. + A representation consists of a set of representation metadata and a + potentially unbounded stream of representation data + (<xref target="representation.data.and.metadata"/>). +</t> +<t> + HTTP allows "information hiding" behind its uniform interface by defining + communication with respect to a transferable representation of the resource + state, rather than transferring the resource itself. This allows the + resource identified by a URI to be anything, including temporal functions + like "the current weather in Laguna Beach", while potentially providing + information that represents that resource at the time a message is + generated <xref target="REST"/>. +</t> +<t> + The uniform interface is similar to a window through which one can observe + and act upon a thing only through the communication of messages to an + independent actor on the other side. A shared abstraction is needed to + represent ("take the place of") the current or desired state of that thing + in our communications. 
When a representation is hypertext, it can provide + both a representation of the resource state and processing instructions + that help guide the recipient's future interactions. +</t> +<t anchor="selected.representation"> + <x:anchor-alias value="selected representation"/> + <iref primary="true" item="selected representation"/> + A <x:ref>target resource</x:ref> might be provided with, or be capable of + generating, multiple representations that are each intended to reflect the + resource's current state. An algorithm, usually based on + <x:ref>content negotiation</x:ref> (<xref target="content.negotiation"/>), + would be used to select one of those representations as being most + applicable to a given request. + This <x:dfn>selected representation</x:dfn> provides the data and metadata + for evaluating conditional requests (<xref target="conditional.requests"/>) + and constructing the content for <x:ref>200 (OK)</x:ref>, + <x:ref>206 (Partial Content)</x:ref>, and + <x:ref>304 (Not Modified)</x:ref> responses to GET (<xref target="GET"/>). +</t> +</section> + +<section title="Connections, Clients, and Servers" anchor="connections"> +<iref primary="true" item="client"/> +<iref primary="true" item="server"/> +<iref primary="true" item="connection"/> +<x:anchor-alias value="connection"/> +<t> + HTTP is a client/server protocol that operates over a reliable + transport- or session-layer <x:dfn>connection</x:dfn>. +</t> +<t> + An HTTP <x:dfn>client</x:dfn> is a program that establishes a connection + to a server for the purpose of sending one or more HTTP requests. + An HTTP <x:dfn>server</x:dfn> is a program that accepts connections + in order to service HTTP requests by sending HTTP responses. +</t> +<t> + The terms client and server refer only to the roles that + these programs perform for a particular connection. The same program + might act as a client on some connections and a server on others. +</t> +<t> + HTTP is defined as a stateless protocol, meaning that each request message's semantics + can be understood in isolation, and that the relationship between connections + and messages on them has no impact on the interpretation of those messages. + For example, a CONNECT request (<xref target="CONNECT"/>) or a request with + the Upgrade header field (<xref target="field.upgrade"/>) can occur at any time, + not just in the first message on a connection. Many implementations depend on + HTTP's stateless design in order to reuse proxied connections or dynamically + load balance requests across multiple servers. +</t> +<t> + As a result, a server <bcp14>MUST NOT</bcp14> + assume that two requests on the same connection are from the same user + agent unless the connection is secured and specific to that agent. + Some non-standard HTTP extensions (e.g., <xref target="RFC4559"/>) have + been known to violate this requirement, resulting in security and + interoperability problems. +</t> +</section> + +<section title="Messages" anchor="messages"> +<iref primary="true" item="messages"/> +<iref item="message"/> +<iref primary="true" item="sender"/> +<iref primary="true" item="recipient"/> +<iref primary="true" item="request"/> +<iref primary="true" item="response"/> +<t> + HTTP is a stateless request/response protocol for exchanging + <x:dfn>messages</x:dfn> across a <x:ref>connection</x:ref>. + The terms <x:dfn>sender</x:dfn> and <x:dfn>recipient</x:dfn> refer to + any implementation that sends or receives a given message, respectively. 
+</t> +<t> + A client sends requests to a server in the form of a <x:dfn>request</x:dfn> + message with a method (<xref target="methods"/>) and request target + (<xref target="target.resource"/>). The request might also contain + header fields (<xref target="header.fields"/>) for request modifiers, + client information, and representation metadata, + content (<xref target="content"/>) intended for processing + in accordance with the method, and + trailer fields (<xref target="trailer.fields"/>) to communicate information + collected while sending the content. +</t> +<t> + A server responds to a client's request by sending one or more + <x:dfn>response</x:dfn> messages, each including a status + code (<xref target="status.codes"/>). The response might also contain + header fields for server information, resource metadata, and representation + metadata, content to be interpreted in accordance with the status + code, and trailer fields to communicate information + collected while sending the content. +</t> +</section> + +<section title="User Agents" anchor="user.agent"> +<iref primary="true" item="user agent"/> +<iref primary="true" item="browser"/> +<iref primary="true" item="spider"/> +<t> + The term <x:dfn>user agent</x:dfn> refers to any of the various + client programs that initiate a request. +</t> +<t> + The most familiar form of user agent is the general-purpose Web browser, but + that's only a small percentage of implementations. Other common user agents + include spiders (web-traversing robots), command-line tools, billboard + screens, household appliances, scales, light bulbs, firmware update scripts, + mobile apps, and communication devices in a multitude of shapes and sizes. +</t> +<t> + Being a user agent does not imply that there is a human user directly + interacting with the software agent at the time of a request. In many + cases, a user agent is installed or configured to run in the background + and save its results for later inspection (or save only a subset of those + results that might be interesting or erroneous). Spiders, for example, are + typically given a start URI and configured to follow certain behavior while + crawling the Web as a hypertext graph. +</t> +<t> + Many user agents cannot, or choose not to, + make interactive suggestions to their user or provide adequate warning for + security or privacy concerns. In the few cases where this + specification requires reporting of errors to the user, it is acceptable + for such reporting to only be observable in an error console or log file. + Likewise, requirements that an automated action be confirmed by the user + before proceeding might be met via advance configuration choices, + run-time options, or simple avoidance of the unsafe action; confirmation + does not imply any specific user interface or interruption of normal + processing if the user has already made that choice. +</t> +</section> + +<section title="Origin Server" anchor="origin.server"> +<iref primary="true" item="origin server"/> +<t> + The term <x:dfn>origin server</x:dfn> refers to a program that can + originate authoritative responses for a given target resource. +</t> +<t> + The most familiar form of origin server are large public websites. + However, like user agents being equated with browsers, it is easy to be + misled into thinking that all origin servers are alike. 
+ Common origin servers also include home automation units, configurable + networking components, office machines, autonomous robots, news feeds, + traffic cameras, real-time ad selectors, and video-on-demand platforms. +</t> +<t> + Most HTTP communication consists of a retrieval request (GET) for + a representation of some resource identified by a URI. In the + simplest case, this might be accomplished via a single bidirectional + connection (===) between the user agent (UA) and the origin server (O). +</t> +<figure><artwork type="drawing"> + request &gt; + <x:highlight>UA</x:highlight> ======================================= <x:highlight>O</x:highlight> + &lt; response +</artwork></figure> +</section> + +<section title="Intermediaries" anchor="intermediaries"> +<iref primary="true" item="intermediary"/> +<t> + HTTP enables the use of intermediaries to satisfy requests through + a chain of connections. There are three common forms of HTTP + <x:dfn>intermediary</x:dfn>: proxy, gateway, and tunnel. In some cases, + a single intermediary might act as an origin server, proxy, gateway, + or tunnel, switching behavior based on the nature of each request. +</t> +<figure><artwork type="drawing"> + &gt; &gt; &gt; &gt; + <x:highlight>UA</x:highlight> =========== <x:highlight>A</x:highlight> =========== <x:highlight>B</x:highlight> =========== <x:highlight>C</x:highlight> =========== <x:highlight>O</x:highlight> + &lt; &lt; &lt; &lt; +</artwork></figure> +<t> + The figure above shows three intermediaries (A, B, and C) between the + user agent and origin server. A request or response message that + travels the whole chain will pass through four separate connections. + Some HTTP communication options + might apply only to the connection with the nearest, non-tunnel + neighbor, only to the endpoints of the chain, or to all connections + along the chain. Although the diagram is linear, each participant might + be engaged in multiple, simultaneous communications. For example, B + might be receiving requests from many clients other than A, and/or + forwarding requests to servers other than C, at the same time that it + is handling A's request. Likewise, later requests might be sent through a + different path of connections, often based on dynamic configuration for + load balancing. +</t> +<t> +<iref primary="true" item="upstream"/><iref primary="true" item="downstream"/> +<iref primary="true" item="inbound"/><iref primary="true" item="outbound"/> + The terms <x:dfn>upstream</x:dfn> and <x:dfn>downstream</x:dfn> are + used to describe directional requirements in relation to the message flow: + all messages flow from upstream to downstream. + The terms <x:dfn>inbound</x:dfn> and <x:dfn>outbound</x:dfn> are used to + describe directional requirements in relation to the request route: + inbound means "toward the origin server", whereas + outbound means "toward the user agent". +</t> +<t><iref primary="true" item="proxy"/> + A <x:dfn>proxy</x:dfn> is a message-forwarding agent that is chosen by the + client, usually via local configuration rules, to receive requests + for some type(s) of absolute URI and attempt to satisfy those + requests via translation through the HTTP interface. Some translations + are minimal, such as for proxy requests for "http" URIs, whereas + other requests might require translation to and from entirely different + application-level protocols. 
Proxies are often used to group an + organization's HTTP requests through a common intermediary for the + sake of security services, annotation services, or shared caching. Some + proxies are designed to apply transformations to selected messages or + content while they are being forwarded, as described in + <xref target="message.transformations"/>. +</t> +<t><iref primary="true" item="gateway"/><iref primary="true" item="reverse proxy"/> +<iref primary="true" item="accelerator"/> + A <x:dfn>gateway</x:dfn> (a.k.a. <x:dfn>reverse proxy</x:dfn>) is an + intermediary that acts as an origin server for the outbound connection but + translates received requests and forwards them inbound to another server or + servers. Gateways are often used to encapsulate legacy or untrusted + information services, to improve server performance through + <x:dfn>accelerator</x:dfn> caching, and to enable partitioning or load + balancing of HTTP services across multiple machines. +</t> +<t> + All HTTP requirements applicable to an origin server + also apply to the outbound communication of a gateway. + A gateway communicates with inbound servers using any protocol that + it desires, including private extensions to HTTP that are outside + the scope of this specification. However, an HTTP-to-HTTP gateway + that wishes to interoperate with third-party HTTP servers needs to conform + to user agent requirements on the gateway's inbound connection. +</t> +<t><iref primary="true" item="tunnel"/> + A <x:dfn>tunnel</x:dfn> acts as a blind relay between two connections + without changing the messages. Once active, a tunnel is not + considered a party to the HTTP communication, though the tunnel might + have been initiated by an HTTP request. A tunnel ceases to exist when + both ends of the relayed connection are closed. Tunnels are used to + extend a virtual connection through an intermediary, such as when + Transport Layer Security (TLS, <xref target="TLS13"/>) is used to + establish confidential communication through a shared firewall proxy. +</t> +<t> + The above categories for intermediary only consider those acting as + participants in the HTTP communication. There are also intermediaries + that can act on lower layers of the network protocol stack, filtering or + redirecting HTTP traffic without the knowledge or permission of message + senders. Network intermediaries are indistinguishable (at a protocol level) + from an on-path attacker, often introducing security flaws or + interoperability problems due to mistakenly violating HTTP semantics. +</t> +<t><iref primary="true" item="interception proxy"/> +<iref primary="true" item="transparent proxy"/> + For example, an + <x:dfn>interception proxy</x:dfn> <xref target="RFC3040"/> (also commonly + known as a <x:dfn>transparent proxy</x:dfn> <xref target="RFC1919"/>) + differs from an HTTP proxy because it is not chosen by the client. + Instead, an interception proxy filters or redirects outgoing TCP port 80 + packets (and occasionally other common port traffic). + Interception proxies are commonly found on public network access points, + as a means of enforcing account subscription prior to allowing use of + non-local Internet services, and within corporate firewalls to enforce + network usage policies. +</t> +</section> + +<section title="Caches" anchor="caches"> +<iref primary="true" item="cache"/> +<t> + A <x:dfn>cache</x:dfn> is a local store of previous response messages and the + subsystem that controls its message storage, retrieval, and deletion. 
+ A cache stores cacheable responses in order to reduce the response + time and network bandwidth consumption on future, equivalent + requests. Any client or server <bcp14>MAY</bcp14> employ a cache, though a cache + cannot be used while acting as a tunnel. +</t> +<t> + The effect of a cache is that the request/response chain is shortened + if one of the participants along the chain has a cached response + applicable to that request. The following illustrates the resulting + chain if B has a cached copy of an earlier response from O (via C) + for a request that has not been cached by UA or A. +</t> +<figure><artwork type="drawing"> + &gt; &gt; + <x:highlight>UA</x:highlight> =========== <x:highlight>A</x:highlight> =========== <x:highlight>B</x:highlight> - - - - - - <x:highlight>C</x:highlight> - - - - - - <x:highlight>O</x:highlight> + &lt; &lt; +</artwork></figure> +<t><iref primary="true" item="cacheable"/> + A response is <x:dfn>cacheable</x:dfn> if a cache is allowed to store a copy of + the response message for use in answering subsequent requests. + Even when a response is cacheable, there might be additional + constraints placed by the client or by the origin server on when + that cached response can be used for a particular request. HTTP + requirements for cache behavior and cacheable responses are + defined in <xref target="CACHING"/>. +</t> +<t> + There is a wide variety of architectures and configurations + of caches deployed across the World Wide Web and + inside large organizations. These include national hierarchies + of proxy caches to save bandwidth and reduce latency, content delivery + networks that use gateway caching to optimize regional and global distribution of popular sites, + collaborative systems that + broadcast or multicast cache entries, archives of pre-fetched cache + entries for use in off-line or high-latency environments, and so on. +</t> +</section> + +<section title="Example Message Exchange" anchor="example"> +<t> + The following example illustrates a typical HTTP/1.1 message exchange for a + GET request (<xref target="GET"/>) on the URI "http://www.example.com/hello.txt": +</t> +<t> +Client request: +</t> +<sourcecode type="http-message"> +GET /hello.txt HTTP/1.1 +User-Agent: curl/7.64.1 +Host: www.example.com +Accept-Language: en, mi + +</sourcecode> +<t> +Server response: +</t> +<sourcecode type="http-message"> +HTTP/1.1 200 OK +Date: Mon, 27 Jul 2009 12:28:53 GMT +Server: Apache +Last-Modified: Wed, 22 Jul 2009 19:15:56 GMT +ETag: "34aa387-d-1568eb00" +Accept-Ranges: bytes +Content-Length: <x:length-of target="exbody"/> +Vary: Accept-Encoding +Content-Type: text/plain + +<x:span anchor="exbody">Hello World! My content includes a trailing CRLF. +</x:span></sourcecode> +</section> +</section> + +<section title="Identifiers in HTTP" anchor="uri"> + <iref primary="true" item="URI"/> + <iref primary="false" item="resource"/> +<t> + Uniform Resource Identifiers (URIs) <xref target="URI"/> are used + throughout HTTP as the means for identifying resources (<xref target="resources"/>). 
+</t> + +<section title="URI References" anchor="uri.references"> + <iref primary="true" item="URI reference"/> + <x:anchor-alias value="URI-reference"/> + <x:anchor-alias value="absolute-URI"/> + <x:anchor-alias value="relative-part"/> + <x:anchor-alias value="authority"/> + <x:anchor-alias value="uri-host"/> + <x:anchor-alias value="port"/> + <x:anchor-alias value="path"/> + <x:anchor-alias value="path-abempty"/> + <x:anchor-alias value="segment"/> + <x:anchor-alias value="query"/> + <x:anchor-alias value="absolute-path"/> + <x:anchor-alias value="partial-URI"/> +<t> + URI references are used to target requests, indicate redirects, and define + relationships. +</t> +<t> + The definitions of "URI-reference", + "absolute-URI", "relative-part", "authority", "port", "host", + "path-abempty", "segment", and "query" are adopted from the + URI generic syntax. + An "absolute-path" rule is defined for protocol elements that can contain a + non-empty path component. (This rule differs slightly from the path-abempty + rule of RFC 3986, which allows for an empty path, + and path-absolute rule, which does not allow paths that begin with "//".) + A "partial-URI" rule is defined for protocol elements + that can contain a relative URI but not a fragment component. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="URI-reference"><!--exported production--></iref><iref primary="true" item="Grammar" subitem="absolute-URI"/><iref primary="true" item="Grammar" subitem="authority"/><iref primary="true" item="Grammar" subitem="absolute-path"/><iref primary="true" item="Grammar" subitem="port"/><iref primary="true" item="Grammar" subitem="query"/><iref primary="true" item="Grammar" subitem="segment"/><iref primary="true" item="Grammar" subitem="uri-host"/><iref primary="true" item="Grammar" subitem="partial-URI"><!--exported production--></iref> + <x:ref>URI-reference</x:ref> = &lt;URI-reference, see <xref target="URI" x:fmt="," x:sec="4.1"/>&gt; + <x:ref>absolute-URI</x:ref> = &lt;absolute-URI, see <xref target="URI" x:fmt="," x:sec="4.3"/>&gt; + <x:ref>relative-part</x:ref> = &lt;relative-part, see <xref target="URI" x:fmt="," x:sec="4.2"/>&gt; + <x:ref>authority</x:ref> = &lt;authority, see <xref target="URI" x:fmt="," x:sec="3.2"/>&gt; + <x:ref>uri-host</x:ref> = &lt;host, see <xref target="URI" x:fmt="," x:sec="3.2.2"/>&gt; + <x:ref>port</x:ref> = &lt;port, see <xref target="URI" x:fmt="," x:sec="3.2.3"/>&gt; + <x:ref>path-abempty</x:ref> = &lt;path-abempty, see <xref target="URI" x:fmt="," x:sec="3.3"/>&gt; + <x:ref>segment</x:ref> = &lt;segment, see <xref target="URI" x:fmt="," x:sec="3.3"/>&gt; + <x:ref>query</x:ref> = &lt;query, see <xref target="URI" x:fmt="," x:sec="3.4"/>&gt; + + <x:ref>absolute-path</x:ref> = 1*( "/" segment ) + <x:ref>partial-URI</x:ref> = relative-part [ "?" query ] +</sourcecode> +<t> + Each protocol element in HTTP that allows a URI reference will indicate + in its ABNF production whether the element allows any form of reference + (URI-reference), only a URI in absolute form (absolute-URI), only the + path and optional query components (partial-URI), + or some combination of the above. + Unless otherwise indicated, URI references are parsed + relative to the target URI (<xref target="target.resource"/>). +</t> +<t> + It is <bcp14>RECOMMENDED</bcp14> that all senders and recipients support, at a minimum, + URIs with lengths of 8000 octets in protocol elements. 
Note that this + implies some structures and on-wire representations (for example, the + request line in HTTP/1.1) will necessarily be larger in some cases. +</t> +</section> + +<section title="HTTP-Related URI Schemes" anchor="uri.schemes"> +<t> + IANA maintains the registry of URI Schemes <xref target="BCP35"/> at + <eref target="https://www.iana.org/assignments/uri-schemes/"/>. + Although requests might target any URI scheme, the following schemes are + inherent to HTTP servers: +</t> +<table align="left" anchor="uri.scheme.table"> + <thead> + <tr> + <th>URI Scheme</th> + <th>Description</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>http</td> + <td>Hypertext Transfer Protocol</td> + <td><xref target="http.uri" format="counter"/></td> + </tr> + <tr> + <td>https</td> + <td>Hypertext Transfer Protocol Secure</td> + <td><xref target="https.uri" format="counter"/></td> + </tr> + </tbody> +</table> +<t> + Note that the presence of an "http" or "https" URI does not imply that + there is always an HTTP server at the identified origin listening for + connections. Anyone can mint a URI, whether or not a server exists and + whether or not that server currently maps that identifier to a resource. + The delegated nature of registered names and IP addresses creates a + federated namespace whether or not an HTTP server is present. +</t> + +<section title="http URI Scheme" anchor="http.uri"> + <x:anchor-alias value="http-URI"/> + <iref item="http URI scheme" primary="true"/> + <iref item="URI scheme" subitem="http" primary="true"/> +<t> + The "http" URI scheme is hereby defined for minting identifiers within the + hierarchical namespace governed by a potential HTTP origin server + listening for TCP (<xref target="TCP"/>) connections on a given port. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="http-URI"><!--terminal production--></iref> + <x:ref>http-URI</x:ref> = "http" "://" <x:ref>authority</x:ref> <x:ref>path-abempty</x:ref> [ "?" <x:ref>query</x:ref> ] +</sourcecode> +<t> + The origin server for an "http" URI is identified by the + <x:ref>authority</x:ref> component, which includes a host identifier + (<xref target="URI" x:fmt="," x:sec="3.2.2"/>) + and optional port number (<xref target="URI" x:fmt="," x:sec="3.2.3"/>). + If the port subcomponent is empty or not given, TCP port 80 (the + reserved port for WWW services) is the default. + The origin determines who has the right to respond authoritatively to + requests that target the identified resource, as defined in + <xref target="http.origin"/>. +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> generate an "http" URI with an empty host identifier. + A recipient that processes such a URI reference <bcp14>MUST</bcp14> reject it as invalid. +</t> +<t> + The hierarchical path component and optional query component identify the + target resource within that origin server's namespace. +</t> +</section> + +<section title="https URI Scheme" anchor="https.uri"> + <x:anchor-alias value="https-URI"/> + <x:anchor-alias value="secured"/> + <iref item="https URI scheme" primary="true"/> + <iref item="URI scheme" subitem="https" primary="true"/> + <iref item="secured" primary="true"/> +<t> + The "https" URI scheme is hereby defined for minting identifiers within the + hierarchical namespace governed by a potential origin server listening for + TCP connections on a given port and capable of establishing a TLS + (<xref target="TLS13"/>) connection that has been secured for HTTP + communication. 
In this context, <x:dfn>secured</x:dfn> specifically + means that the server has been authenticated as acting on behalf of the + identified authority and all HTTP communication with that server has + confidentiality and integrity protection that is acceptable to both client + and server. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="https-URI"><!--terminal production--></iref> + <x:ref>https-URI</x:ref> = "https" "://" <x:ref>authority</x:ref> <x:ref>path-abempty</x:ref> [ "?" <x:ref>query</x:ref> ] +</sourcecode> +<t> + The origin server for an "https" URI is identified by the + <x:ref>authority</x:ref> component, which includes a host identifier + (<xref target="URI" x:fmt="," x:sec="3.2.2"/>) + and optional port number (<xref target="URI" x:fmt="," x:sec="3.2.3"/>). + If the port subcomponent is empty or not given, TCP port 443 + (the reserved port for HTTP over TLS) is the default. + The origin determines who has the right to respond authoritatively to + requests that target the identified resource, as defined in + <xref target="https.origin"/>. +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> generate an "https" URI with an empty host identifier. + A recipient that processes such a URI reference <bcp14>MUST</bcp14> reject it as invalid. +</t> +<t> + The hierarchical path component and optional query component identify the + target resource within that origin server's namespace. +</t> +<t> + A client <bcp14>MUST</bcp14> ensure that its HTTP requests for an "https" resource are + secured, prior to being communicated, and that it only accepts secured + responses to those requests. Note that the definition of what cryptographic + mechanisms are acceptable to client and server are usually negotiated and + can change over time. +</t> +<t> + Resources made available via the "https" scheme have no shared identity + with the "http" scheme. They are distinct origins with separate namespaces. + However, extensions to HTTP that are defined as applying to all origins with + the same host, such as the Cookie protocol <xref target="COOKIE"/>, + allow information set by one service to impact communication with other + services within a matching group of host domains. Such extensions ought to + be designed with great care to prevent information obtained from a secured + connection being inadvertently exchanged within an unsecured context. +</t> +</section> + +<section title="http(s) Normalization and Comparison" anchor="uri.comparison"> +<t> + URIs with an "http" or "https" scheme are normalized and compared according to the + methods defined in <xref target="URI" x:fmt="of" x:sec="6"/>, using + the defaults described above for each scheme. +</t> +<t> + HTTP does not require the use of a specific method for determining + equivalence. For example, a cache key might be compared as a simple + string, after syntax-based normalization, or after scheme-based + normalization. 
+</t> +<t> + Scheme-based normalization (<xref target="URI" x:fmt="of" x:sec="6.2.3"/>) of "http" and "https" URIs involves the following + additional rules: +</t> +<ul> + <li>If the port is equal to the default port for a scheme, the normal form + is to omit the port subcomponent.</li> + <li>When not being used as the target of an OPTIONS request, an empty path + component is equivalent to an absolute path of "/", so the normal form is + to provide a path of "/" instead.</li> + <li>The scheme and host are case-insensitive and normally provided in + lowercase; all other components are compared in a case-sensitive + manner.</li> + <li>Characters other than those in the "reserved" set are equivalent to + their percent-encoded octets: the normal form is to not encode them (see + Sections <xref target="URI" x:fmt="number" x:sec="2.1"/> and <xref target="URI" x:fmt="number" x:sec="2.2"/> of <xref target="URI"/>).</li> +</ul> +<t> + For example, the following three URIs are equivalent: +</t> +<artwork type="example"> + http://example.com:80/~smith/home.html + http://EXAMPLE.com/%7Esmith/home.html + http://EXAMPLE.com:/%7esmith/home.html +</artwork> +<t> + Two HTTP URIs that are equivalent after normalization (using any method) + can be assumed to identify the same resource, and any HTTP component <bcp14>MAY</bcp14> + perform normalization. As a result, distinct resources <bcp14>SHOULD NOT</bcp14> be + identified by HTTP URIs that are equivalent after normalization (using any + method defined in <xref target="URI" x:fmt="of" x:sec="6.2"/>). +</t> +</section> + +<section title="Deprecation of userinfo in http(s) URIs" anchor="http.userinfo"> +<t> + The URI generic syntax for authority also includes a userinfo subcomponent + (<xref target="URI" x:fmt="," x:sec="3.2.1"/>) for including user + authentication information in the URI. In that subcomponent, the + use of the format "user:password" is deprecated. +</t> +<t> + Some implementations make use of the userinfo component for internal + configuration of authentication information, such as within command + invocation options, configuration files, or bookmark lists, even + though such usage might expose a user identifier or password. +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> generate the userinfo subcomponent (and its "@" + delimiter) when an "http" or "https" URI reference is generated within a + message as a target URI or field value. +</t> +<t> + Before making use of an "http" or "https" URI reference received from an untrusted + source, a recipient <bcp14>SHOULD</bcp14> parse for userinfo and treat its presence as + an error; it is likely being used to obscure the authority for the sake of + phishing attacks. +</t> +</section> + +<section title="http(s) References with Fragment Identifiers" anchor="uri.fragment.identifiers"> + <iref item="Fragment Identifiers"/> +<t> + Fragment identifiers allow for indirect identification + of a secondary resource, independent of the URI scheme, as defined in + <xref target="URI" x:fmt="of" x:sec="3.5"/>. + Some protocol elements that refer to a URI allow inclusion of a fragment, + while others do not. They are distinguished by use of the ABNF rule for + elements where fragment is allowed; otherwise, a specific rule that excludes + fragments is used. 
+</t> +<aside> + <t> + <x:h>Note:</x:h> The fragment identifier component is not part of the scheme + definition for a URI scheme (see <xref target="URI" x:fmt="of" x:sec="4.3"/>), + thus does not appear in the ABNF definitions for the "http" and "https" + URI schemes above. + </t> +</aside> +</section> +</section> + +<section title="Authoritative Access" anchor="authoritative.access"> +<t> + Authoritative access refers to dereferencing a given identifier, + for the sake of access to the identified resource, in a way that the client + believes is authoritative (controlled by the resource owner). The process + for determining whether access is granted is defined by the URI scheme and often uses + data within the URI components, such as the authority component when + the generic syntax is used. However, authoritative access is not limited to + the identified mechanism. +</t> +<t> + <xref target="origin"/> defines the concept of an origin as an aid to + such uses, and the subsequent subsections explain how to establish that a + peer has the authority to represent an origin. +</t> +<t> + See <xref target="establishing.authority"/> for security considerations + related to establishing authority. +</t> + +<section title="URI Origin" anchor="origin"> + <iref primary="true" item="origin"/> + <iref primary="true" item="URI" subitem="origin"/> +<t> + The <x:dfn>origin</x:dfn> for a given URI is the triple of scheme, host, + and port after normalizing the scheme and host to lowercase and + normalizing the port to remove any leading zeros. If port is elided from + the URI, the default port for that scheme is used. For example, the URI +</t> +<artwork type="example"> + https://Example.Com/happy.js +</artwork> +<t> + would have the origin +</t> +<artwork type="example"> + { "https", "example.com", "443" } +</artwork> +<t> + which can also be described as the normalized URI prefix with port always + present: +</t> +<artwork type="example"> + https://example.com:443 +</artwork> +<t> + Each origin defines its own namespace and controls how identifiers + within that namespace are mapped to resources. In turn, how the origin + responds to valid requests, consistently over time, determines the + semantics that users will associate with a URI, and the usefulness of + those semantics is what ultimately transforms these mechanisms into a + resource for users to reference and access in the future. +</t> +<t> + Two origins are distinct if they differ in scheme, host, or port. Even + when it can be verified that the same entity controls two distinct origins, + the two namespaces under those origins are distinct unless explicitly + aliased by a server authoritative for that origin. +</t> +<t> + Origin is also used within HTML and related Web protocols, beyond the + scope of this document, as described in <xref target="RFC6454"/>. +</t> +</section> + +<section title="http Origins" anchor="http.origin"> +<t> + Although HTTP is independent of the transport protocol, the "http" scheme + (<xref target="http.uri"/>) is specific to associating authority with + whomever controls the origin + server listening for TCP connections on the indicated port of whatever + host is identified within the authority component. This is a very weak + sense of authority because it depends on both client-specific name + resolution mechanisms and communication that might not be secured from + an on-path attacker. 
Nevertheless, it is a sufficient minimum for + binding "http" identifiers to an origin server for consistent resolution + within a trusted environment. +</t> +<t> + If the host identifier is provided as an IP address, the origin server is + the listener (if any) on the indicated TCP port at that IP address. + If host is a registered name, the registered name is an indirect identifier + for use with a name resolution service, such as DNS, to find an address for + an appropriate origin server. +</t> +<t> + When an "http" URI is used within a context that calls for access to the + indicated resource, a client <bcp14>MAY</bcp14> attempt access by resolving the host + identifier to an IP address, establishing a TCP connection to that + address on the indicated port, and sending over that connection an HTTP + request message containing a request target that matches the client's + target URI (<xref target="target.resource"/>). +</t> +<t> + If the server responds to such a request with a non-interim HTTP response + message, as described in <xref target="status.codes"/>, then that response + is considered an authoritative answer to the client's request. +</t> +<t> + Note, however, that the above is not the only means for obtaining an + authoritative response, nor does it imply that an authoritative response + is always necessary (see <xref target="CACHING"/>). + For example, the Alt-Svc header field <xref target="ALTSVC"/> allows an + origin server to identify other services that are also authoritative for + that origin. Access to "http" identified resources might also be provided + by protocols outside the scope of this document. +</t> +</section> + +<section title="https Origins" anchor="https.origin"> +<t> + The "https" scheme (<xref target="https.uri"/>) associates authority based + on the ability of a server to use the private key corresponding to a + certificate that the client considers to be trustworthy for the identified + origin server. The client usually relies upon a chain of trust, conveyed + from some prearranged or configured trust anchor, to deem a certificate + trustworthy (<xref target="https.verify"/>). +</t> +<t> + In HTTP/1.1 and earlier, a client will only attribute authority to a server + when they are communicating over a successfully established and secured + connection specifically to that URI origin's host. The connection + establishment and certificate verification are used as proof of authority. +</t> +<t> + In HTTP/2 and HTTP/3, a client will attribute authority to a server when + they are communicating over a successfully established and secured + connection if the URI origin's host matches any of the hosts present in the + server's certificate and the client believes that it could open a connection + to that host for that URI. In practice, a client will make a DNS query to + check that the origin's host contains the same server IP address as the + established connection. This restriction can be removed by the origin server + sending an equivalent ORIGIN frame <xref target="RFC8336"/>. +</t> +<t> + The request target's host and port value are passed within each HTTP + request, identifying the origin and distinguishing it from other namespaces + that might be controlled by the same server (<xref target="field.host"/>). 
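+</t>
+<t>
+  As a non-normative illustration (the configured origin set, the
+  function names, and the use of Python are assumptions made for the
+  example, not requirements of this specification), a server that is
+  only prepared to answer for a fixed set of "https" origins might
+  screen the authority carried by each request and reject the rest as
+  misdirected (for example, with a 421 (Misdirected Request) status
+  code):
+</t>
+<sourcecode type="python">
+# Origins this server is configured (and certified) to answer for.
+SERVED_ORIGINS = {("https", "example.com", 443),
+                  ("https", "www.example.com", 443)}
+
+def origin_of(scheme, authority):
+    # Simplified: ignores IPv6 literals and malformed authorities.
+    host, sep, port = authority.partition(":")
+    if not sep:
+        port = "443" if scheme == "https" else "80"
+    return (scheme, host.lower(), int(port))
+
+def reject_if_misdirected(scheme, authority):
+    # Returns 421 for requests this server should not answer for.
+    if origin_of(scheme, authority) in SERVED_ORIGINS:
+        return None
+    return 421
+</sourcecode>
+<t>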
+ It is the origin's responsibility to ensure that any services provided with + control over its certificate's private key are equally responsible for + managing the corresponding "https" namespaces or at least prepared to + reject requests that appear to have been misdirected + (<xref target="routing.reject"/>). +</t> +<t> + An origin server might be unwilling to process requests for certain target + URIs even when they have the authority to do so. For example, when a host + operates distinct services on different ports (e.g., 443 and 8000), checking + the target URI at the origin server is necessary (even after the connection + has been secured) because a network attacker might cause connections for one + port to be received at some other port. Failing to check the target URI + might allow such an attacker to replace a response to one target URI + (e.g., "https://example.com/foo") with a seemingly authoritative response + from the other port (e.g., "https://example.com:8000/foo"). +</t> +<t> + Note that the "https" scheme does not rely on TCP and the connected port + number for associating authority, since both are outside the secured + communication and thus cannot be trusted as definitive. Hence, the HTTP + communication might take place over any channel that has been secured, + as defined in <xref target="https.uri"/>, including protocols that don't + use TCP. +</t> +<t> + When an "https" URI is used within a context that calls for access to + the indicated resource, a client <bcp14>MAY</bcp14> attempt access by resolving the + host identifier to an IP address, establishing a TCP connection to that + address on the indicated port, securing the connection end-to-end by + successfully initiating TLS over TCP with confidentiality and integrity + protection, and sending over that connection an HTTP request message + containing a request target that matches the client's target URI + (<xref target="target.resource"/>). +</t> +<t> + If the server responds to such a request with a non-interim HTTP response + message, as described in <xref target="status.codes"/>, then that response + is considered an authoritative answer to the client's request. +</t> +<t> + Note, however, that the above is not the only means for obtaining an + authoritative response, nor does it imply that an authoritative response + is always necessary (see <xref target="CACHING"/>). +</t> +</section> + +<section title="https Certificate Verification" anchor="https.verify"> +<t> + To establish a <x:ref>secured</x:ref> connection to dereference a URI, + a client <bcp14>MUST</bcp14> verify that the service's identity is an acceptable + match for the URI's origin server. Certificate verification is used to + prevent server impersonation by an on-path attacker or by an attacker + that controls name resolution. This process requires that a client be + configured with a set of trust anchors. +</t> +<t> + In general, a client <bcp14>MUST</bcp14> verify the service identity using the + verification process defined in + <xref target="RFC6125" x:fmt="of" x:sec="6"/>. The client <bcp14>MUST</bcp14> construct + a reference identity from the service's host: if the host is a literal IP address + (<xref target="https.ip-id"/>), the reference identity is an IP-ID, otherwise + the host is a name and the reference identity is a DNS-ID. +</t> +<t> + A reference identity of type CN-ID <bcp14>MUST NOT</bcp14> be used by clients. 
As noted + in <xref target="RFC6125" x:fmt="of" x:sec="6.2.1"/>, a reference + identity of type CN-ID might be used by older clients. +</t> +<t> + A client might be specially configured to accept an alternative form of + server identity verification. For example, a client might be connecting + to a server whose address and hostname are dynamic, with an expectation that + the service will present a specific certificate (or a certificate matching + some externally defined reference identity) rather than one matching the + target URI's origin. +</t> +<t> + In special cases, it might be appropriate for + a client to simply ignore the server's identity, but it must be + understood that this leaves a connection open to active attack. +</t> +<t> + If the certificate is not valid for the target URI's origin, + a user agent <bcp14>MUST</bcp14> either obtain confirmation from the user + before proceeding (see <xref target="user.agent"/>) or + terminate the connection with a bad certificate error. Automated + clients <bcp14>MUST</bcp14> log the error to an appropriate audit log (if available) + and <bcp14>SHOULD</bcp14> terminate the connection (with a bad certificate error). + Automated clients <bcp14>MAY</bcp14> provide a configuration setting that disables + this check, but <bcp14>MUST</bcp14> provide a setting which enables it. +</t> +</section> +<section title="IP-ID Reference Identity" anchor="https.ip-id"> +<t> + A server that is identified using an IP address literal in the "host" field + of an "https" URI has a reference identity of type IP-ID. An IP version 4 + address uses the "IPv4address" ABNF rule, and an IP version 6 address uses + the "IP-literal" production with the "IPv6address" option; see + <xref target="URI" x:fmt="of" x:sec="3.2.2"/>. A reference identity of + IP-ID contains the decoded bytes of the IP address. +</t> +<t> + An IP version 4 address is 4 octets, and an IP version 6 address is 16 octets. + Use of IP-ID is not defined for any other IP version. The iPAddress + choice in the certificate subjectAltName extension does not explicitly + include the IP version and so relies on the length of the address to + distinguish versions; see + <xref target="RFC5280" x:fmt="of" x:sec="4.2.1.6"/>. +</t> +<t> + A reference identity of type IP-ID matches if the address is identical to + an iPAddress value of the subjectAltName extension of the certificate. +</t> +</section> +</section> +</section> + +<section title="Fields" anchor="fields"> +<iref primary="true" item="field"/> +<t> + HTTP uses <x:dfn>fields</x:dfn> to provide data in the form of extensible + name/value pairs with a registered key namespace. Fields are sent and + received within the header and trailer sections of messages + (<xref target="message.abstraction"/>). +</t> + +<section title="Field Names" anchor="fields.names"> + <x:anchor-alias value="header.names"/> + <x:anchor-alias value="field-name"/> +<t> + A field name labels the corresponding field value as having the + semantics defined by that name. For example, the <x:ref>Date</x:ref> + header field is defined in <xref target="field.date"/> as containing the + origination timestamp for the message in which it appears. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="field-name"/> + <x:ref>field-name</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + Field names are case-insensitive and ought to be registered within the + "Hypertext Transfer Protocol (HTTP) Field Name Registry"; see <xref target="fields.registry"/>. 
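+</t>
+<t>
+  Because field names are case-insensitive, a recipient can fold
+  names to one canonical case when indexing a field section.  The
+  following non-normative sketch (Python; the class and method names
+  are illustrative only) keys stored field lines by the lowercased
+  name:
+</t>
+<sourcecode type="python">
+class FieldSection:
+    def __init__(self):
+        self._lines = {}   # maps lowercased name to a list of values
+
+    def add(self, name, value):
+        self._lines.setdefault(name.lower(), []).append(value)
+
+    def values(self, name):
+        return self._lines.get(name.lower(), [])
+
+section = FieldSection()
+section.add("Content-Type", "text/html")
+assert section.values("content-type") == ["text/html"]
+</sourcecode>
+<t>
+  Whatever internal form is chosen, the spelling used on the wire has
+  no effect on the field's semantics.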
+</t> +<t> + The interpretation of a field does not change between minor + versions of the same major HTTP version, though the default behavior of a + recipient in the absence of such a field can change. Unless specified + otherwise, fields are defined for all versions of HTTP. + In particular, the <x:ref>Host</x:ref> and <x:ref>Connection</x:ref> + fields ought to be recognized by all HTTP implementations + whether or not they advertise conformance with HTTP/1.1. +</t> +<t> + New fields can be introduced without changing the protocol version if + their defined semantics allow them to be safely ignored by recipients + that do not recognize them; see <xref target="fields.extensibility"/>. +</t> +<t> + A proxy <bcp14>MUST</bcp14> forward unrecognized header fields unless the + field name is listed in the <x:ref>Connection</x:ref> header field + (<xref target="field.connection"/>) or the proxy is specifically + configured to block, or otherwise transform, such fields. + Other recipients <bcp14>SHOULD</bcp14> ignore unrecognized header and trailer fields. + Adhering to these requirements allows HTTP's functionality to be extended + without updating or removing deployed intermediaries. +</t> +</section> + +<section title="Field Lines and Combined Field Value" anchor="field.lines"> +<t> + <iref item="field line"/> + <iref item="field name"/> + <iref item="field line value"/> + Field sections are composed of any number of <x:dfn>field lines</x:dfn>, + each with a <x:dfn>field name</x:dfn> (see <xref target="fields.names"/>) + identifying the field, and a <x:dfn>field line value</x:dfn> that conveys + data for that instance of the field. +</t> +<t> + <iref item="field value"/> + When a field name is only present once in a section, the combined + <x:dfn>field value</x:dfn> for that field consists of the corresponding + field line value. + When a field name is repeated within a section, its combined field value + consists of the list of corresponding field line values within that section, + concatenated in order, with each field line value separated by a comma. +</t> +<t> + For example, this section: +</t> +<sourcecode type="http-message"> +Example-Field: Foo, Bar +Example-Field: Baz +</sourcecode> +<t> + contains two field lines, both with the field name "Example-Field". The + first field line has a field line value of "Foo, Bar", while the second + field line value is "Baz". The field value for "Example-Field" is the list + "Foo, Bar, Baz". +</t> +</section> + +<section title="Field Order" anchor="fields.order"> + <x:anchor-alias value="header.order"/> +<t> + A recipient <bcp14>MAY</bcp14> combine multiple field lines within a field section that + have the same field name + into one field line, without changing the semantics of the message, by + appending each subsequent field line value to the initial field line value + in order, separated by a comma (",") and optional whitespace + (<x:ref>OWS</x:ref>, defined in <xref target="whitespace"/>). + For consistency, use comma SP. +</t> +<t> + The order in which field lines with the + same name are received is therefore significant to the interpretation of + the field value; a proxy <bcp14>MUST NOT</bcp14> change the order of these field line + values when forwarding a message. 
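+</t>
+<t>
+  The following non-normative sketch (Python; the function name is
+  illustrative only) applies the combining rule described above to
+  the "Example-Field" section shown earlier, preserving the order in
+  which the field lines were received:
+</t>
+<sourcecode type="python">
+def combined_field_value(field_lines, name):
+    # field_lines is a sequence of (name, value) pairs in the order
+    # they appeared within the section.
+    values = [value for (line_name, value) in field_lines
+              if line_name.lower() == name.lower()]
+    return ", ".join(values) if values else None
+
+lines = [("Example-Field", "Foo, Bar"),
+         ("Example-Field", "Baz")]
+assert combined_field_value(lines, "Example-Field") == "Foo, Bar, Baz"
+</sourcecode>
+<t>
+  Recombining in this way is only safe for fields defined as
+  comma-separated lists; the well-known exception is discussed below.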
+</t> +<t> + This means that, aside from the well-known exception noted below, a sender + <bcp14>MUST NOT</bcp14> generate multiple field lines with the same name in a message + (whether in the headers or trailers) or append a field line when a field + line of the same name already exists in the message, unless that field's + definition allows multiple field line values to be recombined as a + comma-separated list (i.e., at least one alternative of the field's + definition allows a comma-separated list, such as an ABNF rule of + #(values) defined in <xref target="abnf.extension"/>). +</t> +<aside> + <t> + <x:h>Note:</x:h> In practice, the "Set-Cookie" header field (<xref target="COOKIE"/>) + often appears in a response message across multiple field lines and does not + use the list syntax, violating the above requirements on multiple field lines + with the same field name. Since it cannot be combined into a single field + value, recipients ought to handle "Set-Cookie" as a special case while + processing fields. (See Appendix A.2.3 of <xref target="Kri2001"/> for + details.) + </t> +</aside> +<t> + The order in which field lines with differing field names are received in a + section is not significant. However, it is good practice to send header + fields that contain additional control data first, such as + <x:ref>Host</x:ref> on requests and <x:ref>Date</x:ref> on responses, so + that implementations can decide when not to handle a message as early as + possible. +</t> +<t> + A server <bcp14>MUST NOT</bcp14> apply a request to the target resource until it + receives the entire request header section, since later header field lines + might include conditionals, authentication credentials, or deliberately + misleading duplicate header fields that could impact request processing. +</t> +</section> + +<section title="Field Limits" anchor="fields.limits"> + <x:anchor-alias value="header.limits"/> +<t> + HTTP does not place a predefined limit on the length of each field line, field value, + or on the length of a header or trailer section as a whole, as described in + <xref target="conformance"/>. Various ad hoc limitations on individual + lengths are found in practice, often depending on the specific + field's semantics. +</t> +<t> + A server that receives a request header field line, field value, or set of + fields larger than it wishes to process <bcp14>MUST</bcp14> respond with an appropriate + <x:ref>4xx (Client Error)</x:ref> status code. Ignoring such header fields + would increase the server's vulnerability to request smuggling attacks + (<xref target="HTTP11" x:rel="#request.smuggling"/>). +</t> +<t> + A client <bcp14>MAY</bcp14> discard or truncate received field lines that are larger + than the client wishes to process if the field semantics are such that the + dropped value(s) can be safely ignored without changing the + message framing or response semantics. +</t> +</section> + +<section title="Field Values" anchor="fields.values"> + <x:anchor-alias value="header.values"/> + <x:anchor-alias value="field-value"/> + <x:anchor-alias value="field-content"/> + <x:anchor-alias value="field-vchar"/> + <x:anchor-alias value="singleton.field"/> + <x:anchor-alias value="list.field"/> + <x:anchor-alias value="obs-text"/> +<t> + HTTP field values consist of a sequence of characters in a format defined + by the field's grammar. Each field's grammar is usually defined using + ABNF (<xref target="RFC5234"/>). 
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="field-value"/><iref primary="true" item="Grammar" subitem="field-vchar"/><iref primary="true" item="Grammar" subitem="field-content"/><iref primary="true" item="Grammar" subitem="obs-text"/> + <x:ref>field-value</x:ref> = *<x:ref>field-content</x:ref> + <x:ref>field-content</x:ref> = <x:ref>field-vchar</x:ref> + [ 1*( <x:ref>SP</x:ref> / <x:ref>HTAB</x:ref> / <x:ref>field-vchar</x:ref> ) <x:ref>field-vchar</x:ref> ] + <x:ref>field-vchar</x:ref> = <x:ref>VCHAR</x:ref> / <x:ref>obs-text</x:ref> + <x:ref>obs-text</x:ref> = %x80-FF +</sourcecode> +<t> + A field value does not include leading or trailing whitespace. When a + specific version of HTTP allows such whitespace to appear in a message, + a field parsing implementation <bcp14>MUST</bcp14> exclude such whitespace prior to + evaluating the field value. +</t> +<t> + Field values are usually constrained to the range of US-ASCII characters + <xref target="USASCII"/>. + Fields needing a greater range of characters can use an encoding, + such as the one defined in <xref target="RFC8187"/>. + Historically, HTTP allowed field content with text in the ISO-8859-1 + charset <xref target="ISO-8859-1"/>, supporting other charsets only + through use of <xref target="RFC2047"/> encoding. + Specifications for newly defined fields <bcp14>SHOULD</bcp14> limit their values to + visible US-ASCII octets (VCHAR), SP, and HTAB. + A recipient <bcp14>SHOULD</bcp14> treat other allowed octets in field content + (i.e., <x:ref>obs-text</x:ref>) as opaque data. +</t> +<t> + Field values containing CR, LF, or NUL characters are invalid and dangerous, + due to the varying ways that implementations might parse and interpret + those characters; a recipient of CR, LF, or NUL within a field value <bcp14>MUST</bcp14> + either reject the message or replace each of those characters with SP + before further processing or forwarding of that message. Field values + containing other CTL characters are also invalid; however, + recipients <bcp14>MAY</bcp14> retain such characters for the sake of robustness when + they appear within a safe context (e.g., an application-specific quoted + string that will not be processed by any downstream HTTP parser). +</t> +<t><iref item="singleton field"/> + Fields that only anticipate a single member as the field value are + referred to as <x:dfn>singleton fields</x:dfn>. +</t> +<t><iref item="list-based field"/> + Fields that allow multiple members as the field value are referred to as + <x:dfn>list-based fields</x:dfn>. The list operator extension of + <xref target="abnf.extension"/> is used as a common notation for defining + field values that can contain multiple members. +</t> +<t> + Because commas (",") are used as the delimiter between members, they need + to be treated with care if they are allowed as data within a member. This + is true for both list-based and singleton fields, since a singleton field + might be erroneously sent with multiple members and detecting such errors + improves interoperability. Fields that expect to contain a + comma within a member, such as within an <x:ref>HTTP-date</x:ref> or + <x:ref>URI-reference</x:ref> + element, ought to be defined with delimiters around that element to + distinguish any comma within that data from potential list separators. 
+</t> +<t> + For example, a textual date and a URI (either of which might contain a comma) + could be safely carried in list-based field values like these: +</t> +<sourcecode type="http-message"> +Example-URIs: "http://example.com/a.html,foo", + "http://without-a-comma.example.com/" +Example-Dates: "Sat, 04 May 1996", "Wed, 14 Sep 2005" +</sourcecode> +<t> + Note that double-quote delimiters are almost always used with the + quoted-string production (<xref target="quoted.strings"/>); using a different syntax inside double-quotes + will likely cause unnecessary confusion. +</t> +<t> + Many fields (such as <x:ref>Content-Type</x:ref>, defined in + <xref target="field.content-type"/>) use a common syntax for parameters + that allows both unquoted (token) and quoted (quoted-string) syntax for + a parameter value (<xref target="parameter"/>). Use of common syntax + allows recipients to reuse existing parser components. When allowing both + forms, the meaning of a parameter value ought to be the same whether it + was received as a token or a quoted string. +</t> +<aside> + <t> + <x:h>Note:</x:h> For defining field value syntax, this specification uses an ABNF + rule named after the field name to define the allowed grammar for that + field's value (after said value has been extracted from the underlying + messaging syntax and multiple instances combined into a list). + </t> +</aside> +</section> + +<section title="Common Rules for Defining Field Values" anchor="fields.components"> + <x:anchor-alias value="header.components"/> + +<section title="Lists (#rule ABNF Extension)" anchor="abnf.extension"> +<t> + A #rule extension to the ABNF rules of <xref target="RFC5234"/> is used to + improve readability in the definitions of some list-based field values. +</t> +<t> + A construct "#" is defined, similar to "*", for defining comma-delimited + lists of elements. The full form is "&lt;n&gt;#&lt;m&gt;element" indicating + at least &lt;n&gt; and at most &lt;m&gt; elements, each separated by a single + comma (",") and optional whitespace (<x:ref>OWS</x:ref>, + defined in <xref target="whitespace"/>). +</t> + +<section title="Sender Requirements" anchor="abnf.extension.sender"> +<t> + In any production that uses the list construct, a sender <bcp14>MUST NOT</bcp14> + generate empty list elements. In other words, a sender has to generate + lists that satisfy the following syntax: +</t> +<artwork type="example"> + 1#element =&gt; element *( OWS "," OWS element ) +</artwork> +<t> + and: +</t> +<artwork type="example"> + #element =&gt; [ 1#element ] +</artwork> +<t> + and for n &gt;= 1 and m &gt; 1: +</t> +<artwork type="example"> + &lt;n&gt;#&lt;m&gt;element =&gt; element &lt;n-1&gt;*&lt;m-1&gt;( OWS "," OWS element ) +</artwork> +<t> + <xref target="collected.abnf"/> shows the collected ABNF for senders + after the list constructs have been expanded. +</t> +</section> + +<section title="Recipient Requirements" anchor="abnf.extension.recipient"> +<t> + Empty elements do not contribute to the count of elements present. + A recipient <bcp14>MUST</bcp14> parse and ignore + a reasonable number of empty list elements: enough to handle common mistakes + by senders that merge values, but not so much that they could be used as a + denial-of-service mechanism. 
In other words, a recipient <bcp14>MUST</bcp14> accept lists + that satisfy the following syntax: +</t> +<artwork type="example"> + #element =&gt; [ element ] *( OWS "," OWS [ element ] ) +</artwork> +<t> + Note that because of the potential presence of empty list elements, the + RFC 5234 ABNF cannot enforce the cardinality of list elements, and + consequently all cases are mapped as if there was no cardinality specified. +</t> +<t> + For example, given these ABNF productions: +</t> +<artwork type="example"> + example-list = 1#example-list-elmt + example-list-elmt = token ; see <xref target="tokens"/> +</artwork> +<t> + Then the following are valid values for example-list (not including the + double quotes, which are present for delimitation only): +</t> +<artwork type="example"> + "foo,bar" + "foo ,bar," + "foo , ,bar,charlie" +</artwork> +<t> + In contrast, the following values would be invalid, since at least one + non-empty element is required by the example-list production: +</t> +<artwork type="example"> + "" + "," + ", ," +</artwork> +</section> +</section> + +<section title="Tokens" anchor="tokens"> +<t anchor="rule.token.separators"> + <x:anchor-alias value="tchar"/> + <x:anchor-alias value="token"/> + Tokens are short textual identifiers that do not include whitespace or + delimiters. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="token"/><iref primary="true" item="Grammar" subitem="tchar"/> + <x:ref>token</x:ref> = 1*<x:ref>tchar</x:ref> +<!-- + NOTE: the definition of tchar and the prose above about special characters need to match! + --> + <x:ref>tchar</x:ref> = "!" / "#" / "$" / "%" / "&amp;" / "'" / "*" + / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + / <x:ref>DIGIT</x:ref> / <x:ref>ALPHA</x:ref> + ; any <x:ref>VCHAR</x:ref>, except delimiters +</sourcecode> +<t anchor="delimiters"> + <iref item="Delimiters"/> + Many HTTP field values are defined using common syntax + components, separated by whitespace or specific delimiting characters. + Delimiters are chosen from the set of US-ASCII visual characters not + allowed in a <x:ref>token</x:ref> (DQUOTE and "(),/:;&lt;=&gt;?@[\]{}"). +</t> +</section> + +<section title="Whitespace" anchor="whitespace"> + <x:anchor-alias value="BWS"/> + <x:anchor-alias value="OWS"/> + <x:anchor-alias value="RWS"/> +<t> + This specification uses three rules to denote the use of linear + whitespace: OWS (optional whitespace), RWS (required whitespace), and + BWS ("bad" whitespace). +</t> +<t> + The OWS rule is used where zero or more linear whitespace octets might + appear. For protocol elements where optional whitespace is preferred to + improve readability, a sender <bcp14>SHOULD</bcp14> generate the optional whitespace + as a single SP; otherwise, a sender <bcp14>SHOULD NOT</bcp14> generate optional + whitespace except as needed to overwrite invalid or unwanted protocol + elements during in-place message filtering. +</t> +<t> + The RWS rule is used when at least one linear whitespace octet is required + to separate field tokens. A sender <bcp14>SHOULD</bcp14> generate RWS as a single SP. +</t> +<t> + OWS and RWS have the same semantics as a single SP. Any content known to + be defined as OWS or RWS <bcp14>MAY</bcp14> be replaced with a single SP before + interpreting it or forwarding the message downstream. +</t> +<t> + The BWS rule is used where the grammar allows optional whitespace only for + historical reasons. A sender <bcp14>MUST NOT</bcp14> generate BWS in messages. 
+ A recipient <bcp14>MUST</bcp14> parse for such bad whitespace and remove it before + interpreting the protocol element. +</t> +<t> + BWS has no semantics. Any content known to be + defined as BWS <bcp14>MAY</bcp14> be removed before interpreting it or forwarding the + message downstream. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="OWS"/><iref primary="true" item="Grammar" subitem="RWS"/><iref primary="true" item="Grammar" subitem="BWS"/> + <x:ref>OWS</x:ref> = *( <x:ref>SP</x:ref> / <x:ref>HTAB</x:ref> ) + ; optional whitespace + <x:ref>RWS</x:ref> = 1*( <x:ref>SP</x:ref> / <x:ref>HTAB</x:ref> ) + ; required whitespace + <x:ref>BWS</x:ref> = <x:ref>OWS</x:ref> + ; "bad" whitespace +</sourcecode> +</section> + +<section title="Quoted Strings" anchor="quoted.strings"> +<t anchor="rule.quoted-string"> + <x:anchor-alias value="quoted-string"/> + <x:anchor-alias value="qdtext"/> + A string of text is parsed as a single value if it is quoted using + double-quote marks. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="quoted-string"/><iref primary="true" item="Grammar" subitem="qdtext"/> + <x:ref>quoted-string</x:ref> = <x:ref>DQUOTE</x:ref> *( <x:ref>qdtext</x:ref> / <x:ref>quoted-pair</x:ref> ) <x:ref>DQUOTE</x:ref> + <x:ref>qdtext</x:ref> = <x:ref>HTAB</x:ref> / <x:ref>SP</x:ref> / %x21 / %x23-5B / %x5D-7E / <x:ref>obs-text</x:ref> +</sourcecode> +<t anchor="rule.quoted-pair"> + <x:anchor-alias value="quoted-pair"/> + The backslash octet ("\") can be used as a single-octet + quoting mechanism within quoted-string and comment constructs. + Recipients that process the value of a quoted-string <bcp14>MUST</bcp14> handle a + quoted-pair as if it were replaced by the octet following the backslash. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="quoted-pair"/> + <x:ref>quoted-pair</x:ref> = "\" ( <x:ref>HTAB</x:ref> / <x:ref>SP</x:ref> / <x:ref>VCHAR</x:ref> / <x:ref>obs-text</x:ref> ) +</sourcecode> +<t> + A sender <bcp14>SHOULD NOT</bcp14> generate a quoted-pair in a quoted-string except + where necessary to quote DQUOTE and backslash octets occurring within that + string. + A sender <bcp14>SHOULD NOT</bcp14> generate a quoted-pair in a comment except + where necessary to quote parentheses ["(" and ")"] and backslash octets + occurring within that comment. +</t> +</section> + +<section title="Comments" anchor="comments"> +<t anchor="rule.comment"> + <x:anchor-alias value="comment"/> + <x:anchor-alias value="ctext"/> + Comments can be included in some HTTP fields by surrounding + the comment text with parentheses. Comments are only allowed in + fields containing "comment" as part of their field value definition. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="comment"/><iref primary="true" item="Grammar" subitem="ctext"/> + <x:ref>comment</x:ref> = "(" *( <x:ref>ctext</x:ref> / <x:ref>quoted-pair</x:ref> / <x:ref>comment</x:ref> ) ")" + <x:ref>ctext</x:ref> = <x:ref>HTAB</x:ref> / <x:ref>SP</x:ref> / %x21-27 / %x2A-5B / %x5D-7E / <x:ref>obs-text</x:ref> +</sourcecode> +</section> + +<section title="Parameters" anchor="parameter"> +<t anchor="rule.parameter"> + <x:anchor-alias value="parameters"/> + <x:anchor-alias value="parameter-name"/> + <x:anchor-alias value="parameter-value"/> + Parameters are instances of name/value pairs; they are often used in field + values as a common syntax for appending auxiliary information to an item. 
+ Each parameter is usually delimited by an immediately preceding semicolon. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="parameters"/><iref primary="true" item="Grammar" subitem="parameter"/><iref primary="true" item="Grammar" subitem="parameter-name"/><iref primary="true" item="Grammar" subitem="parameter-value"/> + <x:ref>parameters</x:ref> = *( OWS ";" OWS [ <x:ref>parameter</x:ref> ] ) + <x:ref>parameter</x:ref> = <x:ref>parameter-name</x:ref> "=" <x:ref>parameter-value</x:ref> + <x:ref>parameter-name</x:ref> = <x:ref>token</x:ref> + <x:ref>parameter-value</x:ref> = ( <x:ref>token</x:ref> / <x:ref>quoted-string</x:ref> ) +</sourcecode> +<t> + Parameter names are case-insensitive. Parameter values might or might + not be case-sensitive, depending on the semantics of the parameter + name. Examples of parameters and some equivalent forms can be seen in + media types (<xref target="media.type"/>) and the Accept header field + (<xref target="field.accept"/>). +</t> +<t> + A parameter value that matches the <x:ref>token</x:ref> production can be + transmitted either as a token or within a quoted-string. The quoted and + unquoted values are equivalent. +</t> +<aside> + <t> + <x:h>Note:</x:h> Parameters do not allow whitespace (not even "bad" whitespace) + around the "=" character. + </t> +</aside> +</section> + +<section title="Date/Time Formats" anchor="http.date"> + <x:anchor-alias value="HTTP-date"/> + <x:anchor-alias value="clock"/> + <iref primary="true" item="clock"/> +<t> + Prior to 1995, there were three different formats commonly used by servers + to communicate timestamps. For compatibility with old implementations, all + three are defined here. The preferred format is a fixed-length and + single-zone subset of the date and time specification used by the + Internet Message Format <xref target="RFC5322"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="HTTP-date"/> + <x:ref>HTTP-date</x:ref> = <x:ref>IMF-fixdate</x:ref> / <x:ref>obs-date</x:ref> +</sourcecode> +<t> + An example of the preferred format is +</t> +<artwork type="example" x:indent-with=" "> +Sun, 06 Nov 1994 08:49:37 GMT ; IMF-fixdate +</artwork> +<t> + Examples of the two obsolete formats are +</t> +<artwork type="example" x:indent-with=" "> +Sunday, 06-Nov-94 08:49:37 GMT ; obsolete RFC 850 format +Sun Nov 6 08:49:37 1994 ; ANSI C's asctime() format +</artwork> +<t> + A recipient that parses a timestamp value in an HTTP field <bcp14>MUST</bcp14> + accept all three HTTP-date formats. When a sender generates a field + that contains one or more timestamps defined as HTTP-date, + the sender <bcp14>MUST</bcp14> generate those timestamps in the IMF-fixdate format. +</t> +<t> + An HTTP-date value represents time as an instance of Coordinated + Universal Time (UTC). The first two formats indicate UTC by the + three-letter abbreviation for Greenwich Mean Time, "GMT", a predecessor + of the UTC name; values in the asctime format are assumed to be in UTC. +</t> +<t> + A <x:dfn>clock</x:dfn> is an implementation capable of providing a + reasonable approximation of the current instant in UTC. + A clock implementation ought to use NTP (<xref target="RFC5905"/>), + or some similar protocol, to synchronize with UTC. 
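+</t>
+<t>
+  The following non-normative sketch (Python; the helper names are
+  illustrative, and it assumes the POSIX locale so that English day
+  and month names are produced and matched) accepts all three
+  HTTP-date formats while generating only the IMF-fixdate form:
+</t>
+<sourcecode type="python">
+from datetime import datetime, timezone
+
+HTTP_DATE_FORMATS = (
+    "%a, %d %b %Y %H:%M:%S GMT",   # IMF-fixdate (preferred)
+    "%A, %d-%b-%y %H:%M:%S GMT",   # obsolete RFC 850 format
+    "%a %b %d %H:%M:%S %Y",        # ANSI C asctime() format
+)
+
+def parse_http_date(text):
+    for fmt in HTTP_DATE_FORMATS:
+        try:
+            parsed = datetime.strptime(text, fmt)
+            return parsed.replace(tzinfo=timezone.utc)
+        except ValueError:
+            pass
+    raise ValueError("not an HTTP-date: %r" % text)
+
+def format_http_date(moment):
+    return moment.astimezone(timezone.utc).strftime(
+        "%a, %d %b %Y %H:%M:%S GMT")
+
+assert format_http_date(parse_http_date(
+    "Sunday, 06-Nov-94 08:49:37 GMT")) == "Sun, 06 Nov 1994 08:49:37 GMT"
+</sourcecode>
+<t>
+  Note that this sketch defers to the platform's two-digit-year
+  heuristic rather than the 50-year rule given below for the
+  rfc850-date format.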
+</t> +<t anchor="preferred.date.format"> + <x:anchor-alias value="IMF-fixdate"/> + <x:anchor-alias value="time-of-day"/> + <x:anchor-alias value="hour"/> + <x:anchor-alias value="minute"/> + <x:anchor-alias value="second"/> + <x:anchor-alias value="day-name"/> + <x:anchor-alias value="day"/> + <x:anchor-alias value="month"/> + <x:anchor-alias value="year"/> + <x:anchor-alias value="GMT"/> + Preferred format: +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="IMF-fixdate"/><iref primary="true" item="Grammar" subitem="date1"/><iref primary="true" item="Grammar" subitem="time-of-day"/><iref primary="true" item="Grammar" subitem="hour"/><iref primary="true" item="Grammar" subitem="minute"/><iref primary="true" item="Grammar" subitem="second"/><iref primary="true" item="Grammar" subitem="day-name"/><iref primary="true" item="Grammar" subitem="day-name-l"/><iref primary="true" item="Grammar" subitem="day"/><iref primary="true" item="Grammar" subitem="month"/><iref primary="true" item="Grammar" subitem="year"/><iref primary="true" item="Grammar" subitem="GMT"/> + <x:ref>IMF-fixdate</x:ref> = <x:ref>day-name</x:ref> "," <x:ref>SP</x:ref> date1 <x:ref>SP</x:ref> <x:ref>time-of-day</x:ref> <x:ref>SP</x:ref> <x:ref>GMT</x:ref> + ; fixed length/zone/capitalization subset of the format + ; see <xref target="RFC5322" x:fmt="of" x:sec="3.3"/> + + <x:ref>day-name</x:ref> = %s"Mon" / %s"Tue" / %s"Wed" + / %s"Thu" / %s"Fri" / %s"Sat" / %s"Sun" + + <x:ref>date1</x:ref> = <x:ref>day</x:ref> <x:ref>SP</x:ref> <x:ref>month</x:ref> <x:ref>SP</x:ref> <x:ref>year</x:ref> + ; e.g., 02 Jun 1982 + + <x:ref>day</x:ref> = 2<x:ref>DIGIT</x:ref> + <x:ref>month</x:ref> = %s"Jan" / %s"Feb" / %s"Mar" / %s"Apr" + / %s"May" / %s"Jun" / %s"Jul" / %s"Aug" + / %s"Sep" / %s"Oct" / %s"Nov" / %s"Dec" + <x:ref>year</x:ref> = 4<x:ref>DIGIT</x:ref> + + <x:ref>GMT</x:ref> = %s"GMT" + + <x:ref>time-of-day</x:ref> = <x:ref>hour</x:ref> ":" <x:ref>minute</x:ref> ":" <x:ref>second</x:ref> + ; 00:00:00 - 23:59:60 (leap second) + + <x:ref>hour</x:ref> = 2<x:ref>DIGIT</x:ref> + <x:ref>minute</x:ref> = 2<x:ref>DIGIT</x:ref> + <x:ref>second</x:ref> = 2<x:ref>DIGIT</x:ref> +</sourcecode> +<t anchor="obsolete.date.formats"> + <x:anchor-alias value="obs-date"/> + <x:anchor-alias value="rfc850-date"/> + <x:anchor-alias value="asctime-date"/> + <x:anchor-alias value="date1"/> + <x:anchor-alias value="date2"/> + <x:anchor-alias value="date3"/> + <x:anchor-alias value="day-name-l"/> + Obsolete formats: +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="obs-date"/> + <x:ref>obs-date</x:ref> = <x:ref>rfc850-date</x:ref> / <x:ref>asctime-date</x:ref> +</sourcecode> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="rfc850-date"/> + <x:ref>rfc850-date</x:ref> = <x:ref>day-name-l</x:ref> "," <x:ref>SP</x:ref> <x:ref>date2</x:ref> <x:ref>SP</x:ref> <x:ref>time-of-day</x:ref> <x:ref>SP</x:ref> <x:ref>GMT</x:ref> + <x:ref>date2</x:ref> = <x:ref>day</x:ref> "-" <x:ref>month</x:ref> "-" 2<x:ref>DIGIT</x:ref> + ; e.g., 02-Jun-82 + + <x:ref>day-name-l</x:ref> = %s"Monday" / %s"Tuesday" / %s"Wednesday" + / %s"Thursday" / %s"Friday" / %s"Saturday" + / %s"Sunday" +</sourcecode> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="asctime-date"/> + <x:ref>asctime-date</x:ref> = <x:ref>day-name</x:ref> <x:ref>SP</x:ref> <x:ref>date3</x:ref> <x:ref>SP</x:ref> <x:ref>time-of-day</x:ref> <x:ref>SP</x:ref> <x:ref>year</x:ref> + <x:ref>date3</x:ref> = <x:ref>month</x:ref> 
<x:ref>SP</x:ref> ( 2<x:ref>DIGIT</x:ref> / ( <x:ref>SP</x:ref> 1<x:ref>DIGIT</x:ref> )) + ; e.g., Jun 2 +</sourcecode> +<t> + HTTP-date is case sensitive. Note that <xref target="CACHING" x:fmt="of" x:rel="#expiration.model"/> relaxes this for cache recipients. +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> generate additional whitespace in an HTTP-date beyond + that specifically included as SP in the grammar. + The semantics of <x:ref>day-name</x:ref>, <x:ref>day</x:ref>, + <x:ref>month</x:ref>, <x:ref>year</x:ref>, and <x:ref>time-of-day</x:ref> + are the same as those defined for the Internet Message Format constructs + with the corresponding name (<xref target="RFC5322" x:fmt="," x:sec="3.3"/>). +</t> +<t> + Recipients of a timestamp value in rfc850-date format, which uses a + two-digit year, <bcp14>MUST</bcp14> interpret a timestamp that appears to be more + than 50 years in the future as representing the most recent year in the + past that had the same last two digits. +</t> +<t> + Recipients of timestamp values are encouraged to be robust in parsing + timestamps unless otherwise restricted by the field definition. + For example, messages are occasionally forwarded over HTTP from a non-HTTP + source that might generate any of the date and time specifications defined + by the Internet Message Format. +</t> +<aside> + <t> + <x:h>Note:</x:h> HTTP requirements for timestamp formats apply only + to their usage within the protocol stream. Implementations are + not required to use these formats for user presentation, request + logging, etc. + </t> +</aside> +</section> +</section> +</section> + +<section title="Message Abstraction" anchor="message.abstraction"> +<iref primary="true" item="message abstraction"/> +<iref primary="true" item="message"/> +<iref primary="true" item="self-descriptive"/> +<t> + Each major version of HTTP defines its own syntax for communicating + messages. This section defines an abstract data type for HTTP messages + based on a generalization of those message characteristics, common structure, + and capacity for conveying semantics. This abstraction is used to define + requirements on senders and recipients that are independent of the HTTP + version, such that a message in one version can be relayed through other + versions without changing its meaning. +</t> +<t> + A <x:dfn>message</x:dfn> consists of the following: +</t> +<ul> + <li>control data to describe and route the message,</li> + <li>a headers lookup table of name/value pairs for extending that control + data and conveying additional information about the sender, message, + content, or context,</li> + <li>a potentially unbounded stream of content, and</li> + <li>a trailers lookup table of name/value pairs for communicating information + obtained while sending the content.</li> +</ul> +<t> + Framing and control data is sent first, followed by a header section + containing fields for the headers table. When a message includes content, + the content is sent after the header section, potentially followed by a + trailer section that might contain fields for the trailers table. +</t> +<t> + Messages are expected to be processed as a stream, wherein the purpose of + that stream and its continued processing is revealed while being read. 
+ Hence, control data describes what the recipient needs to know immediately, + header fields describe what needs to be known before receiving content, + the content (when present) presumably contains what the recipient wants or + needs to fulfill the message semantics, and trailer fields provide + optional metadata that was unknown prior to sending the content. +</t> +<t> + Messages are intended to be <x:dfn>self-descriptive</x:dfn>: + everything a recipient needs to know about the message can be determined by + looking at the message itself, after decoding or reconstituting parts that + have been compressed or elided in transit, without requiring an + understanding of the sender's current application state (established via + prior messages). However, a client <bcp14>MUST</bcp14> retain knowledge of the request when + parsing, interpreting, or caching a corresponding response. For example, + responses to the <x:ref>HEAD</x:ref> method look just like the beginning of a + response to <x:ref>GET</x:ref> but cannot be parsed in the same manner. +</t> +<t> + Note that this message abstraction is a generalization across many versions + of HTTP, including features that might not be found in some versions. For + example, trailers were introduced within the HTTP/1.1 chunked transfer + coding as a trailer section after the content. An equivalent feature is + present in HTTP/2 and HTTP/3 within the header block that terminates each + stream. +</t> + +<section title="Framing and Completeness" anchor="message.framing"> +<iref primary="true" item="complete"/> +<iref primary="true" item="incomplete"/> +<t> + Message framing indicates how each message begins and ends, such that each + message can be distinguished from other messages or noise on the same + connection. Each major version of HTTP defines its own framing mechanism. +</t> +<t> + HTTP/0.9 and early deployments of HTTP/1.0 used closure of the underlying + connection to end a response. For backwards compatibility, this implicit + framing is also allowed in HTTP/1.1. However, implicit framing can fail to + distinguish an incomplete response if the connection closes early. For + that reason, almost all modern implementations use explicit framing in + the form of length-delimited sequences of message data. +</t> +<t> + A message is considered <x:dfn>complete</x:dfn> when all of the octets + indicated by its framing are available. Note that, + when no explicit framing is used, a response message that is ended + by the underlying connection's close is considered complete even though it + might be indistinguishable from an incomplete response, unless a + transport-level error indicates that it is not complete. +</t> +</section> + +<section title="Control Data" anchor="message.control.data"> +<iref primary="true" item="control data"/> +<t> + Messages start with control data that describe its primary purpose. Request + message control data includes a request method (<xref target="methods"/>), + request target (<xref target="target.resource"/>), and protocol version + (<xref target="protocol.version"/>). Response message control data includes + a status code (<xref target="status.codes"/>), optional reason phrase, and + protocol version. +</t> +<t> + In HTTP/1.1 (<xref target="HTTP11"/>) and earlier, control data is sent + as the first line of a message. In HTTP/2 (<xref target="HTTP2"/>) and + HTTP/3 (<xref target="HTTP3"/>), control data is sent as pseudo-header + fields with a reserved name prefix (e.g., ":authority"). 
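+</t>
+<t>
+  As a non-normative illustration (the dictionary keys and function
+  names are assumptions made for the example), the same abstract
+  request control data can be rendered either as an HTTP/1.1 request
+  line or as HTTP/2-style pseudo-header fields:
+</t>
+<sourcecode type="python">
+control_data = {"method": "GET", "scheme": "https",
+                "authority": "www.example.com", "path": "/hello.txt"}
+
+def as_http1_request(cd):
+    # Origin-form request line plus the mandatory Host header field.
+    return ["%s %s HTTP/1.1" % (cd["method"], cd["path"]),
+            "Host: %s" % cd["authority"]]
+
+def as_http2_pseudo_headers(cd):
+    return [(":method", cd["method"]), (":scheme", cd["scheme"]),
+            (":authority", cd["authority"]), (":path", cd["path"])]
+</sourcecode>
+<t>
+  Either rendering conveys the same control data; that equivalence is
+  what allows an intermediary to translate a message from one version
+  to another without changing its meaning.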
+</t> +<t> + Every HTTP message has a protocol version. Depending on the version in use, + it might be identified within the message explicitly or inferred by the + connection over which the message is received. Recipients use that version + information to determine limitations or potential for later communication + with that sender. +</t> +<t> + When a message is forwarded by an intermediary, the protocol version is + updated to reflect the version used by that intermediary. + The <x:ref>Via</x:ref> header field (<xref target="field.via"/>) is used to + communicate upstream protocol information within a forwarded message. +</t> +<t> + A client <bcp14>SHOULD</bcp14> send a request version equal to the highest + version to which the client is conformant and + whose major version is no higher than the highest version supported + by the server, if this is known. A client <bcp14>MUST NOT</bcp14> send a + version to which it is not conformant. +</t> +<t> + A client <bcp14>MAY</bcp14> send a lower request version if it is known that + the server incorrectly implements the HTTP specification, but only + after the client has attempted at least one normal request and determined + from the response status code or header fields (e.g., <x:ref>Server</x:ref>) that + the server improperly handles higher request versions. +</t> +<t> + A server <bcp14>SHOULD</bcp14> send a response version equal to the highest version to + which the server is conformant that has a major version less than or equal + to the one received in the request. + A server <bcp14>MUST NOT</bcp14> send a version to which it is not conformant. + A server can send a <x:ref>505 (HTTP Version Not Supported)</x:ref> + response if it wishes, for any reason, to refuse service of the client's + major protocol version. +</t> +<t> + A recipient that receives a message with a major version number that it + implements and a minor version number higher than what it implements + <bcp14>SHOULD</bcp14> process the message as if it + were in the highest minor version within that major version to which the + recipient is conformant. A recipient can assume that a message with a + higher minor version, when sent to a recipient that has not yet indicated + support for that higher version, is sufficiently backwards-compatible to be + safely processed by any implementation of the same major version. +</t> +</section> + +<section title="Header Fields" anchor="header.fields"> + <iref primary="true" item="header section"/> + <iref item="field"/> +<t> + Fields (<xref target="fields"/>) that are sent or received before the content + are referred to as "header fields" (or just "headers", colloquially). +</t> +<t> + The <x:dfn>header section</x:dfn> of a message consists of a sequence of + header field lines. Each header field might modify or extend message + semantics, describe the sender, define the content, or provide additional + context. +</t> +<aside> + <t> + <x:h>Note:</x:h> We refer to named fields specifically as a "header field" when they + are only allowed to be sent in the header section. + </t> +</aside> +</section> + +<section title="Content" anchor="content"> +<iref item="content"/> +<t> + HTTP messages often transfer a complete or partial representation as the + message <x:dfn>content</x:dfn>: a stream of octets sent after the header + section, as delineated by the message framing. +</t> +<t> + This abstract definition of content reflects the data after it has been + extracted from the message framing. 
For example, an HTTP/1.1 message body + (<xref target="HTTP11" x:rel="#message.body"/>) might consist of a stream of data encoded + with the chunked transfer coding — a sequence of data chunks, one + zero-length chunk, and a trailer section — whereas + the content of that same message + includes only the data stream after the transfer coding has been decoded; + it does not include the chunk lengths, chunked framing syntax, nor the + trailer fields (<xref target="trailer.fields"/>). +</t> +<aside> + <t> + <x:h>Note:</x:h> Some field names have a "Content-" prefix. This is an informal + convention; while some of these fields refer to the content of the + message, as defined above, others are scoped to the selected representation + (<xref target="representations"/>). See the individual field's + definition to disambiguate. + </t> +</aside> + +<section title="Content Semantics" anchor="content.semantics"> +<t> + The purpose of content in a request is defined by the method semantics + (<xref target="methods"/>). +</t> +<t> + For example, a representation in the content of a PUT request + (<xref target="PUT"/>) represents the desired state of the + <x:ref>target resource</x:ref> after the request is successfully applied, + whereas a representation in the content of a POST request + (<xref target="POST"/>) represents information to be processed by the + target resource. +</t> +<t> + In a response, the content's purpose is defined by the request method, + response status code (<xref target="status.codes"/>), and response + fields describing that content. + For example, the content of a <x:ref>200 (OK)</x:ref> response to GET + (<xref target="GET"/>) represents the current state of the + <x:ref>target resource</x:ref>, as observed at the time of the message + origination date (<xref target="field.date"/>), whereas the content of + the same status code in a response to POST might represent either the + processing result or the new state of the target resource after applying + the processing. +</t> +<t> + The content of a <x:ref>206 (Partial Content)</x:ref> response to GET + contains either a single part of the selected representation or a + multipart message body containing multiple parts of that representation, + as described in <xref target="status.206"/>. +</t> +<t> + Response messages with an error status code usually contain content that + represents the error condition, such that the content describes the + error state and what steps are suggested for resolving it. +</t> +<t> + Responses to the HEAD request method (<xref target="HEAD"/>) never include + content; the associated response header fields indicate only + what their values would have been if the request method had been GET + (<xref target="GET"/>). +</t> +<t> + <x:ref>2xx (Successful)</x:ref> responses to a CONNECT request method + (<xref target="CONNECT"/>) switch the connection to tunnel mode instead of + having content. +</t> +<t> + All <x:ref>1xx (Informational)</x:ref>, <x:ref>204 (No Content)</x:ref>, and + <x:ref>304 (Not Modified)</x:ref> responses do not include content. +</t> +<t> + All other responses do include content, although that content + might be of zero length. +</t> +</section> + +<section title="Identifying Content" anchor="identifying.content"> +<t> + When a complete or partial representation is transferred as message + content, it is often desirable for the sender to supply, or the recipient + to determine, an identifier for a resource corresponding to that specific + representation. 
For example, a client making a GET request on a resource + for "the current weather report" might want an identifier specific to the + content returned (e.g., "weather report for Laguna Beach at 20210720T1711"). + This can be useful for sharing or bookmarking content from resources that + are expected to have changing representations over time. +</t> +<t> + For a request message: +</t> +<ul> + <li>If the request has a <x:ref>Content-Location</x:ref> header field, + then the sender asserts that the content is a representation of the + resource identified by the Content-Location field value. However, + such an assertion cannot be trusted unless it can be verified by + other means (not defined by this specification). The information + might still be useful for revision history links.</li> + <li>Otherwise, the content is unidentified by HTTP, but a more specific + identifier might be supplied within the content itself.</li> +</ul> +<t> + For a response message, the following rules are applied in order until a + match is found: +</t> +<ol> + <li>If the request method is HEAD or the response status code is + <x:ref>204 (No Content)</x:ref> or <x:ref>304 (Not Modified)</x:ref>, + there is no content in the response.</li> + <li>If the request method is GET and the response status code is + <x:ref>200 (OK)</x:ref>, + the content is a representation of the <x:ref>target resource</x:ref> (<xref target="target.resource"/>).</li> + <li>If the request method is GET and the response status code is + <x:ref>203 (Non-Authoritative Information)</x:ref>, the content is + a potentially modified or enhanced representation of the + <x:ref>target resource</x:ref> as provided by an intermediary.</li> + <li>If the request method is GET and the response status code is + <x:ref>206 (Partial Content)</x:ref>, + the content is one or more parts of a representation of the + target resource.</li> + <li>If the response has a <x:ref>Content-Location</x:ref> header field + and its field value is a reference to the same URI as the target URI, + the content is a representation of the target resource.</li> + <li>If the response has a <x:ref>Content-Location</x:ref> header field + and its field value is a reference to a URI different from the + target URI, then the sender asserts that the content is a + representation of the resource identified by the Content-Location + field value. However, such an assertion cannot be trusted unless + it can be verified by other means (not defined by this specification).</li> + <li>Otherwise, the content is unidentified by HTTP, but a more specific + identifier might be supplied within the content itself.</li> +</ol> +</section> +</section> + +<section title="Trailer Fields" anchor="trailer.fields"> +<iref primary="true" item="trailer section"/> +<iref primary="true" item="Trailer Fields"/> +<iref primary="true" item="trailers"/> +<t> + Fields (<xref target="fields"/>) that are located within a + <x:dfn>trailer section</x:dfn> are referred to as "trailer fields" + (or just "trailers", colloquially). + Trailer fields can be useful for supplying message integrity checks, digital + signatures, delivery metrics, or post-processing status information. +</t> +<t> + Trailer fields ought to be processed and stored separately from the fields + in the header section to avoid contradicting message semantics known at + the time the header section was complete. 
The presence or absence of + certain header fields might impact choices made for the routing or + processing of the message as a whole before the trailers are received; + those choices cannot be unmade by the later discovery of trailer fields. +</t> + +<section title="Limitations on Use of Trailers" anchor="trailers.limitations"> +<t> + A trailer section is only possible when supported by the version + of HTTP in use and enabled by an explicit framing mechanism. + For example, the chunked transfer coding in HTTP/1.1 allows a trailer section to be + sent after the content (<xref target="HTTP11" x:rel="#chunked.trailer.section"/>). + </t> +<t> + Many fields cannot be processed outside the header section because + their evaluation is necessary prior to receiving the content, such as + those that describe message framing, routing, authentication, + request modifiers, response controls, or content format. + A sender <bcp14>MUST NOT</bcp14> generate a trailer field unless the sender knows the + corresponding header field name's definition permits the field to be sent + in trailers. +</t> +<t> + Trailer fields can be difficult to process by intermediaries that forward + messages from one protocol version to another. If the entire message can be + buffered in transit, some intermediaries could merge trailer fields into + the header section (as appropriate) before it is forwarded. However, in + most cases, the trailers are simply discarded. + A recipient <bcp14>MUST NOT</bcp14> merge a trailer field into a header section unless + the recipient understands the corresponding header field definition and + that definition explicitly permits and defines how trailer field values + can be safely merged. +</t> +<t> + The presence of the keyword "trailers" in the TE header field (<xref target="field.te"/>) of a request indicates that the client is willing to + accept trailer fields, on behalf of itself and any downstream clients. For + requests from an intermediary, this implies that all + downstream clients are willing to accept trailer fields in the forwarded + response. Note that the presence of "trailers" does not mean that the + client(s) will process any particular trailer field in the response; only + that the trailer section(s) will not be dropped by any of the clients. +</t> +<t> + Because of the potential for trailer fields to be discarded in transit, a + server <bcp14>SHOULD NOT</bcp14> generate trailer fields that it believes are necessary + for the user agent to receive. +</t> +</section> + +<section title="Processing Trailer Fields" anchor="trailers.processing"> +<t> + The "Trailer" header field (<xref target="field.trailer"/>) can be sent + to indicate fields likely to be sent in the trailer section, which allows + recipients to prepare for their receipt before processing the content. + For example, this could be useful if a field name indicates that a dynamic + checksum should be calculated as the content is received and then + immediately checked upon receipt of the trailer field value. +</t> +<t> + Like header fields, trailer fields with the same name are processed in the + order received; multiple trailer field lines with the same name have the + equivalent semantics as appending the multiple values as a list of members. + Trailer fields that might be generated more than once during a message + <bcp14>MUST</bcp14> be defined as a list-based field even if each member value is only + processed once per field line received. 
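+</t>
+<t>
+  The following non-normative example shows a hypothetical HTTP/1.1
+  response that uses the chunked transfer coding
+  (<xref target="HTTP11" x:rel="#chunked.trailer.section"/>) to send a
+  made-up "Example-Checksum" trailer field after the content; the field
+  name and its value are purely illustrative and are not defined by this
+  specification:
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 200 OK
+Content-Type: text/plain
+Transfer-Encoding: chunked
+Trailer: Example-Checksum
+
+5
+Hello
+0
+Example-Checksum: abc123
+
+</sourcecode>
+<t>
+  Whether a given field is actually permitted to appear in a trailer
+  section is governed by that field's definition, as described in
+  <xref target="trailers.limitations"/>.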
+</t> +<t> + At the end of a message, a recipient <bcp14>MAY</bcp14> treat the set of received + trailer fields as a data structure of name/value pairs, similar to (but + separate from) the header fields. Additional processing expectations, if + any, can be defined within the field specification for a field intended + for use in trailers. +</t> +</section> +</section> + +<section title="Message Metadata" anchor="message.metadata"> +<t> + Fields that describe the message itself, such as when and how the + message has been generated, can appear in both requests and responses. +</t> + +<section title="Date" anchor="field.date"> + <x:anchor-alias value="header.date"/> + <iref primary="true" item="Fields" subitem="Date" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Date" x:for-anchor=""/><iref primary="true" item="Date header field" x:for-anchor=""/> + <x:anchor-alias value="Date"/> +<t> + The "Date" header field represents the date and time at which + the message was originated, having the same semantics as the Origination + Date Field (orig-date) defined in <xref target="RFC5322" x:fmt="of" x:sec="3.6.1"/>. + The field value is an HTTP-date, as defined in <xref target="http.date"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Date"/> + <x:ref>Date</x:ref> = <x:ref>HTTP-date</x:ref> +</sourcecode> +<t> + An example is +</t> +<sourcecode type="http-message"> +Date: Tue, 15 Nov 1994 08:12:31 GMT +</sourcecode> +<t> + A sender that generates a Date header field <bcp14>SHOULD</bcp14> generate its + field value as the best available approximation of the date and time of + message generation. In theory, the date ought to represent the moment just + before generating the message content. In practice, a sender can generate + the date value at any time during message origination. +</t> +<t> + An origin server with a clock (as defined in + <xref target="http.date"/>) <bcp14>MUST</bcp14> generate a Date header field in + all <x:ref>2xx (Successful)</x:ref>, <x:ref>3xx (Redirection)</x:ref>, + and <x:ref>4xx (Client Error)</x:ref> responses, + and <bcp14>MAY</bcp14> generate a Date header field in + <x:ref>1xx (Informational)</x:ref> and + <x:ref>5xx (Server Error)</x:ref> responses. +</t> +<t> + An origin server without a clock <bcp14>MUST NOT</bcp14> generate a Date header field. +</t> +<t> + A recipient with a clock that receives a response message without a Date + header field <bcp14>MUST</bcp14> record the time it was received and append a + corresponding Date header field to the message's header section if it is + cached or forwarded downstream. +</t> +<t> + A recipient with a clock that receives a response with an invalid Date + header field value <bcp14>MAY</bcp14> replace that value with the time that + response was received. +</t> +<t> + A user agent <bcp14>MAY</bcp14> send a Date header field in a request, though generally + will not do so unless it is believed to convey useful information to the + server. For example, custom applications of HTTP might convey a Date if + the server is expected to adjust its interpretation of the user's request + based on differences between the user agent and server clocks. 
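+</t>
+<t>
+  As a purely illustrative (non-normative) example, such a request might
+  look like the following, where the resource and host name are
+  hypothetical:
+</t>
+<sourcecode type="http-message">
+POST /orders HTTP/1.1
+Host: app.example.com
+Date: Sat, 04 Dec 2021 16:00:12 GMT
+Content-Length: 0
+
+</sourcecode>
+<t>
+  A server for such an application could compare the Date field value with
+  its own clock when interpreting the request, as described above.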
+</t> +</section> + +<section title="Trailer" anchor="field.trailer"> + <x:anchor-alias value="header.trailer"/> + <iref primary="true" item="Fields" subitem="Trailer" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Trailer" x:for-anchor=""/><iref primary="true" item="Trailer header field" x:for-anchor=""/> + <x:anchor-alias value="Trailer"/> +<t> + The "Trailer" header field provides a list of field names that the sender + anticipates sending as trailer fields within that message. This allows a + recipient to prepare for receipt of the indicated metadata before it starts + processing the content. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Trailer"/><iref primary="false" item="Grammar" subitem="field-name"/> + <x:ref>Trailer</x:ref> = #<x:ref>field-name</x:ref> +</sourcecode> +<t> + For example, a sender might indicate that a signature will + be computed as the content is being streamed and provide the final + signature as a trailer field. This allows a recipient to perform the same + check on the fly as it receives the content. +</t> +<t> + A sender that intends to generate one or more trailer fields in a message + <bcp14>SHOULD</bcp14> generate a <x:ref>Trailer</x:ref> header field in the header + section of that message to indicate which fields might be present in the + trailers. +</t> +<t> + If an intermediary discards the trailer section in transit, the + <x:ref>Trailer</x:ref> field could provide a hint of what metadata + was lost, though there is no guarantee that a sender of Trailer + will always follow through by sending the named fields. +</t> +</section> +</section> +</section> + +<section title="Routing HTTP Messages" anchor="routing"> +<t> + HTTP request message routing is determined by each client based on the + target resource, the client's proxy configuration, and + establishment or reuse of an inbound connection. The corresponding + response routing follows the same connection chain back to the client. +</t> + +<section title="Determining the Target Resource" anchor="target.resource"> + <iref primary="true" item="target resource"/> + <iref primary="true" item="target URI"/> + <iref primary="true" item="request target"/> + <x:anchor-alias value="target resource"/> + <x:anchor-alias value="target URI"/> + <x:anchor-alias value="request target"/> + <x:anchor-alias value="request.target"/> + <x:anchor-alias value="reconstructing.target.uri"/> +<t> + Although HTTP is used in a wide variety of applications, most clients rely + on the same resource identification mechanism and configuration techniques + as general-purpose Web browsers. Even when communication options are + hard-coded in a client's configuration, we can think of their combined + effect as a URI reference (<xref target="uri.references"/>). +</t> +<t> + A URI reference is resolved to its absolute form in order to obtain the + <x:dfn>target URI</x:dfn>. The target URI excludes the reference's + fragment component, if any, since fragment identifiers are reserved for + client-side processing (<xref target="URI" x:fmt="," x:sec="3.5"/>). +</t> +<t> + To perform an action on a <x:dfn>target resource</x:dfn>, the client sends + a request message containing enough components of its parsed target URI to + enable recipients to identify that same resource. 
For historical reasons, + the parsed target URI components, collectively referred to as the + <x:dfn>request target</x:dfn>, are sent within the message control data + and the <x:ref>Host</x:ref> header field (<xref target="field.host"/>). +</t> +<t> + There are two unusual cases for which the request target components are in + a method-specific form: + </t> + <ul> + <li> + For CONNECT (<xref target="CONNECT"/>), the request target is the host + name and port number of the tunnel destination, separated by a colon. + </li> + <li> + For OPTIONS (<xref target="OPTIONS"/>), the request target can be a + single asterisk ("*"). + </li> +</ul> +<t> + See the respective method definitions for details. These forms <bcp14>MUST NOT</bcp14> + be used with other methods. +</t> +<t> + Upon receipt of a client's request, a server reconstructs the target URI + from the received components in accordance with their local configuration + and incoming connection context. This reconstruction is specific to each + major protocol version. For example, + <xref target="HTTP11" x:rel="#h1.effective.request.uri"/> defines how a server + determines the target URI of an HTTP/1.1 request. +</t> +<aside anchor="effective.request.uri"> + <t> + <iref primary="true" item="effective request URI"/> + <x:h>Note:</x:h> Previous specifications defined the recomposed target URI as a + distinct concept, the <x:dfn>effective request URI</x:dfn>. + </t> +</aside> +</section> + +<section title="Host and :authority" anchor="field.host"> + <x:anchor-alias value="header.host"/> + <iref primary="true" item="Fields" subitem="Host" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Host" x:for-anchor=""/><iref primary="true" item="Host header field" x:for-anchor=""/> + <x:anchor-alias value="Host"/> + <x:anchor-alias value="pseudo-authority"/> +<t> + The "Host" header field in a request provides the host and port + information from the target URI, enabling the origin + server to distinguish among resources while servicing requests + for multiple host names. +</t> +<t> + In HTTP/2 <xref target="HTTP2"/> and HTTP/3 <xref target="HTTP3"/>, the + Host header field is, in some cases, supplanted by the ":authority" + pseudo-header field of a request's control data. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Host"/> + <x:ref>Host</x:ref> = <x:ref>uri-host</x:ref> [ ":" <x:ref>port</x:ref> ] ; <xref target="uri"/> +</sourcecode> +<t> + The target URI's authority information is critical for handling a + request. A user agent <bcp14>MUST</bcp14> generate a Host header field in a request + unless it sends that information as an ":authority" pseudo-header field. + A user agent that sends Host <bcp14>SHOULD</bcp14> send it as the first field in the + header section of a request. +</t> +<t> + For example, a GET request to the origin server for + &lt;http://www.example.org/pub/WWW/&gt; would begin with: +</t> +<sourcecode type="http-message"> +GET /pub/WWW/ HTTP/1.1 +Host: www.example.org +</sourcecode> +<t> + Since the host and port information acts as an application-level routing + mechanism, it is a frequent target for malware seeking to poison + a shared cache or redirect a request to an unintended server. 
+ An interception proxy is particularly vulnerable if it relies on + the host and port information for redirecting requests to internal + servers, or for use as a cache key in a shared cache, without + first verifying that the intercepted connection is targeting a + valid IP address for that host. +</t> +</section> + +<section title="Routing Inbound Requests" anchor="routing.inbound"> +<t> + Once the target URI and its origin are determined, a client decides whether + a network request is necessary to accomplish the desired semantics and, + if so, where that request is to be directed. +</t> + +<section title="To a Cache" anchor="routing.cache"> +<t> + If the client has a cache <xref target="CACHING"/> and the request can be + satisfied by it, then the request is + usually directed there first. +</t> +</section> + +<section title="To a Proxy" anchor="routing.proxy"> +<t> + If the request is not satisfied by a cache, then a typical client will + check its configuration to determine whether a proxy is to be used to + satisfy the request. Proxy configuration is implementation-dependent, + but is often based on URI prefix matching, selective authority matching, + or both, and the proxy itself is usually identified by an "http" or + "https" URI. +</t> +<t> + If an "http" or "https" proxy is applicable, the client connects + inbound by establishing (or reusing) a connection to that proxy and + then sending it an HTTP request message containing a request target + that matches the client's target URI. +</t> +</section> + +<section title="To the Origin" anchor="routing.origin"> +<t> + If no proxy is applicable, a typical client will invoke a handler + routine (specific to the target URI's scheme) to obtain access to the + identified resource. How that is accomplished is dependent on the + target URI scheme and defined by its associated specification. +</t> +<t> + <xref target="http.origin"/> defines how to obtain access to an + "http" resource by establishing (or reusing) an inbound connection to + the identified origin server and then sending it an HTTP request message + containing a request target that matches the client's target URI. +</t> +<t> + <xref target="https.origin"/> defines how to obtain access to an + "https" resource by establishing (or reusing) an inbound secured + connection to an origin server that is authoritative for the identified + origin and then sending it an HTTP request message containing a request + target that matches the client's target URI. +</t> +</section> +</section> + +<section title="Rejecting Misdirected Requests" anchor="routing.reject"> +<t> + Once a request is received by a server and parsed sufficiently to determine + its target URI, the server decides whether to process the request itself, + forward the request to another server, redirect the client to a different + resource, respond with an error, or drop the connection. This decision can + be influenced by anything about the request or connection context, but is + specifically directed at whether the server has been configured to process + requests for that target URI and whether the connection context is + appropriate for that request. +</t> +<t> + For example, a request might have been misdirected, + deliberately or accidentally, such that the information within a received + <x:ref>Host</x:ref> header field differs from the connection's host or port. 
+ If the connection is from a trusted gateway, such inconsistency might + be expected; otherwise, it might indicate an attempt to bypass security + filters, trick the server into delivering non-public content, or poison a + cache. See <xref target="security.considerations"/> for security + considerations regarding message routing. +</t> +<t> + Unless the connection is from a trusted gateway, + an origin server <bcp14>MUST</bcp14> reject a request if any scheme-specific requirements + for the target URI are not met. In particular, + a request for an "https" resource <bcp14>MUST</bcp14> be rejected unless it has been + received over a connection that has been secured via a certificate + valid for that target URI's origin, as defined by <xref target="https.uri"/>. +</t> +<t> + The <x:ref>421 (Misdirected Request)</x:ref> status code in a response + indicates that the origin server has rejected the request because it + appears to have been misdirected (<xref target="status.421"/>). +</t> +</section> + +<section title="Response Correlation" anchor="response.correlation"> +<t> + A connection might be used for multiple request/response exchanges. The + mechanism used to correlate between request and response messages is + version dependent; some versions of HTTP use implicit ordering of + messages, while others use an explicit identifier. +</t> +<t> + All responses, regardless of the status code (including <x:ref>interim</x:ref> + responses) can be sent at any time after a request is received, even if the + request is not yet complete. A response can complete before its + corresponding request is complete (<xref target="message.framing"/>). Likewise, clients are not expected + to wait any specific amount of time for a response. Clients + (including intermediaries) might abandon a request if the response is not + received within a reasonable period of time. +</t> +<t> + A client that receives a response while it is still sending the associated + request <bcp14>SHOULD</bcp14> continue sending that request unless it receives + an explicit indication to the contrary (see, e.g., <xref target="HTTP11" x:rel="#persistent.failures"/> and <xref target="HTTP2" section="6.4"/>). +</t> +</section> + +<section title="Message Forwarding" anchor="message.forwarding"> +<t> + As described in <xref target="intermediaries"/>, intermediaries can serve + a variety of roles in the processing of HTTP requests and responses. + Some intermediaries are used to improve performance or availability. + Others are used for access control or to filter content. + Since an HTTP stream has characteristics similar to a pipe-and-filter + architecture, there are no inherent limits to the extent an intermediary + can enhance (or interfere) with either direction of the stream. +</t> +<t> + Intermediaries are expected to forward messages even when protocol elements + are not recognized (e.g., new methods, status codes, or field names) since that + preserves extensibility for downstream recipients. +</t> +<t> + An intermediary not acting as a tunnel <bcp14>MUST</bcp14> implement the + <x:ref>Connection</x:ref> header field, as specified in + <xref target="field.connection"/>, and exclude fields from being forwarded + that are only intended for the incoming connection. +</t> +<t> + An intermediary <bcp14>MUST NOT</bcp14> forward a message to itself unless it is + protected from an infinite request loop. 
In general, an intermediary ought + to recognize its own server names, including any aliases, local variations, + or literal IP addresses, and respond to such requests directly. +</t> +<t> + An HTTP message can be parsed as a stream for incremental processing or + forwarding downstream. + However, senders and recipients cannot rely on incremental + delivery of partial messages, since some implementations will buffer or + delay message forwarding for the sake of network efficiency, security + checks, or content transformations. +</t> + +<section title="Connection" anchor="field.connection"> + <x:anchor-alias value="header.connection"/> + <iref primary="true" item="Fields" subitem="Connection" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Connection" x:for-anchor=""/><iref primary="true" item="Connection header field" x:for-anchor=""/> + <x:anchor-alias value="Connection"/> + <x:anchor-alias value="connection-option"/> +<t> + The "Connection" header field allows the sender to list desired + control options for the current connection. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Connection"/><iref primary="true" item="Grammar" subitem="connection-option"/> + <x:ref>Connection</x:ref> = #<x:ref>connection-option</x:ref> + <x:ref>connection-option</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + Connection options are case-insensitive. +</t> +<t> + When a field aside from Connection is used to supply control + information for or about the current connection, the sender <bcp14>MUST</bcp14> list + the corresponding field name within the Connection header field. + Note that some versions of HTTP prohibit the use of fields for such + information, and therefore do not allow the Connection field. +</t> +<t> + Intermediaries <bcp14>MUST</bcp14> parse a received Connection + header field before a message is forwarded and, for each + connection-option in this field, remove any header or trailer field(s) from + the message with the same name as the connection-option, and then + remove the Connection header field itself (or replace it with the + intermediary's own control options for the forwarded message). +</t> +<t> + Hence, the Connection header field provides a declarative way of + distinguishing fields that are only intended for the + immediate recipient ("hop-by-hop") from those fields that are + intended for all recipients on the chain ("end-to-end"), enabling the + message to be self-descriptive and allowing future connection-specific + extensions to be deployed without fear that they will be blindly + forwarded by older intermediaries. +</t> +<t> + Furthermore, intermediaries <bcp14>SHOULD</bcp14> remove or replace fields + that are known to require removal before forwarding, whether or not + they appear as a connection-option, after applying those fields' + semantics. This includes but is not limited to: +</t> +<ul> + <li>Proxy-Connection (<xref target="HTTP11" x:rel="#compatibility.with.http.1.0.persistent.connections"/>)</li> + <li>Keep-Alive (<xref x:sec="19.7.1" x:fmt="of" target="RFC2068"/>)</li> + <li>TE (<xref target="field.te"/>)</li> + <li>Transfer-Encoding (<xref target="HTTP11" x:rel="#field.transfer-encoding"/>)</li> + <li>Upgrade (<xref target="field.upgrade"/>)</li> +</ul> +<t> + A sender <bcp14>MUST NOT</bcp14> send a connection option corresponding to a + field that is intended for all recipients of the content. 
+ For example, <x:ref>Cache-Control</x:ref> is never appropriate as a + connection option (<xref target="CACHING" x:rel="#field.cache-control"/>). +</t> +<t> + Connection options do not always correspond to a field + present in the message, since a connection-specific field + might not be needed if there are no parameters associated with a + connection option. In contrast, a connection-specific field + received without a corresponding connection option usually indicates + that the field has been improperly forwarded by an intermediary and + ought to be ignored by the recipient. +</t> +<t> + When defining a new connection option that does not correspond to a field, + specification authors ought to reserve the corresponding field name + anyway in order to avoid later collisions. Such reserved field names are + registered in the "Hypertext Transfer Protocol (HTTP) Field Name Registry" + (<xref target="fields.registry"/>). +</t> +</section> + +<section title="Max-Forwards" anchor="field.max-forwards"> + <x:anchor-alias value="header.max-forwards"/> + <iref primary="true" item="Fields" subitem="Max-Forwards" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Max-Forwards" x:for-anchor=""/><iref primary="true" item="Max-Forwards header field" x:for-anchor=""/> + <x:anchor-alias value="Max-Forwards"/> +<t> + The "Max-Forwards" header field provides a mechanism with the + TRACE (<xref target="TRACE"/>) and OPTIONS (<xref target="OPTIONS"/>) + request methods to limit the number of times that the request is forwarded by + proxies. This can be useful when the client is attempting to + trace a request that appears to be failing or looping mid-chain. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Max-Forwards"/> + <x:ref>Max-Forwards</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + The Max-Forwards value is a decimal integer indicating the remaining + number of times this request message can be forwarded. +</t> +<t> + Each intermediary that receives a TRACE or OPTIONS request containing a + Max-Forwards header field <bcp14>MUST</bcp14> check and update its value prior to + forwarding the request. If the received value is zero (0), the intermediary + <bcp14>MUST NOT</bcp14> forward the request; instead, the intermediary <bcp14>MUST</bcp14> respond as + the final recipient. If the received Max-Forwards value is greater than + zero, the intermediary <bcp14>MUST</bcp14> generate an updated Max-Forwards field in the + forwarded message with a field value that is the lesser of a) the received + value decremented by one (1) or b) the recipient's maximum supported value + for Max-Forwards. +</t> +<t> + A recipient <bcp14>MAY</bcp14> ignore a Max-Forwards header field received with any + other request methods. 
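+</t>
+<t>
+  As a non-normative illustration, a hypothetical TRACE request that allows
+  at most two further forwards could be sent as:
+</t>
+<sourcecode type="http-message">
+TRACE /resource HTTP/1.1
+Host: www.example.com
+Max-Forwards: 2
+
+</sourcecode>
+<t>
+  Under the rules above, an intermediary forwarding this request would send
+  it onward with a Max-Forwards value of no more than 1, and an intermediary
+  receiving it with a value of 0 would respond as the final recipient rather
+  than forwarding it.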
+</t> +</section> + +<section title="Via" anchor="field.via"> + <x:anchor-alias value="header.via"/> + <iref primary="true" item="Fields" subitem="Via" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Via" x:for-anchor=""/><iref primary="true" item="Via header field" x:for-anchor=""/> + <x:anchor-alias value="pseudonym"/> + <x:anchor-alias value="received-by"/> + <x:anchor-alias value="received-protocol"/> + <x:anchor-alias value="Via"/> +<t> + The "Via" header field indicates the presence of intermediate protocols and + recipients between the user agent and the server (on requests) or between + the origin server and the client (on responses), similar to the + "Received" header field in email + (<xref target="RFC5322" x:fmt="of" x:sec="3.6.7"/>). + Via can be used for tracking message forwards, + avoiding request loops, and identifying the protocol capabilities of + senders along the request/response chain. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Via"/><iref primary="true" item="Grammar" subitem="received-protocol"/><iref primary="true" item="Grammar" subitem="protocol-name"/><iref primary="true" item="Grammar" subitem="protocol-version"/><iref primary="true" item="Grammar" subitem="received-by"/><iref primary="true" item="Grammar" subitem="pseudonym"/> + <x:ref>Via</x:ref> = #( <x:ref>received-protocol</x:ref> <x:ref>RWS</x:ref> <x:ref>received-by</x:ref> [ <x:ref>RWS</x:ref> <x:ref>comment</x:ref> ] ) + + <x:ref>received-protocol</x:ref> = [ <x:ref>protocol-name</x:ref> "/" ] <x:ref>protocol-version</x:ref> + ; see <xref target="field.upgrade"/> + <x:ref>received-by</x:ref> = <x:ref>pseudonym</x:ref> [ ":" <x:ref>port</x:ref> ] + <x:ref>pseudonym</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + Each member of the Via field value represents a proxy or gateway that has + forwarded the message. Each intermediary appends its own information + about how the message was received, such that the end result is ordered + according to the sequence of forwarding recipients. +</t> +<t> + A proxy <bcp14>MUST</bcp14> send an appropriate Via header field, as described below, in + each message that it forwards. + An HTTP-to-HTTP gateway <bcp14>MUST</bcp14> send an appropriate Via header field in + each inbound request message and <bcp14>MAY</bcp14> send a Via header field in + forwarded response messages. +</t> +<t> + For each intermediary, the received-protocol indicates the protocol and + protocol version used by the upstream sender of the message. Hence, the + Via field value records the advertised protocol capabilities of the + request/response chain such that they remain visible to downstream + recipients; this can be useful for determining what backwards-incompatible + features might be safe to use in response, or within a later request, as + described in <xref target="protocol.version"/>. For brevity, the protocol-name + is omitted when the received protocol is HTTP. +</t> +<t> + The received-by portion is normally the host and optional + port number of a recipient server or client that subsequently forwarded the + message. + However, if the real host is considered to be sensitive information, a + sender <bcp14>MAY</bcp14> replace it with a pseudonym. If a port is not provided, + a recipient <bcp14>MAY</bcp14> interpret that as meaning it was received on the default + port, if any, for the received-protocol. 
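+</t>
+<t>
+  For instance, a hypothetical (non-normative) Via field value of
+</t>
+<sourcecode type="http-message">
+Via: 1.1 proxy.example.net:8080, 1.1 backend-gw
+</sourcecode>
+<t>
+  indicates that the message was first received by a recipient at
+  proxy.example.net on port 8080 and then by a recipient identified only by
+  the pseudonym "backend-gw"; both names are illustrative.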
+</t> +<t> + A sender <bcp14>MAY</bcp14> generate comments to identify the + software of each recipient, analogous to the <x:ref>User-Agent</x:ref> and + <x:ref>Server</x:ref> header fields. However, comments in Via + are optional, and a recipient <bcp14>MAY</bcp14> remove them prior to forwarding the + message. +</t> +<t> + For example, a request message could be sent from an HTTP/1.0 user + agent to an internal proxy code-named "fred", which uses HTTP/1.1 to + forward the request to a public proxy at p.example.net, which completes + the request by forwarding it to the origin server at www.example.com. + The request received by www.example.com would then have the following + Via header field: +</t> +<sourcecode type="http-message"> +Via: 1.0 fred, 1.1 p.example.net +</sourcecode> +<t> + An intermediary used as a portal through a network firewall + <bcp14>SHOULD NOT</bcp14> forward the names and ports of hosts within the firewall + region unless it is explicitly enabled to do so. If not enabled, such an + intermediary <bcp14>SHOULD</bcp14> replace each received-by host of any host behind the + firewall by an appropriate pseudonym for that host. +</t> +<t> + An intermediary <bcp14>MAY</bcp14> combine an ordered subsequence of Via header + field list members into a single member if the entries have identical + received-protocol values. For example, +</t> +<sourcecode type="http-message"> +Via: 1.0 ricky, 1.1 ethel, 1.1 fred, 1.0 lucy +</sourcecode> +<t> + could be collapsed to +</t> +<sourcecode type="http-message"> +Via: 1.0 ricky, 1.1 mertz, 1.0 lucy +</sourcecode> +<t> + A sender <bcp14>SHOULD NOT</bcp14> combine multiple list members unless they are all + under the same organizational control and the hosts have already been + replaced by pseudonyms. A sender <bcp14>MUST NOT</bcp14> combine members that + have different received-protocol values. +</t> +</section> +</section> + +<section title="Message Transformations" anchor="message.transformations"> + <iref primary="true" item="transforming proxy"/> + <iref primary="true" item="non-transforming proxy"/> +<t> + Some intermediaries include features for transforming messages and their + content. A proxy might, for example, convert between image formats in + order to save cache space or to reduce the amount of traffic on a slow + link. However, operational problems might occur when these transformations + are applied to content intended for critical applications, such as medical + imaging or scientific data analysis, particularly when integrity checks or + digital signatures are used to ensure that the content received is + identical to the original. +</t> +<t> + An HTTP-to-HTTP proxy is called a <x:dfn>transforming proxy</x:dfn> + if it is designed or configured to modify messages in a semantically + meaningful way (i.e., modifications, beyond those required by normal + HTTP processing, that change the message in a way that would be + significant to the original sender or potentially significant to + downstream recipients). For example, a transforming proxy might be + acting as a shared annotation server (modifying responses to include + references to a local annotation database), a malware filter, a + format transcoder, or a privacy filter. Such transformations are presumed + to be desired by whichever client (or client organization) chose the + proxy. 
+</t> +<t> + If a proxy receives a target URI with a host name that is not a + fully qualified domain name, it <bcp14>MAY</bcp14> add its own domain to the host name + it received when forwarding the request. A proxy <bcp14>MUST NOT</bcp14> change the + host name if the target URI contains a fully qualified domain name. +</t> +<t> + A proxy <bcp14>MUST NOT</bcp14> modify the "absolute-path" and "query" parts of the + received target URI when forwarding it to the next inbound server except + as required by that forwarding protocol. For example, a proxy forwarding + a request to an origin server via HTTP/1.1 will replace an empty path with + "/" (<xref target="HTTP11" x:rel="#origin-form"/>) or "*" (<xref target="HTTP11" x:rel="#asterisk-form"/>), + depending on the request method. +</t> +<t> + A proxy <bcp14>MUST NOT</bcp14> transform the content (<xref target="content"/>) of a + response message that contains a no-transform cache directive + (<xref target="CACHING" x:rel="#cache-response-directive.no-transform"/>). Note that this + does not apply to message transformations that do not change the content, + such as the addition or removal of transfer codings + (<xref target="HTTP11" x:rel="#transfer.codings"/>). +</t> +<t> + A proxy <bcp14>MAY</bcp14> transform the content of a message + that does not contain a no-transform cache directive. + A proxy that transforms the content of a <x:ref>200 (OK)</x:ref> response + can inform downstream recipients that a transformation has been + applied by changing the response status code to + <x:ref>203 (Non-Authoritative Information)</x:ref> (<xref target="status.203"/>). +</t> +<t> + A proxy <bcp14>SHOULD NOT</bcp14> modify header fields that provide information about + the endpoints of the communication chain, the resource state, or the + <x:ref>selected representation</x:ref> (other than the content) unless the field's + definition specifically allows such modification or the modification is + deemed necessary for privacy or security. +</t> +</section> + +<section title="Upgrade" anchor="field.upgrade"> + <x:anchor-alias value="header.upgrade"/> + <iref primary="true" item="Fields" subitem="Upgrade" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Upgrade" x:for-anchor=""/><iref primary="true" item="Upgrade header field" x:for-anchor=""/> + <x:anchor-alias value="Upgrade"/> + <x:anchor-alias value="protocol"/> + <x:anchor-alias value="protocol-name"/> + <x:anchor-alias value="protocol-version"/> +<t> + The "Upgrade" header field is intended to provide a simple mechanism + for transitioning from HTTP/1.1 to some other protocol on the same + connection. +</t> +<t> + A client <bcp14>MAY</bcp14> send a list of protocol names in the Upgrade header field + of a request to invite the server to switch to one or more of the named + protocols, in order of descending preference, before sending + the final response. A server <bcp14>MAY</bcp14> ignore a received Upgrade header field + if it wishes to continue using the current protocol on that connection. + Upgrade cannot be used to insist on a protocol change. 
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Upgrade"/> + <x:ref>Upgrade</x:ref> = #<x:ref>protocol</x:ref> + + <x:ref>protocol</x:ref> = <x:ref>protocol-name</x:ref> ["/" <x:ref>protocol-version</x:ref>] + <x:ref>protocol-name</x:ref> = <x:ref>token</x:ref> + <x:ref>protocol-version</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + Although protocol names are registered with a preferred case, + recipients <bcp14>SHOULD</bcp14> use case-insensitive comparison when matching each + protocol-name to supported protocols. +</t> +<t> + A server that sends a <x:ref>101 (Switching Protocols)</x:ref> response + <bcp14>MUST</bcp14> send an Upgrade header field to indicate the new protocol(s) to + which the connection is being switched; if multiple protocol layers are + being switched, the sender <bcp14>MUST</bcp14> list the protocols in layer-ascending + order. A server <bcp14>MUST NOT</bcp14> switch to a protocol that was not indicated by + the client in the corresponding request's Upgrade header field. + A server <bcp14>MAY</bcp14> choose to ignore the order of preference indicated by the + client and select the new protocol(s) based on other factors, such as the + nature of the request or the current load on the server. +</t> +<t> + A server that sends a <x:ref>426 (Upgrade Required)</x:ref> response + <bcp14>MUST</bcp14> send an Upgrade header field to indicate the acceptable protocols, + in order of descending preference. +</t> +<t> + A server <bcp14>MAY</bcp14> send an Upgrade header field in any other response to + advertise that it implements support for upgrading to the listed protocols, + in order of descending preference, when appropriate for a future request. +</t> +<t> + The following is a hypothetical example sent by a client: +</t> +<sourcecode type="http-message"> +GET /hello HTTP/1.1 +Host: www.example.com +Connection: upgrade +Upgrade: websocket, IRC/6.9, RTA/x11 + +</sourcecode> +<t> + The capabilities and nature of the + application-level communication after the protocol change is entirely + dependent upon the new protocol(s) chosen. However, immediately after + sending the <x:ref>101 (Switching Protocols)</x:ref> response, the server is expected to continue responding to + the original request as if it had received its equivalent within the new + protocol (i.e., the server still has an outstanding request to satisfy + after the protocol has been changed, and is expected to do so without + requiring the request to be repeated). +</t> +<t> + For example, if the Upgrade header field is received in a GET request + and the server decides to switch protocols, it first responds + with a <x:ref>101 (Switching Protocols)</x:ref> message in HTTP/1.1 and + then immediately follows that with the new protocol's equivalent of a + response to a GET on the target resource. This allows a connection to be + upgraded to protocols with the same semantics as HTTP without the + latency cost of an additional round trip. A server <bcp14>MUST NOT</bcp14> switch + protocols unless the received message semantics can be honored by the new + protocol; an OPTIONS request can be honored by any protocol. +</t> +<t> + The following is an example response to the above hypothetical request: +</t> +<sourcecode type="http-message"> +HTTP/1.1 101 Switching Protocols +Connection: upgrade +Upgrade: websocket + +[... data stream switches to websocket with an appropriate response +(as defined by new protocol) to the "GET /hello" request ...] 
+</sourcecode> +<t> + A sender of Upgrade <bcp14>MUST</bcp14> also send an "Upgrade" connection option in the + <x:ref>Connection</x:ref> header field (<xref target="field.connection"/>) + to inform intermediaries not to forward this field. + A server that receives an Upgrade header field in an HTTP/1.0 request + <bcp14>MUST</bcp14> ignore that Upgrade field. +</t> +<t> + A client cannot begin using an upgraded protocol on the connection until + it has completely sent the request message (i.e., the client can't change + the protocol it is sending in the middle of a message). + If a server receives both an Upgrade and an <x:ref>Expect</x:ref> header field + with the "100-continue" expectation (<xref target="field.expect"/>), the + server <bcp14>MUST</bcp14> send a <x:ref>100 (Continue)</x:ref> response before sending + a <x:ref>101 (Switching Protocols)</x:ref> response. +</t> +<t> + The Upgrade header field only applies to switching protocols on top of the + existing connection; it cannot be used to switch the underlying connection + (transport) protocol, nor to switch the existing communication to a + different connection. For those purposes, it is more appropriate to use a + <x:ref>3xx (Redirection)</x:ref> response (<xref target="status.3xx"/>). +</t> +<t> + This specification only defines the protocol name "HTTP" for use by + the family of Hypertext Transfer Protocols, as defined by the HTTP + version rules of <xref target="protocol.version"/> and future updates to this + specification. Additional protocol names ought to be registered using the + registration procedure defined in <xref target="upgrade.token.registry"/>. +</t> +</section> +</section> + +<section title="Representation Data and Metadata" anchor="representation.data.and.metadata"> +<section title="Representation Data" anchor="representation.data"> + <x:anchor-alias value="representation-data"/> +<t> + The representation data associated with an HTTP message is + either provided as the content of the message or + referred to by the message semantics and the target + URI. The representation data is in a format and encoding defined by + the representation metadata header fields. +</t> +<t> + The data type of the representation data is determined via the header fields + <x:ref>Content-Type</x:ref> and <x:ref>Content-Encoding</x:ref>. + These define a two-layer, ordered encoding model: +</t> +<artwork type="example"> + representation-data := Content-Encoding( Content-Type( data ) ) +</artwork> +</section> + +<section title="Representation Metadata" anchor="representation.metadata"> + <x:anchor-alias value="representation-header"/> +<t> + Representation header fields provide metadata about the representation. + When a message includes content, the representation header fields + describe how to interpret that data. In a response to a HEAD request, the + representation header fields describe the representation data that would + have been enclosed in the content if the same request had been a GET. 
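+</t>
+<t>
+  As a non-normative illustration, a response to a HEAD request might carry
+  only representation metadata, with hypothetical field values:
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 200 OK
+Content-Type: text/html; charset=UTF-8
+Content-Language: en
+Content-Length: 15342
+
+</sourcecode>
+<t>
+  The same representation header fields in a <x:ref>200 (OK)</x:ref>
+  response to GET for the same target would describe the enclosed content
+  itself.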
+</t> +</section> + +<section title="Content-Type" anchor="field.content-type"> + <x:anchor-alias value="header.content-type"/> + <iref primary="true" item="Fields" subitem="Content-Type" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-Type" x:for-anchor=""/><iref primary="true" item="Content-Type header field" x:for-anchor=""/> + <x:anchor-alias value="Content-Type"/> +<t> + The "Content-Type" header field indicates the media type of the + associated representation: either the representation enclosed in + the message content or the <x:ref>selected representation</x:ref>, as determined by the + message semantics. The indicated media type defines both the data format + and how that data is intended to be processed by a recipient, within the + scope of the received message semantics, after any content codings + indicated by <x:ref>Content-Encoding</x:ref> are decoded. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Content-Type"/> + <x:ref>Content-Type</x:ref> = <x:ref>media-type</x:ref> +</sourcecode> +<t> + Media types are defined in <xref target="media.type"/>. An example of the + field is +</t> +<sourcecode type="http-message"> +Content-Type: text/html; charset=ISO-8859-4 +</sourcecode> +<t> + A sender that generates a message containing content <bcp14>SHOULD</bcp14> + generate a Content-Type header field in that message unless the intended + media type of the enclosed representation is unknown to the sender. + If a Content-Type header field is not present, the recipient <bcp14>MAY</bcp14> either + assume a media type of + "application/octet-stream" (<xref target="RFC2046" x:fmt="," x:sec="4.5.1"/>) + or examine the data to determine its type. +</t> +<t> + In practice, resource owners do not always properly configure their origin + server to provide the correct Content-Type for a given representation. + Some user agents examine the content and, in certain cases, + override the received type (for example, see <xref target="Sniffing"/>). + This "MIME sniffing" risks drawing incorrect conclusions about the data, + which might expose the user to additional security risks + (e.g., "privilege escalation"). + Furthermore, distinct media types often share a common data format, + differing only in how the data is intended to be processed, which is + impossible to distinguish by inspecting the data alone. + When sniffing is implemented, implementers are encouraged to provide a + means for the user to disable it. +</t> +<t> + Although Content-Type is defined as a singleton field, it is + sometimes incorrectly generated multiple times, resulting in a combined + field value that appears to be a list. + Recipients often attempt to handle this error by using the last + syntactically valid member of the list, leading to potential + interoperability and security issues if different implementations + have different error handling behaviors. +</t> + +<section title="Media Type" anchor="media.type"> + <x:anchor-alias value="media-type"/> + <x:anchor-alias value="type"/> + <x:anchor-alias value="subtype"/> +<t> + HTTP uses media types <xref target="RFC2046"/> in the + <x:ref>Content-Type</x:ref> (<xref target="field.content-type"/>) + and <x:ref>Accept</x:ref> (<xref target="field.accept"/>) header fields in + order to provide open and extensible data typing and type negotiation. + Media types define both a data format and various processing models: + how to process that data in accordance with the message context. 
+</t>
+<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="media-type"/><iref primary="true" item="Grammar" subitem="type"/><iref primary="true" item="Grammar" subitem="subtype"/>
+ <x:ref>media-type</x:ref> = <x:ref>type</x:ref> "/" <x:ref>subtype</x:ref> <x:ref>parameters</x:ref>
+ <x:ref>type</x:ref> = <x:ref>token</x:ref>
+ <x:ref>subtype</x:ref> = <x:ref>token</x:ref>
+</sourcecode>
+<t>
+ The type and subtype tokens are case-insensitive.
+</t>
+<t>
+ The type/subtype <bcp14>MAY</bcp14> be followed by semicolon-delimited parameters
+ (<xref target="parameter"/>) in the form of name/value pairs.
+ The presence or absence of a parameter might be significant to the
+ processing of a media type, depending on its definition within the media
+ type registry.
+ Parameter values might or might not be case-sensitive, depending on the
+ semantics of the parameter name.
+</t>
+<t>
+ For example, the following media types are equivalent in describing HTML
+ text data encoded in the UTF-8 character encoding scheme, but the first is
+ preferred for consistency (the "charset" parameter value is defined as
+ being case-insensitive in <xref target="RFC2046" x:fmt="," x:sec="4.1.2"/>):
+</t>
+<artwork type="example">
+ text/html;charset=utf-8
+ Text/HTML;Charset="utf-8"
+ text/html; charset="utf-8"
+ text/html;charset=UTF-8
+</artwork>
+<t>
+ Media types ought to be registered with IANA according to the
+ procedures defined in <xref target="BCP13"/>.
+</t>
+</section>
+
+<section title="Charset" anchor="charset">
+ <x:anchor-alias value="rule.charset"/>
+<t>
+ HTTP uses <x:dfn>charset</x:dfn> names to indicate or negotiate the
+ character encoding scheme (<xref target="RFC6365" sectionFormat="comma" section="2"/>)
+ of a textual representation. In the fields defined by this document,
+ charset names appear either in parameters (<x:ref>Content-Type</x:ref>),
+ or, for <x:ref>Accept-Charset</x:ref>, in the form of a plain <x:ref>token</x:ref>.
+ In both cases, charset names are matched case-insensitively.
+</t>
+<t>
+ Charset names ought to be registered in the IANA "Character Sets" registry
+ (<eref target="https://www.iana.org/assignments/character-sets"/>)
+ according to the procedures defined in <xref target="RFC2978" x:fmt="of" x:sec="2"/>.
+</t>
+<aside>
+ <t>
+ <x:h>Note:</x:h> In theory, charset names are defined by the "mime-charset" ABNF
+ rule defined in <xref target="RFC2978" x:fmt="of" x:sec="2.3"/> (as
+ corrected in <xref target="Err1912"/>). That rule allows two characters
+ that are not included in "token" ("{" and "}"), but no charset name
+ registered at the time of this writing includes braces
+ (see <xref target="Err5433"/>).
+ </t>
+</aside>
+</section>
+
+<section title="Multipart Types" anchor="multipart.types">
+<t>
+ MIME provides for a number of "multipart" types — encapsulations of
+ one or more representations within a single message body. All multipart
+ types share a common syntax, as defined in <xref target="RFC2046" x:sec="5.1.1" x:fmt="of"/>,
+ and include a boundary parameter as part of the media type
+ value. The message body is itself a protocol element; a sender <bcp14>MUST</bcp14>
+ generate only CRLF to represent line breaks between body parts.
+</t>
+<t>
+ HTTP message framing does not use the multipart boundary as an indicator
+ of message body length, though it might be used by implementations that
+ generate or process the content.
For example, the "multipart/form-data" + type is often used for carrying form data in a request, as described in + <xref target="RFC7578"/>, and the "multipart/byteranges" type is defined + by this specification for use in some <x:ref>206 (Partial Content)</x:ref> + responses (see <xref target="status.206"/>). +</t> +</section> +</section> + +<section title="Content-Encoding" anchor="field.content-encoding"> + <x:anchor-alias value="header.content-encoding"/> + <iref primary="true" item="Fields" subitem="Content-Encoding" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-Encoding" x:for-anchor=""/><iref primary="true" item="Content-Encoding header field" x:for-anchor=""/> + <x:anchor-alias value="Content-Encoding"/> +<t> + The "Content-Encoding" header field indicates what content codings + have been applied to the representation, beyond those inherent in the media + type, and thus what decoding mechanisms have to be applied in order to + obtain data in the media type referenced by the <x:ref>Content-Type</x:ref> + header field. + Content-Encoding is primarily used to allow a representation's data to be + compressed without losing the identity of its underlying media type. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Content-Encoding"/> + <x:ref>Content-Encoding</x:ref> = #<x:ref>content-coding</x:ref> +</sourcecode> +<t> + An example of its use is +</t> +<sourcecode type="http-message"> +Content-Encoding: gzip +</sourcecode> +<t> + If one or more encodings have been applied to a representation, the sender + that applied the encodings <bcp14>MUST</bcp14> generate a Content-Encoding header field + that lists the content codings in the order in which they were applied. + Note that the coding named "identity" is reserved for its special role + in <x:ref>Accept-Encoding</x:ref> and thus <bcp14>SHOULD NOT</bcp14> be included. +</t> +<t> + Additional information about the encoding parameters can be provided + by other header fields not defined by this specification. +</t> +<t> + Unlike Transfer-Encoding (<xref target="HTTP11" x:rel="#field.transfer-encoding"/>), the codings listed + in Content-Encoding are a characteristic of the representation; the + representation is defined in terms of the coded form, and all other + metadata about the representation is about the coded form unless otherwise + noted in the metadata definition. Typically, the representation is only + decoded just prior to rendering or analogous usage. +</t> +<t> + If the media type includes an inherent encoding, such as a data format + that is always compressed, then that encoding would not be restated in + Content-Encoding even if it happens to be the same algorithm as one + of the content codings. Such a content coding would only be listed if, + for some bizarre reason, it is applied a second time to form the + representation. Likewise, an origin server might choose to publish the + same data as multiple representations that differ only in whether + the coding is defined as part of <x:ref>Content-Type</x:ref> or + Content-Encoding, since some user agents will behave differently in their + handling of each response (e.g., open a "Save as ..." dialog instead of + automatic decompression and rendering of content). +</t> +<t> + An origin server <bcp14>MAY</bcp14> respond with a status code of + <x:ref>415 (Unsupported Media Type)</x:ref> if a representation in the + request message has a content coding that is not acceptable. 
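+</t>
+<t>
+  As a non-normative illustration, a request whose content has been
+  gzip-coded by the sender might look like the following (the target and
+  host name are hypothetical, and the bracketed line stands in for the
+  coded octets):
+</t>
+<sourcecode type="http-message">
+PUT /docs/report HTTP/1.1
+Host: origin.example.com
+Content-Type: text/plain
+Content-Encoding: gzip
+Content-Length: 345
+
+[345 octets of gzip-coded data]
+</sourcecode>
+<t>
+  An origin server that is unable or unwilling to decode the "gzip" coding
+  for this resource could reject such a request with a
+  <x:ref>415 (Unsupported Media Type)</x:ref> response, as noted above.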
+</t> + +<section title="Content Codings" anchor="content.codings"> + <iref primary="true" item="content coding"/> + <iref primary="true" item="compress (content coding)"/> + <iref primary="true" item="x-compress (content coding)"/> + <iref primary="true" item="deflate (content coding)"/> + <iref primary="true" item="gzip (content coding)"/> + <iref primary="true" item="x-gzip (content coding)"/> + <x:anchor-alias value="content-coding"/> +<t> + Content coding values indicate an encoding transformation that has + been or can be applied to a representation. Content codings are primarily + used to allow a representation to be compressed or otherwise usefully + transformed without losing the identity of its underlying media type + and without loss of information. Frequently, the representation is stored + in coded form, transmitted directly, and only decoded by the final recipient. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="content-coding"/> + <x:ref>content-coding</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + All content codings are case-insensitive and ought to be registered + within the "HTTP Content Coding Registry", as described in + <xref target="content.coding.extensibility"/> +</t> +<t> + Content-coding values are used in the + <x:ref>Accept-Encoding</x:ref> (<xref target="field.accept-encoding"/>) + and <x:ref>Content-Encoding</x:ref> (<xref target="field.content-encoding"/>) + header fields. +</t> + +<section title="Compress Coding" anchor="compress.coding"> +<iref item="compress (Coding Format)"/> +<t> + The "compress" coding is an adaptive Lempel-Ziv-Welch (LZW) coding + <xref target="Welch"/> that is commonly produced by the UNIX file + compression program "compress". + A recipient <bcp14>SHOULD</bcp14> consider "x-compress" to be equivalent to "compress". +</t> +</section> + +<section title="Deflate Coding" anchor="deflate.coding"> +<iref item="deflate (Coding Format)"/> +<t> + The "deflate" coding is a "zlib" data format <xref target="RFC1950"/> + containing a "deflate" compressed data stream <xref target="RFC1951"/> + that uses a combination of the Lempel-Ziv (LZ77) compression algorithm and + Huffman coding. +</t> +<aside> + <t> + <x:h>Note:</x:h> Some non-conformant implementations send the "deflate" + compressed data without the zlib wrapper. + </t> +</aside> +</section> + +<section title="Gzip Coding" anchor="gzip.coding"> +<iref item="gzip (Coding Format)"/> +<t> + The "gzip" coding is an LZ77 coding with a 32-bit Cyclic Redundancy Check + (CRC) that is commonly + produced by the gzip file compression program <xref target="RFC1952"/>. + A recipient <bcp14>SHOULD</bcp14> consider "x-gzip" to be equivalent to "gzip". +</t> +</section> +</section> +</section> + +<section title="Content-Language" anchor="field.content-language"> + <x:anchor-alias value="header.content-language"/> + <iref primary="true" item="Fields" subitem="Content-Language" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-Language" x:for-anchor=""/><iref primary="true" item="Content-Language header field" x:for-anchor=""/> + <x:anchor-alias value="Content-Language"/> +<t> + The "Content-Language" header field describes the natural + language(s) of the intended audience for the representation. Note that this might + not be equivalent to all the languages used within the representation. 
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Content-Language"/> + <x:ref>Content-Language</x:ref> = #<x:ref>language-tag</x:ref> +</sourcecode> +<t> + Language tags are defined in <xref target="language.tags"/>. The primary purpose of + Content-Language is to allow a user to identify and differentiate + representations according to the users' own preferred language. Thus, if the + content is intended only for a Danish-literate audience, the + appropriate field is +</t> +<sourcecode type="http-message"> +Content-Language: da +</sourcecode> +<t> + If no Content-Language is specified, the default is that the content + is intended for all language audiences. This might mean that the + sender does not consider it to be specific to any natural language, + or that the sender does not know for which language it is intended. +</t> +<t> + Multiple languages <bcp14>MAY</bcp14> be listed for content that is intended for + multiple audiences. For example, a rendition of the "Treaty of + Waitangi", presented simultaneously in the original Maori and English + versions, would call for +</t> +<sourcecode type="http-message"> +Content-Language: mi, en +</sourcecode> +<t> + However, just because multiple languages are present within a representation + does not mean that it is intended for multiple linguistic audiences. + An example would be a beginner's language primer, such as "A First + Lesson in Latin", which is clearly intended to be used by an + English-literate audience. In this case, the Content-Language would + properly only include "en". +</t> +<t> + Content-Language <bcp14>MAY</bcp14> be applied to any media type — it is not + limited to textual documents. +</t> + +<section title="Language Tags" anchor="language.tags"> + <x:anchor-alias value="language-tag"/> +<t> + A language tag, as defined in <xref target="RFC5646"/>, identifies a + natural language spoken, written, or otherwise conveyed by human beings for + communication of information to other human beings. Computer languages are + explicitly excluded. +</t> +<t> + HTTP uses language tags within the <x:ref>Accept-Language</x:ref> and + <x:ref>Content-Language</x:ref> header fields. + <x:ref>Accept-Language</x:ref> uses the broader language-range production + defined in <xref target="field.accept-language"/>, whereas + <x:ref>Content-Language</x:ref> uses the language-tag production defined + below. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="language-tag"/> + <x:ref>language-tag</x:ref> = &lt;Language-Tag, see <xref target="RFC5646" x:sec="2.1"/>&gt; +</sourcecode> +<t> + A language tag is a sequence of one or more case-insensitive subtags, each + separated by a hyphen character ("-", %x2D). In most cases, a language tag + consists of a primary language subtag that identifies a broad family of + related languages (e.g., "en" = English), which is optionally followed by a + series of subtags that refine or narrow that language's range (e.g., + "en-CA" = the variety of English as communicated in Canada). + Whitespace is not allowed within a language tag. + Example tags include: +</t> +<artwork type="example"> + fr, en-US, es-419, az-Arab, x-pig-latin, man-Nkoo-GN +</artwork> +<t> + See <xref target="RFC5646"/> for further information. 
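+</t>
+<t>
+ As a non-normative illustration of the structure described above, the
+ following Python sketch splits a tag into its case-insensitive subtags,
+ compares only the primary language subtag, and splits a Content-Language
+ field value into its list members; the function names are invented for
+ this example, and the sketch is not a substitute for the matching
+ schemes defined for language ranges:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: case-insensitive subtags and a primary-subtag check.
+def subtags(language_tag):
+    return [s.lower() for s in language_tag.strip().split("-")]
+
+def same_primary_language(tag_a, tag_b):
+    return subtags(tag_a)[0] == subtags(tag_b)[0]
+
+def audiences(content_language):
+    # A Content-Language field value can list several audiences, e.g. "mi, en".
+    return [t.strip() for t in content_language.split(",") if t.strip()]
+
+assert same_primary_language("en-CA", "EN")
+assert audiences("mi, en") == ["mi", "en"]
+</sourcecode>
+<t>
+ User agents in practice rely on the richer matching rules defined for
+ language ranges rather than on a primary-subtag comparison such as this.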
+</t> +</section> +</section> + +<section title="Content-Length" anchor="field.content-length"> + <x:anchor-alias value="header.content-length"/> + <iref primary="true" item="Fields" subitem="Content-Length" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-Length" x:for-anchor=""/><iref primary="true" item="Content-Length header field" x:for-anchor=""/> + <x:anchor-alias value="Content-Length"/> +<t> + The "Content-Length" header field indicates the associated representation's + data length as a decimal non-negative integer number of octets. + When transferring a representation as content, Content-Length refers + specifically to the amount of data enclosed so that it can be used to + delimit framing (e.g., <xref target="HTTP11" x:rel="#body.content-length"/>). + In other cases, Content-Length indicates the selected representation's + current length, which can be used by recipients to estimate transfer time + or to compare with previously stored representations. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Content-Length"/> + <x:ref>Content-Length</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + An example is +</t> +<sourcecode type="http-message"> +Content-Length: 3495 +</sourcecode> +<t> + A user agent <bcp14>SHOULD</bcp14> send Content-Length in a request when the method + defines a meaning for enclosed content and it is not sending + <x:ref>Transfer-Encoding</x:ref>. + For example, a user agent normally sends Content-Length in a POST request + even when the value is 0 (indicating empty content). + A user agent <bcp14>SHOULD NOT</bcp14> send a + Content-Length header field when the request message does not contain + content and the method semantics do not anticipate such data. +</t> +<t> + A server <bcp14>MAY</bcp14> send a Content-Length header field in a response to a HEAD + request (<xref target="HEAD"/>); a server <bcp14>MUST NOT</bcp14> send Content-Length in such a + response unless its field value equals the decimal number of octets that + would have been sent in the content of a response if the same + request had used the GET method. +</t> +<t> + A server <bcp14>MAY</bcp14> send a Content-Length header field in a + <x:ref>304 (Not Modified)</x:ref> response to a conditional GET request + (<xref target="status.304"/>); a server <bcp14>MUST NOT</bcp14> send Content-Length in such a + response unless its field value equals the decimal number of octets that + would have been sent in the content of a <x:ref>200 (OK)</x:ref> + response to the same request. +</t> +<t> + A server <bcp14>MUST NOT</bcp14> send a Content-Length header field in any response + with a status code of + <x:ref>1xx (Informational)</x:ref> or <x:ref>204 (No Content)</x:ref>. + A server <bcp14>MUST NOT</bcp14> send a Content-Length header field in any + <x:ref>2xx (Successful)</x:ref> response to a CONNECT request (<xref target="CONNECT"/>). +</t> +<t> + Aside from the cases defined above, in the absence of Transfer-Encoding, + an origin server <bcp14>SHOULD</bcp14> send a Content-Length header field when the + content size is known prior to sending the complete header section. + This will allow downstream recipients to measure transfer progress, + know when a received message is complete, and potentially reuse the + connection for additional requests. +</t> +<t> + Any Content-Length field value greater than or equal to zero is valid. 
+ Since there is no predefined limit to the length of content, a
+ recipient <bcp14>MUST</bcp14> anticipate potentially large decimal numerals and
+ prevent parsing errors due to integer conversion overflows
+ or precision loss due to integer conversion
+ (<xref target="attack.protocol.element.length"/>).
+</t>
+<t>
+ Because Content-Length is used for message delimitation in HTTP/1.1,
+ its field value can impact how the message is parsed by downstream
+ recipients even when the immediate connection is not using HTTP/1.1.
+ If the message is forwarded by a downstream intermediary, a Content-Length
+ field value that is inconsistent with the received message framing might
+ cause a security failure due to request smuggling or response splitting.
+</t>
+<t>
+ As a result, a sender <bcp14>MUST NOT</bcp14> forward a message with a
+ Content-Length header field value that is known to be incorrect.
+</t>
+<t>
+ Likewise, a sender <bcp14>MUST NOT</bcp14> forward a message with a Content-Length
+ header field value that does not match the ABNF above, with one exception:
+ a recipient of a Content-Length header field value consisting of the same
+ decimal value repeated as a comma-separated list (e.g.,
+ "Content-Length: 42, 42") <bcp14>MAY</bcp14> either reject the message as invalid or
+ replace that invalid field value with a single instance of the decimal
+ value, since this likely indicates that a duplicate was generated or
+ combined by an upstream message processor.
+</t>
+</section>
+
+<section title="Content-Location" anchor="field.content-location">
+ <x:anchor-alias value="header.content-location"/>
+ <iref primary="true" item="Fields" subitem="Content-Location" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-Location" x:for-anchor=""/><iref primary="true" item="Content-Location header field" x:for-anchor=""/>
+ <x:anchor-alias value="Content-Location"/>
+<t>
+ The "Content-Location" header field references a URI that can be used
+ as an identifier for a specific resource corresponding to the
+ representation in this message's content.
+ In other words, if one were to perform a GET request on this URI at the time
+ of this message's generation, then a <x:ref>200 (OK)</x:ref> response would
+ contain the same representation that is enclosed as content in this message.
+</t>
+<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Content-Location"/>
+ <x:ref>Content-Location</x:ref> = <x:ref>absolute-URI</x:ref> / <x:ref>partial-URI</x:ref>
+</sourcecode>
+<t>
+ The field value is either an <x:ref>absolute-URI</x:ref> or a
+ <x:ref>partial-URI</x:ref>. In the latter case (<xref target="uri"/>),
+ the referenced URI is relative to the target URI
+ (<xref target="URI" x:fmt="," x:sec="5"/>).
+</t>
+<t>
+ The Content-Location value is not a replacement for the target URI
+ (<xref target="target.resource"/>). It is representation metadata.
+ It has the same syntax and semantics as the header field of the same name
+ defined for MIME body parts in <xref target="RFC2557" x:fmt="of" x:sec="4"/>.
+ However, its appearance in an HTTP message has some special implications
+ for HTTP recipients.
+</t> +<t> + If Content-Location is included in a <x:ref>2xx (Successful)</x:ref> + response message and its value refers (after conversion to absolute form) + to a URI that is the same as the target URI, then + the recipient <bcp14>MAY</bcp14> consider the content to be a current representation of + that resource at the time indicated by the message origination date. + For a GET (<xref target="GET"/>) or HEAD (<xref target="HEAD"/>) request, + this is the same as the default semantics when no Content-Location is + provided by the server. + For a state-changing request like PUT (<xref target="PUT"/>) or + POST (<xref target="POST"/>), it implies that the server's response + contains the new representation of that resource, thereby distinguishing it + from representations that might only report about the action + (e.g., "It worked!"). + This allows authoring applications to update their local copies without + the need for a subsequent GET request. +</t> +<t> + If Content-Location is included in a <x:ref>2xx (Successful)</x:ref> + response message and its field value refers to a URI that differs from the + target URI, then the origin server claims that the URI + is an identifier for a different resource corresponding to the enclosed + representation. Such a claim can only be trusted if both identifiers share + the same resource owner, which cannot be programmatically determined via + HTTP. +</t> +<ul> + <li>For a response to a GET or HEAD request, this is an indication that the + target URI refers to a resource that is subject to content + negotiation and the Content-Location field value is a more specific + identifier for the <x:ref>selected representation</x:ref>.</li> + <li>For a <x:ref>201 (Created)</x:ref> response to a state-changing method, + a Content-Location field value that is identical to the + <x:ref>Location</x:ref> field value indicates that this content is a + current representation of the newly created resource.</li> + <li>Otherwise, such a Content-Location indicates that this content is a + representation reporting on the requested action's status and that the + same report is available (for future access with GET) at the given URI. + For example, a purchase transaction made via a POST request might + include a receipt document as the content of the <x:ref>200 (OK)</x:ref> + response; the Content-Location field value provides an identifier for + retrieving a copy of that same receipt in the future.</li> +</ul> +<t> + A user agent that sends Content-Location in a request message is stating + that its value refers to where the user agent originally obtained the + content of the enclosed representation (prior to any modifications made by + that user agent). In other words, the user agent is providing a back link + to the source of the original representation. +</t> +<t> + An origin server that receives a Content-Location field in a request + message <bcp14>MUST</bcp14> treat the information as transitory request context rather + than as metadata to be saved verbatim as part of the representation. + An origin server <bcp14>MAY</bcp14> use that context to guide in processing the + request or to save it for other uses, such as within source links or + versioning metadata. However, an origin server <bcp14>MUST NOT</bcp14> use such context + information to alter the request semantics. 
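+</t>
+<t>
+ The conversion to absolute form discussed earlier in this section, used
+ when deciding whether a Content-Location value refers to the target URI,
+ can be illustrated with a short, non-normative Python sketch; the
+ function name and the example URIs are invented for this illustration:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: resolve a (possibly partial) Content-Location value
+# against the target URI and test whether both identify the same resource.
+from urllib.parse import urljoin
+
+def refers_to_target(target_uri, content_location):
+    # A partial-URI is interpreted relative to the target URI.
+    return urljoin(target_uri, content_location) == target_uri
+
+assert refers_to_target("http://example.com/reports/1", "/reports/1")
+assert not refers_to_target("http://example.com/reports/1", "/reports/2")
+</sourcecode>
+<t>
+ A complete implementation would normalize both URIs before comparison;
+ the simple string equality above is sufficient only for this sketch, and
+ none of the requirements above on handling Content-Location are changed
+ by it.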
+</t> +<t> + For example, if a client makes a PUT request on a negotiated resource and + the origin server accepts that PUT (without redirection), then the new + state of that resource is expected to be consistent with the one + representation supplied in that PUT; the Content-Location cannot be used as + a form of reverse content selection identifier to update only one of the + negotiated representations. If the user agent had wanted the latter + semantics, it would have applied the PUT directly to the Content-Location + URI. +</t> +</section> + +<section title="Validator Fields" anchor="response.validator"> + <iref primary="true" item="metadata"/> + <iref primary="true" item="validator"/> + <iref item="selected representation"/> +<t> + Resource metadata is referred to as a <x:dfn>validator</x:dfn> if it + can be used within a precondition (<xref target="preconditions"/>) to + make a conditional request (<xref target="conditional.requests"/>). + Validator fields convey a current validator for the + <x:ref>selected representation</x:ref> + (<xref target="representations"/>). +</t> +<t> + In responses to safe requests, validator fields describe the selected + representation chosen by the origin server while handling the response. + Note that, depending on the method and status code semantics, the + selected representation for a given response is not + necessarily the same as the representation enclosed as response content. +</t> +<t> + In a successful response to a state-changing request, validator fields + describe the new representation that has replaced the prior + <x:ref>selected representation</x:ref> as a result of processing the + request. +</t> +<t> + For example, an ETag field in a <x:ref>201 (Created)</x:ref> response + communicates the entity tag of the newly created resource's + representation, so that the entity tag can be used as a validator in + later conditional requests to prevent the "lost update" problem. +</t> +<t> + This specification defines two forms of metadata that are commonly used + to observe resource state and test for preconditions: modification dates + (<xref target="field.last-modified"/>) and opaque entity tags + (<xref target="field.etag"/>). + Additional metadata that reflects resource state + has been defined by various extensions of HTTP, such as Web Distributed + Authoring and Versioning <xref target="WEBDAV"/>, that are beyond the + scope of this specification. +</t> + +<section title="Weak versus Strong" anchor="weak.and.strong.validators"> + <iref primary="true" item="validator" subitem="weak"/> + <iref primary="true" item="validator" subitem="strong"/> +<t> + Validators come in two flavors: strong or weak. Weak validators are easy + to generate but are far less useful for comparisons. Strong validators + are ideal for comparisons but can be very difficult (and occasionally + impossible) to generate efficiently. Rather than impose that all forms + of resource adhere to the same strength of validator, HTTP exposes the + type of validator in use and imposes restrictions on when weak validators + can be used as preconditions. +</t> +<t> + A <x:dfn>strong validator</x:dfn> is representation metadata that changes value whenever + a change occurs to the representation data that would be observable in the + content of a <x:ref>200 (OK)</x:ref> response to GET. 
+</t> +<t> + A strong validator might change for reasons other than a change to the + representation data, such as when a + semantically significant part of the representation metadata is changed + (e.g., <x:ref>Content-Type</x:ref>), but it is in the best interests of the + origin server to only change the value when it is necessary to invalidate + the stored responses held by remote caches and authoring tools. +</t> +<t> + Cache entries might persist for arbitrarily long periods, regardless + of expiration times. Thus, a cache might attempt to validate an + entry using a validator that it obtained in the distant past. + A strong validator is unique across all versions of all + representations associated with a particular resource over time. + However, there is no implication of uniqueness across representations + of different resources (i.e., the same strong validator might be + in use for representations of multiple resources at the same time + and does not imply that those representations are equivalent). +</t> +<t> + There are a variety of strong validators used in practice. The best are + based on strict revision control, wherein each change to a representation + always results in a unique node name and revision identifier being assigned + before the representation is made accessible to GET. + A collision-resistant hash + function applied to the representation data is also sufficient if the data + is available prior to the response header fields being sent and the digest + does not need to be recalculated every time a validation request is + received. However, if a resource has distinct representations that differ + only in their metadata, such as might occur with content negotiation over + media types that happen to share the same data format, then the origin + server needs to incorporate additional information in the validator to + distinguish those representations. +</t> +<t> + In contrast, a <x:dfn>weak validator</x:dfn> is representation metadata + that might not change for every change to the representation data. This + weakness might be due to limitations in how the value is calculated + (e.g., clock resolution), an inability to ensure uniqueness for all + possible representations of the resource, or a desire of the resource + owner to group representations by some self-determined set of + equivalency rather than unique sequences of data. +</t> +<t> + An origin server <bcp14>SHOULD</bcp14> change a weak entity tag whenever it + considers prior representations to be unacceptable as a substitute for + the current representation. In other words, a weak entity tag ought to + change whenever the origin server wants caches to invalidate old + responses. +</t> +<t> + For example, the representation of a weather report that changes in + content every second, based on dynamic measurements, might be grouped + into sets of equivalent representations (from the origin server's + perspective) with the same weak validator in order to allow cached + representations to be valid for a reasonable period of time (perhaps + adjusted dynamically based on server load or weather quality). + Likewise, a representation's modification time, if defined with only + one-second resolution, might be a weak validator if it is possible + for the representation to be modified twice during a single second and + retrieved between those modifications. 
+</t> +<t> + Likewise, a validator is weak if it is shared by two or more + representations of a given resource at the same time, unless those + representations have identical representation data. For example, if the + origin server sends the same validator for a representation with a gzip + content coding applied as it does for a representation with no content + coding, then that validator is weak. However, two simultaneous + representations might share the same strong validator if they differ only + in the representation metadata, such as when two different media types are + available for the same representation data. +</t> +<t> + Strong validators are usable for all conditional requests, including cache + validation, partial content ranges, and "lost update" avoidance. + Weak validators are only usable when the client does not require exact + equality with previously obtained representation data, such as when + validating a cache entry or limiting a web traversal to recent changes. +</t> +</section> + +<section title="Last-Modified" anchor="field.last-modified"> + <x:anchor-alias value="header.last-modified"/> + <iref primary="true" item="Fields" subitem="Last-Modified" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Last-Modified" x:for-anchor=""/><iref primary="true" item="Last-Modified header field" x:for-anchor=""/> + <x:anchor-alias value="Last-Modified"/> +<t> + The "Last-Modified" header field in a response provides a timestamp + indicating the date and time at which the origin server believes the + <x:ref>selected representation</x:ref> was last modified, as determined at the conclusion + of handling the request. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Last-Modified"/> + <x:ref>Last-Modified</x:ref> = <x:ref>HTTP-date</x:ref> +</sourcecode> +<t> + An example of its use is +</t> +<sourcecode type="http-message"> +Last-Modified: Tue, 15 Nov 1994 12:45:26 GMT +</sourcecode> + +<section title="Generation" anchor="lastmod.generation"> +<t> + An origin server <bcp14>SHOULD</bcp14> send Last-Modified for any selected + representation for which a last modification date can be reasonably + and consistently determined, since its use in conditional requests + and evaluating cache freshness (<xref target="CACHING"/>) can + substantially reduce unnecessary transfers and significantly + improve service availability and scalability. +</t> +<t> + A representation is typically the sum of many parts behind the + resource interface. The last-modified time would usually be + the most recent time that any of those parts were changed. + How that value is determined for any given resource is an + implementation detail beyond the scope of this specification. +</t> +<t> + An origin server <bcp14>SHOULD</bcp14> obtain the Last-Modified value of the + representation as close as possible to the time that it generates the + <x:ref>Date</x:ref> field value for its response. This allows a recipient to + make an accurate assessment of the representation's modification time, + especially if the representation changes near the time that the + response is generated. +</t> +<t> + An origin server with a clock (as defined in <xref target="http.date"/>) + <bcp14>MUST NOT</bcp14> generate a Last-Modified date that is later than the + server's time of message origination + (<x:ref>Date</x:ref>, <xref target="field.date"/>). 
+ If the last modification time is derived from implementation-specific + metadata that evaluates to some time in the future, according to the + origin server's clock, then the origin server <bcp14>MUST</bcp14> replace that + value with the message origination date. This prevents a future + modification date from having an adverse impact on cache validation. +</t> +<t> + An origin server without a clock <bcp14>MUST NOT</bcp14> generate a Last-Modified + date for a response unless that date value was assigned to the resource + by some other system (presumably one with a clock). +</t> +</section> + +<section title="Comparison" anchor="lastmod.comparison"> +<t> + A Last-Modified time, when used as a validator in a request, is + implicitly weak unless it is possible to deduce that it is strong, + using the following rules: +</t> +<ul> + <li>The validator is being compared by an origin server to the + actual current validator for the representation and,</li> + <li>That origin server reliably knows that the associated representation did + not change twice during the second covered by the presented + validator;</li> +</ul> +<t> + or +</t> +<ul> + <li>The validator is about to be used by a client in an + <x:ref>If-Modified-Since</x:ref>, + <x:ref>If-Unmodified-Since</x:ref>, or <x:ref>If-Range</x:ref> header + field, because the client has a cache entry for the associated + representation, and</li> + <li>That cache entry includes a <x:ref>Date</x:ref> value which is + at least one second after the Last-Modified value and + the client has reason to believe that they were generated by the + same clock or that there is enough difference between the Last-Modified + and Date values to make clock synchronization issues unlikely;</li> +</ul> +<t> + or +</t> +<ul> + <li>The validator is being compared by an intermediate cache to the + validator stored in its cache entry for the representation, and</li> + <li>That cache entry includes a <x:ref>Date</x:ref> value which is + at least one second after the Last-Modified value and + the cache has reason to believe that they were generated by the + same clock or that there is enough difference between the Last-Modified + and Date values to make clock synchronization issues unlikely.</li> +</ul> +<t> + This method relies on the fact that if two different responses were + sent by the origin server during the same second, but both had the + same Last-Modified time, then at least one of those responses would + have a <x:ref>Date</x:ref> value equal to its Last-Modified time. +</t> +</section> +</section> + +<section title="ETag" anchor="field.etag"> + <x:anchor-alias value="header.etag"/> + <iref primary="true" item="Fields" subitem="ETag" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="ETag" x:for-anchor=""/><iref primary="true" item="Trailer Fields" subitem="ETag" x:for-anchor=""/><iref primary="true" item="ETag field" x:for-anchor=""/> + <x:anchor-alias value="ETag"/> + <x:anchor-alias value="entity-tag"/> + <x:anchor-alias value="opaque-tag"/> + <x:anchor-alias value="weak"/> + <x:anchor-alias value="etagc"/> +<t> + The "ETag" field in a response provides the current entity tag for + the <x:ref>selected representation</x:ref>, as determined at the conclusion of handling + the request. 
+ An entity tag is an opaque validator for differentiating between + multiple representations of the same resource, regardless of whether + those multiple representations are due to resource state changes over + time, content negotiation resulting in multiple representations being + valid at the same time, or both. An entity tag consists of an opaque + quoted string, possibly prefixed by a weakness indicator. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="ETag"/><iref primary="true" item="Grammar" subitem="entity-tag"/><iref primary="true" item="Grammar" subitem="weak"/><iref primary="true" item="Grammar" subitem="opaque-tag"/><iref primary="true" item="Grammar" subitem="etagc"/> + <x:ref>ETag</x:ref> = <x:ref>entity-tag</x:ref> + + <x:ref>entity-tag</x:ref> = [ <x:ref>weak</x:ref> ] <x:ref>opaque-tag</x:ref> + <x:ref>weak</x:ref> = %s"W/" + <x:ref>opaque-tag</x:ref> = <x:ref>DQUOTE</x:ref> *<x:ref>etagc</x:ref> <x:ref>DQUOTE</x:ref> + <x:ref>etagc</x:ref> = %x21 / %x23-7E / <x:ref>obs-text</x:ref> + ; <x:ref>VCHAR</x:ref> except double quotes, plus obs-text +</sourcecode> +<aside> + <t> + <x:h>Note:</x:h> Previously, opaque-tag was defined to be a quoted-string + (<xref target="RFC2616" x:fmt="," x:sec="3.11"/>); thus, some recipients + might perform backslash unescaping. Servers therefore ought to avoid + backslash characters in entity tags. + </t> +</aside> +<t> + An entity tag can be more reliable for validation than a modification + date in situations where it is inconvenient to store modification + dates, where the one-second resolution of HTTP-date values is not + sufficient, or where modification dates are not consistently maintained. +</t> +<t> + Examples: +</t> +<sourcecode type="http-message"> +ETag: "xyzzy" +ETag: W/"xyzzy" +ETag: "" +</sourcecode> +<t> + An entity tag can be either a weak or strong validator, with + strong being the default. If an origin server provides an entity tag + for a representation and the generation of that entity tag does not satisfy + all of the characteristics of a strong validator + (<xref target="weak.and.strong.validators"/>), then the origin server + <bcp14>MUST</bcp14> mark the entity tag as weak by prefixing its opaque value + with "W/" (case-sensitive). +</t> +<t> + A sender <bcp14>MAY</bcp14> send the ETag field in a trailer section (see + <xref target="trailer.fields"/>). However, since trailers are often + ignored, it is preferable to send ETag as a header field unless the + entity tag is generated while sending the content. +</t> + +<section title="Generation" anchor="entity.tag.generation"> +<t> + The principle behind entity tags is that only the service author + knows the implementation of a resource well enough to select the + most accurate and efficient validation mechanism for that resource, + and that any such mechanism can be mapped to a simple sequence of + octets for easy comparison. Since the value is opaque, there is no + need for the client to be aware of how each entity tag is constructed. +</t> +<t> + For example, a resource that has implementation-specific versioning + applied to all changes might use an internal revision number, perhaps + combined with a variance identifier for content negotiation, to + accurately differentiate between representations. + Other implementations might use a collision-resistant hash of + representation content, a combination of various file attributes, or + a modification timestamp that has sub-second resolution. 
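+</t>
+<t>
+ As a non-normative illustration of the hash-based approach mentioned
+ above, the following Python sketch derives an opaque-tag from the
+ representation data together with the metadata that distinguishes
+ negotiated variants; the helper name and the choice of SHA-256 are
+ specific to this example:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: a strong entity tag derived from the representation
+# data plus the variant-selecting metadata, optionally marked weak.
+import hashlib
+
+def make_etag(representation_data, content_type, content_encoding="", weak=False):
+    h = hashlib.sha256()
+    h.update(representation_data)
+    # Distinct variants (e.g. gzip-coded vs. unencoded) must not share a
+    # strong entity tag, so the distinguishing metadata is hashed as well.
+    h.update(content_type.encode("ascii"))
+    h.update(content_encoding.encode("ascii"))
+    opaque = '"' + h.hexdigest()[:16] + '"'
+    return "W/" + opaque if weak else opaque
+
+plain = make_etag(b"Hello World!", "text/plain")
+gzipped = make_etag(b"Hello World!", "text/plain", "gzip")
+assert plain != gzipped  # each variant gets its own opaque-tag
+</sourcecode>
+<t>
+ Whether such a value is computed on each request or stored alongside the
+ representation is an implementation trade-off of the kind noted above.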
+</t> +<t> + An origin server <bcp14>SHOULD</bcp14> send an ETag for any selected representation + for which detection of changes can be reasonably and consistently + determined, since the entity tag's use in conditional requests and + evaluating cache freshness (<xref target="CACHING"/>) can + substantially reduce unnecessary transfers and significantly + improve service availability, scalability, and reliability. +</t> +</section> + +<section title="Comparison" anchor="entity.tag.comparison"> + <x:anchor-alias value="validator.comparison"/> + <x:anchor-alias value="strong comparison"/> + <x:anchor-alias value="weak comparison"/> +<t> + There are two entity tag comparison functions, depending on whether or not + the comparison context allows the use of weak validators: +</t> +<dl> + <dt> + <x:dfn>Strong comparison</x:dfn>: + </dt> + <dd> + two entity tags are equivalent if both are not weak and their opaque-tags + match character-by-character. + </dd> + <dt> + <x:dfn>Weak comparison</x:dfn>: + </dt> + <dd> + two entity tags are equivalent if their opaque-tags match + character-by-character, regardless of either or both being tagged as "weak". + </dd> +</dl> +<t> + The example below shows the results for a set of entity tag pairs and both + the weak and strong comparison function results: +</t> +<table align="left"> + <thead> + <tr> + <th>ETag 1</th> + <th>ETag 2</th> + <th>Strong Comparison</th> + <th>Weak Comparison</th> + </tr> + </thead> + <tbody> + <tr> + <td>W/"1"</td> + <td>W/"1"</td> + <td>no match</td> + <td>match</td> + </tr> + <tr> + <td>W/"1"</td> + <td>W/"2"</td> + <td>no match</td> + <td>no match</td> + </tr> + <tr> + <td>W/"1"</td> + <td>"1"</td> + <td>no match</td> + <td>match</td> + </tr> + <tr> + <td>"1"</td> + <td>"1"</td> + <td>match</td> + <td>match</td> + </tr> + </tbody> +</table> +</section> + +<section title="Example: Entity Tags Varying on Content-Negotiated Resources" anchor="example.entity.tag.vs.conneg"> +<t> + Consider a resource that is subject to content negotiation + (<xref target="content.negotiation"/>), and where the representations sent in response to + a GET request vary based on the <x:ref>Accept-Encoding</x:ref> request + header field (<xref target="field.accept-encoding"/>): +</t> +<t> + &gt;&gt; Request: +</t> +<sourcecode type="http-message"> +GET /index HTTP/1.1 +Host: www.example.com +Accept-Encoding: gzip + +</sourcecode> +<t> + In this case, the response might or might not use the gzip content coding. + If it does not, the response might look like: +</t> +<t> + &gt;&gt; Response: +</t> +<sourcecode type="http-message"> +HTTP/1.1 200 OK +Date: Fri, 26 Mar 2010 00:05:00 GMT +ETag: "123-a" +Content-Length: <x:length-of target="exbody2"/> +Vary: Accept-Encoding +Content-Type: text/plain + +<x:span anchor="exbody2">Hello World! +Hello World! +Hello World! +Hello World! +Hello World! 
+</x:span></sourcecode> +<t> + An alternative representation that does use gzip content coding would be: +</t> +<t> + &gt;&gt; Response: +</t> +<sourcecode type="http-message"> +HTTP/1.1 200 OK +Date: Fri, 26 Mar 2010 00:05:00 GMT +ETag: "123-b" +Content-Length: 43 +Vary: Accept-Encoding +Content-Type: text/plain +Content-Encoding: gzip + +<spanx>...binary data...</spanx></sourcecode> +<aside> + <t> + <x:h>Note:</x:h> Content codings are a property of the representation data, + so a strong entity tag for a content-encoded representation has to be + distinct from the entity tag of an unencoded representation to prevent + potential conflicts during cache updates and range requests. In contrast, + transfer codings (<xref target="HTTP11" x:rel="#transfer.codings"/>) apply only during message transfer + and do not result in distinct entity tags. + </t> +</aside> +</section> +</section> +</section> +</section> + +<section title="Methods" anchor="methods"> + +<section title="Overview" anchor="method.overview"> + <x:anchor-alias value="method"/> +<t> + The request method token is the primary source of request semantics; + it indicates the purpose for which the client has made this request + and what is expected by the client as a successful result. +</t> +<t> + The request method's semantics might be further specialized by the + semantics of some header fields when present in a request + if those additional semantics do not conflict with the method. + For example, a client can send conditional request header fields + (<xref target="preconditions"/>) to make the requested + action conditional on the current state of the target resource. +</t> +<t> + HTTP is designed to be usable as an interface to distributed + object systems. The request method invokes an action to be applied to + a <x:ref>target resource</x:ref> in much the same way that a remote + method invocation can be sent to an identified object. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="method"/> + <x:ref>method</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + The method token is case-sensitive because it might be used as a gateway + to object-based systems with case-sensitive method names. By convention, + standardized methods are defined in all-uppercase US-ASCII letters. +</t> +<t> + Unlike distributed objects, the standardized request methods in HTTP are + not resource-specific, since uniform interfaces provide for better + visibility and reuse in network-based systems <xref target="REST"/>. + Once defined, a standardized method ought to have the same semantics when + applied to any resource, though each resource determines for itself + whether those semantics are implemented or allowed. +</t> +<t> + This specification defines a number of standardized methods that are + commonly used in HTTP, as outlined by the following table. 
+</t> +<table align="left" anchor="table.of.methods"> + <thead> + <tr> + <th>Method Name</th> + <th>Description</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>GET</td> + <td>Transfer a current representation of the target resource.</td> + <td><xref target="GET" format="counter"/></td> + </tr> + <tr> + <td>HEAD</td> + <td>Same as GET, but do not transfer the response content.</td> + <td><xref target="HEAD" format="counter"/></td> + </tr> + <tr> + <td>POST</td> + <td>Perform resource-specific processing on the request content.</td> + <td><xref target="POST" format="counter"/></td> + </tr> + <tr> + <td>PUT</td> + <td>Replace all current representations of the target resource with + the request content.</td> + <td><xref target="PUT" format="counter"/></td> + </tr> + <tr> + <td>DELETE</td> + <td>Remove all current representations of the target resource.</td> + <td><xref target="DELETE" format="counter"/></td> + </tr> + <tr> + <td>CONNECT</td> + <td>Establish a tunnel to the server identified by the target resource.</td> + <td><xref target="CONNECT" format="counter"/></td> + </tr> + <tr> + <td>OPTIONS</td> + <td>Describe the communication options for the target resource.</td> + <td><xref target="OPTIONS" format="counter"/></td> + </tr> + <tr> + <td>TRACE</td> + <td>Perform a message loop-back test along the path to the target resource.</td> + <td><xref target="TRACE" format="counter"/></td> + </tr> + </tbody> +</table> +<t> + All general-purpose servers <bcp14>MUST</bcp14> support the methods GET and HEAD. + All other methods are <bcp14>OPTIONAL</bcp14>. +</t> +<t> + The set of methods allowed by a target resource can be listed in an + <x:ref>Allow</x:ref> header field (<xref target="field.allow"/>). + However, the set of allowed methods can change dynamically. + An origin server that receives a request method that is unrecognized or + not implemented <bcp14>SHOULD</bcp14> respond with the + <x:ref>501 (Not Implemented)</x:ref> status code. + An origin server that receives a request method that is recognized and + implemented, but not allowed for the target resource, <bcp14>SHOULD</bcp14> respond + with the <x:ref>405 (Method Not Allowed)</x:ref> status code. +</t> +<t> + Additional methods, outside the scope of this specification, have been + specified for use in HTTP. All such methods ought to be registered + within the "Hypertext Transfer Protocol (HTTP) Method Registry", + as described in <xref target="method.extensibility"/>. +</t> +</section> + +<section title="Common Method Properties" anchor="method.properties"> + +<section title="Safe Methods" anchor="safe.methods"> + <iref item="safe" primary="true"/> + <x:anchor-alias value="safe"/> +<t> + Request methods are considered <x:dfn>safe</x:dfn> if + their defined semantics are essentially read-only; i.e., the client does + not request, and does not expect, any state change on the origin server + as a result of applying a safe method to a target resource. Likewise, + reasonable use of a safe method is not expected to cause any harm, + loss of property, or unusual burden on the origin server. +</t> +<t> + This definition of safe methods does not prevent an implementation from + including behavior that is potentially harmful, that is not entirely read-only, + or that causes side effects while invoking a safe method. What is + important, however, is that the client did not request that additional + behavior and cannot be held accountable for it. 
For example, + most servers append request information to access log files at the + completion of every response, regardless of the method, and that is + considered safe even though the log storage might become full and cause + the server to fail. Likewise, a safe request initiated by selecting an + advertisement on the Web will often have the side effect of charging an + advertising account. +</t> +<t> + Of the request methods defined by this specification, the + <x:ref>GET</x:ref>, <x:ref>HEAD</x:ref>, <x:ref>OPTIONS</x:ref>, and + <x:ref>TRACE</x:ref> methods are defined to be safe. +</t> +<t> + The purpose of distinguishing between safe and unsafe methods is to + allow automated retrieval processes (spiders) and cache performance + optimization (pre-fetching) to work without fear of causing harm. + In addition, it allows a user agent to apply appropriate constraints + on the automated use of unsafe methods when processing potentially + untrusted content. +</t> +<t> + A user agent <bcp14>SHOULD</bcp14> distinguish between safe and unsafe methods when + presenting potential actions to a user, such that the user can be made + aware of an unsafe action before it is requested. +</t> +<t> + When a resource is constructed such that parameters within the target URI + have the effect of selecting an action, it is the resource + owner's responsibility to ensure that the action is consistent with the + request method semantics. + For example, it is common for Web-based content editing software + to use actions within query parameters, such as "page?do=delete". + If the purpose of such a resource is to perform an unsafe action, then + the resource owner <bcp14>MUST</bcp14> disable or disallow that action when it is + accessed using a safe request method. Failure to do so will result in + unfortunate side effects when automated processes perform a GET on + every URI reference for the sake of link maintenance, pre-fetching, + building a search index, etc. +</t> +</section> + +<section title="Idempotent Methods" anchor="idempotent.methods"> +<iref item="idempotent" primary="true"/> +<t> + A request method is considered + <x:dfn anchor="idempotent">idempotent</x:dfn> + if the intended effect on the server of multiple identical requests with + that method is the same as the effect for a single such request. + Of the request methods defined by this + specification, <x:ref>PUT</x:ref>, <x:ref>DELETE</x:ref>, and safe request + methods are idempotent. +</t> +<t> + Like the definition of safe, the idempotent property only applies to + what has been requested by the user; a server is free to log each request + separately, retain a revision control history, or implement other + non-idempotent side effects for each idempotent request. +</t> +<t> + Idempotent methods are distinguished because the request can be repeated + automatically if a communication failure occurs before the client is + able to read the server's response. For example, if a client sends a PUT + request and the underlying connection is closed before any response is + received, then the client can establish a new connection and retry the + idempotent request. It knows that repeating the request will have + the same intended effect, even if the original request succeeded, though + the response might differ. 
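+</t>
+<t>
+ That retry behaviour can be sketched, non-normatively, as follows; the
+ helper name, the single-retry policy, and the use of Python's
+ http.client module are choices made for this example only:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: retry once after a connection failure, but only
+# when the request method is known to be idempotent.
+import http.client
+
+IDEMPOTENT_METHODS = {"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}
+
+def send_with_retry(host, method, target, body=None, headers=None):
+    attempts = 2 if method in IDEMPOTENT_METHODS else 1
+    last_error = None
+    for _ in range(attempts):
+        conn = http.client.HTTPConnection(host)
+        try:
+            conn.request(method, target, body=body, headers=headers or {})
+            response = conn.getresponse()
+            return response.status, response.read()
+        except (ConnectionError, http.client.HTTPException) as exc:
+            # The connection failed before a response could be read; an
+            # idempotent request can be repeated on a new connection.
+            last_error = exc
+        finally:
+            conn.close()
+    raise last_error
+</sourcecode>
+<t>
+ As noted above, the original request may already have succeeded on the
+ origin server; repeating it is acceptable only because the repeated
+ request has the same intended effect.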
+</t> +<t> + A client <bcp14>SHOULD NOT</bcp14> automatically retry a request with a non-idempotent + method unless it has some means to know that the request semantics are + actually idempotent, regardless of the method, or some means to detect that + the original request was never applied. +</t> +<t> + For example, a user agent can repeat a POST request automatically if it + knows (through design or configuration) that the request is safe for that + resource. Likewise, a user agent designed specifically to operate on + a version control repository might be able to recover from partial failure + conditions by checking the target resource revision(s) after a failed + connection, reverting or fixing any changes that were partially applied, + and then automatically retrying the requests that failed. +</t> +<t> + Some clients take a riskier approach and attempt to guess when an + automatic retry is possible. For example, a client might automatically + retry a POST request if the underlying transport connection closed before + any part of a response is received, particularly if an idle persistent + connection was used. +</t> +<t> + A proxy <bcp14>MUST NOT</bcp14> automatically retry non-idempotent requests. + A client <bcp14>SHOULD NOT</bcp14> automatically retry a failed automatic retry. +</t> +</section> + +<section title="Methods and Caching" anchor="cacheable.methods"> +<t> + For a cache to store and use a response, the associated method needs to + explicitly allow caching and to detail under what conditions a response can + be used to satisfy subsequent requests; a method definition that does not + do so cannot be cached. For additional requirements see <xref target="CACHING"/>. +</t> +<t> + This specification defines caching semantics for GET, HEAD, and POST, + although the overwhelming majority of cache implementations only support + GET and HEAD. +</t> +</section> +</section> + +<section title="Method Definitions" anchor="method.definitions"> + +<section title="GET" anchor="GET"> + <rdf:Description> + <safe xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</safe> + <idempotent xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</idempotent> + </rdf:Description> + <iref primary="true" item="GET method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="GET" x:for-anchor=""/> +<t> + The GET method requests transfer of a current + <x:ref>selected representation</x:ref> for the + <x:ref>target resource</x:ref>. + A successful response reflects the quality of "sameness" identified by + the target URI (<xref target="URI" section="1.2.2"/>). Hence, + retrieving identifiable information via HTTP is usually performed by + making a GET request on an identifier associated with the potential for + providing that information in a <x:ref>200 (OK)</x:ref> response. +</t> +<t> + GET is the primary mechanism of information retrieval and the focus of + almost all performance optimizations. Applications that produce a URI for + each important resource can benefit from those optimizations while enabling + their reuse by other applications, creating a network effect that promotes + further expansion of the Web. +</t> +<t> + It is tempting to think of resource identifiers as remote file system + pathnames and of representations as being a copy of the contents of such + files. In fact, that is how many resources are implemented (see + <xref target="attack.pathname"/> for related security considerations). + However, there are no such limitations in practice. 
+</t> +<t> + The HTTP interface for + a resource is just as likely to be implemented as a tree of content + objects, a programmatic view on various database records, or a gateway to + other information systems. Even when the URI mapping mechanism is tied to a + file system, an origin server might be configured to execute the files with + the request as input and send the output as the representation rather than + transfer the files directly. Regardless, only the origin server needs to + know how each resource identifier corresponds to an implementation + and how that implementation manages to select and send a current + representation of the target resource. +</t> +<t> + A client can alter the semantics of GET to be a "range request", requesting + transfer of only some part(s) of the selected representation, by sending a + <x:ref>Range</x:ref> header field in the request (<xref target="field.range"/>). +</t> +<t> + Although request message framing is independent of the method used, + content received in a GET request has no generally defined semantics, + cannot alter the meaning or target of the request, and might lead some + implementations to reject the request and close the connection because of + its potential as a request smuggling attack + (<xref target="HTTP11" x:rel="#request.smuggling"/>). + A client <bcp14>SHOULD NOT</bcp14> generate content in a GET request unless it is + made directly to an origin server that has previously indicated, + in or out of band, that such a request has a purpose and will be adequately + supported. An origin server <bcp14>SHOULD NOT</bcp14> rely on private agreements to + receive content, since participants in HTTP communication are often + unaware of intermediaries along the request chain. +</t> +<t> + The response to a GET request is cacheable; a cache <bcp14>MAY</bcp14> use it to satisfy + subsequent GET and HEAD requests unless otherwise indicated by the + Cache-Control header field (<xref target="CACHING" x:rel="#field.cache-control"/>). +</t> +<t> + When information retrieval is performed with a mechanism that constructs a + target URI from user-provided information, such as the query fields of a + form using GET, potentially sensitive data might be provided that would not + be appropriate for disclosure within a URI + (see <xref target="sensitive.information.in.uris"/>). In some cases, the + data can be filtered or transformed such that it would not reveal such + information. In others, particularly when there is no benefit from caching + a response, using the POST method (<xref target="POST"/>) instead of GET + can transmit such information in the request content rather than within + the target URI. +</t> +</section> + +<section title="HEAD" anchor="HEAD"> + <rdf:Description> + <safe xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</safe> + <idempotent xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</idempotent> + </rdf:Description> + <iref primary="true" item="HEAD method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="HEAD" x:for-anchor=""/> +<t> + The HEAD method is identical to GET except that the server <bcp14>MUST NOT</bcp14> + send content in the response. HEAD is used to obtain metadata about the + <x:ref>selected representation</x:ref> without transferring its + representation data, often for the sake of testing hypertext links or + finding recent modifications. 
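+</t>
+<t>
+ A non-normative sketch of that usage pattern, using Python's http.client
+ module to read a resource's validators and length without transferring
+ its data (the host and path below are placeholders for this example):
+</t>
+<sourcecode type="python">
+# Non-normative sketch: use HEAD to obtain representation metadata only.
+import http.client
+
+def fetch_metadata(host, target):
+    conn = http.client.HTTPConnection(host)
+    try:
+        conn.request("HEAD", target)
+        response = conn.getresponse()
+        return {
+            "status": response.status,
+            "etag": response.getheader("ETag"),
+            "last_modified": response.getheader("Last-Modified"),
+            "content_length": response.getheader("Content-Length"),
+        }
+    finally:
+        conn.close()
+
+# e.g. fetch_metadata("www.example.com", "/index")
+</sourcecode>
+<t>
+ If the metadata indicates that the representation has changed, the client
+ can follow up with a GET, or a conditional GET, for the data itself.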
+</t> +<t> + The server <bcp14>SHOULD</bcp14> send the same header fields in response to a HEAD + request as it would have sent if the request method had been GET. + However, a server <bcp14>MAY</bcp14> omit header fields for which a value is determined + only while generating the content. For example, some servers buffer a + dynamic response to GET until a minimum amount of data is generated so + that they can more efficiently delimit small responses or make late + decisions with regard to content selection. Such a response to GET might + contain <x:ref>Content-Length</x:ref> and <x:ref>Vary</x:ref> fields, for + example, that are not generated within a HEAD response. These minor + inconsistencies are considered preferable to generating and discarding the + content for a HEAD request, since HEAD is usually requested for the + sake of efficiency. +</t> +<t> + Although request message framing is independent of the method used, + content received in a HEAD request has no generally defined semantics, + cannot alter the meaning or target of the request, and might lead some + implementations to reject the request and close the connection because of + its potential as a request smuggling attack + (<xref target="HTTP11" x:rel="#request.smuggling"/>). + A client <bcp14>SHOULD NOT</bcp14> generate content in a HEAD request unless it is + made directly to an origin server that has previously indicated, + in or out of band, that such a request has a purpose and will be adequately + supported. An origin server <bcp14>SHOULD NOT</bcp14> rely on private agreements to + receive content, since participants in HTTP communication are often + unaware of intermediaries along the request chain. +</t> +<t> + The response to a HEAD request is cacheable; a cache <bcp14>MAY</bcp14> use it to + satisfy subsequent HEAD requests unless otherwise indicated by the + Cache-Control header field (<xref target="CACHING" x:rel="#field.cache-control"/>). + A HEAD response might also affect previously cached responses to GET; + see <xref target="CACHING" x:rel="#head.effects"/>. +</t> +</section> + +<section title="POST" anchor="POST"> + <iref primary="true" item="POST method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="POST" x:for-anchor=""/> +<t> + The POST method requests that the <x:ref>target resource</x:ref> process + the representation enclosed in the request according to the resource's own + specific semantics. For example, POST is used for the following functions + (among others): +</t> +<ul> + <li>Providing a block of data, such as the fields entered into an HTML + form, to a data-handling process;</li> + <li>Posting a message to a bulletin board, newsgroup, mailing list, blog, + or similar group of articles;</li> + <li>Creating a new resource that has yet to be identified by the origin + server; and</li> + <li>Appending data to a resource's existing representation(s).</li> +</ul> +<t> + An origin server indicates response semantics by choosing an appropriate + status code depending on the result of processing the POST request; + almost all of the status codes defined by this specification could be + received in a response to POST (the exceptions being <x:ref>206 (Partial Content)</x:ref>, + <x:ref>304 (Not Modified)</x:ref>, and <x:ref>416 (Range Not Satisfiable)</x:ref>). 
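+</t>
+<t>
+ As a non-normative illustration of the first use listed above, the
+ following Python sketch submits a block of form data to a data-handling
+ resource; the host, target, and field names are placeholders invented
+ for this example:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: POST form data and report how it was processed.
+import http.client
+from urllib.parse import urlencode
+
+def post_form(host, target, fields):
+    body = urlencode(fields)
+    headers = {"Content-Type": "application/x-www-form-urlencoded"}
+    conn = http.client.HTTPConnection(host)
+    try:
+        conn.request("POST", target, body=body, headers=headers)
+        response = conn.getresponse()
+        # The status code conveys the result; a Location field, when present,
+        # identifies a resource created while processing the request.
+        return response.status, response.getheader("Location")
+    finally:
+        conn.close()
+
+# e.g. post_form("www.example.com", "/orders", {"item": "book", "qty": "1"})
+</sourcecode>
+<t>
+ Only the enclosed representation and the resource's own processing
+ semantics differ for the other uses listed above.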
+</t> +<t> + If one or more resources has been created on the origin server as a result + of successfully processing a POST request, the origin server <bcp14>SHOULD</bcp14> send + a <x:ref>201 (Created)</x:ref> response containing a <x:ref>Location</x:ref> + header field that provides an identifier for the primary resource created + (<xref target="field.location"/>) and a representation that describes the + status of the request while referring to the new resource(s). +</t> +<t> + Responses to POST requests are only cacheable when they include explicit + freshness information (see <xref target="CACHING" x:rel="#calculating.freshness.lifetime"/>) and a + <x:ref>Content-Location</x:ref> header field that has the same value as + the POST's target URI (<xref target="field.content-location"/>). A cached POST response can be reused + to satisfy a later GET or HEAD request. In contrast, a POST request cannot + be satisfied by a cached POST response because POST is potentially unsafe; + see <xref target="CACHING" x:rel="#constructing.responses.from.caches"/>. +</t> +<t> + If the result of processing a POST would be equivalent to a representation + of an existing resource, an origin server <bcp14>MAY</bcp14> redirect the user agent to + that resource by sending a <x:ref>303 (See Other)</x:ref> response with the + existing resource's identifier in the <x:ref>Location</x:ref> field. + This has the benefits of providing the user agent a resource identifier + and transferring the representation via a method more amenable to shared + caching, though at the cost of an extra request if the user agent does not + already have the representation cached. +</t> +</section> + +<section title="PUT" anchor="PUT"> + <rdf:Description> + <idempotent xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</idempotent> + </rdf:Description> + <iref primary="true" item="PUT method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="PUT" x:for-anchor=""/> +<t> + The PUT method requests that the state of the <x:ref>target resource</x:ref> + be created or replaced with the state defined by the representation + enclosed in the request message content. A successful PUT of a given + representation would suggest that a subsequent GET on that same target + resource will result in an equivalent representation being sent in + a <x:ref>200 (OK)</x:ref> response. However, there is no guarantee that + such a state change will be observable, since the target resource might be + acted upon by other user agents in parallel, or might be subject to dynamic + processing by the origin server, before any subsequent GET is received. + A successful response only implies that the user agent's intent was + achieved at the time of its processing by the origin server. +</t> +<t> + If the target resource does not have a current representation and + the PUT successfully creates one, then the origin server <bcp14>MUST</bcp14> inform + the user agent by sending a <x:ref>201 (Created)</x:ref> response. If the + target resource does have a current representation and that representation is + successfully modified in accordance with the state of the enclosed + representation, then the origin server <bcp14>MUST</bcp14> send either a + <x:ref>200 (OK)</x:ref> or a <x:ref>204 (No Content)</x:ref> response to + indicate successful completion of the request. +</t> +<t> + An origin server <bcp14>SHOULD</bcp14> verify that the PUT representation is consistent + with its configured constraints for the target resource. 
For example, if + an origin server determines a resource's representation metadata based on + the URI, then the origin server needs to ensure that the content received + in a successful PUT request is consistent with that metadata. When a PUT + representation is inconsistent with the target resource, the origin + server <bcp14>SHOULD</bcp14> either make them consistent, by transforming the + representation or changing the resource configuration, or respond + with an appropriate error message containing sufficient information + to explain why the representation is unsuitable. The + <x:ref>409 (Conflict)</x:ref> or <x:ref>415 (Unsupported Media Type)</x:ref> + status codes are suggested, with the latter being specific to constraints on + <x:ref>Content-Type</x:ref> values. +</t> +<t> + For example, if the target resource is configured to always have a + <x:ref>Content-Type</x:ref> of "text/html" and the representation being PUT + has a Content-Type of "image/jpeg", the origin server ought to do one of: +</t> +<ol type="a"> + <li>reconfigure the target resource to reflect the new media type;</li> + <li>transform the PUT representation to a format consistent with that + of the resource before saving it as the new resource state; or,</li> + <li>reject the request with a <x:ref>415 (Unsupported Media Type)</x:ref> + response indicating that the target resource is limited to "text/html", + perhaps including a link to a different resource that would be a + suitable target for the new representation.</li> +</ol> +<t> + HTTP does not define exactly how a PUT method affects the state + of an origin server beyond what can be expressed by the intent of + the user agent request and the semantics of the origin server response. + It does not define what a resource might be, in any sense of that + word, beyond the interface provided via HTTP. It does not define + how resource state is "stored", nor how such storage might change + as a result of a change in resource state, nor how the origin server + translates resource state into representations. Generally speaking, + all implementation details behind the resource interface are + intentionally hidden by the server. +</t> +<t> + This extends to how header and trailer fields are stored; while common + header fields like <x:ref>Content-Type</x:ref> will typically be stored + and returned upon subsequent GET requests, header and trailer field + handling is specific to the resource that received the request. As a result, + an origin server <bcp14>SHOULD</bcp14> ignore unrecognized header and trailer fields + received in a PUT request (i.e., not save them as part of the resource + state). +</t> +<t> + An origin server <bcp14>MUST NOT</bcp14> send a validator field + (<xref target="response.validator"/>), such as an <x:ref>ETag</x:ref> or + <x:ref>Last-Modified</x:ref> field, in a successful response to PUT unless + the request's representation data was saved without any transformation + applied to the content (i.e., the resource's new representation data is + identical to the content received in the PUT request) and the + validator field value reflects the new representation. + This requirement allows a user agent to know when the representation it + sent (and retains in memory) is the result of the PUT, and thus it doesn't + need to be retrieved again from the origin server. The new validator(s) + received in the response can be used for future conditional requests in + order to prevent accidental overwrites (<xref target="preconditions"/>). 
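+</t>
+<t>
+ That interplay between PUT and validators can be illustrated with a
+ non-normative sketch: the client remembers the entity tag received
+ earlier and makes its next PUT conditional on it. The host, target, and
+ data below are placeholders, and the helper name is invented for this
+ example:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: a conditional PUT that detects a lost update
+# instead of silently overwriting a representation changed by someone else.
+import http.client
+
+def conditional_put(host, target, data, etag):
+    headers = {
+        "Content-Type": "text/plain",
+        "If-Match": etag,  # precondition: the resource still has this state
+    }
+    conn = http.client.HTTPConnection(host)
+    try:
+        conn.request("PUT", target, body=data, headers=headers)
+        response = conn.getresponse()
+        if response.status == 412:
+            # 412 (Precondition Failed): the stored state changed; fetch the
+            # current representation and validator before trying again.
+            return None
+        # On success, any returned ETag reflects the new representation and
+        # can be used for the next conditional request.
+        return response.getheader("ETag")
+    finally:
+        conn.close()
+</sourcecode>
+<t>
+ An entity tag received in such a response is usable this way only under
+ the conditions stated above, i.e., when the saved representation data is
+ identical to the content sent in the PUT request.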
+</t> +<t> + The fundamental difference between the POST and PUT methods is + highlighted by the different intent for the enclosed representation. + The target resource in a POST request is intended to handle the + enclosed representation according to the resource's own semantics, + whereas the enclosed representation in a PUT request is defined as + replacing the state of the target resource. Hence, the intent of PUT is + idempotent and visible to intermediaries, even though the exact effect is + only known by the origin server. +</t> +<t> + Proper interpretation of a PUT request presumes that the user agent knows + which target resource is desired. A service that selects a proper URI on + behalf of the client, after receiving a state-changing request, <bcp14>SHOULD</bcp14> be + implemented using the POST method rather than PUT. If the origin server + will not make the requested PUT state change to the target resource and + instead wishes to have it applied to a different resource, such as when the + resource has been moved to a different URI, then the origin server <bcp14>MUST</bcp14> + send an appropriate <x:ref>3xx (Redirection)</x:ref> response; the + user agent <bcp14>MAY</bcp14> then make its own decision regarding whether or not to + redirect the request. +</t> +<t> + A PUT request applied to the target resource can have side effects + on other resources. For example, an article might have a URI for + identifying "the current version" (a resource) that is separate + from the URIs identifying each particular version (different + resources that at one point shared the same state as the current version + resource). A successful PUT request on "the current version" URI might + therefore create a new version resource in addition to changing the + state of the target resource, and might also cause links to be added + between the related resources. +</t> +<t> + Some origin servers support use of the <x:ref>Content-Range</x:ref> header + field (<xref target="field.content-range"/>) as a request modifier to + perform a partial PUT, as described in <xref target="partial.PUT"/>. +</t> +<t> + Responses to the PUT method are not cacheable. If a successful PUT request + passes through a cache that has one or more stored responses for the + target URI, those stored responses will be invalidated + (see <xref target="CACHING" x:rel="#invalidation"/>). +</t> +</section> + +<section title="DELETE" anchor="DELETE"> + <rdf:Description> + <idempotent xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</idempotent> + </rdf:Description> + <iref primary="true" item="DELETE method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="DELETE" x:for-anchor=""/> +<t> + The DELETE method requests that the origin server remove the association + between the <x:ref>target resource</x:ref> and its current functionality. + In effect, this method is similar to the "rm" command in UNIX: it expresses a + deletion operation on the URI mapping of the origin server rather than an + expectation that the previously associated information be deleted. +</t> +<t> + If the target resource has one or more current representations, they might + or might not be destroyed by the origin server, and the associated storage + might or might not be reclaimed, depending entirely on the nature of the + resource and its implementation by the origin server (which are beyond the + scope of this specification). 
Likewise, other implementation aspects of a + resource might need to be deactivated or archived as a result of a DELETE, + such as database or gateway connections. In general, it is assumed that the + origin server will only allow DELETE on resources for which it has a + prescribed mechanism for accomplishing the deletion. +</t> +<t> + Relatively few resources allow the DELETE method — its primary use + is for remote authoring environments, where the user has some direction + regarding its effect. For example, a resource that was previously created + using a PUT request, or identified via the Location header field after a + <x:ref>201 (Created)</x:ref> response to a POST request, might allow a + corresponding DELETE request to undo those actions. Similarly, custom + user agent implementations that implement an authoring function, such as + revision control clients using HTTP for remote operations, might use + DELETE based on an assumption that the server's URI space has been crafted + to correspond to a version repository. +</t> +<t> + If a DELETE method is successfully applied, the origin server <bcp14>SHOULD</bcp14> send +</t> +<ul> + <li>a <x:ref>202 (Accepted)</x:ref> status code if the action will likely succeed but + has not yet been enacted,</li> + <li>a <x:ref>204 (No Content)</x:ref> status code if the action has been + enacted and no further information is to be supplied, or</li> + <li>a <x:ref>200 (OK)</x:ref> status code if the action has been enacted and + the response message includes a representation describing the status.</li> +</ul> +<t> + Although request message framing is independent of the method used, + content received in a DELETE request has no generally defined semantics, + cannot alter the meaning or target of the request, and might lead some + implementations to reject the request and close the connection because of + its potential as a request smuggling attack + (<xref target="HTTP11" x:rel="#request.smuggling"/>). + A client <bcp14>SHOULD NOT</bcp14> generate content in a DELETE request unless it is + made directly to an origin server that has previously indicated, + in or out of band, that such a request has a purpose and will be adequately + supported. An origin server <bcp14>SHOULD NOT</bcp14> rely on private agreements to + receive content, since participants in HTTP communication are often + unaware of intermediaries along the request chain. +</t> +<t> + Responses to the DELETE method are not cacheable. If a successful DELETE + request passes through a cache that has one or more stored responses for + the target URI, those stored responses will be invalidated (see + <xref target="CACHING" x:rel="#invalidation"/>). +</t> +</section> + +<section title="CONNECT" anchor="CONNECT"> + <iref primary="true" item="CONNECT method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="CONNECT" x:for-anchor=""/> +<t> + The CONNECT method requests that the recipient establish a tunnel to the + destination origin server identified by the request target and, if + successful, thereafter restrict its behavior to blind forwarding of + data, in both directions, until the tunnel is closed. + Tunnels are commonly used to create an end-to-end virtual connection, + through one or more proxies, which can then be secured using TLS + (Transport Layer Security, <xref target="TLS13"/>). +</t> +<t> + CONNECT uses a special form of request target, unique to this method, + consisting of only the host and port number of the tunnel destination, + separated by a colon. 
There is no default port; a client <bcp14>MUST</bcp14> send the + port number even if the CONNECT request is based on a URI reference that + contains an authority component with an elided port + (<xref target="uri.references"/>). For example, +</t> +<sourcecode type="http-message"> +CONNECT server.example.com:80 HTTP/1.1 +Host: server.example.com + +</sourcecode> +<t> + A server <bcp14>MUST</bcp14> reject a CONNECT request that targets an empty or invalid + port number, typically by responding with a 400 (Bad Request) status code. +</t> +<t> + Because CONNECT changes the request/response nature of an HTTP connection, + specific HTTP versions might have different ways of mapping its semantics + into the protocol's wire format. +</t> +<t> + CONNECT is intended for use in requests to a proxy. + The recipient can establish a tunnel either by directly connecting to + the server identified by the request target or, if configured to use + another proxy, by forwarding the CONNECT request to the next inbound proxy. + An origin server <bcp14>MAY</bcp14> accept a CONNECT request, but most origin servers + do not implement CONNECT. +</t> +<t> + Any <x:ref>2xx (Successful)</x:ref> response indicates + that the sender (and all inbound proxies) will switch to tunnel mode + immediately after the response header section; data received after that + header section is from the server identified by the request target. + Any response other than a successful response indicates that the tunnel + has not yet been formed. +</t> +<t> + A tunnel is closed when a tunnel intermediary detects that either side + has closed its connection: the intermediary <bcp14>MUST</bcp14> attempt to send any + outstanding data that came from the closed side to the other side, close + both connections, and then discard any remaining data left undelivered. +</t> +<t> + Proxy authentication might be used to establish the + authority to create a tunnel. For example, +</t> +<sourcecode type="http-message"> +CONNECT server.example.com:443 HTTP/1.1 +Host: server.example.com:443 +Proxy-Authorization: basic aGVsbG86d29ybGQ= + +</sourcecode> +<t> + There are significant risks in establishing a tunnel to arbitrary servers, + particularly when the destination is a well-known or reserved TCP port that + is not intended for Web traffic. For example, a CONNECT to + "example.com:25" would suggest that the proxy connect to the reserved + port for SMTP traffic; if allowed, that could trick the proxy into + relaying spam email. Proxies that support CONNECT <bcp14>SHOULD</bcp14> restrict its + use to a limited set of known ports or a configurable list of safe + request targets. +</t> +<t> + A server <bcp14>MUST NOT</bcp14> send any <x:ref>Transfer-Encoding</x:ref> or + <x:ref>Content-Length</x:ref> header fields in a + <x:ref>2xx (Successful)</x:ref> response to CONNECT. + A client <bcp14>MUST</bcp14> ignore any Content-Length or Transfer-Encoding header + fields received in a successful response to CONNECT. +</t> +<t> + A CONNECT request message does not have content. The interpretation of + data sent after the header section of the CONNECT request message is + specific to the version of HTTP in use. +</t> +<t> + Responses to the CONNECT method are not cacheable. 
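+</t>
+<t>
+  As a non-normative illustration of the requirements above (the
+  reason phrase and the absence of other header fields are merely one
+  possible choice), a proxy that has established the tunnel might
+  reply with nothing more than:
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 200 OK
+
+</sourcecode>
+<t>
+  after which the sender of that response forwards data blindly in
+  both directions until the tunnel is closed.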
+</t> +</section> + +<section title="OPTIONS" anchor="OPTIONS"> + <rdf:Description> + <safe xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</safe> + <idempotent xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</idempotent> + </rdf:Description> + <iref primary="true" item="OPTIONS method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="OPTIONS" x:for-anchor=""/> +<t> + The OPTIONS method requests information about the communication options + available for the target resource, at either the origin server or an + intervening intermediary. This method allows a client to determine the + options and/or requirements associated with a resource, or the capabilities + of a server, without implying a resource action. +</t> +<t> + An OPTIONS request with an asterisk ("*") as the request target + (<xref target="target.resource"/>) applies to the server in general rather than to a + specific resource. Since a server's communication options typically depend + on the resource, the "*" request is only useful as a "ping" or "no-op" + type of method; it does nothing beyond allowing the client to test + the capabilities of the server. For example, this can be used to test + a proxy for HTTP/1.1 conformance (or lack thereof). +</t> +<t> + If the request target is not an asterisk, the OPTIONS request applies + to the options that are available when communicating with the target + resource. +</t> +<t> + A server generating a successful response to OPTIONS <bcp14>SHOULD</bcp14> send any + header that might indicate optional features implemented by the + server and applicable to the target resource (e.g., <x:ref>Allow</x:ref>), + including potential extensions not defined by this specification. + The response content, if any, might also describe the communication options + in a machine or human-readable representation. A standard format for such a + representation is not defined by this specification, but might be defined by + future extensions to HTTP. +</t> +<t> + A client <bcp14>MAY</bcp14> send a <x:ref>Max-Forwards</x:ref> header field in an + OPTIONS request to target a specific recipient in the request chain (see + <xref target="field.max-forwards"/>). A proxy <bcp14>MUST NOT</bcp14> generate a + Max-Forwards header field while forwarding a request unless that request + was received with a Max-Forwards field. +</t> +<t> + A client that generates an OPTIONS request containing content + <bcp14>MUST</bcp14> send a valid <x:ref>Content-Type</x:ref> header field describing + the representation media type. Note that this specification does not define + any use for such content. +</t> +<t> + Responses to the OPTIONS method are not cacheable. +</t> +</section> + +<section title="TRACE" anchor="TRACE"> + <rdf:Description> + <safe xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</safe> + <idempotent xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</idempotent> + </rdf:Description> + <iref primary="true" item="TRACE method" x:for-anchor=""/> + <iref primary="true" item="Method" subitem="TRACE" x:for-anchor=""/> +<t> + The TRACE method requests a remote, application-level loop-back of the + request message. The final recipient of the request <bcp14>SHOULD</bcp14> reflect the + message received, excluding some fields described below, back to the client + as the content of a <x:ref>200 (OK)</x:ref> response. The "message/http" + format (<xref target="HTTP11" x:rel="#media.type.message.http"/>) is one way to do so. 
+ The final recipient is either the origin server or the first server to + receive a <x:ref>Max-Forwards</x:ref> value of zero (0) in the request + (<xref target="field.max-forwards"/>). +</t> +<t> + A client <bcp14>MUST NOT</bcp14> generate fields in a TRACE request containing + sensitive data that might be disclosed by the response. For example, it + would be foolish for a user agent to send stored user credentials + (<xref target="authentication"/>) or cookies <xref target="COOKIE"/> in a TRACE + request. The final recipient of the request <bcp14>SHOULD</bcp14> exclude any request + fields that are likely to contain sensitive data when that recipient + generates the response content. +</t> +<t> + TRACE allows the client to see what is being received at the other + end of the request chain and use that data for testing or diagnostic + information. The value of the <x:ref>Via</x:ref> header field (<xref target="field.via"/>) + is of particular interest, since it acts as a trace of the request chain. + Use of the <x:ref>Max-Forwards</x:ref> header field allows the client to + limit the length of the request chain, which is useful for testing a chain + of proxies forwarding messages in an infinite loop. +</t> +<t> + A client <bcp14>MUST NOT</bcp14> send content in a TRACE request. +</t> +<t> + Responses to the TRACE method are not cacheable. +</t> +</section> +</section> +</section> + +<section title="Message Context" anchor="context"> + +<section title="Request Context Fields" anchor="request.context"> + <x:anchor-alias value="request-header"/> +<t> + The request header fields below provide additional information about the + request context, including information about the user, user agent, and + resource behind the request. +</t> + +<section title="Expect" anchor="field.expect"> + <x:anchor-alias value="header.expect"/> + <iref primary="true" item="Fields" subitem="Expect" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Expect" x:for-anchor=""/><iref primary="true" item="Expect header field" x:for-anchor=""/> + <iref primary="true" item="100-continue (expect value)"/> + <x:anchor-alias value="Expect"/> + <x:anchor-alias value="expectation"/> + <x:anchor-alias value="100-continue"/> +<t> + The "Expect" header field in a request indicates a certain set of + behaviors (expectations) that need to be supported by the server in + order to properly handle this request. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Expect"/> + <x:ref>Expect</x:ref> = #<x:ref>expectation</x:ref> + <x:ref>expectation</x:ref> = <x:ref>token</x:ref> [ "=" ( <x:ref>token</x:ref> / <x:ref>quoted-string</x:ref> ) <x:ref>parameters</x:ref> ] +</sourcecode> +<t> + The Expect field value is case-insensitive. +</t> +<t> + The only expectation defined by this specification is "100-continue" + (with no defined parameters). +</t> +<t> + A server that receives an Expect field value containing a member other than + <x:ref>100-continue</x:ref> <bcp14>MAY</bcp14> respond with a + <x:ref>417 (Expectation Failed)</x:ref> status code to indicate that the + unexpected expectation cannot be met. +</t> +<t> + A <x:dfn>100-continue</x:dfn> expectation informs recipients that the + client is about to send (presumably large) content in this request + and wishes to receive a <x:ref>100 (Continue)</x:ref> interim response if + the method, target URI, and header fields are not sufficient to cause an immediate + success, redirect, or error response. 
This allows the client to wait for an + indication that it is worthwhile to send the content before actually + doing so, which can improve efficiency when the data is huge or + when the client anticipates that an error is likely (e.g., when sending a + state-changing method, for the first time, without previously verified + authentication credentials). +</t> +<t> + For example, a request that begins with +</t> +<sourcecode type="http-message"> +PUT /somewhere/fun HTTP/1.1 +Host: origin.example.com +Content-Type: video/h264 +Content-Length: 1234567890987 +Expect: 100-continue + +</sourcecode> +<t> + allows the origin server to immediately respond with an error message, such + as <x:ref>401 (Unauthorized)</x:ref> or <x:ref>405 (Method Not Allowed)</x:ref>, + before the client starts filling the pipes with an unnecessary data + transfer. +</t> +<t> + Requirements for clients: +</t> +<ul> + <li> + A client <bcp14>MUST NOT</bcp14> generate a 100-continue expectation in a request that + does not include content. + </li> + <li> + A client that will wait for a <x:ref>100 (Continue)</x:ref> response + before sending the request content <bcp14>MUST</bcp14> send an + <x:ref>Expect</x:ref> header field containing a 100-continue expectation. + </li> + <li> + A client that sends a 100-continue expectation is not required to wait + for any specific length of time; such a client <bcp14>MAY</bcp14> proceed to send the + content even if it has not yet received a response. Furthermore, + since <x:ref>100 (Continue)</x:ref> responses cannot be sent through an + HTTP/1.0 intermediary, such a client <bcp14>SHOULD NOT</bcp14> wait for an indefinite + period before sending the content. + </li> + <li> + A client that receives a <x:ref>417 (Expectation Failed)</x:ref> status + code in response to a request containing a 100-continue expectation + <bcp14>SHOULD</bcp14> repeat that request without a 100-continue expectation, since + the 417 response merely indicates that the response chain does not + support expectations (e.g., it passes through an HTTP/1.0 server). + </li> +</ul> +<t> + Requirements for servers: +</t> +<ul> + <li> + A server that receives a 100-continue expectation in an HTTP/1.0 request + <bcp14>MUST</bcp14> ignore that expectation. + </li> + <li> + A server <bcp14>MAY</bcp14> omit sending a <x:ref>100 (Continue)</x:ref> response if + it has already received some or all of the content for the + corresponding request, or if the framing indicates that there is no + content. + </li> + <li> + A server that sends a <x:ref>100 (Continue)</x:ref> response <bcp14>MUST</bcp14> + ultimately send a final status code, once it receives and processes the + request content, unless the connection is closed prematurely. + </li> + <li> + A server that responds with a final status code before reading the + entire request content <bcp14>SHOULD</bcp14> indicate whether it intends to + close the connection (e.g., see <xref target="HTTP11" x:rel="#persistent.tear-down"/>) or + continue reading the request content. 
+ </li> +</ul> +<t> + Upon receiving an HTTP/1.1 (or later) request that has a method, target URI, + and complete header section that contains a 100-continue expectation and + an indication that request content will follow, an origin server <bcp14>MUST</bcp14> + send either: +</t> +<ul> + <li>an immediate response with a final status code, if that status can be + determined by examining just the method, target URI, and header fields, or</li> + <li>an immediate <x:ref>100 (Continue)</x:ref> response to encourage the client + to send the request content.</li> +</ul> +<t> + The origin server <bcp14>MUST NOT</bcp14> wait for the content + before sending the <x:ref>100 (Continue)</x:ref> response. +</t> +<t> + Upon receiving an HTTP/1.1 (or later) request that has a method, target URI, + and complete header section that contains a 100-continue expectation and + indicates a request content will follow, a proxy <bcp14>MUST</bcp14> either: +</t> +<ul> + <li>send an immediate + response with a final status code, if that status can be determined by + examining just the method, target URI, and header fields, or</li> + <li>forward the request toward the origin server by sending a corresponding + request-line and header section to the next inbound server.</li> +</ul> +<t> + If the proxy believes (from configuration or past interaction) that the + next inbound server only supports HTTP/1.0, the proxy <bcp14>MAY</bcp14> generate an + immediate <x:ref>100 (Continue)</x:ref> response to encourage the client to + begin sending the content. +</t> +</section> + +<section title="From" anchor="field.from"> + <x:anchor-alias value="header.from"/> + <iref primary="true" item="Fields" subitem="From" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="From" x:for-anchor=""/><iref primary="true" item="From header field" x:for-anchor=""/> + <x:anchor-alias value="From"/> + <x:anchor-alias value="mailbox"/> +<t> + The "From" header field contains an Internet email address for a human + user who controls the requesting user agent. The address ought to be + machine-usable, as defined by "mailbox" + in <xref x:sec="3.4" x:fmt="of" target="RFC5322"/>: +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="From"/> + <x:ref>From</x:ref> = <x:ref>mailbox</x:ref> + + <x:ref>mailbox</x:ref> = &lt;mailbox, see <xref x:sec="3.4" x:fmt="," target="RFC5322"/>&gt; +</sourcecode> +<t> + An example is: +</t> +<sourcecode type="http-message"> +From: spider-admin@example.org +</sourcecode> +<t> + The From header field is rarely sent by non-robotic user agents. + A user agent <bcp14>SHOULD NOT</bcp14> send a From header field without explicit + configuration by the user, since that might conflict with the user's + privacy interests or their site's security policy. +</t> +<t> + A robotic user agent <bcp14>SHOULD</bcp14> send a valid From header field so that the + person responsible for running the robot can be contacted if problems + occur on servers, such as if the robot is sending excessive, unwanted, + or invalid requests. +</t> +<t> + A server <bcp14>SHOULD NOT</bcp14> use the From header field for access control or + authentication, since its value is expected to be visible to anyone + receiving or observing the request and is often recorded within logfiles + and error reports without any expectation of privacy. 
+</t> +</section> + +<section title="Referer" anchor="field.referer"> + <x:anchor-alias value="header.referer"/> + <iref primary="true" item="Fields" subitem="Referer" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Referer" x:for-anchor=""/><iref primary="true" item="Referer header field" x:for-anchor=""/> + <x:anchor-alias value="Referer"/> +<t> + The "Referer" [sic] header field allows the user agent to specify a URI + reference for the resource from which the <x:ref>target URI</x:ref> was + obtained (i.e., the "referrer", though the field name is misspelled). + A user agent <bcp14>MUST NOT</bcp14> include the fragment and userinfo components + of the URI reference <xref target="URI"/>, if any, when generating the + Referer field value. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Referer"/> + <x:ref>Referer</x:ref> = <x:ref>absolute-URI</x:ref> / <x:ref>partial-URI</x:ref> +</sourcecode> +<t> + The field value is either an <x:ref>absolute-URI</x:ref> or a + <x:ref>partial-URI</x:ref>. In the latter case (<xref target="uri"/>), + the referenced URI is relative to the target URI + (<xref target="URI" x:fmt="," x:sec="5"/>). +</t> +<t> + The Referer header field allows servers to generate back-links to other + resources for simple analytics, logging, optimized caching, etc. It also + allows obsolete or mistyped links to be found for maintenance. Some servers + use the Referer header field as a means of denying links from other sites + (so-called "deep linking") or restricting cross-site request forgery (CSRF), + but not all requests contain it. +</t> +<t> + Example: +</t> +<sourcecode type="http-message"> +Referer: http://www.example.org/hypertext/Overview.html +</sourcecode> +<t> + If the target URI was obtained from a source that does not have its own + URI (e.g., input from the user keyboard, or an entry within the user's + bookmarks/favorites), the user agent <bcp14>MUST</bcp14> either exclude the Referer header field + or send it with a value of "about:blank". +</t> +<t> + The Referer header field value need not convey the full URI of the referring + resource; a user agent <bcp14>MAY</bcp14> truncate parts other than the referring origin. +</t> +<t> + The Referer header field has the potential to reveal information about the request + context or browsing history of the user, which is a privacy concern if the + referring resource's identifier reveals personal information (such as an + account name) or a resource that is supposed to be confidential (such as + behind a firewall or internal to a secured service). Most general-purpose + user agents do not send the Referer header field when the referring + resource is a local "file" or "data" URI. A user agent <bcp14>SHOULD NOT</bcp14> send a + <x:ref>Referer</x:ref> header field if the referring resource was accessed with + a secure protocol and the request target has an origin differing from that + of the referring resource, unless the referring resource explicitly allows + Referer to be sent. A user agent <bcp14>MUST NOT</bcp14> send a + <x:ref>Referer</x:ref> header field in an unsecured HTTP request if the + referring resource was accessed with a secure protocol. + See <xref target="sensitive.information.in.uris"/> for additional + security considerations. +</t> +<t> + Some intermediaries have been known to indiscriminately remove Referer + header fields from outgoing requests. 
This has the unfortunate side effect + of interfering with protection against CSRF attacks, which can be far + more harmful to their users. Intermediaries and user agent extensions that + wish to limit information disclosure in Referer ought to restrict their + changes to specific edits, such as replacing internal domain names with + pseudonyms or truncating the query and/or path components. + An intermediary <bcp14>SHOULD NOT</bcp14> modify or delete the Referer header field when + the field value shares the same scheme and host as the target URI. +</t> +</section> + +<section title="TE" anchor="field.te"> + <x:anchor-alias value="header.te"/> + <iref primary="true" item="Fields" subitem="TE" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="TE" x:for-anchor=""/><iref primary="true" item="TE header field" x:for-anchor=""/> + <x:anchor-alias value="TE"/> + <x:anchor-alias value="t-codings"/> + <x:anchor-alias value="transfer-coding"/> + <x:anchor-alias value="transfer-parameter"/> +<t> + The "TE" header field describes capabilities of the client with regard to + transfer codings and trailer sections. +</t> +<t> + As described in <xref target="trailer.fields"/>, a TE field with a + "trailers" member sent in a request indicates that the client will not + discard trailer fields. +</t> +<t> + TE is also used within HTTP/1.1 to advise servers about which transfer + codings the client is able to accept in a response. + As of publication, only HTTP/1.1 uses transfer codings + (see <xref target="HTTP11" x:rel="#transfer.codings"/>). +</t> +<t> + The TE field value is a list of members, with each member (aside from + "trailers") consisting of a transfer coding name token with an optional + weight indicating the client's relative preference for that + transfer coding (<xref target="quality.values"/>) and + optional parameters for that transfer coding. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="TE"/><iref primary="true" item="Grammar" subitem="t-codings"/><iref primary="true" item="Grammar" subitem="transfer-coding"/><iref primary="true" item="Grammar" subitem="transfer-parameter"/> + <x:ref>TE</x:ref> = #<x:ref>t-codings</x:ref> + <x:ref>t-codings</x:ref> = "trailers" / ( <x:ref>transfer-coding</x:ref> [ <x:ref>weight</x:ref> ] ) + <x:ref>transfer-coding</x:ref> = <x:ref>token</x:ref> *( <x:ref>OWS</x:ref> ";" <x:ref>OWS</x:ref> <x:ref>transfer-parameter</x:ref> ) + <x:ref>transfer-parameter</x:ref> = <x:ref>token</x:ref> <x:ref>BWS</x:ref> "=" <x:ref>BWS</x:ref> ( <x:ref>token</x:ref> / <x:ref>quoted-string</x:ref> ) +</sourcecode> +<t> + A sender of TE <bcp14>MUST</bcp14> also send a "TE" connection option within the + <x:ref>Connection</x:ref> header field (<xref target="field.connection"/>) + to inform intermediaries not to forward this field. 
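+</t>
+<t>
+  For example (non-normative; the transfer coding and weight shown are
+  illustrative only), an HTTP/1.1 client that will not discard trailer
+  fields and prefers the "deflate" transfer coding could send:
+</t>
+<sourcecode type="http-message">
+TE: trailers, deflate;q=0.5
+Connection: TE
+</sourcecode>
+<t>
+  Listing "TE" within Connection satisfies the requirement above and
+  keeps intermediaries from forwarding the field.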
+</t> +</section> + +<section title="User-Agent" anchor="field.user-agent"> + <x:anchor-alias value="header.user-agent"/> + <iref primary="true" item="Fields" subitem="User-Agent" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="User-Agent" x:for-anchor=""/><iref primary="true" item="User-Agent header field" x:for-anchor=""/> + <x:anchor-alias value="User-Agent"/> + <x:anchor-alias value="product"/> + <x:anchor-alias value="product-version"/> +<t> + The "User-Agent" header field contains information about the user agent + originating the request, which is often used by servers to help identify + the scope of reported interoperability problems, to work around or tailor + responses to avoid particular user agent limitations, and for analytics + regarding browser or operating system use. A user agent <bcp14>SHOULD</bcp14> send + a User-Agent header field in each request unless specifically configured not + to do so. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="User-Agent"/> + <x:ref>User-Agent</x:ref> = <x:ref>product</x:ref> *( <x:ref>RWS</x:ref> ( <x:ref>product</x:ref> / <x:ref>comment</x:ref> ) ) +</sourcecode> +<t> + The User-Agent field value consists of one or more product identifiers, + each followed by zero or more comments (<xref target="comments"/>), which together + identify the user agent software and its significant subproducts. + By convention, the product identifiers are listed in decreasing order of + their significance for identifying the user agent software. Each product + identifier consists of a name and optional version. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="product"/><iref primary="true" item="Grammar" subitem="product-version"/> + <x:ref>product</x:ref> = <x:ref>token</x:ref> ["/" <x:ref>product-version</x:ref>] + <x:ref>product-version</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + A sender <bcp14>SHOULD</bcp14> limit generated product identifiers to what is necessary + to identify the product; a sender <bcp14>MUST NOT</bcp14> generate advertising or other + nonessential information within the product identifier. + A sender <bcp14>SHOULD NOT</bcp14> generate information in <x:ref>product-version</x:ref> + that is not a version identifier (i.e., successive versions of the same + product name ought to differ only in the product-version portion of the + product identifier). +</t> +<t> + Example: +</t> +<sourcecode type="http-message"> +User-Agent: CERN-LineMode/2.15 libwww/2.17b3 +</sourcecode> +<t> + A user agent <bcp14>SHOULD NOT</bcp14> generate a User-Agent header field containing needlessly + fine-grained detail and <bcp14>SHOULD</bcp14> limit the addition of subproducts by third + parties. Overly long and detailed User-Agent field values increase request + latency and the risk of a user being identified against their wishes + ("fingerprinting"). +</t> +<t> + Likewise, implementations are encouraged not to use the product tokens of + other implementations in order to declare compatibility with them, as this + circumvents the purpose of the field. If a user agent masquerades as a + different user agent, recipients can assume that the user intentionally + desires to see responses tailored for that identified user agent, even + if they might not work as well for the actual user agent being used. 
+</t> +</section> +</section> + +<section title="Response Context Fields" anchor="response.context"> + <x:anchor-alias value="response-header"/> +<t> + The response header fields below provide additional information about the + response, beyond what is implied by the status code, including information + about the server, about the <x:ref>target resource</x:ref>, or about related + resources. +</t> + +<section title="Allow" anchor="field.allow"> + <x:anchor-alias value="header.allow"/> + <iref primary="true" item="Fields" subitem="Allow" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Allow" x:for-anchor=""/><iref primary="true" item="Allow header field" x:for-anchor=""/> + <x:anchor-alias value="Allow"/> +<t> + The "Allow" header field lists the set of methods advertised as + supported by the <x:ref>target resource</x:ref>. The purpose of this field + is strictly to inform the recipient of valid request methods associated + with the resource. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Allow"/> + <x:ref>Allow</x:ref> = #<x:ref>method</x:ref> +</sourcecode> +<t> + Example of use: +</t> +<sourcecode type="http-message"> +Allow: GET, HEAD, PUT +</sourcecode> +<t> + The actual set of allowed methods is defined by the origin server at the + time of each request. An origin server <bcp14>MUST</bcp14> generate an Allow header field in a + <x:ref>405 (Method Not Allowed)</x:ref> response and <bcp14>MAY</bcp14> do so in any + other response. An empty Allow field value indicates that the resource + allows no methods, which might occur in a 405 response if the resource has + been temporarily disabled by configuration. +</t> +<t> + A proxy <bcp14>MUST NOT</bcp14> modify the Allow header field — it does not need + to understand all of the indicated methods in order to handle them + according to the generic message handling rules. +</t> +</section> + +<section title="Location" anchor="field.location"> + <x:anchor-alias value="header.location"/> + <iref primary="true" item="Fields" subitem="Location" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Location" x:for-anchor=""/><iref primary="true" item="Location header field" x:for-anchor=""/> + <x:anchor-alias value="Location"/> +<t> + The "Location" header field is used in some responses to refer to a + specific resource in relation to the response. The type of relationship is + defined by the combination of request method and status code semantics. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Location"/> + <x:ref>Location</x:ref> = <x:ref>URI-reference</x:ref> +</sourcecode> +<t> + The field value consists of a single URI-reference. When it has the form + of a relative reference (<xref target="URI" x:fmt="," x:sec="4.2"/>), + the final value is computed by resolving it against the target + URI (<xref target="URI" x:fmt="," x:sec="5"/>). +</t> +<t> + For <x:ref>201 (Created)</x:ref> responses, the Location value refers to + the primary resource created by the request. + For <x:ref>3xx (Redirection)</x:ref> responses, the Location value refers + to the preferred target resource for automatically redirecting the request. 
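+</t>
+<t>
+  For example (non-normative; the URI is illustrative), a successful
+  POST that creates a new resource might be answered with:
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 201 Created
+Location: /collection/items/17
+Content-Length: 0
+
+</sourcecode>
+<t>
+  where the relative reference in Location is resolved against the
+  target URI to identify the resource that was created.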
+</t> +<t> + If the Location value provided in a <x:ref>3xx (Redirection)</x:ref> + response does not have a fragment component, a user agent <bcp14>MUST</bcp14> process the + redirection as if the value inherits the fragment component of the URI + reference used to generate the target URI (i.e., the redirection + inherits the original reference's fragment, if any). +</t> +<t> + For example, a GET request generated for the URI reference + "http://www.example.org/~tim" might result in a + <x:ref>303 (See Other)</x:ref> response containing the header field: +</t> +<sourcecode type="http-message"> +Location: /People.html#tim +</sourcecode> +<t> + which suggests that the user agent redirect to + "http://www.example.org/People.html#tim" +</t> +<t> + Likewise, a GET request generated for the URI reference + "http://www.example.org/index.html#larry" might result in a + <x:ref>301 (Moved Permanently)</x:ref> response containing the header + field: +</t> +<sourcecode type="http-message"> +Location: http://www.example.net/index.html +</sourcecode> +<t> + which suggests that the user agent redirect to + "http://www.example.net/index.html#larry", preserving the original fragment + identifier. +</t> +<t> + There are circumstances in which a fragment identifier in a Location + value would not be appropriate. For example, the Location header field in a + <x:ref>201 (Created)</x:ref> response is supposed to provide a URI that is + specific to the created resource. +</t> +<aside> + <t> + <x:h>Note:</x:h> Some recipients attempt to recover from Location header fields + that are not valid URI references. This specification does not mandate or + define such processing, but does allow it for the sake of robustness. + A Location field value cannot allow a list of members because the comma list separator + is a valid data character within a URI-reference. If an invalid message is sent with multiple + Location field lines, a recipient along the path might combine those field lines into + one value. Recovery of a valid Location field value from that situation is difficult and not + interoperable across implementations. + </t> +</aside> +<aside> + <t> + <x:h>Note:</x:h> The <x:ref>Content-Location</x:ref> header field + (<xref target="field.content-location"/>) differs from Location in that the + Content-Location refers to the most specific resource corresponding to the + enclosed representation. It is therefore possible for a response to contain + both the Location and Content-Location header fields. + </t> +</aside> +</section> + +<section title="Retry-After" anchor="field.retry-after"> + <x:anchor-alias value="header.retry-after"/> + <iref primary="true" item="Fields" subitem="Retry-After" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Retry-After" x:for-anchor=""/><iref primary="true" item="Retry-After header field" x:for-anchor=""/> + <x:anchor-alias value="Retry-After"/> +<t> + Servers send the "Retry-After" header field to indicate how long the user + agent ought to wait before making a follow-up request. When sent with a + <x:ref>503 (Service Unavailable)</x:ref> response, Retry-After indicates + how long the service is expected to be unavailable to the client. + When sent with any <x:ref>3xx (Redirection)</x:ref> response, Retry-After + indicates the minimum time that the user agent is asked to wait before + issuing the redirected request. +</t> +<t> + The Retry-After field value can be either an HTTP-date or a number + of seconds to delay after receiving the response. 
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Retry-After"/> + <x:ref>Retry-After</x:ref> = <x:ref>HTTP-date</x:ref> / <x:ref>delay-seconds</x:ref> +</sourcecode> +<t anchor="rule.delay-seconds"> + <x:anchor-alias value="delay-seconds"/> + A delay-seconds value is a non-negative decimal integer, representing time + in seconds. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="delay-seconds"/> + <x:ref>delay-seconds</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + Two examples of its use are +</t> +<sourcecode type="http-message"> +Retry-After: Fri, 31 Dec 1999 23:59:59 GMT +Retry-After: 120 +</sourcecode> +<t> + In the latter example, the delay is 2 minutes. +</t> +</section> + +<section title="Server" anchor="field.server"> + <x:anchor-alias value="header.server"/> + <iref primary="true" item="Fields" subitem="Server" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Server" x:for-anchor=""/><iref primary="true" item="Server header field" x:for-anchor=""/> + <x:anchor-alias value="Server"/> +<t> + The "Server" header field contains information about the + software used by the origin server to handle the request, which is often + used by clients to help identify the scope of reported interoperability + problems, to work around or tailor requests to avoid particular server + limitations, and for analytics regarding server or operating system use. + An origin server <bcp14>MAY</bcp14> generate a Server header field in its responses. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Server"/> + <x:ref>Server</x:ref> = <x:ref>product</x:ref> *( <x:ref>RWS</x:ref> ( <x:ref>product</x:ref> / <x:ref>comment</x:ref> ) ) +</sourcecode> +<t> + The Server header field value consists of one or more product identifiers, each + followed by zero or more comments (<xref target="comments"/>), which together + identify the origin server software and its significant subproducts. + By convention, the product identifiers are listed in decreasing order of + their significance for identifying the origin server software. Each product + identifier consists of a name and optional version, as defined in + <xref target="field.user-agent"/>. +</t> +<t> + Example: +</t> +<sourcecode type="http-message"> +Server: CERN/3.0 libwww/2.17 +</sourcecode> +<t> + An origin server <bcp14>SHOULD NOT</bcp14> generate a Server header field containing needlessly + fine-grained detail and <bcp14>SHOULD</bcp14> limit the addition of subproducts by third + parties. Overly long and detailed Server field values increase response + latency and potentially reveal internal implementation details that might + make it (slightly) easier for attackers to find and exploit known security + holes. +</t> +</section> +</section> +</section> + +<section title="HTTP Authentication" anchor="authentication"> +<section title="Authentication Scheme" anchor="auth.scheme"> + <x:anchor-alias value="auth-scheme"/> +<t> + HTTP provides a general framework for access control and authentication, + via an extensible set of challenge-response authentication schemes, which + can be used by a server to challenge a client request and by a client to + provide authentication information. 
It uses a case-insensitive + token to identify the authentication scheme: +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="auth-scheme"/> + auth-scheme = <x:ref>token</x:ref> +</sourcecode> +<t> + Aside from the general framework, this document does not specify any + authentication schemes. New and existing authentication schemes are + specified independently and ought to be registered within the + "Hypertext Transfer Protocol (HTTP) Authentication Scheme Registry". + For example, the "basic" and "digest" authentication schemes are defined by + <xref target="RFC7617"/> and <xref target="RFC7616"/>, respectively. +</t> +</section> + +<section title="Authentication Parameters" anchor="auth.params"> + <x:anchor-alias value="auth-param"/> + <x:anchor-alias value="token68"/> +<t> + The authentication scheme is followed by additional information necessary + for achieving authentication via that scheme as either a + comma-separated list of parameters or a single sequence of characters + capable of holding base64-encoded information. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="token68"/> + token68 = 1*( <x:ref>ALPHA</x:ref> / <x:ref>DIGIT</x:ref> / + "-" / "." / "_" / "~" / "+" / "/" ) *"=" +</sourcecode> +<t> + The token68 syntax allows the 66 unreserved URI characters + (<xref target="URI"/>), plus a few others, so that it can hold a + base64, base64url (URL and filename safe alphabet), base32, or base16 (hex) + encoding, with or without padding, but excluding whitespace + (<xref target="RFC4648"/>). +</t> +<t> + Authentication parameters are name/value pairs, where the name token is + matched case-insensitively + and each parameter name <bcp14>MUST</bcp14> only occur once per challenge. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="auth-param"/> + auth-param = <x:ref>token</x:ref> <x:ref>BWS</x:ref> "=" <x:ref>BWS</x:ref> ( <x:ref>token</x:ref> / <x:ref>quoted-string</x:ref> ) +</sourcecode> +<t> + Parameter values can be expressed either as "token" or as "quoted-string" + (<xref target="fields.components"/>). + Authentication scheme definitions need to accept both notations, both for + senders and recipients, to allow recipients to use generic parsing + components regardless of the authentication scheme. +</t> +<t> + For backwards compatibility, authentication scheme definitions can restrict + the format for senders to one of the two variants. This can be important + when it is known that deployed implementations will fail when encountering + one of the two formats. +</t> +</section> + +<section title="Challenge and Response" anchor="challenge.and.response"> + <x:anchor-alias value="challenge"/> +<t> + A <x:ref>401 (Unauthorized)</x:ref> response message is used by an origin + server to challenge the authorization of a user agent, including a + <x:ref>WWW-Authenticate</x:ref> header field containing at least one + challenge applicable to the requested resource. +</t> +<t> + A <x:ref>407 (Proxy Authentication Required)</x:ref> response message is + used by a proxy to challenge the authorization of a client, including a + <x:ref>Proxy-Authenticate</x:ref> header field containing at least one + challenge applicable to the proxy for the requested resource. 
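+</t>
+<t>
+  For example (non-normative; the scheme, realm, and all field values
+  shown are illustrative), an origin server challenging a request for
+  a protected resource might send:
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 401 Unauthorized
+WWW-Authenticate: Basic realm="simple"
+Content-Length: 0
+
+</sourcecode>
+<t>
+  The user agent can then repeat the request with an Authorization
+  header field containing credentials for that protection space.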
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="challenge"/> + <x:ref>challenge</x:ref> = <x:ref>auth-scheme</x:ref> [ 1*<x:ref>SP</x:ref> ( <x:ref>token68</x:ref> / #<x:ref>auth-param</x:ref> ) ] +</sourcecode> +<aside> + <t> + <x:h>Note:</x:h> Many clients fail to parse a challenge that contains an unknown + scheme. A workaround for this problem is to list well-supported schemes + (such as "basic") first.<!-- see https://greenbytes.de/tech/tc/httpauth/#multibasicunknown2 --> + </t> +</aside> +<t> + A user agent that wishes to authenticate itself with an origin server + — usually, but not necessarily, after receiving a + <x:ref>401 (Unauthorized)</x:ref> — can do so by including an + <x:ref>Authorization</x:ref> header field with the request. +</t> +<t> + A client that wishes to authenticate itself with a proxy — usually, + but not necessarily, after receiving a + <x:ref>407 (Proxy Authentication Required)</x:ref> — can do so by + including a <x:ref>Proxy-Authorization</x:ref> header field with the + request. +</t> +</section> + +<section title="Credentials" anchor="credentials"> +<t> + Both the <x:ref>Authorization</x:ref> field value and the + <x:ref>Proxy-Authorization</x:ref> field value contain the client's + credentials for the realm of the resource being requested, based upon a + challenge received in a response (possibly at some point in the past). + When creating their values, the user agent ought to do so by selecting the + challenge with what it considers to be the most secure auth-scheme that it + understands, obtaining credentials from the user as appropriate. + Transmission of credentials within header field values implies significant + security considerations regarding the confidentiality of the underlying + connection, as described in + <xref target="confidentiality.of.credentials"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="credentials"/> + <x:ref>credentials</x:ref> = <x:ref>auth-scheme</x:ref> [ 1*<x:ref>SP</x:ref> ( <x:ref>token68</x:ref> / #<x:ref>auth-param</x:ref> ) ] +</sourcecode> +<t> + Upon receipt of a request for a protected resource that omits credentials, + contains invalid credentials (e.g., a bad password) or partial credentials + (e.g., when the authentication scheme requires more than one round trip), + an origin server <bcp14>SHOULD</bcp14> send a <x:ref>401 (Unauthorized)</x:ref> response + that contains a <x:ref>WWW-Authenticate</x:ref> header field with at least + one (possibly new) challenge applicable to the requested resource. +</t> +<t> + Likewise, upon receipt of a request that omits proxy credentials or + contains invalid or partial proxy credentials, a proxy that requires + authentication <bcp14>SHOULD</bcp14> generate a + <x:ref>407 (Proxy Authentication Required)</x:ref> response that contains + a <x:ref>Proxy-Authenticate</x:ref> header field with at least one + (possibly new) challenge applicable to the proxy. +</t> +<t> + A server that receives valid credentials that are not adequate to gain + access ought to respond with the <x:ref>403 (Forbidden)</x:ref> status + code (<xref target="status.403"/>). +</t> +<t> + HTTP does not restrict applications to this simple challenge-response + framework for access authentication. Additional mechanisms can be used, + such as authentication at the transport level or via message encapsulation, + and with additional header fields specifying authentication information. 
+ However, such additional mechanisms are not defined by this specification. +</t> +<t> + Note that various custom mechanisms for user authentication use the + Set-Cookie and Cookie header fields, defined in <xref target="COOKIE"/>, + for passing tokens related to authentication. +</t> +</section> + +<section title="Establishing a Protection Space (Realm)" anchor="protection.space"> + <iref item="Protection Space"/> + <iref item="Realm"/> + <iref item="origin"/> +<t> + The <x:dfn>realm</x:dfn> authentication parameter is reserved for use by + authentication schemes that wish to indicate a scope of protection. +</t> +<t> + A <x:dfn>protection space</x:dfn> is defined by the origin (see + <xref target="origin"/>) of the + server being accessed, in combination with the realm value if present. + These realms allow the protected resources on a server to be + partitioned into a set of protection spaces, each with its own + authentication scheme and/or authorization database. The realm value + is a string, generally assigned by the origin server, that can have + additional semantics specific to the authentication scheme. Note that a + response can have multiple challenges with the same auth-scheme but + with different realms. +</t> +<t> + The protection space determines the domain over which credentials can + be automatically applied. If a prior request has been authorized, the + user agent <bcp14>MAY</bcp14> reuse the same credentials for all other requests within + that protection space for a period of time determined by the authentication + scheme, parameters, and/or user preferences (such as a configurable + inactivity timeout). +</t> +<t> + The extent of a protection space, and therefore the requests to which + credentials might be automatically applied, is not necessarily known to + clients without additional information. An authentication scheme might + define parameters that describe the extent of a protection space. Unless + specifically allowed by the authentication scheme, a single protection + space cannot extend outside the scope of its server. +</t> +<t> + For historical reasons, a sender <bcp14>MUST</bcp14> only generate the quoted-string syntax. + Recipients might have to support both token and quoted-string syntax for + maximum interoperability with existing clients that have been accepting both + notations for a long time. +</t> +</section> + +<section title="Authenticating Users to Origin Servers" anchor="auth.user.origin.server"> + +<section title="WWW-Authenticate" anchor="field.www-authenticate"> + <x:anchor-alias value="header.www-authenticate"/> + <iref primary="true" item="Fields" subitem="WWW-Authenticate" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="WWW-Authenticate" x:for-anchor=""/><iref primary="true" item="WWW-Authenticate header field" x:for-anchor=""/> + <x:anchor-alias value="WWW-Authenticate"/> +<t> + The "WWW-Authenticate" response header field indicates the authentication + scheme(s) and parameters applicable to the target resource. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="WWW-Authenticate"/> + <x:ref>WWW-Authenticate</x:ref> = #<x:ref>challenge</x:ref> +</sourcecode> +<t> + A server generating a <x:ref>401 (Unauthorized)</x:ref> response + <bcp14>MUST</bcp14> send a WWW-Authenticate header field containing at least one + challenge. 
A server <bcp14>MAY</bcp14> generate a WWW-Authenticate header field + in other response messages to indicate that supplying credentials + (or different credentials) might affect the response. +</t> +<t> + A proxy forwarding a response <bcp14>MUST NOT</bcp14> modify any + <x:ref>WWW-Authenticate</x:ref> header fields in that response. +</t> +<t> + User agents are advised to take special care in parsing the field value, as + it might contain more than one challenge, and each challenge can contain a + comma-separated list of authentication parameters. Furthermore, the header + field itself can occur multiple times. +</t> +<t> + For instance: +</t> +<sourcecode type="http-message"> +WWW-Authenticate: Basic realm="simple", Newauth realm="apps", + type=1, title="Login to \"apps\"" +</sourcecode> +<t> + This header field contains two challenges, one for the "Basic" scheme with + a realm value of "simple" and another for the "Newauth" scheme with a + realm value of "apps". It also contains two additional parameters, "type" + and "title". +</t> +<t> + Some user agents do not recognize this form, however. As a result, sending + a WWW-Authenticate field value with more than one member on the same field + line might not be interoperable. +</t> +<aside> + <t> + <x:h>Note:</x:h> The challenge grammar production uses the list syntax as + well. Therefore, a sequence of comma, whitespace, and comma can be + considered either as applying to the preceding challenge, or to be an + empty entry in the list of challenges. In practice, this ambiguity + does not affect the semantics of the header field value and thus is + harmless. + </t> +</aside> +</section> + +<section title="Authorization" anchor="field.authorization"> + <x:anchor-alias value="header.authorization"/> + <iref primary="true" item="Fields" subitem="Authorization" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Authorization" x:for-anchor=""/><iref primary="true" item="Authorization header field" x:for-anchor=""/> + <x:anchor-alias value="Authorization"/> +<t> + The "Authorization" header field allows a user agent to authenticate itself + with an origin server — usually, but not necessarily, after receiving + a <x:ref>401 (Unauthorized)</x:ref> response. Its value consists of + credentials containing the authentication information of the user agent for + the realm of the resource being requested. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Authorization"/> + <x:ref>Authorization</x:ref> = <x:ref>credentials</x:ref> +</sourcecode> +<t> + If a request is authenticated and a realm specified, the same credentials + are presumed to be valid for all other requests within this realm (assuming + that the authentication scheme itself does not require otherwise, such as + credentials that vary according to a challenge value or using synchronized + clocks). +</t> +<t> + A proxy forwarding a request <bcp14>MUST NOT</bcp14> modify any + <x:ref>Authorization</x:ref> header fields in that request. + See <xref target="CACHING" x:rel="#caching.authenticated.responses"/> for details of and requirements + pertaining to handling of the Authorization header field by HTTP caches. 
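+</t>
+<t>
+  A non-normative example of a request carrying credentials for the
+  "basic" scheme (the encoded value is illustrative only):
+</t>
+<sourcecode type="http-message">
+GET /docs/index.html HTTP/1.1
+Host: www.example.com
+Authorization: Basic aGVsbG86d29ybGQ=
+
+</sourcecode>
+<t>
+  The same credentials can then be reused for other requests within
+  that protection space, as described above.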
+</t> +</section> + +<section title="Authentication-Info" anchor="field.authentication-info"> + <x:anchor-alias value="header.authentication-info"/> + <iref primary="true" item="Fields" subitem="Authentication-Info" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Authentication-Info" x:for-anchor=""/><iref primary="true" item="Authentication-Info header field" x:for-anchor=""/> + <x:anchor-alias value="Authentication-Info"/> +<t> + HTTP authentication schemes can use the "Authentication-Info" response + field to communicate information after the client's authentication credentials have been accepted. + This information can include a finalization message from the server (e.g., it can contain the + server authentication). +</t> +<t> + The field value is a list of parameters (name/value pairs), using the "auth-param" + syntax defined in <xref target="challenge.and.response"/>. + This specification only describes the generic format; authentication schemes + using Authentication-Info will define the individual parameters. The "Digest" + Authentication Scheme, for instance, defines multiple parameters in + <xref target="RFC7616" x:sec="3.5"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Authentication-Info"/> + <x:ref>Authentication-Info</x:ref> = #<x:ref>auth-param</x:ref> +</sourcecode> +<t> + The Authentication-Info field can be used in any HTTP response, + independently of request method and status code. Its semantics are defined + by the authentication scheme indicated by the <x:ref>Authorization</x:ref> header field + (<xref target="field.authorization"/>) of the corresponding request. +</t> +<t> + A proxy forwarding a response is not allowed to modify the field value in any + way. +</t> +<t> + Authentication-Info can be sent as a trailer field + (<xref target="trailer.fields"/>) + when the authentication scheme explicitly allows this. +</t> +</section> +</section> + +<section title="Authenticating Clients to Proxies" anchor="auth.client.proxy"> + +<section title="Proxy-Authenticate" anchor="field.proxy-authenticate"> + <x:anchor-alias value="header.proxy-authenticate"/> + <iref primary="true" item="Fields" subitem="Proxy-Authenticate" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Proxy-Authenticate" x:for-anchor=""/><iref primary="true" item="Proxy-Authenticate header field" x:for-anchor=""/> + <x:anchor-alias value="Proxy-Authenticate"/> +<t> + The "Proxy-Authenticate" header field consists of at least one + challenge that indicates the authentication scheme(s) and parameters + applicable to the proxy for this request. + A proxy <bcp14>MUST</bcp14> send at least one Proxy-Authenticate header field in + each <x:ref>407 (Proxy Authentication Required)</x:ref> response that it + generates. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Proxy-Authenticate"/> + <x:ref>Proxy-Authenticate</x:ref> = #<x:ref>challenge</x:ref> +</sourcecode> +<t> + Unlike <x:ref>WWW-Authenticate</x:ref>, the Proxy-Authenticate header field + applies only to the next outbound client on the response chain. + This is because only the client that chose a given proxy is likely to have + the credentials necessary for authentication. However, when multiple + proxies are used within the same administrative domain, such as office and + regional caching proxies within a large corporate network, it is common + for credentials to be generated by the user agent and passed through the + hierarchy until consumed. 
Hence, in such a configuration, it will appear + as if Proxy-Authenticate is being forwarded because each proxy will send + the same challenge set. +</t> +<t> + Note that the parsing considerations for <x:ref>WWW-Authenticate</x:ref> + apply to this header field as well; see <xref target="field.www-authenticate"/> + for details. +</t> +</section> + +<section title="Proxy-Authorization" anchor="field.proxy-authorization"> + <x:anchor-alias value="header.proxy-authorization"/> + <iref primary="true" item="Fields" subitem="Proxy-Authorization" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Proxy-Authorization" x:for-anchor=""/><iref primary="true" item="Proxy-Authorization header field" x:for-anchor=""/> + <x:anchor-alias value="Proxy-Authorization"/> +<t> + The "Proxy-Authorization" header field allows the client to + identify itself (or its user) to a proxy that requires + authentication. Its value consists of credentials containing the + authentication information of the client for the proxy and/or realm of the + resource being requested. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Proxy-Authorization"/> + <x:ref>Proxy-Authorization</x:ref> = <x:ref>credentials</x:ref> +</sourcecode> +<t> + Unlike <x:ref>Authorization</x:ref>, the Proxy-Authorization header field + applies only to the next inbound proxy that demanded authentication using + the <x:ref>Proxy-Authenticate</x:ref> header field. When multiple proxies are used + in a chain, the Proxy-Authorization header field is consumed by the first + inbound proxy that was expecting to receive credentials. A proxy <bcp14>MAY</bcp14> + relay the credentials from the client request to the next proxy if that is + the mechanism by which the proxies cooperatively authenticate a given + request. +</t> +</section> + +<section title="Proxy-Authentication-Info" anchor="field.proxy-authentication-info"> + <x:anchor-alias value="header.proxy-authentication-info"/> + <iref primary="true" item="Fields" subitem="Proxy-Authentication-Info" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Proxy-Authentication-Info" x:for-anchor=""/><iref primary="true" item="Proxy-Authentication-Info header field" x:for-anchor=""/> + <x:anchor-alias value="Proxy-Authentication-Info"/> +<t> + The "Proxy-Authentication-Info" response header field is equivalent to + <x:ref>Authentication-Info</x:ref>, except that it applies to proxy authentication (<xref target="challenge.and.response"/>) + and its semantics are defined by the + authentication scheme indicated by the Proxy-Authorization header field + (<xref target="field.proxy-authorization"/>) + of the corresponding request: +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Proxy-Authentication-Info"/> + <x:ref>Proxy-Authentication-Info</x:ref> = #<x:ref>auth-param</x:ref> +</sourcecode> +<t> + However, unlike <x:ref>Authentication-Info</x:ref>, the Proxy-Authentication-Info header + field applies only to the next outbound client on the response chain. This is + because only the client that chose a given proxy is likely to have the + credentials necessary for authentication. However, when multiple proxies are + used within the same administrative domain, such as office and regional + caching proxies within a large corporate network, it is common for + credentials to be generated by the user agent and passed through the + hierarchy until consumed. 
Hence, in such a configuration, it will appear as + if Proxy-Authentication-Info is being forwarded because each proxy will send + the same field value. +</t> +<t> + Proxy-Authentication-Info can be sent as a trailer field + (<xref target="trailer.fields"/>) + when the authentication scheme explicitly allows this. +</t> +</section> +</section> +</section> + +<section title="Content Negotiation" anchor="content.negotiation"> + <x:anchor-alias value="content negotiation"/> +<t> + When responses convey content, whether indicating a success or + an error, the origin server often has different ways of representing that + information; for example, in different formats, languages, or encodings. + Likewise, different users or user agents might have differing capabilities, + characteristics, or preferences that could influence which representation, + among those available, would be best to deliver. For this reason, HTTP + provides mechanisms for <x:ref>content negotiation</x:ref>. +</t> +<t> + This specification defines three patterns of content negotiation that can + be made visible within the protocol: + "proactive" negotiation, where the server selects the representation based + upon the user agent's stated preferences; "reactive" negotiation, + where the server provides a list of representations for the user agent to + choose from; and "request content" negotiation, where the user agent + selects the representation for a future request based upon the server's + stated preferences in past responses. +</t> +<t> + Other patterns of content negotiation include + "conditional content", where the representation consists of multiple + parts that are selectively rendered based on user agent parameters, + "active content", where the representation contains a script that + makes additional (more specific) requests based on the user agent + characteristics, and "Transparent Content Negotiation" + (<xref target="RFC2295"/>), where content selection is performed by + an intermediary. These patterns are not mutually exclusive, and each has + trade-offs in applicability and practicality. +</t> +<t> + Note that, in all cases, HTTP is not aware of the resource semantics. + The consistency with which an origin server responds to requests, over time + and over the varying dimensions of content negotiation, and thus the + "sameness" of a resource's observed representations over time, is + determined entirely by whatever entity or algorithm selects or generates + those responses. +</t> + +<section title="Proactive Negotiation" anchor="proactive.negotiation"> + <x:anchor-alias value="proactive negotiation"/> + <x:anchor-alias value="server-driven negotiation"/> +<t> + When content negotiation preferences are sent by the user agent in a + request to encourage an algorithm located at the server to + select the preferred representation, it is called + <x:dfn>proactive negotiation</x:dfn> + (a.k.a., <x:dfn>server-driven negotiation</x:dfn>). Selection is based on + the available representations for a response (the dimensions over which it + might vary, such as language, content coding, etc.) compared to various + information supplied in the request, including both the explicit + negotiation header fields below and implicit + characteristics, such as the client's network address or parts of the + <x:ref>User-Agent</x:ref> field. 
+</t> +<t> + Proactive negotiation is advantageous when the algorithm for + selecting from among the available representations is difficult to + describe to a user agent, or when the server desires to send its + "best guess" to the user agent along with the first response (when that + "best guess" is good enough for the user, this avoids the round-trip + delay of a subsequent request). In order to improve the server's + guess, a user agent <bcp14>MAY</bcp14> send request header fields that describe + its preferences. +</t> +<t> + Proactive negotiation has serious disadvantages: +</t> +<ul> + <li> + It is impossible for the server to accurately determine what + might be "best" for any given user, since that would require + complete knowledge of both the capabilities of the user agent + and the intended use for the response (e.g., does the user want + to view it on screen or print it on paper?); + </li> + <li> + Having the user agent describe its capabilities in every + request can be both very inefficient (given that only a small + percentage of responses have multiple representations) and a + potential risk to the user's privacy; + </li> + <li> + It complicates the implementation of an origin server and the + algorithms for generating responses to a request; and, + </li> + <li> + It limits the reusability of responses for shared caching. + </li> +</ul> +<t> + A user agent cannot rely on proactive negotiation preferences being + consistently honored, since the origin server might not implement proactive + negotiation for the requested resource or might decide that sending a + response that doesn't conform to the user agent's preferences is better + than sending a <x:ref>406 (Not Acceptable)</x:ref> response. +</t> +<t> + A <x:ref>Vary</x:ref> header field (<xref target="field.vary"/>) is + often sent in a response subject to proactive negotiation to indicate what + parts of the request information were used in the selection algorithm. +</t> +<t> + The request header fields <x:ref>Accept</x:ref>, + <x:ref>Accept-Charset</x:ref>, <x:ref>Accept-Encoding</x:ref>, and + <x:ref>Accept-Language</x:ref> are defined below for a user agent to engage + in <x:ref>proactive negotiation</x:ref> of the response content. + The preferences sent in these + fields apply to any content in the response, including representations of + the target resource, representations of error or processing status, and + potentially even the miscellaneous text strings that might appear within + the protocol. +</t> +</section> + +<section title="Reactive Negotiation" anchor="reactive.negotiation"> + <x:anchor-alias value="reactive negotiation"/> + <x:anchor-alias value="agent-driven negotiation"/> +<t> + With <x:dfn>reactive negotiation</x:dfn> + (a.k.a., <x:dfn>agent-driven negotiation</x:dfn>), selection of + content (regardless of the status code) is performed by + the user agent after receiving an initial response. The mechanism for + reactive negotiation might be as simple as a list of references to + alternative representations. +</t> +<t> + If the user agent is not satisfied by the initial response content, + it can perform a GET request on one or more of the alternative resources + to obtain a different representation. Selection of such alternatives might + be performed automatically (by the user agent) or manually (e.g., by the + user selecting from a hypertext menu). 
+</t> +<t> + A server might choose not to send an initial representation, other than + the list of alternatives, and thereby indicate that reactive + negotiation by the user agent is preferred. For example, the alternatives + listed in responses with the <x:ref>300 (Multiple Choices)</x:ref> and + <x:ref>406 (Not Acceptable)</x:ref> status codes include information about + available representations so that the user or user agent can react by + making a selection. +</t> +<t> + Reactive negotiation is advantageous when the response would vary + over commonly used dimensions (such as type, language, or encoding), + when the origin server is unable to determine a user agent's + capabilities from examining the request, and generally when public + caches are used to distribute server load and reduce network usage. +</t> +<t> + Reactive negotiation suffers from the disadvantages of transmitting + a list of alternatives to the user agent, which degrades user-perceived + latency if transmitted in the header section, and needing a second request + to obtain an alternate representation. Furthermore, this specification + does not define a mechanism for supporting automatic selection, though it + does not prevent such a mechanism from being developed. +</t> +</section> + +<section title="Request Content Negotiation" anchor="request.content.negotiation"> + <x:anchor-alias value="request content negotiation"/> +<t> + When content negotiation preferences are sent in a server's response, the + listed preferences are called <x:dfn>request content negotiation</x:dfn> + because they intend to influence selection of an appropriate content for + subsequent requests to that resource. For example, + the <x:ref>Accept</x:ref> (<xref target="field.accept"/>) and + <x:ref>Accept-Encoding</x:ref> (<xref target="field.accept-encoding"/>) + header fields can be sent in a response to indicate preferred media types + and content codings for subsequent requests to that resource. +</t> +<t> + Similarly, <xref target="RFC5789" x:sec="3.1" x:fmt="of"/> defines + the "Accept-Patch" response header field, which allows discovery of + which content types are accepted in PATCH requests. +</t> +</section> + +<section title="Content Negotiation Field Features" anchor="conneg.features"> +<section title="Absence" anchor="conneg.absent"> +<t> + For each of the content negotiation fields, a request that does not contain + the field implies that the sender has no preference on that dimension of + negotiation. +</t> +<t> + If a content negotiation header field is present in a request and none of + the available + representations for the response can be considered acceptable according to + it, the origin server can either honor the header field by sending a + <x:ref>406 (Not Acceptable)</x:ref> response or disregard the header field + by treating the response as if it is not subject to content negotiation + for that request header field. This does not imply, however, that the + client will be able to use the representation. +</t> +<aside><t> + <x:h>Note:</x:h> A user agent sending these header fields makes it easier for a + server to identify an individual by virtue of the user agent's request + characteristics (<xref target="fingerprinting"/>). 
+</t></aside> +</section> + +<section title="Quality Values" anchor="quality.values"> + <x:anchor-alias value="weight"/> + <x:anchor-alias value="qvalue"/> +<t> + The content negotiation fields defined by this specification + use a common parameter, named "q" (case-insensitive), to assign a relative + "weight" to the preference for that associated kind of content. + This weight is referred to as a "quality value" (or "qvalue") because + the same parameter name is often used within server configurations to + assign a weight to the relative quality of the various representations + that can be selected for a resource. +</t> +<t> + The weight is normalized to a real number in the range 0 through 1, + where 0.001 is the least preferred and 1 is the most preferred; + a value of 0 means "not acceptable". If no "q" parameter is present, + the default weight is 1. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="weight"/><iref primary="true" item="Grammar" subitem="qvalue"/> + <x:ref>weight</x:ref> = <x:ref>OWS</x:ref> ";" <x:ref>OWS</x:ref> "q=" <x:ref>qvalue</x:ref> + <x:ref>qvalue</x:ref> = ( "0" [ "." 0*3<x:ref>DIGIT</x:ref> ] ) + / ( "1" [ "." 0*3("0") ] ) +</sourcecode> +<t> + A sender of qvalue <bcp14>MUST NOT</bcp14> generate more than three digits after the + decimal point. User configuration of these values ought to be limited in + the same fashion. +</t> +</section> + +<section title="Wildcard Values" anchor="wildcard.values"> + <x:anchor-alias value="wildcard"/> +<t> + Most of these header fields, where indicated, define a wildcard value ("*") + to select unspecified values. If no wildcard is present, values that are + not explicitly mentioned in the field are considered unacceptable. + Within <x:ref>Vary</x:ref>, the wildcard value means that the variance + is unlimited. +</t> +<aside><t> + <x:h>Note:</x:h> In practice, using wildcards in content negotiation has limited + practical value because it is seldom useful to say, for example, "I + prefer image/* more or less than (some other specific value)". By sending + Accept: */*;q=0, clients can explicitly request a <x:ref>406 (Not Acceptable)</x:ref> + response if a more preferred format is not available, but + they still need to be able to handle a different response since the + server is allowed to ignore their preference. +</t></aside> +</section> +</section> + +<section title="Content Negotiation Fields" anchor="conneg.fields"> +<section title="Accept" anchor="field.accept"> + <x:anchor-alias value="header.accept"/> + <iref primary="true" item="Fields" subitem="Accept" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Accept" x:for-anchor=""/><iref primary="true" item="Accept header field" x:for-anchor=""/> + <x:anchor-alias value="Accept"/> + <x:anchor-alias value="accept-params"/> + <x:anchor-alias value="media-range"/> +<t> + The "Accept" header field can be used by user agents to specify their + preferences regarding response media types. For example, Accept header + fields can be used to indicate that the request is specifically limited to + a small set of desired types, as in the case of a request for an in-line + image. +</t> +<t> + When sent by a server in a response, Accept provides information + about which content types are preferred in the content of a subsequent + request to the same resource. 
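+</t>
+<t>
+ The following non-normative sketch (in Python; names are illustrative,
+ and splitting the field value into its comma-separated members is assumed
+ to have happened already) shows one simplified way a recipient might
+ extract the weight defined in <xref target="quality.values"/> from a
+ single member of a negotiation field value:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: extracting the "q" weight from one member of a
+# content negotiation field value.  List splitting and full ABNF
+# validation are assumed to have happened already; names are illustrative.
+def member_weight(member, default=1.0):
+    item, *params = [p.strip() for p in member.split(";")]
+    weight = default
+    for p in params:
+        name, _, value = p.partition("=")
+        if name.strip().lower() == "q":        # "q" is case-insensitive
+            try:
+                weight = float(value)
+            except ValueError:
+                pass                           # ignore a malformed weight
+            else:
+                weight = min(max(weight, 0.0), 1.0)
+    return item, weight
+
+print(member_weight("audio/basic"))        # ('audio/basic', 1.0)
+print(member_weight("audio/*; q=0.2"))     # ('audio/*', 0.2)
+print(member_weight("text/x-dvi; q=0.8"))  # ('text/x-dvi', 0.8)
+</sourcecode>
+<t>
+ A complete implementation additionally has to honor any media type
+ parameters that precede the weight, as permitted by the Accept grammar
+ below.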
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Accept"/><iref primary="true" item="Grammar" subitem="media-range"/> + <x:ref>Accept</x:ref> = #( <x:ref>media-range</x:ref> [ <x:ref>weight</x:ref> ] ) + + <x:ref>media-range</x:ref> = ( "*/*" + / ( <x:ref>type</x:ref> "/" "*" ) + / ( <x:ref>type</x:ref> "/" <x:ref>subtype</x:ref> ) + ) <x:ref>parameters</x:ref> +</sourcecode> +<t> + The asterisk "*" character is used to group media types into ranges, + with "*/*" indicating all media types and "type/*" indicating all + subtypes of that type. The media-range can include media type + parameters that are applicable to that range. +</t> +<t> + Each media-range might be followed by optional applicable media type + parameters (e.g., <x:ref>charset</x:ref>), followed by an optional "q" + parameter for indicating a relative weight (<xref target="quality.values"/>). +</t> +<t> + Previous specifications allowed additional extension parameters to appear + after the weight parameter. The accept extension grammar (accept-params, accept-ext) has + been removed because it had a complicated definition, was not being used in + practice, and is more easily deployed through new header fields. Senders + using weights <bcp14>SHOULD</bcp14> send "q" last (after all media-range parameters). + Recipients <bcp14>SHOULD</bcp14> process any parameter named "q" as weight, regardless of + parameter ordering. +</t> +<aside> + <t> + <x:h>Note:</x:h> Use of the "q" parameter name to control content negotiation + would interfere with any media type parameter having the same name. + Hence, the media type registry disallows parameters named "q". + </t> +</aside> +<t> + The example +</t> +<sourcecode type="http-message"> +Accept: audio/*; q=0.2, audio/basic +</sourcecode> +<t> + is interpreted as "I prefer audio/basic, but send me any audio + type if it is the best available after an 80% markdown in quality". +</t> +<t> + A more elaborate example is +</t> +<sourcecode type="http-message"> +Accept: text/plain; q=0.5, text/html, + text/x-dvi; q=0.8, text/x-c +</sourcecode> +<t> + Verbally, this would be interpreted as "text/html and text/x-c are + the equally preferred media types, but if they do not exist, then send the + text/x-dvi representation, and if that does not exist, send the text/plain + representation". +</t> +<t> + Media ranges can be overridden by more specific media ranges or + specific media types. If more than one media range applies to a given + type, the most specific reference has precedence. For example, +</t> +<sourcecode type="http-message"> +Accept: text/*, text/plain, text/plain;format=flowed, */* +</sourcecode> +<t> + have the following precedence: +</t> +<ol> + <li>text/plain;format=flowed</li> + <li>text/plain</li> + <li>text/*</li> + <li>*/*</li> +</ol> +<t> + The media type quality factor associated with a given type is + determined by finding the media range with the highest precedence + that matches the type. 
For example,
+</t>
+<sourcecode type="http-message">
+Accept: text/*;q=0.3, text/plain;q=0.7, text/plain;format=flowed,
+ text/plain;format=fixed;q=0.4, */*;q=0.5
+</sourcecode>
+<t>
+ would cause the following values to be associated:
+</t>
+<table align="left">
+ <thead>
+ <tr>
+ <th>Media Type</th>
+ <th>Quality Value</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td>text/plain;format=flowed</td>
+ <td>1</td>
+ </tr>
+ <tr>
+ <td>text/plain</td>
+ <td>0.7</td>
+ </tr>
+ <tr>
+ <td>text/html</td>
+ <td>0.3</td>
+ </tr>
+ <tr>
+ <td>image/jpeg</td>
+ <td>0.5</td>
+ </tr>
+ <tr>
+ <td>text/plain;format=fixed</td>
+ <td>0.4</td>
+ </tr>
+ <tr>
+ <td>text/html;level=3</td>
+ <td>0.3</td>
+ </tr>
+ </tbody>
+</table>
+<aside><t>
+ <x:h>Note:</x:h> A user agent might be provided with a default set of quality
+ values for certain media ranges. However, unless the user agent is
+ a closed system that cannot interact with other rendering agents,
+ this default set ought to be configurable by the user.
+</t></aside>
+</section>
+
+<section title="Accept-Charset" anchor="field.accept-charset">
+ <x:anchor-alias value="header.accept-charset"/>
+ <rdf:Description>
+ <status xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">deprecated</status>
+ </rdf:Description>
+ <iref primary="true" item="Fields" subitem="Accept-Charset" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Accept-Charset" x:for-anchor=""/><iref primary="true" item="Accept-Charset header field" x:for-anchor=""/>
+ <x:anchor-alias value="Accept-Charset"/>
+<t>
+ The "Accept-Charset" header field can be sent by a user agent to indicate
+ its preferences for charsets in textual response content. For example,
+ this field allows user agents capable of understanding more comprehensive
+ or special-purpose charsets to signal that capability to an origin server
+ that is capable of representing information in those charsets.
+</t>
+<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Accept-Charset"/>
+ <x:ref>Accept-Charset</x:ref> = #( ( <x:ref>token</x:ref> / "*" ) [ <x:ref>weight</x:ref> ] )
+</sourcecode>
+<t>
+ Charset names are defined in <xref target="charset"/>.
+ A user agent <bcp14>MAY</bcp14> associate a quality value with each charset to indicate
+ the user's relative preference for that charset, as defined in <xref target="quality.values"/>.
+ An example is
+</t>
+<sourcecode type="http-message">
+Accept-Charset: iso-8859-5, unicode-1-1;q=0.8
+</sourcecode>
+<t>
+ The special value "*", if present in the Accept-Charset header field,
+ matches every charset that is not mentioned elsewhere in the
+ field.
+</t>
+<aside><t>
+ <x:h>Note:</x:h> Accept-Charset is deprecated because UTF-8 has become nearly
+ ubiquitous and sending a detailed list of user-preferred charsets wastes
+ bandwidth, increases latency, and makes passive fingerprinting far too
+ easy (<xref target="fingerprinting"/>). Most general-purpose user agents
+ do not send Accept-Charset unless specifically configured to do so.
+</t></aside> +</section> + +<section title="Accept-Encoding" anchor="field.accept-encoding"> + <x:anchor-alias value="header.accept-encoding"/> + <iref primary="true" item="Fields" subitem="Accept-Encoding" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Accept-Encoding" x:for-anchor=""/><iref primary="true" item="Accept-Encoding header field" x:for-anchor=""/> + <x:anchor-alias value="Accept-Encoding"/> + <x:anchor-alias value="codings"/> +<t> + The "Accept-Encoding" header field can be used to indicate preferences + regarding the use of content codings (<xref target="content.codings"/>). +</t> +<t> + When sent by a user agent in a request, Accept-Encoding indicates the + content codings acceptable in a response. +</t> +<t> + When sent by a server in a response, Accept-Encoding provides information + about which content codings are preferred in the content of a subsequent + request to the same resource. +</t> +<t> + An "identity" token is used as a synonym for + "no encoding" in order to communicate when no encoding is preferred. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Accept-Encoding"/><iref primary="true" item="Grammar" subitem="codings"/> + <x:ref>Accept-Encoding</x:ref> = #( <x:ref>codings</x:ref> [ <x:ref>weight</x:ref> ] ) + <x:ref>codings</x:ref> = <x:ref>content-coding</x:ref> / "identity" / "*" +</sourcecode> +<t> + Each codings value <bcp14>MAY</bcp14> be given an associated quality value (weight) + representing the preference for that encoding, as defined in <xref target="quality.values"/>. + The asterisk "*" symbol in an Accept-Encoding field matches any available + content coding not explicitly listed in the field. +</t> +<t> + Examples: +</t> +<sourcecode type="http-message"> +Accept-Encoding: compress, gzip +Accept-Encoding: +Accept-Encoding: * +Accept-Encoding: compress;q=0.5, gzip;q=1.0 +Accept-Encoding: gzip;q=1.0, identity; q=0.5, *;q=0 +</sourcecode> +<t> + A server tests whether a content coding for a given representation is + acceptable using these rules: +</t> +<ol> + <li>If no Accept-Encoding header field is in the request, any content coding is + considered acceptable by the user agent.</li> + <li>If the representation has no content coding, then it is acceptable + by default unless specifically excluded by the Accept-Encoding header field + stating either "identity;q=0" or "*;q=0" without a more specific + entry for "identity".</li> + <li>If the representation's content coding is one of the content codings + listed in the Accept-Encoding field value, then it is acceptable unless + it is accompanied by a qvalue of 0. (As defined in <xref target="quality.values"/>, a + qvalue of 0 means "not acceptable".)</li> +</ol> +<t> + A representation could be encoded with multiple content codings. However, most + content codings are alternative ways to accomplish the same purpose + (e.g., data compression). When selecting between multiple content codings that + have the same purpose, the acceptable content coding with the highest + non-zero qvalue is preferred. +</t> +<t> + An Accept-Encoding header field with a field value that is empty + implies that the user agent does not want any content coding in response. 
+ If a non-empty Accept-Encoding header field is present in a request and none of the
+ available representations for the response have a content coding that
+ is listed as acceptable, the origin server <bcp14>SHOULD</bcp14> send a response
+ without any content coding unless the identity coding is indicated as unacceptable.
+</t>
+<t>
+ When the Accept-Encoding header field is present in a response, it indicates
+ what content codings the resource was willing to accept in the associated
+ request. The field value is evaluated the same way as in a request.
+</t>
+<t>
+ Note that this information is specific to the associated request; the set of
+ supported encodings might be different for other resources on the same
+ server and could change over time or depend on other aspects of the request
+ (such as the request method).
+</t>
+<t>
+ Servers that fail a request due to an unsupported content coding ought to
+ respond with a <x:ref>415 (Unsupported Media Type)</x:ref> status and
+ include an Accept-Encoding header field in that response, allowing
+ clients to distinguish between issues related to content codings and media
+ types. In order to avoid confusion with issues related to media types,
+ servers that fail a request with a 415 status for reasons unrelated to
+ content codings <bcp14>MUST NOT</bcp14> include the Accept-Encoding header
+ field.
+</t>
+<t>
+ The most common use of Accept-Encoding is in responses with a
+ <x:ref>415 (Unsupported Media Type)</x:ref> status code, in response to
+ optimistic use of a content coding by clients. However, the header field
+ can also be used to indicate to clients that content codings are supported
+ in order to optimize future interactions. For example, a resource might include it
+ in a <x:ref>2xx (Successful)</x:ref> response when the request content was
+ big enough to justify use of a compression coding but the client failed
+ to do so.
+</t>
+</section>
+
+<section title="Accept-Language" anchor="field.accept-language">
+ <x:anchor-alias value="header.accept-language"/>
+ <iref primary="true" item="Fields" subitem="Accept-Language" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Accept-Language" x:for-anchor=""/><iref primary="true" item="Accept-Language header field" x:for-anchor=""/>
+ <x:anchor-alias value="Accept-Language"/>
+ <x:anchor-alias value="language-range"/>
+<t>
+ The "Accept-Language" header field can be used by user agents to
+ indicate the set of natural languages that are preferred in the response.
+ Language tags are defined in <xref target="language.tags"/>.
+</t>
+<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Accept-Language"/><iref primary="true" item="Grammar" subitem="language-range"/>
+ <x:ref>Accept-Language</x:ref> = #( <x:ref>language-range</x:ref> [ <x:ref>weight</x:ref> ] )
+ <x:ref>language-range</x:ref> =
+ &lt;language-range, see <xref target="RFC4647" x:fmt="," x:sec="2.1"/>&gt;
+</sourcecode>
+<t>
+ Each language-range can be given an associated quality value
+ representing an estimate of the user's preference for the languages
+ specified by that range, as defined in <xref target="quality.values"/>. For example,
+</t>
+<sourcecode type="http-message">
+Accept-Language: da, en-gb;q=0.8, en;q=0.7
+</sourcecode>
+<t>
+ would mean: "I prefer Danish, but will accept British English and
+ other types of English".
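+</t>
+<t>
+ The following non-normative sketch (in Python; names are illustrative and
+ field-value parsing is simplified) computes the weight that the example
+ field value above assigns to a candidate language tag, using the longest
+ language range that matches the tag:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: weighting a candidate language tag against an
+# Accept-Language field value.  A range matches a tag when it is "*",
+# equal to the tag, or a prefix of the tag followed by "-"; the longest
+# matching range supplies the weight.  Names are illustrative.
+def range_matches(language_range, tag):
+    language_range, tag = language_range.lower(), tag.lower()
+    return (language_range == "*" or tag == language_range
+            or tag.startswith(language_range + "-"))
+
+def language_weight(accept_language, tag):
+    best_length, weight = -1, 0.0
+    for member in accept_language.split(","):
+        language_range, _, param = member.strip().partition(";")
+        language_range = language_range.strip()
+        name, _, value = param.partition("=")
+        q = value.strip() if name.strip().lower() == "q" else ""
+        if range_matches(language_range, tag) and len(language_range) > best_length:
+            best_length = len(language_range)
+            weight = float(q) if q else 1.0
+    return weight
+
+field_value = "da, en-gb;q=0.8, en;q=0.7"
+for tag in ("da", "en-GB", "en-US", "fr"):
+    print(tag, language_weight(field_value, tag))
+# da 1.0, en-GB 0.8, en-US 0.7, fr 0.0
+</sourcecode>
+<t>
+ Other matching schemes can equally be used; language priority lists and
+ matching are discussed further below.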
+</t> +<t> + Note that some recipients treat the order in which language tags are listed + as an indication of descending priority, particularly for tags that are + assigned equal quality values (no value is the same as q=1). However, this + behavior cannot be relied upon. For consistency and to maximize + interoperability, many user agents assign each language tag a unique + quality value while also listing them in order of decreasing quality. + Additional discussion of language priority lists can be found in + <xref target="RFC4647" x:sec="2.3" x:fmt="of"/>. +</t> +<t> + For matching, <xref target="RFC4647" x:sec="3" x:fmt="of"/> defines + several matching schemes. Implementations can offer the most appropriate + matching scheme for their requirements. The "Basic Filtering" scheme + (<xref target="RFC4647" x:fmt="," x:sec="3.3.1"/>) is identical to the + matching scheme that was previously defined for HTTP in + <xref target="RFC2616" x:fmt="of" x:sec="14.4"/>. +</t> +<t> + It might be contrary to the privacy expectations of the user to send + an Accept-Language header field with the complete linguistic preferences of + the user in every request (<xref target="fingerprinting"/>). +</t> +<t> + Since intelligibility is highly dependent on the individual user, user + agents need to allow user control over the linguistic preference (either + through configuration of the user agent itself or by defaulting to a user + controllable system setting). + A user agent that does not provide such control to the user <bcp14>MUST NOT</bcp14> + send an Accept-Language header field. +</t> +<aside> + <t> + <x:h>Note:</x:h> User agents ought to provide guidance to users when setting a + preference, since users are rarely familiar with the details of language + matching as described above. For example, users might assume that on + selecting "en-gb", they will be served any kind of English document if + British English is not available. A user agent might suggest, in such a + case, to add "en" to the list for better matching behavior. + </t> +</aside> +</section> + +<section title="Vary" anchor="field.vary"> + <x:anchor-alias value="header.vary"/> + <iref primary="true" item="Fields" subitem="Vary" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Vary" x:for-anchor=""/><iref item="Vary header field" primary="true" x:for-anchor=""/> + <x:anchor-alias value="Vary"/> +<t> + The "Vary" header field in a response describes what parts of a request + message, aside from the method and target URI, might have influenced the + origin server's process for selecting the content of this response. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Vary"/> + <x:ref>Vary</x:ref> = #( "*" / <x:ref>field-name</x:ref> ) +</sourcecode> +<t> + A Vary field value is either the wildcard member "*" or a list of + request field names, known as the selecting header fields, that might + have had a role in selecting the representation for this response. + Potential selecting header fields are not limited to fields defined by + this specification. +</t> +<t> + A list containing the member "*" signals that other aspects of the + request might have played a role in selecting the response representation, + possibly including aspects outside the message syntax (e.g., the + client's network address). + A recipient will not be able to determine whether this response is + appropriate for a later request without forwarding the request to the + origin server. 
A proxy <bcp14>MUST NOT</bcp14> generate "*" in a Vary field value. +</t> +<t> + For example, a response that contains +</t> +<sourcecode type="http-message"> +Vary: accept-encoding, accept-language +</sourcecode> +<t> + indicates that the origin server might have used the request's + <x:ref>Accept-Encoding</x:ref> and <x:ref>Accept-Language</x:ref> + header fields (or lack thereof) as determining factors while choosing + the content for this response. +</t> +<t> + A Vary field containing a list of field names has two purposes: +</t> +<ol> + <li> + <t> + To inform cache recipients that they <bcp14>MUST NOT</bcp14> use this response + to satisfy a later request unless the later request has the + same values for the listed header fields as the original request + (<xref target="CACHING" x:rel="#caching.negotiated.responses"/>) or reuse of the + response has been validated by the origin server. + In other words, Vary expands the cache key + required to match a new request to the stored cache entry. + </t> + </li> + <li> + <t> + To inform user agent recipients that this response was subject to + content negotiation (<xref target="content.negotiation"/>) and a + different representation might be sent in a subsequent request if + other values are provided in the listed header fields + (<x:ref>proactive negotiation</x:ref>). + </t> + </li> +</ol> +<t> + An origin server <bcp14>SHOULD</bcp14> generate a Vary header field on a cacheable + response when it wishes that response to be selectively reused for + subsequent requests. Generally, that is the case when the response + content has been tailored to better fit the preferences expressed by + those selecting header fields, such as when an origin server has + selected the response's language based on the request's + <x:ref>Accept-Language</x:ref> header field. +</t> +<t> + Vary might be elided when an origin server considers variance in + content selection to be less significant than Vary's performance impact + on caching, particularly when reuse is already limited by cache + response directives (<xref target="CACHING" x:rel="#field.cache-control"/>). +</t> +<t> + There is no need to send the Authorization field name in Vary because + reuse of that response for a different user is prohibited by the field + definition (<xref target="field.authorization"/>). + Likewise, if the response content has been selected or influenced by + network region, but the origin server wants the cached response to be + reused even if recipients move from one region to another, then there + is no need for the origin server to indicate such variance in Vary. +</t> +</section> +</section> +</section> + +<section title="Conditional Requests" anchor="conditional.requests"> + <iref item="conditional request" primary="true"/> +<t> + A conditional request is an HTTP request with one or more request header + fields that indicate a precondition to be tested before + applying the request method to the target resource. + <xref target="evaluation"/> defines when to evaluate preconditions and + their order of precedence when more than one precondition is present. +</t> +<t> + Conditional GET requests are the most efficient mechanism for HTTP + cache updates <xref target="CACHING"/>. Conditionals can also be + applied to state-changing methods, such as PUT and DELETE, to prevent + the "lost update" problem: one client accidentally overwriting + the work of another client that has been acting in parallel. 
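+</t>
+<t>
+ The following non-normative sketch (in Python; the host, path, and media
+ type are illustrative assumptions) shows a client avoiding the "lost
+ update" problem by making its write conditional on the validator it
+ received earlier, using the If-Match field defined in
+ <xref target="field.if-match"/>:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: a read-modify-write cycle protected by If-Match.
+# Host, path, and media type are illustrative assumptions; error handling
+# is minimal.
+from http.client import HTTPSConnection
+
+def edit_resource(host, path, edit):
+    conn = HTTPSConnection(host)
+    conn.request("GET", path)
+    resp = conn.getresponse()
+    etag = resp.getheader("ETag")
+    original = resp.read()
+    if etag is None:
+        raise RuntimeError("no validator received; refusing an unconditional write")
+    # The update is applied only if the representation still matches the
+    # entity tag obtained above.
+    conn.request("PUT", path, body=edit(original),
+                 headers={"If-Match": etag, "Content-Type": "text/plain"})
+    resp = conn.getresponse()
+    if resp.status == 412:
+        raise RuntimeError("the representation changed; re-fetch before retrying")
+    return resp.status
+</sourcecode>
+<t>
+ If the conditional request fails with 412 (Precondition Failed), the
+ client can fetch the current representation, reapply its change, and try
+ again.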
+</t> + +<section title="Preconditions" anchor="preconditions"> + <iref primary="false" item="selected representation"/> +<t> + Preconditions are usually defined with respect to a state of the target + resource as a whole (its current value set) or the state as observed in a + previously obtained representation (one value in that set). If a resource + has multiple current representations, each with its own observable state, + a precondition will assume that the mapping of each request to a + <x:ref>selected representation</x:ref> (<xref target="representations"/>) + is consistent over time. + Regardless, if the mapping is inconsistent or the server is unable to + select an appropriate representation, then no harm will result when the + precondition evaluates to false. +</t> +<t> + Each precondition defined below consists of a comparison between a + set of validators obtained from prior representations of the target + resource to the current state of validators for the selected + representation (<xref target="response.validator"/>). Hence, these + preconditions evaluate whether the state of the target resource has + changed since a given state known by the client. The effect of such an + evaluation depends on the method semantics and choice of conditional, as + defined in <xref target="evaluation"/>. +</t> +<t> + Other preconditions, defined by other specifications as extension fields, + might place conditions on all recipients, on the state of the target + resource in general, or on a group of resources. For instance, the "If" + header field in WebDAV can make a request conditional on various aspects + of multiple resources, such as locks, if the recipient understands and + implements that field (<xref target="WEBDAV" x:fmt="," x:sec="10.4"/>). +</t> +<t> + Extensibility of preconditions is only possible when the precondition can + be safely ignored if unknown (like <x:ref>If-Modified-Since</x:ref>), when + deployment can be assumed for a given use case, or when implementation + is signaled by some other property of the target resource. This encourages + a focus on mutually agreed deployment of common standards. +</t> + +<section title="If-Match" anchor="field.if-match"> + <x:anchor-alias value="header.if-match"/> + <iref primary="true" item="Fields" subitem="If-Match" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="If-Match" x:for-anchor=""/><iref primary="true" item="If-Match header field" x:for-anchor=""/> + <x:anchor-alias value="If-Match"/> +<t> + The "If-Match" header field makes the request method conditional on the + recipient origin server either having at least one current + representation of the target resource, when the field value is "*", or + having a current representation of the target resource that has an + entity tag matching a member of the list of entity tags provided in the + field value. +</t> +<t> + An origin server <bcp14>MUST</bcp14> use the strong comparison function when comparing + entity tags for If-Match (<xref target="entity.tag.comparison"/>), since + the client intends this precondition to prevent the method from being + applied if there have been any changes to the representation data. 
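+</t>
+<t>
+ As a non-normative illustration (in Python; names are illustrative), the
+ strong entity-tag comparison function referenced above differs from its
+ weak counterpart as follows:
+</t>
+<sourcecode type="python">
+# Non-normative sketch of the two entity-tag comparison functions.  An
+# entity tag is represented here by its field value, e.g. '"xyzzy"' or
+# 'W/"xyzzy"'; names are illustrative.
+def is_weak(etag):
+    return etag.startswith("W/")
+
+def opaque_tag(etag):
+    return etag[2:] if is_weak(etag) else etag
+
+def strong_compare(a, b):
+    # True only when neither tag is weak and the opaque tags are identical.
+    return not is_weak(a) and not is_weak(b) and opaque_tag(a) == opaque_tag(b)
+
+def weak_compare(a, b):
+    # True when the opaque tags are identical, ignoring the weak indicator.
+    return opaque_tag(a) == opaque_tag(b)
+
+print(strong_compare('"xyzzy"', '"xyzzy"'))    # True
+print(strong_compare('W/"xyzzy"', '"xyzzy"'))  # False
+print(weak_compare('W/"xyzzy"', '"xyzzy"'))    # True
+</sourcecode>
+<t>
+ The If-Match field value itself is either the wildcard "*" or a list of
+ entity tags: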
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="If-Match"/> + <x:ref>If-Match</x:ref> = "*" / #<x:ref>entity-tag</x:ref> +</sourcecode> +<t> + Examples: +</t> +<sourcecode type="http-message"> +If-Match: "xyzzy" +If-Match: "xyzzy", "r2d2xxxx", "c3piozzzz" +If-Match: * +</sourcecode> +<t> + If-Match is most often used with state-changing methods (e.g., POST, PUT, + DELETE) to prevent accidental overwrites when multiple user agents might be + acting in parallel on the same resource (i.e., to prevent the "lost update" + problem). In general, it can be used with any method that involves the + selection or modification of a representation to abort the request if the + <x:ref>selected representation</x:ref>'s current entity tag is not a + member within the If-Match field value. +</t> +<t> + When an origin server receives a request that selects a representation + and that request includes an If-Match header field, + the origin server <bcp14>MUST</bcp14> evaluate the If-Match condition per + <xref target="evaluation"/> prior to performing the method. +</t> +<t> + To evaluate a received If-Match header field: +</t> +<ol> + <li> + If the field value is "*", the condition is true if the origin server + has a current representation for the target resource. + </li> + <li> + If the field value is a list of entity tags, the condition is true if + any of the listed tags match the entity tag of the selected representation. + </li> + <li> + Otherwise, the condition is false. + </li> +</ol> +<t> + An origin server that evaluates an If-Match condition <bcp14>MUST NOT</bcp14> perform + the requested method if the condition evaluates to false. Instead, + the origin server <bcp14>MAY</bcp14> + indicate that the conditional request failed by responding with a + <x:ref>412 (Precondition Failed)</x:ref> status code. Alternatively, + if the request is a state-changing operation that appears to have already + been applied to the selected representation, the origin server <bcp14>MAY</bcp14> respond + with a <x:ref>2xx (Successful)</x:ref> status code + (i.e., the change requested by the user agent has already succeeded, but + the user agent might not be aware of it, perhaps because the prior response + was lost or an equivalent change was made by some other user agent). +</t> +<t> + Allowing an origin server to send a success response when a change request + appears to have already been applied is more efficient for many authoring + use cases, but comes with some risk if multiple user agents are making + change requests that are very similar but not cooperative. + For example, multiple user agents writing to a common resource as a + semaphore (e.g., a nonatomic increment) are likely to collide and + potentially lose important state transitions. For those kinds of resources, + an origin server is better off being stringent in sending 412 for every + failed precondition on an unsafe method. + In other cases, excluding the ETag field from a success response might + encourage the user agent to perform a GET as its next request to eliminate + confusion about the resource's current state. +</t> +<t> + A client <bcp14>MAY</bcp14> send an If-Match header field in a + <x:ref>GET</x:ref> request to indicate that it would prefer a + <x:ref>412 (Precondition Failed)</x:ref> response if the selected + representation does not match. 
However, this is only useful in range + requests (<xref target="range.requests"/>) for completing a previously + received partial representation when there is no desire for a new + representation. <x:ref>If-Range</x:ref> (<xref target="field.if-range"/>) + is better suited for range requests when the client prefers to receive a + new representation. +</t> +<t> + A cache or intermediary <bcp14>MAY</bcp14> ignore If-Match because its + interoperability features are only necessary for an origin server. +</t> +<t> + Note that an If-Match header field with a list value containing "*" and + other values (including other instances of "*") is syntactically + invalid (therefore not allowed to be generated) and furthermore is + unlikely to be interoperable. +</t> +</section> + +<section title="If-None-Match" anchor="field.if-none-match"> + <x:anchor-alias value="header.if-none-match"/> + <iref primary="true" item="Fields" subitem="If-None-Match" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="If-None-Match" x:for-anchor=""/><iref primary="true" item="If-None-Match header field" x:for-anchor=""/> + <x:anchor-alias value="If-None-Match"/> +<t> + The "If-None-Match" header field makes the request method conditional on + a recipient cache or origin server either not having any current + representation of the target resource, when the field value is "*", or + having a <x:ref>selected representation</x:ref> with an entity tag that does not match any + of those listed in the field value. +</t> +<t> + A recipient <bcp14>MUST</bcp14> use the weak comparison function when comparing + entity tags for If-None-Match (<xref target="entity.tag.comparison"/>), + since weak entity tags can be used for cache validation even if there have + been changes to the representation data. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="If-None-Match"/> + <x:ref>If-None-Match</x:ref> = "*" / #<x:ref>entity-tag</x:ref> +</sourcecode> +<t> + Examples: +</t> +<sourcecode type="http-message"> +If-None-Match: "xyzzy" +If-None-Match: W/"xyzzy" +If-None-Match: "xyzzy", "r2d2xxxx", "c3piozzzz" +If-None-Match: W/"xyzzy", W/"r2d2xxxx", W/"c3piozzzz" +If-None-Match: * +</sourcecode> +<t> + If-None-Match is primarily used in conditional GET requests to enable + efficient updates of cached information with a minimum amount of + transaction overhead. When a client desires to update one or more stored + responses that have entity tags, the client <bcp14>SHOULD</bcp14> generate an + If-None-Match header field containing a list of those entity tags when + making a GET request; this allows recipient servers to send a + <x:ref>304 (Not Modified)</x:ref> response to indicate when one of those + stored responses matches the selected representation. +</t> +<t> + If-None-Match can also be used with a value of "*" to prevent an unsafe + request method (e.g., PUT) from inadvertently modifying an existing + representation of the target resource when the client believes that + the resource does not have a current representation (<xref target="safe.methods"/>). + This is a variation on the "lost update" problem that might arise if more + than one client attempts to create an initial representation for the target + resource. 
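+</t>
+<t>
+ The following non-normative sketch (in Python; the host, path, and media
+ type are illustrative assumptions) uses that form to create a
+ representation only when the target resource does not already have one:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: a "create only" PUT guarded by If-None-Match: *.
+# Host, path, and media type are illustrative assumptions.
+from http.client import HTTPSConnection
+
+def create_only(host, path, data):
+    conn = HTTPSConnection(host)
+    conn.request("PUT", path, body=data,
+                 headers={"If-None-Match": "*",
+                          "Content-Type": "text/plain"})
+    resp = conn.getresponse()
+    if resp.status == 412:
+        return "not created: a current representation already exists"
+    return f"server answered {resp.status}"
+</sourcecode>
+<t>
+ A 412 (Precondition Failed) result then tells the user agent that a
+ current representation already exists, without the risk of overwriting
+ it.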
+</t> +<t> + When an origin server receives a request that selects a representation + and that request includes an If-None-Match header field, + the origin server <bcp14>MUST</bcp14> evaluate the If-None-Match condition per + <xref target="evaluation"/> prior to performing the method. +</t> +<t> + To evaluate a received If-None-Match header field: +</t> +<ol> + <li> + If the field value is "*", the condition is false if the origin server + has a current representation for the target resource. + </li> + <li> + If the field value is a list of entity tags, the condition is false if + one of the listed tags matches the entity tag of the selected representation. + </li> + <li> + Otherwise, the condition is true. + </li> +</ol> +<t> + An origin server that evaluates an If-None-Match condition <bcp14>MUST NOT</bcp14> + perform the requested method if the condition evaluates to false; instead, + the origin server <bcp14>MUST</bcp14> respond with either + a) the <x:ref>304 (Not Modified)</x:ref> status code if the request method + is GET or HEAD or b) the <x:ref>412 (Precondition Failed)</x:ref> status + code for all other request methods. +</t> +<t> + Requirements on cache handling of a received If-None-Match header field + are defined in <xref target="CACHING" x:rel="#validation.received"/>. +</t> +<t> + Note that an If-None-Match header field with a list value containing "*" and + other values (including other instances of "*") is syntactically + invalid (therefore not allowed to be generated) and furthermore is + unlikely to be interoperable. +</t> +</section> + +<section title="If-Modified-Since" anchor="field.if-modified-since"> + <x:anchor-alias value="header.if-modified-since"/> + <iref primary="true" item="Fields" subitem="If-Modified-Since" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="If-Modified-Since" x:for-anchor=""/><iref primary="true" item="If-Modified-Since header field" x:for-anchor=""/> + <x:anchor-alias value="If-Modified-Since"/> +<t> + The "If-Modified-Since" header field makes a GET or HEAD request method + conditional on the <x:ref>selected representation</x:ref>'s modification + date being more + recent than the date provided in the field value. Transfer of the selected + representation's data is avoided if that data has not changed. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="If-Modified-Since"/> + <x:ref>If-Modified-Since</x:ref> = <x:ref>HTTP-date</x:ref> +</sourcecode> +<t> + An example of the field is: +</t> +<sourcecode type="http-message"> +If-Modified-Since: Sat, 29 Oct 1994 19:43:31 GMT +</sourcecode> +<t> + A recipient <bcp14>MUST</bcp14> ignore If-Modified-Since if the request contains an + <x:ref>If-None-Match</x:ref> header field; the condition in + <x:ref>If-None-Match</x:ref> is considered to be a more accurate + replacement for the condition in If-Modified-Since, and the two are only + combined for the sake of interoperating with older intermediaries that + might not implement <x:ref>If-None-Match</x:ref>. +</t> +<t> + A recipient <bcp14>MUST</bcp14> ignore the If-Modified-Since header field if the + received field value is not a valid HTTP-date, the field value has more than + one member, or if the request method is neither GET nor HEAD. +</t> +<t> + A recipient <bcp14>MUST</bcp14> ignore the If-Modified-Since header field if the + resource does not have a modification date available. 
+</t> +<t> + A recipient <bcp14>MUST</bcp14> interpret an If-Modified-Since field value's timestamp + in terms of the origin server's clock. +</t> +<t> + If-Modified-Since is typically used for two distinct purposes: + 1) to allow efficient updates of a cached representation that does not + have an entity tag and 2) to limit the scope of a web traversal to resources + that have recently changed. +</t> +<t> + When used for cache updates, a cache will typically use the value of the + cached message's <x:ref>Last-Modified</x:ref> header field to generate the field + value of If-Modified-Since. This behavior is most interoperable for cases + where clocks are poorly synchronized or when the server has chosen to only + honor exact timestamp matches (due to a problem with Last-Modified dates + that appear to go "back in time" when the origin server's clock is + corrected or a representation is restored from an archived backup). + However, caches occasionally generate the field value based on other data, + such as the <x:ref>Date</x:ref> header field of the cached message or the + clock time at which the message was received, particularly when the + cached message does not contain a <x:ref>Last-Modified</x:ref> header field. +</t> +<t> + When used for limiting the scope of retrieval to a recent time window, a + user agent will generate an If-Modified-Since field value based on either + its own clock or a <x:ref>Date</x:ref> header field received from the + server in a prior response. Origin servers that choose an exact + timestamp match based on the selected representation's + <x:ref>Last-Modified</x:ref> + header field will not be able to help the user agent limit its data + transfers to only those changed during the specified window. +</t> +<t> + When an origin server receives a request that selects a representation + and that request includes an If-Modified-Since header field without an + <x:ref>If-None-Match</x:ref> header field, the origin server <bcp14>SHOULD</bcp14> + evaluate the If-Modified-Since condition per + <xref target="evaluation"/> prior to performing the method. +</t> +<t> + To evaluate a received If-Modified-Since header field: +</t> +<ol> + <li> + If the selected representation's last modification date is earlier or + equal to the date provided in the field value, the condition is false. + </li> + <li> + Otherwise, the condition is true. + </li> +</ol> +<t> + An origin server that evaluates an If-Modified-Since condition + <bcp14>SHOULD NOT</bcp14> perform the requested method if the condition evaluates to + false; instead, + the origin server <bcp14>SHOULD</bcp14> generate a <x:ref>304 (Not Modified)</x:ref> + response, including only those metadata that are useful for identifying or + updating a previously cached response. +</t> +<t> + Requirements on cache handling of a received If-Modified-Since header field + are defined in <xref target="CACHING" x:rel="#validation.received"/>. 
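+</t>
+<t>
+ The following non-normative sketch (in Python; names are illustrative,
+ field parsing is simplified, and a production implementation would also
+ give precedence to If-None-Match as required above) applies the
+ evaluation steps above:
+</t>
+<sourcecode type="python">
+# Non-normative sketch of the If-Modified-Since evaluation steps for a
+# GET or HEAD request; names are illustrative.
+from email.utils import parsedate_to_datetime
+
+def not_modified(if_modified_since, last_modified):
+    """Return True when a 304 (Not Modified) response is appropriate.
+
+    Both arguments are HTTP-date strings; last_modified is the selected
+    representation's Last-Modified value (None when unavailable).
+    """
+    if last_modified is None:
+        return False               # no modification date: ignore the field
+    try:
+        condition_date = parsedate_to_datetime(if_modified_since)
+        representation_date = parsedate_to_datetime(last_modified)
+    except (TypeError, ValueError):
+        return False               # not a valid HTTP-date: ignore the field
+    # The condition is false (content unchanged) when the representation's
+    # modification date is earlier than or equal to the supplied date.
+    return condition_date >= representation_date
+
+print(not_modified("Sat, 29 Oct 1994 19:43:31 GMT",
+                   "Sat, 29 Oct 1994 19:43:31 GMT"))  # True: send 304
+</sourcecode>
+<t>
+ The sketch compares parsed dates; as noted above, some origin servers
+ instead honor only an exact timestamp match.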
+</t> +</section> + +<section title="If-Unmodified-Since" anchor="field.if-unmodified-since"> + <x:anchor-alias value="header.if-unmodified-since"/> + <iref primary="true" item="Fields" subitem="If-Unmodified-Since" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="If-Unmodified-Since" x:for-anchor=""/><iref primary="true" item="If-Unmodified-Since header field" x:for-anchor=""/> + <x:anchor-alias value="If-Unmodified-Since"/> +<t> + The "If-Unmodified-Since" header field makes the request method conditional + on the <x:ref>selected representation</x:ref>'s last modification date being + earlier than or equal to the date provided in the field value. + This field accomplishes the + same purpose as <x:ref>If-Match</x:ref> for cases where the user agent does + not have an entity tag for the representation. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="If-Unmodified-Since"/> + <x:ref>If-Unmodified-Since</x:ref> = <x:ref>HTTP-date</x:ref> +</sourcecode> +<t> + An example of the field is: +</t> +<sourcecode type="http-message"> +If-Unmodified-Since: Sat, 29 Oct 1994 19:43:31 GMT +</sourcecode> +<t> + A recipient <bcp14>MUST</bcp14> ignore If-Unmodified-Since if the request contains an + <x:ref>If-Match</x:ref> header field; the condition in + <x:ref>If-Match</x:ref> is considered to be a more accurate replacement for + the condition in If-Unmodified-Since, and the two are only combined for the + sake of interoperating with older intermediaries that might not implement + <x:ref>If-Match</x:ref>. +</t> +<t> + A recipient <bcp14>MUST</bcp14> ignore the If-Unmodified-Since header field if the + received field value is not a valid HTTP-date (including when the field + value appears to be a list of dates). +</t> +<t> + A recipient <bcp14>MUST</bcp14> ignore the If-Unmodified-Since header field if the + resource does not have a modification date available. +</t> +<t> + A recipient <bcp14>MUST</bcp14> interpret an If-Unmodified-Since field value's timestamp + in terms of the origin server's clock. +</t> +<t> + If-Unmodified-Since is most often used with state-changing methods + (e.g., POST, PUT, DELETE) to prevent accidental overwrites when multiple + user agents might be acting in parallel on a resource that does + not supply entity tags with its representations (i.e., to prevent the + "lost update" problem). + In general, it can be used with any method that involves the selection + or modification of a representation to abort the request if the + <x:ref>selected representation</x:ref>'s last modification date has + changed since the date provided in the If-Unmodified-Since field value. +</t> +<t> + When an origin server receives a request that selects a representation + and that request includes an If-Unmodified-Since header field without + an <x:ref>If-Match</x:ref> header field, + the origin server <bcp14>MUST</bcp14> evaluate the If-Unmodified-Since condition per + <xref target="evaluation"/> prior to performing the method. +</t> +<t> + To evaluate a received If-Unmodified-Since header field: +</t> +<ol> + <li> + If the selected representation's last modification date is earlier than or + equal to the date provided in the field value, the condition is true. + </li> + <li> + Otherwise, the condition is false. + </li> +</ol> +<t> + An origin server that evaluates an If-Unmodified-Since condition <bcp14>MUST NOT</bcp14> + perform the requested method if the condition evaluates to false. 
+ Instead, the origin server <bcp14>MAY</bcp14> indicate that the conditional request + failed by responding with a <x:ref>412 (Precondition Failed)</x:ref> + status code. Alternatively, if the request is a state-changing operation + that appears to have already been applied to the selected representation, + the origin server <bcp14>MAY</bcp14> respond with a <x:ref>2xx (Successful)</x:ref> + status code + (i.e., the change requested by the user agent has already succeeded, but + the user agent might not be aware of it, perhaps because the prior response + was lost or an equivalent change was made by some other user agent). +</t> +<t> + Allowing an origin server to send a success response when a change request + appears to have already been applied is more efficient for many authoring + use cases, but comes with some risk if multiple user agents are making + change requests that are very similar but not cooperative. + In those cases, an origin server is better off being stringent in sending + 412 for every failed precondition on an unsafe method. +</t> +<t> + A client <bcp14>MAY</bcp14> send an If-Unmodified-Since header field in a + <x:ref>GET</x:ref> request to indicate that it would prefer a + <x:ref>412 (Precondition Failed)</x:ref> response if the selected + representation has been modified. However, this is only useful in range + requests (<xref target="range.requests"/>) for completing a previously + received partial representation when there is no desire for a new + representation. <x:ref>If-Range</x:ref> (<xref target="field.if-range"/>) + is better suited for range requests when the client prefers to receive a + new representation. +</t> +<t> + A cache or intermediary <bcp14>MAY</bcp14> ignore If-Unmodified-Since because its + interoperability features are only necessary for an origin server. +</t> +</section> + +<section title="If-Range" anchor="field.if-range"> + <x:anchor-alias value="header.if-range"/> + <iref primary="true" item="Fields" subitem="If-Range" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="If-Range" x:for-anchor=""/><iref primary="true" item="If-Range header field" x:for-anchor=""/> + <x:anchor-alias value="If-Range"/> +<t> + The "If-Range" header field provides a special conditional request + mechanism that is similar to the <x:ref>If-Match</x:ref> and + <x:ref>If-Unmodified-Since</x:ref> header fields but that instructs the + recipient to ignore the <x:ref>Range</x:ref> header field if the validator + doesn't match, resulting in transfer of the new <x:ref>selected representation</x:ref> + instead of a <x:ref>412 (Precondition Failed)</x:ref> response. +</t> +<t> + If a client has a partial copy of a representation and wishes + to have an up-to-date copy of the entire representation, it could use the + <x:ref>Range</x:ref> header field with a conditional GET (using + either or both of <x:ref>If-Unmodified-Since</x:ref> and + <x:ref>If-Match</x:ref>.) However, if the precondition fails because the + representation has been modified, the client would then have to make a + second request to obtain the entire current representation. +</t> +<t> + The "If-Range" header field allows a client to "short-circuit" the second + request. Informally, its meaning is as follows: if the representation is unchanged, + send me the part(s) that I am requesting in Range; otherwise, send me the + entire representation. 
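+</t>
+<t>
+ The following non-normative sketch (in Python; the host, path, local file
+ name, and stored entity tag are illustrative assumptions) resumes an
+ interrupted download and relies on If-Range to fall back to a full
+ transfer when the representation has changed:
+</t>
+<sourcecode type="python">
+# Non-normative sketch: resuming a download with Range plus If-Range so
+# that a changed representation is transferred in full rather than
+# producing an inconsistent partial copy.  Host, path, local file name,
+# and the stored (strong) entity tag are illustrative assumptions.
+from http.client import HTTPSConnection
+
+def resume_download(host, path, local_file, received_bytes, strong_etag):
+    conn = HTTPSConnection(host)
+    conn.request("GET", path,
+                 headers={"Range": f"bytes={received_bytes}-",
+                          "If-Range": strong_etag})
+    resp = conn.getresponse()
+    if resp.status == 206:          # unchanged: append the missing part
+        mode = "ab"
+    elif resp.status == 200:        # changed: replace what was stored
+        mode = "wb"
+    else:
+        raise RuntimeError(f"unexpected status {resp.status}")
+    with open(local_file, mode) as f:
+        f.write(resp.read())
+</sourcecode>
+<t>
+ An If-Range field value carries either an entity tag or a date: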
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="If-Range"/> + <x:ref>If-Range</x:ref> = <x:ref>entity-tag</x:ref> / <x:ref>HTTP-date</x:ref> +</sourcecode> +<t> + A valid <x:ref>entity-tag</x:ref> can be distinguished from a valid + <x:ref>HTTP-date</x:ref> by examining the first three characters for a + DQUOTE. +</t> +<t> + A client <bcp14>MUST NOT</bcp14> generate an If-Range header field in a request that + does not contain a <x:ref>Range</x:ref> header field. + A server <bcp14>MUST</bcp14> ignore an If-Range header field received in a request that + does not contain a <x:ref>Range</x:ref> header field. + An origin server <bcp14>MUST</bcp14> ignore an If-Range header field received in a + request for a target resource that does not support Range requests. +</t> +<t> + A client <bcp14>MUST NOT</bcp14> generate an If-Range header field containing an + entity tag that is marked as weak. + A client <bcp14>MUST NOT</bcp14> generate an If-Range header field containing an + <x:ref>HTTP-date</x:ref> unless the client has no entity tag for + the corresponding representation and the date is a strong validator + in the sense defined by <xref target="lastmod.comparison"/>. +</t> +<t> + A server that receives an If-Range header field on a Range request <bcp14>MUST</bcp14> + evaluate the condition per <xref target="evaluation"/> prior to + performing the method. +</t> +<t> + To evaluate a received If-Range header field containing an + <x:ref>HTTP-date</x:ref>: +</t> +<ol> + <li>If the <x:ref>HTTP-date</x:ref> validator provided is not a + strong validator in the sense defined by + <xref target="lastmod.comparison"/>, the condition is false.</li> + <li>If the <x:ref>HTTP-date</x:ref> validator provided exactly matches + the <x:ref>Last-Modified</x:ref> field value for the selected + representation, the condition is true.</li> + <li>Otherwise, the condition is false.</li> +</ol> +<t> + To evaluate a received If-Range header field containing an + <x:ref>entity-tag</x:ref>: +</t> +<ol> + <li>If the <x:ref>entity-tag</x:ref> validator provided exactly matches + the <x:ref>ETag</x:ref> field value for the selected representation + using the strong comparison function + (<xref target="entity.tag.comparison"/>), the condition is true.</li> + <li>Otherwise, the condition is false.</li> +</ol> +<t> + A recipient of an If-Range header field <bcp14>MUST</bcp14> ignore the + <x:ref>Range</x:ref> header field if the If-Range condition + evaluates to false. Otherwise, the recipient <bcp14>SHOULD</bcp14> process the + <x:ref>Range</x:ref> header field as requested. +</t> +<t> + Note that the If-Range comparison is by exact match, including when the + validator is an <x:ref>HTTP-date</x:ref>, and so it + differs from the "earlier than or equal to" comparison used when evaluating + an <x:ref>If-Unmodified-Since</x:ref> conditional. +</t> +</section> +</section> + +<section title="Evaluation of Preconditions" anchor="evaluation"> + +<section title="When to Evaluate" anchor="when.to.evaluate"> +<t> + Except when excluded below, a recipient cache or origin server <bcp14>MUST</bcp14> + evaluate received request preconditions after it has successfully performed + its normal request checks and just before it would process the request content + (if any) or perform the action associated with the request method. 
+ A server <bcp14>MUST</bcp14> ignore all received preconditions if its response to the + same request without those conditions, prior to processing the request content, + would have been a status code other than a <x:ref>2xx (Successful)</x:ref> + or <x:ref>412 (Precondition Failed)</x:ref>. + In other words, redirects and failures that can be detected before + significant processing occurs take precedence over the evaluation + of preconditions. +</t> +<t> + A server that is not the origin server for the target resource and cannot + act as a cache for requests on the target resource <bcp14>MUST NOT</bcp14> evaluate the + conditional request header fields defined by this specification, and it + <bcp14>MUST</bcp14> forward them if the request is forwarded, since the generating + client intends that they be evaluated by a server that can provide a + current representation. + Likewise, a server <bcp14>MUST</bcp14> ignore the conditional request header fields + defined by this specification when received with a request method that does + not involve the selection or modification of a + <x:ref>selected representation</x:ref>, such as CONNECT, OPTIONS, or TRACE. +</t> +<t> + Note that protocol extensions can modify the conditions under which + preconditions are evaluated or the consequences of their evaluation. + For example, the immutable cache directive + (defined by <xref target="RFC8246"/>) instructs caches to forgo + forwarding conditional requests when they hold a fresh response. +</t> +<t> + Although conditional request header fields are defined as being usable with + the HEAD method (to keep HEAD's semantics consistent with those of GET), + there is no point in sending a conditional HEAD because a successful + response is around the same size as a <x:ref>304 (Not Modified)</x:ref> + response and more useful than a <x:ref>412 (Precondition Failed)</x:ref> + response. +</t> +</section> + +<section title="Precedence of Preconditions" anchor="precedence"> +<t> + When more than one conditional request header field is present in a request, + the order in which the fields are evaluated becomes important. In practice, + the fields defined in this document are consistently implemented in a + single, logical order, since "lost update" preconditions have more strict + requirements than cache validation, a validated cache is more efficient + than a partial response, and entity tags are presumed to be more accurate + than date validators. 
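+</t>
+<t>
+  As a non-normative illustration of the ordering defined next, the sketch
+  below walks an origin server through the same steps for a simplified
+  request and resource model. The dictionary keys are illustrative, validator
+  matching is reduced to simple equality in place of the strong and weak
+  comparison functions, dates are assumed to be comparable timestamps, and
+  the "already succeeded" escape hatches as well as the HTTP-date form of
+  If-Range are omitted.
+</t>
+<sourcecode type="python"><![CDATA[
+def evaluate_preconditions(req, res):
+    """Return the status code that ends evaluation, or None to perform the method."""
+    get_or_head = req["method"] in ("GET", "HEAD")
+
+    if req.get("if_match") is not None:                            # step 1
+        if req["if_match"] != res["etag"]:
+            return 412
+    elif req.get("if_unmodified_since") is not None:               # step 2
+        if res["last_modified"] > req["if_unmodified_since"]:
+            return 412
+
+    if req.get("if_none_match") is not None:                       # step 3
+        if req["if_none_match"] == res["etag"]:
+            return 304 if get_or_head else 412
+    elif get_or_head and req.get("if_modified_since") is not None: # step 4
+        if res["last_modified"] <= req["if_modified_since"]:
+            return 304
+
+    if req["method"] == "GET" and req.get("range") is not None:    # step 5
+        if req.get("if_range") is not None and req["if_range"] != res["etag"]:
+            req["range"] = None   # ignore Range; respond with the full representation
+    return None                                                    # step 6: perform the method
+
+# Example: a conditional GET whose entity tag still matches yields 304.
+# evaluate_preconditions({"method": "GET", "if_none_match": '"v1"'},
+#                        {"etag": '"v1"', "last_modified": 0})  ==  304
+]]></sourcecode>
+<t>
+  The sketch is illustrative only; the normative requirements are the steps
+  listed below.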
+</t> +<t> + A recipient cache or origin server <bcp14>MUST</bcp14> evaluate the request + preconditions defined by this specification in the following order: +</t> +<ol> + <li anchor="precedence1"><t>When recipient is the origin server and + <x:ref>If-Match</x:ref> is present, + evaluate the <x:ref>If-Match</x:ref> precondition:</t> + <ul> + <li>if true, continue to step <xref target="precedence3" format="counter"/></li> + <li>if false, respond <x:ref>412 (Precondition Failed)</x:ref> unless + it can be determined that the state-changing request has already + succeeded (see <xref target="field.if-match"/>)</li> + </ul> + </li> + <li anchor="precedence2"><t>When recipient is the origin server, + <x:ref>If-Match</x:ref> is not present, and + <x:ref>If-Unmodified-Since</x:ref> is present, + evaluate the <x:ref>If-Unmodified-Since</x:ref> precondition:</t> + <ul> + <li>if true, continue to step <xref target="precedence3" format="counter"/></li> + <li>if false, respond <x:ref>412 (Precondition Failed)</x:ref> unless + it can be determined that the state-changing request has already + succeeded (see <xref target="field.if-unmodified-since"/>)</li> + </ul> + </li> + <li anchor="precedence3"><t>When <x:ref>If-None-Match</x:ref> is present, + evaluate the <x:ref>If-None-Match</x:ref> precondition:</t> + <ul> + <li>if true, continue to step <xref target="precedence5" format="counter"/></li> + <li>if false for GET/HEAD, respond <x:ref>304 (Not Modified)</x:ref></li> + <li>if false for other methods, respond <x:ref>412 (Precondition Failed)</x:ref></li> + </ul> + </li> + <li anchor="precedence4"><t>When the method is GET or HEAD, + <x:ref>If-None-Match</x:ref> is not present, and + <x:ref>If-Modified-Since</x:ref> is present, + evaluate the <x:ref>If-Modified-Since</x:ref> precondition:</t> + <ul> + <li>if true, continue to step <xref target="precedence5" format="counter"/></li> + <li>if false, respond <x:ref>304 (Not Modified)</x:ref></li> + </ul> + </li> + <li anchor="precedence5"><t>When the method is GET and both + <x:ref>Range</x:ref> and <x:ref>If-Range</x:ref> are present, + evaluate the <x:ref>If-Range</x:ref> precondition:</t> + <ul> + <li>if true and the <x:ref>Range</x:ref> is + applicable to the <x:ref>selected representation</x:ref>, + respond <x:ref>206 (Partial Content)</x:ref></li> + <li>otherwise, ignore the <x:ref>Range</x:ref> header field + and respond <x:ref>200 (OK)</x:ref></li> + </ul> + </li> + <li anchor="precedencelast"><t>Otherwise,</t> + <ul> + <li>perform the requested method and + respond according to its success or failure.</li> + </ul> + </li> +</ol> +<t> + Any extension to HTTP that defines additional conditional request + header fields ought to define the order + for evaluating such fields in relation to those defined in this document + and other conditionals that might be found in practice. +</t> +</section> +</section> +</section> + +<section title="Range Requests" anchor="range.requests"> +<t> + Clients often encounter interrupted data + transfers as a result of canceled requests or dropped connections. When a + client has stored a partial representation, it is desirable to request the + remainder of that representation in a subsequent request rather than + transfer the entire representation. Likewise, devices with limited local + storage might benefit from being able to request only a subset of a larger + representation, such as a single page of a very large document, or the + dimensions of an embedded image. 
+</t> +<t> + Range requests are an <bcp14>OPTIONAL</bcp14> feature + of HTTP, designed so that recipients not implementing this feature (or not + supporting it for the target resource) can respond as if it is a normal + GET request without impacting interoperability. Partial responses are + indicated by a distinct status code to not be mistaken for full responses + by caches that might not implement the feature. +</t> + +<section title="Range Units" anchor="range.units"> + <x:anchor-alias value="range-unit"/> + <x:anchor-alias value="range unit"/> +<t> + Representation data can be partitioned into subranges when there are + addressable structural units inherent to that data's content coding or + media type. For example, octet (a.k.a. byte) boundaries are a structural + unit common to all representation data, allowing partitions of the data to + be identified as a range of bytes at some offset from the start or end of + that data. +</t> +<t> + This general notion of a <x:dfn>range unit</x:dfn> is used + in the <x:ref>Accept-Ranges</x:ref> (<xref target="field.accept-ranges"/>) + response header field to advertise support for range requests, the + <x:ref>Range</x:ref> (<xref target="field.range"/>) request header field + to delineate the parts of a representation that are requested, and the + <x:ref>Content-Range</x:ref> (<xref target="field.content-range"/>) + header field to describe which part of a representation is being + transferred. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="range-unit"/> + <x:ref>range-unit</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + All range unit names are case-insensitive and ought to be registered + within the "HTTP Range Unit Registry", as defined in + <xref target="range.unit.registry"/>. +</t> +<t> + Range units are intended to be extensible, as described in + <xref target="range.unit.extensibility"/>. +</t> + +<section title="Range Specifiers" anchor="range.specifiers"> + <iref primary="true" item="satisfiable range"/> + <iref primary="true" item="unsatisfiable range"/> +<t> + Ranges are expressed in terms of a range unit paired with a set of range + specifiers. The range unit name determines what kinds of range-spec + are applicable to its own specifiers. Hence, the following grammar is + generic: each range unit is expected to specify requirements on when + <x:ref>int-range</x:ref>, <x:ref>suffix-range</x:ref>, and + <x:ref>other-range</x:ref> are allowed. +</t> +<t anchor="rule.ranges-specifier"> + <x:anchor-alias value="ranges-specifier"/> + <x:anchor-alias value="range-set"/> + <x:anchor-alias value="range-spec"/> + A range request can specify a single range or a set + of ranges within a single representation. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="ranges-specifier"/><iref primary="true" item="Grammar" subitem="range-set"/><iref primary="true" item="Grammar" subitem="range-spec"/> + <x:ref>ranges-specifier</x:ref> = <x:ref>range-unit</x:ref> "=" <x:ref>range-set</x:ref> + <x:ref>range-set</x:ref> = 1#<x:ref>range-spec</x:ref> + <x:ref>range-spec</x:ref> = <x:ref>int-range</x:ref> + / <x:ref>suffix-range</x:ref> + / <x:ref>other-range</x:ref> +</sourcecode> +<t anchor="rule.int-range"> + <x:anchor-alias value="int-range"/> + <x:anchor-alias value="first-pos"/> + <x:anchor-alias value="last-pos"/> + An <x:ref>int-range</x:ref> is a range expressed as two non-negative + integers or as one non-negative integer through to the end of the + representation data. 
+ The range unit specifies what the integers mean (e.g., they might indicate + unit offsets from the beginning, inclusive numbered parts, etc.). +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="int-range"/><iref primary="true" item="Grammar" subitem="first-pos"/><iref primary="true" item="Grammar" subitem="last-pos"/> + <x:ref>int-range</x:ref> = <x:ref>first-pos</x:ref> "-" [ <x:ref>last-pos</x:ref> ] + <x:ref>first-pos</x:ref> = 1*<x:ref>DIGIT</x:ref> + <x:ref>last-pos</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + An <x:ref>int-range</x:ref> is invalid if the + <x:ref>last-pos</x:ref> value is present and less than the + <x:ref>first-pos</x:ref>. +</t> +<t anchor="rule.suffix-range"> + <x:anchor-alias value="suffix-range"/> + <x:anchor-alias value="suffix-length"/> + A <x:ref>suffix-range</x:ref> is a range expressed as a suffix of the + representation data with the provided non-negative integer maximum length + (in range units). In other words, the last N units of the representation + data. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="suffix-range"/><iref primary="true" item="Grammar" subitem="suffix-length"/> + <x:ref>suffix-range</x:ref> = "-" <x:ref>suffix-length</x:ref> + <x:ref>suffix-length</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t anchor="rule.other-range"> + <x:anchor-alias value="other-range"/> + To provide for extensibility, the <x:ref>other-range</x:ref> rule is a + mostly unconstrained grammar that allows application-specific or future + range units to define additional range specifiers. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="other-range"/> + <x:ref>other-range</x:ref> = 1*( %x21-2B / %x2D-7E ) + ; 1*(VCHAR excluding comma) +</sourcecode> +<t> + A <x:ref>ranges-specifier</x:ref> is invalid if it contains any + <x:ref>range-spec</x:ref> that is invalid or undefined for the indicated + <x:ref>range-unit</x:ref>. +</t> +<t anchor="satisfiable"> + A valid <x:ref>ranges-specifier</x:ref> is <x:dfn>satisfiable</x:dfn> + if it contains at least one <x:ref>range-spec</x:ref> that is + satisfiable, as defined by the indicated <x:ref>range-unit</x:ref>. + Otherwise, the <x:ref>ranges-specifier</x:ref> is + <x:dfn>unsatisfiable</x:dfn>. +</t> +</section> + +<section title="Byte Ranges" anchor="byte.ranges"> + <x:anchor-alias value="bytes-unit"/> +<t> + The "bytes" range unit is used to express subranges of a representation + data's octet sequence. + Each byte range is expressed as an integer range at some offset, relative + to either the beginning (<x:ref>int-range</x:ref>) or end + (<x:ref>suffix-range</x:ref>) of the representation data. + Byte ranges do not use the <x:ref>other-range</x:ref> specifier. +</t> +<t> + The <x:ref>first-pos</x:ref> value in a bytes <x:ref>int-range</x:ref> + gives the offset of the first byte in a range. + The <x:ref>last-pos</x:ref> value gives the offset of the last + byte in the range; that is, the byte positions specified are inclusive. + Byte offsets start at zero. +</t> +<t> + If the representation data has a content coding applied, each byte range is + calculated with respect to the encoded sequence of bytes, not the sequence + of underlying bytes that would be obtained after decoding. 
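+</t>
+<t>
+  Because both positions are inclusive, composing a specifier for "the n
+  octets starting at offset first" involves the small off-by-one shown in
+  this non-normative sketch (the helper name is purely illustrative):
+</t>
+<sourcecode type="python"><![CDATA[
+def bytes_range(first, n):
+    """Ranges-specifier for the n octets starting at zero-based offset `first`."""
+    return "bytes=%d-%d" % (first, first + n - 1)
+
+# bytes_range(0, 500)   == "bytes=0-499"
+# bytes_range(500, 500) == "bytes=500-999"
+]]></sourcecode>
+<t>
+  The canonical examples below use the same arithmetic.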
+</t> +<t> + Examples of bytes range specifiers: +</t> +<ul> + <li><t>The first 500 bytes (byte offsets 0-499, inclusive):</t> +<artwork type="example" x:indent-with=" "> +bytes=0-499 +</artwork> + </li> + <li><t>The second 500 bytes (byte offsets 500-999, inclusive):</t> +<artwork type="example" x:indent-with=" "> +bytes=500-999 +</artwork> + </li> +</ul> +<t> + A client can limit the number of bytes requested without knowing the size + of the <x:ref>selected representation</x:ref>. + If the <x:ref>last-pos</x:ref> value is absent, or if the value is + greater than or equal to the current length of the representation data, the + byte range is interpreted as the remainder of the representation (i.e., the + server replaces the value of <x:ref>last-pos</x:ref> with a value that + is one less than the current length of the selected representation). +</t> +<t> + A client can refer to the last N bytes (N &gt; 0) of the selected + representation using a <x:ref>suffix-range</x:ref>. + If the selected representation is shorter than the specified + <x:ref>suffix-length</x:ref>, the entire representation is used. +</t> +<t> + Additional examples, assuming a representation of length 10000: +</t> +<ul> + <li><t>The final 500 bytes (byte offsets 9500-9999, inclusive):</t> +<artwork type="example" x:indent-with=" "> +bytes=-500 +</artwork> + <t>Or:</t> +<artwork type="example" x:indent-with=" "> +bytes=9500- +</artwork> + </li> + <li><t>The first and last bytes only (bytes 0 and 9999):</t> +<artwork type="example" x:indent-with=" "> +bytes=0-0,-1 +</artwork> + </li> + <li><t>The first, middle, and last 1000 bytes:</t> +<artwork type="example" x:indent-with=" "> +bytes= 0-999, 4500-5499, -1000 +</artwork> + </li> + <li><t>Other valid (but not canonical) specifications of the second 500 + bytes (byte offsets 500-999, inclusive):</t> +<artwork type="example" x:indent-with=" "> +bytes=500-600,601-999 +bytes=500-700,601-999 +</artwork> + </li> +</ul> +<t> + For a <x:ref>GET</x:ref> request, a valid bytes <x:ref>range-spec</x:ref> + is <x:ref>satisfiable</x:ref> if it is either: +</t> +<ul> + <li>an <x:ref>int-range</x:ref> with a <x:ref>first-pos</x:ref> that + is less than the current length of the selected representation or</li> + <li>a <x:ref>suffix-range</x:ref> with a non-zero + <x:ref>suffix-length</x:ref>.</li> +</ul> +<t> + When a selected representation has zero length, the only + <x:ref>satisfiable</x:ref> form of <x:ref>range-spec</x:ref> in a + <x:ref>GET</x:ref> request is a <x:ref>suffix-range</x:ref> with a + non-zero <x:ref>suffix-length</x:ref>. +</t> +<t> + In the byte-range syntax, <x:ref>first-pos</x:ref>, + <x:ref>last-pos</x:ref>, and <x:ref>suffix-length</x:ref> are + expressed as decimal number of octets. Since there is no predefined limit + to the length of content, recipients <bcp14>MUST</bcp14> anticipate potentially + large decimal numerals and prevent parsing errors due to integer conversion + overflows. 
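+</t>
+<t>
+  The rules above can be summarized by the following non-normative sketch,
+  which maps a single bytes range-spec onto concrete inclusive offsets for a
+  representation whose current length is known and non-zero. The function
+  name and the return convention are illustrative only.
+</t>
+<sourcecode type="python"><![CDATA[
+def resolve_byte_range(spec, length):
+    """Resolve one bytes range-spec (e.g. "0-499", "9500-", "-500") against a
+    representation of `length` octets (length > 0 assumed). Returns an
+    inclusive (first, last) pair, or None when the spec is invalid or not
+    satisfiable for this representation."""
+    if spec.startswith("-"):                       # suffix-range: the last N octets
+        suffix = int(spec[1:])
+        if suffix == 0:
+            return None                            # zero suffix-length is unsatisfiable
+        return (max(0, length - suffix), length - 1)
+    first_text, _, last_text = spec.partition("-")
+    first = int(first_text)
+    if first >= length:
+        return None                                # first-pos beyond the data
+    last = int(last_text) if last_text else length - 1
+    if last < first:
+        return None                                # invalid int-range
+    return (first, min(last, length - 1))          # clamp last-pos to the data
+]]></sourcecode>
+<t>
+  A production implementation additionally has to bound the decimal values it
+  accepts, as required above, before converting them to integers.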
+</t> +</section> +</section> + +<section title="Range" anchor="field.range"> + <x:anchor-alias value="header.range"/> + <iref primary="true" item="Fields" subitem="Range" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Range" x:for-anchor=""/><iref primary="true" item="Range header field" x:for-anchor=""/> + <x:anchor-alias value="Range"/> +<t> + The "Range" header field on a GET request modifies the method semantics to + request transfer of only one or more subranges of the + selected representation data (<xref target="representation.data"/>), + rather than the entire <x:ref>selected representation</x:ref>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Range"/> + <x:ref>Range</x:ref> = <x:ref>ranges-specifier</x:ref> +</sourcecode> +<t> + A server <bcp14>MAY</bcp14> ignore the Range header field. However, origin servers and + intermediate caches ought to support byte ranges when possible, since they + support efficient recovery from partially failed transfers and partial + retrieval of large representations. +</t> +<t> + A server <bcp14>MUST</bcp14> ignore a Range header field received with a request method + that is unrecognized or for which range handling is not defined. For this + specification, <x:ref>GET</x:ref> is the only method for which range handling + is defined. +</t> +<t> + An origin server <bcp14>MUST</bcp14> ignore a Range header field that contains a range + unit it does not understand. A proxy <bcp14>MAY</bcp14> discard a Range header + field that contains a range unit it does not understand. +</t> +<t> + A server that supports range requests <bcp14>MAY</bcp14> ignore or reject a + <x:ref>Range</x:ref> header field that contains an invalid + <x:ref>ranges-specifier</x:ref> (<xref target="range.specifiers"/>), + a <x:ref>ranges-specifier</x:ref> with more than two overlapping ranges, + or a set of many small ranges that are not listed in ascending order, + since these are indications of either a broken client or a deliberate + denial-of-service attack (<xref target="overlapping.ranges"/>). + A client <bcp14>SHOULD NOT</bcp14> request multiple ranges that are inherently less + efficient to process and transfer than a single range that encompasses the + same data. +</t> +<t> + A server that supports range requests <bcp14>MAY</bcp14> ignore a <x:ref>Range</x:ref> + header field when the selected representation has no content + (i.e., the selected representation's data is of zero length). +</t> +<t> + A client that is requesting multiple ranges <bcp14>SHOULD</bcp14> list those ranges in + ascending order (the order in which they would typically be received in a + complete representation) unless there is a specific need to request a later + part earlier. For example, a user agent processing a large representation + with an internal catalog of parts might need to request later parts first, + particularly if the representation consists of pages stored in reverse + order and the user agent wishes to transfer one page at a time. +</t> +<t> + The Range header field is evaluated after evaluating the precondition header + fields defined in <xref target="preconditions"/>, and only if the result in absence + of the Range header field would be a <x:ref>200 (OK)</x:ref> response. In + other words, Range is ignored when a conditional GET would result in a + <x:ref>304 (Not Modified)</x:ref> response. 
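+</t>
+<t>
+  The following non-normative sketch combines the "ignore" rules above into a
+  single check; parsing is deliberately simplified, and the denial-of-service
+  heuristics (overlapping or many unordered ranges) are left as a comment.
+</t>
+<sourcecode type="python"><![CDATA[
+def should_apply_range(method, range_value, supported_units=("bytes",)):
+    if method != "GET":
+        return False             # range handling is defined only for GET here
+    unit, sep, range_set = range_value.partition("=")
+    if not sep or unit.strip().lower() not in supported_units:
+        return False             # unknown (or missing) range unit: ignore the field
+    if not any(spec.strip() for spec in range_set.split(",")):
+        return False             # empty range-set: invalid ranges-specifier
+    # A server might also choose to ignore or reject specifiers with many
+    # overlapping or unordered ranges, as discussed above.
+    return True
+]]></sourcecode>
+<t>
+  Whether an applied Range field then yields a partial or a complete response
+  depends on the satisfiability and precondition rules described in the rest
+  of this section.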
+</t> +<t> + The If-Range header field (<xref target="field.if-range"/>) can be used as + a precondition to applying the Range header field. +</t> +<t> + If all of the preconditions are true, the server supports the Range header + field for the target resource, the received Range field-value contains a + valid <x:ref>ranges-specifier</x:ref> with a <x:ref>range-unit</x:ref> + supported for that target resource, and that + <x:ref>ranges-specifier</x:ref> is <x:ref>satisfiable</x:ref> with respect + to the selected representation, + the server <bcp14>SHOULD</bcp14> send a <x:ref>206 (Partial Content)</x:ref> response + with content containing one or more partial representations + that correspond to the satisfiable <x:ref>range-spec</x:ref>(s) requested. +</t> +<t> + The above does not imply that a server will send all requested ranges. + In some cases, it may only be possible (or efficient) to send a portion of + the requested ranges first, while expecting the client to re-request the + remaining portions later if they are still desired + (see <xref target="status.206"/>). +</t> +<t> + If all of the preconditions are true, the server supports the Range header + field for the target resource, the received Range field-value contains a + valid <x:ref>ranges-specifier</x:ref>, and either the + <x:ref>range-unit</x:ref> is not supported for that target resource or + the <x:ref>ranges-specifier</x:ref> is unsatisfiable with respect to + the selected representation, the server <bcp14>SHOULD</bcp14> send a + <x:ref>416 (Range Not Satisfiable)</x:ref> response. +</t> +</section> + +<section title="Accept-Ranges" anchor="field.accept-ranges"> + <x:anchor-alias value="header.accept-ranges"/> + <iref primary="true" item="Fields" subitem="Accept-Ranges" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Accept-Ranges" x:for-anchor=""/><iref primary="true" item="Accept-Ranges header field" x:for-anchor=""/> + <x:anchor-alias value="Accept-Ranges"/> + <x:anchor-alias value="acceptable-ranges"/> +<t> + The "Accept-Ranges" field in a response indicates whether an upstream + server supports range requests for the target resource. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Accept-Ranges"/><iref primary="true" item="Grammar" subitem="acceptable-ranges"/> + <x:ref>Accept-Ranges</x:ref> = <x:ref>acceptable-ranges</x:ref> + <x:ref>acceptable-ranges</x:ref> = 1#<x:ref>range-unit</x:ref> +</sourcecode> +<t> + For example, a server that supports + <xref target="byte.ranges">byte-range requests</xref> can send the field +</t> +<sourcecode type="http-message"> +Accept-Ranges: bytes +</sourcecode> +<t> + to indicate that it supports byte range requests for that target resource, + thereby encouraging its use by the client for future partial requests on + the same request path. + Range units are defined in <xref target="range.units"/>. +</t> +<t> + A client <bcp14>MAY</bcp14> generate range requests regardless of having received an + Accept-Ranges field. The information only provides advice for the sake of + improving performance and reducing unnecessary network transfers. +</t> +<t> + Conversely, a client <bcp14>MUST NOT</bcp14> assume that receiving an Accept-Ranges field + means that future range requests will return partial responses. The content might + change, the server might only support range requests at certain times or under + certain conditions, or a different intermediary might process the next request. 
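+</t>
+<t>
+  A non-normative client-side sketch of that advisory role (the helper name
+  is illustrative):
+</t>
+<sourcecode type="python"><![CDATA[
+def worth_trying_byte_range(accept_ranges_value):
+    """Decide whether a byte-range request looks worthwhile based on a
+    previously received Accept-Ranges value, if any."""
+    if not accept_ranges_value:
+        return False             # no advice either way; trying is still allowed
+    units = [u.strip().lower() for u in accept_ranges_value.split(",")]
+    return "bytes" in units
+]]></sourcecode>
+<t>
+  The field therefore only helps a client avoid requests that are unlikely to
+  be useful; it does not change which responses the client has to be prepared
+  to handle.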
+</t> +<t> + A server that does not support any kind of range request for the target + resource <bcp14>MAY</bcp14> send +</t> +<sourcecode type="http-message"> +Accept-Ranges: none +</sourcecode> +<t> + to advise the client not to attempt a range request on the same request path. + The range unit "none" is reserved for this purpose. +</t> +<t> + The Accept-Ranges field <bcp14>MAY</bcp14> be sent in a trailer section, but is preferred + to be sent as a header field because the information is particularly useful + for restarting large information transfers that have failed in mid-content + (before the trailer section is received). +</t> +</section> + +<section title="Content-Range" anchor="field.content-range"> + <x:anchor-alias value="header.content-range"/> + <iref primary="true" item="Fields" subitem="Content-Range" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-Range" x:for-anchor=""/><iref primary="true" item="Content-Range header field" x:for-anchor=""/> + <x:anchor-alias value="Content-Range"/> + <x:anchor-alias value="range-resp"/> + <x:anchor-alias value="incl-range"/> + <x:anchor-alias value="unsatisfied-range"/> + <x:anchor-alias value="complete-length"/> +<t> + The "Content-Range" header field is sent in a single part + <x:ref>206 (Partial Content)</x:ref> response to indicate the partial range + of the <x:ref>selected representation</x:ref> enclosed as the message content, sent in + each part of a multipart 206 response to indicate the range enclosed within + each body part (<xref target="multipart.byteranges"/>), and sent in <x:ref>416 (Range Not Satisfiable)</x:ref> + responses to provide information about the selected representation. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Content-Range"/><iref primary="true" item="Grammar" subitem="range-resp"/><iref primary="true" item="Grammar" subitem="incl-range"/><iref primary="true" item="Grammar" subitem="unsatisfied-range"/><iref primary="true" item="Grammar" subitem="complete-length"/><iref primary="false" item="Grammar" subitem="first-pos"/><iref primary="false" item="Grammar" subitem="last-pos"/> + <x:ref>Content-Range</x:ref> = <x:ref>range-unit</x:ref> <x:ref>SP</x:ref> + ( <x:ref>range-resp</x:ref> / <x:ref>unsatisfied-range</x:ref> ) + + <x:ref>range-resp</x:ref> = <x:ref>incl-range</x:ref> "/" ( <x:ref>complete-length</x:ref> / "*" ) + <x:ref>incl-range</x:ref> = <x:ref>first-pos</x:ref> "-" <x:ref>last-pos</x:ref> + <x:ref>unsatisfied-range</x:ref> = "*/" <x:ref>complete-length</x:ref> + + <x:ref>complete-length</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + If a <x:ref>206 (Partial Content)</x:ref> response contains a + <x:ref>Content-Range</x:ref> header field with a <x:ref>range unit</x:ref> + (<xref target="range.units"/>) that the recipient does not understand, the + recipient <bcp14>MUST NOT</bcp14> attempt to recombine it with a stored representation. + A proxy that receives such a message <bcp14>SHOULD</bcp14> forward it downstream. +</t> +<t> + Content-Range might also be sent as a request modifier to request a + partial PUT, as described in <xref target="partial.PUT"/>, based on private + agreements between client and origin server. + A server <bcp14>MUST</bcp14> ignore a Content-Range header field received in a request + with a method for which Content-Range support is not defined. 
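+</t>
+<t>
+  A non-normative sketch of composing field values that follow this grammar
+  for the "bytes" range unit (the function names are illustrative):
+</t>
+<sourcecode type="python"><![CDATA[
+def content_range(first, last, complete=None):
+    """range-resp form; pass complete=None when the overall length is unknown."""
+    total = "*" if complete is None else str(complete)
+    return "bytes %d-%d/%s" % (first, last, total)
+
+def unsatisfied_content_range(complete):
+    """unsatisfied-range form, as used in 416 (Range Not Satisfiable) responses."""
+    return "bytes */%d" % complete
+
+# content_range(42, 1233, 1234)     == "bytes 42-1233/1234"
+# content_range(42, 1233)           == "bytes 42-1233/*"
+# unsatisfied_content_range(1234)   == "bytes */1234"
+]]></sourcecode>
+<t>
+  The requirements and examples below show when each of these forms is
+  appropriate on the wire.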
+</t> +<t> + For byte ranges, a sender <bcp14>SHOULD</bcp14> indicate the complete length of the + representation from which the range has been extracted, unless the complete + length is unknown or difficult to determine. An asterisk character ("*") in + place of the complete-length indicates that the representation length was + unknown when the header field was generated. +</t> +<t> + The following example illustrates when the complete length of the selected + representation is known by the sender to be 1234 bytes: +</t> +<sourcecode type="http-message"> +Content-Range: bytes 42-1233/1234 +</sourcecode> +<t> + and this second example illustrates when the complete length is unknown: +</t> +<sourcecode type="http-message"> +Content-Range: bytes 42-1233/* +</sourcecode> +<t> + A Content-Range field value is invalid if it contains a + <x:ref>range-resp</x:ref> that has a <x:ref>last-pos</x:ref> + value less than its <x:ref>first-pos</x:ref> value, or a + <x:ref>complete-length</x:ref> value less than or equal to its + <x:ref>last-pos</x:ref> value. The recipient of an invalid + <x:ref>Content-Range</x:ref> <bcp14>MUST NOT</bcp14> attempt to recombine the received + content with a stored representation. +</t> +<t> + A server generating a <x:ref>416 (Range Not Satisfiable)</x:ref> response + to a byte-range request <bcp14>SHOULD</bcp14> send a Content-Range header field with an + <x:ref>unsatisfied-range</x:ref> value, as in the following example: +</t> +<sourcecode type="http-message"> +Content-Range: bytes */1234 +</sourcecode> +<t> + The complete-length in a 416 response indicates the current length of the + selected representation. +</t> +<t> + The Content-Range header field has no meaning for status codes that do + not explicitly describe its semantic. For this specification, only the + <x:ref>206 (Partial Content)</x:ref> and + <x:ref>416 (Range Not Satisfiable)</x:ref> status codes describe a meaning + for Content-Range. +</t> +<t> + The following are examples of Content-Range values in which the + selected representation contains a total of 1234 bytes: +</t> +<ul> + <li><t>The first 500 bytes:</t> +<sourcecode type="http-message"> +Content-Range: bytes 0-499/1234 +</sourcecode> + </li> + <li><t>The second 500 bytes:</t> +<sourcecode type="http-message"> +Content-Range: bytes 500-999/1234 +</sourcecode> + </li> + <li><t>All except for the first 500 bytes:</t> +<sourcecode type="http-message"> +Content-Range: bytes 500-1233/1234 +</sourcecode> + </li> + <li><t>The last 500 bytes:</t> +<sourcecode type="http-message"> +Content-Range: bytes 734-1233/1234 +</sourcecode> + </li> +</ul> +</section> + +<section title="Partial PUT" anchor="partial.PUT"> + <x:anchor-alias value="partial PUT"/> + <iref primary="false" item="Fields" subitem="Content-Range"/><iref primary="false" item="Header Fields" subitem="Content-Range"/><iref primary="false" item="Content-Range header field"/> +<t> + Some origin servers support <x:ref>PUT</x:ref> of a partial representation + when the user agent sends a <x:ref>Content-Range</x:ref> header field + (<xref target="field.content-range"/>) in the request, though + such support is inconsistent and depends on private agreements with + user agents. In general, it requests that the state of the + <x:ref>target resource</x:ref> be partly replaced with the enclosed content + at an offset and length indicated by the Content-Range value, where the + offset is relative to the current selected representation. 
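+</t>
+<t>
+  What "partly replaced" means in detail is left to that private agreement.
+  The following non-normative sketch shows one plausible reading for a
+  bytes-addressed resource, splicing the enclosed content over the indicated
+  inclusive range; the function name and error handling are illustrative.
+</t>
+<sourcecode type="python"><![CDATA[
+def apply_partial_put(stored, first, last, enclosed):
+    """Replace octets first..last (inclusive) of `stored` with `enclosed`."""
+    if len(enclosed) != last - first + 1:
+        raise ValueError("Content-Range does not match the enclosed content")
+    if first > len(stored):
+        raise ValueError("offset beyond the current representation")
+    return stored[:first] + enclosed + stored[last + 1:]
+]]></sourcecode>
+<t>
+  Servers that have made no such agreement fall under the requirement that
+  follows.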
+</t> +<t> + An origin server <bcp14>SHOULD</bcp14> respond with a <x:ref>400 (Bad Request)</x:ref> + status code if it receives <x:ref>Content-Range</x:ref> on a PUT for a + target resource that does not support partial PUT requests. +</t> +<t> + Partial PUT is not backwards compatible with the original definition of PUT. + It may result in the content being written as a complete replacement for the + current representation. +</t> +<t> + Partial resource updates are also possible by targeting a separately + identified resource with state that overlaps or extends a portion of the + larger resource, or by using a different method that has been specifically + defined for partial updates (for example, the PATCH method defined in + <xref target="RFC5789"/>). +</t> +</section> + +<section title="Media Type multipart/byteranges" anchor="multipart.byteranges"> +<iref item="Media Type" subitem="multipart/byteranges" primary="true"/> +<iref item="multipart/byteranges Media Type" primary="true"/> +<t> + When a <x:ref>206 (Partial Content)</x:ref> response message includes the + content of multiple ranges, they are transmitted as body parts in a + multipart message body (<xref target="RFC2046" x:fmt="," x:sec="5.1"/>) + with the media type of "multipart/byteranges". +</t> +<t> + The "multipart/byteranges" media type includes one or more body parts, each + with its own <x:ref>Content-Type</x:ref> and <x:ref>Content-Range</x:ref> + fields. The required boundary parameter specifies the boundary string used + to separate each body part. +</t> +<t> + Implementation Notes: +</t> +<ol> + <li>Additional CRLFs might precede the first boundary string in the body.</li> + <li>Although <xref target="RFC2046"/> permits the boundary string to be + quoted, some existing implementations handle a quoted boundary + string incorrectly.</li> + <li>A number of clients and servers were coded to an early draft + of the byteranges specification that used a media type of + "multipart/x-byteranges",<iref item="multipart/x-byteranges Media Type"/><iref item="Media Type" subitem="multipart/x-byteranges"/> + which is almost (but not quite) compatible with this type.</li> +</ol> +<t> + Despite the name, the "multipart/byteranges" media type is not limited to + byte ranges. The following example uses an "exampleunit" range unit: +</t> +<sourcecode type="http-message"> +HTTP/1.1 206 Partial Content +Date: Tue, 14 Nov 1995 06:25:24 GMT +Last-Modified: Tue, 14 July 04:58:08 GMT +Content-Length: 2331785 +Content-Type: multipart/byteranges; boundary=THIS_STRING_SEPARATES + +--THIS_STRING_SEPARATES +Content-Type: video/example +Content-Range: exampleunit 1.2-4.3/25 + +...the first range... +--THIS_STRING_SEPARATES +Content-Type: video/example +Content-Range: exampleunit 11.2-14.3/25 + +...the second range +--THIS_STRING_SEPARATES-- +</sourcecode> +<t> + The following information serves as the registration form for the + "multipart/byteranges" media type. 
+</t> +<dl> + <dt>Type name:</dt> + <dd>multipart</dd> + <dt>Subtype name:</dt> + <dd>byteranges</dd> + <dt>Required parameters:</dt> + <dd>boundary</dd> + <dt>Optional parameters:</dt> + <dd>N/A</dd> + <dt>Encoding considerations:</dt> + <dd>only "7bit", "8bit", or "binary" are permitted</dd> + <dt>Security considerations:</dt> + <dd>see <xref target="security.considerations"/></dd> + <dt>Interoperability considerations:</dt> + <dd>N/A</dd> + <dt>Published specification:</dt> + <dd>RFC 9110 (see <xref target="multipart.byteranges"/>)</dd> + <dt>Applications that use this media type:</dt> + <dd>HTTP components supporting multiple ranges in a single request</dd> + <dt>Fragment identifier considerations:</dt> + <dd>N/A</dd> + <dt>Additional information:</dt> + <dd> + <dl> + <dt>Deprecated alias names for this type:</dt> + <dd>N/A</dd> + <dt>Magic number(s):</dt> + <dd>N/A</dd> + <dt>File extension(s):</dt> + <dd>N/A</dd> + <dt>Macintosh file type code(s):</dt> + <dd>N/A</dd> + </dl> + </dd> + <dt>Person and email address to contact for further information:</dt> + <dd>See Authors' Addresses section.</dd> + <dt>Intended usage:</dt> + <dd>COMMON</dd> + <dt>Restrictions on usage:</dt> + <dd>N/A</dd> + <dt>Author:</dt> + <dd>See Authors' Addresses section.</dd> + <dt>Change controller:</dt> + <dd>IESG</dd> +</dl> +</section> +</section> + +<section title="Status Codes" anchor="status.codes"> + <iref item="Status Code"/> +<t> + The status code of a response is a three-digit integer code that describes + the result of the request and the semantics of the response, including + whether the request was successful and what content is enclosed (if any). + All valid status codes are within the range of 100 to 599, inclusive. +</t> +<t> + The first digit of the status code defines the class of response. The + last two digits do not have any categorization role. There are five + values for the first digit: +</t> +<ul> + <li> + <x:ref>1xx (Informational)</x:ref>: The request was received, continuing + process + </li> + <li> + <x:ref>2xx (Successful)</x:ref>: The request was successfully received, + understood, and accepted + </li> + <li> + <x:ref>3xx (Redirection)</x:ref>: Further action needs to be taken in order to + complete the request + </li> + <li> + <x:ref>4xx (Client Error)</x:ref>: The request contains bad syntax or cannot + be fulfilled + </li> + <li> + <x:ref>5xx (Server Error)</x:ref>: The server failed to fulfill an apparently + valid request + </li> +</ul> +<t> + HTTP status codes are extensible. A client is not required to understand + the meaning of all registered status codes, though such understanding is + obviously desirable. However, a client <bcp14>MUST</bcp14> understand the class of any + status code, as indicated by the first digit, and treat an unrecognized + status code as being equivalent to the x00 status code of that class. +</t> +<t> + For example, if a client receives an unrecognized status code of 471, + it can see from the first digit that there was something wrong with its + request and treat the response as if it had received a + <x:ref>400 (Bad Request)</x:ref> status code. The response + message will usually contain a representation that explains the status. +</t> +<t> + Values outside the range 100..599 are invalid. Implementations often use + three-digit integer values outside of that range (i.e., 600..999) for + internal communication of non-HTTP status (e.g., library errors). 
A client + that receives a response with an invalid status code <bcp14>SHOULD</bcp14> process the + response as if it had a <x:ref>5xx (Server Error)</x:ref> status code. +</t> +<t anchor="final.interim"> + <x:anchor-alias value="final"/> + <x:anchor-alias value="interim"/> + <iref item="Status Codes" subitem="Final"/> + <iref item="Status Codes" subitem="Interim"/> + <iref item="Status Codes" subitem="Informational"/> + A single request can have multiple associated responses: zero or more + <x:dfn>interim</x:dfn> (non-final) responses with status codes in the + "informational" (<x:ref>1xx</x:ref>) range, followed by exactly one + <x:dfn>final</x:dfn> response with a status code in one of the other ranges. +</t> + +<section title="Overview of Status Codes" anchor="overview.of.status.codes"> +<t> + The status codes listed below are defined in this specification. + The reason phrases listed here are only recommendations — they can be + replaced by local equivalents or left out altogether without affecting the + protocol. +</t> +<t> + Responses with status codes that are defined as heuristically cacheable + (e.g., 200, 203, 204, 206, 300, 301, 308, 404, 405, 410, 414, and 501 in this + specification) can be reused by a cache with heuristic expiration unless + otherwise indicated by the method definition or explicit cache controls + <xref target="CACHING"/>; all other status codes are not heuristically cacheable. +</t> +<t> + Additional status codes, outside the scope of this specification, have been + specified for use in HTTP. All such status codes ought to be registered + within the "Hypertext Transfer Protocol (HTTP) Status Code Registry", + as described in <xref target="status.code.extensibility"/>. +</t> +</section> + +<section title="Informational 1xx" anchor="status.1xx"> + <x:anchor-alias value="1xx"/> + <x:anchor-alias value="1xx (Informational)"/> + <iref primary="true" item="1xx Informational (status code class)" x:for-anchor=""/> + <iref primary="true" item="Status Codes Classes" subitem="1xx Informational" x:for-anchor=""/> +<t> + The 1xx (Informational) class of status code indicates an + interim response for communicating connection status or request progress + prior to completing the requested action and sending a final response. + Since HTTP/1.0 did not define any 1xx status codes, a server <bcp14>MUST NOT</bcp14> send + a 1xx response to an HTTP/1.0 client. +</t> +<t> + A 1xx response is terminated by the end of the header section; + it cannot contain content or trailers. +</t> +<t> + A client <bcp14>MUST</bcp14> be able to parse one or more 1xx responses received + prior to a final response, even if the client does not expect one. + A user agent <bcp14>MAY</bcp14> ignore unexpected 1xx responses. +</t> +<t> + A proxy <bcp14>MUST</bcp14> forward 1xx responses unless the proxy itself + requested the generation of the 1xx response. For example, if a + proxy adds an "Expect: 100-continue" header field when it forwards a request, + then it need not forward the corresponding <x:ref>100 (Continue)</x:ref> + response(s). +</t> + +<section title="100 Continue" anchor="status.100"> + <iref primary="true" item="100 Continue (status code)" x:for-anchor=""/> + <x:anchor-alias value="100 (Continue)"/> +<t> + The 100 (Continue) status code indicates that the initial + part of a request has been received and has not yet been rejected by the + server. The server intends to send a final response after the request has + been fully received and acted upon. 
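+</t>
+<t>
+  A non-normative sketch of the client side: interim responses, including
+  100 (Continue), carry no content and are simply consumed until the final
+  response arrives. read_response is a hypothetical callable supplied by the
+  connection layer.
+</t>
+<sourcecode type="python"><![CDATA[
+def read_final_response(read_response):
+    """read_response() returns a (status_code, response) pair per response read."""
+    while True:
+        status, response = read_response()
+        if 100 <= status <= 199:
+            continue              # interim response: discard and keep reading
+        return response           # exactly one final response follows the interim ones
+]]></sourcecode>
+<t>
+  How the client decides when to send the request content is governed by the
+  Expect handling described next.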
+</t> +<t> + When the request contains an <x:ref>Expect</x:ref> header field that + includes a <x:ref>100-continue</x:ref> expectation, the 100 response + indicates that the server wishes to receive the request content, + as described in <xref target="field.expect"/>. The client + ought to continue sending the request and discard the 100 response. +</t> +<t> + If the request did not contain an <x:ref>Expect</x:ref> header field + containing the <x:ref>100-continue</x:ref> expectation, + the client can simply discard this interim response. +</t> +</section> + +<section title="101 Switching Protocols" anchor="status.101"> + <iref primary="true" item="101 Switching Protocols (status code)" x:for-anchor=""/> + <x:anchor-alias value="101 (Switching Protocols)"/> +<t> + The 101 (Switching Protocols) status code indicates that the + server understands and is willing to comply with the client's request, + via the <x:ref>Upgrade</x:ref> header field (<xref target="field.upgrade"/>), for + a change in the application protocol being used on this connection. + The server <bcp14>MUST</bcp14> generate an Upgrade header field in the response that + indicates which protocol(s) will be in effect after this response. +</t> +<t> + It is assumed that the server will only agree to switch protocols when + it is advantageous to do so. For example, switching to a newer version of + HTTP might be advantageous over older versions, and switching to a + real-time, synchronous protocol might be advantageous when delivering + resources that use such features. +</t> +</section> +</section> + +<section title="Successful 2xx" anchor="status.2xx"> + <x:anchor-alias value="2xx"/> + <x:anchor-alias value="2xx (Successful)"/> + <iref primary="true" item="2xx Successful (status code class)" x:for-anchor=""/> + <iref primary="true" item="Status Codes Classes" subitem="2xx Successful" x:for-anchor=""/> +<t> + The 2xx (Successful) class of status code indicates that + the client's request was successfully received, understood, and accepted. +</t> + +<section title="200 OK" anchor="status.200"> + <iref primary="true" item="200 OK (status code)" x:for-anchor=""/> + <x:anchor-alias value="200 (OK)"/> +<t> + The 200 (OK) status code indicates that the request has + succeeded. The content sent in a 200 response depends on the request + method. For the methods defined by this specification, the intended meaning + of the content can be summarized as: +</t> +<table align="left"> + <thead> + <tr> + <th>Request Method</th> + <th>Response content is a representation of:</th> + </tr> + </thead> + <tbody> + <tr> + <td>GET</td> + <td>the <x:ref>target resource</x:ref></td> + </tr> + <tr> + <td>HEAD</td> + <td>the <x:ref>target resource</x:ref>, like GET, but without + transferring the representation data</td> + </tr> + <tr> + <td>POST</td> + <td>the status of, or results obtained from, the action</td> + </tr> + <tr> + <td>PUT, DELETE</td> + <td>the status of the action</td> + </tr> + <tr> + <td>OPTIONS</td> + <td>communication options for the target resource</td> + </tr> + <tr> + <td>TRACE</td> + <td>the request message as received by the server returning the + trace</td> + </tr> + </tbody> +</table> +<t> + Aside from responses to CONNECT, a 200 response is expected to contain + message content unless the message framing explicitly indicates that the + content has zero length. If some aspect of the request indicates a + preference for no content upon success, the origin server ought to send a + <x:ref>204 (No Content)</x:ref> response instead. 
+ For CONNECT, there is no content because the successful result is a + tunnel, which begins immediately after the 200 response header section. +</t> +<t> + A 200 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +<t> + In 200 responses to GET or HEAD, an origin server <bcp14>SHOULD</bcp14> send any + available validator fields (<xref target="response.validator"/>) for the + <x:ref>selected representation</x:ref>, with both a strong entity tag and + a <x:ref>Last-Modified</x:ref> date being preferred. +</t> +<t> + In 200 responses to state-changing methods, any validator fields + (<xref target="response.validator"/>) sent in the response convey the + current validators for the new representation formed as a result of + successfully applying the request semantics. Note that the PUT method + (<xref target="PUT"/>) has additional requirements that might preclude + sending such validators. +</t> +</section> + +<section title="201 Created" anchor="status.201"> + <iref primary="true" item="201 Created (status code)" x:for-anchor=""/> + <x:anchor-alias value="201 (Created)"/> +<t> + The 201 (Created) status code indicates that the request has + been fulfilled and has resulted in one or more new resources being created. + The primary resource created by the request is identified by either a + <x:ref>Location</x:ref> header field in the response or, if no + <x:ref>Location</x:ref> header field is received, by the target URI. +</t> +<t> + The 201 response content typically describes and links to the resource(s) + created. Any validator fields (<xref target="response.validator"/>) + sent in the response convey the current validators for a new + representation created by the request. Note that the PUT method + (<xref target="PUT"/>) has additional requirements that might preclude + sending such validators. +</t> +</section> + +<section title="202 Accepted" anchor="status.202"> + <iref primary="true" item="202 Accepted (status code)" x:for-anchor=""/> + <x:anchor-alias value="202 (Accepted)"/> +<t> + The 202 (Accepted) status code indicates that the request + has been accepted for processing, but the processing has not been + completed. The request might or might not eventually be acted upon, as it + might be disallowed when processing actually takes place. There is no + facility in HTTP for re-sending a status code from an asynchronous + operation. +</t> +<t> + The 202 response is intentionally noncommittal. Its purpose is to + allow a server to accept a request for some other process (perhaps a + batch-oriented process that is only run once per day) without + requiring that the user agent's connection to the server persist + until the process is completed. The representation sent with this + response ought to describe the request's current status and point to + (or embed) a status monitor that can provide the user with an estimate of + when the request will be fulfilled. 
+</t> +</section> + +<section title="203 Non-Authoritative Information" anchor="status.203"> + <iref primary="true" item="203 Non-Authoritative Information (status code)" x:for-anchor=""/> + <x:anchor-alias value="203 (Non-Authoritative Information)"/> +<t> + The 203 (Non-Authoritative Information) status code + indicates that the request was successful but the enclosed content has been + modified from that of the origin server's <x:ref>200 (OK)</x:ref> response + by a transforming proxy (<xref target="message.transformations"/>). This status code allows the + proxy to notify recipients when a transformation has been applied, since + that knowledge might impact later decisions regarding the content. For + example, future cache validation requests for the content might only be + applicable along the same request path (through the same proxies). +</t> +<t> + A 203 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +</section> + +<section title="204 No Content" anchor="status.204"> + <iref primary="true" item="204 No Content (status code)" x:for-anchor=""/> + <x:anchor-alias value="204 (No Content)"/> +<t> + The 204 (No Content) status code indicates that the server + has successfully fulfilled the request and that there is no additional + content to send in the response content. Metadata in the response + header fields refer to the <x:ref>target resource</x:ref> and its + <x:ref>selected representation</x:ref> after the requested action was applied. +</t> +<t> + For example, if a 204 status code is received in response to a PUT + request and the response contains an <x:ref>ETag</x:ref> field, then + the PUT was successful and the ETag field value contains the entity tag for + the new representation of that target resource. +</t> +<t> + The 204 response allows a server to indicate that the action has been + successfully applied to the target resource, while implying that the + user agent does not need to traverse away from its current "document view" + (if any). The server assumes that the user agent will provide some + indication of the success to its user, in accord with its own interface, + and apply any new or updated metadata in the response to its active + representation. +</t> +<t> + For example, a 204 status code is commonly used with document editing + interfaces corresponding to a "save" action, such that the document + being saved remains available to the user for editing. It is also + frequently used with interfaces that expect automated data transfers + to be prevalent, such as within distributed version control systems. +</t> +<t> + A 204 response is terminated by the end of the header section; + it cannot contain content or trailers. +</t> +<t> + A 204 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +</section> + +<section title="205 Reset Content" anchor="status.205"> + <iref primary="true" item="205 Reset Content (status code)" x:for-anchor=""/> +<t> + The 205 (Reset Content) status code indicates that the + server has fulfilled the request and desires that the user agent reset the + "document view", which caused the request to be sent, to its original state + as received from the origin server. 
+</t> +<t> + This response is intended to support a common data entry use case where + the user receives content that supports data entry (a form, notepad, + canvas, etc.), enters or manipulates data in that space, causes the entered + data to be submitted in a request, and then the data entry mechanism is + reset for the next entry so that the user can easily initiate another + input action. +</t> +<t> + Since the 205 status code implies that no additional content will be + provided, a server <bcp14>MUST NOT</bcp14> generate content in a 205 response. +</t> +</section> + +<section title="206 Partial Content" anchor="status.206"> + <iref primary="true" item="206 Partial Content (status code)" x:for-anchor=""/> + <x:anchor-alias value="206"/> + <x:anchor-alias value="206 (Partial Content)"/> +<t> + The 206 (Partial Content) status code indicates that the + server is successfully fulfilling a range request for the target resource + by transferring one or more parts of the + <x:ref>selected representation</x:ref>. +</t> +<t> + A server that supports range requests (<xref target="range.requests"/>) will + usually attempt to satisfy all of the requested ranges, since sending + less data will likely result in another client request for the remainder. + However, a server might want to send only a subset of the data requested + for reasons of its own, such as temporary unavailability, cache efficiency, + load balancing, etc. Since a 206 response is self-descriptive, the client + can still understand a response that only partially satisfies its range + request. +</t> +<t> + A client <bcp14>MUST</bcp14> inspect a 206 response's <x:ref>Content-Type</x:ref> and + <x:ref>Content-Range</x:ref> field(s) to determine what parts are enclosed + and whether additional requests are needed. +</t> +<t> + A server that generates a 206 response <bcp14>MUST</bcp14> generate the following + header fields, in addition to those required in the subsections below, + if the field would + have been sent in a <x:ref>200 (OK)</x:ref> response to the same request: + <x:ref>Date</x:ref>, <x:ref>Cache-Control</x:ref>, <x:ref>ETag</x:ref>, + <x:ref>Expires</x:ref>, <x:ref>Content-Location</x:ref>, and + <x:ref>Vary</x:ref>. +</t> +<t> + A <x:ref>Content-Length</x:ref> header field present in a 206 response + indicates the number of octets in the content of this message, which is + usually not the complete length of the selected representation. + Each <x:ref>Content-Range</x:ref> header field includes information about the + selected representation's complete length. +</t> +<t> + A sender that generates a 206 response to a request with an <x:ref>If-Range</x:ref> + header field <bcp14>SHOULD NOT</bcp14> generate other representation header + fields beyond those required because the client + already has a prior response containing those header fields. + Otherwise, a sender <bcp14>MUST</bcp14> generate all of the representation header + fields that would have been sent in a <x:ref>200 (OK)</x:ref> response + to the same request. +</t> +<t> + A 206 response is heuristically cacheable; i.e., unless otherwise indicated by + explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> + +<section title="Single Part" anchor="partial.single"> +<t> + If a single part is being transferred, the server generating the 206 + response <bcp14>MUST</bcp14> generate a <x:ref>Content-Range</x:ref> header field, + describing what range of the selected representation is enclosed, and a + content consisting of the range. 
For example: +</t> +<sourcecode type="http-message"> +HTTP/1.1 206 Partial Content +Date: Wed, 15 Nov 1995 06:25:24 GMT +Last-Modified: Wed, 15 Nov 1995 04:58:08 GMT +Content-Range: bytes 21010-47021/47022 +Content-Length: 26012 +Content-Type: image/gif + +... 26012 bytes of partial image data ... +</sourcecode> +</section> + +<section title="Multiple Parts" anchor="partial.multipart"> +<t> + If multiple parts are being transferred, the server generating the 206 + response <bcp14>MUST</bcp14> generate "multipart/byteranges" content, as defined + in <xref target="multipart.byteranges"/>, and a + <x:ref>Content-Type</x:ref> header field containing the + "multipart/byteranges" media type and its required boundary parameter. + To avoid confusion with single-part responses, a server <bcp14>MUST NOT</bcp14> generate + a <x:ref>Content-Range</x:ref> header field in the HTTP header section of a + multiple part response (this field will be sent in each part instead). +</t> +<t> + Within the header area of each body part in the multipart content, the + server <bcp14>MUST</bcp14> generate a <x:ref>Content-Range</x:ref> header field + corresponding to the range being enclosed in that body part. + If the selected representation would have had a <x:ref>Content-Type</x:ref> + header field in a <x:ref>200 (OK)</x:ref> response, the server <bcp14>SHOULD</bcp14> + generate that same <x:ref>Content-Type</x:ref> header field in the header area of + each body part. For example: +</t> +<sourcecode type="http-message"> +HTTP/1.1 206 Partial Content +Date: Wed, 15 Nov 1995 06:25:24 GMT +Last-Modified: Wed, 15 Nov 1995 04:58:08 GMT +Content-Length: 1741 +Content-Type: multipart/byteranges; boundary=THIS_STRING_SEPARATES + +--THIS_STRING_SEPARATES +Content-Type: application/pdf +Content-Range: bytes 500-999/8000 + +...the first range... +--THIS_STRING_SEPARATES +Content-Type: application/pdf +Content-Range: bytes 7000-7999/8000 + +...the second range +--THIS_STRING_SEPARATES-- +</sourcecode> +<t> + When multiple ranges are requested, a server <bcp14>MAY</bcp14> coalesce any of the + ranges that overlap, or that are separated by a gap that is smaller than the + overhead of sending multiple parts, regardless of the order in which the + corresponding range-spec appeared in the received <x:ref>Range</x:ref> + header field. Since the typical overhead between each part of a + "multipart/byteranges" is around 80 bytes, depending on the selected + representation's media type and the chosen boundary parameter length, it + can be less efficient to transfer many small disjoint parts than it is to + transfer the entire selected representation. +</t> +<t> + A server <bcp14>MUST NOT</bcp14> generate a multipart response to a request for a single + range, since a client that does not request multiple parts might not + support multipart responses. However, a server <bcp14>MAY</bcp14> generate a + "multipart/byteranges" response with only a single body part if multiple + ranges were requested and only one range was found to be satisfiable or + only one range remained after coalescing. + A client that cannot process a "multipart/byteranges" response <bcp14>MUST NOT</bcp14> + generate a request that asks for multiple ranges. +</t> +<t> + A server that generates a multipart response <bcp14>SHOULD</bcp14> send + the parts in the same order that the corresponding range-spec appeared + in the received <x:ref>Range</x:ref> header field, excluding those ranges + that were deemed unsatisfiable or that were coalesced into other ranges. 
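+</t>
+<t>
+  The coalescing latitude described above can be sketched (non-normatively)
+  as follows, operating on already-resolved inclusive byte offsets; the
+  80-octet figure is the rough per-part estimate given above, not a protocol
+  constant, and a server would normally also preserve the requested part
+  order where it can.
+</t>
+<sourcecode type="python"><![CDATA[
+def coalesce(ranges, part_overhead=80):
+    """Merge inclusive (first, last) byte ranges that overlap or sit closer
+    together than the estimated per-part overhead."""
+    merged = []
+    for first, last in sorted(ranges):
+        if merged and first <= merged[-1][1] + part_overhead:
+            previous_first, previous_last = merged[-1]
+            merged[-1] = (previous_first, max(previous_last, last))
+        else:
+            merged.append((first, last))
+    return merged
+
+# coalesce([(0, 499), (520, 999)])  == [(0, 999)]   (gap smaller than the overhead)
+# coalesce([(0, 0), (9999, 9999)])  == [(0, 0), (9999, 9999)]
+]]></sourcecode>
+<t>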
+ A client that receives a multipart response <bcp14>MUST</bcp14> inspect the + <x:ref>Content-Range</x:ref> header field present in each body part in + order to determine which range is contained in that body part; a client + cannot rely on receiving the same ranges that it requested, nor the same + order that it requested. +</t> +</section> + +<section title="Combining Parts" anchor="combining.byte.ranges"> +<t> + A response might transfer only a subrange of a representation if the + connection closed prematurely or if the request used one or more Range + specifications. After several such transfers, a client might have + received several ranges of the same representation. These ranges can only + be safely combined if they all have in common the same strong validator + (<xref target="weak.and.strong.validators"/>). +</t> +<t> + A client that has received multiple partial responses to GET requests on a + target resource <bcp14>MAY</bcp14> combine those responses into a larger continuous + range if they share the same strong validator. +</t> +<t> + If the most recent response is an incomplete <x:ref>200 (OK)</x:ref> + response, then the header fields of that response are used for any + combined response and replace those of the matching stored responses. +</t> +<t> + If the most recent response is a <x:ref>206 (Partial Content)</x:ref> + response and at least one of the matching stored responses is a + <x:ref>200 (OK)</x:ref>, then the combined response header fields consist + of the most recent 200 response's header fields. If all of the matching + stored responses are 206 responses, then the stored response with the most + recent header fields is used as the source of header fields for the + combined response, except that the client <bcp14>MUST</bcp14> use other header fields + provided in the new response, aside from <x:ref>Content-Range</x:ref>, to + replace all instances of the corresponding header fields in the stored + response. +</t> +<t> + The combined response content consists of the union of partial content + ranges within the new response and all of the matching stored responses. + If the union consists of the entire range of the representation, then the + client <bcp14>MUST</bcp14> process the combined response as if it were a complete + <x:ref>200 (OK)</x:ref> response, including a <x:ref>Content-Length</x:ref> + header field that reflects the complete length. + Otherwise, the client <bcp14>MUST</bcp14> process the set of continuous ranges as one of + the following: + an incomplete <x:ref>200 (OK)</x:ref> response if the combined response is + a prefix of the representation, + a single <x:ref>206 (Partial Content)</x:ref> response containing + "multipart/byteranges" content, or + multiple <x:ref>206 (Partial Content)</x:ref> responses, each with one + continuous range that is indicated by a <x:ref>Content-Range</x:ref> header + field. +</t> +</section> +</section> +</section> + +<section title="Redirection 3xx" anchor="status.3xx"> + <x:anchor-alias value="3xx"/> + <x:anchor-alias value="3xx (Redirection)"/> + <iref primary="true" item="3xx Redirection (status code class)" x:for-anchor=""/> + <iref primary="true" item="Status Codes Classes" subitem="3xx Redirection" x:for-anchor=""/> +<t> + The 3xx (Redirection) class of status code indicates that + further action needs to be taken by the user agent in order to fulfill the + request. 
There are several types of redirects: +</t> +<ol> + <li> + Redirects that indicate this resource might be available at a + different URI, as provided by the <x:ref>Location</x:ref> header field, + as in the status codes <x:ref>301 (Moved Permanently)</x:ref>, + <x:ref>302 (Found)</x:ref>, <x:ref>307 (Temporary Redirect)</x:ref>, and + <x:ref>308 (Permanent Redirect)</x:ref>. + </li> + <li> + Redirection that offers a choice among matching resources capable + of representing this resource, as in the + <x:ref>300 (Multiple Choices)</x:ref> status code. + </li> + <li> + Redirection to a different resource, identified by the + <x:ref>Location</x:ref> header field, that can represent an indirect + response to the request, as in the <x:ref>303 (See Other)</x:ref> + status code. + </li> + <li> + Redirection to a previously stored result, as in the + <x:ref>304 (Not Modified)</x:ref> status code. + </li> +</ol> +<aside> + <t> + <x:h>Note:</x:h> In HTTP/1.0, the status codes <x:ref>301 (Moved Permanently)</x:ref> + and <x:ref>302 (Found)</x:ref> were originally defined as method-preserving + (<xref target="HTTP10" x:fmt="," x:sec="9.3"/>) to match their implementation + at CERN; <x:ref>303 (See Other)</x:ref> was defined for a redirection that + changed its method to GET. However, early user agents split on whether to + redirect POST requests as POST (according to then-current specification) + or as GET (the safer alternative when redirected to a different site). + Prevailing practice eventually converged on changing the method to GET. + <x:ref>307 (Temporary Redirect)</x:ref> and + <x:ref>308 (Permanent Redirect)</x:ref> <xref target="RFC7538"/> were + later added to unambiguously indicate method-preserving redirects, and + status codes <x:ref>301</x:ref> and <x:ref>302</x:ref> have been adjusted to allow a POST + request to be redirected as GET. + </t> +</aside> +<t> + If a <x:ref>Location</x:ref> header field + (<xref target="field.location"/>) is provided, the user agent <bcp14>MAY</bcp14> + automatically redirect its request to the URI referenced by the Location + field value, even if the specific status code is not understood. + Automatic redirection needs to be done with care for methods not known to be + <x:ref>safe</x:ref>, as defined in <xref target="safe.methods"/>, since + the user might not wish to redirect an unsafe request. +</t> +<t> + When automatically following a redirected request, the user agent <bcp14>SHOULD</bcp14> + resend the original request message with the following modifications: +</t> +<ol> + <li> + <t> + Replace the target URI with the URI referenced by the redirection response's + <x:ref>Location</x:ref> header field value after resolving it relative to the original + request's target URI. + </t> + </li> + <li> + <t> + Remove header fields that were automatically generated by the implementation, + replacing them with updated values as appropriate to the new request. 
This + includes: + </t> + <ol> + <li>Connection-specific header fields (see <xref target="field.connection"/>),</li> + <li>Header fields specific to the client's proxy configuration, + including (but not limited to) <x:ref>Proxy-Authorization</x:ref>,</li> + <li>Origin-specific header fields (if any), including (but not + limited to) <x:ref>Host</x:ref>,</li> + <li>Validating header fields that were added by the implementation's + cache (e.g., <x:ref>If-None-Match</x:ref>, + <x:ref>If-Modified-Since</x:ref>), and</li> + <li>Resource-specific header fields, including (but not limited to) + <x:ref>Referer</x:ref>, Origin, + <x:ref>Authorization</x:ref>, and Cookie.</li> + </ol> + </li> + <li> + <t> + Consider removing header fields that were not automatically generated by the + implementation (i.e., those present in the request because they were added + by the calling context) where there are security implications; this + includes but is not limited to <x:ref>Authorization</x:ref> and Cookie. + </t> + </li> + <li> + <t> + Change the request method according to the redirecting status code's + semantics, if applicable. + </t> + </li> + <li> + <t> + If the request method has been changed to GET or HEAD, remove + content-specific header fields, including (but not limited to) + <x:ref>Content-Encoding</x:ref>, + <x:ref>Content-Language</x:ref>, <x:ref>Content-Location</x:ref>, + <x:ref>Content-Type</x:ref>, <x:ref>Content-Length</x:ref>, + Digest, <x:ref>Last-Modified</x:ref>. + </t> + </li> +</ol> +<t> + A client <bcp14>SHOULD</bcp14> detect and intervene in cyclical redirections (i.e., + "infinite" redirection loops). +</t> +<aside> + <t> + <x:h>Note:</x:h> An earlier version of this specification recommended a + maximum of five redirections (<xref target="RFC2068" x:fmt="," x:sec="10.3"/>). + Content developers need to be aware that some clients might + implement such a fixed limitation. + </t> +</aside> + +<section title="300 Multiple Choices" anchor="status.300"> + <iref primary="true" item="300 Multiple Choices (status code)" x:for-anchor=""/> + <x:anchor-alias value="300 (Multiple Choices)"/> +<t> + The 300 (Multiple Choices) status code indicates that the + <x:ref>target resource</x:ref> has more than one representation, each with + its own more specific identifier, and information about the alternatives is + being provided so that the user (or user agent) can select a preferred + representation by redirecting its request to one or more of those + identifiers. In other words, the server desires that the user agent engage + in reactive negotiation to select the most appropriate representation(s) + for its needs (<xref target="content.negotiation"/>). +</t> +<t> + If the server has a preferred choice, the server <bcp14>SHOULD</bcp14> generate a + <x:ref>Location</x:ref> header field containing a preferred choice's URI + reference. The user agent <bcp14>MAY</bcp14> use the Location field value for automatic + redirection. +</t> +<t> + For request methods other than HEAD, the server <bcp14>SHOULD</bcp14> generate content + in the 300 response containing a list of representation metadata and URI + reference(s) from which the user or user agent can choose the one most + preferred. The user agent <bcp14>MAY</bcp14> make a selection from that list + automatically if it understands the provided media type. A specific format + for automatic selection is not defined by this specification because HTTP + tries to remain orthogonal to the definition of its content. 
+ In practice, the representation is provided in some easily parsed format + believed to be acceptable to the user agent, as determined by shared design + or content negotiation, or in some commonly accepted hypertext format. +</t> +<t> + A 300 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +<aside> + <t> + <x:h>Note:</x:h> The original proposal for the 300 status code defined the URI header field as + providing a list of alternative representations, such that it would be + usable for 200, 300, and 406 responses and be transferred in responses to + the HEAD method. However, lack of deployment and disagreement over syntax + led to both URI and Alternates (a subsequent proposal) being dropped from + this specification. It is possible to communicate the list as a + Link header field value <xref target="RFC8288"/> whose members have a relationship of + "alternate", though deployment is a chicken-and-egg problem. + </t> +</aside> +</section> + +<section title="301 Moved Permanently" anchor="status.301"> + <iref primary="true" item="301 Moved Permanently (status code)" x:for-anchor=""/> + <x:anchor-alias value="301"/> + <x:anchor-alias value="301 (Moved Permanently)"/> +<t> + The 301 (Moved Permanently) status code indicates that the + <x:ref>target resource</x:ref> has been assigned a new permanent URI and + any future references to this resource ought to use one of the enclosed + URIs. The server is suggesting that a user agent with link-editing capability + can permanently replace references to the target URI with one of the + new references sent by the server. However, this suggestion is usually + ignored unless the user agent is actively editing references + (e.g., engaged in authoring content), the connection is secured, and + the origin server is a trusted authority for the content being edited. +</t> +<t> + The server <bcp14>SHOULD</bcp14> generate a <x:ref>Location</x:ref> header field in the + response containing a preferred URI reference for the new permanent URI. + The user agent <bcp14>MAY</bcp14> use the Location field value for automatic redirection. + The server's response content usually contains a short hypertext note with + a hyperlink to the new URI(s). +</t> +<aside> + <t> + <x:h>Note:</x:h> For historical reasons, a user agent <bcp14>MAY</bcp14> change the + request method from POST to GET for the subsequent request. If this + behavior is undesired, the <x:ref>308 (Permanent Redirect)</x:ref> + status code can be used instead. + </t> +</aside> +<t> + A 301 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +</section> + +<section title="302 Found" anchor="status.302"> + <iref primary="true" item="302 Found (status code)" x:for-anchor=""/> + <x:anchor-alias value="302"/> + <x:anchor-alias value="302 (Found)"/> +<t> + The 302 (Found) status code indicates that the target + resource resides temporarily under a different URI. Since the redirection + might be altered on occasion, the client ought to continue to use the + target URI for future requests. +</t> +<t> + The server <bcp14>SHOULD</bcp14> generate a <x:ref>Location</x:ref> header field in the + response containing a URI reference for the different URI. 
+ The user agent <bcp14>MAY</bcp14> use the Location field value for automatic redirection. + The server's response content usually contains a short hypertext note with + a hyperlink to the different URI(s). +</t> +<aside> + <t> + <x:h>Note:</x:h> For historical reasons, a user agent <bcp14>MAY</bcp14> change the + request method from POST to GET for the subsequent request. If this + behavior is undesired, the <x:ref>307 (Temporary Redirect)</x:ref> + status code can be used instead. + </t> +</aside> +</section> + +<section title="303 See Other" anchor="status.303"> + <iref primary="true" item="303 See Other (status code)" x:for-anchor=""/> + <x:anchor-alias value="303 (See Other)"/> +<t> + The 303 (See Other) status code indicates that the server is + redirecting the user agent to a different resource, as indicated by a URI + in the <x:ref>Location</x:ref> header field, which is intended to provide + an indirect response to the original request. A user agent can perform a + retrieval request targeting that URI (a GET or HEAD request if using HTTP), + which might also be redirected, and present the eventual result as an + answer to the original request. Note that the new URI in the Location + header field is not considered equivalent to the target URI. +</t> +<t> + This status code is applicable to any HTTP method. It is + primarily used to allow the output of a POST action to redirect + the user agent to a different resource, since doing so provides the + information corresponding to the POST response as a resource that + can be separately identified, bookmarked, and cached. +</t> +<t> + A 303 response to a GET request indicates that the origin server does not + have a representation of the <x:ref>target resource</x:ref> that can be + transferred by the server over HTTP. However, the + <x:ref>Location</x:ref> field value refers to a resource that is + descriptive of the target resource, such that making a retrieval request + on that other resource might result in a representation that is useful to + recipients without implying that it represents the original target resource. + Note that answers to the questions of what can be represented, what + representations are adequate, and what might be a useful description are + outside the scope of HTTP. +</t> +<t> + Except for responses to a HEAD request, the representation of a 303 + response ought to contain a short hypertext note with a hyperlink to the + same URI reference provided in the <x:ref>Location</x:ref> header field. +</t> +</section> + +<section title="304 Not Modified" anchor="status.304"> + <iref primary="true" item="304 Not Modified (status code)" x:for-anchor=""/> + <x:anchor-alias value="304"/> + <x:anchor-alias value="304 (Not Modified)"/> +<t> + The 304 (Not Modified) status code indicates that a + conditional GET or HEAD request has been + received and would have resulted in a <x:ref>200 (OK)</x:ref> response + if it were not for the fact that the condition evaluated to false. + In other words, there is no need for the server to transfer a + representation of the target resource because the request indicates that + the client, which made the request conditional, already has a valid + representation; the server is therefore redirecting the client to make + use of that stored representation as if it were the content of a + <x:ref>200 (OK)</x:ref> response. 
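+</t>
+<t>
+  For example, in response to a conditional GET whose
+  <x:ref>If-None-Match</x:ref> field value matches the current entity tag,
+  an origin server might send (the field values shown are illustrative):
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 304 Not Modified
+Date: Sat, 26 Mar 2022 08:12:31 GMT
+Cache-Control: max-age=3600
+ETag: "xyzzy"
+</sourcecode>
+<t>
+  The client can then continue to use its stored representation, refreshed
+  by the metadata provided in the 304 response.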
+</t> +<t> + The server generating a 304 response <bcp14>MUST</bcp14> generate any of the following + header fields that would have been sent in a <x:ref>200 (OK)</x:ref> + response to the same request: +</t> +<ul> + <li> + <x:ref>Content-Location</x:ref>, <x:ref>Date</x:ref>, <x:ref>ETag</x:ref>, + and <x:ref>Vary</x:ref> + </li> + <li> + <x:ref>Cache-Control</x:ref> and <x:ref>Expires</x:ref> (see + <xref target="CACHING"/>) + </li> +</ul> +<t> + Since the goal of a 304 response is to minimize information transfer + when the recipient already has one or more cached representations, + a sender <bcp14>SHOULD NOT</bcp14> generate representation metadata other + than the above listed fields unless said metadata exists for the + purpose of guiding cache updates (e.g., <x:ref>Last-Modified</x:ref> might + be useful if the response does not have an <x:ref>ETag</x:ref> field). +</t> +<t> + Requirements on a cache that receives a 304 response are defined in + <xref target="CACHING" x:rel="#freshening.responses"/>. If the conditional request originated with an + outbound client, such as a user agent with its own cache sending a + conditional GET to a shared proxy, then the proxy <bcp14>SHOULD</bcp14> forward the + 304 response to that client. +</t> +<t> + A 304 response is terminated by the end of the header section; + it cannot contain content or trailers. +</t> +</section> + +<section title="305 Use Proxy" anchor="status.305"> + <iref primary="true" item="305 Use Proxy (status code)" x:for-anchor=""/> + <x:anchor-alias value="305 (Use Proxy)"/> +<t> + The 305 (Use Proxy) status code was defined in a previous + version of this specification and is now deprecated (<xref target="RFC7231" x:fmt="of" x:sec="B"/>). +</t> +</section> + +<section title="306 (Unused)" anchor="status.306"> + <iref primary="true" item="306 (Unused) (status code)" x:for-anchor=""/> +<t> + The 306 status code was defined in a previous version of this + specification, is no longer used, and the code is reserved. +</t> +</section> + +<section title="307 Temporary Redirect" anchor="status.307"> + <iref primary="true" item="307 Temporary Redirect (status code)" x:for-anchor=""/> + <x:anchor-alias value="307"/> + <x:anchor-alias value="307 (Temporary Redirect)"/> +<t> + The 307 (Temporary Redirect) status code indicates that the + <x:ref>target resource</x:ref> resides temporarily under a different URI + and the user agent <bcp14>MUST NOT</bcp14> change the request method if it performs an + automatic redirection to that URI. + Since the redirection can change over time, the client ought to continue + using the original target URI for future requests. +</t> +<t> + The server <bcp14>SHOULD</bcp14> generate a <x:ref>Location</x:ref> header field in the + response containing a URI reference for the different URI. + The user agent <bcp14>MAY</bcp14> use the Location field value for automatic redirection. + The server's response content usually contains a short hypertext note with + a hyperlink to the different URI(s). +</t> +</section> + +<section title="308 Permanent Redirect" anchor="status.308"> + <iref primary="true" item="308 Permanent Redirect (status code)" x:for-anchor=""/> + <x:anchor-alias value="308"/> + <x:anchor-alias value="308 (Permanent Redirect)"/> +<t> + The 308 (Permanent Redirect) status code indicates that the + <x:ref>target resource</x:ref> has been assigned a new permanent URI and + any future references to this resource ought to use one of the enclosed + URIs. 
The server is suggesting that a user agent with link-editing capability + can permanently replace references to the target URI with one of the + new references sent by the server. However, this suggestion is usually + ignored unless the user agent is actively editing references + (e.g., engaged in authoring content), the connection is secured, and + the origin server is a trusted authority for the content being edited. +</t> +<t> + The server <bcp14>SHOULD</bcp14> generate a <x:ref>Location</x:ref> header field in the + response containing a preferred URI reference for the new permanent URI. + The user agent <bcp14>MAY</bcp14> use the Location field value for automatic redirection. + The server's response content usually contains a short hypertext note with + a hyperlink to the new URI(s). +</t> +<t> + A 308 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +<aside> + <t> + <x:h>Note:</x:h> This status code is much younger (June 2014) than its sibling codes and thus + might not be recognized everywhere. See <xref target="RFC7538" x:fmt="of" x:sec="4"/> + for deployment considerations. + </t> +</aside> +</section> +</section> + +<section title="Client Error 4xx" anchor="status.4xx"> + <x:anchor-alias value="4xx"/> + <x:anchor-alias value="4xx (Client Error)"/> + <iref primary="true" item="4xx Client Error (status code class)" x:for-anchor=""/> + <iref primary="true" item="Status Codes Classes" subitem="4xx Client Error" x:for-anchor=""/> +<t> + The 4xx (Client Error) class of status code indicates that + the client seems to have erred. Except when responding to a HEAD request, + the server <bcp14>SHOULD</bcp14> send a representation containing an explanation of + the error situation, and whether it is a temporary or permanent condition. + These status codes are applicable to any request method. User agents + <bcp14>SHOULD</bcp14> display any included representation to the user. +</t> + +<section title="400 Bad Request" anchor="status.400"> + <iref primary="true" item="400 Bad Request (status code)" x:for-anchor=""/> + <x:anchor-alias value="400 (Bad Request)"/> +<t> + The 400 (Bad Request) status code indicates that the server + cannot or will not process the request due to something that is perceived + to be a client error (e.g., malformed request syntax, invalid request + message framing, or deceptive request routing). +</t> +</section> + +<section title="401 Unauthorized" anchor="status.401"> + <iref primary="true" item="401 Unauthorized (status code)" x:for-anchor=""/> + <x:anchor-alias value="401 (Unauthorized)"/> +<t> + The 401 (Unauthorized) status code indicates that the + request has not been applied because it lacks valid authentication + credentials for the target resource. + The server generating a 401 response <bcp14>MUST</bcp14> send a + <x:ref>WWW-Authenticate</x:ref> header field + (<xref target="field.www-authenticate"/>) + containing at least one challenge applicable to the target resource. +</t> +<t> + If the request included authentication credentials, then the 401 response + indicates that authorization has been refused for those credentials. + The user agent <bcp14>MAY</bcp14> repeat the request with a new or replaced + <x:ref>Authorization</x:ref> header field (<xref target="field.authorization"/>). 
+ If the 401 response contains the same challenge as the prior response, and + the user agent has already attempted authentication at least once, then the + user agent <bcp14>SHOULD</bcp14> present the enclosed representation to the user, since + it usually contains relevant diagnostic information. +</t> +</section> + +<section title="402 Payment Required" anchor="status.402"> + <iref primary="true" item="402 Payment Required (status code)" x:for-anchor=""/> + <x:anchor-alias value="402 (Payment Required)"/> +<t> + The 402 (Payment Required) status code is reserved for + future use. +</t> +</section> + +<section title="403 Forbidden" anchor="status.403"> + <iref primary="true" item="403 Forbidden (status code)" x:for-anchor=""/> + <x:anchor-alias value="403 (Forbidden)"/> +<t> + The 403 (Forbidden) status code indicates that the + server understood the request but refuses to fulfill it. + A server that wishes to make public why the request has been forbidden + can describe that reason in the response content (if any). +</t> +<t> + If authentication credentials were provided in the request, the + server considers them insufficient to grant access. + The client <bcp14>SHOULD NOT</bcp14> automatically repeat the request with the same + credentials. + The client <bcp14>MAY</bcp14> repeat the request with new or different credentials. + However, a request might be forbidden for reasons unrelated to the + credentials. +</t> +<t> + An origin server that wishes to "hide" the current existence of a forbidden + <x:ref>target resource</x:ref> <bcp14>MAY</bcp14> instead respond with a status + code of <x:ref>404 (Not Found)</x:ref>. +</t> +</section> + +<section title="404 Not Found" anchor="status.404"> + <iref primary="true" item="404 Not Found (status code)" x:for-anchor=""/> + <x:anchor-alias value="404 (Not Found)"/> +<t> + The 404 (Not Found) status code indicates that the origin + server did not find a current representation for the + <x:ref>target resource</x:ref> or is not willing to disclose that one + exists. A 404 status code does not indicate whether this lack of representation + is temporary or permanent; the <x:ref>410 (Gone)</x:ref> status code is + preferred over 404 if the origin server knows, presumably through some + configurable means, that the condition is likely to be permanent. +</t> +<t> + A 404 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +</section> + +<section title="405 Method Not Allowed" anchor="status.405"> + <iref primary="true" item="405 Method Not Allowed (status code)" x:for-anchor=""/> + <x:anchor-alias value="405 (Method Not Allowed)"/> +<t> + The 405 (Method Not Allowed) status code indicates that the + method received in the request-line is known by the origin server but + not supported by the <x:ref>target resource</x:ref>. + The origin server <bcp14>MUST</bcp14> generate an <x:ref>Allow</x:ref> header field in + a 405 response containing a list of the target resource's currently + supported methods. +</t> +<t> + A 405 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). 
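+</t>
+<t>
+  For example, an origin server that supports only retrieval of a given
+  resource might reject an attempt to DELETE it as follows (the methods
+  listed are illustrative):
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 405 Method Not Allowed
+Date: Sat, 26 Mar 2022 08:12:31 GMT
+Allow: GET, HEAD, OPTIONS
+</sourcecode>
+<t>
+  The <x:ref>Allow</x:ref> field value tells the client which methods it
+  could use instead.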
+</t> + +</section> + +<section title="406 Not Acceptable" anchor="status.406"> + <iref primary="true" item="406 Not Acceptable (status code)" x:for-anchor=""/> + <x:anchor-alias value="406 (Not Acceptable)"/> +<t> + The 406 (Not Acceptable) status code indicates that the + <x:ref>target resource</x:ref> does not have a current representation that + would be acceptable to the user agent, according to the + <x:ref>proactive negotiation</x:ref> header fields received in the request + (<xref target="proactive.negotiation"/>), and the server is unwilling to supply a + default representation. +</t> +<t> + The server <bcp14>SHOULD</bcp14> generate content containing a list of available + representation characteristics and corresponding resource identifiers from + which the user or user agent can choose the one most appropriate. + A user agent <bcp14>MAY</bcp14> automatically select the most appropriate choice from + that list. However, this specification does not define any standard for + such automatic selection, as described in <xref target="status.300"/>. +</t> +</section> + +<section title="407 Proxy Authentication Required" anchor="status.407"> + <iref primary="true" item="407 Proxy Authentication Required (status code)" x:for-anchor=""/> + <x:anchor-alias value="407 (Proxy Authentication Required)"/> +<t> + The 407 (Proxy Authentication Required) status code is + similar to <x:ref>401 (Unauthorized)</x:ref>, but it indicates that the client + needs to authenticate itself in order to use a proxy for this request. + The proxy <bcp14>MUST</bcp14> send a <x:ref>Proxy-Authenticate</x:ref> header field + (<xref target="field.proxy-authenticate"/>) containing a challenge + applicable to that proxy for the request. The client <bcp14>MAY</bcp14> repeat + the request with a new or replaced <x:ref>Proxy-Authorization</x:ref> + header field (<xref target="field.proxy-authorization"/>). +</t> +</section> + +<section title="408 Request Timeout" anchor="status.408"> + <iref primary="true" item="408 Request Timeout (status code)" x:for-anchor=""/> + <x:anchor-alias value="408 (Request Timeout)"/> +<t> + The 408 (Request Timeout) status code indicates + that the server did not receive a complete request message within the time + that it was prepared to wait. +</t> +<t> + If the client has an outstanding request in transit, it <bcp14>MAY</bcp14> repeat that + request. If the current connection is not usable (e.g., as it would be in + HTTP/1.1 because request delimitation is lost), a new connection will be + used. +</t> +</section> + +<section title="409 Conflict" anchor="status.409"> + <iref primary="true" item="409 Conflict (status code)" x:for-anchor=""/> + <x:anchor-alias value="409 (Conflict)"/> +<t> + The 409 (Conflict) status code indicates that the request + could not be completed due to a conflict with the current state of the target + resource. This code is used in situations where the user might be able to + resolve the conflict and resubmit the request. The server <bcp14>SHOULD</bcp14> generate + content that includes enough information for a user to recognize the + source of the conflict. +</t> +<t> + Conflicts are most likely to occur in response to a PUT request. For + example, if versioning were being used and the representation being PUT + included changes to a resource that conflict with those made by an + earlier (third-party) request, the origin server might use a 409 response + to indicate that it can't complete the request. 
In this case, the response + representation would likely contain information useful for merging the + differences based on the revision history. +</t> +</section> + +<section title="410 Gone" anchor="status.410"> + <iref primary="true" item="410 Gone (status code)" x:for-anchor=""/> + <x:anchor-alias value="410 (Gone)"/> +<t> + The 410 (Gone) status code indicates that access to the + <x:ref>target resource</x:ref> is no longer available at the origin + server and that this condition is likely to be permanent. If the origin + server does not know, or has no facility to determine, whether or not the + condition is permanent, the status code <x:ref>404 (Not Found)</x:ref> + ought to be used instead. +</t> +<t> + The 410 response is primarily intended to assist the task of web + maintenance by notifying the recipient that the resource is + intentionally unavailable and that the server owners desire that + remote links to that resource be removed. Such an event is common for + limited-time, promotional services and for resources belonging to + individuals no longer associated with the origin server's site. It is not + necessary to mark all permanently unavailable resources as "gone" or + to keep the mark for any length of time — that is left to the + discretion of the server owner. +</t> +<t> + A 410 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> +</section> + +<section title="411 Length Required" anchor="status.411"> + <iref primary="true" item="411 Length Required (status code)" x:for-anchor=""/> + <x:anchor-alias value="411 (Length Required)"/> +<t> + The 411 (Length Required) status code indicates that the + server refuses to accept the request without a defined + <x:ref>Content-Length</x:ref> (<xref target="field.content-length"/>). + The client <bcp14>MAY</bcp14> repeat the request if it adds a valid Content-Length + header field containing the length of the request content. +</t> +</section> + +<section title="412 Precondition Failed" anchor="status.412"> + <iref primary="true" item="412 Precondition Failed (status code)" x:for-anchor=""/> + <x:anchor-alias value="412 (Precondition Failed)"/> +<t> + The 412 (Precondition Failed) status code indicates that one + or more conditions given in the request header fields evaluated to false + when tested on the server (<xref target="conditional.requests"/>). This + response status code allows the client to place preconditions on the + current resource state (its current representations and metadata) and, + thus, prevent the request method from being applied if the target resource + is in an unexpected state. +</t> +</section> + +<section title="413 Content Too Large" anchor="status.413"> + <iref primary="true" item="413 Content Too Large (status code)" x:for-anchor=""/> + <x:anchor-alias value="413 (Content Too Large)"/> +<t> + The 413 (Content Too Large) status code indicates + that the server is refusing to process a request because the request + content is larger than the server is willing or able to process. + The server <bcp14>MAY</bcp14> terminate the request, if the protocol version in use + allows it; otherwise, the server <bcp14>MAY</bcp14> close the connection. +</t> +<t> + If the condition is temporary, the server <bcp14>SHOULD</bcp14> generate a + <x:ref>Retry-After</x:ref> header field to indicate that it is temporary + and after what time the client <bcp14>MAY</bcp14> try again. 
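+</t>
+<t>
+  For example, a server that is temporarily unable to accept a large upload
+  might respond with (the delay shown is illustrative):
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 413 Content Too Large
+Date: Sat, 26 Mar 2022 08:12:31 GMT
+Retry-After: 3600
+</sourcecode>
+<t>
+  Here, the server indicates that the client can attempt the same request
+  again after 3600 seconds.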
+</t> +</section> + +<section title="414 URI Too Long" anchor="status.414"> + <iref primary="true" item="414 URI Too Long (status code)" x:for-anchor=""/> + <x:anchor-alias value="414 (URI Too Long)"/> +<t> + The 414 (URI Too Long) status code indicates that the server + is refusing to service the request because the + target URI is longer than the server is willing to + interpret. This rare condition is only likely to occur when a client has + improperly converted a POST request to a GET request with long query + information, when the client has descended into an infinite loop of + redirection (e.g., a redirected URI prefix that points to a suffix of + itself) or when the server is under attack by a client attempting to + exploit potential security holes. +</t> +<t> + A 414 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> + +</section> + +<section title="415 Unsupported Media Type" anchor="status.415"> + <iref primary="true" item="415 Unsupported Media Type (status code)" x:for-anchor=""/> + <x:anchor-alias value="415 (Unsupported Media Type)"/> +<t> + The 415 (Unsupported Media Type) status code indicates that + the origin server is refusing to service the request because the content is + in a format not supported by this method on the <x:ref>target resource</x:ref>. +</t> +<t> + The format problem might be due to the request's indicated + <x:ref>Content-Type</x:ref> or <x:ref>Content-Encoding</x:ref>, or as a + result of inspecting the data directly. +</t> +<t> + If the problem was caused by an unsupported content coding, the + <x:ref>Accept-Encoding</x:ref> response header field + (<xref target="field.accept-encoding"/>) ought to be + used to indicate which (if any) content codings would have been accepted + in the request. +</t> +<t> + On the other hand, if the cause was an unsupported media type, the + <x:ref>Accept</x:ref> response header field (<xref target="field.accept"/>) + can be used to indicate which media types would have been accepted + in the request. +</t> +</section> + +<section title="416 Range Not Satisfiable" anchor="status.416"> + <iref primary="true" item="416 Range Not Satisfiable (status code)" x:for-anchor=""/> + <x:anchor-alias value="416 (Range Not Satisfiable)"/> +<t> + The 416 (Range Not Satisfiable) status code indicates that + the set of ranges in the request's <x:ref>Range</x:ref> header field + (<xref target="field.range"/>) has been rejected either because none of + the requested ranges are satisfiable or because the client has requested + an excessive number of small or overlapping ranges (a potential denial of + service attack). +</t> +<t> + Each range unit defines what is required for its own range sets to be + satisfiable. For example, <xref target="byte.ranges"/> defines what makes + a bytes range set satisfiable. +</t> +<t> + A server that generates a 416 response to a byte-range request <bcp14>SHOULD</bcp14> + generate a <x:ref>Content-Range</x:ref> header field + specifying the current length of the selected representation + (<xref target="field.content-range"/>). 
+</t> +<t> + For example: +</t> +<sourcecode type="http-message"> +HTTP/1.1 416 Range Not Satisfiable +Date: Fri, 20 Jan 2012 15:41:54 GMT +Content-Range: bytes */47022 +</sourcecode> +<aside> + <t> + <x:h>Note:</x:h> Because servers are free to ignore <x:ref>Range</x:ref>, many + implementations will respond with the entire selected representation + in a <x:ref>200 (OK)</x:ref> response. That is partly because + most clients are prepared to receive a <x:ref>200 (OK)</x:ref> to + complete the task (albeit less efficiently) and partly because clients + might not stop making an invalid range request until they have received + a complete representation. Thus, clients cannot depend on receiving a + <x:ref>416 (Range Not Satisfiable)</x:ref> response even when it is most + appropriate. + </t> +</aside> +</section> + +<section title="417 Expectation Failed" anchor="status.417"> + <iref primary="true" item="417 Expectation Failed (status code)" x:for-anchor=""/> + <x:anchor-alias value="417 (Expectation Failed)"/> +<t> + The 417 (Expectation Failed) status code indicates that the + expectation given in the request's <x:ref>Expect</x:ref> header field + (<xref target="field.expect"/>) could not be met by at least one of the + inbound servers. +</t> +</section> + +<section title="418 (Unused)" anchor="status.418"> + <iref primary="true" item="418 (Unused) (status code)" x:for-anchor=""/> + <x:anchor-alias value="418 (Unused)"/> +<t> + <xref target="RFC2324"/> was an April 1 RFC that lampooned the various ways + HTTP was abused; one such abuse was the definition of an + application-specific 418 status code, which has been deployed as a joke + often enough for the code to be unusable for any future use. +</t> +<t> + Therefore, the 418 status code is reserved in the IANA HTTP Status Code + Registry. This indicates that the status code cannot be assigned to other + applications currently. If future circumstances require its use (e.g., + exhaustion of 4NN status codes), it can be re-assigned to another use. +</t> +</section> + +<section title="421 Misdirected Request" anchor="status.421"> + <iref primary="true" item="421 Misdirected Request (status code)" x:for-anchor=""/> + <x:anchor-alias value="421 (Misdirected Request)"/> +<t> + The 421 (Misdirected Request) status code indicates that the request was + directed at a server that is unable or unwilling to produce an + authoritative response for the target URI. An origin server (or gateway + acting on behalf of the origin server) sends 421 to reject a target URI + that does not match an <x:ref>origin</x:ref> for which the server has been + configured (<xref target="origin"/>) or does not match the connection + context over which the request was received + (<xref target="routing.reject"/>). +</t> +<t> + A client that receives a 421 (Misdirected Request) response <bcp14>MAY</bcp14> retry the + request, whether or not the request method is idempotent, over a different + connection, such as a fresh connection specific to the target resource's + origin, or via an alternative service <xref target="ALTSVC"/>. +</t> +<t> + A proxy <bcp14>MUST NOT</bcp14> generate a 421 response. 
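+</t>
+<t>
+  For example, an origin server that is configured to be authoritative only
+  for "www.example.org" might reject a request, received on the same
+  connection, whose target URI refers to a different authority
+  (the exchange below is illustrative):
+</t>
+<sourcecode type="http-message">
+GET /index.html HTTP/1.1
+Host: other.example
+
+HTTP/1.1 421 Misdirected Request
+Date: Sat, 26 Mar 2022 08:12:31 GMT
+</sourcecode>
+<t>
+  The client can then retry the request over a connection established
+  specifically for the target resource's origin.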
+</t> +</section> + +<section title="422 Unprocessable Content" anchor="status.422"> + <iref primary="true" item="422 Unprocessable Content (status code)" x:for-anchor=""/> + <x:anchor-alias value="422 (Unprocessable Content)"/> +<t> + The 422 (Unprocessable Content) status code indicates that the server + understands the content type of the request content (hence a + <x:ref>415 (Unsupported Media Type)</x:ref> status code is inappropriate), + and the syntax of the request content is correct, but it was unable to process + the contained instructions. For example, this status code can be sent if + an XML request content contains well-formed (i.e., syntactically correct), but + semantically erroneous XML instructions. +</t> +</section> + +<section title="426 Upgrade Required" anchor="status.426"> + <iref primary="true" item="426 Upgrade Required (status code)" x:for-anchor=""/> + <x:anchor-alias value="426 (Upgrade Required)"/> +<t> + The 426 (Upgrade Required) status code indicates that the + server refuses to perform the request using the current protocol but might + be willing to do so after the client upgrades to a different protocol. + The server <bcp14>MUST</bcp14> send an <x:ref>Upgrade</x:ref> header field in a 426 + response to indicate the required protocol(s) (<xref target="field.upgrade"/>). +</t> +<t> + Example: +</t> +<sourcecode type="http-message"> +HTTP/1.1 426 Upgrade Required +Upgrade: HTTP/3.0 +Connection: Upgrade +Content-Length: <x:length-of target="s426body"/> +Content-Type: text/plain + +<x:span anchor="s426body">This service requires use of the HTTP/3.0 protocol. +</x:span></sourcecode> +</section> +</section> + +<section title="Server Error 5xx" anchor="status.5xx"> + <x:anchor-alias value="5xx"/> + <x:anchor-alias value="5xx (Server Error)"/> + <iref primary="true" item="5xx Server Error (status code class)" x:for-anchor=""/> + <iref primary="true" item="Status Codes Classes" subitem="5xx Server Error" x:for-anchor=""/> +<t> + The 5xx (Server Error) class of status code indicates that + the server is aware that it has erred or is incapable of performing the + requested method. + Except when responding to a HEAD request, the server <bcp14>SHOULD</bcp14> send a + representation containing an explanation of the error situation, and + whether it is a temporary or permanent condition. + A user agent <bcp14>SHOULD</bcp14> display any included representation to the user. + These status codes are applicable to any request method. +</t> + +<section title="500 Internal Server Error" anchor="status.500"> + <iref primary="true" item="500 Internal Server Error (status code)" x:for-anchor=""/> + <x:anchor-alias value="500 (Internal Server Error)"/> +<t> + The 500 (Internal Server Error) status code indicates that + the server encountered an unexpected condition that prevented it from + fulfilling the request. +</t> +</section> + +<section title="501 Not Implemented" anchor="status.501"> + <iref primary="true" item="501 Not Implemented (status code)" x:for-anchor=""/> + <x:anchor-alias value="501 (Not Implemented)"/> +<t> + The 501 (Not Implemented) status code indicates that the + server does not support the functionality required to fulfill the request. + This is the appropriate response when the server does not recognize the + request method and is not capable of supporting it for any resource. 
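+</t>
+<t>
+  For example, a server that receives a request with the (hypothetical)
+  unrecognized method "FROBNICATE" might respond with:
+</t>
+<sourcecode type="http-message">
+HTTP/1.1 501 Not Implemented
+Date: Sat, 26 Mar 2022 08:12:31 GMT
+</sourcecode>
+<t>
+  As with other error responses, content that explains the error can help
+  the client's user understand why the request failed.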
+</t> +<t> + A 501 response is heuristically cacheable; i.e., unless otherwise indicated by + the method definition or explicit cache controls (see <xref target="CACHING" x:rel="#heuristic.freshness"/>). +</t> + +</section> + +<section title="502 Bad Gateway" anchor="status.502"> + <iref primary="true" item="502 Bad Gateway (status code)" x:for-anchor=""/> + <x:anchor-alias value="502 (Bad Gateway)"/> +<t> + The 502 (Bad Gateway) status code indicates that the server, + while acting as a gateway or proxy, received an invalid response from an + inbound server it accessed while attempting to fulfill the request. +</t> +</section> + +<section title="503 Service Unavailable" anchor="status.503"> + <iref primary="true" item="503 Service Unavailable (status code)" x:for-anchor=""/> + <x:anchor-alias value="503 (Service Unavailable)"/> +<t> + The 503 (Service Unavailable) status code indicates that the + server is currently unable to handle the request due to a temporary overload + or scheduled maintenance, which will likely be alleviated after some delay. + The server <bcp14>MAY</bcp14> send a <x:ref>Retry-After</x:ref> header field + (<xref target="field.retry-after"/>) to suggest an appropriate + amount of time for the client to wait before retrying the request. +</t> +<aside> + <t> + <x:h>Note:</x:h> The existence of the 503 status code does not imply that a + server has to use it when becoming overloaded. Some servers might + simply refuse the connection. + </t> +</aside> +</section> + +<section title="504 Gateway Timeout" anchor="status.504"> + <iref primary="true" item="504 Gateway Timeout (status code)" x:for-anchor=""/> + <x:anchor-alias value="504 (Gateway Timeout)"/> +<t> + The 504 (Gateway Timeout) status code indicates that the + server, while acting as a gateway or proxy, did not receive a timely + response from an upstream server it needed to access in order to + complete the request. +</t> +</section> + +<section title="505 HTTP Version Not Supported" anchor="status.505"> + <iref primary="true" item="505 HTTP Version Not Supported (status code)" x:for-anchor=""/> + <x:anchor-alias value="505 (HTTP Version Not Supported)"/> +<t> + The 505 (HTTP Version Not Supported) status code indicates + that the server does not support, or refuses to support, the major version + of HTTP that was used in the request message. The server is indicating that + it is unable or unwilling to complete the request using the same major + version as the client, as described in <xref target="protocol.version"/>, other than with this + error message. The server <bcp14>SHOULD</bcp14> generate a representation for the 505 + response that describes why that version is not supported and what other + protocols are supported by that server. +</t> +</section> +</section> +</section> + +<section title="Extending HTTP" anchor="extending"> +<t> + HTTP defines a number of generic extension points that can be used to + introduce capabilities to the protocol without introducing a new version, + including methods, status codes, field names, and further extensibility + points within defined fields, such as authentication schemes and + cache directives (see Cache-Control extensions in <xref target="CACHING" x:rel="#cache.control.extensions"/>). Because the semantics of HTTP are + not versioned, these extension points are persistent; the version of the + protocol in use does not affect their semantics. 
+</t> +<t> + Version-independent extensions are discouraged from depending on or + interacting with the specific version of the protocol in use. When this is + unavoidable, careful consideration needs to be given to how the extension + can interoperate across versions. +</t> +<t> + Additionally, specific versions of HTTP might have their own extensibility + points, such as transfer codings in HTTP/1.1 (<xref target="HTTP11" x:rel="#field.transfer-encoding"/>) and HTTP/2 SETTINGS or frame types + (<xref target="HTTP2"/>). These extension points are specific to the + version of the protocol they occur within. +</t> +<t> + Version-specific extensions cannot override or modify the semantics of + a version-independent mechanism or extension point (like a method or + header field) without explicitly being allowed by that protocol element. For + example, the CONNECT method (<xref target="CONNECT"/>) allows this. +</t> +<t> + These guidelines assure that the protocol operates correctly and + predictably, even when parts of the path implement different versions of + HTTP. +</t> + +<section title="Method Extensibility" anchor="method.extensibility"> + +<section title="Method Registry" anchor="method.registry"> +<t> + The "Hypertext Transfer Protocol (HTTP) Method Registry", maintained by + IANA at <eref target="https://www.iana.org/assignments/http-methods"/>, + registers <x:ref>method</x:ref> names. +</t> +<t> + HTTP method registrations <bcp14>MUST</bcp14> include the following fields: +</t> +<ul> + <li>Method Name (see <xref target="methods"/>)</li> + <li>Safe ("yes" or "no", see <xref target="safe.methods"/>)</li> + <li>Idempotent ("yes" or "no", see <xref target="idempotent.methods"/>)</li> + <li>Pointer to specification text</li> +</ul> +<t> + Values to be added to this namespace require IETF Review + (see <xref target="RFC8126" x:fmt="," x:sec="4.8"/>). +</t> +</section> + +<section title="Considerations for New Methods" anchor="considerations.for.new.methods"> +<t> + Standardized methods are generic; that is, they are potentially + applicable to any resource, not just one particular media type, kind of + resource, or application. As such, it is preferred that new methods + be registered in a document that isn't specific to a single application or + data format, since orthogonal technologies deserve orthogonal specification. +</t> +<t> + Since message parsing (<xref target="message.abstraction"/>) needs to be + independent of method + semantics (aside from responses to HEAD), definitions of new methods + cannot change the parsing algorithm or prohibit the presence of content + on either the request or the response message. + Definitions of new methods can specify that only a zero-length content + is allowed by requiring a Content-Length header field with a value of "0". +</t> +<t> + Likewise, new methods cannot use the special host:port and asterisk forms of + request target that are allowed for <x:ref>CONNECT</x:ref> and + <x:ref>OPTIONS</x:ref>, respectively (<xref target="target.resource"/>). + A full URI in absolute form is needed for the target URI, which means either + the request target needs to be sent in absolute form or the target URI will + be reconstructed from the request context in the same way it is for other + methods. 
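+</t>
+<t>
+  For example, a request using a hypothetical new method named "EXAMPLE"
+  would be sent with an ordinary request target, the target URI being
+  reconstructed from the request context just as it is for GET:
+</t>
+<sourcecode type="http-message">
+EXAMPLE /resource HTTP/1.1
+Host: www.example.org
+Content-Length: 0
+</sourcecode>
+<t>
+  A definition that allows only zero-length content would require the
+  Content-Length field value of "0" shown above.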
+</t> +<t> + A new method definition needs to indicate whether it is safe (<xref target="safe.methods"/>), idempotent (<xref target="idempotent.methods"/>), + cacheable (<xref target="cacheable.methods"/>), what + semantics are to be associated with the request content (if any), and what + refinements the method makes to header field or status code semantics. + If the new method is cacheable, its definition ought to describe how, and + under what conditions, a cache can store a response and use it to satisfy a + subsequent request. + The new method ought to describe whether it can be made conditional + (<xref target="preconditions"/>) and, if so, how a server responds + when the condition is false. + Likewise, if the new method might have some use for partial response + semantics (<xref target="field.range"/>), it ought to document this, too. +</t> +<aside> + <t> + <x:h>Note:</x:h> Avoid defining a method name that starts with "M-", since that + prefix might be misinterpreted as having the semantics assigned to it + by <xref target="RFC2774"/>. + </t> +</aside> +</section> +</section> + +<section title="Status Code Extensibility" anchor="status.code.extensibility"> + +<section title="Status Code Registry" anchor="status.code.registry"> +<t> + The "Hypertext Transfer Protocol (HTTP) Status Code Registry", maintained + by IANA at <eref target="https://www.iana.org/assignments/http-status-codes"/>, + registers <x:ref>status code</x:ref> numbers. +</t> +<t> + A registration <bcp14>MUST</bcp14> include the following fields: +</t> +<ul> + <li>Status Code (3 digits)</li> + <li>Short Description</li> + <li>Pointer to specification text</li> +</ul> +<t> + Values to be added to the HTTP status code namespace require IETF Review + (see <xref target="RFC8126" x:fmt="," x:sec="4.8"/>). +</t> +</section> + +<section title="Considerations for New Status Codes" anchor="considerations.for.new.status.codes"> +<t> + When it is necessary to express semantics for a response that are not + defined by current status codes, a new status code can be registered. + Status codes are generic; they are potentially applicable to any resource, + not just one particular media type, kind of resource, or application of + HTTP. As such, it is preferred that new status codes be registered in a + document that isn't specific to a single application. +</t> +<t> + New status codes are required to fall under one of the categories + defined in <xref target="status.codes"/>. To allow existing parsers to + process the response message, new status codes cannot disallow content, + although they can mandate a zero-length content. +</t> +<t> + Proposals for new status codes that are not yet widely deployed ought to + avoid allocating a specific number for the code until there is clear + consensus that it will be registered; instead, early drafts can use a + notation such as "4NN", or "3N0" .. "3N9", to indicate the class + of the proposed status code(s) without consuming a number prematurely. +</t> +<t> + The definition of a new status code ought to explain the request + conditions that would cause a response containing that status code (e.g., + combinations of request header fields and/or method(s)) along with any + dependencies on response header fields (e.g., what fields are required, + what fields can modify the semantics, and what field semantics are + further refined when used with the new status code). +</t> +<t> + By default, a status code applies only to the request corresponding to the + response it occurs within. 
If a status code applies to a larger scope of + applicability — for example, all requests to the resource in question or + all requests to a server — this must be explicitly specified. When doing + so, it should be noted that not all clients can be expected to + consistently apply a larger scope because they might not understand the + new status code. +</t> +<t> + The definition of a new final status code ought to specify whether or not it + is heuristically cacheable. Note that any response with a final status code + can be cached if the response has explicit freshness information. A status + code defined as heuristically cacheable is allowed to be cached without + explicit freshness information. + Likewise, the definition of a status code can place + constraints upon cache behavior if the must-understand cache + directive is used. See <xref target="CACHING"/> for more information. +</t> +<t> + Finally, the definition of a new status code ought to indicate whether the + content has any implied association with an identified resource (<xref target="identifying.content"/>). +</t> +</section> +</section> + +<section title="Field Extensibility" anchor="fields.extensibility"> + <x:anchor-alias value="header.extensibility"/> + +<t> + HTTP's most widely used extensibility point is the definition of new header and + trailer fields. +</t> +<t> + New fields can be defined such that, when they are understood by a + recipient, they override or enhance the interpretation of previously + defined fields, define preconditions on request evaluation, or + refine the meaning of responses. +</t> +<t> + However, defining a field doesn't guarantee its deployment or recognition + by recipients. Most fields are designed with the expectation that a recipient + can safely ignore (but forward downstream) any field not recognized. + In other cases, the sender's ability to understand a given field might be + indicated by its prior communication, perhaps in the protocol version + or fields that it sent in prior messages, or its use of a specific media type. + Likewise, direct inspection of support might be possible through an + OPTIONS request or by interacting with a defined well-known URI + <xref target="RFC8615"/> if such inspection is defined along with + the field being introduced. +</t> + +<section title="Field Name Registry" anchor="fields.registry"> + <x:anchor-alias value="header.name.registry"/> +<t> + The "Hypertext Transfer Protocol (HTTP) Field Name Registry" defines the + namespace for HTTP field names. +</t> +<t> + Any party can request registration of an HTTP field. See <xref target="considerations.for.new.fields"/> for considerations to take + into account when creating a new HTTP field. +</t> +<t> + The "Hypertext Transfer Protocol (HTTP) Field Name Registry" is located at + <eref target="https://www.iana.org/assignments/http-fields/"/>. + Registration requests can be made by following the instructions located + there or by sending an email to the "ietf-http-wg@w3.org" mailing list. +</t> +<t> + Field names are registered on the advice of a designated expert + (appointed by the IESG or their delegate). Fields with the status + 'permanent' are Specification Required + (<xref target="RFC8126" x:sec="4.6" x:fmt=","/>). +</t> +<t> + Registration requests consist of the following information: +</t> +<dl newline="true"> + <dt>Field name:</dt> + <dd> + The requested field name. 
It <bcp14>MUST</bcp14> conform to the + field-name syntax defined in <xref target="fields.names"/>, and it <bcp14>SHOULD</bcp14> be + restricted to just letters, digits, and hyphen ('-') + characters, with the first character being a letter. + </dd> + <dt>Status:</dt> + <dd> + "permanent", "provisional", "deprecated", or "obsoleted". + </dd> + <dt>Specification document(s):</dt> + <dd> + Reference to the document that specifies + the field, preferably including a URI that can be used to retrieve + a copy of the document. Optional but encouraged for provisional registrations. + An indication of the relevant section(s) can also be included, but is not required. + </dd> +</dl> +<t> + And optionally: +</t> +<dl> + <dt>Comments:</dt> + <dd> + Additional information, such as about reserved entries. + </dd> +</dl> +<t> + The expert(s) can define additional fields to be collected in the + registry, in consultation with the community. +</t> +<t> + Standards-defined names have a status of "permanent". Other names can also + be registered as permanent if the expert(s) finds that they are in use, in + consultation with the community. Other names should be registered as + "provisional". +</t> +<t> + Provisional entries can be removed by the expert(s) if — in consultation + with the community — the expert(s) find that they are not in use. The + expert(s) can change a provisional entry's status to permanent at any time. +</t> +<t> + Note that names can be registered by third parties (including the + expert(s)) if the expert(s) determines that an unregistered name is widely + deployed and not likely to be registered in a timely manner otherwise. +</t> +</section> + +<section title="Considerations for New Fields" anchor="considerations.for.new.fields"> +<t> + HTTP header and trailer fields are a widely used extension point for the protocol. + While they can be used in an ad hoc fashion, fields that are intended for + wider use need to be carefully documented to ensure interoperability. +</t> +<t> + In particular, authors of specifications defining new fields are advised to consider + and, where appropriate, document the following aspects: +</t> +<ul> + <li>Under what conditions the field can be used; e.g., only in + responses or requests, in all messages, only on responses to a + particular request method, etc.</li> + <li>Whether the field semantics are further refined by their context, + such as their use with certain request methods or status codes.</li> + <li>The scope of applicability for the information conveyed. + By default, fields apply only to the message they are + associated with, but some response fields are designed to apply to all + representations of a resource, the resource itself, or an even broader + scope. 
Specifications that expand the scope of a response field will + need to carefully consider issues such as content negotiation, the time + period of applicability, and (in some cases) multi-tenant server + deployments.</li> + <li>Under what conditions intermediaries are allowed to insert, + delete, or modify the field's value.</li> + <li>If the field is allowable in trailers; by + default, it will not be (see <xref target="trailers.limitations"/>).</li> + <li>Whether it is appropriate or even required to list the field name in the + <x:ref>Connection</x:ref> header field (i.e., if the field is to + be hop-by-hop; see <xref target="field.connection"/>).</li> + <li>Whether the field introduces any additional security considerations, such + as disclosure of privacy-related data.</li> +</ul> +<t> + Request header fields have additional considerations that need to be documented + if the default behavior is not appropriate: +</t> +<ul> + <li>If it is appropriate to list the field name in a + <x:ref>Vary</x:ref> response header field (e.g., when the request header + field is used by an origin server's content selection algorithm; see + <xref target="field.vary"/>).</li> + <li>If the field is intended to be stored when received in a PUT + request (see <xref target="PUT"/>).</li> + <li>If the field ought to be removed when automatically redirecting a + request due to security concerns (see <xref target="status.3xx"/>).</li> +</ul> + + +<section title="Considerations for New Field Names" anchor="considerations.for.new.field.names"> +<t> + Authors of specifications defining new fields are advised to choose a short + but descriptive field name. Short names avoid needless data transmission; + descriptive names avoid confusion and "squatting" on names that might have + broader uses. +</t> +<t> + To that end, limited-use fields (such as a header confined to a single + application or use case) are encouraged to use a name that includes that use + (or an abbreviation) as a prefix; for example, if the Foo Application needs + a Description field, it might use "Foo-Desc"; "Description" is too generic, + and "Foo-Description" is needlessly long. +</t> +<t> + While the field-name syntax is defined to allow any token character, in + practice some implementations place limits on the characters they accept + in field-names. To be interoperable, new field names <bcp14>SHOULD</bcp14> constrain + themselves to alphanumeric characters, "-", and ".", and <bcp14>SHOULD</bcp14> + begin with a letter. For example, the underscore + ("_") character can be problematic when passed through non-HTTP + gateway interfaces (see <xref target="underscore.in.fields"/>). +</t> +<t> + Field names ought not be prefixed with "X-"; see + <xref target="BCP178"/> for further information. +</t> +<t> + Other prefixes are sometimes used in HTTP field names; for example, + "Accept-" is used in many content negotiation headers, and "Content-" is used + as explained in <xref target="content"/>. These prefixes are + only an aid to recognizing the purpose of a field and do not + trigger automatic processing. +</t> +</section> + +<section title="Considerations for New Field Values" anchor="considerations.for.new.field.values"> +<t> + A major task in the definition of a new HTTP field is the specification of + the field value syntax: what senders should generate, and how recipients + should infer semantics from what is received. 
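+</t>
+<t>
+  One recurring aspect of that task is list parsing: recipients can combine
+  repeated field lines into a single comma-separated value
+  (<xref target="fields.order"/>), so a parser for a list-based value has to
+  distinguish delimiting commas from commas inside quoted strings. The sketch
+  below is non-normative and uses hypothetical helper names:
+</t>
+<sourcecode type="python"><![CDATA[
+def combine_field_lines(values):
+    """Combine repeated field lines into one value (append with ", ")."""
+    return ", ".join(values)
+
+def split_list_members(value):
+    """Split on commas that are outside quoted strings (simplified)."""
+    members, current, quoted, escaped = [], [], False, False
+    for ch in value:
+        if escaped:
+            current.append(ch); escaped = False
+        elif quoted and ch == "\\":
+            current.append(ch); escaped = True
+        elif ch == '"':
+            current.append(ch); quoted = not quoted
+        elif ch == "," and not quoted:
+            members.append("".join(current).strip()); current = []
+        else:
+            current.append(ch)
+    members.append("".join(current).strip())
+    return [m for m in members if m]
+
+value = combine_field_lines(['a=1', 'note="x, y"', 'b=2'])
+assert split_list_members(value) == ['a=1', 'note="x, y"', 'b=2']
+]]></sourcecode>
+<t>
+  The example only illustrates the parsing concern; the considerations below
+  apply to whatever syntax is chosen for a new field.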
+</t> +<t> + Authors are encouraged (but not required) to use either the ABNF rules in + this specification or those in <xref target="RFC8941"/> to define the syntax + of new field values. +</t> +<t> + Authors are advised to carefully consider how the combination of multiple + field lines will impact them (see <xref target="fields.order"/>). Because + senders might erroneously send multiple values, and both intermediaries + and HTTP libraries can perform combination automatically, this applies to + all field values — even when only a single value is anticipated. +</t> +<t> + Therefore, authors are advised to delimit or encode values that contain + commas (e.g., with the <x:ref>quoted-string</x:ref> rule of + <xref target="quoted.strings"/>, the String data type of + <xref target="RFC8941"/>, or a field-specific encoding). + This ensures that commas within field data are not confused + with the commas that delimit a list value. +</t> +<t> + For example, the <x:ref>Content-Type</x:ref> field value only allows commas + inside quoted strings, which can be reliably parsed even when multiple + values are present. The <x:ref>Location</x:ref> field value provides a + counter-example that should not be emulated: because URIs can include + commas, it is not possible to reliably distinguish between a single value + that includes a comma from two values. +</t> +<t> + Authors of fields with a singleton value (see <xref target="fields.values"/>) are additionally advised to document how to treat + messages where the multiple members are present (a sensible default would + be to ignore the field, but this might not always be the right choice). +</t> +</section> +</section> +</section> + +<section title="Authentication Scheme Extensibility" anchor="auth.scheme.extensibility"> + +<section title="Authentication Scheme Registry" anchor="auth.scheme.registry"> +<t> + The "Hypertext Transfer Protocol (HTTP) Authentication Scheme Registry" + defines the namespace for the authentication schemes in challenges and + credentials. It is maintained + at <eref target="https://www.iana.org/assignments/http-authschemes"/>. +</t> +<t> + Registrations <bcp14>MUST</bcp14> include the following fields: +</t> +<ul> + <li>Authentication Scheme Name</li> + <li>Pointer to specification text</li> + <li>Notes (optional)</li> +</ul> +<t> + Values to be added to this namespace require IETF Review + (see <xref target="RFC8126" x:fmt="," x:sec="4.8"/>). +</t> +</section> + +<section title="Considerations for New Authentication Schemes" anchor="considerations.for.new.auth.schemes"> +<t> + There are certain aspects of the HTTP Authentication framework that put + constraints on how new authentication schemes can work: +</t> +<ul> + <li><t> + HTTP authentication is presumed to be stateless: all of the information + necessary to authenticate a request <bcp14>MUST</bcp14> be provided in the request, + rather than be dependent on the server remembering prior requests. + Authentication based on, or bound to, the underlying connection is + outside the scope of this specification and inherently flawed unless + steps are taken to ensure that the connection cannot be used by any + party other than the authenticated user + (see <xref target="connections"/>). + </t> + </li> + <li> + <t> + The authentication parameter "realm" is reserved for defining protection + spaces as described in <xref target="protection.space"/>. New schemes + <bcp14>MUST NOT</bcp14> use it in a way incompatible with that definition. 
+ </t> + </li> + <li> + <t> + The "token68" notation was introduced for compatibility with existing + authentication schemes and can only be used once per challenge or credential. + Thus, new schemes ought to use the auth-param syntax instead, because + otherwise future extensions will be impossible. + </t> + </li> + <li> + <t> + The parsing of challenges and credentials is defined by this specification + and cannot be modified by new authentication schemes. When the auth-param + syntax is used, all parameters ought to support both token and + quoted-string syntax, and syntactical constraints ought to be defined on + the field value after parsing (i.e., quoted-string processing). This is + necessary so that recipients can use a generic parser that applies to + all authentication schemes. + </t> + <t> + <x:h>Note:</x:h> The fact that the value syntax for the "realm" parameter + is restricted to quoted-string was a bad design choice not to be repeated + for new parameters. + </t> + </li> + <li> + <t> + Definitions of new schemes ought to define the treatment of unknown + extension parameters. In general, a "must-ignore" rule is preferable + to a "must-understand" rule, because otherwise it will be hard to introduce + new parameters in the presence of legacy recipients. Furthermore, + it's good to describe the policy for defining new parameters (such + as "update the specification" or "use this registry"). + </t> + </li> + <li> + <t> + Authentication schemes need to document whether they are usable in + origin-server authentication (i.e., using <x:ref>WWW-Authenticate</x:ref>), + and/or proxy authentication (i.e., using <x:ref>Proxy-Authenticate</x:ref>). + </t> + </li> + <li> + <t> + The credentials carried in an <x:ref>Authorization</x:ref> header field are specific to + the user agent and, therefore, have the same effect on HTTP caches as the + "private" cache response directive (<xref target="CACHING" x:rel="#cache-response-directive.private"/>), + within the scope of the request in which they appear. + </t> + <t> + Therefore, new authentication schemes that choose not to carry + credentials in the <x:ref>Authorization</x:ref> header field (e.g., using a newly defined + header field) will need to explicitly disallow caching, by mandating the use of + cache response directives (e.g., "private"). + </t> + </li> + <li> + <t> + Schemes using <x:ref>Authentication-Info</x:ref>, <x:ref>Proxy-Authentication-Info</x:ref>, + or any other authentication related response header field need to + consider and document the related security considerations (see + <xref target="security.auth.add.resp"/>). + </t> + </li> +</ul> +</section> +</section> + +<section title="Range Unit Extensibility" anchor="range.unit.extensibility"> + +<section title="Range Unit Registry" anchor="range.unit.registry"> +<t> + The "HTTP Range Unit Registry" defines the namespace for the range + unit names and refers to their corresponding specifications. + It is maintained at + <eref target="https://www.iana.org/assignments/http-parameters"/>. +</t> +<t> + Registration of an HTTP Range Unit <bcp14>MUST</bcp14> include the following fields: +</t> +<ul> + <li>Name</li> + <li>Description</li> + <li>Pointer to specification text</li> +</ul> +<t> + Values to be added to this namespace require IETF Review + (see <xref target="RFC8126" x:fmt="," x:sec="4.8"/>). 
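+</t>
+<t>
+  As a non-normative sketch of how a resource-specific implementation might
+  handle an extension range unit of the kind discussed in the next subsection
+  (the "rows" unit and helper name below are hypothetical):
+</t>
+<sourcecode type="python"><![CDATA[
+def parse_single_range(field_value, expected_unit):
+    """Parse a Range value of the form unit=first-last (one range only,
+    simplified).  An unrecognized unit is ignored, as a server may do."""
+    unit, _, range_set = field_value.partition("=")
+    if unit.strip().lower() != expected_unit or not range_set:
+        return None
+    first, _, last = range_set.partition("-")
+    return (int(first), int(last) if last else None)
+
+assert parse_single_range("rows=0-99", "rows") == (0, 99)
+assert parse_single_range("bytes=500-999", "rows") is None
+]]></sourcecode>
+<t>
+  The "unit=" shape is shared by "bytes" and extension units, although the
+  range-set syntax after the "=" is unit-specific.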
+</t> +</section> + +<section title="Considerations for New Range Units" anchor="considerations.for.new.range.units"> + <x:anchor-alias value="other-range-unit"/> +<t> + Other range units, such as format-specific boundaries like pages, + sections, records, rows, or time, are potentially usable in HTTP for + application-specific purposes, but are not commonly used in practice. + Implementors of alternative range units ought to consider how they would + work with content codings and general-purpose intermediaries. +</t> +</section> +</section> + +<section title="Content Coding Extensibility" anchor="content.coding.extensibility"> + +<section title="Content Coding Registry" anchor="content.coding.registry"> +<t> + The "HTTP Content Coding Registry", maintained by + IANA at <eref target="https://www.iana.org/assignments/http-parameters/"/>, + registers <x:ref>content-coding</x:ref> names. +</t> +<t> + Content coding registrations <bcp14>MUST</bcp14> include the following fields: +</t> +<ul> + <li>Name</li> + <li>Description</li> + <li>Pointer to specification text</li> +</ul> +<t> + Names of content codings <bcp14>MUST NOT</bcp14> overlap with names of transfer codings + (per the "HTTP Transfer Coding Registry" located at + <eref target="https://www.iana.org/assignments/http-parameters/"/>) unless + the encoding transformation is + identical (as is the case for the compression codings defined in + <xref target="content.codings"/>). +</t> +<t> + Values to be added to this namespace require IETF Review + (see <xref target="RFC8126" x:fmt="of" x:sec="4.8"/>) and <bcp14>MUST</bcp14> + conform to the purpose of content coding defined in + <xref target="content.codings"/>. +</t> +</section> + +<section title="Considerations for New Content Codings" anchor="considerations.for.new.content.codings"> +<t> + New content codings ought to be self-descriptive whenever possible, with + optional parameters discoverable within the coding format itself, rather + than rely on external metadata that might be lost during transit. +</t> +</section> +</section> + +<section title="Upgrade Token Registry" anchor="upgrade.token.registry"> +<t> + The "Hypertext Transfer Protocol (HTTP) Upgrade Token Registry" defines + the namespace for protocol-name tokens used to identify protocols in the + <x:ref>Upgrade</x:ref> header field. The registry is maintained at + <eref target="https://www.iana.org/assignments/http-upgrade-tokens"/>. +</t> +<t> + Each registered protocol name is associated with contact information + and an optional set of specifications that details how the connection + will be processed after it has been upgraded. +</t> +<t> + Registrations happen on a "First Come First Served" basis (see + <xref target="RFC8126" x:sec="4.4" x:fmt="of"/>) and are subject to the + following rules: +</t> +<ol> + <li>A protocol-name token, once registered, stays registered forever.</li> + <li>A protocol-name token is case-insensitive and registered with the + preferred case to be generated by senders.</li> + <li>The registration <bcp14>MUST</bcp14> name a responsible party for the + registration.</li> + <li>The registration <bcp14>MUST</bcp14> name a point of contact.</li> + <li>The registration <bcp14>MAY</bcp14> name a set of specifications associated with + that token. 
Such specifications need not be publicly available.</li> + <li>The registration <bcp14>SHOULD</bcp14> name a set of expected "protocol-version" + tokens associated with that token at the time of registration.</li> + <li>The responsible party <bcp14>MAY</bcp14> change the registration at any time. + The IANA will keep a record of all such changes, and make them + available upon request.</li> + <li>The IESG <bcp14>MAY</bcp14> reassign responsibility for a protocol token. + This will normally only be used in the case when a + responsible party cannot be contacted.</li> +</ol> +</section> +</section> + +<section title="Security Considerations" anchor="security.considerations"> +<t> + This section is meant to inform developers, information providers, and + users of known security concerns relevant to HTTP semantics and its + use for transferring information over the Internet. Considerations related + to caching are discussed in <xref target="CACHING" x:rel="#security.considerations"/>, + and considerations related to HTTP/1.1 message syntax and parsing are + discussed in <xref target="HTTP11" x:rel="#security.considerations"/>. +</t> +<t> + The list of considerations below is not exhaustive. Most security concerns + related to HTTP semantics are about securing server-side applications (code + behind the HTTP interface), securing user agent processing of content + received via HTTP, or secure use of the Internet in general, rather than + security of the protocol. The security considerations for URIs, which + are fundamental to HTTP operation, are discussed in + <xref target="URI" x:fmt="of" x:sec="7"/>. Various organizations maintain + topical information and links to current research on Web application + security (e.g., <xref target="OWASP"/>). +</t> + +<section title="Establishing Authority" anchor="establishing.authority"> + <iref item="authoritative response" primary="true"/> + <iref item="phishing" primary="true"/> +<t> + HTTP relies on the notion of an <x:dfn>authoritative response</x:dfn>: a + response that has been determined by (or at the direction of) the origin + server identified within the target URI to be the most appropriate response + for that request given the state of the target resource at the time of + response message origination. +</t> +<t> + When a registered name is used in the authority component, the "http" URI + scheme (<xref target="http.uri"/>) relies on the user's local name + resolution service to determine where it can find authoritative responses. + This means that any attack on a user's network host table, cached names, + or name resolution libraries becomes an avenue for attack on establishing + authority for "http" URIs. Likewise, the user's choice of server for + Domain Name Service (DNS), and the hierarchy of servers from which it + obtains resolution results, could impact the authenticity of address + mappings; DNS Security Extensions (DNSSEC, <xref target="RFC4033"/>) are + one way to improve authenticity, as are the various mechanisms for making + DNS requests over more secure transfer protocols. +</t> +<t> + Furthermore, after an IP address is obtained, establishing authority for + an "http" URI is vulnerable to attacks on Internet Protocol routing. 
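+</t>
+<t>
+  As a non-normative sketch of the identity check discussed in the next
+  paragraph, a client library typically verifies both the certificate chain
+  and the host name taken from the URI's authority component before treating
+  the connection as authoritative (the helper name below is hypothetical):
+</t>
+<sourcecode type="python"><![CDATA[
+import socket
+import ssl
+
+def open_authoritative_connection(host, port=443):
+    """Connect for an "https" URI and verify the presented certificate
+    against the authority component of the target URI."""
+    context = ssl.create_default_context()   # verifies the chain ...
+    context.check_hostname = True            # ... and the host name
+    sock = socket.create_connection((host, port))
+    return context.wrap_socket(sock, server_hostname=host)
+
+# conn = open_authoritative_connection("www.example.com")
+]]></sourcecode>
+<t>
+  Skipping or misconfiguring either step reopens the attacks described above.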
+</t> +<t> + The "https" scheme (<xref target="https.uri"/>) is intended to prevent + (or at least reveal) many of these potential attacks on establishing + authority, provided that the negotiated connection is secured and + the client properly verifies that the communicating server's identity + matches the target URI's authority component + (<xref target="https.verify"/>). Correctly implementing such verification + can be difficult (see <xref target="Georgiev"/>). +</t> +<t> + Authority for a given origin server can be delegated through protocol + extensions; for example, <xref target="ALTSVC"/>. Likewise, the set of + servers for which a connection is considered authoritative can be changed + with a protocol extension like <xref target="RFC8336"/>. +</t> +<t> + Providing a response from a non-authoritative source, such as a shared + proxy cache, is often useful to improve performance and availability, but + only to the extent that the source can be trusted or the distrusted + response can be safely used. +</t> +<t> + Unfortunately, communicating authority to users can be difficult. + For example, <x:dfn>phishing</x:dfn> is an attack on the user's perception + of authority, where that perception can be misled by presenting similar + branding in hypertext, possibly aided by userinfo obfuscating the authority + component (see <xref target="http.uri"/>). + User agents can reduce the impact of phishing attacks by enabling users to + easily inspect a target URI prior to making an action, by prominently + distinguishing (or rejecting) userinfo when present, and by not sending + stored credentials and cookies when the referring document is from an + unknown or untrusted source. +</t> +</section> + +<section title="Risks of Intermediaries" anchor="risks.intermediaries"> +<t> + HTTP intermediaries are inherently situated for on-path attacks. + Compromise of + the systems on which the intermediaries run can result in serious security + and privacy problems. Intermediaries might have access to security-related + information, personal information about individual users and + organizations, and proprietary information belonging to users and + content providers. A compromised intermediary, or an intermediary + implemented or configured without regard to security and privacy + considerations, might be used in the commission of a wide range of + potential attacks. +</t> +<t> + Intermediaries that contain a shared cache are especially vulnerable + to cache poisoning attacks, as described in <xref target="CACHING" x:rel="#security.considerations"/>. +</t> +<t> + Implementers need to consider the privacy and security + implications of their design and coding decisions, and of the + configuration options they provide to operators (especially the + default configuration). +</t> +<t> + Intermediaries are no more trustworthy than the people and policies + under which they operate; HTTP cannot solve this problem. +</t> +</section> + +<section title="Attacks Based on File and Path Names" anchor="attack.pathname"> +<t> + Origin servers frequently make use of their local file system to manage the + mapping from target URI to resource representations. + Most file systems are not designed to protect against malicious file + or path names. Therefore, an origin server needs to avoid accessing + names that have a special significance to the system when mapping the + target resource to files, folders, or directories. +</t> +<t> + For example, UNIX, Microsoft Windows, and other operating systems use ".." 
+ as a path component to indicate a directory level above the current one, + and they use specially named paths or file names to send data to system devices. + Similar naming conventions might exist within other types of storage + systems. Likewise, local storage systems have an annoying tendency to + prefer user-friendliness over security when handling invalid or unexpected + characters, recomposition of decomposed characters, and case-normalization + of case-insensitive names. +</t> +<t> + Attacks based on such special names tend to focus on either denial-of-service + (e.g., telling the server to read from a COM port) or disclosure + of configuration and source files that are not meant to be served. +</t> +</section> + +<section title="Attacks Based on Command, Code, or Query Injection" anchor="attack.injection"> +<t> + Origin servers often use parameters within the URI as a + means of identifying system services, selecting database entries, or + choosing a data source. However, data received in a request cannot be + trusted. An attacker could construct any of the request data elements + (method, target URI, header fields, or content) to contain data that might + be misinterpreted as a command, code, or query when passed through a + command invocation, language interpreter, or database interface. +</t> +<t> + For example, SQL injection is a common attack wherein additional query + language is inserted within some part of the target URI or header + fields (e.g., <x:ref>Host</x:ref>, <x:ref>Referer</x:ref>, etc.). + If the received data is used directly within a SELECT statement, the + query language might be interpreted as a database command instead of a + simple string value. This type of implementation vulnerability is extremely + common, in spite of being easy to prevent. +</t> +<t> + In general, resource implementations ought to avoid use of request data + in contexts that are processed or interpreted as instructions. Parameters + ought to be compared to fixed strings and acted upon as a result of that + comparison, rather than passed through an interface that is not prepared + for untrusted data. Received data that isn't based on fixed parameters + ought to be carefully filtered or encoded to avoid being misinterpreted. +</t> +<t> + Similar considerations apply to request data when it is stored and later + processed, such as within log files, monitoring tools, or when included + within a data format that allows embedded scripts. +</t> +</section> + +<section title="Attacks via Protocol Element Length" anchor="attack.protocol.element.length"> +<t> + Because HTTP uses mostly textual, character-delimited fields, parsers are + often vulnerable to attacks based on sending very long (or very slow) + streams of data, particularly where an implementation is expecting a + protocol element with no predefined length + (<xref target="length.requirements"/>). +</t> +<t> + To promote interoperability, specific recommendations are made for minimum + size limits on fields (<xref target="fields.limits"/>). These are + minimum recommendations, chosen to be supportable even by implementations + with limited resources; it is expected that most implementations will + choose substantially higher limits. +</t> +<t> + A server can reject a message that + has a target URI that is too long (<xref target="status.414"/>) or request content + that is too large (<xref target="status.413"/>). Additional status codes related to + capacity limits have been defined by extensions to HTTP + <xref target="RFC6585"/>. 
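+</t>
+<t>
+  A non-normative sketch of such enforcement is shown below; the specific
+  numbers are illustrative local policy, not requirements of this
+  specification:
+</t>
+<sourcecode type="python"><![CDATA[
+MAX_TARGET_OCTETS = 8000          # illustrative limits only
+MAX_FIELD_LINE_OCTETS = 16384
+MAX_CONTENT_OCTETS = 10 * 1024 * 1024
+
+def reject_status_for(target, field_lines, content_length):
+    """Return a status code for an oversized request, or None to accept."""
+    if len(target.encode("ascii", "replace")) > MAX_TARGET_OCTETS:
+        return 414                # URI Too Long
+    if any(len(line) > MAX_FIELD_LINE_OCTETS for line in field_lines):
+        return 400                # an appropriate 4xx for oversized fields
+    if content_length is not None and content_length > MAX_CONTENT_OCTETS:
+        return 413                # Content Too Large
+    return None
+
+assert reject_status_for("/" + "a" * 9000, [], 0) == 414
+assert reject_status_for("/index.html", ["Host: example.com"], 100) is None
+]]></sourcecode>
+<t>
+  Whatever limits are chosen, applying them before an element is buffered in
+  full keeps the check itself from becoming a resource sink.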
+</t> +<t> + Recipients ought to carefully limit the extent to which they process other + protocol elements, including (but not limited to) request methods, response + status phrases, field names, numeric values, and chunk lengths. + Failure to limit such processing can result in arbitrary code execution due to + buffer or arithmetic + overflows, and increased vulnerability to denial-of-service attacks. +</t> +</section> + +<section title="Attacks Using Shared-Dictionary Compression" anchor="compression.attacks"> +<t> + Some attacks on encrypted protocols use the differences in size created by + dynamic compression to reveal confidential information; for example, <xref target="BREACH"/>. These attacks rely on creating a redundancy between + attacker-controlled content and the confidential information, such that a + dynamic compression algorithm using the same dictionary for both content + will compress more efficiently when the attacker-controlled content matches + parts of the confidential content. +</t> +<t> + HTTP messages can be compressed in a number of ways, including using TLS + compression, content codings, transfer codings, and other extension or + version-specific mechanisms. +</t> +<t> + The most effective mitigation for this risk is to disable compression on + sensitive data, or to strictly separate sensitive data from attacker-controlled + data so that they cannot share the same compression dictionary. With + careful design, a compression scheme can be designed in a way that is not + considered exploitable in limited use cases, such as HPACK (<xref target="HPACK"/>). +</t> +</section> + +<section title="Disclosure of Personal Information" anchor="personal.information"> +<t> + Clients are often privy to large amounts of personal information, + including both information provided by the user to interact with resources + (e.g., the user's name, location, mail address, passwords, encryption + keys, etc.) and information about the user's browsing activity over + time (e.g., history, bookmarks, etc.). Implementations need to + prevent unintentional disclosure of personal information. +</t> +</section> + +<section title="Privacy of Server Log Information" anchor="privacy.of.server.log.information"> +<t> + A server is in the position to save personal data about a user's requests + over time, which might identify their reading patterns or subjects of + interest. In particular, log information gathered at an intermediary + often contains a history of user agent interaction, across a multitude + of sites, that can be traced to individual users. +</t> +<t> + HTTP log information is confidential in nature; its handling is often + constrained by laws and regulations. Log information needs to be securely + stored and appropriate guidelines followed for its analysis. + Anonymization of personal information within individual entries helps, + but it is generally not sufficient to prevent real log traces from being + re-identified based on correlation with other access characteristics. + As such, access traces that are keyed to a specific client are unsafe to + publish even if the key is pseudonymous. +</t> +<t> + To minimize the risk of theft or accidental publication, log information + ought to be purged of personally identifiable information, including + user identifiers, IP addresses, and user-provided query parameters, + as soon as that information is no longer necessary to support operational + needs for security, auditing, or fraud control. 
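+</t>
+<t>
+  A non-normative sketch of such purging, limited to the fields called out
+  above (a real deployment would also consider Referer, cookies, and any
+  application-specific identifiers):
+</t>
+<sourcecode type="python"><![CDATA[
+import re
+from urllib.parse import urlsplit, urlunsplit
+
+def scrub_log_entry(client_ip, user, target_uri):
+    """Drop user identifiers, coarsen the address, and strip the query."""
+    coarse_ip = re.sub(r"\.\d+$", ".0", client_ip)   # IPv4 only, for brevity
+    parts = urlsplit(target_uri)
+    no_query = urlunsplit((parts.scheme, parts.netloc, parts.path, "", ""))
+    return {"client": coarse_ip, "user": "-", "target": no_query}
+
+entry = scrub_log_entry("192.0.2.17", "alice", "/search?q=private+topic")
+assert entry == {"client": "192.0.2.0", "user": "-", "target": "/search"}
+]]></sourcecode>
+<t>
+  As noted above, such scrubbing reduces, but does not by itself eliminate,
+  the risk that retained traces can be re-identified through correlation.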
+</t> +</section> + +<section title="Disclosure of Sensitive Information in URIs" anchor="sensitive.information.in.uris"> +<t> + URIs are intended to be shared, not secured, even when they identify secure + resources. URIs are often shown on displays, added to templates when a page + is printed, and stored in a variety of unprotected bookmark lists. + Many servers, proxies, and user agents log or display the target URI + in places where it might be visible to third parties. + It is therefore unwise to include information within a URI that + is sensitive, personally identifiable, or a risk to disclose. +</t> +<t> + When an application uses client-side mechanisms to construct a target URI + out of user-provided information, such as the query fields of a form using + GET, potentially sensitive data might be provided that would not be + appropriate for disclosure within a URI. POST is often preferred in such + cases because it usually doesn't construct a URI; instead, POST of a form + transmits the potentially sensitive data in the request content. However, this + hinders caching and uses an unsafe method for what would otherwise be a safe + request. Alternative workarounds include transforming the user-provided data + prior to constructing the URI or filtering the data to only include common + values that are not sensitive. Likewise, redirecting the result of a query + to a different (server-generated) URI can remove potentially sensitive data + from later links and provide a cacheable response for later reuse. +</t> +<t> + Since the <x:ref>Referer</x:ref> header field tells a target site about the + context that resulted in a request, it has the potential to reveal + information about the user's immediate browsing history and any personal + information that might be found in the referring resource's URI. + Limitations on the Referer header field are described in <xref target="field.referer"/> to + address some of its security considerations. +</t> +</section> + +<section title="Application Handling of Field Names" anchor="underscore.in.fields"> +<t> + Servers often use non-HTTP gateway interfaces and frameworks to process a received + request and produce content for the response. For historical reasons, such interfaces + often pass received field names as external variable names, using a name mapping + suitable for environment variables. +</t> +<t> + For example, the Common Gateway Interface (CGI) mapping of protocol-specific + meta-variables, defined by <xref target="RFC3875" x:fmt="of" x:sec="4.1.18"/>, + is applied to received header fields that do not correspond to one of CGI's + standard variables; the mapping consists of prepending "HTTP_" to each name + and changing all instances of hyphen ("-") to underscore ("_"). This same mapping + has been inherited by many other application frameworks in order to simplify + moving applications from one platform to the next. +</t> +<t> + In CGI, a received <x:ref>Content-Length</x:ref> field would be passed + as the meta-variable "CONTENT_LENGTH" with a string value matching the + received field's value. In contrast, a received "Content_Length" header field would + be passed as the protocol-specific meta-variable "HTTP_CONTENT_LENGTH", + which might lead to some confusion if an application mistakenly reads the + protocol-specific meta-variable instead of the default one. (This historical practice + is why <xref target="considerations.for.new.field.names"/> discourages the creation + of new field names that contain an underscore.) 
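+</t>
+<t>
+  A non-normative sketch of the mapping described above (RFC 3875 also
+  upper-cases the name), showing why it is not one-to-one:
+</t>
+<sourcecode type="python"><![CDATA[
+def cgi_meta_variable(field_name):
+    """Prefix with "HTTP_" and replace every hyphen with an underscore,
+    as applied to fields that are not standard CGI variables."""
+    return "HTTP_" + field_name.upper().replace("-", "_")
+
+# A received Content-Length is instead exposed as the standard variable
+# CONTENT_LENGTH; the mapping below applies to the non-standard spelling.
+assert cgi_meta_variable("Content_Length") == "HTTP_CONTENT_LENGTH"
+
+# The mapping collapses distinct field names onto one variable name:
+assert cgi_meta_variable("Transfer-Encoding") == "HTTP_TRANSFER_ENCODING"
+assert cgi_meta_variable("Transfer_Encoding") == "HTTP_TRANSFER_ENCODING"
+]]></sourcecode>
+<t>
+  That collision is exactly the ambiguity discussed next.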
+</t> +<t> + Unfortunately, mapping field names to different interface names can lead to + security vulnerabilities if the mapping is incomplete or ambiguous. For example, + if an attacker were to send a field named "Transfer_Encoding", a naive interface + might map that to the same variable name as the "Transfer-Encoding" field, resulting + in a potential request smuggling vulnerability (<xref target="HTTP11" x:rel="#request.smuggling"/>). +</t> +<t> + To mitigate the associated risks, implementations that perform such + mappings are advised to make the mapping unambiguous and complete + for the full range of potential octets received as a name (including those + that are discouraged or forbidden by the HTTP grammar). + For example, a field with an unusual name character might + result in the request being blocked, the specific field being removed, + or the name being passed with a different prefix to distinguish it from + other fields. +</t> +</section> + +<section title="Disclosure of Fragment after Redirects" anchor="fragment.disclosure"> +<t> + Although fragment identifiers used within URI references are not sent + in requests, implementers ought to be aware that they will be visible to + the user agent and any extensions or scripts running as a result of the + response. In particular, when a redirect occurs and the original request's + fragment identifier is inherited by the new reference in + <x:ref>Location</x:ref> (<xref target="field.location"/>), this might + have the effect of disclosing one site's fragment to another site. + If the first site uses personal information in fragments, it ought to + ensure that redirects to other sites include a (possibly empty) fragment + component in order to block that inheritance. +</t> +</section> + +<section title="Disclosure of Product Information" anchor="disclosure.product.information"> +<t> + The <x:ref>User-Agent</x:ref> (<xref target="field.user-agent"/>), + <x:ref>Via</x:ref> (<xref target="field.via"/>), and + <x:ref>Server</x:ref> (<xref target="field.server"/>) header fields often + reveal information about the respective sender's software systems. + In theory, this can make it easier for an attacker to exploit known + security holes; in practice, attackers tend to try all potential holes + regardless of the apparent software versions being used. +</t> +<t> + Proxies that serve as a portal through a network firewall ought to take + special precautions regarding the transfer of header information that might + identify hosts behind the firewall. The <x:ref>Via</x:ref> header field + allows intermediaries to replace sensitive machine names with pseudonyms. +</t> +</section> + +<section title="Browser Fingerprinting" anchor="fingerprinting"> +<t> + Browser fingerprinting is a set of techniques for identifying a specific + user agent over time through its unique set of characteristics. These + characteristics might include information related to how it uses the underlying + transport protocol, + feature capabilities, and scripting environment, though of particular + interest here is the set of unique characteristics that might be + communicated via HTTP. Fingerprinting is considered a privacy concern + because it enables tracking of a user agent's behavior over time + (<xref target="Bujlow"/>) without + the corresponding controls that the user might have over other forms of + data collection (e.g., cookies). Many general-purpose user agents + (i.e., Web browsers) have taken steps to reduce their fingerprints. 
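+</t>
+<t>
+  To make the concern concrete, the following non-normative sketch shows how
+  little is needed to turn a handful of request fields into a stable
+  identifier (the field values are invented for the example):
+</t>
+<sourcecode type="python"><![CDATA[
+import hashlib
+
+def negotiation_fingerprint(headers):
+    """Combine a few request fields into a stable identifier."""
+    parts = [headers.get(name, "") for name in
+             ("user-agent", "accept", "accept-language", "accept-encoding")]
+    return hashlib.sha256("\n".join(parts).encode("utf-8")).hexdigest()[:16]
+
+request = {
+    "user-agent": "ExampleBrowser/1.0 (X11; Linux x86_64)",
+    "accept": "text/html,application/xhtml+xml;q=0.9,*/*;q=0.8",
+    "accept-language": "de-CH, de;q=0.9, en;q=0.4",
+    "accept-encoding": "gzip, deflate, br",
+}
+print(negotiation_fingerprint(request))   # same input, same identifier
+]]></sourcecode>
+<t>
+  The request header fields involved are discussed below.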
+</t> +<t> + There are a number of request header fields that might reveal information + to servers that is sufficiently unique to enable fingerprinting. + The <x:ref>From</x:ref> header field is the most obvious, though it is + expected that From will only be sent when self-identification is desired by + the user. Likewise, Cookie header fields are deliberately designed to + enable re-identification, so fingerprinting concerns only apply to + situations where cookies are disabled or restricted by the user agent's + configuration. +</t> +<t> + The <x:ref>User-Agent</x:ref> header field might contain enough information + to uniquely identify a specific device, usually when combined with other + characteristics, particularly if the user agent sends excessive details + about the user's system or extensions. However, the source of unique + information that is least expected by users is + <x:ref>proactive negotiation</x:ref> (<xref target="proactive.negotiation"/>), + including the <x:ref>Accept</x:ref>, <x:ref>Accept-Charset</x:ref>, + <x:ref>Accept-Encoding</x:ref>, and <x:ref>Accept-Language</x:ref> + header fields. +</t> +<t> + In addition to the fingerprinting concern, detailed use of the + <x:ref>Accept-Language</x:ref> header field can reveal information the + user might consider to be of a private nature. For example, understanding + a given language set might be strongly correlated to membership in a + particular ethnic group. + An approach that limits such loss of privacy would be for a user agent + to omit the sending of Accept-Language except for sites that have been + explicitly permitted, perhaps via interaction after detecting a <x:ref>Vary</x:ref> + header field that indicates language negotiation might be useful. +</t> +<t> + In environments where proxies are used to enhance privacy, user agents + ought to be conservative in sending proactive negotiation header fields. + General-purpose user agents that provide a high degree of header field + configurability ought to inform users about the loss of privacy that might + result if too much detail is provided. As an extreme privacy measure, + proxies could filter the proactive negotiation header fields in relayed + requests. +</t> +</section> + +<section title="Validator Retention" anchor="security.validators"> +<t> + The validators defined by this specification are not intended to ensure + the validity of a representation, guard against malicious changes, or + detect on-path attacks. At best, they enable more efficient cache + updates and optimistic concurrent writes when all participants are behaving + nicely. At worst, the conditions will fail and the client will receive a + response that is no more harmful than an HTTP exchange without conditional + requests. +</t> +<t> + An entity tag can be abused in ways that create privacy risks. For example, + a site might deliberately construct a semantically invalid entity tag that + is unique to the user or user agent, send it in a cacheable response with a + long freshness time, and then read that entity tag in later conditional + requests as a means of re-identifying that user or user agent. Such an + identifying tag would become a persistent identifier for as long as the + user agent retained the original cache entry. User agents that cache + representations ought to ensure that the cache is cleared or replaced + whenever the user performs privacy-maintaining actions, such as clearing + stored cookies or changing to a private browsing mode. 
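+</t>
+<t>
+  The tracking pattern described above can be sketched, non-normatively, as
+  follows; the helper names are hypothetical:
+</t>
+<sourcecode type="python"><![CDATA[
+import secrets
+
+assigned = {}   # visitor key -> entity tag chosen for tracking, not validation
+
+def tracking_response_fields(visitor_key):
+    etag = assigned.setdefault(visitor_key, '"' + secrets.token_hex(8) + '"')
+    return {"ETag": etag, "Cache-Control": "max-age=31536000"}
+
+def visitor_from_conditional(request_fields):
+    """A later If-None-Match echoes the stored tag back to the origin."""
+    tag = request_fields.get("If-None-Match")
+    for key, etag in assigned.items():
+        if etag == tag:
+            return key
+    return None
+
+first = tracking_response_fields("visitor-1")
+assert visitor_from_conditional({"If-None-Match": first["ETag"]}) == "visitor-1"
+]]></sourcecode>
+<t>
+  Clearing the cached entry, as recommended above, is what breaks this chain.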
+</t> +</section> + +<section title="Denial-of-Service Attacks Using Range" anchor="overlapping.ranges"> +<t> + Unconstrained multiple range requests are susceptible to denial-of-service + attacks because the effort required to request many overlapping ranges of + the same data is tiny compared to the time, memory, and bandwidth consumed + by attempting to serve the requested data in many parts. + Servers ought to ignore, coalesce, or reject egregious range requests, such + as requests for more than two overlapping ranges or for many small ranges + in a single set, particularly when the ranges are requested out of order + for no apparent reason. Multipart range requests are not designed to + support random access. +</t> +</section> + +<section title="Authentication Considerations" anchor="security.auth"> +<t> + Everything about the topic of HTTP authentication is a security + consideration, so the list of considerations below is not exhaustive. + Furthermore, it is limited to security considerations regarding the + authentication framework, in general, rather than discussing all of the + potential considerations for specific authentication schemes (which ought + to be documented in the specifications that define those schemes). + Various organizations maintain topical information and links to current + research on Web application security (e.g., <xref target="OWASP"/>), + including common pitfalls for implementing and using the authentication + schemes found in practice. +</t> + +<section title="Confidentiality of Credentials" anchor="confidentiality.of.credentials"> +<t> + The HTTP authentication framework does not define a single mechanism for + maintaining the confidentiality of credentials; instead, each + authentication scheme defines how the credentials are encoded prior to + transmission. While this provides flexibility for the development of future + authentication schemes, it is inadequate for the protection of existing + schemes that provide no confidentiality on their own, or that do not + sufficiently protect against replay attacks. Furthermore, if the server + expects credentials that are specific to each individual user, the exchange + of those credentials will have the effect of identifying that user even if + the content within credentials remains confidential. +</t> +<t> + HTTP depends on the security properties of the underlying transport- or + session-level connection to provide confidential transmission of + fields. Services that depend on individual user authentication require a + <x:ref>secured</x:ref> connection prior to exchanging credentials + (<xref target="https.uri"/>). +</t> +</section> + +<section title="Credentials and Idle Clients" anchor="auth.credentials.and.idle.clients"> +<t> + Existing HTTP clients and user agents typically retain authentication + information indefinitely. HTTP does not provide a mechanism for the + origin server to direct clients to discard these cached credentials, since + the protocol has no awareness of how credentials are obtained or managed + by the user agent. The mechanisms for expiring or revoking credentials can + be specified as part of an authentication scheme definition. 
+</t> +<t> + Circumstances under which credential caching can interfere with the + application's security model include but are not limited to: +</t> +<ul> + <li>Clients that have been idle for an extended period, following + which the server might wish to cause the client to re-prompt the + user for credentials.</li> + <li>Applications that include a session termination indication + (such as a "logout" or "commit" button on a page) after which + the server side of the application "knows" that there is no + further reason for the client to retain the credentials.</li> +</ul> +<t> + User agents that cache credentials are encouraged to provide a readily + accessible mechanism for discarding cached credentials under user control. +</t> +</section> + +<section title="Protection Spaces" anchor="protection.spaces"> +<t> + Authentication schemes that solely rely on the "realm" mechanism for + establishing a protection space will expose credentials to all resources on + an origin server. Clients that have successfully made authenticated requests + with a resource can use the same authentication credentials for other + resources on the same origin server. This makes it possible for a different + resource to harvest authentication credentials for other resources. +</t> +<t> + This is of particular concern when an origin server hosts resources for multiple + parties under the same origin (<xref target="protection.space"/>). + Possible mitigation strategies include restricting direct access to + authentication credentials (i.e., not making the content of the + <x:ref>Authorization</x:ref> request header field available), and separating protection + spaces by using a different host name (or port number) for each party. +</t> +</section> + +<section title="Additional Response Fields" anchor="security.auth.add.resp"> +<t> + Adding information to responses that are sent over an unencrypted + channel can affect security and privacy. The presence of the + <x:ref>Authentication-Info</x:ref> and <x:ref>Proxy-Authentication-Info</x:ref> + header fields alone indicates that HTTP authentication is in use. Additional + information could be exposed by the contents of the authentication-scheme + specific parameters; this will have to be considered in the definitions of these + schemes. +</t> +</section> + +</section> +</section> + +<section title="IANA Considerations" anchor="IANA.considerations"> +<t> + The change controller for the following registrations is: + "IETF (iesg@ietf.org) - Internet Engineering Task Force". +</t> + +<section title="URI Scheme Registration" anchor="uri.scheme.registration"> +<t> + IANA has updated the "Uniform Resource Identifier (URI) Schemes" registry + <xref target="BCP35"/> at + <eref target="https://www.iana.org/assignments/uri-schemes/"/> with the + permanent schemes listed in <xref target="uri.scheme.table"/> in <xref target="uri.schemes"/>. +</t> +</section> + +<section title="Method Registration" anchor="method.registration"> +<t> + IANA has updated the "Hypertext Transfer Protocol (HTTP) Method Registry" at + <eref target="https://www.iana.org/assignments/http-methods"/> with the + registration procedure of <xref target="method.registry"/> and the method + names summarized in the following table. 
+</t> +<?BEGININC build/draft-ietf-httpbis-semantics-latest.iana-methods ?> +<!--AUTOGENERATED FROM extract-method-defs.xslt, do not edit manually--> +<table anchor="iana.method.registration.table"> + <thead> + <tr> + <th>Method</th> + <th>Safe</th> + <th>Idempotent</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>CONNECT</td> + <td>no</td> + <td>no</td> + <td> + <xref target="CONNECT" format="counter"/> + </td> + </tr> + <tr> + <td>DELETE</td> + <td>no</td> + <td>yes</td> + <td> + <xref target="DELETE" format="counter"/> + </td> + </tr> + <tr> + <td>GET</td> + <td>yes</td> + <td>yes</td> + <td> + <xref target="GET" format="counter"/> + </td> + </tr> + <tr> + <td>HEAD</td> + <td>yes</td> + <td>yes</td> + <td> + <xref target="HEAD" format="counter"/> + </td> + </tr> + <tr> + <td>OPTIONS</td> + <td>yes</td> + <td>yes</td> + <td> + <xref target="OPTIONS" format="counter"/> + </td> + </tr> + <tr> + <td>POST</td> + <td>no</td> + <td>no</td> + <td> + <xref target="POST" format="counter"/> + </td> + </tr> + <tr> + <td>PUT</td> + <td>no</td> + <td>yes</td> + <td> + <xref target="PUT" format="counter"/> + </td> + </tr> + <tr> + <td>TRACE</td> + <td>yes</td> + <td>yes</td> + <td> + <xref target="TRACE" format="counter"/> + </td> + </tr> + <tr> + <td>*</td> + <td>no</td> + <td>no</td> + <td> + <xref target="method.registration" format="counter"/> + </td> + </tr> + </tbody> +</table> +<!--(END)--> +<?ENDINC build/draft-ietf-httpbis-semantics-latest.iana-methods ?> +<t> + <iref primary="true" item="Method" subitem="*" x:for-anchor=""/> + The method name "*" is reserved because using "*" as a method name would + conflict with its usage as a wildcard in some fields (e.g., + "Access-Control-Request-Method"). +</t> +</section> + +<section title="Status Code Registration" anchor="status.code.registration"> +<t> + IANA has updated the "Hypertext Transfer Protocol (HTTP) Status Code Registry" + at <eref target="https://www.iana.org/assignments/http-status-codes"/> with + the registration procedure of <xref target="status.code.registry"/> and the + status code values summarized in the following table. 
+</t> +<?BEGININC build/draft-ietf-httpbis-semantics-latest.iana-status-codes ?> +<!--AUTOGENERATED FROM extract-status-code-defs.xslt, do not edit manually--> +<table anchor="iana.status.code.registration.table"> + <thead> + <tr> + <th>Value</th> + <th>Description</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>100</td> + <td>Continue</td> + <td> + <xref target="status.100" format="counter"/> + </td> + </tr> + <tr> + <td>101</td> + <td>Switching Protocols</td> + <td> + <xref target="status.101" format="counter"/> + </td> + </tr> + <tr> + <td>200</td> + <td>OK</td> + <td> + <xref target="status.200" format="counter"/> + </td> + </tr> + <tr> + <td>201</td> + <td>Created</td> + <td> + <xref target="status.201" format="counter"/> + </td> + </tr> + <tr> + <td>202</td> + <td>Accepted</td> + <td> + <xref target="status.202" format="counter"/> + </td> + </tr> + <tr> + <td>203</td> + <td>Non-Authoritative Information</td> + <td> + <xref target="status.203" format="counter"/> + </td> + </tr> + <tr> + <td>204</td> + <td>No Content</td> + <td> + <xref target="status.204" format="counter"/> + </td> + </tr> + <tr> + <td>205</td> + <td>Reset Content</td> + <td> + <xref target="status.205" format="counter"/> + </td> + </tr> + <tr> + <td>206</td> + <td>Partial Content</td> + <td> + <xref target="status.206" format="counter"/> + </td> + </tr> + <tr> + <td>300</td> + <td>Multiple Choices</td> + <td> + <xref target="status.300" format="counter"/> + </td> + </tr> + <tr> + <td>301</td> + <td>Moved Permanently</td> + <td> + <xref target="status.301" format="counter"/> + </td> + </tr> + <tr> + <td>302</td> + <td>Found</td> + <td> + <xref target="status.302" format="counter"/> + </td> + </tr> + <tr> + <td>303</td> + <td>See Other</td> + <td> + <xref target="status.303" format="counter"/> + </td> + </tr> + <tr> + <td>304</td> + <td>Not Modified</td> + <td> + <xref target="status.304" format="counter"/> + </td> + </tr> + <tr> + <td>305</td> + <td>Use Proxy</td> + <td> + <xref target="status.305" format="counter"/> + </td> + </tr> + <tr> + <td>306</td> + <td>(Unused)</td> + <td> + <xref target="status.306" format="counter"/> + </td> + </tr> + <tr> + <td>307</td> + <td>Temporary Redirect</td> + <td> + <xref target="status.307" format="counter"/> + </td> + </tr> + <tr> + <td>308</td> + <td>Permanent Redirect</td> + <td> + <xref target="status.308" format="counter"/> + </td> + </tr> + <tr> + <td>400</td> + <td>Bad Request</td> + <td> + <xref target="status.400" format="counter"/> + </td> + </tr> + <tr> + <td>401</td> + <td>Unauthorized</td> + <td> + <xref target="status.401" format="counter"/> + </td> + </tr> + <tr> + <td>402</td> + <td>Payment Required</td> + <td> + <xref target="status.402" format="counter"/> + </td> + </tr> + <tr> + <td>403</td> + <td>Forbidden</td> + <td> + <xref target="status.403" format="counter"/> + </td> + </tr> + <tr> + <td>404</td> + <td>Not Found</td> + <td> + <xref target="status.404" format="counter"/> + </td> + </tr> + <tr> + <td>405</td> + <td>Method Not Allowed</td> + <td> + <xref target="status.405" format="counter"/> + </td> + </tr> + <tr> + <td>406</td> + <td>Not Acceptable</td> + <td> + <xref target="status.406" format="counter"/> + </td> + </tr> + <tr> + <td>407</td> + <td>Proxy Authentication Required</td> + <td> + <xref target="status.407" format="counter"/> + </td> + </tr> + <tr> + <td>408</td> + <td>Request Timeout</td> + <td> + <xref target="status.408" format="counter"/> + </td> + </tr> + <tr> + <td>409</td> + <td>Conflict</td> + <td> + <xref 
target="status.409" format="counter"/> + </td> + </tr> + <tr> + <td>410</td> + <td>Gone</td> + <td> + <xref target="status.410" format="counter"/> + </td> + </tr> + <tr> + <td>411</td> + <td>Length Required</td> + <td> + <xref target="status.411" format="counter"/> + </td> + </tr> + <tr> + <td>412</td> + <td>Precondition Failed</td> + <td> + <xref target="status.412" format="counter"/> + </td> + </tr> + <tr> + <td>413</td> + <td>Content Too Large</td> + <td> + <xref target="status.413" format="counter"/> + </td> + </tr> + <tr> + <td>414</td> + <td>URI Too Long</td> + <td> + <xref target="status.414" format="counter"/> + </td> + </tr> + <tr> + <td>415</td> + <td>Unsupported Media Type</td> + <td> + <xref target="status.415" format="counter"/> + </td> + </tr> + <tr> + <td>416</td> + <td>Range Not Satisfiable</td> + <td> + <xref target="status.416" format="counter"/> + </td> + </tr> + <tr> + <td>417</td> + <td>Expectation Failed</td> + <td> + <xref target="status.417" format="counter"/> + </td> + </tr> + <tr> + <td>418</td> + <td>(Unused)</td> + <td> + <xref target="status.418" format="counter"/> + </td> + </tr> + <tr> + <td>421</td> + <td>Misdirected Request</td> + <td> + <xref target="status.421" format="counter"/> + </td> + </tr> + <tr> + <td>422</td> + <td>Unprocessable Content</td> + <td> + <xref target="status.422" format="counter"/> + </td> + </tr> + <tr> + <td>426</td> + <td>Upgrade Required</td> + <td> + <xref target="status.426" format="counter"/> + </td> + </tr> + <tr> + <td>500</td> + <td>Internal Server Error</td> + <td> + <xref target="status.500" format="counter"/> + </td> + </tr> + <tr> + <td>501</td> + <td>Not Implemented</td> + <td> + <xref target="status.501" format="counter"/> + </td> + </tr> + <tr> + <td>502</td> + <td>Bad Gateway</td> + <td> + <xref target="status.502" format="counter"/> + </td> + </tr> + <tr> + <td>503</td> + <td>Service Unavailable</td> + <td> + <xref target="status.503" format="counter"/> + </td> + </tr> + <tr> + <td>504</td> + <td>Gateway Timeout</td> + <td> + <xref target="status.504" format="counter"/> + </td> + </tr> + <tr> + <td>505</td> + <td>HTTP Version Not Supported</td> + <td> + <xref target="status.505" format="counter"/> + </td> + </tr> + </tbody> +</table> +<!--(END)--> +<?ENDINC build/draft-ietf-httpbis-semantics-latest.iana-status-codes ?> +</section> + +<section title="Field Name Registration" anchor="field.name.registration"> +<t> + This specification updates the HTTP-related aspects of the existing + registration procedures for message header fields defined in <xref target="RFC3864"/>. + It replaces the old procedures as they relate to HTTP by defining a new + registration procedure and moving HTTP field definitions into a separate + registry. +</t> +<t> + IANA has created a new registry titled "Hypertext Transfer Protocol (HTTP) + Field Name Registry" as outlined in <xref target="fields.registry"/>. 
+</t> +<t> + IANA has moved all entries in the "Permanent Message Header Field + Names" and "Provisional Message Header Field Names" registries (see + <eref target="https://www.iana.org/assignments/message-headers/"/>) with the + protocol 'http' to this registry and has applied the following changes: +</t> +<ol> + <li>The 'Applicable Protocol' field has been omitted.</li> + <li>Entries that had a status of 'standard', 'experimental', 'reserved', or + 'informational' have been made to have a status of 'permanent'.</li> + <li>Provisional entries without a status have been made to have a status of + 'provisional'.</li> + <li>Permanent entries without a status (after confirmation that the + registration document did not define one) have been made to have a status of + 'provisional'. The expert(s) can choose to update the entries' status if there is + evidence that another is more appropriate.</li> +</ol> +<t> + IANA has annotated the "Permanent Message Header Field + Names" and "Provisional Message Header Field Names" registries with the + following note to indicate that HTTP field name registrations have moved: +</t> +<aside> + <t><strong>Note</strong></t> + <t> + HTTP field name registrations have been moved to + [<eref target="https://www.iana.org/assignments/http-fields" brackets="none"/>] per + [RFC9110]. + </t> +</aside> +<t> + IANA has updated the "Hypertext Transfer Protocol (HTTP) Field Name Registry" + with the field names listed in the following table. +</t> +<?BEGININC build/draft-ietf-httpbis-semantics-latest.iana-headers ?> +<!--AUTOGENERATED FROM extract-header-defs.xslt, do not edit manually--> +<table align="left" anchor="iana.header.registration.table"> + <thead> + <tr> + <th>Field Name</th> + <th>Status</th> + <th>Section</th> + <th>Comments</th> + </tr> + </thead> + <tbody> + <tr> + <td>Accept</td> + <td>permanent</td> + <td> + <xref target="field.accept" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Accept-Charset</td> + <td>deprecated</td> + <td> + <xref target="field.accept-charset" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Accept-Encoding</td> + <td>permanent</td> + <td> + <xref target="field.accept-encoding" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Accept-Language</td> + <td>permanent</td> + <td> + <xref target="field.accept-language" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Accept-Ranges</td> + <td>permanent</td> + <td> + <xref target="field.accept-ranges" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Allow</td> + <td>permanent</td> + <td> + <xref target="field.allow" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Authentication-Info</td> + <td>permanent</td> + <td> + <xref target="field.authentication-info" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Authorization</td> + <td>permanent</td> + <td> + <xref target="field.authorization" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Connection</td> + <td>permanent</td> + <td> + <xref target="field.connection" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Content-Encoding</td> + <td>permanent</td> + <td> + <xref target="field.content-encoding" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Content-Language</td> + <td>permanent</td> + <td> + <xref target="field.content-language" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Content-Length</td> + <td>permanent</td> + <td> + <xref target="field.content-length" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Content-Location</td> + 
<td>permanent</td> + <td> + <xref target="field.content-location" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Content-Range</td> + <td>permanent</td> + <td> + <xref target="field.content-range" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Content-Type</td> + <td>permanent</td> + <td> + <xref target="field.content-type" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Date</td> + <td>permanent</td> + <td> + <xref target="field.date" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>ETag</td> + <td>permanent</td> + <td> + <xref target="field.etag" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Expect</td> + <td>permanent</td> + <td> + <xref target="field.expect" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>From</td> + <td>permanent</td> + <td> + <xref target="field.from" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Host</td> + <td>permanent</td> + <td> + <xref target="field.host" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>If-Match</td> + <td>permanent</td> + <td> + <xref target="field.if-match" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>If-Modified-Since</td> + <td>permanent</td> + <td> + <xref target="field.if-modified-since" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>If-None-Match</td> + <td>permanent</td> + <td> + <xref target="field.if-none-match" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>If-Range</td> + <td>permanent</td> + <td> + <xref target="field.if-range" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>If-Unmodified-Since</td> + <td>permanent</td> + <td> + <xref target="field.if-unmodified-since" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Last-Modified</td> + <td>permanent</td> + <td> + <xref target="field.last-modified" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Location</td> + <td>permanent</td> + <td> + <xref target="field.location" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Max-Forwards</td> + <td>permanent</td> + <td> + <xref target="field.max-forwards" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Proxy-Authenticate</td> + <td>permanent</td> + <td> + <xref target="field.proxy-authenticate" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Proxy-Authentication-Info</td> + <td>permanent</td> + <td> + <xref target="field.proxy-authentication-info" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Proxy-Authorization</td> + <td>permanent</td> + <td> + <xref target="field.proxy-authorization" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Range</td> + <td>permanent</td> + <td> + <xref target="field.range" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Referer</td> + <td>permanent</td> + <td> + <xref target="field.referer" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Retry-After</td> + <td>permanent</td> + <td> + <xref target="field.retry-after" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Server</td> + <td>permanent</td> + <td> + <xref target="field.server" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>TE</td> + <td>permanent</td> + <td> + <xref target="field.te" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Trailer</td> + <td>permanent</td> + <td> + <xref target="field.trailer" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Upgrade</td> + <td>permanent</td> + <td> + <xref target="field.upgrade" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>User-Agent</td> + <td>permanent</td> + <td> + <xref target="field.user-agent" 
format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Vary</td> + <td>permanent</td> + <td> + <xref target="field.vary" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Via</td> + <td>permanent</td> + <td> + <xref target="field.via" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>WWW-Authenticate</td> + <td>permanent</td> + <td> + <xref target="field.www-authenticate" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>*</td> + <td>permanent</td> + <td> + <xref target="field.vary" format="counter"/> + </td> + <td>(reserved)</td> + </tr> + </tbody> +</table> +<!--(END)--> +<?ENDINC build/draft-ietf-httpbis-semantics-latest.iana-headers ?> +<t anchor="field.asterisk"> + <iref primary="true" item="Fields" subitem="*" x:for-anchor=""/> + <rdf:Description> + <comments xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">(reserved)</comments> + <ref xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">field.vary</ref> + </rdf:Description> + The field name "*" is reserved because using that name as + an HTTP header field might conflict with its special semantics in the + <x:ref>Vary</x:ref> header field (<xref target="field.vary"/>). +</t> +<t> + <iref primary="true" item="Fields" subitem="Content-MD5" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Content-MD5" x:for-anchor=""/><iref primary="true" item="Content-MD5 header field"/> + <rdf:Description> + <hide-in-iana-table xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">yes</hide-in-iana-table> + </rdf:Description> + IANA has updated the "Content-MD5" entry in the new registry to have + a status of 'obsoleted' with references to + <xref target="RFC2616" x:fmt="of" x:sec="14.15"/> (for the definition + of the header field) and + <xref target="RFC7231" x:fmt="of" x:sec="B"/> (which removed the field + definition from the updated specification). +</t> +</section> + +<section title="Authentication Scheme Registration" anchor="auth.scheme.registration"> +<t> + IANA has updated the + "Hypertext Transfer Protocol (HTTP) Authentication Scheme Registry" + at <eref target="https://www.iana.org/assignments/http-authschemes"/> with + the registration procedure of <xref target="auth.scheme.registry"/>. + No authentication schemes are defined in this document. +</t> +</section> + +<section title="Content Coding Registration" anchor="content.coding.registration"> +<t> + IANA has updated the "HTTP Content Coding Registry" at + <eref target="https://www.iana.org/assignments/http-parameters/"/> + with the registration procedure of <xref target="content.coding.registry"/> + and the content coding names summarized in the table below. 
+</t> +<table align="left" anchor="iana.content.coding.registration.table"> + <thead> + <tr> + <th>Name</th> + <th>Description</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>compress</td> + <td>UNIX "compress" data format <xref target="Welch"/></td> + <td><xref target="compress.coding" format="counter"/></td> + </tr> + <tr> + <td>deflate</td> + <td>"deflate" compressed data (<xref target="RFC1951"/>) inside + the "zlib" data format (<xref target="RFC1950"/>)</td> + <td><xref target="deflate.coding" format="counter"/></td> + </tr> + <tr> + <td>gzip</td> + <td>GZIP file format <xref target="RFC1952"/></td> + <td><xref target="gzip.coding" format="counter"/></td> + </tr> + <tr> + <td>identity</td> + <td>Reserved</td> + <td><xref target="field.accept-encoding" format="counter"/></td> + </tr> + <tr> + <td>x-compress</td> + <td>Deprecated (alias for compress)</td> + <td><xref target="compress.coding" format="counter"/></td> + </tr> + <tr> + <td>x-gzip</td> + <td>Deprecated (alias for gzip)</td> + <td><xref target="gzip.coding" format="counter"/></td> + </tr> + </tbody> +</table> +</section> + +<section title="Range Unit Registration" anchor="range.unit.registration"> +<t> + IANA has updated the "HTTP Range Unit Registry" at + <eref target="https://www.iana.org/assignments/http-parameters/"/> + with the registration procedure of <xref target="range.unit.registry"/> + and the range unit names summarized in the table below. +</t> +<table align="left" anchor="iana.range.units.table"> + <thead> + <tr> + <th>Range Unit Name</th> + <th>Description</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>bytes</td> + <td>a range of octets</td> + <td><xref target="byte.ranges" format="counter"/></td> + </tr> + <tr> + <td>none</td> + <td>reserved as keyword to indicate range requests are not supported</td> + <td><xref target="field.accept-ranges" format="counter"/></td> + </tr> + </tbody> +</table> +</section> + +<section title="Media Type Registration" anchor="media.type.reg"> +<t> + IANA has updated the "Media Types" registry at + <eref target="https://www.iana.org/assignments/media-types"/> + with the registration information in + <xref target="multipart.byteranges"/> + for the media type "multipart/byteranges". +</t> +<t> + IANA has updated the registry note about "q" parameters with + a link to <xref target="field.accept"/> of this document. +</t> +</section> + +<section title="Port Registration" anchor="port.reg"> +<t> + IANA has updated the "Service Name and Transport Protocol Port Number + Registry" at <eref target="https://www.iana.org/assignments/service-names-port-numbers/"/> + for the services on ports 80 and 443 that use UDP or TCP to: +</t> +<ol> + <li>use this document as "Reference", and</li> + <li>when currently unspecified, set "Assignee" to "IESG" and "Contact" to + "IETF_Chair".</li> +</ol> +</section> + +<section title="Upgrade Token Registration" anchor="upgrade.token.registration"> +<t> + IANA has updated the + "Hypertext Transfer Protocol (HTTP) Upgrade Token Registry" at + <eref target="https://www.iana.org/assignments/http-upgrade-tokens"/> + with the registration procedure described in <xref target="upgrade.token.registry"/> + and the upgrade token names summarized in the following table. 
+</t> +<table align="left"> + <thead> + <tr> + <th>Name</th> + <th>Description</th> + <th>Expected Version Tokens</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>HTTP</td> + <td>Hypertext Transfer Protocol</td> + <td>any DIGIT.DIGIT (e.g., "2.0")</td> + <td><xref target="protocol.version" format="counter"/></td> + </tr> + </tbody> +</table> +</section> +</section> +</middle> +<back> +<displayreference target="HTTP10" to="HTTP/1.0"/> +<displayreference target="HTTP11" to="HTTP/1.1"/> +<displayreference target="HTTP2" to="HTTP/2"/> +<displayreference target="HTTP3" to="HTTP/3"/> + +<references title="Normative References"> + +<reference anchor="CACHING"> + <x:source href="rfc9111.xml" basename="rfc9111"/> +</reference> + +<reference anchor="RFC2046"> + <front> + <title abbrev="Media Types">Multipurpose Internet Mail Extensions (MIME) Part Two: Media Types</title> + <author initials="N." surname="Freed" fullname="Ned Freed"/> + <author initials="N." surname="Borenstein" fullname="Nathaniel S. Borenstein"/> + <date month="November" year="1996"/> + </front> + <seriesInfo name="RFC" value="2046"/> +</reference> + +<reference anchor="RFC2119"> + <front> + <title>Key words for use in RFCs to Indicate Requirement Levels</title> + <author initials="S." surname="Bradner" fullname="Scott Bradner"/> + <date month="March" year="1997"/> + </front> + <seriesInfo name="BCP" value="14"/> + <seriesInfo name="RFC" value="2119"/> +</reference> + +<reference anchor="RFC8174"> + <front> + <title>Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words</title> + <author initials="B." surname="Leiba" fullname="Barry Leiba"/> + <date year="2017" month="May"/> + </front> + <seriesInfo name="BCP" value="14"/> + <seriesInfo name="RFC" value="8174"/> +</reference> + +<reference anchor="URI"> + <front> + <title abbrev="URI Generic Syntax">Uniform Resource Identifier (URI): Generic Syntax</title> + <author initials="T." surname="Berners-Lee" fullname="Tim Berners-Lee"/> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="L." surname="Masinter" fullname="Larry Masinter"/> + <date month="January" year="2005"/> + </front> + <seriesInfo name="STD" value="66"/> + <seriesInfo name="RFC" value="3986"/> +</reference> + +<reference anchor="TCP"> + <front> + <title>Transmission Control Protocol</title> + <author initials="J." surname="Postel" fullname="Jon Postel"/> + <date year="1981" month="September"/> + </front> + <seriesInfo name="STD" value="7"/> + <seriesInfo name="RFC" value="793"/> +</reference> + +<reference anchor="RFC4647"> + <front> + <title>Matching of Language Tags</title> + <author initials="A." surname="Phillips" fullname="Addison Phillips" role="editor"/> + <author initials="M." surname="Davis" fullname="Mark Davis" role="editor"/> + <date year="2006" month="September"/> + </front> + <seriesInfo name="BCP" value="47"/> + <seriesInfo name="RFC" value="4647"/> +</reference> + +<reference anchor="RFC4648"> + <front> + <title>The Base16, Base32, and Base64 Data Encodings</title> + <author fullname="S. Josefsson" initials="S." surname="Josefsson"/> + <date year="2006" month="October"/> + </front> + <seriesInfo value="4648" name="RFC"/> +</reference> + +<reference anchor="RFC5234"> + <front> + <title abbrev="ABNF for Syntax Specifications">Augmented BNF for Syntax Specifications: ABNF</title> + <author initials="D." surname="Crocker" fullname="Dave Crocker" role="editor"/> + <author initials="P." 
surname="Overell" fullname="Paul Overell"/> + <date month="January" year="2008"/> + </front> + <seriesInfo name="STD" value="68"/> + <seriesInfo name="RFC" value="5234"/> +</reference> + +<reference anchor="RFC5322"> + <front> + <title>Internet Message Format</title> + <author initials="P." surname="Resnick" fullname="P. Resnick" role="editor"/> + <date year="2008" month="October"/> + </front> + <seriesInfo name="RFC" value="5322"/> +</reference> + +<reference anchor="RFC5646"> + <front> + <title>Tags for Identifying Languages</title> + <author initials="A." surname="Phillips" fullname="Addison Phillips" role="editor"/> + <author initials="M." surname="Davis" fullname="Mark Davis" role="editor"/> + <date month="September" year="2009"/> + </front> + <seriesInfo name="BCP" value="47"/> + <seriesInfo name="RFC" value="5646"/> +</reference> + +<reference anchor="RFC6125"> + <front> + <title>Representation and Verification of Domain-Based Application Service Identity within Internet Public Key Infrastructure Using X.509 (PKIX) Certificates in the Context of Transport Layer Security (TLS)</title> + <author initials="P." surname="Saint-Andre" fullname="P. Saint-Andre"/> + <author initials="J." surname="Hodges" fullname="J. Hodges"/> + <date year="2011" month="March"/> + </front> + <seriesInfo name="RFC" value="6125"/> +</reference> + +<reference anchor="RFC6365"> + <front> + <title>Terminology Used in Internationalization in the IETF</title> + <author initials="P." surname="Hoffman" fullname="P. Hoffman"/> + <author initials="J." surname="Klensin" fullname="J. Klensin"/> + <date year="2011" month="September"/> + </front> + <seriesInfo name="BCP" value="166"/> + <seriesInfo name="RFC" value="6365"/> +</reference> + +<reference anchor="RFC7405"> + <front> + <title>Case-Sensitive String Support in ABNF</title> + <author initials="P." surname="Kyzivat" fullname="Dave Kyzivat"/> + <date month="December" year="2014"/> + </front> + <seriesInfo name="RFC" value="7405"/> +</reference> + +<reference anchor="TLS13"> + <front> + <title>The Transport Layer Security (TLS) Protocol Version 1.3</title> + <author initials="E." surname="Rescorla" fullname="Eric Rescorla"/> + <date year="2018" month="August"/> + </front> + <seriesInfo name="RFC" value="8446"/> +</reference> + +<reference anchor="USASCII"> + <front> + <title>Coded Character Set -- 7-bit American Standard Code for Information Interchange</title> + <author> + <organization>American National Standards Institute</organization> + </author> + <date year="1986"/> + </front> + <seriesInfo name="ANSI" value="X3.4"/> +</reference> + +<reference anchor="RFC1950"> + <front> + <title>ZLIB Compressed Data Format Specification version 3.3</title> + <author initials="P." surname="Deutsch" fullname="L. Peter Deutsch"/> + <author initials="J-L." surname="Gailly" fullname="Jean-Loup Gailly"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1950"/> +</reference> + +<reference anchor="RFC1951"> + <front> + <title>DEFLATE Compressed Data Format Specification version 1.3</title> + <author initials="P." surname="Deutsch" fullname="L. Peter Deutsch"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1951"/> +</reference> + +<reference anchor="RFC1952"> + <front> + <title>GZIP file format specification version 4.3</title> + <author initials="P." surname="Deutsch" fullname="L. 
Peter Deutsch"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1952"/> +</reference> + +<reference anchor="Welch" target="https://ieeexplore.ieee.org/document/1659158/"> + <front> + <title>A Technique for High-Performance Data Compression</title> + <author initials="T." surname="Welch" fullname="Terry A. Welch"/> + <date month="June" year="1984"/> + </front> + <seriesInfo name="IEEE Computer" value="17(6)"/> + <seriesInfo name="DOI" value="10.1109/MC.1984.1659158"/> +</reference> + +<reference anchor="RFC5280"> + <front> + <title>Internet X.509 Public Key Infrastructure Certificate and + Certificate Revocation List (CRL) Profile</title> + <author initials="D." surname="Cooper" fullname="D. Cooper"/> + <author initials="S." surname="Santesson" fullname="S. Santesson"/> + <author initials="S." surname="Farrell" fullname="S. Farrell"/> + <author initials="S." surname="Boeyen" fullname="S. Boeyen"/> + <author initials="R." surname="Housley" fullname="R. Housley"/> + <author initials="W." surname="Polk" fullname="W. Polk"/> + <date year="2008" month="May"/> + </front> + <seriesInfo name="RFC" value="5280"/> +</reference> + +</references> + +<references title="Informative References"> + +<reference anchor="HTTP11"> + <x:source href="rfc9112.xml" basename="rfc9112"/> +</reference> + +<reference anchor="Err1912" target="https://www.rfc-editor.org/errata/eid1912" quoteTitle="false"> + <front> + <title>Erratum ID 1912</title> + <author> + <organization>RFC Errata</organization> + </author> + <date/> + </front> + <refcontent>RFC 2978</refcontent> +</reference> + +<reference anchor="Err5433" target="https://www.rfc-editor.org/errata/eid5433" quoteTitle="false"> + <front> + <title>Erratum ID 5433</title> + <author> + <organization>RFC Errata</organization> + </author> + <date/> + </front> + <refcontent>RFC 2978</refcontent> +</reference> + +<reference anchor="BREACH" target="http://breachattack.com/resources/BREACH%20-%20SSL,%20gone%20in%2030%20seconds.pdf"> + <front> + <title>BREACH: Reviving the CRIME Attack</title> + <author initials="Y." surname="Gluck" fullname="Yoel Gluck"/> + <author initials="N." surname="Harris" fullname="Neal Harris"/> + <author initials="A." surname="Prado" fullname="Angelo Prado"/> + <date year="2013" month="July"/> + </front> +</reference> + +<reference anchor="Bujlow"> + <front> + <title>A Survey on Web Tracking: Mechanisms, Implications, and Defenses</title> + <author initials="T." surname="Bujlow" fullname="Tomasz Bujlow"/> + <author initials="V." surname="Carela-Español" fullname="Valentin Carela-Español"/> + <author initials="J." surname="Solé-Pareta" fullname="Josep Solé-Pareta"/> + <author initials="P." surname="Barlet-Ros" fullname="Pere Barlet-Ros"/> + <date year="2017" month="August"/> + </front> + <seriesInfo name="DOI" value="10.1109/JPROC.2016.2637878"/> + <refcontent>In Proceedings of the IEEE 105(8)</refcontent> +</reference> + +<reference anchor="Georgiev"> + <front> + <title>The Most Dangerous Code in the World: Validating SSL Certificates in Non-Browser Software</title> + <author initials="M." surname="Georgiev" fullname="Martin Georgiev"/> + <author initials="S." surname="Iyengar" fullname="Subodh Iyengar"/> + <author initials="S." surname="Jana" fullname="Suman Jana"/> + <author initials="R." surname="Anubhai" fullname="Rishita Anubhai"/> + <author initials="D." surname="Boneh" fullname="Dan Boneh"/> + <author initials="V." 
surname="Shmatikov" fullname="Vitaly Shmatikov"/> + <date year="2012" month="October"/> + </front> + <seriesInfo name="DOI" value="10.1145/2382196.2382204"/> + <refcontent>In Proceedings of the 2012 ACM Conference on Computer and Communications Security (CCS '12), pp. 38-49</refcontent> +</reference> + +<reference anchor="ISO-8859-1"> + <front> + <title> + Information technology -- 8-bit single-byte coded graphic character sets -- Part 1: Latin alphabet No. 1 + </title> + <author> + <organization>International Organization for Standardization</organization> + </author> + <date year="1998"/> + </front> + <seriesInfo name="ISO/IEC" value="8859-1:1998"/> +</reference> + +<reference anchor="Kri2001" target="http://arxiv.org/abs/cs.SE/0105018"> + <front> + <title>HTTP Cookies: Standards, Privacy, and Politics</title> + <author initials="D." surname="Kristol" fullname="David M. Kristol"/> + <date year="2001" month="November"/> + </front> + <seriesInfo name="ACM Transactions on Internet Technology" value="1(2)"/> +</reference> + +<reference anchor="Sniffing" target="https://mimesniff.spec.whatwg.org"> + <front> + <title>MIME Sniffing</title> + <author> + <organization>WHATWG</organization> + </author> + <date/> + </front> +</reference> + +<reference anchor="REST" target="https://roy.gbiv.com/pubs/dissertation/top.htm"> + <front> + <title>Architectural Styles and the Design of Network-based Software Architectures</title> + <author initials="R.T." surname="Fielding" fullname="Roy T. Fielding"/> + <date month="September" year="2000"/> + </front> + <refcontent>Doctoral Dissertation, University of California, Irvine</refcontent> +</reference> + +<reference anchor="RFC1919"> + <front> + <title>Classical versus Transparent IP Proxies</title> + <author initials="M." surname="Chatel" fullname="Marc Chatel"/> + <date year="1996" month="March"/> + </front> + <seriesInfo name="RFC" value="1919"/> +</reference> + +<reference anchor="HTTP10"> + <front> + <title abbrev="HTTP/1.0">Hypertext Transfer Protocol -- HTTP/1.0</title> + <author initials="T." surname="Berners-Lee" fullname="Tim Berners-Lee"/> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="H." surname="Frystyk" fullname="Henrik Frystyk Nielsen"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1945"/> +</reference> + +<reference anchor="RFC2047"> + <front> + <title abbrev="Message Header Extensions">MIME (Multipurpose Internet Mail Extensions) Part Three: Message Header Extensions for Non-ASCII Text</title> + <author initials="K." surname="Moore" fullname="Keith Moore"/> + <date month="November" year="1996"/> + </front> + <seriesInfo name="RFC" value="2047"/> +</reference> + +<reference anchor="RFC2068"> + <front> + <title>Hypertext Transfer Protocol -- HTTP/1.1</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="J." surname="Gettys" fullname="Jim Gettys"/> + <author initials="J." surname="Mogul" fullname="Jeffrey C. Mogul"/> + <author initials="H." surname="Frystyk" fullname="Henrik Frystyk Nielsen"/> + <author initials="T." surname="Berners-Lee" fullname="Tim Berners-Lee"/> + <date month="January" year="1997"/> + </front> + <seriesInfo name="RFC" value="2068"/> +</reference> + +<reference anchor="RFC2145"> + <front> + <title abbrev="HTTP Version Numbers">Use and Interpretation of HTTP Version Numbers</title> + <author initials="J. C." surname="Mogul" fullname="Jeffrey C. Mogul"/> + <author initials="R." 
surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="J." surname="Gettys" fullname="Jim Gettys"/> + <author initials="H." surname="Frystyk" fullname="Henrik Frystyk Nielsen"/> + <date month="May" year="1997"/> + </front> + <seriesInfo name="RFC" value="2145"/> +</reference> + +<reference anchor="RFC2295"> + <front> + <title abbrev="HTTP Content Negotiation">Transparent Content Negotiation in HTTP</title> + <author initials="K." surname="Holtman" fullname="Koen Holtman"/> + <author initials="A." surname="Mutz" fullname="Andrew H. Mutz"/> + <date year="1998" month="March"/> + </front> + <seriesInfo name="RFC" value="2295"/> +</reference> + +<reference anchor="RFC2324"> + <front> + <title>Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)</title> + <author initials="L." surname="Masinter" fullname="L. Masinter"/> + <date year="1998" month="April" day="1" x:include-day="true"/> + </front> + <seriesInfo name="RFC" value="2324"/> +</reference> + +<reference anchor="RFC2557"> + <front> + <title abbrev="MIME Encapsulation of Aggregate Documents">MIME Encapsulation of Aggregate Documents, such as HTML (MHTML)</title> + <author initials="J." surname="Palme" fullname="Jacob Palme"/> + <author initials="A." surname="Hopmann" fullname="Alex Hopmann"/> + <author initials="N." surname="Shelness" fullname="Nick Shelness"/> + <date year="1999" month="March"/> + </front> + <seriesInfo name="RFC" value="2557"/> +</reference> + +<reference anchor="RFC2616"> + <front> + <title>Hypertext Transfer Protocol -- HTTP/1.1</title> + <author initials="R." surname="Fielding" fullname="R. Fielding"/> + <author initials="J." surname="Gettys" fullname="J. Gettys"/> + <author initials="J." surname="Mogul" fullname="J. Mogul"/> + <author initials="H." surname="Frystyk" fullname="H. Frystyk"/> + <author initials="L." surname="Masinter" fullname="L. Masinter"/> + <author initials="P." surname="Leach" fullname="P. Leach"/> + <author initials="T." surname="Berners-Lee" fullname="T. Berners-Lee"/> + <date month="June" year="1999"/> + </front> + <seriesInfo name="RFC" value="2616"/> +</reference> + +<reference anchor="RFC2617"> + <front> + <title abbrev="HTTP Authentication">HTTP Authentication: Basic and Digest Access Authentication</title> + <author initials="J." surname="Franks" fullname="John Franks"/> + <author initials="P." surname="Hallam-Baker" fullname="Phillip M. Hallam-Baker"/> + <author initials="J." surname="Hostetler" fullname="Jeffery L. Hostetler"/> + <author initials="S." surname="Lawrence" fullname="Scott D. Lawrence"/> + <author initials="P." surname="Leach" fullname="Paul J. Leach"/> + <author initials="A." surname="Luotonen" fullname="Ari Luotonen"/> + <author initials="L." surname="Stewart" fullname="Lawrence C. Stewart"/> + <date month="June" year="1999"/> + </front> + <seriesInfo name="RFC" value="2617"/> +</reference> + +<reference anchor="RFC2774"> + <front> + <title>An HTTP Extension Framework</title> + <author initials="H." surname="Nielsen" fullname="Henrik Frystyk Nielsen"/> + <author initials="P." surname="Leach" fullname="Paul J. Leach"/> + <author initials="S." surname="Lawrence" fullname="Scott Lawrence"/> + <date year="2000" month="February"/> + </front> + <seriesInfo name="RFC" value="2774"/> +</reference> + +<reference anchor="RFC2818"> + <front> + <title>HTTP Over TLS</title> + <author initials="E." 
surname="Rescorla" fullname="Eric Rescorla"/> + <date year="2000" month="May"/> + </front> + <seriesInfo name="RFC" value="2818"/> +</reference> + +<reference anchor="RFC2978"> + <front> + <title>IANA Charset Registration Procedures</title> + <author initials="N." surname="Freed" fullname="N. Freed"/> + <author initials="J." surname="Postel" fullname="J. Postel"/> + <date year="2000" month="October"/> + </front> + <seriesInfo name="BCP" value="19"/> + <seriesInfo name="RFC" value="2978"/> +</reference> + +<reference anchor="RFC3040"> + <front> + <title>Internet Web Replication and Caching Taxonomy</title> + <author initials="I." surname="Cooper" fullname="I. Cooper"/> + <author initials="I." surname="Melve" fullname="I. Melve"/> + <author initials="G." surname="Tomlinson" fullname="G. Tomlinson"/> + <date year="2001" month="January"/> + </front> + <seriesInfo name="RFC" value="3040"/> +</reference> + +<reference anchor="RFC4033"> + <front> + <title>DNS Security Introduction and Requirements</title> + <author initials="R." surname="Arends" fullname="R. Arends"/> + <author initials="R." surname="Austein" fullname="R. Austein"/> + <author initials="M." surname="Larson" fullname="M. Larson"/> + <author initials="D." surname="Massey" fullname="D. Massey"/> + <author initials="S." surname="Rose" fullname="S. Rose"/> + <date year="2005" month="March"/> + </front> + <seriesInfo name="RFC" value="4033"/> +</reference> + +<reference anchor="RFC4559"> + <front> + <title>SPNEGO-based Kerberos and NTLM HTTP Authentication in Microsoft Windows</title> + <author initials="K." surname="Jaganathan" fullname="K. Jaganathan"/> + <author initials="L." surname="Zhu" fullname="L. Zhu"/> + <author initials="J." surname="Brezak" fullname="J. Brezak"/> + <date year="2006" month="June"/> + </front> + <seriesInfo name="RFC" value="4559"/> +</reference> + +<reference anchor="WEBDAV"> + <front> + <title>HTTP Extensions for Web Distributed Authoring and Versioning (WebDAV)</title> + <author initials="L." surname="Dusseault" fullname="Lisa Dusseault" role="editor"/> + <date month="June" year="2007"/> + </front> + <seriesInfo name="RFC" value="4918"/> +</reference> + +<reference anchor="HTTP2"> + <front> + <title>HTTP/2</title> + <author initials="M." surname="Thomson" fullname="Martin Thomson" role="editor"/> + <author initials="C." surname="Benfield" fullname="Cory Benfield" role="editor"/> + <date year="2022" month="June"/> + </front> + <seriesInfo name="RFC" value="9113"/> +</reference> + +<reference anchor="HPACK"> + <front> + <title>HPACK: Header Compression for HTTP/2</title> + <author initials="R." surname="Peon" fullname="R. Peon"/> + <author initials="H." surname="Ruellan" fullname="H. Ruellan"/> + <date year="2015" month="May"/> + </front> + <seriesInfo name="RFC" value="7541"/> +</reference> + +<reference anchor="RFC5905"> + <front> + <title>Network Time Protocol Version 4: Protocol and Algorithms Specification</title> + <author initials="D." surname="Mills" fullname="David L. Mills"/> + <author initials="J." surname="Martin" fullname="Jim Martin" role="editor"/> + <author initials="J." surname="Burbank" fullname="Jack Burbank"/> + <author initials="W." surname="Kasch" fullname="William Kasch"/> + <date year="2010" month="June"/> + </front> + <seriesInfo name="RFC" value="5905"/> +</reference> + +<reference anchor="RFC6454"> + <front> + <title>The Web Origin Concept</title> + <author initials="A." surname="Barth" fullname="A. 
Barth"/> + <date year="2011" month="December"/> + </front> + <seriesInfo name="RFC" value="6454"/> +</reference> + +<reference anchor="RFC7230"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Message Syntax and Routing</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7230"/> +</reference> + +<reference anchor="RFC7231"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Semantics and Content</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7231"/> +</reference> + +<reference anchor="RFC7232"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Conditional Requests</title> + <author fullname="Roy T. Fielding" initials="R." role="editor" surname="Fielding"/> + <author fullname="Julian F. Reschke" initials="J." role="editor" surname="Reschke"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7232"/> +</reference> + +<reference anchor="RFC7233"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Range Requests</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="Y." surname="Lafon" fullname="Yves Lafon" role="editor"/> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7233"/> +</reference> + +<reference anchor="RFC7234"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Caching</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="M." surname="Nottingham" fullname="Mark Nottingham" role="editor"/> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7234"/> +</reference> + +<reference anchor="RFC7235"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Authentication</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7235"/> +</reference> + +<reference anchor="RFC7578"> + <front> + <title>Returning Values from Forms: multipart/form-data</title> + <author initials="L." surname="Masinter" fullname="Larry Masinter"/> + <date year="2015" month="July"/> + </front> + <seriesInfo name="RFC" value="7578"/> +</reference> + +<reference anchor="RFC7615"> + <front> + <title>HTTP Authentication-Info and Proxy-Authentication-Info Response Header Fields</title> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke"/> + <date year="2015" month="September"/> + </front> + <seriesInfo name="RFC" value="7615"/> +</reference> + +<reference anchor="ALTSVC"> + <front> + <title>HTTP Alternative Services</title> + <author initials="M." surname="Nottingham" fullname="M. Nottingham"/> + <author initials="P." surname="McManus" fullname="P. McManus"/> + <author initials="J." surname="Reschke" fullname="J. 
Reschke"/> + <date year="2016" month="April"/> + </front> + <seriesInfo name="RFC" value="7838"/> +</reference> + +<reference anchor="RFC8336"> + <front> + <title>The ORIGIN HTTP/2 Frame</title> + <author initials="M." surname="Nottingham" fullname="M. Nottingham"/> + <author initials="E." surname="Nygren" fullname="E. Nygren"/> + <date year="2018" month="March"/> + </front> + <seriesInfo name="RFC" value="8336"/> +</reference> + +<reference anchor="RFC8615"> + <front> + <title>Well-Known Uniform Resource Identifiers (URIs)</title> + <author initials="M." surname="Nottingham" fullname="M. Nottingham"/> + <date year="2019" month="May"/> + </front> + <seriesInfo name="RFC" value="8615"/> +</reference> + +<reference anchor="HTTP3"> + <front> + <title>HTTP/3</title> + <author initials="M." surname="Bishop" fullname="Mike Bishop" role="editor"/> + <date year="2022" month="June"/> + </front> + <seriesInfo name="RFC" value="9114"/> +</reference> + +<referencegroup anchor="BCP13" target="https://www.rfc-editor.org/info/bcp13"> + <reference anchor="RFC4289"> + <front> + <title>Multipurpose Internet Mail Extensions (MIME) Part Four: Registration Procedures</title> + <author initials="N." surname="Freed" fullname="Ned Freed"/> + <author initials="J." surname="Klensin" fullname="John C. Klensin"/> + <date year="2005" month="December"/> + </front> + <seriesInfo name="BCP" value="13"/> + <seriesInfo name="RFC" value="4289"/> + </reference> + <reference anchor="RFC6838"> + <front> + <title>Media Type Specifications and Registration Procedures</title> + <author initials="N." surname="Freed" fullname="Ned Freed"/> + <author initials="J." surname="Klensin" fullname="John C. Klensin"/> + <author initials="T." surname="Hansen" fullname="Tony Hansen"/> + <date year="2013" month="January"/> + </front> + <seriesInfo name="BCP" value="13"/> + <seriesInfo name="RFC" value="6838"/> + </reference> +</referencegroup> + +<referencegroup anchor="BCP35" target="https://www.rfc-editor.org/info/bcp35"> + <reference anchor="RFC7595"> + <front> + <title>Guidelines and Registration Procedures for URI Schemes</title> + <author initials="D." surname="Thaler" fullname="Dave Thaler" role="editor"/> + <author initials="T." surname="Hansen" fullname="Tony Hansen"/> + <author initials="T." surname="Hardie" fullname="Ted Hardie"/> + <date year="2015" month="June"/> + </front> + <seriesInfo name="BCP" value="35"/> + <seriesInfo name="RFC" value="7595"/> + </reference> +</referencegroup> + +<referencegroup anchor="BCP178" target="https://www.rfc-editor.org/info/bcp178"> + <reference anchor="RFC6648"> + <front> + <title>Deprecating the "X-" Prefix and Similar Constructs in Application Protocols</title> + <author initials="P." surname="Saint-Andre" fullname="Peter Saint-Andre"/> + <author initials="D." surname="Crocker" fullname="Dave Crocker"/> + <author initials="M." surname="Nottingham" fullname="Mark Nottingham"/> + <date year="2012" month="June"/> + </front> + <seriesInfo name="BCP" value="178"/> + <seriesInfo name="RFC" value="6648"/> + </reference> +</referencegroup> + +<reference anchor="RFC3864"> + <front> + <title>Registration Procedures for Message Header Fields</title> + <author initials="G." surname="Klyne" fullname="G. Klyne"/> + <author initials="M." surname="Nottingham" fullname="M. Nottingham"/> + <author initials="J." surname="Mogul" fullname="J. 
Mogul"/> + <date year="2004" month="September"/> + </front> + <seriesInfo name="BCP" value="90"/> + <seriesInfo name="RFC" value="3864"/> +</reference> + +<reference anchor="RFC3875"> + <front> + <title>The Common Gateway Interface (CGI) Version 1.1</title> + <author initials="D." surname="Robinson" fullname="David Robinson"/> + <author initials="K." surname="Coar" fullname="Ken A. L. Coar"/> + <date year="2004" month="October"/> + </front> + <seriesInfo name="RFC" value="3875"/> +</reference> + +<reference anchor="RFC5789"> + <front> + <title>PATCH Method for HTTP</title> + <author initials="L." surname="Dusseault" fullname="L. Dusseault"/> + <author initials="J." surname="Snell" fullname="J. Snell"/> + <date year="2010" month="March"/> + </front> + <seriesInfo name="RFC" value="5789"/> +</reference> + +<reference anchor="COOKIE"> + <front> + <title>HTTP State Management Mechanism</title> + <author initials="A." surname="Barth" fullname="Adam Barth"/> + <date year="2011" month="April"/> + </front> + <seriesInfo name="RFC" value="6265"/> +</reference> + +<reference anchor="RFC6585"> + <front> + <title>Additional HTTP Status Codes</title> + <author initials="M." surname="Nottingham" fullname="M. Nottingham"/> + <author initials="R." surname="Fielding" fullname="R. Fielding"/> + <date year="2012" month="April"/> + </front> + <seriesInfo name="RFC" value="6585"/> +</reference> + +<reference anchor="RFC7538"> + <front> + <title>The Hypertext Transfer Protocol Status Code 308 (Permanent Redirect)</title> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke"/> + <date month="April" year="2015"/> + </front> + <seriesInfo name="RFC" value="7538"/> +</reference> + +<reference anchor="RFC7540"> + <front> + <title>Hypertext Transfer Protocol Version 2 (HTTP/2)</title> + <author initials="M." surname="Belshe" fullname="M. Belshe"/> + <author initials="R." surname="Peon" fullname="R. Peon"/> + <author initials="M." surname="Thomson" fullname="M. Thomson" role="editor"/> + <date year="2015" month="May"/> + </front> + <seriesInfo name="RFC" value="7540"/> +</reference> + +<reference anchor="RFC7616"> + <front> + <title>HTTP Digest Access Authentication</title> + <author initials="R." surname="Shekh-Yusef" fullname="R. Shekh-Yusef" role="editor"/> + <author initials="D." surname="Ahrens" fullname="D. Ahrens"/> + <author initials="S." surname="Bremer" fullname="S. Bremer"/> + <date year="2015" month="September"/> + </front> + <seriesInfo name="RFC" value="7616"/> +</reference> + +<reference anchor="RFC7617"> + <front> + <title>The 'Basic' HTTP Authentication Scheme</title> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke"/> + <date year="2015" month="September"/> + </front> + <seriesInfo name="RFC" value="7617"/> +</reference> + +<reference anchor="RFC7694"> + <front> + <title>Hypertext Transfer Protocol (HTTP) Client-Initiated Content-Encoding</title> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke"/> + <date year="2015" month="November"/> + </front> + <seriesInfo name="RFC" value="7694"/> +</reference> + +<reference anchor="RFC8126"> + <front> + <title>Guidelines for Writing an IANA Considerations Section in RFCs</title> + <author initials="M." surname="Cotton" fullname="M. Cotton"/> + <author initials="B." surname="Leiba" fullname="B. Leiba"/> + <author initials="T." surname="Narten" fullname="T. 
Narten"/> + <date year="2017" month="June"/> + </front> + <seriesInfo name="BCP" value="26"/> + <seriesInfo name="RFC" value="8126"/> +</reference> + +<reference anchor="RFC8187"> + <front> + <title>Indicating Character Encoding and Language for HTTP Header Field Parameters</title> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke"/> + <date month="September" year="2017"/> + </front> + <seriesInfo name="RFC" value="8187"/> +</reference> + +<reference anchor="RFC8246"> + <front> + <title>HTTP Immutable Responses</title> + <author initials="P." surname="McManus" fullname="P. McManus"/> + <date year="2017" month="September"/> + </front> + <seriesInfo name="RFC" value="8246"/> +</reference> + +<reference anchor="RFC8288"> + <front> + <title>Web Linking</title> + <author initials="M." surname="Nottingham" fullname="M. Nottingham"/> + <date year="2017" month="October"/> + </front> + <seriesInfo name="RFC" value="8288"/> +</reference> + +<reference anchor="RFC8941"> + <front> + <title>Structured Field Values for HTTP</title> + <author initials="M." surname="Nottingham" fullname="Mark Nottingham"/> + <author initials="P-H." surname="Kamp" fullname="Poul-Henning Kamp"/> + <date month="February" year="2021"/> + </front> + <seriesInfo name="RFC" value="8941"/> +</reference> + +<reference anchor="OWASP" target="https://www.owasp.org/" quoteTitle="false"> + <front> + <title>The Open Web Application Security Project</title> + <author><organization/></author> + </front> +</reference> +</references> + +<?BEGININC build/draft-ietf-httpbis-semantics-latest.abnf-appendix ?> +<section title="Collected ABNF" anchor="collected.abnf"><t>In the collected ABNF below, list rules are expanded per <xref target="abnf.extension"/>.</t><sourcecode type="abnf" name="rfc9110.parsed-abnf"> +<x:ref>Accept</x:ref> = [ ( media-range [ weight ] ) *( OWS "," OWS ( media-range [ + weight ] ) ) ] +<x:ref>Accept-Charset</x:ref> = [ ( ( token / "*" ) [ weight ] ) *( OWS "," OWS ( ( + token / "*" ) [ weight ] ) ) ] +<x:ref>Accept-Encoding</x:ref> = [ ( codings [ weight ] ) *( OWS "," OWS ( codings [ + weight ] ) ) ] +<x:ref>Accept-Language</x:ref> = [ ( language-range [ weight ] ) *( OWS "," OWS ( + language-range [ weight ] ) ) ] +<x:ref>Accept-Ranges</x:ref> = acceptable-ranges +<x:ref>Allow</x:ref> = [ method *( OWS "," OWS method ) ] +<x:ref>Authentication-Info</x:ref> = [ auth-param *( OWS "," OWS auth-param ) ] +<x:ref>Authorization</x:ref> = credentials + +<x:ref>BWS</x:ref> = OWS + +<x:ref>Connection</x:ref> = [ connection-option *( OWS "," OWS connection-option ) + ] +<x:ref>Content-Encoding</x:ref> = [ content-coding *( OWS "," OWS content-coding ) + ] +<x:ref>Content-Language</x:ref> = [ language-tag *( OWS "," OWS language-tag ) ] +<x:ref>Content-Length</x:ref> = 1*DIGIT +<x:ref>Content-Location</x:ref> = absolute-URI / partial-URI +<x:ref>Content-Range</x:ref> = range-unit SP ( range-resp / unsatisfied-range ) +<x:ref>Content-Type</x:ref> = media-type + +<x:ref>Date</x:ref> = HTTP-date + +<x:ref>ETag</x:ref> = entity-tag +<x:ref>Expect</x:ref> = [ expectation *( OWS "," OWS expectation ) ] + +<x:ref>From</x:ref> = mailbox + +<x:ref>GMT</x:ref> = %x47.4D.54 ; GMT + +<x:ref>HTTP-date</x:ref> = IMF-fixdate / obs-date +<x:ref>Host</x:ref> = uri-host [ ":" port ] + +<x:ref>IMF-fixdate</x:ref> = day-name "," SP date1 SP time-of-day SP GMT +<x:ref>If-Match</x:ref> = "*" / [ entity-tag *( OWS "," OWS entity-tag ) ] +<x:ref>If-Modified-Since</x:ref> = HTTP-date +<x:ref>If-None-Match</x:ref> = "*" / [ 
entity-tag *( OWS "," OWS entity-tag ) ] +<x:ref>If-Range</x:ref> = entity-tag / HTTP-date +<x:ref>If-Unmodified-Since</x:ref> = HTTP-date + +<x:ref>Last-Modified</x:ref> = HTTP-date +<x:ref>Location</x:ref> = URI-reference + +<x:ref>Max-Forwards</x:ref> = 1*DIGIT + +<x:ref>OWS</x:ref> = *( SP / HTAB ) + +<x:ref>Proxy-Authenticate</x:ref> = [ challenge *( OWS "," OWS challenge ) ] +<x:ref>Proxy-Authentication-Info</x:ref> = [ auth-param *( OWS "," OWS auth-param ) + ] +<x:ref>Proxy-Authorization</x:ref> = credentials + +<x:ref>RWS</x:ref> = 1*( SP / HTAB ) +<x:ref>Range</x:ref> = ranges-specifier +<x:ref>Referer</x:ref> = absolute-URI / partial-URI +<x:ref>Retry-After</x:ref> = HTTP-date / delay-seconds + +<x:ref>Server</x:ref> = product *( RWS ( product / comment ) ) + +<x:ref>TE</x:ref> = [ t-codings *( OWS "," OWS t-codings ) ] +<x:ref>Trailer</x:ref> = [ field-name *( OWS "," OWS field-name ) ] + +<x:ref>URI-reference</x:ref> = &lt;URI-reference, see <xref target="URI" x:fmt="," x:sec="4.1"/>&gt; +<x:ref>Upgrade</x:ref> = [ protocol *( OWS "," OWS protocol ) ] +<x:ref>User-Agent</x:ref> = product *( RWS ( product / comment ) ) + +<x:ref>Vary</x:ref> = [ ( "*" / field-name ) *( OWS "," OWS ( "*" / field-name ) ) + ] +<x:ref>Via</x:ref> = [ ( received-protocol RWS received-by [ RWS comment ] ) *( OWS + "," OWS ( received-protocol RWS received-by [ RWS comment ] ) ) ] + +<x:ref>WWW-Authenticate</x:ref> = [ challenge *( OWS "," OWS challenge ) ] + +<x:ref>absolute-URI</x:ref> = &lt;absolute-URI, see <xref target="URI" x:fmt="," x:sec="4.3"/>&gt; +<x:ref>absolute-path</x:ref> = 1*( "/" segment ) +<x:ref>acceptable-ranges</x:ref> = range-unit *( OWS "," OWS range-unit ) +<x:ref>asctime-date</x:ref> = day-name SP date3 SP time-of-day SP year +<x:ref>auth-param</x:ref> = token BWS "=" BWS ( token / quoted-string ) +<x:ref>auth-scheme</x:ref> = token +<x:ref>authority</x:ref> = &lt;authority, see <xref target="URI" x:fmt="," x:sec="3.2"/>&gt; + +<x:ref>challenge</x:ref> = auth-scheme [ 1*SP ( token68 / [ auth-param *( OWS "," + OWS auth-param ) ] ) ] +<x:ref>codings</x:ref> = content-coding / "identity" / "*" +<x:ref>comment</x:ref> = "(" *( ctext / quoted-pair / comment ) ")" +<x:ref>complete-length</x:ref> = 1*DIGIT +<x:ref>connection-option</x:ref> = token +<x:ref>content-coding</x:ref> = token +<x:ref>credentials</x:ref> = auth-scheme [ 1*SP ( token68 / [ auth-param *( OWS "," + OWS auth-param ) ] ) ] +<x:ref>ctext</x:ref> = HTAB / SP / %x21-27 ; '!'-''' + / %x2A-5B ; '*'-'[' + / %x5D-7E ; ']'-'~' + / obs-text + +<x:ref>date1</x:ref> = day SP month SP year +<x:ref>date2</x:ref> = day "-" month "-" 2DIGIT +<x:ref>date3</x:ref> = month SP ( 2DIGIT / ( SP DIGIT ) ) +<x:ref>day</x:ref> = 2DIGIT +<x:ref>day-name</x:ref> = %x4D.6F.6E ; Mon + / %x54.75.65 ; Tue + / %x57.65.64 ; Wed + / %x54.68.75 ; Thu + / %x46.72.69 ; Fri + / %x53.61.74 ; Sat + / %x53.75.6E ; Sun +<x:ref>day-name-l</x:ref> = %x4D.6F.6E.64.61.79 ; Monday + / %x54.75.65.73.64.61.79 ; Tuesday + / %x57.65.64.6E.65.73.64.61.79 ; Wednesday + / %x54.68.75.72.73.64.61.79 ; Thursday + / %x46.72.69.64.61.79 ; Friday + / %x53.61.74.75.72.64.61.79 ; Saturday + / %x53.75.6E.64.61.79 ; Sunday +<x:ref>delay-seconds</x:ref> = 1*DIGIT + +<x:ref>entity-tag</x:ref> = [ weak ] opaque-tag +<x:ref>etagc</x:ref> = "!" 
/ %x23-7E ; '#'-'~' + / obs-text +<x:ref>expectation</x:ref> = token [ "=" ( token / quoted-string ) parameters ] + +<x:ref>field-content</x:ref> = field-vchar [ 1*( SP / HTAB / field-vchar ) + field-vchar ] +<x:ref>field-name</x:ref> = token +<x:ref>field-value</x:ref> = *field-content +<x:ref>field-vchar</x:ref> = VCHAR / obs-text +<x:ref>first-pos</x:ref> = 1*DIGIT + +<x:ref>hour</x:ref> = 2DIGIT +<x:ref>http-URI</x:ref> = "http://" authority path-abempty [ "?" query ] +<x:ref>https-URI</x:ref> = "https://" authority path-abempty [ "?" query ] + +<x:ref>incl-range</x:ref> = first-pos "-" last-pos +<x:ref>int-range</x:ref> = first-pos "-" [ last-pos ] + +<x:ref>language-range</x:ref> = &lt;language-range, see <xref target="RFC4647" x:fmt="," x:sec="2.1"/>&gt; +<x:ref>language-tag</x:ref> = &lt;Language-Tag, see <xref target="RFC5646" x:fmt="," x:sec="2.1"/>&gt; +<x:ref>last-pos</x:ref> = 1*DIGIT + +<x:ref>mailbox</x:ref> = &lt;mailbox, see <xref target="RFC5322" x:fmt="," x:sec="3.4"/>&gt; +<x:ref>media-range</x:ref> = ( "*/*" / ( type "/*" ) / ( type "/" subtype ) ) + parameters +<x:ref>media-type</x:ref> = type "/" subtype parameters +<x:ref>method</x:ref> = token +<x:ref>minute</x:ref> = 2DIGIT +<x:ref>month</x:ref> = %x4A.61.6E ; Jan + / %x46.65.62 ; Feb + / %x4D.61.72 ; Mar + / %x41.70.72 ; Apr + / %x4D.61.79 ; May + / %x4A.75.6E ; Jun + / %x4A.75.6C ; Jul + / %x41.75.67 ; Aug + / %x53.65.70 ; Sep + / %x4F.63.74 ; Oct + / %x4E.6F.76 ; Nov + / %x44.65.63 ; Dec + +<x:ref>obs-date</x:ref> = rfc850-date / asctime-date +<x:ref>obs-text</x:ref> = %x80-FF +<x:ref>opaque-tag</x:ref> = DQUOTE *etagc DQUOTE +<x:ref>other-range</x:ref> = 1*( %x21-2B ; '!'-'+' + / %x2D-7E ; '-'-'~' + ) + +<x:ref>parameter</x:ref> = parameter-name "=" parameter-value +<x:ref>parameter-name</x:ref> = token +<x:ref>parameter-value</x:ref> = ( token / quoted-string ) +<x:ref>parameters</x:ref> = *( OWS ";" OWS [ parameter ] ) +<x:ref>partial-URI</x:ref> = relative-part [ "?" query ] +<x:ref>path-abempty</x:ref> = &lt;path-abempty, see <xref target="URI" x:fmt="," x:sec="3.3"/>&gt; +<x:ref>port</x:ref> = &lt;port, see <xref target="URI" x:fmt="," x:sec="3.2.3"/>&gt; +<x:ref>product</x:ref> = token [ "/" product-version ] +<x:ref>product-version</x:ref> = token +<x:ref>protocol</x:ref> = protocol-name [ "/" protocol-version ] +<x:ref>protocol-name</x:ref> = token +<x:ref>protocol-version</x:ref> = token +<x:ref>pseudonym</x:ref> = token + +<x:ref>qdtext</x:ref> = HTAB / SP / "!" / %x23-5B ; '#'-'[' + / %x5D-7E ; ']'-'~' + / obs-text +<x:ref>query</x:ref> = &lt;query, see <xref target="URI" x:fmt="," x:sec="3.4"/>&gt; +<x:ref>quoted-pair</x:ref> = "\" ( HTAB / SP / VCHAR / obs-text ) +<x:ref>quoted-string</x:ref> = DQUOTE *( qdtext / quoted-pair ) DQUOTE +<x:ref>qvalue</x:ref> = ( "0" [ "." *3DIGIT ] ) / ( "1" [ "." 
*3"0" ] ) + +<x:ref>range-resp</x:ref> = incl-range "/" ( complete-length / "*" ) +<x:ref>range-set</x:ref> = range-spec *( OWS "," OWS range-spec ) +<x:ref>range-spec</x:ref> = int-range / suffix-range / other-range +<x:ref>range-unit</x:ref> = token +<x:ref>ranges-specifier</x:ref> = range-unit "=" range-set +<x:ref>received-by</x:ref> = pseudonym [ ":" port ] +<x:ref>received-protocol</x:ref> = [ protocol-name "/" ] protocol-version +<x:ref>relative-part</x:ref> = &lt;relative-part, see <xref target="URI" x:fmt="," x:sec="4.2"/>&gt; +<x:ref>rfc850-date</x:ref> = day-name-l "," SP date2 SP time-of-day SP GMT + +<x:ref>second</x:ref> = 2DIGIT +<x:ref>segment</x:ref> = &lt;segment, see <xref target="URI" x:fmt="," x:sec="3.3"/>&gt; +<x:ref>subtype</x:ref> = token +<x:ref>suffix-length</x:ref> = 1*DIGIT +<x:ref>suffix-range</x:ref> = "-" suffix-length + +<x:ref>t-codings</x:ref> = "trailers" / ( transfer-coding [ weight ] ) +<x:ref>tchar</x:ref> = "!" / "#" / "$" / "%" / "&amp;" / "'" / "*" / "+" / "-" / "." / + "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +<x:ref>time-of-day</x:ref> = hour ":" minute ":" second +<x:ref>token</x:ref> = 1*tchar +<x:ref>token68</x:ref> = 1*( ALPHA / DIGIT / "-" / "." / "_" / "~" / "+" / "/" ) + *"=" +<x:ref>transfer-coding</x:ref> = token *( OWS ";" OWS transfer-parameter ) +<x:ref>transfer-parameter</x:ref> = token BWS "=" BWS ( token / quoted-string ) +<x:ref>type</x:ref> = token + +<x:ref>unsatisfied-range</x:ref> = "*/" complete-length +<x:ref>uri-host</x:ref> = &lt;host, see <xref target="URI" x:fmt="," x:sec="3.2.2"/>&gt; + +<x:ref>weak</x:ref> = %x57.2F ; W/ +<x:ref>weight</x:ref> = OWS ";" OWS "q=" qvalue + +<x:ref>year</x:ref> = 4DIGIT +</sourcecode> +</section> +<?ENDINC build/draft-ietf-httpbis-semantics-latest.abnf-appendix ?> + +<section title="Changes from Previous RFCs" anchor="changes.from.previous.rfcs"> + +<section title="Changes from RFC 2818" anchor="changes.from.rfc.2818"> +<t> + None. +</t> +</section> + +<section title="Changes from RFC 7230" anchor="changes.from.rfc.7230"> +<t> + The sections introducing HTTP's design goals, history, architecture, + conformance criteria, protocol versioning, URIs, message routing, and + header fields have been moved here. +</t> +<t> + The requirement on semantic conformance has been replaced with permission to + ignore or work around implementation-specific failures. + (<xref target="requirements.notation"/>) +</t> +<t> + The description of an origin and authoritative access to origin servers has + been extended for both "http" and "https" URIs to account for alternative + services and secured connections that are not necessarily based on TCP. + (Sections <xref target="http.uri" format="counter"/>, <xref target="https.uri" format="counter"/>, + <xref target="origin" format="counter"/>, and <xref target="routing.origin" format="counter"/>) +</t> +<t> + Explicit requirements have been added to check the target URI scheme's semantics + and reject requests that don't meet any associated requirements. + (<xref target="routing.reject"/>) +</t> +<t> + Parameters in media type, media range, and expectation can be empty via + one or more trailing semicolons. + (<xref target="parameter"/>) +</t> +<t> + "Field value" now refers to the value after multiple field lines are combined + with commas — by far the most common use. To refer to a single header + line's value, use "field line value". 
+ (<xref target="header.fields"/>) +</t> +<t> + Trailer field semantics now transcend the specifics of chunked transfer coding. + The use of trailer fields has been further limited to allow generation + as a trailer field only when the sender knows the field defines that usage and + to allow merging into the header section only if the recipient knows the + corresponding field definition permits and defines how to merge. In all + other cases, implementations are encouraged either to store the trailer + fields separately or to discard them instead of merging. + (<xref target="trailers.limitations"/>) +</t> +<t> + The priority of the absolute form of the request URI over the Host + header field by origin servers has been made explicit to align with proxy handling. + (<xref target="field.host"/>) +</t> +<t> + The grammar definition for the Via field's "received-by" was + expanded in RFC 7230 due to changes in the URI grammar for host + <xref target="URI"/> that are not desirable for Via. For simplicity, + we have removed uri-host from the received-by production because it can + be encompassed by the existing grammar for pseudonym. In particular, this + change removed comma from the allowed set of characters for a host name in + received-by. + (<xref target="field.via"/>) +</t> +</section> + +<section title="Changes from RFC 7231" anchor="changes.from.rfc.7231"> +<t> + Minimum URI lengths to be supported by implementations are now recommended. + (<xref target="uri.references"/>) +</t> +<t> + The following have been clarified: CR and NUL in field values are to be + rejected or mapped to SP, and leading and trailing whitespace needs to be + stripped from field values before they are consumed. + (<xref target="fields.values"/>) +</t> +<t> + Parameters in media type, media range, and expectation can be empty via + one or more trailing semicolons. + (<xref target="parameter"/>) +</t> +<t> + An abstract data type for HTTP messages has been introduced to define the + components of a message and their semantics as an abstraction across + multiple HTTP versions, rather than in terms of the specific syntax form of + HTTP/1.1 in <xref target="HTTP11"/>, and reflect the contents after the + message is parsed. This makes it easier to distinguish between requirements + on the content (what is conveyed) versus requirements on the messaging + syntax (how it is conveyed) and avoids baking limitations of early protocol + versions into the future of HTTP. (<xref target="message.abstraction"/>) +</t> +<t> + The terms "payload" and "payload body" have been replaced with "content", to better + align with its usage elsewhere (e.g., in field names) and to avoid confusion + with frame payloads in HTTP/2 and HTTP/3. + (<xref target="content"/>) +</t> +<t> + The term "effective request URI" has been replaced with "target URI". + (<xref target="target.resource"/>) +</t> +<t> + Restrictions on client retries have been loosened to reflect implementation + behavior. + (<xref target="idempotent.methods"/>) +</t> +<t> + The fact that request bodies on GET, HEAD, and DELETE are not interoperable + has been clarified. + (Sections <xref target="GET" format="counter"/>, <xref target="HEAD" format="counter"/>, and <xref target="DELETE" format="counter"/>) +</t> +<t> + The use of the Content-Range header field + (<xref target="field.content-range"/>) as a request modifier on PUT is allowed. 
+ (<xref target="PUT"/>) +</t> +<t> + A superfluous requirement about setting <x:ref>Content-Length</x:ref> + has been removed from the description of the OPTIONS method. + (<xref target="OPTIONS"/>) +</t> +<t> + The normative requirement to use the "message/http" media type in + TRACE responses has been removed. + (<xref target="TRACE"/>) +</t> +<t> + List-based grammar for <x:ref>Expect</x:ref> has been restored for + compatibility with RFC 2616. + (<xref target="field.expect"/>) +</t> +<t> + <x:ref>Accept</x:ref> and <x:ref>Accept-Encoding</x:ref> are allowed in response + messages; the latter was introduced by <xref target="RFC7694"/>. + (<xref target="request.content.negotiation"/>) +</t> +<t> + "Accept Parameters" (accept-params and accept-ext ABNF production) have + been removed from the definition of the Accept field. + (<xref target="field.accept"/>) +</t> +<t> + The Accept-Charset field is now deprecated. + (<xref target="field.accept-charset"/>) +</t> +<t> + The semantics of "*" in the <x:ref>Vary</x:ref> header field when other + values are present was clarified. + (<xref target="field.vary"/>) +</t> +<t> + Range units are compared in a case-insensitive fashion. + (<xref target="range.units"/>) +</t> +<t> + The use of the Accept-Ranges field is not restricted to origin servers. + (<xref target="field.accept-ranges"/>) +</t> +<t> + The process of creating a redirected request has been clarified. + (<xref target="status.3xx"/>) +</t> +<t> + Status code 308 (previously defined in <xref target="RFC7538"/>) has been + added so that it's defined closer to status codes 301, 302, and 307. + (<xref target="status.308"/>) +</t> +<t> + Status code 421 (previously defined in + <xref target="RFC7540" section="9.1.2"/>) has been added because of its general + applicability. 421 is no longer defined as heuristically cacheable since + the response is specific to the connection (not the target resource). + (<xref target="status.421"/>) +</t> +<t> + Status code 422 (previously defined in + <xref target="WEBDAV" x:fmt="of" x:sec="11.2"/>) has been added + because of its general applicability. + (<xref target="status.422"/>) +</t> +</section> + +<section title="Changes from RFC 7232" anchor="changes.from.rfc.7232"> +<t> + Previous revisions of HTTP imposed an arbitrary 60-second limit on the + determination of whether Last-Modified was a strong validator to guard + against the possibility that the Date and Last-Modified values are + generated from different clocks or at somewhat different times during the + preparation of the response. This specification has relaxed that to allow + reasonable discretion. + (<xref target="lastmod.comparison"/>) +</t> +<t> + An edge-case requirement on If-Match and If-Unmodified-Since + has been removed that required a validator not to be sent in a 2xx + response if validation fails because the change request has already + been applied. + (Sections <xref target="field.if-match" format="counter"/> and + <xref target="field.if-unmodified-since" format="counter"/>) +</t> +<t> + The fact that If-Unmodified-Since does not apply to a resource + without a concept of modification time has been clarified. + (<xref target="field.if-unmodified-since"/>) +</t> +<t> + Preconditions can now be evaluated before the request content is processed + rather than waiting until the response would otherwise be successful. 
+ (<xref target="evaluation"/>) +</t> +</section> + +<section title="Changes from RFC 7233" anchor="changes.from.rfc.7233"> +<t> + Refactored the range-unit and ranges-specifier grammars to simplify + and reduce artificial distinctions between bytes and other + (extension) range units, removing the overlapping grammar of + other-range-unit by defining range units generically as a token and + placing extensions within the scope of a range-spec (other-range). + This disambiguates the role of list syntax (commas) in all range sets, + including extension range units, for indicating a range-set of more than + one range. Moving the extension grammar into range specifiers also allows + protocol specific to byte ranges to be specified separately. +</t> +<t> + It is now possible to define Range handling on extension methods. + (<xref target="field.range"/>) +</t> +<t> + Described use of the <x:ref>Content-Range</x:ref> header field + (<xref target="field.content-range"/>) as a request modifier to perform a + partial PUT. + (<xref target="partial.PUT"/>) +</t> +</section> + +<section title="Changes from RFC 7235" anchor="changes.from.rfc.7235"> +<t> + None. +</t> +</section> + +<section title="Changes from RFC 7538" anchor="changes.from.rfc.7538"> +<t> + None. +</t> +</section> + +<section title="Changes from RFC 7615" anchor="changes.from.rfc.7615"> +<t> + None. +</t> +</section> + +<section title="Changes from RFC 7694" anchor="changes.from.rfc.7694"> +<t> + This specification includes the extension defined in <xref target="RFC7694"/> + but leaves out examples and deployment considerations. +</t> +</section> +</section> + +<section title="Acknowledgements" anchor="acks" numbered="false"> +<t> + Aside from the current editors, the following individuals deserve special + recognition for their contributions to early aspects of HTTP and its + core specifications: + <contact fullname="Marc Andreessen"/>, <contact fullname="Tim Berners-Lee"/>, + <contact fullname="Robert Cailliau"/>, <contact fullname="Daniel W. Connolly"/>, + <contact fullname="Bob Denny"/>, <contact fullname="John Franks"/>, + <contact fullname="Jim Gettys"/>, <contact fullname="Jean-François Groff"/>, + <contact fullname="Phillip M. Hallam-Baker"/>, <contact fullname="Koen Holtman"/>, + <contact fullname="Jeffery L. Hostetler"/>, <contact fullname="Shel Kaphan"/>, + <contact fullname="Dave Kristol"/>, <contact fullname="Yves Lafon"/>, + <contact fullname="Scott D. Lawrence"/>, <contact fullname="Paul J. Leach"/>, + <contact fullname="Håkon W. Lie"/>, <contact fullname="Ari Luotonen"/>, + <contact fullname="Larry Masinter"/>, <contact fullname="Rob McCool"/>, + <contact fullname="Jeffrey C. Mogul"/>, <contact fullname="Lou Montulli"/>, + <contact fullname="David Morris"/>, <contact fullname="Henrik Frystyk Nielsen"/>, + <contact fullname="Dave Raggett"/>, <contact fullname="Eric Rescorla"/>, + <contact fullname="Tony Sanders"/>, <contact fullname="Lawrence C. Stewart"/>, + <contact fullname="Marc VanHeyningen"/>, and <contact fullname="Steve Zilles"/>. +</t> +<t> + This document builds on the many contributions + that went into past specifications of HTTP, including + <xref target="HTTP10"/>, + <xref target="RFC2068"/>, + <xref target="RFC2145"/>, + <xref target="RFC2616"/>, + <xref target="RFC2617"/>, + <xref target="RFC2818"/>, + <xref target="RFC7230"/>, + <xref target="RFC7231"/>, + <xref target="RFC7232"/>, + <xref target="RFC7233"/>, + <xref target="RFC7234"/>, and + <xref target="RFC7235"/>. 
+ The acknowledgements within those documents still apply. +</t> +<t> + Since 2014, the following contributors have helped improve this + specification by reporting bugs, asking smart questions, drafting or + reviewing text, and evaluating issues: +</t> +<t> + <contact fullname="Alan Egerton"/>, + <contact fullname="Alex Rousskov"/>, + <contact fullname="Amichai Rothman"/>, + <contact fullname="Amos Jeffries"/>, + <contact fullname="Anders Kaseorg"/>, + <contact fullname="Andreas Gebhardt"/>, + <contact fullname="Anne van Kesteren"/>, + <contact fullname="Armin Abfalterer"/>, + <contact fullname="Aron Duby"/>, + <contact fullname="Asanka Herath"/>, + <contact fullname="Asbjørn Ulsberg"/>, + <contact fullname="Asta Olofsson"/>, + <contact fullname="Attila Gulyas"/>, + <contact fullname="Austin Wright"/>, + <contact fullname="Barry Pollard"/>, + <contact fullname="Ben Burkert"/>, + <contact fullname="Benjamin Kaduk"/>, + <contact fullname="Björn Höhrmann"/>, + <contact fullname="Brad Fitzpatrick"/>, + <contact fullname="Chris Pacejo"/>, + <contact fullname="Colin Bendell"/>, + <contact fullname="Cory Benfield"/>, + <contact fullname="Cory Nelson"/>, + <contact fullname="Daisuke Miyakawa"/>, + <contact fullname="Dale Worley"/>, + <contact fullname="Daniel Stenberg"/>, + <contact fullname="Danil Suits"/>, + <contact fullname="David Benjamin"/>, + <contact fullname="David Matson"/>, + <contact fullname="David Schinazi"/>, + <contact fullname="Дилян Палаузов" asciiFullname="Dilyan Palauzov"/>, + <contact fullname="Eric Anderson"/>, + <contact fullname="Eric Rescorla"/>, + <contact fullname="Éric Vyncke"/>, + <contact fullname="Erik Kline"/>, + <contact fullname="Erwin Pe"/>, + <contact fullname="Etan Kissling"/>, + <contact fullname="Evert Pot"/>, + <contact fullname="Evgeny Vrublevsky"/>, + <contact fullname="Florian Best"/>, + <contact fullname="Francesca Palombini"/>, + <contact fullname="Igor Lubashev"/>, + <contact fullname="James Callahan"/>, + <contact fullname="James Peach"/>, + <contact fullname="Jeffrey Yasskin"/>, + <contact fullname="Kalin Gyokov"/>, + <contact fullname="Kannan Goundan"/>, + <contact fullname="奥 一穂" asciiFullname="Kazuho Oku"/>, + <contact fullname="Ken Murchison"/>, + <contact fullname="Krzysztof Maczyński"/>, + <contact fullname="Lars Eggert"/>, + <contact fullname="Lucas Pardue"/>, + <contact fullname="Martin Duke"/>, + <contact fullname="Martin Dürst"/>, + <contact fullname="Martin Thomson"/>, + <contact fullname="Martynas Jusevičius"/>, + <contact fullname="Matt Menke"/>, + <contact fullname="Matthias Pigulla"/>, + <contact fullname="Mattias Grenfeldt"/>, + <contact fullname="Michael Osipov"/>, + <contact fullname="Mike Bishop"/>, + <contact fullname="Mike Pennisi"/>, + <contact fullname="Mike Taylor"/>, + <contact fullname="Mike West"/>, + <contact fullname="Mohit Sethi"/>, + <contact fullname="Murray Kucherawy"/>, + <contact fullname="Nathaniel J. 
Smith"/>, + <contact fullname="Nicholas Hurley"/>, + <contact fullname="Nikita Prokhorov"/>, + <contact fullname="Patrick McManus"/>, + <contact fullname="Piotr Sikora"/>, + <contact fullname="Poul-Henning Kamp"/>, + <contact fullname="Rick van Rein"/>, + <contact fullname="Robert Wilton"/>, + <contact fullname="Roberto Polli"/>, + <contact fullname="Roman Danyliw"/>, + <contact fullname="Samuel Williams"/>, + <contact fullname="Semyon Kholodnov"/>, + <contact fullname="Simon Pieters"/>, + <contact fullname="Simon Schüppel"/>, + <contact fullname="Stefan Eissing"/>, + <contact fullname="Taylor Hunt"/>, + <contact fullname="Todd Greer"/>, + <contact fullname="Tommy Pauly"/>, + <contact fullname="Vasiliy Faronov"/>, + <contact fullname="Vladimir Lashchev"/>, + <contact fullname="Wenbo Zhu"/>, + <contact fullname="William A. Rowe Jr."/>, + <contact fullname="Willy Tarreau"/>, + <contact fullname="Xingwei Liu"/>, + <contact fullname="Yishuai Li"/>, and + <contact fullname="Zaheduzzaman Sarker"/>. +</t> +</section> +</back> +</rfc> \ No newline at end of file diff --git a/test/fixtures/cache-tests/spec/rfc9111.html b/test/fixtures/cache-tests/spec/rfc9111.html new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/rfc9111.html @@ -0,0 +1,179 @@ +<!DOCTYPE html + SYSTEM "about:legacy-compat"> +<html lang="en"><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"><title>HTTP Caching</title><script type="application/javascript"> +function anchorRewrite() { + map = { }; + if (window.location.hash.length >= 1) { + var fragid = window.location.hash.substr(1); + if (fragid) { + if (! document.getElementById(fragid)) { + var prefix = "rfc."; + var mapped = map[fragid]; + if (mapped) { + window.location.hash = mapped; + } else if (fragid.indexOf("section-") == 0) { + window.location.hash = prefix + "section." + fragid.substring(8); + } else if (fragid.indexOf("appendix-") == 0) { + window.location.hash = prefix + "section." + fragid.substring(9); + } else if (fragid.indexOf("s-") == 0) { + var postfix = fragid.substring(2); + if (postfix.startsWith("abstract")) { + window.location.hash = prefix + postfix; + } else if (postfix.startsWith("note-")) { + window.location.hash = prefix + "note." + postfix.substring(5); + } else { + window.location.hash = prefix + "section." + postfix; + } + } else if (fragid.indexOf("p-") == 0) { + var r = fragid.substring(2); + var p = r.indexOf("-"); + if (p >= 0) { + window.location.hash = prefix + "section." + r.substring(0, p) + ".p." + r.substring(p + 1); + } + } + } + } + } +} +window.addEventListener('hashchange', anchorRewrite); +window.addEventListener('DOMContentLoaded', anchorRewrite); +</script><meta name="viewport" content="width=device-width, initial-scale=1"><meta property="og:type" content="article"><meta property="og:title" content="HTTP Caching"><meta property="og:description" content="The Hypertext Transfer Protocol (HTTP) is a stateless application-level protocol for distributed, collaborative, hypertext information systems. This document defines HTTP caches and the associated header fields that control cache behavior or indicate cacheable response messages. 
This document obsoletes RFC 7234."><meta property="og:url" content="https://httpwg.org/specs/rfc9111.html"><link rel="stylesheet" type="text/css" href="/spec/bootstrap.min.css"><style type="text/css"> + body { + padding-top: 80px; + padding-bottom: 80px; + position: relative; + } + .table.header th, .table.header td { + border-top: none; + padding: 0; + } + #sidebar { + margin-top: -10px; + height: 90%; + overflow-y: auto; + font-size: 90%; + } + #rfc\.meta { + width: 40%; + float: right + } + .toc ul { + list-style: none; + } + .filename { + color: rgb(119, 119, 119); + font-size: 23px; + font-weight: normal; + height: auto; + line-height: 23px; + } + dl { + margin-left: 1em; + } + dl.dl-horizontal: { + margin-left: 0; + } + dl > dt { + float: left; + margin-right: 1em; + } + dl.nohang > dt { + float: none; + } + dl > dd { + margin-bottom: .5em; + } + dl.compact > dd { + margin-bottom: 0em; + } + dl > dd > dl { + margin-top: 0.5em; + margin-bottom: 0em; + } + ul.empty { + list-style-type: none; + } + ul.empty li { + margin-top: .5em; + } + td.reference { + padding-right: 1em; + vertical-align: top; + } + .feedback { + position: fixed; + bottom: 5px; + right: 5px; + } + .fbbutton { + margin-left: 5px; + } + h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { + color: rgb(51, 51, 51); + } + span.tt { + font: 11pt consolas, monospace; + font-size-adjust: none; + } + div.banner { + background-color: #fee; + border: 2px solid #633; + padding: 8px 12px; + margin-bottom: 10px; + } + div.banner p { + font-size: 1.2em; + } + </style><link rel="stylesheet" type="text/css" href="/spec/style.css"><link rel="Contents" href="#rfc.toc"><link rel="Author" href="#rfc.authors"><link rel="Copyright" href="#rfc.copyrightnotice"><link rel="Index" href="#rfc.index"><link rel="Chapter" title="1 Introduction" href="#rfc.section.1"><link rel="Chapter" title="2 Overview of Cache Operation" href="#rfc.section.2"><link rel="Chapter" title="3 Storing Responses in Caches" href="#rfc.section.3"><link rel="Chapter" title="4 Constructing Responses from Caches" href="#rfc.section.4"><link rel="Chapter" title="5 Field Definitions" href="#rfc.section.5"><link rel="Chapter" title="6 Relationship to Applications and Other Caches" href="#rfc.section.6"><link rel="Chapter" title="7 Security Considerations" href="#rfc.section.7"><link rel="Chapter" title="8 IANA Considerations" href="#rfc.section.8"><link rel="Chapter" href="#rfc.section.9" title="9 References"><link rel="Appendix" title="A Collected ABNF" href="#rfc.section.A"><link rel="Appendix" title="B Changes from RFC 7234" href="#rfc.section.B"><link rel="Appendix" title="C Change Log" href="#rfc.section.C"><link rel="Appendix" title="Acknowledgments" href="#rfc.section.unnumbered-1"><meta name="generator" content="https://github.com/mnot/RFCBootstrap XSLT vendor: Saxonica http://www.saxonica.com/"><meta name="keywords" content="Hypertext Transfer Protocol, HTTP, HTTP Caching"><link rel="schema.dcterms" href="http://purl.org/dc/terms/"><meta name="dcterms.creator" content="Fielding, R."><meta name="dcterms.creator" content="Nottingham, M."><meta name="dcterms.creator" content="Reschke, J."><meta name="dcterms.identifier" content="urn:ietf:id:draft-ietf-httpbis-cache-latest"><meta name="dcterms.issued" content="2022-07-28"><meta name="dct.replaces" content="urn:ietf:rfc:7234"><meta name="dcterms.abstract" content="The Hypertext Transfer Protocol (HTTP) is a stateless application-level protocol for distributed, collaborative, hypertext information systems. 
This document defines HTTP caches and the associated header fields that control cache behavior or indicate cacheable response messages. This document obsoletes RFC 7234."><meta name="description" content="The Hypertext Transfer Protocol (HTTP) is a stateless application-level protocol for distributed, collaborative, hypertext information systems. This document defines HTTP caches and the associated header fields that control cache behavior or indicate cacheable response messages. This document obsoletes RFC 7234."></head><body><div class="container" id="top"><div class="row"><div class="col-lg-4 order-last d-none d-lg-block" id="sidebar" role="navigation"><div class="navbar"><div class="navbar-brand"><a href="#top"></a></div><br clear="all"><div class=""><div class="toc "><ul><li><a href="#rfc.section.1">1.</a>&nbsp;&nbsp;&nbsp;<a href="#caching">Introduction</a><ul><li><a href="#rfc.section.1.1">1.1.</a>&nbsp;&nbsp;&nbsp;<a href="#requirements.notation">Requirements Notation</a></li><li><a href="#rfc.section.1.2">1.2.</a>&nbsp;&nbsp;&nbsp;<a href="#notation">Syntax Notation</a></li><li><a href="#rfc.section.1.3">1.3.</a>&nbsp;&nbsp;&nbsp;<a href="#delta-seconds">Delta Seconds</a></li></ul></li><li><a href="#rfc.section.2">2.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.overview">Overview of Cache Operation</a></li><li><a href="#rfc.section.3">3.</a>&nbsp;&nbsp;&nbsp;<a href="#response.cacheability">Storing Responses in Caches</a><ul><li><a href="#rfc.section.3.1">3.1.</a>&nbsp;&nbsp;&nbsp;<a href="#storing.fields">Storing Header and Trailer Fields</a></li><li><a href="#rfc.section.3.2">3.2.</a>&nbsp;&nbsp;&nbsp;<a href="#update">Updating Stored Header Fields</a></li><li><a href="#rfc.section.3.3">3.3.</a>&nbsp;&nbsp;&nbsp;<a href="#incomplete.responses">Storing Incomplete Responses</a></li><li><a href="#rfc.section.3.4">3.4.</a>&nbsp;&nbsp;&nbsp;<a href="#combining.responses">Combining Partial Content</a></li><li><a href="#rfc.section.3.5">3.5.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.authenticated.responses">Storing Responses to Authenticated Requests</a></li></ul></li><li><a href="#rfc.section.4">4.</a>&nbsp;&nbsp;&nbsp;<a href="#constructing.responses.from.caches">Constructing Responses from Caches</a><ul><li><a href="#rfc.section.4.1">4.1.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.negotiated.responses">Calculating Cache Keys with Vary</a></li><li><a href="#rfc.section.4.2">4.2.</a>&nbsp;&nbsp;&nbsp;<a href="#expiration.model">Freshness</a><ul><li><a href="#rfc.section.4.2.1">4.2.1.</a>&nbsp;&nbsp;&nbsp;<a href="#calculating.freshness.lifetime">Calculating Freshness Lifetime</a></li><li><a href="#rfc.section.4.2.2">4.2.2.</a>&nbsp;&nbsp;&nbsp;<a href="#heuristic.freshness">Calculating Heuristic Freshness</a></li><li><a href="#rfc.section.4.2.3">4.2.3.</a>&nbsp;&nbsp;&nbsp;<a href="#age.calculations">Calculating Age</a></li><li><a href="#rfc.section.4.2.4">4.2.4.</a>&nbsp;&nbsp;&nbsp;<a href="#serving.stale.responses">Serving Stale Responses</a></li></ul></li><li><a href="#rfc.section.4.3">4.3.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.model">Validation</a><ul><li><a href="#rfc.section.4.3.1">4.3.1.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.sent">Sending a Validation Request</a></li><li><a href="#rfc.section.4.3.2">4.3.2.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.received">Handling a Received Validation Request</a></li><li><a href="#rfc.section.4.3.3">4.3.3.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.response">Handling a Validation Response</a></li><li><a 
href="#rfc.section.4.3.4">4.3.4.</a>&nbsp;&nbsp;&nbsp;<a href="#freshening.responses">Freshening Stored Responses upon Validation</a></li><li><a href="#rfc.section.4.3.5">4.3.5.</a>&nbsp;&nbsp;&nbsp;<a href="#head.effects">Freshening Responses with HEAD</a></li></ul></li><li><a href="#rfc.section.4.4">4.4.</a>&nbsp;&nbsp;&nbsp;<a href="#invalidation">Invalidating Stored Responses</a></li></ul></li><li><a href="#rfc.section.5">5.</a>&nbsp;&nbsp;&nbsp;<a href="#header.field.definitions">Field Definitions</a><ul><li><a href="#rfc.section.5.1">5.1.</a>&nbsp;&nbsp;&nbsp;<a href="#field.age">Age</a></li><li><a href="#rfc.section.5.2">5.2.</a>&nbsp;&nbsp;&nbsp;<a href="#field.cache-control">Cache-Control</a><ul><li><a href="#rfc.section.5.2.1">5.2.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive">Request Cache-Control Directives</a><ul><li><a href="#rfc.section.5.2.1.1">5.2.1.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.max-age">max-age</a></li><li><a href="#rfc.section.5.2.1.2">5.2.1.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.max-stale">max-stale</a></li><li><a href="#rfc.section.5.2.1.3">5.2.1.3.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.min-fresh">min-fresh</a></li><li><a href="#rfc.section.5.2.1.4">5.2.1.4.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.no-cache">no-cache</a></li><li><a href="#rfc.section.5.2.1.5">5.2.1.5.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.no-store">no-store</a></li><li><a href="#rfc.section.5.2.1.6">5.2.1.6.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.no-transform">no-transform</a></li><li><a href="#rfc.section.5.2.1.7">5.2.1.7.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.only-if-cached">only-if-cached</a></li></ul></li><li><a href="#rfc.section.5.2.2">5.2.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive">Response Cache-Control Directives</a><ul><li><a href="#rfc.section.5.2.2.1">5.2.2.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.max-age">max-age</a></li><li><a href="#rfc.section.5.2.2.2">5.2.2.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.must-revalidate">must-revalidate</a></li><li><a href="#rfc.section.5.2.2.3">5.2.2.3.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.must-understand">must-understand</a></li><li><a href="#rfc.section.5.2.2.4">5.2.2.4.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.no-cache">no-cache</a></li><li><a href="#rfc.section.5.2.2.5">5.2.2.5.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.no-store">no-store</a></li><li><a href="#rfc.section.5.2.2.6">5.2.2.6.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.no-transform">no-transform</a></li><li><a href="#rfc.section.5.2.2.7">5.2.2.7.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.private">private</a></li><li><a href="#rfc.section.5.2.2.8">5.2.2.8.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.proxy-revalidate">proxy-revalidate</a></li><li><a href="#rfc.section.5.2.2.9">5.2.2.9.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.public">public</a></li><li><a href="#rfc.section.5.2.2.10">5.2.2.10.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.s-maxage">s-maxage</a></li></ul></li><li><a href="#rfc.section.5.2.3">5.2.3.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.control.extensions">Cache Control Extensions</a></li><li><a href="#rfc.section.5.2.4">5.2.4.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.directive.registry">Cache Directive Registry</a></li></ul></li><li><a 
href="#rfc.section.5.3">5.3.</a>&nbsp;&nbsp;&nbsp;<a href="#field.expires">Expires</a></li><li><a href="#rfc.section.5.4">5.4.</a>&nbsp;&nbsp;&nbsp;<a href="#field.pragma">Pragma</a></li><li><a href="#rfc.section.5.5">5.5.</a>&nbsp;&nbsp;&nbsp;<a href="#field.warning">Warning</a></li></ul></li><li><a href="#rfc.section.6">6.</a>&nbsp;&nbsp;&nbsp;<a href="#history.lists">Relationship to Applications and Other Caches</a></li><li><a href="#rfc.section.7">7.</a>&nbsp;&nbsp;&nbsp;<a href="#security.considerations">Security Considerations</a><ul><li><a href="#rfc.section.7.1">7.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.poisoning">Cache Poisoning</a></li><li><a href="#rfc.section.7.2">7.2.</a>&nbsp;&nbsp;&nbsp;<a href="#security.timing">Timing Attacks</a></li><li><a href="#rfc.section.7.3">7.3.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.of.sensitive.information">Caching of Sensitive Information</a></li></ul></li><li><a href="#rfc.section.8">8.</a>&nbsp;&nbsp;&nbsp;<a href="#iana.considerations">IANA Considerations</a><ul><li><a href="#rfc.section.8.1">8.1.</a>&nbsp;&nbsp;&nbsp;<a href="#field.name.registration">Field Name Registration</a></li><li><a href="#rfc.section.8.2">8.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.directive.registration">Cache Directive Registration</a></li><li><a href="#rfc.section.8.3">8.3.</a>&nbsp;&nbsp;&nbsp;<a href="#warn.code.registration">Warn Code Registry</a></li></ul></li><li><a href="#rfc.section.9">9.</a>&nbsp;&nbsp;&nbsp;<a href="#rfc.references">References</a><ul><li><a href="#rfc.section.9.1">9.1.</a>&nbsp;&nbsp;&nbsp;<a href="#rfc.references.1">Normative References</a></li><li><a href="#rfc.section.9.2">9.2.</a>&nbsp;&nbsp;&nbsp;<a href="#rfc.references.2">Informative References</a></li></ul></li><li><a href="#rfc.section.A">A.</a>&nbsp;&nbsp;&nbsp;<a href="#collected.abnf">Collected ABNF</a></li><li><a href="#rfc.section.B">B.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.from.rfc.7234">Changes from RFC 7234</a></li><li><a href="#rfc.section.C">C.</a>&nbsp;&nbsp;&nbsp;<a href="#change.log">Change Log</a><ul><li><a href="#rfc.section.C.1">C.1.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.publication.as.rfc">Between RFC7234 and draft 00</a></li><li><a href="#rfc.section.C.2">C.2.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.00">Since draft-ietf-httpbis-cache-00</a></li><li><a href="#rfc.section.C.3">C.3.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.01">Since draft-ietf-httpbis-cache-01</a></li><li><a href="#rfc.section.C.4">C.4.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.02">Since draft-ietf-httpbis-cache-02</a></li><li><a href="#rfc.section.C.5">C.5.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.03">Since draft-ietf-httpbis-cache-03</a></li><li><a href="#rfc.section.C.6">C.6.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.04">Since draft-ietf-httpbis-cache-04</a></li><li><a href="#rfc.section.C.7">C.7.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.05">Since draft-ietf-httpbis-cache-05</a></li><li><a href="#rfc.section.C.8">C.8.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.06">Since draft-ietf-httpbis-cache-06</a></li><li><a href="#rfc.section.C.9">C.9.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.07">Since draft-ietf-httpbis-cache-07</a></li><li><a href="#rfc.section.C.10">C.10.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.08">Since draft-ietf-httpbis-cache-08</a></li><li><a href="#rfc.section.C.11">C.11.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.09">Since draft-ietf-httpbis-cache-09</a></li><li><a href="#rfc.section.C.12">C.12.</a>&nbsp;&nbsp;&nbsp;<a 
href="#changes.since.10">Since draft-ietf-httpbis-cache-10</a></li><li><a href="#rfc.section.C.13">C.13.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.11">Since draft-ietf-httpbis-cache-11</a></li><li><a href="#rfc.section.C.14">C.14.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.12">Since draft-ietf-httpbis-cache-12</a></li><li><a href="#rfc.section.C.15">C.15.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.13">Since draft-ietf-httpbis-cache-13</a></li><li><a href="#rfc.section.C.16">C.16.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.14">Since draft-ietf-httpbis-cache-14</a></li></ul></li><li><a href="#acks">Acknowledgments</a></li><li><a href="#rfc.index">Index</a></li><li><a href="#rfc.authors">Authors' Addresses</a></li></ul></div></div></div></div><div class="col-lg-8 order-first main" role="main"><header><table class="table table-condensed header" id="rfc.headerblock"><tbody><tr><td class="text-left">HTTP Working Group</td><td class="text-right">R. Fielding, Editor</td></tr><tr><td class="text-left">Internet-Draft</td><td class="text-right">Adobe</td></tr><tr><td class="text-left">Obsoletes: <a href="https://tools.ietf.org/html/rfc7234">7234</a> (if approved)</td><td class="text-right">M. Nottingham, Editor</td></tr><tr><td class="text-left">Intended status: Standards Track</td><td class="text-right">Fastly</td></tr><tr><td class="text-left">Expires: January 29, 2023</td><td class="text-right">J. Reschke, Editor</td></tr><tr><td class="text-left"></td><td class="text-right">greenbytes</td></tr><tr><td class="text-left"></td><td class="text-right">July 28, 2022</td></tr></tbody></table><div id="rfc.title"><h1>HTTP Caching</h1><div class="filename">draft-ietf-httpbis-cache-latest</div></div></header><hr><h2 id="rfc.abstract"><a href="#rfc.abstract">Abstract</a></h2><div class="lead"><div id="rfc.abstract.p.1"><p>The Hypertext Transfer Protocol (HTTP) is a stateless application-level protocol for distributed, collaborative, hypertext information systems. 
This document defines HTTP caches and the associated header fields that control cache behavior or indicate cacheable response messages.</p></div><div id="rfc.abstract.p.2"><p>This document obsoletes RFC 7234.</p></div></div><div class="banner"><p>This copy of the specification has test results interspersed throughout; click on ℹ️ to see them.</p></div><section id="rfc.note.1" class="note"><h2><a href="#rfc.note.1">Editorial Note</a></h2><div id="rfc.note.1.p.1"><p>This note is to be removed before publishing as an RFC.</p></div><div id="rfc.note.1.p.2"><p>Discussion of this draft takes place on the HTTP working group mailing list (ietf-http-wg@w3.org), which is archived at &lt;<a href="https://lists.w3.org/Archives/Public/ietf-http-wg/">https://lists.w3.org/Archives/Public/ietf-http-wg/</a>&gt;.</p></div><div id="rfc.note.1.p.3"><p>Working Group information can be found at &lt;<a href="https://httpwg.org/">https://httpwg.org/</a>&gt;; source code and issues list for this draft can be found at &lt;<a href="https://github.com/httpwg/http-core">https://github.com/httpwg/http-core</a>&gt;.</p></div><div id="rfc.note.1.p.4"><p>The changes in this draft are summarized in <a href="#changes.since.14" title="Since draft-ietf-httpbis-cache-14">Appendix&nbsp;C.16</a>.</p></div></section><section id="rfc.status"><h2><a href="#rfc.status">Status of This Memo</a></h2><div id="rfc.boilerplate.1.p.1"><p>This Internet-Draft is submitted in full conformance with the provisions of BCP 78 and BCP 79.</p></div><div id="rfc.boilerplate.1.p.2"><p>Internet-Drafts are working documents of the Internet Engineering Task Force (IETF). Note that other groups may also distribute working documents as Internet-Drafts. The list of current Internet-Drafts is at <a href="http://datatracker.ietf.org/drafts/current/">http://datatracker.ietf.org/drafts/current/</a>.</p></div><div id="rfc.boilerplate.1.p.3"><p>Internet-Drafts are draft documents valid for a maximum of six months and may be updated, replaced, or obsoleted by other documents at any time. It is inappropriate to use Internet-Drafts as reference material or to cite them other than as “work in progress”.</p></div><div id="rfc.boilerplate.1.p.4"><p>This Internet-Draft will expire on January 29, 2023.</p></div></section><section id="rfc.copyrightnotice"><h2><a href="#rfc.copyrightnotice">Copyright Notice</a></h2><div id="rfc.boilerplate.2.p.1"><p>Copyright © 2022 IETF Trust and the persons identified as the document authors. All rights reserved.</p></div><div id="rfc.boilerplate.2.p.2"><p>This document is subject to BCP 78 and the IETF Trust's Legal Provisions Relating to IETF Documents (<a href="http://trustee.ietf.org/license-info">http://trustee.ietf.org/license-info</a>) in effect on the date of publication of this document. Please review these documents carefully, as they describe your rights and restrictions with respect to this document. Code Components extracted from this document must include Simplified BSD License text as described in Section 4.e of the Trust Legal Provisions and are provided without warranty as described in the Simplified BSD License.</p></div><div id="rfc.boilerplate.2.p.3"><p>This document may contain material from IETF Documents or IETF Contributions published or made publicly available before November 10, 2008. The person(s) controlling the copyright in some of this material may not have granted the IETF Trust the right to allow modifications of such material outside the IETF Standards Process. 
Without obtaining an adequate license from the person(s) controlling the copyright in such materials, this document may not be modified outside the IETF Standards Process, and derivative works of it may not be created outside the IETF Standards Process, except to format it for publication as an RFC or to translate it into languages other than English.</p></div></section><div class="toc d-lg-none"><ul><li><a href="#rfc.section.1">1.</a>&nbsp;&nbsp;&nbsp;<a href="#caching">Introduction</a><ul><li><a href="#rfc.section.1.1">1.1.</a>&nbsp;&nbsp;&nbsp;<a href="#requirements.notation">Requirements Notation</a></li><li><a href="#rfc.section.1.2">1.2.</a>&nbsp;&nbsp;&nbsp;<a href="#notation">Syntax Notation</a></li><li><a href="#rfc.section.1.3">1.3.</a>&nbsp;&nbsp;&nbsp;<a href="#delta-seconds">Delta Seconds</a></li></ul></li><li><a href="#rfc.section.2">2.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.overview">Overview of Cache Operation</a></li><li><a href="#rfc.section.3">3.</a>&nbsp;&nbsp;&nbsp;<a href="#response.cacheability">Storing Responses in Caches</a><ul><li><a href="#rfc.section.3.1">3.1.</a>&nbsp;&nbsp;&nbsp;<a href="#storing.fields">Storing Header and Trailer Fields</a></li><li><a href="#rfc.section.3.2">3.2.</a>&nbsp;&nbsp;&nbsp;<a href="#update">Updating Stored Header Fields</a></li><li><a href="#rfc.section.3.3">3.3.</a>&nbsp;&nbsp;&nbsp;<a href="#incomplete.responses">Storing Incomplete Responses</a></li><li><a href="#rfc.section.3.4">3.4.</a>&nbsp;&nbsp;&nbsp;<a href="#combining.responses">Combining Partial Content</a></li><li><a href="#rfc.section.3.5">3.5.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.authenticated.responses">Storing Responses to Authenticated Requests</a></li></ul></li><li><a href="#rfc.section.4">4.</a>&nbsp;&nbsp;&nbsp;<a href="#constructing.responses.from.caches">Constructing Responses from Caches</a><ul><li><a href="#rfc.section.4.1">4.1.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.negotiated.responses">Calculating Cache Keys with Vary</a></li><li><a href="#rfc.section.4.2">4.2.</a>&nbsp;&nbsp;&nbsp;<a href="#expiration.model">Freshness</a><ul><li><a href="#rfc.section.4.2.1">4.2.1.</a>&nbsp;&nbsp;&nbsp;<a href="#calculating.freshness.lifetime">Calculating Freshness Lifetime</a></li><li><a href="#rfc.section.4.2.2">4.2.2.</a>&nbsp;&nbsp;&nbsp;<a href="#heuristic.freshness">Calculating Heuristic Freshness</a></li><li><a href="#rfc.section.4.2.3">4.2.3.</a>&nbsp;&nbsp;&nbsp;<a href="#age.calculations">Calculating Age</a></li><li><a href="#rfc.section.4.2.4">4.2.4.</a>&nbsp;&nbsp;&nbsp;<a href="#serving.stale.responses">Serving Stale Responses</a></li></ul></li><li><a href="#rfc.section.4.3">4.3.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.model">Validation</a><ul><li><a href="#rfc.section.4.3.1">4.3.1.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.sent">Sending a Validation Request</a></li><li><a href="#rfc.section.4.3.2">4.3.2.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.received">Handling a Received Validation Request</a></li><li><a href="#rfc.section.4.3.3">4.3.3.</a>&nbsp;&nbsp;&nbsp;<a href="#validation.response">Handling a Validation Response</a></li><li><a href="#rfc.section.4.3.4">4.3.4.</a>&nbsp;&nbsp;&nbsp;<a href="#freshening.responses">Freshening Stored Responses upon Validation</a></li><li><a href="#rfc.section.4.3.5">4.3.5.</a>&nbsp;&nbsp;&nbsp;<a href="#head.effects">Freshening Responses with HEAD</a></li></ul></li><li><a href="#rfc.section.4.4">4.4.</a>&nbsp;&nbsp;&nbsp;<a href="#invalidation">Invalidating Stored Responses</a></li></ul></li><li><a 
href="#rfc.section.5">5.</a>&nbsp;&nbsp;&nbsp;<a href="#header.field.definitions">Field Definitions</a><ul><li><a href="#rfc.section.5.1">5.1.</a>&nbsp;&nbsp;&nbsp;<a href="#field.age">Age</a></li><li><a href="#rfc.section.5.2">5.2.</a>&nbsp;&nbsp;&nbsp;<a href="#field.cache-control">Cache-Control</a><ul><li><a href="#rfc.section.5.2.1">5.2.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive">Request Cache-Control Directives</a><ul><li><a href="#rfc.section.5.2.1.1">5.2.1.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.max-age">max-age</a></li><li><a href="#rfc.section.5.2.1.2">5.2.1.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.max-stale">max-stale</a></li><li><a href="#rfc.section.5.2.1.3">5.2.1.3.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.min-fresh">min-fresh</a></li><li><a href="#rfc.section.5.2.1.4">5.2.1.4.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.no-cache">no-cache</a></li><li><a href="#rfc.section.5.2.1.5">5.2.1.5.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.no-store">no-store</a></li><li><a href="#rfc.section.5.2.1.6">5.2.1.6.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.no-transform">no-transform</a></li><li><a href="#rfc.section.5.2.1.7">5.2.1.7.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-request-directive.only-if-cached">only-if-cached</a></li></ul></li><li><a href="#rfc.section.5.2.2">5.2.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive">Response Cache-Control Directives</a><ul><li><a href="#rfc.section.5.2.2.1">5.2.2.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.max-age">max-age</a></li><li><a href="#rfc.section.5.2.2.2">5.2.2.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.must-revalidate">must-revalidate</a></li><li><a href="#rfc.section.5.2.2.3">5.2.2.3.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.must-understand">must-understand</a></li><li><a href="#rfc.section.5.2.2.4">5.2.2.4.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.no-cache">no-cache</a></li><li><a href="#rfc.section.5.2.2.5">5.2.2.5.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.no-store">no-store</a></li><li><a href="#rfc.section.5.2.2.6">5.2.2.6.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.no-transform">no-transform</a></li><li><a href="#rfc.section.5.2.2.7">5.2.2.7.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.private">private</a></li><li><a href="#rfc.section.5.2.2.8">5.2.2.8.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.proxy-revalidate">proxy-revalidate</a></li><li><a href="#rfc.section.5.2.2.9">5.2.2.9.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.public">public</a></li><li><a href="#rfc.section.5.2.2.10">5.2.2.10.</a>&nbsp;&nbsp;&nbsp;<a href="#cache-response-directive.s-maxage">s-maxage</a></li></ul></li><li><a href="#rfc.section.5.2.3">5.2.3.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.control.extensions">Cache Control Extensions</a></li><li><a href="#rfc.section.5.2.4">5.2.4.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.directive.registry">Cache Directive Registry</a></li></ul></li><li><a href="#rfc.section.5.3">5.3.</a>&nbsp;&nbsp;&nbsp;<a href="#field.expires">Expires</a></li><li><a href="#rfc.section.5.4">5.4.</a>&nbsp;&nbsp;&nbsp;<a href="#field.pragma">Pragma</a></li><li><a href="#rfc.section.5.5">5.5.</a>&nbsp;&nbsp;&nbsp;<a href="#field.warning">Warning</a></li></ul></li><li><a href="#rfc.section.6">6.</a>&nbsp;&nbsp;&nbsp;<a href="#history.lists">Relationship to Applications and Other Caches</a></li><li><a 
href="#rfc.section.7">7.</a>&nbsp;&nbsp;&nbsp;<a href="#security.considerations">Security Considerations</a><ul><li><a href="#rfc.section.7.1">7.1.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.poisoning">Cache Poisoning</a></li><li><a href="#rfc.section.7.2">7.2.</a>&nbsp;&nbsp;&nbsp;<a href="#security.timing">Timing Attacks</a></li><li><a href="#rfc.section.7.3">7.3.</a>&nbsp;&nbsp;&nbsp;<a href="#caching.of.sensitive.information">Caching of Sensitive Information</a></li></ul></li><li><a href="#rfc.section.8">8.</a>&nbsp;&nbsp;&nbsp;<a href="#iana.considerations">IANA Considerations</a><ul><li><a href="#rfc.section.8.1">8.1.</a>&nbsp;&nbsp;&nbsp;<a href="#field.name.registration">Field Name Registration</a></li><li><a href="#rfc.section.8.2">8.2.</a>&nbsp;&nbsp;&nbsp;<a href="#cache.directive.registration">Cache Directive Registration</a></li><li><a href="#rfc.section.8.3">8.3.</a>&nbsp;&nbsp;&nbsp;<a href="#warn.code.registration">Warn Code Registry</a></li></ul></li><li><a href="#rfc.section.9">9.</a>&nbsp;&nbsp;&nbsp;<a href="#rfc.references">References</a><ul><li><a href="#rfc.section.9.1">9.1.</a>&nbsp;&nbsp;&nbsp;<a href="#rfc.references.1">Normative References</a></li><li><a href="#rfc.section.9.2">9.2.</a>&nbsp;&nbsp;&nbsp;<a href="#rfc.references.2">Informative References</a></li></ul></li><li><a href="#rfc.section.A">A.</a>&nbsp;&nbsp;&nbsp;<a href="#collected.abnf">Collected ABNF</a></li><li><a href="#rfc.section.B">B.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.from.rfc.7234">Changes from RFC 7234</a></li><li><a href="#rfc.section.C">C.</a>&nbsp;&nbsp;&nbsp;<a href="#change.log">Change Log</a><ul><li><a href="#rfc.section.C.1">C.1.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.publication.as.rfc">Between RFC7234 and draft 00</a></li><li><a href="#rfc.section.C.2">C.2.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.00">Since draft-ietf-httpbis-cache-00</a></li><li><a href="#rfc.section.C.3">C.3.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.01">Since draft-ietf-httpbis-cache-01</a></li><li><a href="#rfc.section.C.4">C.4.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.02">Since draft-ietf-httpbis-cache-02</a></li><li><a href="#rfc.section.C.5">C.5.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.03">Since draft-ietf-httpbis-cache-03</a></li><li><a href="#rfc.section.C.6">C.6.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.04">Since draft-ietf-httpbis-cache-04</a></li><li><a href="#rfc.section.C.7">C.7.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.05">Since draft-ietf-httpbis-cache-05</a></li><li><a href="#rfc.section.C.8">C.8.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.06">Since draft-ietf-httpbis-cache-06</a></li><li><a href="#rfc.section.C.9">C.9.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.07">Since draft-ietf-httpbis-cache-07</a></li><li><a href="#rfc.section.C.10">C.10.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.08">Since draft-ietf-httpbis-cache-08</a></li><li><a href="#rfc.section.C.11">C.11.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.09">Since draft-ietf-httpbis-cache-09</a></li><li><a href="#rfc.section.C.12">C.12.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.10">Since draft-ietf-httpbis-cache-10</a></li><li><a href="#rfc.section.C.13">C.13.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.11">Since draft-ietf-httpbis-cache-11</a></li><li><a href="#rfc.section.C.14">C.14.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.12">Since draft-ietf-httpbis-cache-12</a></li><li><a href="#rfc.section.C.15">C.15.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.13">Since 
draft-ietf-httpbis-cache-13</a></li><li><a href="#rfc.section.C.16">C.16.</a>&nbsp;&nbsp;&nbsp;<a href="#changes.since.14">Since draft-ietf-httpbis-cache-14</a></li></ul></li><li><a href="#acks">Acknowledgments</a></li><li><a href="#rfc.index">Index</a></li><li><a href="#rfc.authors">Authors' Addresses</a></li></ul></div><section id="caching"><h2 id="rfc.section.1" class="np"><a href="#rfc.section.1">1.</a>&nbsp;<a href="#caching">Introduction</a></h2><div id="rfc.section.1.p.1"><p>The Hypertext Transfer Protocol (HTTP) is a stateless application-level request/response protocol that uses extensible semantics and self-descriptive messages for flexible interaction with network-based hypertext information systems. It is typically used for distributed information systems, where the use of response caches can improve performance. This document defines aspects of HTTP related to caching and reusing response messages.</p></div><div id="rfc.iref.c.1"></div><div id="rfc.section.1.p.2"><p>An HTTP cache is a local store of response messages and the subsystem that controls storage, retrieval, and deletion of messages in it. A cache stores cacheable responses to reduce the response time and network bandwidth consumption on future equivalent requests. Any client or server MAY use a cache, though not when acting as a tunnel.</p></div><div id="rfc.iref.s.1"></div><div id="rfc.iref.p.1"></div><div id="rfc.section.1.p.3"><p id="shared.and.private.caches">A shared cache is a cache that stores responses for reuse by more than one user; shared caches are usually (but not always) deployed as a part of an intermediary. A private cache, in contrast, is dedicated to a single user; often, they are deployed as a component of a user agent.</p></div><div id="rfc.section.1.p.4"><p>HTTP caching's goal is significantly improving performance by reusing a prior response message to satisfy a current request. A cache considers a stored response "fresh", as defined in <a href="#expiration.model" title="Freshness">Section&nbsp;4.2</a>, if it can be reused without "validation" (checking with the origin server to see if the cached response remains valid for this request). A fresh response can therefore reduce both latency and network overhead each time the cache reuses it. 
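As a minimal illustrative sketch of the freshness notion introduced above (not part of the specification; the precise freshness-lifetime and age calculations are defined later in this document, and the names below are hypothetical):

```java
// Illustrative sketch only: a stored response is considered fresh while its
// age has not exceeded its freshness lifetime. Both inputs are in seconds.
final class Freshness {

  static boolean isFresh(long freshnessLifetimeSeconds, long currentAgeSeconds) {
    // Fresh responses can be reused without contacting the origin server;
    // stale ones need validation or an explicit allowance to be served.
    return freshnessLifetimeSeconds > currentAgeSeconds;
  }
}
```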
When a cached response is not fresh, it might still be reusable if validation can freshen it (<a href="#validation.model" title="Validation">Section&nbsp;4.3</a>) or if the origin is unavailable (<a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>).</p></div><div id="rfc.section.1.p.5"><p>This document obsoletes <a href="#RFC7234">RFC 7234</a>, with the changes being summarized in <a href="#changes.from.rfc.7234" title="Changes from RFC 7234">Appendix&nbsp;B</a>.</p></div><section id="requirements.notation"><h3 id="rfc.section.1.1"><a href="#rfc.section.1.1">1.1.</a>&nbsp;<a href="#requirements.notation">Requirements Notation</a></h3><div id="rfc.section.1.1.p.1"><p>The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "NOT RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be interpreted as described in BCP 14 <a href="#RFC2119"><cite title="Key words for use in RFCs to Indicate Requirement Levels">[RFC2119]</cite></a> <a href="#RFC8174"><cite title="Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words">[RFC8174]</cite></a> when, and only when, they appear in all capitals, as shown here.</p></div><div id="rfc.section.1.1.p.2"><p>Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a> defines conformance criteria and contains considerations regarding error handling.</p></div></section><section id="notation"><h3 id="rfc.section.1.2"><a href="#rfc.section.1.2">1.2.</a>&nbsp;<a href="#notation">Syntax Notation</a></h3><div id="rfc.section.1.2.p.1"><p>This specification uses the Augmented Backus-Naur Form (ABNF) notation of <a href="#RFC5234"><cite title="Augmented BNF for Syntax Specifications: ABNF">[RFC5234]</cite></a>, extended with the notation for case-sensitivity in strings defined in <a href="#RFC7405"><cite title="Case-Sensitive String Support in ABNF">[RFC7405]</cite></a>.</p></div><div id="rfc.section.1.2.p.2"><p>It also uses a list extension, defined in Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>, that allows for compact definition of comma-separated lists using a '#' operator (similar to how the '*' operator indicates repetition). 
<a href="#collected.abnf" title="Collected ABNF">Appendix&nbsp;A</a> shows the collected grammar with all list operators expanded to standard ABNF notation.</p></div><div id="rfc.section.1.2.p.3"><p id="core.rules">The following core rule is included by reference, as defined in <a href="#RFC5234"><cite title="Augmented BNF for Syntax Specifications: ABNF">[RFC5234]</cite></a>, Appendix B.1: DIGIT (decimal 0-9).</p></div><div id="rfc.section.1.2.p.4" class="avoidbreakafter"><p id="imported.rules"><a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a> defines the following rules:</p></div><div id="rfc.figure.u.1"><pre class="inline"> + HTTP-date = &lt;HTTP-date, see [Semantics], Appendix ERROR: Anchor 'http.date' in Semantics not found in source file 'rfc9112.xml'.&gt; + OWS = &lt;OWS, see [Semantics], Appendix ERROR: Anchor 'whitespace' in Semantics not found in source file 'rfc9112.xml'.&gt; + field-name = &lt;field-name, see [Semantics], Appendix ERROR: Anchor 'field-names' in Semantics not found in source file 'rfc9112.xml'.&gt; + quoted-string = &lt;quoted-string, see [Semantics], Appendix ERROR: Anchor 'quoted.strings' in Semantics not found in source file 'rfc9112.xml'.&gt; + token = &lt;token, see [Semantics], Appendix ERROR: Anchor 'tokens' in Semantics not found in source file 'rfc9112.xml'.&gt; +</pre></div></section><section id="delta-seconds"><h3 id="rfc.section.1.3"><a href="#rfc.section.1.3">1.3.</a>&nbsp;<a href="#delta-seconds">Delta Seconds</a></h3><div id="rfc.section.1.3.p.1"><p>The delta-seconds rule specifies a non-negative integer, representing time in seconds.</p></div><div id="rfc.figure.u.2"><div id="rfc.iref.g.1"></div><pre class="inline"> + delta-seconds = 1*DIGIT +</pre></div><div id="rfc.section.1.3.p.2"><p>A recipient parsing a delta-seconds value and converting it to binary form ought to use an arithmetic type of at least 31 bits of non-negative integer range. If a cache receives a delta-seconds value greater than the greatest integer it can represent, or if any of its subsequent calculations overflows, the cache MUST consider the value to be 2147483648 (2^31) or the greatest positive integer it can conveniently represent.</p></div><div id="rfc.section.1.3.p.3"><ul class="empty"><li>Note: The value 2147483648 is here for historical reasons, represents infinity (over 68 years), and does not need to be stored in binary form; an implementation could produce it as a canned string if any overflow occurs, even if the calculations are performed with an arithmetic type incapable of directly representing that number. What matters here is that an overflow be detected and not treated as a negative value in later calculations.</li></ul></div></section></section><section id="caching.overview"><h2 id="rfc.section.2"><a href="#rfc.section.2">2.</a>&nbsp;<a href="#caching.overview">Overview of Cache Operation</a></h2><div id="rfc.section.2.p.1"><p>Proper cache operation preserves the semantics of HTTP transfers (<a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) while reducing the transmission of information already held in the cache. Although caching is an entirely OPTIONAL feature of HTTP, it can be assumed that reusing a cached response is desirable and that such reuse is the default behavior when no requirement or local configuration prevents it. 
Therefore, HTTP cache requirements are focused on preventing a cache from either storing a non-reusable response or reusing a stored response inappropriately, rather than mandating that caches always store and reuse particular responses.</p></div><div id="rfc.iref.c.2"></div><div id="rfc.section.2.p.2"><p>The cache key is the information a cache uses to select a response and is comprised of, at a minimum, the request method and target URI used to retrieve the stored response; the method determines under which circumstances that response can be used to satisfy a subsequent request. However, many HTTP caches in common use today only cache GET responses, and therefore only use the URI as the cache key, forwarding other methods.</p></div><div id="rfc.section.2.p.3"><p>If a request target is subject to content negotiation, the cache might store multiple responses for it. Caches differentiate these responses by incorporating values of the original request's selecting header fields into the cache key as well, using information in the Vary response header field, as per <a href="#caching.negotiated.responses" title="Calculating Cache Keys with Vary">Section&nbsp;4.1</a>.</p></div><div id="rfc.section.2.p.4"><p>Caches might incorporate additional material into the cache key. For example, user agent caches might include the referring site's identity, thereby "double keying" the cache to avoid some privacy risks (see <a href="#security.timing" title="Timing Attacks">Section&nbsp;7.2</a>).</p></div><div id="rfc.section.2.p.5"><p>Most commonly, caches store the successful result of a retrieval request: i.e., a 200 (OK) response to a GET request, which contains a representation of the target resource (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>). However, it is also possible to store redirects, negative results (e.g., 404 (Not Found)), incomplete results (e.g., 206 (Partial Content)), and responses to methods other than GET if the method's definition allows such caching and defines something suitable for use as a cache key.</p></div><div id="rfc.section.2.p.6"><p>A cache is disconnected when it cannot contact the origin server or otherwise find a forward path for a request. 
A disconnected cache can serve stale responses in some circumstances (<a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>).</p></div></section><section id="response.cacheability"><h2 id="rfc.section.3"><a href="#rfc.section.3">3.</a>&nbsp;<a href="#response.cacheability">Storing Responses in Caches</a></h2><div id="rfc.section.3.p.1" class="avoidbreakafter"><p>A cache MUST NOT store a response to a request unless:</p></div><div id="rfc.section.3.p.2"><ul><li>the request method is understood by the cache;</li><li>the response status code is final (see Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>);</li><li>if the response status code is 206 or 304, or the "must-understand" cache directive (see <a href="#cache-response-directive.must-understand" title="must-understand">Section&nbsp;5.2.2.3</a>) is present: the cache understands the response status code;</li><li>the "no-store" cache directive is not present in the response (see <a href="#cache-response-directive.no-store" title="no-store">Section&nbsp;5.2.2.5</a>);</li><li>if the cache is shared: the "private" response directive is either not present or allows a shared cache to store a modified response; see <a href="#cache-response-directive.private" title="private">Section&nbsp;5.2.2.7</a>);</li><li>if the cache is shared: the Authorization header field is not present in the request (see Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) or a response directive is present that explicitly allows shared caching (see <a href="#caching.authenticated.responses" title="Storing Responses to Authenticated Requests">Section&nbsp;3.5</a>); and,</li><li>the response contains at least one of:<br><br> <ul><li>a public response directive (see <a href="#cache-response-directive.public" title="public">Section&nbsp;5.2.2.9</a>);</li><li>a private response directive, if the cache is not shared (see <a href="#cache-response-directive.private" title="private">Section&nbsp;5.2.2.7</a>);</li><li>an <a href="#field.expires">Expires</a> header field (see <a href="#field.expires" title="Expires">Section&nbsp;5.3</a>);</li><li>a max-age response directive (see <a href="#cache-response-directive.max-age" title="max-age">Section&nbsp;5.2.2.1</a>);</li><li>if the cache is shared: an s-maxage response directive (see <a href="#cache-response-directive.s-maxage" title="s-maxage">Section&nbsp;5.2.2.10</a>);</li><li>a Cache Control Extension that allows it to be cached (see <a href="#cache.control.extensions" title="Cache Control Extensions">Section&nbsp;5.2.3</a>); or,</li><li>a status code that is defined as heuristically cacheable (see <a href="#heuristic.freshness" title="Calculating Heuristic Freshness">Section&nbsp;4.2.2</a>).</li></ul><br><br> </li></ul></div><div id="rfc.section.3.p.3"><p>Note that a cache-control extension can override any of the requirements listed; see <a href="#cache.control.extensions" title="Cache Control Extensions">Section&nbsp;5.2.3</a>.</p></div><div id="rfc.section.3.p.4"><p>In this context, a cache has "understood" a request method or a response status code if it recognizes it and implements all specified caching-related behavior.</p></div><div id="rfc.section.3.p.5"><p>Note that, in normal operation, some caches will not store a response that has neither a cache validator nor an explicit expiration time, as such responses are not usually useful to store. 
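As a minimal illustrative sketch of the storage conditions listed above (not part of the specification; the Response interface and its accessors are hypothetical stand-ins, and field-list forms of directives as well as cache-control extensions are deliberately omitted):

```java
import java.util.Set;

// Illustrative sketch only: the "MUST NOT store unless" conditions expressed
// as a single predicate over a simplified response model.
final class StorabilityCheck {

  interface Response {
    int statusCode();
    boolean statusIsFinal();
    Set<String> cacheControlDirectives();   // lower-cased directive names only
    boolean hasExpires();
    boolean requestHadAuthorization();
    boolean requestMethodUnderstood();
    boolean statusCodeUnderstood();
    boolean statusHeuristicallyCacheable();
  }

  static boolean mayStore(Response r, boolean sharedCache) {
    Set<String> cc = r.cacheControlDirectives();
    if (!r.requestMethodUnderstood() || !r.statusIsFinal()) return false;

    // 206, 304, or a must-understand directive require the cache to
    // understand the status code's caching behavior.
    boolean understandingRequired = r.statusCode() == 206 || r.statusCode() == 304
        || cc.contains("must-understand");
    if (understandingRequired && !r.statusCodeUnderstood()) return false;

    if (cc.contains("no-store")) return false;
    if (sharedCache && cc.contains("private")) return false;

    // Shared caches need an explicit allowance when the request carried
    // an Authorization header field (see Section 3.5).
    if (sharedCache && r.requestHadAuthorization()
        && !(cc.contains("public") || cc.contains("s-maxage") || cc.contains("must-revalidate"))) {
      return false;
    }

    // Finally, the response needs at least one explicit or heuristic basis
    // for being stored at all.
    return cc.contains("public")
        || (!sharedCache && cc.contains("private"))
        || r.hasExpires()
        || cc.contains("max-age")
        || (sharedCache && cc.contains("s-maxage"))
        || r.statusHeuristicallyCacheable();
  }
}
```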
However, caches are not prohibited from storing such responses.</p></div><section id="storing.fields"><h3 id="rfc.section.3.1"><a href="#rfc.section.3.1">3.1.</a>&nbsp;<a href="#storing.fields">Storing Header and Trailer Fields</a></h3><div id="rfc.section.3.1.p.1" class="avoidbreakafter"><p>Caches MUST include all received response header fields — including unrecognised ones — when storing a response; this assures that new HTTP header fields can be successfully deployed. However, the following exceptions are made:</p></div><div id="rfc.section.3.1.p.2"><ul><li>The Connection header field and fields whose names are listed in it are required by Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a> to be removed before forwarding the message. This MAY be implemented by doing so before storage.</li><li>Likewise, some fields' semantics require them to be removed before forwarding the message, and this MAY be implemented by doing so before storage; see Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a> for some examples.</li><li>The no-cache (<a href="#cache-response-directive.no-cache" title="no-cache">Section&nbsp;5.2.2.4</a>) and private (<a href="#cache-response-directive.private" title="private">Section&nbsp;5.2.2.7</a>) cache directives can have arguments that prevent storage of header fields by all caches and shared caches, respectively.</li><li>Header fields that are specific to a client's proxy configuration MUST NOT be stored, unless the cache incorporates the identity of the proxy into the cache key. Effectively, this is limited to Proxy-Authenticate (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), Proxy-Authentication-Info (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), and Proxy-Authorization (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>).</li></ul></div><div id="rfc.section.3.1.p.3"><p>Caches MAY either store trailer fields separate from header fields, or discard them. 
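As a minimal illustrative sketch of the storage exceptions described in Section 3.1 above (not part of the specification; headers are modelled as a single-valued map, the no-cache/private field-list arguments are ignored, and all names are hypothetical):

```java
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

// Illustrative sketch only: drop Connection, the fields Connection names,
// and the proxy-configuration-specific fields before storing a response.
final class StorableHeaders {

  private static final Set<String> PROXY_SPECIFIC = Set.of(
      "proxy-authenticate", "proxy-authentication-info", "proxy-authorization");

  static Map<String, String> forStorage(Map<String, String> responseHeaders) {
    // Normalize names to lower case so comparisons are case-insensitive.
    Map<String, String> lower = new LinkedHashMap<>();
    responseHeaders.forEach((name, value) -> lower.put(name.toLowerCase(Locale.ROOT), value));

    Set<String> drop = new HashSet<>(PROXY_SPECIFIC);
    drop.add("connection");
    String connection = lower.get("connection");
    if (connection != null) {
      // Fields named in Connection are hop-by-hop and are not stored either.
      for (String listed : connection.split(",")) {
        drop.add(listed.trim().toLowerCase(Locale.ROOT));
      }
    }

    Map<String, String> stored = new LinkedHashMap<>();
    lower.forEach((name, value) -> { if (!drop.contains(name)) stored.put(name, value); });
    return stored;
  }
}
```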
Caches MUST NOT combine trailer fields with header fields.</p></div></section><section id="update"><h3 id="rfc.section.3.2"><a href="#rfc.section.3.2">3.2.</a>&nbsp;<a href="#update">Updating Stored Header Fields</a></h3><div id="rfc.section.3.2.p.1"><p>Caches are required to update a stored response's header fields from another (typically newer) response in several situations; for example, see <a href="#combining.responses" title="Combining Partial Content">Section&nbsp;3.4</a>, <a href="#freshening.responses" title="Freshening Stored Responses upon Validation">Section&nbsp;4.3.4</a> and <a href="#head.effects" title="Freshening Responses with HEAD">Section&nbsp;4.3.5</a>.</p></div><div id="rfc.section.3.2.p.2" class="avoidbreakafter"><p>When doing so, the cache MUST add each header field in the provided response to the stored response, replacing field values that are already present, with the following exceptions:</p></div><div id="rfc.section.3.2.p.3"><ul><li>Header fields excepted from storage in <a href="#storing.fields" title="Storing Header and Trailer Fields">Section&nbsp;3.1</a>,</li><li>Header fields that the cache's stored response depends upon, as described below,</li><li>Header fields that are automatically processed and removed by the recipient, as described below, and</li><li>The Content-Length header field.</li></ul></div><div id="rfc.section.3.2.p.4"><p>In some cases, caches (especially in user agents) store the results of processing the received response, rather than the response itself, and updating header fields that affect that processing can result in inconsistent behavior and security issues. Caches in this situation MAY omit these header fields from updating stored responses on an exceptional basis, but SHOULD limit such omission to those fields necessary to assure integrity of the stored response.</p></div><div id="rfc.section.3.2.p.5"><p>For example, a browser might decode the content coding of a response while it is being received, creating a disconnect between the data it has stored and the response's original metadata. Updating that stored metadata with a different Content-Encoding header field would be problematic. Likewise, a browser might store a post-parse HTML tree, rather than the content received in the response; updating the Content-Type header field would not be workable in this case, because any assumptions about the format made in parsing would now be invalid.</p></div><div id="rfc.section.3.2.p.6"><p>Furthermore, some fields are automatically processed and removed by the HTTP implementation; for example, the Content-Range header field. Implementations MAY automatically omit such header fields from updates, even when the processing does not actually occur.</p></div><div id="rfc.section.3.2.p.7"><p>Note that the Content-* prefix is not a signal that a header field is omitted from update; it is a convention for MIME header fields, not HTTP.</p></div></section><section id="incomplete.responses"><h3 id="rfc.section.3.3"><a href="#rfc.section.3.3">3.3.</a>&nbsp;<a href="#incomplete.responses">Storing Incomplete Responses</a></h3><div id="rfc.section.3.3.p.1"><p>If the request method is GET, the response status code is 200 (OK), and the entire response header section has been received, a cache MAY store a response body that is not complete (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) if the stored response is recorded as being incomplete. 
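</p></div><div><p>Looking back at the update rules of Section 3.2, the following non-normative Python sketch applies them to a stored header set; the field-name sets shown are illustrative, and an implementation would choose its own.</p></div><div><pre class="text">
# Non-normative sketch of the header update rules in Section 3.2.
# Field names are assumed to be lowercased; the excluded sets below are
# examples only.

STORAGE_EXCEPTIONS = {"connection", "proxy-authenticate",
                      "proxy-authentication-info", "proxy-authorization"}
AUTOMATICALLY_PROCESSED = {"content-range"}     # example given in Section 3.2

def update_stored_headers(stored, new, locally_depended_on=frozenset()):
    updated = dict(stored)
    for name, value in new.items():
        if name in STORAGE_EXCEPTIONS:          # Section 3.1 exceptions
            continue
        if name in locally_depended_on:         # e.g. Content-Encoding of decoded content
            continue
        if name in AUTOMATICALLY_PROCESSED:
            continue
        if name == "content-length":
            continue
        updated[name] = value                   # add or replace
    return updated
</pre></div><div><p>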
Likewise, a 206 (Partial Content) response MAY be stored as if it were an incomplete 200 (OK) response. However, a cache MUST NOT store incomplete or partial-content responses if it does not support the Range and Content-Range header fields or if it does not understand the range units used in those fields.</p></div><div id="rfc.section.3.3.p.2"><p>A cache MAY complete a stored incomplete response by making a subsequent range request (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) and combining the successful response with the stored response, as defined in <a href="#combining.responses" title="Combining Partial Content">Section&nbsp;3.4</a>. A cache MUST NOT use an incomplete response to answer requests unless the response has been made complete, or the request is partial and specifies a range wholly within the incomplete response. A cache MUST NOT send a partial response to a client without explicitly marking it using the 206 (Partial Content) status code.</p></div></section><section id="combining.responses"><h3 id="rfc.section.3.4"><a href="#rfc.section.3.4">3.4.</a>&nbsp;<a href="#combining.responses">Combining Partial Content</a></h3><div id="rfc.section.3.4.p.1"><p>A response might transfer only a partial representation if the connection closed prematurely or if the request used one or more Range specifiers (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>). After several such transfers, a cache might have received several ranges of the same representation. A cache MAY combine these ranges into a single stored response, and reuse that response to satisfy later requests, if they all share the same strong validator and the cache complies with the client requirements in Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div><div id="rfc.section.3.4.p.2"><p>When combining the new response with one or more stored responses, a cache MUST update the stored response header fields using the header fields provided in the new response, as per <a href="#update" title="Updating Stored Header Fields">Section&nbsp;3.2</a>.</p></div></section><section id="caching.authenticated.responses"><h3 id="rfc.section.3.5"><a href="#rfc.section.3.5">3.5.</a>&nbsp;<a href="#caching.authenticated.responses">Storing Responses to Authenticated Requests</a></h3><div id="rfc.section.3.5.p.1"><p>A shared cache MUST NOT use a cached response to a request with an Authorization header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) to satisfy any subsequent request unless the response contains a <a href="#field.cache-control">Cache-Control</a> field with a response directive (<a href="#cache-response-directive" title="Response Cache-Control Directives">Section&nbsp;5.2.2</a>) that allows it to be stored by a shared cache and the cache conforms to the requirements of that directive for that response.</p></div><div id="rfc.section.3.5.p.2"><p>In this specification, the following response directives have such an effect: must-revalidate (<a href="#cache-response-directive.must-revalidate" title="must-revalidate">Section&nbsp;5.2.2.2</a>), public (<a href="#cache-response-directive.public" title="public">Section&nbsp;5.2.2.9</a>), and s-maxage (<a href="#cache-response-directive.s-maxage" title="s-maxage">Section&nbsp;5.2.2.10</a>).</p></div></section></section><section id="constructing.responses.from.caches"><h2 id="rfc.section.4"><a href="#rfc.section.4">4.</a>&nbsp;<a 
href="#constructing.responses.from.caches">Constructing Responses from Caches</a></h2><div id="rfc.section.4.p.1" class="avoidbreakafter"><p>When presented with a request, a cache MUST NOT reuse a stored response, unless:</p></div><div id="rfc.section.4.p.2"><ul><li>The presented target URI (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) and that of the stored response match, and</li><li>the request method associated with the stored response allows it to be used for the presented request, and</li><li>selecting header fields nominated by the stored response (if any) match those presented (see <a href="#caching.negotiated.responses" title="Calculating Cache Keys with Vary">Section&nbsp;4.1</a>), and</li><li>the stored response does not contain the no-cache cache directive (<a href="#cache-response-directive.no-cache" title="no-cache">Section&nbsp;5.2.2.4</a>), unless it is successfully validated (<a href="#validation.model" title="Validation">Section&nbsp;4.3</a>), and</li><li>the stored response is either:<br><br> <ul><li>fresh (see <a href="#expiration.model" title="Freshness">Section&nbsp;4.2</a>), or</li><li>allowed to be served stale (see <a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>), or</li><li>successfully validated (see <a href="#validation.model" title="Validation">Section&nbsp;4.3</a>).</li></ul><br><br> </li></ul></div><div id="rfc.section.4.p.3"><p>Note that a cache-control extension can override any of the requirements listed; see <a href="#cache.control.extensions" title="Cache Control Extensions">Section&nbsp;5.2.3</a>.</p></div><div id="rfc.section.4.p.4"><p>When a stored response is used to satisfy a request without validation, a cache MUST generate an <a href="#field.age">Age</a> header field (<a href="#field.age" title="Age">Section&nbsp;5.1</a>), replacing any present in the response with a value equal to the stored response's current_age; see <a href="#age.calculations" title="Calculating Age">Section&nbsp;4.2.3</a>.</p></div><div id="rfc.section.4.p.5"><p>A cache MUST write through requests with methods that are unsafe (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) to the origin server; i.e., a cache is not allowed to generate a reply to such a request before having forwarded the request and having received a corresponding response.</p></div><div id="rfc.section.4.p.6"><p>Also, note that unsafe requests might invalidate already-stored responses; see <a href="#invalidation" title="Invalidating Stored Responses">Section&nbsp;4.4</a>.</p></div><div id="rfc.iref.c.3"></div><div id="rfc.section.4.p.7"><p>A response that is stored or storable can be used to satisfy multiple requests, provided that it is allowed to reuse that response for the requests in question. This enables caches to collapse requests — or combine multiple incoming requests into a single forward one upon a cache miss — thereby reducing load on the origin server and network. However, note that if the response returned is not able to be used for some or all of the collapsed requests, additional latency might be introduced, because they will need to be forwarded to be satisfied.</p></div><div id="rfc.section.4.p.8"><p>When more than one suitable response is stored, a cache MUST use the most recent one (as determined by the Date header field). 
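</p></div><div><p>The reuse conditions above reduce to a simple combination of checks, as in the following non-normative Python sketch. Each argument is the boolean outcome of the corresponding mechanism (Sections 4.1, 4.2, 4.2.4 and 4.3); the function and argument names are illustrative only.</p></div><div><pre class="text">
# Non-normative sketch combining the reuse conditions of Section 4.
# Each argument is assumed to have been computed already by the
# mechanisms referenced above; nothing here is normative.

def may_reuse(uri_matches, method_allows_reuse, selecting_fields_match,
              response_has_no_cache, validated, fresh, allowed_stale):
    if not (uri_matches and method_allows_reuse and selecting_fields_match):
        return False
    if response_has_no_cache and not validated:
        return False
    return fresh or allowed_stale or validated

def age_field_value(current_age_seconds):
    # Age field generated when reusing without validation (see Section 4.2.3)
    return str(max(0, int(current_age_seconds)))
</pre></div><div><p>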
It can also forward the request with "Cache-Control: max-age=0" or "Cache-Control: no-cache" to disambiguate which response to use.</p></div><div id="rfc.section.4.p.9"><p>A cache that does not have a clock available MUST NOT use stored responses without revalidating them upon every use.</p></div><section id="caching.negotiated.responses"><h3 id="rfc.section.4.1"><a href="#rfc.section.4.1">4.1.</a>&nbsp;<a href="#caching.negotiated.responses">Calculating Cache Keys with Vary</a></h3><div id="rfc.section.4.1.p.1"><p>When a cache receives a request that can be satisfied by a stored response that has a Vary header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), it MUST NOT use that response unless all the selecting header fields nominated by the Vary header field match in both the original request (i.e., that associated with the stored response), and the presented request.</p></div><div id="rfc.section.4.1.p.2" class="avoidbreakafter"><p>The selecting header fields from two requests are defined to match if and only if those in the first request can be transformed to those in the second request by applying any of:</p></div><div id="rfc.section.4.1.p.3"><ul><li>adding or removing whitespace, where allowed in the header field's syntax</li><li>combining multiple header field lines with the same field name (see Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>)</li><li>normalizing both header field values in a way that is known to have identical semantics, according to the header field's specification (e.g., reordering field values when order is not significant; case-normalization, where values are defined to be case-insensitive)</li></ul></div><div id="rfc.section.4.1.p.4"><p>If (after any normalization that might take place) a header field is absent from a request, it can only match another request if it is also absent there.</p></div><div id="rfc.section.4.1.p.5"><p>A Vary header field value containing a member "*" always fails to match.</p></div><div id="rfc.iref.s.2"></div><div id="rfc.section.4.1.p.6"><p>The stored response with matching selecting header fields is known as the selected response.</p></div><div id="rfc.section.4.1.p.7"><p>If multiple selected responses are available (potentially including responses without a Vary header field), the cache will need to choose one to use. When a selecting header field has a known mechanism for doing so (e.g., qvalues on Accept and similar request header fields), that mechanism MAY be used to select a preferred response. If such a mechanism is not available, or leads to equally preferred responses, the most recent response (as determined by the Date header field) is used, as per <a href="#constructing.responses.from.caches" title="Constructing Responses from Caches">Section&nbsp;4</a>.</p></div><div id="rfc.section.4.1.p.8"><p>Some resources mistakenly omit the Vary header field from their default response (i.e., the one sent when no more preferable response is available), with the effect of selecting it for requests to that resource even when more preferable responses are available. 
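</p></div><div><p>A non-normative Python sketch of the matching rules above follows. Request header fields are assumed to be given as mappings from lowercased field names to lists of field-line values; only whitespace and field-line combination are normalised, so fields whose specifications define further equivalences would need field-specific handling.</p></div><div><pre class="text">
# Non-normative sketch of selecting-header-field matching (Section 4.1).

def _normalise(field_lines):
    # combine multiple field lines and collapse optional whitespace
    combined = ",".join(field_lines)
    members = [m.strip() for m in combined.split(",") if m.strip() != ""]
    return ",".join(members)

def vary_matches(vary_value, original_request_headers, presented_request_headers):
    field_names = [f.strip().lower() for f in vary_value.split(",") if f.strip()]
    if "*" in field_names:
        return False                        # "*" always fails to match
    for name in field_names:
        original = original_request_headers.get(name)
        presented = presented_request_headers.get(name)
        if original is None or presented is None:
            if original is not presented:   # absent only matches absent
                return False
            continue
        if _normalise(original) != _normalise(presented):
            return False
    return True
</pre></div><div><p>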
When a cache has multiple responses for a target URI and one or more omits the Vary header field, it SHOULD use the most recent (see <a href="#age.calculations" title="Calculating Age">Section&nbsp;4.2.3</a>) valid Vary field value available to select an appropriate response for the request.</p></div><div id="rfc.section.4.1.p.9"><p>If no selected response is available, the cache cannot satisfy the presented request. Typically, it is forwarded to the origin server in a (possibly conditional; see <a href="#validation.model" title="Validation">Section&nbsp;4.3</a>) request.</p></div></section><section id="expiration.model"><h3 id="rfc.section.4.2"><a href="#rfc.section.4.2">4.2.</a>&nbsp;<a href="#expiration.model">Freshness</a></h3><div id="rfc.section.4.2.p.1"><p>A fresh response is one whose age has not yet exceeded its freshness lifetime. Conversely, a stale response is one where it has.</p></div><div id="rfc.iref.f.1"></div><div id="rfc.iref.e.1"></div><div id="rfc.iref.h.1"></div><div id="rfc.section.4.2.p.2"><p>A response's freshness lifetime is the length of time between its generation by the origin server and its expiration time. An explicit expiration time is the time at which the origin server intends that a stored response can no longer be used by a cache without further validation, whereas a heuristic expiration time is assigned by a cache when no explicit expiration time is available.</p></div><div id="rfc.iref.a.1"></div><div id="rfc.section.4.2.p.3"><p>A response's age is the time that has passed since it was generated by, or successfully validated with, the origin server.</p></div><div id="rfc.section.4.2.p.4"><p>When a response is fresh, it can be used to satisfy subsequent requests without contacting the origin server, thereby improving efficiency.</p></div><div id="rfc.section.4.2.p.5"><p>The primary mechanism for determining freshness is for an origin server to provide an explicit expiration time in the future, using either the <a href="#field.expires">Expires</a> header field (<a href="#field.expires" title="Expires">Section&nbsp;5.3</a>) or the max-age response directive (<a href="#cache-response-directive.max-age" title="max-age">Section&nbsp;5.2.2.1</a>). Generally, origin servers will assign future explicit expiration times to responses in the belief that the representation is not likely to change in a semantically significant way before the expiration time is reached.</p></div><div id="rfc.section.4.2.p.6"><p>If an origin server wishes to force a cache to validate every request, it can assign an explicit expiration time in the past to indicate that the response is already stale. 
Compliant caches will normally validate a stale cached response before reusing it for subsequent requests (see <a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>).</p></div><div id="rfc.section.4.2.p.7"><p>Since origin servers do not always provide explicit expiration times, caches are also allowed to use a heuristic to determine an expiration time under certain circumstances (see <a href="#heuristic.freshness" title="Calculating Heuristic Freshness">Section&nbsp;4.2.2</a>).</p></div><div id="rfc.section.4.2.p.8" class="avoidbreakafter"><p>The calculation to determine if a response is fresh is:</p></div><div id="rfc.figure.u.3"><pre class="text"> + response_is_fresh = (freshness_lifetime &gt; current_age) +</pre></div><div id="rfc.section.4.2.p.9"><p>freshness_lifetime is defined in <a href="#calculating.freshness.lifetime" title="Calculating Freshness Lifetime">Section&nbsp;4.2.1</a>; current_age is defined in <a href="#age.calculations" title="Calculating Age">Section&nbsp;4.2.3</a>.</p></div><div id="rfc.section.4.2.p.10"><p>Clients can send the max-age or min-fresh request directives (<a href="#cache-request-directive" title="Request Cache-Control Directives">Section&nbsp;5.2.1</a>) to constrain or relax freshness calculations for the corresponding response. However, caches are not required to honor them.</p></div><div id="rfc.section.4.2.p.11" class="avoidbreakafter"><p>When calculating freshness, to avoid common problems in date parsing:</p></div><div id="rfc.section.4.2.p.12"><ul><li>Although all date formats are specified to be case-sensitive, a cache recipient SHOULD match the field value case-insensitively.</li><li>If a cache recipient's internal implementation of time has less resolution than the value of an HTTP-date, the recipient MUST internally represent a parsed <a href="#field.expires">Expires</a> date as the nearest time equal to or earlier than the received value.</li><li>A cache recipient MUST NOT allow local time zones to influence the calculation or comparison of an age or expiration time.</li><li>A cache recipient SHOULD consider a date with a zone abbreviation other than "GMT" to be invalid for calculating expiration.</li></ul></div><div id="rfc.section.4.2.p.13"><p>Note that freshness applies only to cache operation; it cannot be used to force a user agent to refresh its display or reload a resource. 
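</p></div><div><p>A non-normative Python sketch of this check, combined with the current_age calculation of Section 4.2.3, is shown below; all inputs are assumed to be in seconds.</p></div><div><pre class="text">
# Non-normative sketch: the freshness check above plus the current_age
# calculation of Section 4.2.3.  All inputs are in seconds; age_value is
# 0 when no Age field is available.

def current_age(age_value, date_value, now, request_time, response_time):
    apparent_age = max(0, response_time - date_value)
    response_delay = response_time - request_time
    corrected_age_value = age_value + response_delay
    corrected_initial_age = max(apparent_age, corrected_age_value)
    resident_time = now - response_time
    return corrected_initial_age + resident_time

def response_is_fresh(freshness_lifetime, current_age_value):
    return freshness_lifetime &gt; current_age_value
</pre></div><div><p>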
See <a href="#history.lists" title="Relationship to Applications and Other Caches">Section&nbsp;6</a> for an explanation of the difference between caches and history mechanisms.</p></div><section id="calculating.freshness.lifetime"><h4 id="rfc.section.4.2.1"><a href="#rfc.section.4.2.1">4.2.1.</a>&nbsp;<a href="#calculating.freshness.lifetime">Calculating Freshness Lifetime</a></h4><div id="rfc.section.4.2.1.p.1" class="avoidbreakafter"><p>A cache can calculate the freshness lifetime (denoted as freshness_lifetime) of a response by using the first match of:</p></div><div id="rfc.section.4.2.1.p.2"><ul><li>If the cache is shared and the s-maxage response directive (<a href="#cache-response-directive.s-maxage" title="s-maxage">Section&nbsp;5.2.2.10</a>) is present, use its value, or</li><li>If the max-age response directive (<a href="#cache-response-directive.max-age" title="max-age">Section&nbsp;5.2.2.1</a>) is present, use its value, or</li><li>If the <a href="#field.expires">Expires</a> response header field (<a href="#field.expires" title="Expires">Section&nbsp;5.3</a>) is present, use its value minus the value of the Date response header field (using the time the message was received if it is not present, as per Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), or</li><li>Otherwise, no explicit expiration time is present in the response. A heuristic freshness lifetime might be applicable; see <a href="#heuristic.freshness" title="Calculating Heuristic Freshness">Section&nbsp;4.2.2</a>.</li></ul></div><div id="rfc.section.4.2.1.p.3"><p>Note that this calculation is not vulnerable to clock skew, since all of the information comes from the origin server.</p></div><div id="rfc.section.4.2.1.p.4"><p>When there is more than one value present for a given directive (e.g., two <a href="#field.expires">Expires</a> header field lines or multiple Cache-Control: max-age directives), either the first occurrence should be used, or the response should be considered stale. If directives conflict (e.g., both max-age and no-cache are present), the most restrictive directive should be honored. Caches are encouraged to consider responses that have invalid freshness information (e.g., a max-age directive with non-integer content) to be stale.</p></div></section><section id="heuristic.freshness"><h4 id="rfc.section.4.2.2"><a href="#rfc.section.4.2.2">4.2.2.</a>&nbsp;<a href="#heuristic.freshness">Calculating Heuristic Freshness</a></h4><div id="rfc.section.4.2.2.p.1"><p>Since origin servers do not always provide explicit expiration times, a cache MAY assign a heuristic expiration time when an explicit time is not specified, employing algorithms that use other field values (such as the Last-Modified time) to estimate a plausible expiration time. This specification does not provide specific algorithms, but does impose worst-case constraints on their results.</p></div><div id="rfc.section.4.2.2.p.2"><p>A cache MUST NOT use heuristics to determine freshness when an explicit expiration time is present in the stored response. 
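</p></div><div><p>The first-match rules of Section 4.2.1, together with a simple Last-Modified heuristic for the present section, might be sketched as follows (non-normative). parse_http_date, the header and directive accessors, and the fraction used by the heuristic are assumptions of the sketch.</p></div><div><pre class="text">
# Non-normative sketch of Section 4.2.1 (first match wins) with a simple
# heuristic for Section 4.2.2.  "response" is assumed to expose lowercased
# .headers and a parsed .cache_control mapping; parse_http_date is supplied
# by the caller and returns seconds since the epoch.

def freshness_lifetime(response, shared, received_time, parse_http_date):
    cc = response.cache_control
    if shared and "s-maxage" in cc:
        return int(cc["s-maxage"])
    if "max-age" in cc:
        return int(cc["max-age"])
    if "expires" in response.headers:
        date = response.headers.get("date")
        date_value = parse_http_date(date) if date else received_time
        return parse_http_date(response.headers["expires"]) - date_value
    # No explicit expiration time: heuristic freshness, only permitted for
    # responses that are heuristically cacheable (not checked here).
    return heuristic_lifetime(response, received_time, parse_http_date)

def heuristic_lifetime(response, received_time, parse_http_date):
    last_modified = response.headers.get("last-modified")
    if last_modified is None:
        return 0
    # a typical choice: 10% of the interval since Last-Modified
    return int(0.10 * (received_time - parse_http_date(last_modified)))
</pre></div><div><p>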
Because of the requirements in <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>, this means that heuristics can only be used on responses without explicit freshness whose status codes are defined as heuristically cacheable (e.g., see Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), and those responses without explicit freshness that have been marked as explicitly cacheable (e.g., with a "public" response directive).</p></div><div id="rfc.section.4.2.2.p.3"><p>Note that in previous specifications heuristically cacheable response status codes were called "cacheable by default."</p></div><div id="rfc.section.4.2.2.p.4"><p>If the response has a Last-Modified header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), caches are encouraged to use a heuristic expiration value that is no more than some fraction of the interval since that time. A typical setting of this fraction might be 10%.</p></div><div id="rfc.section.4.2.2.p.5"><ul class="empty"><li>Note: Section 13.9 of <a href="#RFC2616"><cite title="Hypertext Transfer Protocol -- HTTP/1.1">[RFC2616]</cite></a> prohibited caches from calculating heuristic freshness for URIs with query components (i.e., those containing '?'). In practice, this has not been widely implemented. Therefore, origin servers are encouraged to send explicit directives (e.g., Cache-Control: no-cache) if they wish to prevent caching.</li></ul></div></section><section id="age.calculations"><h4 id="rfc.section.4.2.3"><a href="#rfc.section.4.2.3">4.2.3.</a>&nbsp;<a href="#age.calculations">Calculating Age</a></h4><div id="rfc.section.4.2.3.p.1"><p>The <a href="#field.age">Age</a> header field is used to convey an estimated age of the response message when obtained from a cache. The Age field value is the cache's estimate of the number of seconds since the origin server generated or validated the response. The Age value is therefore the sum of the time that the response has been resident in each of the caches along the path from the origin server, plus the time it has been in transit along network paths.</p></div><div id="rfc.section.4.2.3.p.2" class="avoidbreakafter"><p>Age calculation uses the following data:</p></div><div id="rfc.section.4.2.3.p.3"><dl><dt>age_value</dt><dd>The term "age_value" denotes the value of the <a href="#field.age">Age</a> header field (<a href="#field.age" title="Age">Section&nbsp;5.1</a>), in a form appropriate for arithmetic operation; or 0, if not available.</dd><dt>date_value</dt><dd>The term "date_value" denotes the value of the Date header field, in a form appropriate for arithmetic operations. See Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a> for the definition of the Date header field, and for requirements regarding responses without it.</dd><dt>now</dt><dd>The term "now" means "the current value of the clock at the host performing the calculation". 
A host ought to use NTP (<a href="#RFC5905"><cite title="Network Time Protocol Version 4: Protocol and Algorithms Specification">[RFC5905]</cite></a>) or some similar protocol to synchronize its clocks to Coordinated Universal Time.</dd><dt>request_time</dt><dd>The current value of the clock at the host at the time the request resulting in the stored response was made.</dd><dt>response_time</dt><dd>The current value of the clock at the host at the time the response was received.</dd></dl></div><div id="rfc.section.4.2.3.p.4" class="avoidbreakafter"><p>A response's age can be calculated in two entirely independent ways:</p></div><div id="rfc.section.4.2.3.p.5"><ol><li>the "apparent_age": response_time minus date_value, if the local clock is reasonably well synchronized to the origin server's clock. If the result is negative, the result is replaced by zero.</li><li>the "corrected_age_value", if all of the caches along the response path implement HTTP/1.1 or greater. A cache MUST interpret this value relative to the time the request was initiated, not the time that the response was received.</li></ol></div><div id="rfc.figure.u.4"><pre class="text"> + apparent_age = max(0, response_time - date_value); + + response_delay = response_time - request_time; + corrected_age_value = age_value + response_delay; +</pre></div><div id="rfc.section.4.2.3.p.6"><p>The corrected_age_value MAY be used as the corrected_initial_age. In circumstances where very old cache implementations that might not correctly insert <a href="#field.age">Age</a> are present, corrected_initial_age can be calculated more conservatively as</p></div><div id="rfc.figure.u.5"><pre class="text"> + corrected_initial_age = max(apparent_age, corrected_age_value); +</pre></div><div id="rfc.section.4.2.3.p.7"><p>The current_age of a stored response can then be calculated by adding the time (in seconds) since the stored response was last validated by the origin server to the corrected_initial_age.</p></div><div id="rfc.figure.u.6"><pre class="text"> + resident_time = now - response_time; + current_age = corrected_initial_age + resident_time; +</pre></div></section><section id="serving.stale.responses"><h4 id="rfc.section.4.2.4"><a href="#rfc.section.4.2.4">4.2.4.</a>&nbsp;<a href="#serving.stale.responses">Serving Stale Responses</a></h4><div id="rfc.section.4.2.4.p.1"><p>A "stale" response is one that either has explicit expiry information or is allowed to have heuristic expiry calculated, but is not fresh according to the calculations in <a href="#expiration.model" title="Freshness">Section&nbsp;4.2</a>.</p></div><div id="rfc.section.4.2.4.p.2"><p>A cache MUST NOT generate a stale response if it is prohibited by an explicit in-protocol directive (e.g., by a "no-cache" cache directive, a "must-revalidate" cache-response-directive, or an applicable "s-maxage" or "proxy-revalidate" cache-response-directive; see <a href="#cache-response-directive" title="Response Cache-Control Directives">Section&nbsp;5.2.2</a>).</p></div><div id="rfc.section.4.2.4.p.3"><p>A cache MUST NOT generate a stale response unless it is disconnected or doing so is explicitly permitted by the client or origin server (e.g., by the max-stale request directive in <a href="#cache-request-directive" title="Request Cache-Control Directives">Section&nbsp;5.2.1</a>, by extension directives such as those defined in <a href="#RFC5861"><cite title="HTTP Cache-Control Extensions for Stale Content">[RFC5861]</cite></a>, or by configuration in accordance with an out-of-band 
contract).</p></div></section></section><section id="validation.model"><h3 id="rfc.section.4.3"><a href="#rfc.section.4.3">4.3.</a>&nbsp;<a href="#validation.model">Validation</a></h3><div id="rfc.section.4.3.p.1"><p>When a cache has one or more stored responses for a requested URI, but cannot serve any of them (e.g., because they are not fresh, or one cannot be selected; see <a href="#caching.negotiated.responses" title="Calculating Cache Keys with Vary">Section&nbsp;4.1</a>), it can use the conditional request mechanism (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) in the forwarded request to give the next inbound server an opportunity to select a valid stored response to use, updating the stored metadata in the process, or to replace the stored response(s) with a new response. This process is known as validating or revalidating the stored response.</p></div><section id="validation.sent"><h4 id="rfc.section.4.3.1"><a href="#rfc.section.4.3.1">4.3.1.</a>&nbsp;<a href="#validation.sent">Sending a Validation Request</a></h4><div id="rfc.section.4.3.1.p.1"><p>When generating a conditional request for validation, a cache starts with either a request it is attempting to satisfy, or — if it is initiating the request independently — it synthesises a request using a stored response by copying the method, target URI, and request header fields identified by the Vary header field (<a href="#caching.negotiated.responses" title="Calculating Cache Keys with Vary">Section&nbsp;4.1</a>).</p></div><div id="rfc.section.4.3.1.p.2"><p>It then updates that request with one or more precondition header fields. These contain validator metadata sourced from stored response(s) that have the same cache key.</p></div><div id="rfc.section.4.3.1.p.3"><p>The precondition header fields are then compared by recipients to determine whether any stored response is equivalent to a current representation of the resource.</p></div><div id="rfc.section.4.3.1.p.4"><p>One such validator is the timestamp given in a Last-Modified header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), which can be used in an If-Modified-Since header field for response validation, or in an If-Unmodified-Since or If-Range header field for representation selection (i.e., the client is referring specifically to a previously obtained representation with that timestamp).</p></div><div id="rfc.section.4.3.1.p.5"><p>Another validator is the entity-tag given in an ETag field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>). One or more entity-tags, indicating one or more stored responses, can be used in an If-None-Match header field for response validation, or in an If-Match or If-Range header field for representation selection (i.e., the client is referring specifically to one or more previously obtained representations with the listed entity-tags).</p></div></section><section id="validation.received"><h4 id="rfc.section.4.3.2"><a href="#rfc.section.4.3.2">4.3.2.</a>&nbsp;<a href="#validation.received">Handling a Received Validation Request</a></h4><div id="rfc.section.4.3.2.p.1"><p>Each client in the request chain may have its own cache, so it is common for a cache at an intermediary to receive conditional requests from other (outbound) caches. 
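</p></div><div><p>A non-normative Python sketch of constructing such a conditional request from a stored response, as described in Section 4.3.1, follows; the attribute names on the stored response are assumptions of the sketch.</p></div><div><pre class="text">
# Non-normative sketch of constructing a validation request (Section 4.3.1).
# The stored response is assumed to carry the request method, target URI and
# the request header fields named by its Vary field, as stored per Section 3.

def validation_request(stored):
    request = {
        "method": stored.request_method,
        "target_uri": stored.target_uri,
        "headers": dict(stored.vary_request_headers),   # Section 4.1 selection data
    }
    preconditions = {}
    etag = stored.headers.get("etag")
    if etag is not None:
        preconditions["if-none-match"] = etag            # entity-tag validator
    last_modified = stored.headers.get("last-modified")
    if last_modified is not None:
        preconditions["if-modified-since"] = last_modified
    request["headers"].update(preconditions)
    return request
</pre></div><div><p>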
Likewise, some user agents make use of conditional requests to limit data transfers to recently modified representations or to complete the transfer of a partially retrieved representation.</p></div><div id="rfc.section.4.3.2.p.2"><p>If a cache receives a request that can be satisfied by reusing one of its stored 200 (OK) or 206 (Partial Content) responses, the cache SHOULD evaluate any applicable conditional header field preconditions received in that request with respect to the corresponding validators contained within the selected response. A cache MUST NOT evaluate conditional header fields that only apply to an origin server, occur in a request with semantics that cannot be satisfied with a cached response, or occur in a request with a target resource for which it has no stored responses; such preconditions are likely intended for some other (inbound) server.</p></div><div id="rfc.section.4.3.2.p.3"><p>The proper evaluation of conditional requests by a cache depends on the received precondition header fields and their precedence. In summary, the If-Match and If-Unmodified-Since conditional header fields are not applicable to a cache, and If-None-Match takes precedence over If-Modified-Since. See Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a> for a complete specification of precondition precedence.</p></div><div id="rfc.section.4.3.2.p.4"><p>A request containing an If-None-Match header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) indicates that the client wants to validate one or more of its own stored responses in comparison to whichever stored response is selected by the cache.</p></div><div id="rfc.section.4.3.2.p.5"><p>When a cache decides to revalidate its own stored responses for a request that contains an If-None-Match list of entity-tags, the cache MAY combine the received list with a list of entity-tags from its own stored set of responses (fresh or stale) and send the union of the two lists as a replacement If-None-Match header field value in the forwarded request. If a stored response contains only partial content, the cache MUST NOT include its entity-tag in the union unless the request is for a range that would be fully satisfied by that partial stored response. 
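</p></div><div><p>The local evaluation described earlier in this section might be sketched, non-normatively, as follows; weak comparison of entity-tags and If-Range handling are omitted, and parse_http_date is assumed to be supplied by the caller.</p></div><div><pre class="text">
# Non-normative sketch of evaluating request preconditions against a
# selected stored response (Section 4.3.2).  If-None-Match takes precedence
# over If-Modified-Since; If-Match and If-Unmodified-Since are not
# applicable to a cache and are ignored here.

def not_modified(selected, request_headers, parse_http_date, received_time):
    inm = request_headers.get("if-none-match")
    if inm is not None:
        stored_etag = selected.headers.get("etag")
        client_tags = [t.strip() for t in inm.split(",")]
        # weak comparison details are omitted in this sketch
        return inm.strip() == "*" or (stored_etag is not None and stored_etag in client_tags)
    ims = request_headers.get("if-modified-since")
    if ims is not None:
        last_modified = selected.headers.get("last-modified")
        if last_modified is None:
            # fall back to Date, or the time the stored response was received
            date = selected.headers.get("date")
            last_modified_time = parse_http_date(date) if date else received_time
        else:
            last_modified_time = parse_http_date(last_modified)
        return last_modified_time &lt;= parse_http_date(ims)
    return False
</pre></div><div><p>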
If the response to the forwarded request is 304 (Not Modified) and has an ETag field value with an entity-tag that is not in the client's list, the cache MUST generate a 200 (OK) response for the client by reusing its corresponding stored response, as updated by the 304 response metadata (<a href="#freshening.responses" title="Freshening Stored Responses upon Validation">Section&nbsp;4.3.4</a>).</p></div><div id="rfc.section.4.3.2.p.6"><p>If an If-None-Match header field is not present, a request containing an If-Modified-Since header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) indicates that the client wants to validate one or more of its own stored responses by modification date.</p></div><div id="rfc.section.4.3.2.p.7"><p>If a request contains an If-Modified-Since header field and the Last-Modified header field is not present in a selected stored response, a cache SHOULD use the stored response's Date field value (or, if no Date field is present, the time that the stored response was received) to evaluate the conditional.</p></div><div id="rfc.section.4.3.2.p.8"><p>A cache that implements partial responses to range requests, as defined in Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>, also needs to evaluate a received If-Range header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) regarding its selected stored response.</p></div></section><section id="validation.response"><h4 id="rfc.section.4.3.3"><a href="#rfc.section.4.3.3">4.3.3.</a>&nbsp;<a href="#validation.response">Handling a Validation Response</a></h4><div id="rfc.section.4.3.3.p.1" class="avoidbreakafter"><p>Cache handling of a response to a conditional request depends upon its status code:</p></div><div id="rfc.section.4.3.3.p.2"><ul><li>A 304 (Not Modified) response status code indicates that the stored response can be updated and reused; see <a href="#freshening.responses" title="Freshening Stored Responses upon Validation">Section&nbsp;4.3.4</a>.</li><li>A full response (i.e., one containing content) indicates that none of the stored responses nominated in the conditional request is suitable. Instead, the cache MUST use the full response to satisfy the request. The cache MAY store such a full response, subject to its constraints (see <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>).</li><li>However, if a cache receives a 5xx (Server Error) response while attempting to validate a response, it can either forward this response to the requesting client, or act as if the server failed to respond. 
In the latter case, the cache can send a previously stored response, subject to its constraints on doing so (see <a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>), or retry the validation request.</li></ul></div></section><section id="freshening.responses"><h4 id="rfc.section.4.3.4"><a href="#rfc.section.4.3.4">4.3.4.</a>&nbsp;<a href="#freshening.responses">Freshening Stored Responses upon Validation</a></h4><div id="rfc.section.4.3.4.p.1"><p>When a cache receives a 304 (Not Modified) response and already has one or more stored 200 (OK) responses for the applicable cache key, the cache needs to identify which (if any) are to be updated by the new information provided, and then do so.</p></div><div id="rfc.iref.s.3"></div><div id="rfc.section.4.3.4.p.2" class="avoidbreakafter"><p>The stored response(s) to update are identified by using the first match (if any) of:</p></div><div id="rfc.section.4.3.4.p.3"><ul><li>If the new response contains one or more strong validators (see Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), then each of those strong validators identify the selected representation for update. All the stored responses with one of those same strong validators are identified for update. If none of the stored responses contain at least one of the same strong validators, then the cache MUST NOT use the new response to update any stored responses.</li><li>If the new response contains no strong validators but does contain one or more weak validators, and those validators correspond to one of the cache's stored responses, then the most recent of those matching stored responses is identified for update.</li><li>If the new response does not include any form of validator (such as where a client generates an If-Modified-Since request from a source other than the Last-Modified response header field), and there is only one stored response, and that stored response also lacks a validator, then that stored response is identified for update.</li></ul></div><div id="rfc.section.4.3.4.p.4"><p>For each stored response identified, the cache MUST update its header fields with the header fields provided in the 304 (Not Modified) response, as per <a href="#update" title="Updating Stored Header Fields">Section&nbsp;3.2</a>.</p></div></section><section id="head.effects"><h4 id="rfc.section.4.3.5"><a href="#rfc.section.4.3.5">4.3.5.</a>&nbsp;<a href="#head.effects">Freshening Responses with HEAD</a></h4><div id="rfc.section.4.3.5.p.1"><p>A response to the HEAD method is identical to what an equivalent request made with a GET would have been, without sending the content. 
This property of HEAD responses can be used to invalidate or update a cached GET response if the more efficient conditional GET request mechanism is not available (due to no validators being present in the stored response) or if transmission of the content is not desired even if it has changed.</p></div><div id="rfc.section.4.3.5.p.2"><p>When a cache makes an inbound HEAD request for a target URI and receives a 200 (OK) response, the cache SHOULD update or invalidate each of its stored GET responses that could have been selected for that request (see <a href="#caching.negotiated.responses" title="Calculating Cache Keys with Vary">Section&nbsp;4.1</a>).</p></div><div id="rfc.section.4.3.5.p.3"><p>For each of the stored responses that could have been selected, if the stored response and HEAD response have matching values for any received validator fields (ETag and Last-Modified) and, if the HEAD response has a Content-Length header field, the value of Content-Length matches that of the stored response, the cache SHOULD update the stored response as described below; otherwise, the cache SHOULD consider the stored response to be stale.</p></div><div id="rfc.section.4.3.5.p.4"><p>If a cache updates a stored response with the metadata provided in a HEAD response, the cache MUST use the header fields provided in the HEAD response to update the stored response (see <a href="#update" title="Updating Stored Header Fields">Section&nbsp;3.2</a>).</p></div></section></section><section id="invalidation"><h3 id="rfc.section.4.4"><a href="#rfc.section.4.4">4.4.</a>&nbsp;<a href="#invalidation">Invalidating Stored Responses</a></h3><div id="rfc.section.4.4.p.1"><p>Because unsafe request methods (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) such as PUT, POST or DELETE have the potential for changing state on the origin server, intervening caches are required to invalidate stored responses to keep their contents up to date.</p></div><div id="rfc.section.4.4.p.2"><p>A cache MUST invalidate the target URI (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) when a non-error status code is received in response to an unsafe request method (including methods whose safety is unknown).</p></div><div id="rfc.section.4.4.p.3"><p>A cache MAY invalidate other URIs when a non-error status code is received in response to an unsafe request method (including methods whose safety is unknown). In particular, the URI(s) in the Location and Content-Location response header fields (if present) are candidates for invalidation; other URIs might be discovered through mechanisms not specified in this document. However, a cache MUST NOT trigger an invalidation under these conditions if the origin (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>) of the URI to be invalidated differs from that of the target URI (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>). 
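</p></div><div><p>A non-normative Python sketch of this invalidation step follows; the cache.invalidate method, the request and response shapes, and the set of unsafe methods shown are assumptions of the sketch.</p></div><div><pre class="text">
# Non-normative sketch of invalidation after an unsafe request (Section 4.4).
# "cache" is assumed to expose invalidate(uri); default-port normalisation
# and methods of unknown safety are not modelled here.

from urllib.parse import urlsplit, urljoin

UNSAFE_METHODS = {"POST", "PUT", "DELETE", "PATCH"}   # illustrative, not exhaustive

def origin_of(uri):
    parts = urlsplit(uri)
    return (parts.scheme.lower(), parts.hostname, parts.port)

def invalidate_after_response(cache, request, response):
    if request.method not in UNSAFE_METHODS:
        return
    if not (200 &lt;= response.status &lt; 400):            # "non-error" responses only
        return
    cache.invalidate(request.target_uri)              # required
    for field in ("location", "content-location"):    # optional candidates
        value = response.headers.get(field)
        if value is None:
            continue
        candidate = urljoin(request.target_uri, value)
        if origin_of(candidate) == origin_of(request.target_uri):
            cache.invalidate(candidate)               # same-origin only
</pre></div><div><p>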
This helps prevent denial-of-service attacks.</p></div><div id="rfc.section.4.4.p.4"><p>Invalidate means that the cache will either remove all stored responses whose target URI matches the given URI, or will mark them as "invalid" and in need of a mandatory validation before they can be sent in response to a subsequent request.</p></div><div id="rfc.section.4.4.p.5"><p>A "non-error response" is one with a 2xx (Successful) or 3xx (Redirection) status code.</p></div><div id="rfc.section.4.4.p.6"><p>Note that this does not guarantee that all appropriate responses are invalidated globally; a state-changing request would only invalidate responses in the caches it travels through.</p></div></section></section><section id="header.field.definitions"><h2 id="rfc.section.5"><a href="#rfc.section.5">5.</a>&nbsp;<a href="#header.field.definitions">Field Definitions</a></h2><div id="rfc.section.5.p.1"><p>This section defines the syntax and semantics of HTTP fields related to caching.</p></div><section id="field.age"><h3 id="rfc.section.5.1"><a href="#rfc.section.5.1">5.1.</a>&nbsp;<a href="#field.age">Age</a></h3><div id="rfc.section.5.1.p.1"><p>The "Age" response header field conveys the sender's estimate of the time since the response was generated or successfully validated at the origin server. Age values are calculated as specified in <a href="#age.calculations" title="Calculating Age">Section&nbsp;4.2.3</a>.</p></div><div id="rfc.figure.u.7"><div id="rfc.iref.g.2"></div><pre class="inline"> + Age = delta-seconds +</pre></div><div id="rfc.section.5.1.p.2"><p>The Age field value is a non-negative integer, representing time in seconds (see <a href="#delta-seconds" title="Delta Seconds">Section&nbsp;1.3</a>).</p></div><div id="rfc.section.5.1.p.3"><p>Although it is defined as a singleton header field, a cache encountering a message with multiple Age field lines SHOULD use the first field line, discarding subsequent ones.</p></div><div id="rfc.section.5.1.p.4"><p>If the field value (after discarding additional lines, as per above) is invalid (e.g., it contains a list or something other than a non-negative integer), a cache SHOULD consider the response to be stale.</p></div><div id="rfc.section.5.1.p.5"><p>The presence of an Age header field implies that the response was not generated or validated by the origin server for this request. However, lack of an Age header field does not imply the origin was contacted.</p></div></section><section id="field.cache-control"><h3 id="rfc.section.5.2"><a href="#rfc.section.5.2">5.2.</a>&nbsp;<a href="#field.cache-control">Cache-Control</a></h3><div id="rfc.section.5.2.p.1"><p>The "Cache-Control" header field is used to list directives for caches along the request/response chain. Such cache directives are unidirectional in that the presence of a directive in a request does not imply that the same directive is present in the response, or to be repeated in it.</p></div><div id="rfc.section.5.2.p.2"><p>See <a href="#cache.control.extensions" title="Cache Control Extensions">Section&nbsp;5.2.3</a> for information about how Cache-Control directives defined elsewhere are handled.</p></div><div id="rfc.section.5.2.p.3"><p>A proxy, whether or not it implements a cache, MUST pass cache directives through in forwarded messages, regardless of their significance to that application, since the directives might apply to all recipients along the request/response chain. 
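</p></div><div><p>Returning to the Age field of Section 5.1, its handling might be sketched, non-normatively, as follows; a return value of None signals that the response is to be considered stale because the value is invalid.</p></div><div><pre class="text">
# Non-normative sketch of handling the Age field (Section 5.1).
# "field_lines" is assumed to be the list of Age field-line values received.

def parse_age(field_lines):
    if not field_lines:
        return 0                      # no Age field: age_value of 0 (Section 4.2.3)
    first = field_lines[0].strip()    # use the first field line only
    if not first.isdigit():           # lists or non-integers are invalid
        return None                   # caller treats the response as stale
    return int(first)
</pre></div><div><p>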
It is not possible to target a directive to a specific cache.</p></div><div id="rfc.section.5.2.p.4"><p>Cache directives are identified by a token, to be compared case-insensitively, and have an optional argument that can use both token and quoted-string syntax. For the directives defined below that define arguments, recipients ought to accept both forms, even if a specific form is required for generation.</p></div><div id="rfc.figure.u.8"><div id="rfc.iref.g.3"></div><div id="rfc.iref.g.4"></div><pre class="inline"> + Cache-Control = #cache-directive + + cache-directive = token [ "=" ( token / quoted-string ) ] +</pre></div><div id="rfc.section.5.2.p.5"><p>For the cache directives defined below, no argument is defined (nor allowed) unless stated otherwise.</p></div><section id="cache-request-directive"><h4 id="rfc.section.5.2.1"><a href="#rfc.section.5.2.1">5.2.1.</a>&nbsp;<a href="#cache-request-directive">Request Cache-Control Directives</a></h4><div id="rfc.section.5.2.1.p.1"><p>This section defines cache request directives. They are advisory; caches MAY implement them, but are not required to.</p></div><section id="cache-request-directive.max-age"><h5 id="rfc.section.5.2.1.1"><a href="#rfc.section.5.2.1.1">5.2.1.1.</a>&nbsp;<a href="#cache-request-directive.max-age">max-age</a></h5><div id="rfc.section.5.2.1.1.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.1.1.p.2"><ul class="empty"><li><a href="#delta-seconds">delta-seconds</a> (see <a href="#delta-seconds" title="Delta Seconds">Section&nbsp;1.3</a>)</li></ul></div><div id="rfc.section.5.2.1.1.p.3"><p>The "max-age" request directive indicates that the client prefers a response whose age is less than or equal to the specified number of seconds. Unless the max-stale request directive is also present, the client does not wish to receive a stale response.</p></div><div id="rfc.section.5.2.1.1.p.4"><p>This directive uses the token form of the argument syntax: e.g., 'max-age=5' not 'max-age="5"'. A sender MUST NOT generate the quoted-string form.</p></div></section><section id="cache-request-directive.max-stale"><h5 id="rfc.section.5.2.1.2"><a href="#rfc.section.5.2.1.2">5.2.1.2.</a>&nbsp;<a href="#cache-request-directive.max-stale">max-stale</a></h5><div id="rfc.section.5.2.1.2.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.1.2.p.2"><ul class="empty"><li><a href="#delta-seconds">delta-seconds</a> (see <a href="#delta-seconds" title="Delta Seconds">Section&nbsp;1.3</a>)</li></ul></div><div id="rfc.section.5.2.1.2.p.3"><p>The "max-stale" request directive indicates that the client will accept a response that has exceeded its freshness lifetime. If a value is present, then the client is willing to accept a response that has exceeded its freshness lifetime by no more than the specified number of seconds. If no value is assigned to max-stale, then the client will accept a stale response of any age.</p></div><div id="rfc.section.5.2.1.2.p.4"><p>This directive uses the token form of the argument syntax: e.g., 'max-stale=10' not 'max-stale="10"'. 
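</p></div><div><p>A non-normative Python sketch of parsing the Cache-Control grammar above into a directive map follows; commas inside quoted strings are not handled, and a real implementation would follow the full list and quoted-string rules.</p></div><div><pre class="text">
# Non-normative sketch of parsing Cache-Control into a directive map.
# Directive names are compared case-insensitively; arguments may use token
# or quoted-string form.

def parse_cache_control(field_value):
    directives = {}
    for part in field_value.split(","):
        part = part.strip()
        if not part:
            continue
        name, sep, argument = part.partition("=")
        name = name.strip().lower()
        if not sep:
            directives[name] = None              # directive without an argument
        else:
            argument = argument.strip()
            if argument.startswith('"') and argument.endswith('"'):
                argument = argument[1:-1]        # quoted-string form
            directives[name] = argument
    return directives
</pre></div><div><p>For example, parsing "max-age=60, must-revalidate" with this sketch yields a map from "max-age" to "60" and from "must-revalidate" to no argument.</p></div><div><p>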
A sender MUST NOT generate the quoted-string form.</p></div></section><section id="cache-request-directive.min-fresh"><h5 id="rfc.section.5.2.1.3"><a href="#rfc.section.5.2.1.3">5.2.1.3.</a>&nbsp;<a href="#cache-request-directive.min-fresh">min-fresh</a></h5><div id="rfc.section.5.2.1.3.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.1.3.p.2"><ul class="empty"><li><a href="#delta-seconds">delta-seconds</a> (see <a href="#delta-seconds" title="Delta Seconds">Section&nbsp;1.3</a>)</li></ul></div><div id="rfc.section.5.2.1.3.p.3"><p>The "min-fresh" request directive indicates that the client prefers a response whose freshness lifetime is no less than its current age plus the specified time in seconds. That is, the client wants a response that will still be fresh for at least the specified number of seconds.</p></div><div id="rfc.section.5.2.1.3.p.4"><p>This directive uses the token form of the argument syntax: e.g., 'min-fresh=20' not 'min-fresh="20"'. A sender MUST NOT generate the quoted-string form.</p></div></section><section id="cache-request-directive.no-cache"><h5 id="rfc.section.5.2.1.4"><a href="#rfc.section.5.2.1.4">5.2.1.4.</a>&nbsp;<a href="#cache-request-directive.no-cache">no-cache</a></h5><div id="rfc.section.5.2.1.4.p.1"><p>The "no-cache" request directive indicates that the client prefers stored response not be used to satisfy the request without successful validation on the origin server.</p></div></section><section id="cache-request-directive.no-store"><h5 id="rfc.section.5.2.1.5"><a href="#rfc.section.5.2.1.5">5.2.1.5.</a>&nbsp;<a href="#cache-request-directive.no-store">no-store</a></h5><div id="rfc.section.5.2.1.5.p.1"><p>The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it. This directive applies to both private and shared caches. "MUST NOT store" in this context means that the cache MUST NOT intentionally store the information in non-volatile storage, and MUST make a best-effort attempt to remove the information from volatile storage as promptly as possible after forwarding it.</p></div><div id="rfc.section.5.2.1.5.p.2"><p>This directive is <em>not</em> a reliable or sufficient mechanism for ensuring privacy. In particular, malicious or compromised caches might not recognize or obey this directive, and communications networks might be vulnerable to eavesdropping.</p></div><div id="rfc.section.5.2.1.5.p.3"><p>Note that if a request containing this directive is satisfied from a cache, the no-store request directive does not apply to the already stored response.</p></div></section><section id="cache-request-directive.no-transform"><h5 id="rfc.section.5.2.1.6"><a href="#rfc.section.5.2.1.6">5.2.1.6.</a>&nbsp;<a href="#cache-request-directive.no-transform">no-transform</a></h5><div id="rfc.section.5.2.1.6.p.1"><p>The "no-transform" request directive indicates that the client is asking for intermediaries to avoid transforming the content, as defined in Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div></section><section id="cache-request-directive.only-if-cached"><h5 id="rfc.section.5.2.1.7"><a href="#rfc.section.5.2.1.7">5.2.1.7.</a>&nbsp;<a href="#cache-request-directive.only-if-cached">only-if-cached</a></h5><div id="rfc.section.5.2.1.7.p.1"><p>The "only-if-cached" request directive indicates that the client only wishes to obtain a stored response. 
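</p></div><div><p>The advisory request directives of this section might be applied to a stored response as in the following non-normative Python sketch; req_cc is a parsed directive map such as the one produced by the earlier parsing sketch, ages are in seconds, and only-if-cached is handled elsewhere (when deciding whether to forward the request at all).</p></div><div><pre class="text">
# Non-normative sketch of honouring the advisory request directives of
# Section 5.2.1.  current_age and freshness_lifetime come from the
# calculations of Sections 4.2.3 and 4.2.1; directive arguments are assumed
# to be present where the grammar requires them.

def satisfies_request_directives(req_cc, current_age, freshness_lifetime,
                                 validated=False):
    if "no-cache" in req_cc and not validated:
        return False
    if "max-age" in req_cc and current_age &gt; int(req_cc["max-age"]):
        return False
    if "min-fresh" in req_cc:
        if freshness_lifetime - current_age &lt; int(req_cc["min-fresh"]):
            return False
    stale_by = current_age - freshness_lifetime
    if stale_by &gt; 0:
        if "max-stale" not in req_cc:
            return False
        limit = req_cc["max-stale"]               # None means any staleness accepted
        if limit is not None and stale_by &gt; int(limit):
            return False
    return True
</pre></div><div><p>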
Caches that honor this request directive SHOULD, upon receiving it, either respond using a stored response consistent with the other constraints of the request, or respond with a 504 (Gateway Timeout) status code.</p></div></section></section><section id="cache-response-directive"><h4 id="rfc.section.5.2.2"><a href="#rfc.section.5.2.2">5.2.2.</a>&nbsp;<a href="#cache-response-directive">Response Cache-Control Directives</a></h4><div id="rfc.section.5.2.2.p.1"><p>This section defines cache response directives. A cache MUST obey the Cache-Control directives defined in this section.</p></div><section id="cache-response-directive.max-age"><h5 id="rfc.section.5.2.2.1"><a href="#rfc.section.5.2.2.1">5.2.2.1.</a>&nbsp;<a href="#cache-response-directive.max-age">max-age</a></h5><div id="rfc.section.5.2.2.1.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.2.1.p.2"><ul class="empty"><li><a href="#delta-seconds">delta-seconds</a> (see <a href="#delta-seconds" title="Delta Seconds">Section&nbsp;1.3</a>)</li></ul></div><div id="rfc.section.5.2.2.1.p.3"><p>The "max-age" response directive indicates that the response is to be considered stale after its age is greater than the specified number of seconds.</p></div><div id="rfc.section.5.2.2.1.p.4"><p>This directive uses the token form of the argument syntax: e.g., 'max-age=5' not 'max-age="5"'. A sender MUST NOT generate the quoted-string form.</p></div></section><section id="cache-response-directive.must-revalidate"><h5 id="rfc.section.5.2.2.2"><a href="#rfc.section.5.2.2.2">5.2.2.2.</a>&nbsp;<a href="#cache-response-directive.must-revalidate">must-revalidate</a></h5><div id="rfc.section.5.2.2.2.p.1"><p>The "must-revalidate" response directive indicates that once the response has become stale, a cache MUST NOT reuse that response to satisfy another request until it has been successfully validated by the origin, as defined by <a href="#validation.model" title="Validation">Section&nbsp;4.3</a>.</p></div><div id="rfc.section.5.2.2.2.p.2"><p>The must-revalidate directive is necessary to support reliable operation for certain protocol features. In all circumstances a cache MUST NOT ignore the must-revalidate directive; in particular, if a cache is disconnected, the cache MUST generate an error response rather than reuse the stale response. 
The generated status code SHOULD be 504 (Gateway Timeout) unless another error status code is more applicable.</p></div><div id="rfc.section.5.2.2.2.p.3"><p>The must-revalidate directive ought to be used by servers if and only if failure to validate a request could cause incorrect operation, such as a silently unexecuted financial transaction.</p></div><div id="rfc.section.5.2.2.2.p.4"><p>The must-revalidate directive also permits a shared cache to reuse a response to a request containing an Authorization header field (Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>), subject to the above requirement on revalidation (<a href="#caching.authenticated.responses" title="Storing Responses to Authenticated Requests">Section&nbsp;3.5</a>).</p></div></section><section id="cache-response-directive.must-understand"><h5 id="rfc.section.5.2.2.3"><a href="#rfc.section.5.2.2.3">5.2.2.3.</a>&nbsp;<a href="#cache-response-directive.must-understand">must-understand</a></h5><div id="rfc.section.5.2.2.3.p.1"><p>The "must-understand" response directive limits caching of the response to a cache that understands and conforms to the requirements for that response's status code.</p></div><div id="rfc.section.5.2.2.3.p.2"><p>Responses containing "must-understand" SHOULD also contain the "no-store" directive; caches that implement "must-understand" SHOULD ignore the "no-store" directive in responses that contain both directives and a status code that the cache understands and conforms to any related caching requirements.</p></div></section><section id="cache-response-directive.no-cache"><h5 id="rfc.section.5.2.2.4"><a href="#rfc.section.5.2.2.4">5.2.2.4.</a>&nbsp;<a href="#cache-response-directive.no-cache">no-cache</a></h5><div id="rfc.section.5.2.2.4.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.2.4.p.2"><ul class="empty"><li>#<a href="#imported.rules">field-name</a></li></ul></div><div id="rfc.section.5.2.2.4.p.3"><p>The "no-cache" response directive, in its unqualified form (without an argument), indicates that the response MUST NOT be used to satisfy any other request without forwarding it for validation and receiving a successful response; see <a href="#validation.model" title="Validation">Section&nbsp;4.3</a>.</p></div><div id="rfc.section.5.2.2.4.p.4"><p>This allows an origin server to prevent a cache from using the response to satisfy a request without contacting it, even by caches that have been configured to send stale responses.</p></div><div id="rfc.section.5.2.2.4.p.5"><p>The qualified form of no-cache response directive, with an argument that lists one or more field names, indicates that a cache MAY use the response to satisfy a subsequent request, subject to any other restrictions on caching, if the listed header fields are excluded from the subsequent response or the subsequent response has been successfully revalidated with the origin server (updating or removing those fields). This allows an origin server to prevent the re-use of certain header fields in a response, while still allowing caching of the rest of the response.</p></div><div id="rfc.section.5.2.2.4.p.6"><p>The field names given are not limited to the set of header fields defined by this specification. Field names are case-insensitive.</p></div><div id="rfc.section.5.2.2.4.p.7"><p>This directive uses the quoted-string form of the argument syntax. 
A sender SHOULD NOT generate the token form (even if quoting appears not to be needed for single-entry lists).</p></div><div id="rfc.section.5.2.2.4.p.8"><ul class="empty"><li>Note: The qualified form of the directive is often handled by caches as if an unqualified no-cache directive was received; i.e., the special handling for the qualified form is not widely implemented.</li></ul></div></section><section id="cache-response-directive.no-store"><h5 id="rfc.section.5.2.2.5"><a href="#rfc.section.5.2.2.5">5.2.2.5.</a>&nbsp;<a href="#cache-response-directive.no-store">no-store</a></h5><div id="rfc.section.5.2.2.5.p.1"><p>The "no-store" response directive indicates that a cache MUST NOT store any part of either the immediate request or response, and MUST NOT use the response to satisfy any other request.</p></div><div id="rfc.section.5.2.2.5.p.2"><p>This directive applies to both private and shared caches. "MUST NOT store" in this context means that the cache MUST NOT intentionally store the information in non-volatile storage, and MUST make a best-effort attempt to remove the information from volatile storage as promptly as possible after forwarding it.</p></div><div id="rfc.section.5.2.2.5.p.3"><p>This directive is <em>not</em> a reliable or sufficient mechanism for ensuring privacy. In particular, malicious or compromised caches might not recognize or obey this directive, and communications networks might be vulnerable to eavesdropping.</p></div><div id="rfc.section.5.2.2.5.p.4"><p>Note that the "must-understand" cache directive overrides "no-store" in certain circumstances; see <a href="#cache-response-directive.must-understand" title="must-understand">Section&nbsp;5.2.2.3</a>.</p></div></section><section id="cache-response-directive.no-transform"><h5 id="rfc.section.5.2.2.6"><a href="#rfc.section.5.2.2.6">5.2.2.6.</a>&nbsp;<a href="#cache-response-directive.no-transform">no-transform</a></h5><div id="rfc.section.5.2.2.6.p.1"><p>The "no-transform" response directive indicates that an intermediary (regardless of whether it implements a cache) MUST NOT transform the content, as defined in Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div></section><section id="cache-response-directive.private"><h5 id="rfc.section.5.2.2.7"><a href="#rfc.section.5.2.2.7">5.2.2.7.</a>&nbsp;<a href="#cache-response-directive.private">private</a></h5><div id="rfc.section.5.2.2.7.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.2.7.p.2"><ul class="empty"><li>#<a href="#imported.rules">field-name</a></li></ul></div><div id="rfc.section.5.2.2.7.p.3"><p>The unqualified "private" response directive indicates that a shared cache MUST NOT store the response (i.e., the response is intended for a single user). 
It also indicates that a private cache MAY store the response, subject to the constraints defined in <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>, even if the response would not otherwise be heuristically cacheable by a private cache.</p></div><div id="rfc.section.5.2.2.7.p.4"><p>If a qualified private response directive is present, with an argument that lists one or more field names, then only the listed header fields are limited to a single user: a shared cache MUST NOT store the listed header fields if they are present in the original response, but MAY store the remainder of the response message without those header fields, subject to the constraints defined in <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>.</p></div><div id="rfc.section.5.2.2.7.p.5"><p>The field names given are not limited to the set of header fields defined by this specification. Field names are case-insensitive.</p></div><div id="rfc.section.5.2.2.7.p.6"><p>This directive uses the quoted-string form of the argument syntax. A sender SHOULD NOT generate the token form (even if quoting appears not to be needed for single-entry lists).</p></div><div id="rfc.section.5.2.2.7.p.7"><ul class="empty"><li>Note: This usage of the word "private" only controls where the response can be stored; it cannot ensure the privacy of the message content. Also, the qualified form of the directive is often handled by caches as if an unqualified private directive was received; i.e., the special handling for the qualified form is not widely implemented.</li></ul></div></section><section id="cache-response-directive.proxy-revalidate"><h5 id="rfc.section.5.2.2.8"><a href="#rfc.section.5.2.2.8">5.2.2.8.</a>&nbsp;<a href="#cache-response-directive.proxy-revalidate">proxy-revalidate</a></h5><div id="rfc.section.5.2.2.8.p.1"><p>The "proxy-revalidate" response directive indicates that once the response has become stale, a shared cache MUST NOT reuse that response to satisfy another request until it has been successfully validated by the origin, as defined by <a href="#validation.model" title="Validation">Section&nbsp;4.3</a>. This is analogous to must-revalidate (<a href="#cache-response-directive.must-revalidate" title="must-revalidate">Section&nbsp;5.2.2.2</a>), except that proxy-revalidate does not apply to private caches.</p></div><div id="rfc.section.5.2.2.8.p.2"><p>Note that "proxy-revalidate" on its own does not imply that a response is cacheable. For example, it might be combined with the public directive (<a href="#cache-response-directive.public" title="public">Section&nbsp;5.2.2.9</a>), allowing the response to be cached while requiring only a shared cache to revalidate when stale.</p></div></section><section id="cache-response-directive.public"><h5 id="rfc.section.5.2.2.9"><a href="#rfc.section.5.2.2.9">5.2.2.9.</a>&nbsp;<a href="#cache-response-directive.public">public</a></h5><div id="rfc.section.5.2.2.9.p.1"><p>The "public" response directive indicates that a cache MAY store the response even if it would otherwise be prohibited, subject to the constraints defined in <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>. In other words, public explicitly marks the response as cacheable.
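As a non-normative illustration, a response might carry 'Cache-Control: public, max-age=600' to combine this marking with an explicit freshness lifetime.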
For example, public permits a shared cache to reuse a response to a request containing an Authorization header field (<a href="#caching.authenticated.responses" title="Storing Responses to Authenticated Requests">Section&nbsp;3.5</a>).</p></div><div id="rfc.section.5.2.2.9.p.2"><p>Note that it is unnecessary to add the public directive to a response that is already cacheable according to <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>.</p></div><div id="rfc.section.5.2.2.9.p.3"><p>If a response with the public directive has no explicit freshness information, it is heuristically cacheable (<a href="#heuristic.freshness" title="Calculating Heuristic Freshness">Section&nbsp;4.2.2</a>).</p></div></section><section id="cache-response-directive.s-maxage"><h5 id="rfc.section.5.2.2.10"><a href="#rfc.section.5.2.2.10">5.2.2.10.</a>&nbsp;<a href="#cache-response-directive.s-maxage">s-maxage</a></h5><div id="rfc.section.5.2.2.10.p.1" class="avoidbreakafter"><p>Argument syntax:</p></div><div id="rfc.section.5.2.2.10.p.2"><ul class="empty"><li><a href="#delta-seconds">delta-seconds</a> (see <a href="#delta-seconds" title="Delta Seconds">Section&nbsp;1.3</a>)</li></ul></div><div id="rfc.section.5.2.2.10.p.3"><p>The "s-maxage" response directive indicates that, for a shared cache, the maximum age specified by this directive overrides the maximum age specified by either the max-age directive or the <a href="#field.expires">Expires</a> header field.</p></div><div id="rfc.section.5.2.2.10.p.4"><p>The s-maxage directive incorporates the proxy-revalidate (<a href="#cache-response-directive.proxy-revalidate" title="proxy-revalidate">Section&nbsp;5.2.2.8</a>) response directive's semantics for a shared cache. A shared cache MUST NOT reuse a stale response with s-maxage to satisfy another request until it has been successfully validated by the origin, as defined by <a href="#validation.model" title="Validation">Section&nbsp;4.3</a>. This directive also permits a shared cache to reuse a response to a request containing an Authorization header field, subject to the above requirements on maximum age and revalidation (<a href="#caching.authenticated.responses" title="Storing Responses to Authenticated Requests">Section&nbsp;3.5</a>).</p></div><div id="rfc.section.5.2.2.10.p.5"><p>This directive uses the token form of the argument syntax: e.g., 's-maxage=10' not 's-maxage="10"'. A sender MUST NOT generate the quoted-string form.</p></div></section></section><section id="cache.control.extensions"><h4 id="rfc.section.5.2.3"><a href="#rfc.section.5.2.3">5.2.3.</a>&nbsp;<a href="#cache.control.extensions">Cache Control Extensions</a></h4><div id="rfc.section.5.2.3.p.1"><p>The Cache-Control header field can be extended through the use of one or more cache-extension tokens, each with an optional value. A cache MUST ignore unrecognized cache directives.</p></div><div id="rfc.section.5.2.3.p.2"><p>Informational extensions (those that do not require a change in cache behavior) can be added without changing the semantics of other directives.</p></div><div id="rfc.section.5.2.3.p.3"><p>Behavioral extensions are designed to work by acting as modifiers to the existing base of cache directives. 
Both the new directive and the old directive are supplied, such that applications that do not understand the new directive will default to the behavior specified by the old directive, and those that understand the new directive will recognize it as modifying the requirements associated with the old directive. In this way, extensions to the existing cache-control directives can be made without breaking deployed caches.</p></div><div id="rfc.section.5.2.3.p.4"><p>For example, consider a hypothetical new response directive called "community" that acts as a modifier to the private directive: in addition to private caches, any cache that is shared only by members of the named community is allowed to cache the response. An origin server wishing to allow the UCI community to use an otherwise private response in their shared cache(s) could do so by including</p></div><div id="rfc.figure.u.9"><pre class="text"> + Cache-Control: private, community="UCI" +</pre></div><div id="rfc.section.5.2.3.p.5"><p>A cache that recognizes such a community cache-extension could broaden its behavior in accordance with that extension. A cache that does not recognize the community cache-extension would ignore it and adhere to the private directive.</p></div><div id="rfc.section.5.2.3.p.6" class="avoidbreakafter"><p>New extension directives ought to consider defining:</p></div><div id="rfc.section.5.2.3.p.7"><ul><li>What it means for a directive to be specified multiple times,</li><li>When the directive does not take an argument, what it means when an argument is present,</li><li>When the directive requires an argument, what it means when it is missing,</li><li>Whether the directive is specific to requests, responses, or able to be used in either.</li></ul></div></section><section id="cache.directive.registry"><h4 id="rfc.section.5.2.4"><a href="#rfc.section.5.2.4">5.2.4.</a>&nbsp;<a href="#cache.directive.registry">Cache Directive Registry</a></h4><div id="rfc.section.5.2.4.p.1"><p>The "Hypertext Transfer Protocol (HTTP) Cache Directive Registry" defines the namespace for the cache directives. It has been created and is now maintained at &lt;<a href="https://www.iana.org/assignments/http-cache-directives">https://www.iana.org/assignments/http-cache-directives</a>&gt;.</p></div><div id="rfc.section.5.2.4.p.2" class="avoidbreakafter"><p>A registration MUST include the following fields:</p></div><div id="rfc.section.5.2.4.p.3"><ul><li>Cache Directive Name</li><li>Pointer to specification text</li></ul></div><div id="rfc.section.5.2.4.p.4"><p>Values to be added to this namespace require IETF Review (see <a href="#RFC8126"><cite title="Guidelines for Writing an IANA Considerations Section in RFCs">[RFC8126]</cite></a>, Section 4.8).</p></div></section></section><section id="field.expires"><h3 id="rfc.section.5.3"><a href="#rfc.section.5.3">5.3.</a>&nbsp;<a href="#field.expires">Expires</a></h3><div id="rfc.section.5.3.p.1"><p>The "Expires" response header field gives the date/time after which the response is considered stale. 
See <a href="#expiration.model" title="Freshness">Section&nbsp;4.2</a> for further discussion of the freshness model.</p></div><div id="rfc.section.5.3.p.2"><p>The presence of an Expires header field does not imply that the original resource will change or cease to exist at, before, or after that time.</p></div><div id="rfc.section.5.3.p.3"><p>The Expires field value is an HTTP-date timestamp, as defined in Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div><div id="rfc.figure.u.10"><div id="rfc.iref.g.5"></div><pre class="inline"> + Expires = HTTP-date +</pre></div><div id="rfc.section.5.3.p.4"><p>For example</p></div><div id="rfc.figure.u.11"><pre class="text"> + Expires: Thu, 01 Dec 1994 16:00:00 GMT +</pre></div><div id="rfc.section.5.3.p.5"><p>A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired").</p></div><div id="rfc.section.5.3.p.6"><p>If a response includes a <a href="#field.cache-control">Cache-Control</a> header field with the max-age directive (<a href="#cache-response-directive.max-age" title="max-age">Section&nbsp;5.2.2.1</a>), a recipient MUST ignore the Expires header field. Likewise, if a response includes the s-maxage directive (<a href="#cache-response-directive.s-maxage" title="s-maxage">Section&nbsp;5.2.2.10</a>), a shared cache recipient MUST ignore the Expires header field. In both these cases, the value in Expires is only intended for recipients that have not yet implemented the Cache-Control header field.</p></div><div id="rfc.section.5.3.p.7"><p>An origin server without a clock MUST NOT generate an Expires header field unless its value represents a fixed time in the past (always expired) or its value has been associated with the resource by a system or user with a reliable clock.</p></div><div id="rfc.section.5.3.p.8"><p>Historically, HTTP required the Expires field value to be no more than a year in the future. While longer freshness lifetimes are no longer prohibited, extremely large values have been demonstrated to cause problems (e.g., clock overflows due to use of 32-bit integers for time values), and many caches will evict a response far sooner than that.</p></div></section><section id="field.pragma"><h3 id="rfc.section.5.4"><a href="#rfc.section.5.4">5.4.</a>&nbsp;<a href="#field.pragma">Pragma</a></h3><div id="rfc.section.5.4.p.1"><p>The "Pragma" request header field was defined for HTTP/1.0 caches, so that clients could specify a "no-cache" request (as <a href="#field.cache-control">Cache-Control</a> was not defined until HTTP/1.1).</p></div><div id="rfc.section.5.4.p.2"><p>However, support for Cache-Control is now widespread. As a result, this specification deprecates Pragma.</p></div><div id="rfc.section.5.4.p.3"><ul class="empty"><li>Note: Because the meaning of "Pragma: no-cache" in responses was never specified, it does not provide a reliable replacement for "Cache-Control: no-cache" in them.</li></ul></div></section><section id="field.warning"><h3 id="rfc.section.5.5"><a href="#rfc.section.5.5">5.5.</a>&nbsp;<a href="#field.warning">Warning</a></h3><div id="rfc.section.5.5.p.1"><p>The "Warning" header field was used to carry additional information about the status or transformation of a message that might not be reflected in the status code. This specification obsoletes it, as it is not widely generated or surfaced to users. 
The information it carried can be gleaned from examining other header fields, such as <a href="#field.age">Age</a>.</p></div></section></section><section id="history.lists"><h2 id="rfc.section.6"><a href="#rfc.section.6">6.</a>&nbsp;<a href="#history.lists">Relationship to Applications and Other Caches</a></h2><div id="rfc.section.6.p.1"><p>Applications using HTTP often specify additional forms of caching. For example, Web browsers often have history mechanisms such as "Back" buttons that can be used to redisplay a representation retrieved earlier in a session.</p></div><div id="rfc.section.6.p.2"><p>Likewise, some Web browsers implement caching of images and other assets within a page view; they may or may not honor HTTP caching semantics.</p></div><div id="rfc.section.6.p.3"><p>The requirements in this specification do not necessarily apply to how applications use data after it is retrieved from a HTTP cache. For example, a history mechanism can display a previous representation even if it has expired, and an application can use cached data in other ways beyond its freshness lifetime.</p></div><div id="rfc.section.6.p.4"><p>This specification does not prohibit the application from taking HTTP caching into account; for example, a history mechanism might tell the user that a view is stale, or it might honor cache directives (e.g., Cache-Control: no-store).</p></div><div id="rfc.section.6.p.5"><p>However, when an application caches data and does not make this apparent to or easily controllable by the user, it is strongly encouraged to define its operation with respect to HTTP cache directives, so as not to surprise authors who expect caching semantics to be honoured. For example, while it might be reasonable to define an application cache "above" HTTP that allows a response containing Cache-Control: no-store to be reused for requests that are directly related to the request that fetched it (such as those created during the same page load), it would likely be surprising and confusing to users and authors if it were allowed to be reused for requests unrelated in any way to the one from which it was obtained.</p></div></section><section id="security.considerations"><h2 id="rfc.section.7"><a href="#rfc.section.7">7.</a>&nbsp;<a href="#security.considerations">Security Considerations</a></h2><div id="rfc.section.7.p.1"><p>This section is meant to inform developers, information providers, and users of known security concerns specific to HTTP caching. More general security considerations are addressed in "HTTP/1.1" (Section 17 of <a href="#Messaging"><cite title="HTTP Semantics">[Messaging]</cite></a>) and "HTTP Semantics" (Section 11 of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>).</p></div><div id="rfc.section.7.p.2"><p>Caches expose additional potential vulnerabilities, since the contents of the cache represent an attractive target for malicious exploitation. Because cache contents persist after an HTTP request is complete, an attack on the cache can reveal information long after a user believes that the information has been removed from the network. Therefore, cache contents need to be protected as sensitive information.</p></div><section id="cache.poisoning"><h3 id="rfc.section.7.1"><a href="#rfc.section.7.1">7.1.</a>&nbsp;<a href="#cache.poisoning">Cache Poisoning</a></h3><div id="rfc.section.7.1.p.1"><p>Various attacks might be amplified by being stored in a cache. 
Such "cache poisoning" attacks happen when an attacker uses implementation flaws, elevated privileges, or other techniques to insert a response into a cache. This is especially effective when shared caches are used to distribute malicious content to many clients.</p></div><div id="rfc.section.7.1.p.2"><p>One common attack vector for cache poisoning is to exploit differences in message parsing on proxies and in user agents; see Appendix of <a href="#Messaging"><cite title="HTTP Semantics">[Messaging]</cite></a> for the relevant requirements regarding HTTP/1.1.</p></div></section><section id="security.timing"><h3 id="rfc.section.7.2"><a href="#rfc.section.7.2">7.2.</a>&nbsp;<a href="#security.timing">Timing Attacks</a></h3><div id="rfc.section.7.2.p.1"><p>Because one of the primary uses of a cache is to optimise performance, its use can "leak" information about what resources have been previously requested.</p></div><div id="rfc.section.7.2.p.2"><p>For example, if a user visits a site and their browser caches some of its responses, and then navigates to a second site, that site can attempt to load responses it knows exist on the first site. If they load quickly, it can be assumed that the user has visited that site, or even a specific page on it.</p></div><div id="rfc.section.7.2.p.3"><p>Such "timing attacks" can be mitigated by adding more information to the cache key, such as the identity of the referring site (to prevent the attack described above). This is sometimes called "double keying."</p></div></section><section id="caching.of.sensitive.information"><h3 id="rfc.section.7.3"><a href="#rfc.section.7.3">7.3.</a>&nbsp;<a href="#caching.of.sensitive.information">Caching of Sensitive Information</a></h3><div id="rfc.section.7.3.p.1"><p>Implementation and deployment flaws (as well as misunderstanding of cache operation) might lead to caching of sensitive information (e.g., authentication credentials) that is thought to be private, exposing it to unauthorized parties.</p></div><div id="rfc.section.7.3.p.2"><p>Note that the Set-Cookie response header field <a href="#RFC6265"><cite title="HTTP State Management Mechanism">[RFC6265]</cite></a> does not inhibit caching; a cacheable response with a Set-Cookie header field can be (and often is) used to satisfy subsequent requests to caches.
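For instance (a non-normative illustration with a made-up cookie value), a response carrying both 'Set-Cookie: session=abc123' and 'Cache-Control: max-age=600' can be stored by a shared cache and served to other users unless a directive such as private or no-store is also present.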
Servers who wish to control caching of these responses are encouraged to emit appropriate Cache-Control response header fields.</p></div></section></section><section id="iana.considerations"><h2 id="rfc.section.8"><a href="#rfc.section.8">8.</a>&nbsp;<a href="#iana.considerations">IANA Considerations</a></h2><div id="rfc.section.8.p.1"><p>The change controller for the following registrations is: "IETF (iesg@ietf.org) - Internet Engineering Task Force".</p></div><section id="field.name.registration"><h3 id="rfc.section.8.1"><a href="#rfc.section.8.1">8.1.</a>&nbsp;<a href="#field.name.registration">Field Name Registration</a></h3><div id="rfc.section.8.1.p.1"><p>First, introduce the new "Hypertext Transfer Protocol (HTTP) Field Name Registry" at &lt;<a href="https://www.iana.org/assignments/http-fields">https://www.iana.org/assignments/http-fields</a>&gt; as described in Section 12.1 of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div><div id="rfc.section.8.1.p.2" class="avoidbreakafter"><p>Then, please update the registry with the field names listed in the table below:</p></div><div id="rfc.table.1" class="table table-condensed table-striped"><div id="iana.header.registration.table"></div><table class="table table-condensed table-striped full tleft"><caption>Table 1</caption><thead><tr><th class="left">Field Name</th><th class="left">Status</th><th class="left">Ref.</th><th class="left">Comments</th></tr></thead><tbody><tr><td class="text-left">Age</td><td class="text-left">standard</td><td class="text-left"><a href="#field.age" title="Age">5.1</a> </td><td class="text-left"></td></tr><tr><td class="text-left">Cache-Control</td><td class="text-left">standard</td><td class="text-left"><a href="#field.cache-control" title="Cache-Control">5.2</a> </td><td class="text-left"></td></tr><tr><td class="text-left">Expires</td><td class="text-left">standard</td><td class="text-left"><a href="#field.expires" title="Expires">5.3</a> </td><td class="text-left"></td></tr><tr><td class="text-left">Pragma</td><td class="text-left">standard</td><td class="text-left"><a href="#field.pragma" title="Pragma">5.4</a> </td><td class="text-left"></td></tr><tr><td class="text-left">Warning</td><td class="text-left">obsoleted</td><td class="text-left"><a href="#field.warning" title="Warning">5.5</a> </td><td class="text-left"></td></tr></tbody></table></div></section><section id="cache.directive.registration"><h3 id="rfc.section.8.2"><a href="#rfc.section.8.2">8.2.</a>&nbsp;<a href="#cache.directive.registration">Cache Directive Registration</a></h3><div id="rfc.section.8.2.p.1"><p>Please update the "Hypertext Transfer Protocol (HTTP) Cache Directive Registry" at &lt;<a href="https://www.iana.org/assignments/http-cache-directives">https://www.iana.org/assignments/http-cache-directives</a>&gt; with the registration procedure of <a href="#cache.directive.registry" title="Cache Directive Registry">Section&nbsp;5.2.4</a> and the cache directive names summarized in the table below.</p></div><div id="rfc.table.2" class="table table-condensed table-striped"><div id="iana.cache.directive.registration.table"></div><table class="table table-condensed table-striped full tleft"><caption>Table 2</caption><thead><tr><th class="left">Cache Directive</th><th class="left">Reference</th></tr></thead><tbody><tr><td class="text-left">max-age</td><td class="text-left"><a href="#cache-request-directive.max-age" title="max-age">Section&nbsp;5.2.1.1</a>, <a href="#cache-response-directive.max-age" 
title="max-age">Section&nbsp;5.2.2.1</a> </td></tr><tr><td class="text-left">max-stale</td><td class="text-left"><a href="#cache-request-directive.max-stale" title="max-stale">Section&nbsp;5.2.1.2</a> </td></tr><tr><td class="text-left">min-fresh</td><td class="text-left"><a href="#cache-request-directive.min-fresh" title="min-fresh">Section&nbsp;5.2.1.3</a> </td></tr><tr><td class="text-left">must-revalidate</td><td class="text-left"><a href="#cache-response-directive.must-revalidate" title="must-revalidate">Section&nbsp;5.2.2.2</a> </td></tr><tr><td class="text-left">must-understand</td><td class="text-left"><a href="#cache-response-directive.must-understand" title="must-understand">Section&nbsp;5.2.2.3</a> </td></tr><tr><td class="text-left">no-cache</td><td class="text-left"><a href="#cache-request-directive.no-cache" title="no-cache">Section&nbsp;5.2.1.4</a>, <a href="#cache-response-directive.no-cache" title="no-cache">Section&nbsp;5.2.2.4</a> </td></tr><tr><td class="text-left">no-store</td><td class="text-left"><a href="#cache-request-directive.no-store" title="no-store">Section&nbsp;5.2.1.5</a>, <a href="#cache-response-directive.no-store" title="no-store">Section&nbsp;5.2.2.5</a> </td></tr><tr><td class="text-left">no-transform</td><td class="text-left"><a href="#cache-request-directive.no-transform" title="no-transform">Section&nbsp;5.2.1.6</a>, <a href="#cache-response-directive.no-transform" title="no-transform">Section&nbsp;5.2.2.6</a> </td></tr><tr><td class="text-left">only-if-cached</td><td class="text-left"><a href="#cache-request-directive.only-if-cached" title="only-if-cached">Section&nbsp;5.2.1.7</a> </td></tr><tr><td class="text-left">private</td><td class="text-left"><a href="#cache-response-directive.private" title="private">Section&nbsp;5.2.2.7</a> </td></tr><tr><td class="text-left">proxy-revalidate</td><td class="text-left"><a href="#cache-response-directive.proxy-revalidate" title="proxy-revalidate">Section&nbsp;5.2.2.8</a> </td></tr><tr><td class="text-left">public</td><td class="text-left"><a href="#cache-response-directive.public" title="public">Section&nbsp;5.2.2.9</a> </td></tr><tr><td class="text-left">s-maxage</td><td class="text-left"><a href="#cache-response-directive.s-maxage" title="s-maxage">Section&nbsp;5.2.2.10</a> </td></tr></tbody></table></div></section><section id="warn.code.registration"><h3 id="rfc.section.8.3"><a href="#rfc.section.8.3">8.3.</a>&nbsp;<a href="#warn.code.registration">Warn Code Registry</a></h3><div id="rfc.section.8.3.p.1"><p>Please add a note to the "Hypertext Transfer Protocol (HTTP) Warn Codes" registry at &lt;<a href="https://www.iana.org/assignments/http-warn-codes">https://www.iana.org/assignments/http-warn-codes</a>&gt; to the effect that Warning is obsoleted.</p></div></section></section><section id="rfc.references"><h2 id="rfc.section.9"><a href="#rfc.section.9">9.</a> References</h2><section id="rfc.references.1"><h3 id="rfc.section.9.1"><a href="#rfc.section.9.1">9.1.</a> Normative References</h3><dl class="dl-horizontal"><dt id="Messaging">[Messaging]</dt><dd>Fielding, R., Ed., Nottingham, M., Ed., and J. 
Reschke, Ed., “<a href="https://tools.ietf.org/html/rfc9110">HTTP Semantics</a>”, RFC&nbsp;9110, <a href="http://dx.doi.org/10.17487/RFC9110">DOI&nbsp;10.17487/RFC9110</a>, June&nbsp;2022, &lt;<a href="https://www.rfc-editor.org/info/rfc9110">https://www.rfc-editor.org/info/rfc9110</a>&gt;.</dd><dt id="RFC2119">[RFC2119]</dt><dd>Bradner, S., “<a href="https://tools.ietf.org/html/rfc2119">Key words for use in RFCs to Indicate Requirement Levels</a>”, BCP&nbsp;14, RFC&nbsp;2119, <a href="http://dx.doi.org/10.17487/RFC2119">DOI&nbsp;10.17487/RFC2119</a>, March&nbsp;1997, &lt;<a href="https://www.rfc-editor.org/info/rfc2119">https://www.rfc-editor.org/info/rfc2119</a>&gt;.</dd><dt id="RFC5234">[RFC5234]</dt><dd>Crocker, D., Ed. and P. Overell, “<a href="https://tools.ietf.org/html/rfc5234">Augmented BNF for Syntax Specifications: ABNF</a>”, STD&nbsp;68, RFC&nbsp;5234, <a href="http://dx.doi.org/10.17487/RFC5234">DOI&nbsp;10.17487/RFC5234</a>, January&nbsp;2008, &lt;<a href="https://www.rfc-editor.org/info/rfc5234">https://www.rfc-editor.org/info/rfc5234</a>&gt;.</dd><dt id="RFC7405">[RFC7405]</dt><dd>Kyzivat, P., “<a href="https://tools.ietf.org/html/rfc7405">Case-Sensitive String Support in ABNF</a>”, RFC&nbsp;7405, <a href="http://dx.doi.org/10.17487/RFC7405">DOI&nbsp;10.17487/RFC7405</a>, December&nbsp;2014, &lt;<a href="https://www.rfc-editor.org/info/rfc7405">https://www.rfc-editor.org/info/rfc7405</a>&gt;.</dd><dt id="RFC8174">[RFC8174]</dt><dd>Leiba, B., “<a href="https://tools.ietf.org/html/rfc8174">Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words</a>”, BCP&nbsp;14, RFC&nbsp;8174, <a href="http://dx.doi.org/10.17487/RFC8174">DOI&nbsp;10.17487/RFC8174</a>, May&nbsp;2017, &lt;<a href="https://www.rfc-editor.org/info/rfc8174">https://www.rfc-editor.org/info/rfc8174</a>&gt;.</dd><dt id="Semantics">[Semantics]</dt><dd>Fielding, R., Ed., Nottingham, M., Ed., and J. Reschke, Ed., “<a href="https://tools.ietf.org/html/rfc9112">HTTP/1.1</a>”, RFC&nbsp;9112, <a href="http://dx.doi.org/10.17487/RFC9112">DOI&nbsp;10.17487/RFC9112</a>, June&nbsp;2022, &lt;<a href="https://www.rfc-editor.org/info/rfc9112">https://www.rfc-editor.org/info/rfc9112</a>&gt;.</dd></dl></section><section id="rfc.references.2"><h3 id="rfc.section.9.2"><a href="#rfc.section.9.2">9.2.</a> Informative References</h3><dl class="dl-horizontal"><dt id="RFC2616">[RFC2616]</dt><dd>Fielding, R., Gettys, J., Mogul, J., Frystyk, H., Masinter, L., Leach, P., and T. Berners-Lee, “<a href="https://tools.ietf.org/html/rfc2616">Hypertext Transfer Protocol -- HTTP/1.1</a>”, RFC&nbsp;2616, <a href="http://dx.doi.org/10.17487/RFC2616">DOI&nbsp;10.17487/RFC2616</a>, June&nbsp;1999, &lt;<a href="https://www.rfc-editor.org/info/rfc2616">https://www.rfc-editor.org/info/rfc2616</a>&gt;.</dd><dt id="RFC5861">[RFC5861]</dt><dd>Nottingham, M., “<a href="https://tools.ietf.org/html/rfc5861">HTTP Cache-Control Extensions for Stale Content</a>”, RFC&nbsp;5861, <a href="http://dx.doi.org/10.17487/RFC5861">DOI&nbsp;10.17487/RFC5861</a>, April&nbsp;2010, &lt;<a href="https://www.rfc-editor.org/info/rfc5861">https://www.rfc-editor.org/info/rfc5861</a>&gt;.</dd><dt id="RFC5905">[RFC5905]</dt><dd>Mills, D., Martin, J., Ed., Burbank, J., and W. 
Kasch, “<a href="https://tools.ietf.org/html/rfc5905">Network Time Protocol Version 4: Protocol and Algorithms Specification</a>”, RFC&nbsp;5905, <a href="http://dx.doi.org/10.17487/RFC5905">DOI&nbsp;10.17487/RFC5905</a>, June&nbsp;2010, &lt;<a href="https://www.rfc-editor.org/info/rfc5905">https://www.rfc-editor.org/info/rfc5905</a>&gt;.</dd><dt id="RFC6265">[RFC6265]</dt><dd>Barth, A., “<a href="https://tools.ietf.org/html/rfc6265">HTTP State Management Mechanism</a>”, RFC&nbsp;6265, <a href="http://dx.doi.org/10.17487/RFC6265">DOI&nbsp;10.17487/RFC6265</a>, April&nbsp;2011, &lt;<a href="https://www.rfc-editor.org/info/rfc6265">https://www.rfc-editor.org/info/rfc6265</a>&gt;.</dd><dt id="RFC7234">[RFC7234]</dt><dd>Fielding, R., Ed., Nottingham, M., Ed., and J. Reschke, Ed., “<a href="https://tools.ietf.org/html/rfc7234">Hypertext Transfer Protocol (HTTP): Caching</a>”, RFC&nbsp;7234, <a href="http://dx.doi.org/10.17487/RFC7234">DOI&nbsp;10.17487/RFC7234</a>, June&nbsp;2014, &lt;<a href="https://www.rfc-editor.org/info/rfc7234">https://www.rfc-editor.org/info/rfc7234</a>&gt;.</dd><dt id="RFC8126">[RFC8126]</dt><dd>Cotton, M., Leiba, B., and T. Narten, “<a href="https://tools.ietf.org/html/rfc8126">Guidelines for Writing an IANA Considerations Section in RFCs</a>”, BCP&nbsp;26, RFC&nbsp;8126, <a href="http://dx.doi.org/10.17487/RFC8126">DOI&nbsp;10.17487/RFC8126</a>, June&nbsp;2017, &lt;<a href="https://www.rfc-editor.org/info/rfc8126">https://www.rfc-editor.org/info/rfc8126</a>&gt;.</dd></dl></section></section><section id="collected.abnf"><h2 id="rfc.section.A" class="np"><a href="#rfc.section.A">A.</a>&nbsp;<a href="#collected.abnf">Collected ABNF</a></h2><div id="rfc.section.A.p.1"><p>In the collected ABNF below, list rules are expanded as per Appendix of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div><div id="rfc.figure.u.12"><pre class="inline"> +Age = delta-seconds + +Cache-Control = [ cache-directive *( OWS "," OWS cache-directive ) ] + +Expires = HTTP-date + +HTTP-date = &lt;HTTP-date, see [Semantics], Section 5.6.7&gt; + +OWS = &lt;OWS, see [Semantics], Section 5.6.3&gt; + +cache-directive = token [ "=" ( token / quoted-string ) ] + +delta-seconds = 1*DIGIT + +field-name = &lt;field-name, see [Semantics], Section 5.1&gt; + +quoted-string = &lt;quoted-string, see [Semantics], Section 5.6.4&gt; + +token = &lt;token, see [Semantics], Section 5.6.2&gt; +</pre></div></section><section id="changes.from.rfc.7234"><h2 id="rfc.section.B"><a href="#rfc.section.B">B.</a>&nbsp;<a href="#changes.from.rfc.7234">Changes from RFC 7234</a></h2><div id="rfc.section.B.p.1"><p>Handling of duplicate and conflicting cache directives has been clarified. (<a href="#calculating.freshness.lifetime" title="Calculating Freshness Lifetime">Section&nbsp;4.2.1</a>)</p></div><div id="rfc.section.B.p.2"><p>Cache invalidation of the URIs in the Location and Content-Location header fields is no longer required, but still allowed. (<a href="#invalidation" title="Invalidating Stored Responses">Section&nbsp;4.4</a>)</p></div><div id="rfc.section.B.p.3"><p>Cache invalidation of the URIs in the Location and Content-Location header fields is disallowed when the origin is different; previously, it was the host. (<a href="#invalidation" title="Invalidating Stored Responses">Section&nbsp;4.4</a>)</p></div><div id="rfc.section.B.p.4"><p>Handling invalid and multiple Age header field values has been clarified. 
(<a href="#field.age" title="Age">Section&nbsp;5.1</a>)</p></div><div id="rfc.section.B.p.5"><p>Some cache directives defined by this specification now have stronger prohibitions against generating the quoted form of their values, since this has been found to create interoperability problems. Consumers of extension cache directives are no longer required to accept both token and quoted-string forms, but they still need to parse them properly for unknown extensions. (<a href="#field.cache-control" title="Cache-Control">Section&nbsp;5.2</a>)</p></div><div id="rfc.section.B.p.6"><p>The "public" and "private" cache directives were clarified, so that they do not make responses reusable under any condition. (<a href="#cache-response-directive" title="Response Cache-Control Directives">Section&nbsp;5.2.2</a>)</p></div><div id="rfc.section.B.p.7"><p>The "must-understand" cache directive was introduced; caches are no longer required to understand the semantics of new response status codes unless it is present. (<a href="#cache-response-directive.must-understand" title="must-understand">Section&nbsp;5.2.2.3</a>)</p></div><div id="rfc.section.B.p.8"><p>The Warning response header was obsoleted. Much of the information supported by Warning could be gleaned by examining the response, and the remaining warn-codes — although potentially useful — were entirely advisory. In practice, Warning was not added by caches or intermediaries. (<a href="#field.warning" title="Warning">Section&nbsp;5.5</a>)</p></div></section><section id="change.log"><h2 id="rfc.section.C"><a href="#rfc.section.C">C.</a>&nbsp;<a href="#change.log">Change Log</a></h2><div id="rfc.section.C.p.1"><p>This section is to be removed before publishing as an RFC.</p></div><section id="changes.since.publication.as.rfc"><h3 id="rfc.section.C.1"><a href="#rfc.section.C.1">C.1.</a>&nbsp;<a href="#changes.since.publication.as.rfc">Between RFC7234 and draft 00</a></h3><div id="rfc.section.C.1.p.1" class="avoidbreakafter"><p>The changes were purely editorial:</p></div><div id="rfc.section.C.1.p.2"><ul><li>Change boilerplate and abstract to indicate the "draft" status, and update references to ancestor specifications.</li><li>Remove version "1.1" from document title, indicating that this specification applies to all HTTP versions.</li><li>Adjust historical notes.</li><li>Update links to sibling specifications.</li><li>Replace sections listing changes from RFC 2616 by new empty sections referring to RFC 723x.</li><li>Remove acknowledgements specific to RFC 723x.</li><li>Move "Acknowledgements" to the very end and make them unnumbered.</li></ul></div></section><section id="changes.since.00"><h3 id="rfc.section.C.2"><a href="#rfc.section.C.2">C.2.</a>&nbsp;<a href="#changes.since.00">Since draft-ietf-httpbis-cache-00</a></h3><div id="rfc.section.C.2.p.1" class="avoidbreakafter"><p>The changes are purely editorial:</p></div><div id="rfc.section.C.2.p.2"><ul><li>Moved all extensibility tips, registration procedures, and registry tables from the IANA considerations to normative sections, reducing the IANA considerations to just instructions that will be removed prior to publication as an RFC.</li></ul></div></section><section id="changes.since.01"><h3 id="rfc.section.C.3"><a href="#rfc.section.C.3">C.3.</a>&nbsp;<a href="#changes.since.01">Since draft-ietf-httpbis-cache-01</a></h3><div id="rfc.section.C.3.p.1"><ul><li>Cite RFC 8126 instead of RFC 5226 (&lt;<a 
href="https://github.com/httpwg/http-core/issues/75">https://github.com/httpwg/http-core/issues/75</a>&gt;)</li><li>In <a href="#field.pragma" title="Pragma">Section&nbsp;5.4</a>, misleading statement about the relation between Pragma and Cache-Control (&lt;<a href="https://github.com/httpwg/http-core/issues/92">https://github.com/httpwg/http-core/issues/92</a>&gt;, &lt;<a href="https://www.rfc-editor.org/errata/eid4674">https://www.rfc-editor.org/errata/eid4674</a>&gt;)</li></ul></div></section><section id="changes.since.02"><h3 id="rfc.section.C.4"><a href="#rfc.section.C.4">C.4.</a>&nbsp;<a href="#changes.since.02">Since draft-ietf-httpbis-cache-02</a></h3><div id="rfc.section.C.4.p.1"><ul><li>In <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>, explain that only final responses are cacheable (&lt;<a href="https://github.com/httpwg/http-core/issues/29">https://github.com/httpwg/http-core/issues/29</a>&gt;)</li><li>In <a href="#cache-response-directive" title="Response Cache-Control Directives">Section&nbsp;5.2.2</a>, clarify what responses various directives apply to (&lt;<a href="https://github.com/httpwg/http-core/issues/52">https://github.com/httpwg/http-core/issues/52</a>&gt;)</li><li>In <a href="#validation.sent" title="Sending a Validation Request">Section&nbsp;4.3.1</a>, clarify the source of validators in conditional requests (&lt;<a href="https://github.com/httpwg/http-core/issues/110">https://github.com/httpwg/http-core/issues/110</a>&gt;)</li><li>Revise <a href="#history.lists" title="Relationship to Applications and Other Caches">Section&nbsp;6</a> to apply to more than just History Lists (&lt;<a href="https://github.com/httpwg/http-core/issues/126">https://github.com/httpwg/http-core/issues/126</a>&gt;)</li><li>In <a href="#field.warning" title="Warning">Section&nbsp;5.5</a>, deprecated "Warning" header field (&lt;<a href="https://github.com/httpwg/http-core/issues/139">https://github.com/httpwg/http-core/issues/139</a>&gt;)</li><li>In <a href="#caching.authenticated.responses" title="Storing Responses to Authenticated Requests">Section&nbsp;3.5</a>, remove a spurious note (&lt;<a href="https://github.com/httpwg/http-core/issues/141">https://github.com/httpwg/http-core/issues/141</a>&gt;)</li></ul></div></section><section id="changes.since.03"><h3 id="rfc.section.C.5"><a href="#rfc.section.C.5">C.5.</a>&nbsp;<a href="#changes.since.03">Since draft-ietf-httpbis-cache-03</a></h3><div id="rfc.section.C.5.p.1"><ul><li>In <a href="#caching.overview" title="Overview of Cache Operation">Section&nbsp;2</a>, define what a disconnected cache is (&lt;<a href="https://github.com/httpwg/http-core/issues/5">https://github.com/httpwg/http-core/issues/5</a>&gt;)</li><li>In <a href="#constructing.responses.from.caches" title="Constructing Responses from Caches">Section&nbsp;4</a>, clarify language around how to select a response when more than one matches (&lt;<a href="https://github.com/httpwg/http-core/issues/23">https://github.com/httpwg/http-core/issues/23</a>&gt;)</li><li>in <a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>, mention stale-while-revalidate and stale-if-error (&lt;<a href="https://github.com/httpwg/http-core/issues/122">https://github.com/httpwg/http-core/issues/122</a>&gt;)</li><li>Remove requirements around cache request directives (&lt;<a href="https://github.com/httpwg/http-core/issues/129">https://github.com/httpwg/http-core/issues/129</a>&gt;)</li><li>Deprecate Pragma (&lt;<a 
href="https://github.com/httpwg/http-core/issues/140">https://github.com/httpwg/http-core/issues/140</a>&gt;)</li><li>In <a href="#caching.authenticated.responses" title="Storing Responses to Authenticated Requests">Section&nbsp;3.5</a> and <a href="#cache-response-directive" title="Response Cache-Control Directives">Section&nbsp;5.2.2</a>, note effect of some directives on authenticated requests (&lt;<a href="https://github.com/httpwg/http-core/issues/161">https://github.com/httpwg/http-core/issues/161</a>&gt;)</li></ul></div></section><section id="changes.since.04"><h3 id="rfc.section.C.6"><a href="#rfc.section.C.6">C.6.</a>&nbsp;<a href="#changes.since.04">Since draft-ietf-httpbis-cache-04</a></h3><div id="rfc.section.C.6.p.1"><ul><li>In <a href="#field.cache-control" title="Cache-Control">Section&nbsp;5.2</a>, remove the registrations for stale-if-error and stale-while-revalidate which happened in RFC 7234 (&lt;<a href="https://github.com/httpwg/http-core/issues/207">https://github.com/httpwg/http-core/issues/207</a>&gt;)</li></ul></div></section><section id="changes.since.05"><h3 id="rfc.section.C.7"><a href="#rfc.section.C.7">C.7.</a>&nbsp;<a href="#changes.since.05">Since draft-ietf-httpbis-cache-05</a></h3><div id="rfc.section.C.7.p.1"><ul><li>In <a href="#incomplete.responses" title="Storing Incomplete Responses">Section&nbsp;3.3</a>, clarify how weakly framed content is considered for purposes of completeness (&lt;<a href="https://github.com/httpwg/http-core/issues/25">https://github.com/httpwg/http-core/issues/25</a>&gt;)</li><li>Throughout, describe Vary and cache key operations more clearly (&lt;<a href="https://github.com/httpwg/http-core/issues/28">https://github.com/httpwg/http-core/issues/28</a>&gt;)</li><li>In <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>, remove concept of "cacheable methods" in favor of prose (&lt;<a href="https://github.com/httpwg/http-core/issues/54">https://github.com/httpwg/http-core/issues/54</a>&gt;, &lt;<a href="https://www.rfc-editor.org/errata/eid5300">https://www.rfc-editor.org/errata/eid5300</a>&gt;)</li><li>Refactored <a href="#security.considerations" title="Security Considerations">Section&nbsp;7</a>, and added a section on timing attacks (&lt;<a href="https://github.com/httpwg/http-core/issues/233">https://github.com/httpwg/http-core/issues/233</a>&gt;)</li><li>Changed "cacheable by default" to "heuristically cacheable" throughout (&lt;<a href="https://github.com/httpwg/http-core/issues/242">https://github.com/httpwg/http-core/issues/242</a>&gt;)</li></ul></div></section><section id="changes.since.06"><h3 id="rfc.section.C.8"><a href="#rfc.section.C.8">C.8.</a>&nbsp;<a href="#changes.since.06">Since draft-ietf-httpbis-cache-06</a></h3><div id="rfc.section.C.8.p.1"><ul><li>In <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a> and <a href="#cache-response-directive.must-understand" title="must-understand">Section&nbsp;5.2.2.3</a>, change response cacheability to only require understanding the response status code if the must-understand cache directive is present (&lt;<a href="https://github.com/httpwg/http-core/issues/120">https://github.com/httpwg/http-core/issues/120</a>&gt;)</li><li>Change requirements for handling different forms of cache directives in <a href="#field.cache-control" title="Cache-Control">Section&nbsp;5.2</a> (&lt;<a href="https://github.com/httpwg/http-core/issues/128">https://github.com/httpwg/http-core/issues/128</a>&gt;)</li><li>Fix typo 
in <a href="#cache-response-directive.s-maxage" title="s-maxage">Section&nbsp;5.2.2.10</a> (&lt;<a href="https://github.com/httpwg/http-core/issues/264">https://github.com/httpwg/http-core/issues/264</a>&gt;)</li><li>In <a href="#cache-response-directive.public" title="public">Section&nbsp;5.2.2.9</a> and <a href="#cache-response-directive.private" title="private">Section&nbsp;5.2.2.7</a>, clarify "private" and "public" so that they do not override all other cache directives (&lt;<a href="https://github.com/httpwg/http-core/issues/268">https://github.com/httpwg/http-core/issues/268</a>&gt;)</li><li>In <a href="#response.cacheability" title="Storing Responses in Caches">Section&nbsp;3</a>, distinguish between private with and without qualifying headers (&lt;<a href="https://github.com/httpwg/http-core/issues/270">https://github.com/httpwg/http-core/issues/270</a>&gt;)</li><li>In <a href="#caching.negotiated.responses" title="Calculating Cache Keys with Vary">Section&nbsp;4.1</a>, clarify that any "*" as a member of Vary will disable caching (&lt;<a href="https://github.com/httpwg/http-core/issues/286">https://github.com/httpwg/http-core/issues/286</a>&gt;)</li><li>In <a href="#requirements.notation" title="Requirements Notation">Section&nbsp;1.1</a>, reference RFC 8174 as well (&lt;<a href="https://github.com/httpwg/http-core/issues/303">https://github.com/httpwg/http-core/issues/303</a>&gt;)</li></ul></div></section><section id="changes.since.07"><h3 id="rfc.section.C.9"><a href="#rfc.section.C.9">C.9.</a>&nbsp;<a href="#changes.since.07">Since draft-ietf-httpbis-cache-07</a></h3><div id="rfc.section.C.9.p.1"><ul><li>Throughout, replace "effective request URI", "request-target" and similar with "target URI" (&lt;<a href="https://github.com/httpwg/http-core/issues/259">https://github.com/httpwg/http-core/issues/259</a>&gt;)</li><li>In <a href="#cache-response-directive.public" title="public">Section&nbsp;5.2.2.9</a> and <a href="#cache-response-directive.private" title="private">Section&nbsp;5.2.2.7</a>, make it clear that these directives do not ignore other requirements for caching (&lt;<a href="https://github.com/httpwg/http-core/issues/320">https://github.com/httpwg/http-core/issues/320</a>&gt;)</li><li>In <a href="#incomplete.responses" title="Storing Incomplete Responses">Section&nbsp;3.3</a>, move definition of "complete" into semantics (&lt;<a href="https://github.com/httpwg/http-core/issues/334">https://github.com/httpwg/http-core/issues/334</a>&gt;)</li></ul></div></section><section id="changes.since.08"><h3 id="rfc.section.C.10"><a href="#rfc.section.C.10">C.10.</a>&nbsp;<a href="#changes.since.08">Since draft-ietf-httpbis-cache-08</a></h3><div id="rfc.section.C.10.p.1"><ul><li><a href="#collected.abnf" title="Collected ABNF">Appendix&nbsp;A</a> now uses the sender variant of the "#" list expansion (&lt;<a href="https://github.com/httpwg/http-core/issues/192">https://github.com/httpwg/http-core/issues/192</a>&gt;)</li></ul></div></section><section id="changes.since.09"><h3 id="rfc.section.C.11"><a href="#rfc.section.C.11">C.11.</a>&nbsp;<a href="#changes.since.09">Since draft-ietf-httpbis-cache-09</a></h3><div id="rfc.section.C.11.p.1"><ul><li>In <a href="#field.age" title="Age">Section&nbsp;5.1</a>, discuss handling of invalid and multiple Age header field values (&lt;<a href="https://github.com/httpwg/http-core/issues/193">https://github.com/httpwg/http-core/issues/193</a>&gt;)</li><li>Switch to xml2rfc v3 mode for draft generation (&lt;<a 
href="https://github.com/httpwg/http-core/issues/394">https://github.com/httpwg/http-core/issues/394</a>&gt;)</li></ul></div></section><section id="changes.since.10"><h3 id="rfc.section.C.12"><a href="#rfc.section.C.12">C.12.</a>&nbsp;<a href="#changes.since.10">Since draft-ietf-httpbis-cache-10</a></h3><div id="rfc.section.C.12.p.1"><ul><li>In <a href="#field.cache-control" title="Cache-Control">Section&nbsp;5.2</a> (<a href="#field.cache-control">Cache-Control</a>), adjust ABNF to allow empty lists (&lt;<a href="https://github.com/httpwg/http-core/issues/210">https://github.com/httpwg/http-core/issues/210</a>&gt;)</li></ul></div></section><section id="changes.since.11"><h3 id="rfc.section.C.13"><a href="#rfc.section.C.13">C.13.</a>&nbsp;<a href="#changes.since.11">Since draft-ietf-httpbis-cache-11</a></h3><div id="rfc.section.C.13.p.1"><p>None.</p></div></section><section id="changes.since.12"><h3 id="rfc.section.C.14"><a href="#rfc.section.C.14">C.14.</a>&nbsp;<a href="#changes.since.12">Since draft-ietf-httpbis-cache-12</a></h3><div id="rfc.section.C.14.p.1"><ul><li>In <a href="#serving.stale.responses" title="Serving Stale Responses">Section&nbsp;4.2.4</a>, remove 'no-store', as it won't be in cache in the first place (&lt;<a href="https://github.com/httpwg/http-core/issues/447">https://github.com/httpwg/http-core/issues/447</a>&gt;)</li><li>In <a href="#storing.fields" title="Storing Header and Trailer Fields">Section&nbsp;3.1</a>, make it clear that only response headers need be stored (&lt;<a href="https://github.com/httpwg/http-core/issues/457">https://github.com/httpwg/http-core/issues/457</a>&gt;)</li><li>Rewrote "Updating Stored Header Fields" <a href="#update" title="Updating Stored Header Fields">Section&nbsp;3.2</a> (&lt;<a href="https://github.com/httpwg/http-core/issues/458">https://github.com/httpwg/http-core/issues/458</a>&gt;)</li><li>In <a href="#calculating.freshness.lifetime" title="Calculating Freshness Lifetime">Section&nbsp;4.2.1</a> clarify how to handle invalid and conflicting directives (&lt;<a href="https://github.com/httpwg/http-core/issues/460">https://github.com/httpwg/http-core/issues/460</a>&gt;)</li><li>In <a href="#validation.response" title="Handling a Validation Response">Section&nbsp;4.3.3</a>, mention retry of failed validation requests (&lt;<a href="https://github.com/httpwg/http-core/issues/462">https://github.com/httpwg/http-core/issues/462</a>&gt;)</li><li>In <a href="#validation.response" title="Handling a Validation Response">Section&nbsp;4.3.3</a>, clarify requirement on storing a full response to a conditional request (&lt;<a href="https://github.com/httpwg/http-core/issues/463">https://github.com/httpwg/http-core/issues/463</a>&gt;)</li><li>In <a href="#field.age" title="Age">Section&nbsp;5.1</a>, clarify error handling (&lt;<a href="https://github.com/httpwg/http-core/issues/471">https://github.com/httpwg/http-core/issues/471</a>&gt;)</li><li>In <a href="#expiration.model" title="Freshness">Section&nbsp;4.2</a>, remove spurious "UTC" (&lt;<a href="https://github.com/httpwg/http-core/issues/472">https://github.com/httpwg/http-core/issues/472</a>&gt;)</li><li>In <a href="#expiration.model" title="Freshness">Section&nbsp;4.2</a>, correct the date-related rule names to consider case-insensitive (&lt;<a href="https://github.com/httpwg/http-core/issues/473">https://github.com/httpwg/http-core/issues/473</a>&gt;)</li><li>In <a href="#history.lists" title="Relationship to Applications and Other Caches">Section&nbsp;6</a>, strengthen recommendation 
for application caches to pay attention to cache directives (&lt;<a href="https://github.com/httpwg/http-core/issues/474">https://github.com/httpwg/http-core/issues/474</a>&gt;)</li><li>In <a href="#constructing.responses.from.caches" title="Constructing Responses from Caches">Section&nbsp;4</a>, mention collapsed requests (&lt;<a href="https://github.com/httpwg/http-core/issues/475">https://github.com/httpwg/http-core/issues/475</a>&gt;)</li><li>In <a href="#invalidation" title="Invalidating Stored Responses">Section&nbsp;4.4</a>, relax requirements on Content-Location and Location invalidation (&lt;<a href="https://github.com/httpwg/http-core/issues/478">https://github.com/httpwg/http-core/issues/478</a>&gt;)</li><li>In <a href="#freshening.responses" title="Freshening Stored Responses upon Validation">Section&nbsp;4.3.4</a>, refine the exceptions to update on a 304 (&lt;<a href="https://github.com/httpwg/http-core/issues/488">https://github.com/httpwg/http-core/issues/488</a>&gt;)</li><li>Moved table of Cache-Control directives into <a href="#cache.directive.registration" title="Cache Directive Registration">Section&nbsp;8.2</a> (&lt;<a href="https://github.com/httpwg/http-core/issues/506">https://github.com/httpwg/http-core/issues/506</a>&gt;)</li><li>In <a href="#notation" title="Syntax Notation">Section&nbsp;1.2</a>, remove unused core ABNF rules (&lt;<a href="https://github.com/httpwg/http-core/issues/529">https://github.com/httpwg/http-core/issues/529</a>&gt;)</li><li>Changed to using "payload data" when defining requirements about the data being conveyed within a message, instead of the terms "payload body" or "response body" or "representation body", since they often get confused with the HTTP/1.1 message body (which includes transfer coding) (&lt;<a href="https://github.com/httpwg/http-core/issues/553">https://github.com/httpwg/http-core/issues/553</a>&gt;)</li></ul></div></section><section id="changes.since.13"><h3 id="rfc.section.C.15"><a href="#rfc.section.C.15">C.15.</a>&nbsp;<a href="#changes.since.13">Since draft-ietf-httpbis-cache-13</a></h3><div id="rfc.section.C.15.p.1"><ul><li>In <a href="#cache-response-directive.must-revalidate" title="must-revalidate">Section&nbsp;5.2.2.2</a>, clarify requirements around generating an error response (&lt;<a href="https://github.com/httpwg/http-core/issues/608">https://github.com/httpwg/http-core/issues/608</a>&gt;)</li><li>Changed to using "content" instead of "payload" or "payload data" to avoid confusion with the payload of version-specific messaging frames (&lt;<a href="https://github.com/httpwg/http-core/issues/654">https://github.com/httpwg/http-core/issues/654</a>&gt;)</li><li>In <a href="#freshening.responses" title="Freshening Stored Responses upon Validation">Section&nbsp;4.3.4</a>, clarify how multiple validators are handled (&lt;<a href="https://github.com/httpwg/http-core/issues/659">https://github.com/httpwg/http-core/issues/659</a>&gt;)</li><li>In <a href="#age.calculations" title="Calculating Age">Section&nbsp;4.2.3</a>, <a href="#field.cache-control" title="Cache-Control">Section&nbsp;5.2</a>, and <a href="#cache-response-directive.no-cache" title="no-cache">Section&nbsp;5.2.2.4</a>, remove notes about very old HTTP/1.0 behaviours (&lt;<a href="https://github.com/httpwg/http-core/issues/660">https://github.com/httpwg/http-core/issues/660</a>&gt;)</li><li>In <a href="#cache-response-directive.must-understand" title="must-understand">Section&nbsp;5.2.2.3</a>, modify operation to be more backwards-compatible with 
existing implementations (&lt;<a href="https://github.com/httpwg/http-core/issues/661">https://github.com/httpwg/http-core/issues/661</a>&gt;)</li><li>In <a href="#cache.poisoning" title="Cache Poisoning">Section&nbsp;7.1</a>, cache poisoning can affect private caches too (&lt;<a href="https://github.com/httpwg/http-core/issues/730">https://github.com/httpwg/http-core/issues/730</a>&gt;)</li></ul></div></section><section id="changes.since.14"><h3 id="rfc.section.C.16"><a href="#rfc.section.C.16">C.16.</a>&nbsp;<a href="#changes.since.14">Since draft-ietf-httpbis-cache-14</a></h3><div id="rfc.section.C.16.p.1"><ul><li>Fix subsection ordering in <a href="#cache-response-directive" title="Response Cache-Control Directives">Section&nbsp;5.2.2</a> (&lt;<a href="https://github.com/httpwg/http-core/issues/674">https://github.com/httpwg/http-core/issues/674</a>&gt;)</li><li>In <a href="#caching.overview" title="Overview of Cache Operation">Section&nbsp;2</a>, define what a cache key is (&lt;<a href="https://github.com/httpwg/http-core/issues/728">https://github.com/httpwg/http-core/issues/728</a>&gt;)</li></ul></div></section></section><section id="acks"><h2 id="rfc.section.unnumbered-1"><a href="#acks">Acknowledgments</a></h2><div id="rfc.section.unnumbered-1.p.1"><p>See Appendix "Acknowledgements" of <a href="#Semantics"><cite title="HTTP/1.1">[Semantics]</cite></a>.</p></div></section><section id="rfc.index"><h2><a href="#rfc.index">Index</a></h2><p class="hidden-print"><a href="#rfc.index.A">A</a> <a href="#rfc.index.C">C</a> <a href="#rfc.index.E">E</a> <a href="#rfc.index.F">F</a> <a href="#rfc.index.G">G</a> <a href="#rfc.index.H">H</a> <a href="#rfc.index.M">M</a> <a href="#rfc.index.N">N</a> <a href="#rfc.index.O">O</a> <a href="#rfc.index.P">P</a> <a href="#rfc.index.S">S</a> <a href="#rfc.index.V">V</a> <a href="#rfc.index.W">W</a> </p><div class="print2col"><ul class="ind"><li><a id="rfc.index.A" href="#rfc.index.A"><b>A</b></a><ul><li>age&nbsp;&nbsp;<a href="#rfc.iref.a.1">4.2</a></li><li>Age header field&nbsp;&nbsp;<a href="#rfc.section.5.1"><b>5.1</b></a></li></ul></li><li><a id="rfc.index.C" href="#rfc.index.C"><b>C</b></a><ul><li>cache&nbsp;&nbsp;<a href="#rfc.iref.c.1">1</a></li><li>cache key&nbsp;&nbsp;<a href="#rfc.section.2">2</a>, <a href="#rfc.iref.c.2">2</a></li><li>Cache-Control header field&nbsp;&nbsp;<a href="#rfc.section.5.2"><b>5.2</b></a></li><li>collapsed requests&nbsp;&nbsp;<a href="#rfc.iref.c.3">4</a></li></ul></li><li><a id="rfc.index.E" href="#rfc.index.E"><b>E</b></a><ul><li>Expires header field&nbsp;&nbsp;<a href="#rfc.section.5.3"><b>5.3</b></a></li><li>explicit expiration time&nbsp;&nbsp;<a href="#rfc.iref.e.1">4.2</a></li></ul></li><li><a id="rfc.index.F" href="#rfc.index.F"><b>F</b></a><ul><li>Fields&nbsp;&nbsp;<ul><li>Age&nbsp;&nbsp;<a href="#rfc.section.5.1"><b>5.1</b></a>, <a href="#rfc.section.5.1"><b>5.1</b></a></li><li>Cache-Control&nbsp;&nbsp;<a href="#rfc.section.5.2"><b>5.2</b></a></li><li>Expires&nbsp;&nbsp;<a href="#rfc.section.5.3"><b>5.3</b></a>, <a href="#rfc.section.5.3"><b>5.3</b></a></li><li>Pragma&nbsp;&nbsp;<a href="#rfc.section.5.4"><b>5.4</b></a>, <a href="#rfc.section.5.4"><b>5.4</b></a></li><li>Warning&nbsp;&nbsp;<a href="#rfc.section.5.5"><b>5.5</b></a></li></ul></li><li>fresh&nbsp;&nbsp;<a href="#rfc.section.4.2">4.2</a></li><li>freshness lifetime&nbsp;&nbsp;<a href="#rfc.iref.f.1">4.2</a></li></ul></li><li><a id="rfc.index.G" href="#rfc.index.G"><b>G</b></a><ul><li>Grammar&nbsp;&nbsp;<ul><li>Age&nbsp;&nbsp;<a 
href="#rfc.iref.g.2"><b>5.1</b></a></li><li>Cache-Control&nbsp;&nbsp;<a href="#rfc.iref.g.3"><b>5.2</b></a></li><li>cache-directive&nbsp;&nbsp;<a href="#rfc.iref.g.4"><b>5.2</b></a></li><li>delta-seconds&nbsp;&nbsp;<a href="#rfc.iref.g.1"><b>1.3</b></a></li><li>DIGIT&nbsp;&nbsp;<a href="#rfc.section.1.2"><b>1.2</b></a></li><li>Expires&nbsp;&nbsp;<a href="#rfc.iref.g.5"><b>5.3</b></a></li></ul></li></ul></li><li><a id="rfc.index.H" href="#rfc.index.H"><b>H</b></a><ul><li>Header Fields&nbsp;&nbsp;<ul><li>Age&nbsp;&nbsp;<a href="#rfc.section.5.1"><b>5.1</b></a>, <a href="#rfc.section.5.1"><b>5.1</b></a></li><li>Cache-Control&nbsp;&nbsp;<a href="#rfc.section.5.2"><b>5.2</b></a></li><li>Expires&nbsp;&nbsp;<a href="#rfc.section.5.3"><b>5.3</b></a>, <a href="#rfc.section.5.3"><b>5.3</b></a></li><li>Pragma&nbsp;&nbsp;<a href="#rfc.section.5.4"><b>5.4</b></a>, <a href="#rfc.section.5.4"><b>5.4</b></a></li><li>Warning&nbsp;&nbsp;<a href="#rfc.section.5.5"><b>5.5</b></a></li></ul></li><li>heuristic expiration time&nbsp;&nbsp;<a href="#rfc.iref.h.1">4.2</a></li><li>heuristically cacheable&nbsp;&nbsp;<a href="#rfc.section.4.2.2">4.2.2</a></li></ul></li><li><a id="rfc.index.M" href="#rfc.index.M"><b>M</b></a><ul><li>max-age (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.1"><b>5.2.1.1</b></a>, <a href="#rfc.section.5.2.2.1"><b>5.2.2.1</b></a></li><li>max-stale (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.2"><b>5.2.1.2</b></a></li><li>min-fresh (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.3"><b>5.2.1.3</b></a></li><li>must-revalidate (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.2.2"><b>5.2.2.2</b></a></li><li>must-understand (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.2.3"><b>5.2.2.3</b></a></li></ul></li><li><a id="rfc.index.N" href="#rfc.index.N"><b>N</b></a><ul><li>no-cache (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.4"><b>5.2.1.4</b></a>, <a href="#rfc.section.5.2.2.4"><b>5.2.2.4</b></a></li><li>no-store (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.5"><b>5.2.1.5</b></a>, <a href="#rfc.section.5.2.2.5"><b>5.2.2.5</b></a></li><li>no-transform (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.6"><b>5.2.1.6</b></a>, <a href="#rfc.section.5.2.2.6"><b>5.2.2.6</b></a></li></ul></li><li><a id="rfc.index.O" href="#rfc.index.O"><b>O</b></a><ul><li>only-if-cached (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.1.7"><b>5.2.1.7</b></a></li></ul></li><li><a id="rfc.index.P" href="#rfc.index.P"><b>P</b></a><ul><li>Pragma header field&nbsp;&nbsp;<a href="#rfc.section.5.4"><b>5.4</b></a></li><li>private (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.2.7"><b>5.2.2.7</b></a></li><li>private cache&nbsp;&nbsp;<a href="#rfc.iref.p.1">1</a></li><li>proxy-revalidate (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.2.8"><b>5.2.2.8</b></a></li><li>public (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.2.9"><b>5.2.2.9</b></a></li></ul></li><li><a id="rfc.index.S" href="#rfc.index.S"><b>S</b></a><ul><li>s-maxage (cache directive)&nbsp;&nbsp;<a href="#rfc.section.5.2.2.10"><b>5.2.2.10</b></a></li><li>selected response&nbsp;&nbsp;<a href="#rfc.iref.s.2">4.1</a></li><li>shared cache&nbsp;&nbsp;<a href="#rfc.iref.s.1">1</a></li><li>stale&nbsp;&nbsp;<a href="#rfc.section.4.2">4.2</a></li><li>strong validator&nbsp;&nbsp;<a href="#rfc.iref.s.3">4.3.4</a></li></ul></li><li><a id="rfc.index.V" href="#rfc.index.V"><b>V</b></a><ul><li>validator&nbsp;&nbsp;<a href="#rfc.section.4.3.1">4.3.1</a></li></ul></li><li><a 
id="rfc.index.W" href="#rfc.index.W"><b>W</b></a><ul><li>Warning header field&nbsp;&nbsp;<a href="#rfc.section.5.5"><b>5.5</b></a></li></ul></li></ul></div></section><section id="rfc.authors" class="avoidbreakinside"><h2><a href="#rfc.authors">Authors' Addresses</a></h2><address><b>Roy T. Fielding</b> (editor)<br>Adobe<br>345 Park Ave<br>San Jose, CA&nbsp;95110<br>United States of America<br>Email: fielding@gbiv.com<br>URI: <a href="https://roy.gbiv.com/">https://roy.gbiv.com/</a></address><address><b>Mark Nottingham</b> (editor)<br>Fastly<br>Prahran, VIC<br>Australia<br>Email: mnot@mnot.net<br>URI: <a href="https://www.mnot.net/">https://www.mnot.net/</a></address><address><b>Julian Reschke</b> (editor)<br>greenbytes GmbH<br>Hafenweg 16<br>Münster, 48155<br>Germany<br>Email: julian.reschke@greenbytes.de<br>URI: <a href="https://greenbytes.de/tech/webdav/">https://greenbytes.de/tech/webdav/</a></address></section></div></div></div><script src="/spec/script.mjs" type="module"></script></body></html> \ No newline at end of file diff --git a/test/fixtures/cache-tests/spec/rfc9111.xml b/test/fixtures/cache-tests/spec/rfc9111.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/rfc9111.xml @@ -0,0 +1,2535 @@ +<?xml version="1.0" encoding="utf-8"?> +<?xml-stylesheet type='text/xsl' href='lib/myxml2rfc.xslt'?> +<!DOCTYPE rfc [ + <!ENTITY MAY "<bcp14>MAY</bcp14>"> + <!ENTITY MUST "<bcp14>MUST</bcp14>"> + <!ENTITY MUST-NOT "<bcp14>MUST NOT</bcp14>"> + <!ENTITY OPTIONAL "<bcp14>OPTIONAL</bcp14>"> + <!ENTITY RECOMMENDED "<bcp14>RECOMMENDED</bcp14>"> + <!ENTITY REQUIRED "<bcp14>REQUIRED</bcp14>"> + <!ENTITY SHALL "<bcp14>SHALL</bcp14>"> + <!ENTITY SHALL-NOT "<bcp14>SHALL NOT</bcp14>"> + <!ENTITY SHOULD "<bcp14>SHOULD</bcp14>"> + <!ENTITY SHOULD-NOT "<bcp14>SHOULD NOT</bcp14>"> + <!ENTITY ID-VERSION "latest"> + <!ENTITY mdash "&#8212;"> + <!ENTITY uuml "&#252;"> + <!ENTITY Note "<x:h xmlns:x='http://purl.org/net/xml2rfc/ext'>Note:</x:h>"> +]> +<?rfc toc="yes" ?> +<?rfc tocdepth="4" ?> +<?rfc symrefs="yes" ?> +<?rfc sortrefs="yes" ?> +<?rfc compact="yes"?> +<?rfc subcompact="no" ?> +<?rfc linkmailto="no" ?> +<?rfc editing="no" ?> +<?rfc comments="yes"?> +<?rfc inline="yes"?> +<?rfc rfcedstyle="yes"?> +<?rfc-ext allow-markup-in-artwork="yes" ?> +<?rfc-ext html-pretty-print="prettyprint https://cdn.rawgit.com/google/code-prettify/master/loader/run_prettify.js"?> +<?rfc-ext include-references-in-index="yes" ?> +<?rfc-ext xml2rfc-backend="202007"?> +<?github-issue-label cache?> + +<rfc category="std" docName="draft-ietf-httpbis-cache-&ID-VERSION;" ipr="pre5378Trust200902" + obsoletes="7234" x:maturity-level="proposed" xmlns:x="http://purl.org/net/xml2rfc/ext" xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#' + tocDepth="4"> +<x:feedback template="mailto:ietf-http-wg@w3.org?subject={docname},%20%22{section}%22&amp;body=&lt;{ref}&gt;:"/> +<front> + + <title>HTTP Caching</title> + + <author fullname="Roy T. Fielding" initials="R." surname="Fielding" role="editor"> + <organization>Adobe</organization> + <address> + <postal> + <street>345 Park Ave</street> + <city>San Jose</city> + <region>CA</region> + <code>95110</code> + <country>United States of America</country> + </postal> + <email>fielding@gbiv.com</email> + <uri>https://roy.gbiv.com/</uri> + </address> + </author> + + <author fullname="Mark Nottingham" initials="M." 
surname="Nottingham" role="editor"> + <organization>Fastly</organization> + <address> + <postal> + <city>Prahran</city> + <region>VIC</region> + <country>Australia</country> + </postal> + <email>mnot@mnot.net</email> + <uri>https://www.mnot.net/</uri> + </address> + </author> + + <author fullname="Julian Reschke" initials="J." surname="Reschke" role="editor"> + <organization abbrev="greenbytes">greenbytes GmbH</organization> + <address> + <postal> + <street>Hafenweg 16</street> + <city>M&uuml;nster</city><code>48155</code> + <country>Germany</country> + </postal> + <email>julian.reschke@greenbytes.de</email> + <uri>https://greenbytes.de/tech/webdav/</uri> + </address> + </author> + + <date/> + + <area>Applications and Real-Time</area> + <workgroup>HTTP</workgroup> + + <keyword>Hypertext Transfer Protocol</keyword> + <keyword>HTTP</keyword> + <keyword>HTTP Caching</keyword> + +<abstract> +<t> + The Hypertext Transfer Protocol (HTTP) is a stateless application-level + protocol for distributed, collaborative, hypertext information systems. + This document defines HTTP caches and the associated header fields that + control cache behavior or indicate cacheable response messages. +</t> +<t> + This document obsoletes RFC 7234. +</t> +</abstract> + +<note title="Editorial Note" removeInRFC="true"> + <t> + Discussion of this draft takes place on the HTTP working group + mailing list (ietf-http-wg@w3.org), which is archived at + <eref target="https://lists.w3.org/Archives/Public/ietf-http-wg/"/>. + </t> + <t> + Working Group information can be found at <eref target="https://httpwg.org/"/>; + source code and issues list for this draft can be found at + <eref target="https://github.com/httpwg/http-core"/>. + </t> + <t> + The changes in this draft are summarized in <xref target="changes.since.14"/>. + </t> +</note> + + </front> + <middle> + +<section title="Introduction" anchor="caching"> +<t> + The Hypertext Transfer Protocol (HTTP) is a stateless application-level + request/response protocol that uses extensible semantics and + self-descriptive messages for flexible interaction with network-based + hypertext information systems. It is typically used for distributed information systems, where + the use of response caches can improve performance. This document + defines aspects of HTTP related to caching and reusing response + messages. +</t> + +<iref item="cache" /> +<t> + An HTTP <x:dfn>cache</x:dfn> is a local store of response messages and the + subsystem that controls storage, retrieval, and deletion of messages in it. + A cache stores cacheable responses to reduce the response time and + network bandwidth consumption on future equivalent requests. Any client or + server &MAY; use a cache, though not when acting as a tunnel. +</t> +<iref item="shared cache" /> +<iref item="private cache" /> +<t anchor="shared.and.private.caches"> + A <x:dfn>shared cache</x:dfn> is a cache that stores responses for reuse + by more than one user; shared caches are usually (but not always) deployed + as a part of an intermediary. A <x:dfn>private cache</x:dfn>, in contrast, + is dedicated to a single user; often, they are deployed as a component of + a user agent. +</t> +<t> + HTTP caching's goal is significantly improving performance + by reusing a prior response message to satisfy a current request. 
+ A cache considers a stored response "fresh", as defined in + <xref target="expiration.model" />, if it can be reused without + "validation" (checking with the origin server to see if the cached response + remains valid for this request). A fresh response can therefore + reduce both latency and network overhead each time the cache reuses it. + When a cached response is not fresh, it might still be reusable if validation + can freshen it (<xref target="validation.model" />) or if the + origin is unavailable (<xref target="serving.stale.responses" />). +</t> +<t> + This document obsoletes <xref target="RFC7234" format="none">RFC 7234</xref>, + with the changes being summarized in <xref target="changes.from.rfc.7234"/>. +</t> + +<section title="Requirements Notation" anchor="requirements.notation"> +<t> + The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL + NOT", "SHOULD", "SHOULD NOT", "RECOMMENDED", "NOT RECOMMENDED", + "MAY", and "OPTIONAL" in this document are to be interpreted as + described in BCP 14 <xref target="RFC2119"/> <xref target="RFC8174"/> when, and only when, they + appear in all capitals, as shown here. +</t> +<t> + <xref target="Semantics-conformance"/> defines conformance criteria and contains considerations regarding error handling. +</t> +</section> + +<section title="Syntax Notation" anchor="notation"> +<iref primary="true" item="Grammar" subitem="DIGIT"/> +<t> + This specification uses the Augmented Backus-Naur Form (ABNF) notation of + <xref target="RFC5234"/>, extended with the notation for case-sensitivity + in strings defined in <xref target="RFC7405"/>. +</t> +<t> + It also uses a list extension, defined in <xref target="abnf.extension"/>, + that allows for compact definition of comma-separated lists using a '#' + operator (similar to how the '*' operator indicates repetition). <xref + target="collected.abnf"/> shows the collected grammar with all list + operators expanded to standard ABNF notation. +</t> +<t anchor="core.rules"> + <x:anchor-alias value="DIGIT"/> + The following core rule is included by + reference, as defined in <xref target="RFC5234" x:fmt="," x:sec="B.1"/>: + DIGIT (decimal 0-9). +</t> +<t anchor="imported.rules"> + <x:anchor-alias value="HTTP-date"/> + <x:anchor-alias value="OWS"/> + <x:anchor-alias value="field-name"/> + <x:anchor-alias value="quoted-string"/> + <x:anchor-alias value="token"/> + <xref target="Semantics"/> defines the following rules: +</t> +<sourcecode type="abnf7230"> + <x:ref>HTTP-date</x:ref> = &lt;HTTP-date, see <xref target="http.date"/>&gt; + <x:ref>OWS</x:ref> = &lt;OWS, see <xref target="whitespace"/>&gt; + <x:ref>field-name</x:ref> = &lt;field-name, see <xref target="field-names"/>&gt; + <x:ref>quoted-string</x:ref> = &lt;quoted-string, see <xref target="quoted.strings"/>&gt; + <x:ref>token</x:ref> = &lt;token, see <xref target="tokens"/>&gt; +</sourcecode> +</section> + +<section title="Delta Seconds" anchor="delta-seconds"> +<t> + The delta-seconds rule specifies a non-negative integer, representing time + in seconds. +</t> +<sourcecode type="abnf7230"><iref item="Grammar" primary="true" subitem="delta-seconds" /> + <x:ref>delta-seconds</x:ref> = 1*<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + A recipient parsing a delta-seconds value and converting it to binary form + ought to use an arithmetic type of at least 31 bits of non-negative integer + range. 
+ If a cache receives a delta-seconds value greater than the greatest integer + it can represent, or if any of its subsequent calculations overflows, + the cache &MUST; consider the value to be 2147483648 + (2<sup>31</sup>) or the greatest positive integer it can conveniently + represent. +</t> +<aside> + <t> + &Note; The value 2147483648 is here for historical reasons, + represents infinity (over 68 years), and does not need to be stored in + binary form; an implementation could produce it as a canned string if + any overflow occurs, even if the calculations are performed with an + arithmetic type incapable of directly representing that number. + What matters here is that an overflow be detected and not treated as a + negative value in later calculations. + </t> +</aside> +</section> +</section> + +<section title="Overview of Cache Operation" anchor="caching.overview"> +<iref item="cache key" /> +<t> + Proper cache operation preserves the semantics of HTTP transfers (<xref + target="Semantics"/>) while reducing the transmission of information already + held in the cache. Although caching is an entirely &OPTIONAL; feature of + HTTP, it can be assumed that reusing a cached response is desirable and + that such reuse is the default behavior when no requirement or local + configuration prevents it. Therefore, HTTP cache requirements are focused + on preventing a cache from either storing a non-reusable response or + reusing a stored response inappropriately, rather than mandating that + caches always store and reuse particular responses. +</t> +<iref item="cache key" /> +<t> + The <x:dfn>cache key</x:dfn> is the information a cache uses to select a response and + is comprised of, at a minimum, the request method and target + URI used to retrieve the stored response; the method determines under which + circumstances that response can be used to satisfy a subsequent request. However, many + HTTP caches in common use today only cache GET responses, and therefore only + use the URI as the cache key, forwarding other methods. +</t> +<t> + If a request target is subject to content negotiation, the cache might + store multiple responses for it. Caches differentiate these responses + by incorporating values of the original request's selecting header fields + into the cache key as well, using information in the <x:ref>Vary</x:ref> + response header field, as per <xref target="caching.negotiated.responses"/>. +</t> +<t> + Caches might incorporate additional material into the cache key. + For example, user agent caches might include the referring site's identity, + thereby "double keying" the cache to avoid some privacy risks (see <xref + target="security.timing"/>). +</t> +<t> + Most commonly, caches store the successful result of a retrieval + request: i.e., a <x:ref>200 (OK)</x:ref> response to a GET request, which + contains a representation of the target resource + (<xref target="GET"/>). However, it is also possible to store + redirects, negative results (e.g., <x:ref>404 (Not Found)</x:ref>), + incomplete results (e.g., <x:ref>206 (Partial Content)</x:ref>), and + responses to methods other than GET if the method's definition allows such + caching and defines something suitable for use as a cache key. +</t> +<t> + A cache is <x:dfn>disconnected</x:dfn> when it cannot contact the origin + server or otherwise find a forward path for a request. A + disconnected cache can serve stale responses in some circumstances (<xref + target="serving.stale.responses"/>). 
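+<aside>
+  <t>
+    A minimal, non-normative Java sketch of the cache key described above: a
+    primary key built from the request method and target URI, optionally
+    extended with secondary material such as selecting header values or a
+    partitioning site for "double keying". The class and method names
+    (CacheKeySketch, cacheKey) are illustrative assumptions, not defined by
+    this specification.
+  </t>
+  <sourcecode type="java"><![CDATA[
+import java.net.URI;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.stream.Collectors;
+
+public final class CacheKeySketch {
+
+  // Primary key: method plus target URI; secondary material is appended in
+  // sorted order so that equivalent requests map to the same key.
+  static String cacheKey(String method, URI targetUri, Map<String, String> secondaryMaterial) {
+    String extra = new TreeMap<>(secondaryMaterial).entrySet().stream()
+        .map(e -> e.getKey().toLowerCase() + "=" + e.getValue())
+        .collect(Collectors.joining("&"));
+    return method + " " + targetUri.toASCIIString() + (extra.isEmpty() ? "" : " {" + extra + "}");
+  }
+
+  public static void main(String[] args) {
+    URI target = URI.create("https://example.com/index.html");
+    // A GET response keyed only on method and URI:
+    System.out.println(cacheKey("GET", target, Map.of()));
+    // The same resource "double keyed" by the referring site:
+    System.out.println(cacheKey("GET", target, Map.of("top-level-site", "https://news.example")));
+  }
+}
+]]></sourcecode>
+</aside>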
+</t> + +</section> + +<section title="Storing Responses in Caches" anchor="response.cacheability"> +<t> + A cache &MUST-NOT; store a response to a request unless: +</t> +<ul> + <li><t>the request method is understood by the cache;</t></li> + <li><t>the response status code is final (see + <xref target="status.codes"/>);</t></li> + <li><t>if the response status code is 206 or 304, or the "must-understand" cache directive (see <xref + target="cache-response-directive.must-understand"/>) is present: the cache understands the response status code;</t></li> + <li><t>the "no-store" cache directive is not present in the response + (see <xref target="cache-response-directive.no-store"/>);</t></li> + <li><t>if the cache is shared: the "private" response directive is either not + present or allows a shared cache to store a modified response; + see <xref target="cache-response-directive.private" />);</t></li> + <li><t>if the cache is shared: the <x:ref>Authorization</x:ref> header field + is not present in the request + (see <xref target="field.authorization"/>) or a + response directive is present that explicitly allows shared caching + (see <xref target="caching.authenticated.responses" />); + and,</t></li> + <li><t>the response contains at least one of:</t> + <ul> + <li>a public response directive + (see <xref target="cache-response-directive.public"/>);</li> + <li>a private response directive, if the cache is not shared + (see <xref target="cache-response-directive.private"/>);</li> + <li>an <x:ref>Expires</x:ref> header field + (see <xref target="field.expires"/>);</li> + <li>a max-age response directive + (see <xref target="cache-response-directive.max-age" />);</li> + <li>if the cache is shared: an s-maxage response directive + (see <xref target="cache-response-directive.s-maxage" />);</li> + <li>a Cache Control Extension that allows it to be cached + (see <xref target="cache.control.extensions" />); or,</li> + <li>a status code that is defined as heuristically cacheable + (see <xref target="heuristic.freshness" />).</li> + </ul> + </li> +</ul> +<t> + Note that a cache-control extension can override any of the requirements + listed; see <xref target="cache.control.extensions" />. +</t> +<t> + In this context, a cache has "understood" a request method or a response + status code if it recognizes it and implements all specified + caching-related behavior. +</t> +<t> + Note that, in normal operation, some caches will not store a response that + has neither a cache validator nor an explicit expiration time, as such + responses are not usually useful to store. However, caches are not + prohibited from storing such responses. +</t> + +<section title="Storing Header and Trailer Fields" anchor="storing.fields"> +<t> + Caches &MUST; include all received response header fields &mdash; including + unrecognised ones &mdash; when storing a response; this assures that new HTTP + header fields can be successfully deployed. However, the following exceptions + are made: +</t> +<ul> + <li>The Connection header field and fields whose names are listed in it are + required by <xref target="field.connection"/> to be removed before + forwarding the message. 
This &MAY; be implemented by doing so + before storage.</li> + <li>Likewise, some fields' semantics require them to be removed + before forwarding the message, and this &MAY; be implemented by doing so + before storage; see <xref target="field.connection"/> for some + examples.</li> + <li>The no-cache (<xref target="cache-response-directive.no-cache"/>) and + private (<xref target="cache-response-directive.private"/>) cache + directives can have arguments that prevent storage of header fields by all + caches and shared caches, respectively.</li> + <li>Header fields that are specific to a client's proxy configuration + &MUST-NOT; be stored, unless the cache incorporates the identity of the + proxy into the cache key. Effectively, this is limited to Proxy-Authenticate + (<xref target="field.proxy-authenticate"/>), Proxy-Authentication-Info + (<xref target="field.proxy-authentication-info"/>), and Proxy-Authorization + (<xref target="field.proxy-authorization"/>).</li> +</ul> +<t> + Caches &MAY; either store trailer fields separate from header fields, or + discard them. Caches &MUST-NOT; combine trailer fields with header fields. +</t> +</section> + +<section title="Updating Stored Header Fields" anchor="update"> +<t> + Caches are required to update a stored response's header fields from another + (typically newer) response in several situations; for example, see <xref + target="combining.responses"/>, <xref target="freshening.responses"/> and + <xref target="head.effects"/>. +</t> +<t> + When doing so, the cache &MUST; add each header field in the provided response + to the stored response, replacing field values that are already present, + with the following exceptions: +</t> +<ul> + <li>Header fields excepted from storage in <xref target="storing.fields"/>,</li> + <li>Header fields that the cache's stored response depends upon, as described below,</li> + <li>Header fields that are automatically processed and removed by the recipient, as described below, and</li> + <li>The <x:ref>Content-Length</x:ref> header field.</li> +</ul> +<t> + In some cases, caches (especially in user agents) store the results of + processing the received response, rather than the response itself, + and updating header fields that affect that processing can result in + inconsistent behavior and security issues. Caches in this situation &MAY; + omit these header fields from updating stored responses on an + exceptional basis, but &SHOULD; limit such omission to those fields + necessary to assure integrity of the stored response. +</t> +<t> + For example, a browser might decode the content coding of a response + while it is being received, creating a disconnect between the data it has + stored and the response's original metadata. + Updating that stored metadata with a different <x:ref>Content-Encoding</x:ref> + header field would be problematic. Likewise, a browser might store a + post-parse HTML tree, rather than the content received in + the response; updating the <x:ref>Content-Type</x:ref> header field would not be workable + in this case, because any assumptions about the format made in parsing would + now be invalid. +</t> +<t> + Furthermore, some fields are automatically processed and removed by the + HTTP implementation; for example, the <x:ref>Content-Range</x:ref> header field. + Implementations &MAY; automatically omit such header fields from updates, + even when the processing does not actually occur. 
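+<aside>
+  <t>
+    A minimal, non-normative Java sketch of the update rule above: every field
+    from the fresher response replaces the stored value, except for an excluded
+    set. The exclusion list shown (Content-Length plus two connection-oriented
+    fields) is only an example; a real cache derives it from the exceptions
+    listed in this section.
+  </t>
+  <sourcecode type="java"><![CDATA[
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+public final class HeaderUpdateSketch {
+
+  // Example exclusions only; the real set is determined by this section.
+  private static final Set<String> EXCLUDED =
+      Set.of("content-length", "connection", "proxy-authenticate");
+
+  static Map<String, List<String>> update(Map<String, List<String>> stored,
+                                          Map<String, List<String>> fresher) {
+    // Field names are case-insensitive in HTTP.
+    Map<String, List<String>> result = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+    result.putAll(stored);
+    fresher.forEach((name, values) -> {
+      if (!EXCLUDED.contains(name.toLowerCase())) {
+        result.put(name, values);   // replace any value already present
+      }
+    });
+    return result;
+  }
+
+  public static void main(String[] args) {
+    Map<String, List<String>> stored = Map.of(
+        "Cache-Control", List.of("max-age=60"), "Content-Length", List.of("1234"));
+    Map<String, List<String>> fresher = Map.of(
+        "Cache-Control", List.of("max-age=300"), "Content-Length", List.of("0"));
+    System.out.println(update(stored, fresher));   // Cache-Control updated, Content-Length kept
+  }
+}
+]]></sourcecode>
+</aside>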
+</t> +<t> + Note that the Content-* prefix is not a signal that a header field is omitted + from update; it is a convention for MIME header fields, not HTTP. +</t> +</section> + +<section title="Storing Incomplete Responses" anchor="incomplete.responses"> +<t> + If the request method is GET, the response status code is <x:ref>200 + (OK)</x:ref>, and the entire response header section has been received, a + cache &MAY; store a response body that is not complete (<xref target="messages"/>) if the stored response + is recorded as being incomplete. Likewise, a <x:ref>206 (Partial + Content)</x:ref> response &MAY; be stored as if it were an incomplete + <x:ref>200 (OK)</x:ref> response. However, a cache &MUST-NOT; store + incomplete or partial-content responses if it does not support the + <x:ref>Range</x:ref> and <x:ref>Content-Range</x:ref> header fields or if + it does not understand the range units used in those fields. +</t> +<t> + A cache &MAY; complete a stored incomplete response by making a subsequent + range request (<xref target="field.range"/>) and combining the successful response with the + stored response, as defined in <xref target="combining.responses"/>. A cache + &MUST-NOT; use an incomplete response to answer requests unless the + response has been made complete, or the request is partial and specifies a + range wholly within the incomplete response. A cache &MUST-NOT; + send a partial response to a client without explicitly marking it + using the <x:ref>206 (Partial Content)</x:ref> status code. +</t> +</section> + +<section title="Combining Partial Content" anchor="combining.responses"> +<t> + A response might transfer only a partial representation if the + connection closed prematurely or if the request used one or more Range + specifiers (<xref target="field.range"/>). After several such transfers, a cache might have + received several ranges of the same representation. A cache &MAY; combine + these ranges into a single stored response, and reuse that response to + satisfy later requests, if they all share the same strong validator and + the cache complies with the client requirements in <xref target="combining.byte.ranges"/>. +</t> +<t> + When combining the new response with one or more stored responses, a cache + &MUST; update the stored response header fields using the header fields + provided in the new response, as per <xref target="update"/>. +</t> +</section> + +<section title="Storing Responses to Authenticated Requests" anchor="caching.authenticated.responses"> +<t> + A shared cache &MUST-NOT; use a cached response to a request with an + <x:ref>Authorization</x:ref> header field (<xref target="field.authorization"/>) to + satisfy any subsequent request unless the response contains a + <x:ref>Cache-Control</x:ref> field with a response directive + (<xref target="cache-response-directive"/>) that allows it to be stored by + a shared cache and the cache conforms to the requirements of that + directive for that response. +</t> +<t> + In this specification, the following response directives have such an effect: + must-revalidate (<xref target="cache-response-directive.must-revalidate"/>), + public (<xref target="cache-response-directive.public"/>), and + s-maxage (<xref target="cache-response-directive.s-maxage"/>). 
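+<aside>
+  <t>
+    A minimal, non-normative Java sketch of the check above: a shared cache
+    treats a response to a request that carried Authorization as storable or
+    reusable only when one of the listed directives (must-revalidate, public,
+    s-maxage) is present. The parsing helper is deliberately simplistic and
+    the names are illustrative assumptions.
+  </t>
+  <sourcecode type="java"><![CDATA[
+import java.util.Arrays;
+import java.util.Locale;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public final class AuthorizedResponseSketch {
+
+  static boolean shareableDespiteAuthorization(String responseCacheControl) {
+    Set<String> directives = directiveNames(responseCacheControl);
+    return directives.contains("must-revalidate")
+        || directives.contains("public")
+        || directives.contains("s-maxage");
+  }
+
+  // Very loose Cache-Control parsing: split on commas, keep the directive
+  // name before any "=" argument, compare case-insensitively.
+  static Set<String> directiveNames(String fieldValue) {
+    return Arrays.stream(fieldValue.split(","))
+        .map(d -> d.split("=", 2)[0].trim().toLowerCase(Locale.ROOT))
+        .collect(Collectors.toSet());
+  }
+
+  public static void main(String[] args) {
+    System.out.println(shareableDespiteAuthorization("public, max-age=60"));  // true
+    System.out.println(shareableDespiteAuthorization("max-age=60"));          // false
+  }
+}
+]]></sourcecode>
+</aside>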
+</t> +</section> + +</section> + + +<section title="Constructing Responses from Caches" anchor="constructing.responses.from.caches"> +<t> + When presented with a request, a cache &MUST-NOT; reuse a stored response, + unless: +</t> +<ul> + <li><t>The presented target URI (<xref target="target.resource"/>) and + that of the stored response match, and</t></li> + <li><t>the request method associated with the stored response allows it to + be used for the presented request, and</t></li> + <li><t>selecting header fields nominated by the stored response (if any) + match those presented (see <xref target="caching.negotiated.responses" + />), and</t></li> + <li><t>the stored response does not contain the no-cache cache directive + (<xref target="cache-response-directive.no-cache"/>), unless it is + successfully validated (<xref target="validation.model"/>), and</t></li> + <li><t>the stored response is either:</t> + <ul> + <li>fresh (see <xref target="expiration.model" />), or</li> + <li>allowed to be served stale (see <xref + target="serving.stale.responses" />), or</li> + <li>successfully validated (see <xref target="validation.model" + />).</li> + </ul> + </li> +</ul> +<t> + Note that a cache-control extension can override any of the requirements + listed; see <xref target="cache.control.extensions" />. +</t> +<t> + When a stored response is used to satisfy a request without validation, a + cache &MUST; generate an <x:ref>Age</x:ref> header field (<xref + target="field.age"/>), replacing any present in the response with a value + equal to the stored response's current_age; see <xref + target="age.calculations" />. +</t> +<t> + A cache &MUST; write through requests with methods that are unsafe + (<xref target="safe.methods"/>) to the origin server; i.e., a cache is not allowed to + generate a reply to such a request before having forwarded the request and + having received a corresponding response. +</t> +<t> + Also, note that unsafe requests might invalidate already-stored responses; + see <xref target="invalidation" />. +</t> +<iref item="collapsed requests" /> +<t> + A response that is stored or storable can be used to satisfy multiple + requests, provided that it is allowed to reuse that response for the requests + in question. This enables caches to <x:dfn>collapse requests</x:dfn> &mdash; or combine multiple incoming requests into a single forward + one upon a cache miss &mdash; thereby reducing load on the origin server and network. + However, note that if the response returned is not able to be used for some or all + of the collapsed requests, additional latency might be introduced, because they will + need to be forwarded to be satisfied. +</t> +<t> + When more than one suitable response is stored, a cache &MUST; use the + most recent one (as determined by the <x:ref>Date</x:ref> header + field). It can also forward the request with "Cache-Control: max-age=0" or + "Cache-Control: no-cache" to disambiguate which response to use. +</t> +<t> + A cache that does not have a clock available &MUST-NOT; use stored + responses without revalidating them upon every use. 
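+<aside>
+  <t>
+    A minimal, non-normative Java sketch tying together the reuse conditions
+    listed at the start of this section. Each boolean parameter stands for one
+    of the checks above; how a cache performs those checks is implementation
+    specific, and the names are illustrative assumptions.
+  </t>
+  <sourcecode type="java"><![CDATA[
+public final class ReuseCheckSketch {
+
+  static boolean mayReuseStoredResponse(boolean targetUriMatches,
+                                        boolean methodAllowsReuse,
+                                        boolean selectingFieldsMatch,
+                                        boolean responseHasNoCache,
+                                        boolean fresh,
+                                        boolean allowedToServeStale,
+                                        boolean successfullyValidated) {
+    if (!targetUriMatches || !methodAllowsReuse || !selectingFieldsMatch) {
+      return false;
+    }
+    if (responseHasNoCache && !successfullyValidated) {
+      return false;   // no-cache requires successful validation before reuse
+    }
+    return fresh || allowedToServeStale || successfullyValidated;
+  }
+
+  public static void main(String[] args) {
+    // A fresh, matching response without no-cache can be reused:
+    System.out.println(mayReuseStoredResponse(true, true, true, false, true, false, false));
+    // A stale response that is neither allowed stale nor validated cannot:
+    System.out.println(mayReuseStoredResponse(true, true, true, false, false, false, false));
+  }
+}
+]]></sourcecode>
+</aside>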
+</t> + +<section title="Calculating Cache Keys with Vary" anchor="caching.negotiated.responses"> +<t> + When a cache receives a request that can be satisfied by a stored response + that has a <x:ref>Vary</x:ref> header field (<xref target="field.vary"/>), + it &MUST-NOT; use that response unless all the selecting header fields + nominated by the Vary header field match in both the original request + (i.e., that associated with the stored response), and the presented + request. +</t> +<t> + The selecting header fields from two requests are defined to match if + and only if those in the first request can be transformed to those in the + second request by applying any of: +</t> +<ul> + <li> + adding or removing whitespace, where allowed in the header field's + syntax + </li> + <li> + combining multiple header field lines with the same field name + (see <xref target="field.lines"/>) + </li> + <li> + normalizing both header field values in a way that is known to have + identical semantics, according to the header field's specification + (e.g., reordering field values when order is not significant; + case-normalization, where values are defined to be case-insensitive) + </li> +</ul> +<t> + If (after any normalization that might take place) a header field is absent + from a request, it can only match another request if it is also absent + there. +</t> +<t> + A <x:ref>Vary</x:ref> header field value containing a member "*" always fails to match. +</t> +<iref item="selected response" /> +<t> + The stored response with matching selecting header fields is known as the + <x:dfn>selected response</x:dfn>. +</t> +<t> + If multiple selected responses are available (potentially including + responses without a Vary header field), the cache will need to choose one to use. + When a selecting header field has a known mechanism for doing so (e.g., qvalues on + <x:ref>Accept</x:ref> and similar request header fields), that mechanism &MAY; be + used to select a preferred response. If such a mechanism is not available, or leads to equally preferred responses, the most recent + response (as determined by the <x:ref>Date</x:ref> header field) is used, as + per <xref target="constructing.responses.from.caches"/>. +</t> +<t> + Some resources mistakenly omit the Vary header field from their default + response (i.e., the one sent when no more preferable response is available), + with the effect of selecting it for requests to that resource even when + more preferable responses are available. When a cache has multiple responses for a + target URI and one or more omits the Vary header field, it &SHOULD; use the + most recent (see <xref target="age.calculations"/>) valid Vary field value available to select an appropriate response + for the request. +</t> +<t> + If no selected response is available, the cache cannot satisfy the + presented request. Typically, it is forwarded to the origin server + in a (possibly conditional; see <xref target="validation.model"/>) request. +</t> +</section> + +<section title="Freshness" anchor="expiration.model"> +<iref item="fresh" /> +<iref item="stale" /> +<t> + A <x:dfn>fresh</x:dfn> response is one whose age has not yet exceeded its + freshness lifetime. Conversely, a <x:dfn>stale</x:dfn> + response is one where it has. 
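+<aside>
+  <t>
+    A minimal, non-normative Java sketch of the selecting-header comparison in
+    the preceding subsection (Calculating Cache Keys with Vary): a stored
+    response is only selectable if every field it was varied on has an
+    equivalent value in the presented request, an absent field only matches an
+    absent field, and a Vary member of "*" never matches. Normalization is
+    reduced to trimming and lower-casing purely for illustration; real caches
+    apply field-specific rules.
+  </t>
+  <sourcecode type="java"><![CDATA[
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+public final class VaryMatchSketch {
+
+  // varyFieldNames and the request maps are assumed to use lower-case field names.
+  static boolean varyMatches(Set<String> varyFieldNames,
+                             Map<String, String> originalRequestFields,
+                             Map<String, String> presentedRequestFields) {
+    if (varyFieldNames.contains("*")) {
+      return false;                              // "*" always fails to match
+    }
+    for (String field : varyFieldNames) {
+      Optional<String> original = normalized(originalRequestFields.get(field));
+      Optional<String> presented = normalized(presentedRequestFields.get(field));
+      if (!original.equals(presented)) {
+        return false;                            // covers absent-vs-present as well
+      }
+    }
+    return true;
+  }
+
+  private static Optional<String> normalized(String value) {
+    return Optional.ofNullable(value).map(v -> v.trim().toLowerCase());
+  }
+
+  public static void main(String[] args) {
+    System.out.println(varyMatches(Set.of("accept-encoding"),
+        Map.of("accept-encoding", "gzip"), Map.of("accept-encoding", " GZIP ")));  // true
+    System.out.println(varyMatches(Set.of("accept-encoding"),
+        Map.of("accept-encoding", "gzip"), Map.of()));                              // false
+  }
+}
+]]></sourcecode>
+</aside>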
+</t> +<iref item="freshness lifetime" /> +<iref item="explicit expiration time" /> +<iref item="heuristic expiration time" /> +<t> + A response's <x:dfn>freshness lifetime</x:dfn> is the length of time + between its generation by the origin server and its expiration time. An + <x:dfn>explicit expiration time</x:dfn> is the time at which the origin + server intends that a stored response can no longer be used by a cache + without further validation, whereas a <x:dfn>heuristic expiration + time</x:dfn> is assigned by a cache when no explicit expiration time is + available. +</t> +<iref item="age" /> +<t> + A response's <x:dfn>age</x:dfn> is the time that has passed since it was + generated by, or successfully validated with, the origin server. +</t> +<t> + When a response is fresh, it can be used to satisfy + subsequent requests without contacting the origin server, thereby improving + efficiency. +</t> +<t> + The primary mechanism for determining freshness is for an origin server to + provide an explicit expiration time in the future, using either the + <x:ref>Expires</x:ref> header field (<xref target="field.expires" />) or + the max-age response directive (<xref + target="cache-response-directive.max-age" />). Generally, origin servers + will assign future explicit expiration times to responses in the belief + that the representation is not likely to change in a semantically + significant way before the expiration time is reached. +</t> +<t> + If an origin server wishes to force a cache to validate every request, it + can assign an explicit expiration time in the past to indicate that the + response is already stale. Compliant caches will normally validate a stale + cached response before reusing it for subsequent requests (see <xref + target="serving.stale.responses" />). +</t> +<t> + Since origin servers do not always provide explicit expiration times, + caches are also allowed to use a heuristic to determine an expiration time + under certain circumstances (see <xref target="heuristic.freshness"/>). +</t> +<t> + The calculation to determine if a response is fresh is: +</t> +<artwork type="code"> + response_is_fresh = (freshness_lifetime &gt; current_age) +</artwork> +<t> + freshness_lifetime is defined in <xref + target="calculating.freshness.lifetime" />; current_age is defined in + <xref target="age.calculations" />. +</t> +<t> + Clients can send the max-age or min-fresh request directives (<xref + target="cache-request-directive" />) to constrain or relax freshness + calculations for the corresponding response. However, caches are not + required to honor them. 
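+<aside>
+  <t>
+    A minimal, non-normative Java sketch of the freshness check above:
+    current_age is computed with the formulas given later in "Calculating
+    Age", and the result is compared against the freshness lifetime exactly as
+    in response_is_fresh = (freshness_lifetime &gt; current_age). Method names
+    are illustrative assumptions.
+  </t>
+  <sourcecode type="java"><![CDATA[
+import java.time.Duration;
+import java.time.Instant;
+
+public final class FreshnessSketch {
+
+  // current_age, following the apparent_age / corrected_age_value /
+  // corrected_initial_age / resident_time calculation in "Calculating Age".
+  static Duration currentAge(Duration ageValue,    // Age field value, or zero if absent
+                             Instant dateValue,    // Date field value
+                             Instant requestTime,  // clock value when the request was made
+                             Instant responseTime, // clock value when the response arrived
+                             Instant now) {
+    Duration apparentAge = max(Duration.ZERO, Duration.between(dateValue, responseTime));
+    Duration responseDelay = Duration.between(requestTime, responseTime);
+    Duration correctedAgeValue = ageValue.plus(responseDelay);
+    Duration correctedInitialAge = max(apparentAge, correctedAgeValue);
+    Duration residentTime = Duration.between(responseTime, now);
+    return correctedInitialAge.plus(residentTime);
+  }
+
+  static boolean responseIsFresh(Duration freshnessLifetime, Duration currentAge) {
+    return freshnessLifetime.compareTo(currentAge) > 0;
+  }
+
+  private static Duration max(Duration a, Duration b) {
+    return a.compareTo(b) >= 0 ? a : b;
+  }
+
+  public static void main(String[] args) {
+    Instant date = Instant.parse("2024-01-01T00:00:00Z");
+    Duration age = currentAge(Duration.ofSeconds(10), date,
+        date.plusSeconds(2),     // request_time
+        date.plusSeconds(3),     // response_time
+        date.plusSeconds(100));  // now
+    System.out.println(age);                                            // PT1M48S
+    System.out.println(responseIsFresh(Duration.ofSeconds(300), age));  // true
+  }
+}
+]]></sourcecode>
+</aside>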
+</t> +<t> + When calculating freshness, to avoid common problems in date parsing: +</t> +<ul> + <li>Although all date formats are specified to be case-sensitive, + a cache recipient &SHOULD; match the field value + case-insensitively.</li> + <li>If a cache recipient's internal implementation of time has less + resolution than the value of an HTTP-date, the recipient &MUST; + internally represent a parsed <x:ref>Expires</x:ref> date as the + nearest time equal to or earlier than the received value.</li> + <li>A cache recipient &MUST-NOT; allow local time zones to influence the + calculation or comparison of an age or expiration time.</li> + <li>A cache recipient &SHOULD; consider a date with a zone abbreviation + other than "GMT" to be invalid for calculating expiration.</li> +</ul> +<t> + Note that freshness applies only to cache operation; it cannot be used to + force a user agent to refresh its display or reload a resource. See <xref + target="history.lists" /> for an explanation of the difference between + caches and history mechanisms. +</t> + +<section title="Calculating Freshness Lifetime" anchor="calculating.freshness.lifetime"> +<t> + A cache can calculate the freshness lifetime (denoted as + freshness_lifetime) of a response by using the first match of: +</t> +<ul> + <li>If the cache is shared and the s-maxage response directive + (<xref target="cache-response-directive.s-maxage" />) is present, use its value, + or</li> + <li>If the max-age response directive (<xref + target="cache-response-directive.max-age" />) is present, use its value, or</li> + <li>If the <x:ref>Expires</x:ref> response header field + (<xref target="field.expires" />) is present, use its value minus the + value of the <x:ref>Date</x:ref> response header field + (using the time the message was received if it is not present, as per <xref target="field.date"/>), or</li> + <li>Otherwise, no explicit expiration time is present in the response. A + heuristic freshness lifetime might be applicable; see <xref + target="heuristic.freshness" />.</li> +</ul> +<t> + Note that this calculation is not vulnerable to clock skew, since all of + the information comes from the origin server. +</t> +<t> + When there is more than one value present for a given directive (e.g., two + <x:ref>Expires</x:ref> header field lines or multiple Cache-Control: max-age + directives), either the first occurrence should be used, or the response should + be considered stale. If directives conflict (e.g., + both max-age and no-cache are present), the most restrictive directive should + be honored. Caches are encouraged to consider responses that have + invalid freshness information (e.g., a max-age directive with non-integer content) to + be stale. +</t> +</section> + +<section title="Calculating Heuristic Freshness" anchor="heuristic.freshness"> + <iref item="heuristically cacheable"/> +<t> + Since origin servers do not always provide explicit expiration times, a + cache &MAY; assign a heuristic expiration time when an explicit time is not + specified, employing algorithms that use other field values (such as + the <x:ref>Last-Modified</x:ref> time) to estimate a plausible expiration + time. This specification does not provide specific algorithms, but does + impose worst-case constraints on their results. +</t> +<t> + A cache &MUST-NOT; use heuristics to determine freshness when an explicit + expiration time is present in the stored response. 
Because of the + requirements in <xref target="response.cacheability"/>, this means that + heuristics can only be used on responses without explicit + freshness whose status codes are defined as <x:dfn>heuristically cacheable</x:dfn> (e.g., see + <xref target="overview.of.status.codes"/>), and those responses without + explicit freshness that have been marked as explicitly cacheable (e.g., + with a "public" response directive). +</t> +<t> + Note that in previous specifications heuristically cacheable response status + codes were called "cacheable by default." +</t> +<t> + If the response has a <x:ref>Last-Modified</x:ref> header field + (<xref target="field.last-modified"/>), caches are encouraged to use a heuristic + expiration value that is no more than some fraction of the interval since + that time. A typical setting of this fraction might be 10%. +</t> +<aside> + <t> + &Note; <xref target="RFC2616" x:fmt="of" x:sec="13.9"/> prohibited caches + from calculating heuristic freshness for URIs with query components + (i.e., those containing '?'). In practice, this has not been widely + implemented. Therefore, origin servers are encouraged to send explicit + directives (e.g., Cache-Control: no-cache) if they wish to prevent + caching. + </t> +</aside> +</section> + +<section title="Calculating Age" anchor="age.calculations"> +<t> + The <x:ref>Age</x:ref> header field is used to convey an estimated + age of the response message when obtained from a cache. The Age field value + is the cache's estimate of the number of seconds since the origin server generated + or validated the response. The Age value is therefore + the sum of the time that the response has been resident in each of the + caches along the path from the origin server, plus the time it + has been in transit along network paths. +</t> +<t> + Age calculation uses the following data: +</t> +<dl newline="false"> + <dt><x:dfn>age_value</x:dfn></dt> + <dd> + The term "age_value" denotes the value of the <x:ref>Age</x:ref> + header field (<xref target="field.age"/>), in a form appropriate for + arithmetic operation; or 0, if not available. + </dd> + <dt><x:dfn>date_value</x:dfn></dt> + <dd> + The term "date_value" denotes the value of + the Date header field, in a form appropriate for arithmetic + operations. See <xref target="field.date"/> for the definition of the Date header + field, and for requirements regarding responses without it. + </dd> + <dt><x:dfn>now</x:dfn></dt> + <dd> + The term "now" means "the current value of the clock at the host + performing the calculation". A host ought to use NTP (<xref + target="RFC5905"/>) or some similar protocol to synchronize its + clocks to Coordinated Universal Time. + </dd> + <dt><x:dfn>request_time</x:dfn></dt> + <dd> + The current value of the clock at the host at the time the request + resulting in the stored response was made. + </dd> + <dt><x:dfn>response_time</x:dfn></dt> + <dd> + The current value of the clock at the host at the time the response + was received. + </dd> +</dl> +<t> + A response's age can be calculated in two entirely independent ways: +</t> +<ol> + <li>the "apparent_age": response_time minus date_value, if the local + clock is reasonably well synchronized to the origin server's clock. If + the result is negative, the result is replaced by zero.</li> + <li>the "corrected_age_value", if all of the caches along the response + path implement HTTP/1.1 or greater. 
A cache &MUST; interpret this value + relative to the time the request was initiated, not the time that the + response was received.</li> +</ol> +<artwork type="code"> + apparent_age = max(0, response_time - date_value); + + response_delay = response_time - request_time; + corrected_age_value = age_value + response_delay; +</artwork> +<t> + The corrected_age_value &MAY; be used as the corrected_initial_age. In + circumstances where very old cache implementations that might not correctly + insert <x:ref>Age</x:ref> are present, corrected_initial_age can be calculated + more conservatively as +</t> +<artwork type="code"> + corrected_initial_age = max(apparent_age, corrected_age_value); +</artwork> +<t> + The current_age of a stored response can then be calculated by adding the + time (in seconds) since the stored response was last validated by + the origin server to the corrected_initial_age. +</t> +<artwork type="code"> + resident_time = now - response_time; + current_age = corrected_initial_age + resident_time; +</artwork> +</section> + +<section title="Serving Stale Responses" anchor="serving.stale.responses"> +<t> + A "stale" response is one that either has explicit expiry information or is + allowed to have heuristic expiry calculated, but is not fresh according to + the calculations in <xref target="expiration.model" />. +</t> +<t> + A cache &MUST-NOT; generate a stale response if it is prohibited by an + explicit in-protocol directive (e.g., by a "no-cache" cache + directive, a "must-revalidate" cache-response-directive, or an applicable + "s-maxage" or "proxy-revalidate" cache-response-directive; see <xref + target="cache-response-directive"/>). +</t> +<t> + A cache &MUST-NOT; generate a stale response unless it is disconnected + or doing so is explicitly permitted by the client or origin server + (e.g., by the max-stale request directive in <xref + target="cache-request-directive" />, by extension directives such as those + defined in <xref target="RFC5861"/>, or by configuration in accordance + with an out-of-band contract). +</t> +</section> +</section> + +<section title="Validation" anchor="validation.model"> +<t> + When a cache has one or more stored responses for a requested URI, but + cannot serve any of them (e.g., because they are not fresh, or one cannot + be selected; see <xref target="caching.negotiated.responses"/>), it can use + the conditional request mechanism (<xref target="preconditions"/>) in the forwarded request to + give the next inbound server an opportunity to select a valid stored + response to use, updating the stored metadata in the process, or to replace + the stored response(s) with a new response. This process is known as + <x:dfn>validating</x:dfn> or <x:dfn>revalidating</x:dfn> the stored + response. +</t> + +<section title="Sending a Validation Request" anchor="validation.sent"><iref item="validator" /> +<t> + When generating a conditional request for validation, a cache starts with + either a request it is attempting to satisfy, or &mdash; if it is initiating + the request independently &mdash; it synthesises a request using a stored + response by copying the method, target URI, and request header fields + identified by the Vary header field (<xref + target="caching.negotiated.responses"/>). +</t> +<t> + It then updates that request with one or more precondition header fields. + These contain validator metadata sourced from stored response(s) that have + the same cache key. 
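+<aside>
+  <t>
+    A minimal, non-normative Java sketch of assembling the precondition fields
+    for such a validation request, anticipating the validators described just
+    below: stored entity-tags become an If-None-Match list and a stored
+    Last-Modified value becomes If-Modified-Since. Names are illustrative
+    assumptions.
+  </t>
+  <sourcecode type="java"><![CDATA[
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+public final class ValidationRequestSketch {
+
+  static Map<String, String> preconditionFields(List<String> storedEntityTags,
+                                                Optional<String> storedLastModified) {
+    Map<String, String> fields = new LinkedHashMap<>();
+    if (!storedEntityTags.isEmpty()) {
+      fields.put("If-None-Match", String.join(", ", storedEntityTags));
+    }
+    storedLastModified.ifPresent(lastModified -> fields.put("If-Modified-Since", lastModified));
+    return fields;
+  }
+
+  public static void main(String[] args) {
+    System.out.println(preconditionFields(
+        List.of("\"v1\"", "W/\"v2\""),
+        Optional.of("Mon, 01 Jan 2024 00:00:00 GMT")));
+  }
+}
+]]></sourcecode>
+</aside>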
+</t> +<t> + The precondition header fields are then compared by recipients to + determine whether any stored response is equivalent to a current + representation of the resource. +</t> +<t> + One such validator is the timestamp given in a <x:ref>Last-Modified</x:ref> + header field (<xref target="field.last-modified"/>), which can be used in an + <x:ref>If-Modified-Since</x:ref> header field for response validation, or + in an <x:ref>If-Unmodified-Since</x:ref> or <x:ref>If-Range</x:ref> header + field for representation selection (i.e., the client is referring + specifically to a previously obtained representation with that timestamp). +</t> +<t> + Another validator is the entity-tag given in an <x:ref>ETag</x:ref> + field (<xref target="field.etag"/>). One or more entity-tags, indicating one or more + stored responses, can be used in an <x:ref>If-None-Match</x:ref> header + field for response validation, or in an <x:ref>If-Match</x:ref> or + <x:ref>If-Range</x:ref> header field for representation selection (i.e., + the client is referring specifically to one or more previously obtained + representations with the listed entity-tags). +</t> +</section> + +<section title="Handling a Received Validation Request" anchor="validation.received"> +<t> + Each client in the request chain may have its own cache, so it is common + for a cache at an intermediary to receive conditional requests from other + (outbound) caches. Likewise, some user agents make use of conditional + requests to limit data transfers to recently modified representations or to + complete the transfer of a partially retrieved representation. +</t> +<t> + If a cache receives a request that can be satisfied by reusing one of + its stored <x:ref>200 (OK)</x:ref> or <x:ref>206 (Partial Content)</x:ref> + responses, the cache &SHOULD; evaluate any applicable conditional header + field preconditions received in that request with respect to the + corresponding validators contained within the selected response. + A cache &MUST-NOT; evaluate conditional header fields that only + apply to an origin server, occur in a request with semantics that + cannot be satisfied with a cached response, or occur in a request with a target resource + for which it has no stored responses; such preconditions are likely + intended for some other (inbound) server. +</t> +<t> + The proper evaluation of conditional requests by a cache depends on the + received precondition header fields and their precedence. In summary, the <x:ref>If-Match</x:ref> and + <x:ref>If-Unmodified-Since</x:ref> conditional header fields are not + applicable to a cache, and <x:ref>If-None-Match</x:ref> takes precedence over + <x:ref>If-Modified-Since</x:ref>. See <xref target="precedence"/> for + a complete specification of precondition precedence. +</t> +<t> + A request containing an <x:ref>If-None-Match</x:ref> header field + (<xref target="field.if-none-match"/>) indicates that the client wants to validate one + or more of its own stored responses in comparison to whichever stored + response is selected by the cache. +</t> +<t> + When a cache decides to revalidate its own stored responses for a + request that contains an <x:ref>If-None-Match</x:ref> list of entity-tags, + the cache &MAY; combine the received list with a list of entity-tags + from its own stored set of responses (fresh or stale) and send the union of + the two lists as a replacement <x:ref>If-None-Match</x:ref> header + field value in the forwarded request. 
+ If a stored response contains only partial content, the + cache &MUST-NOT; include its entity-tag in the union unless the request is + for a range that would be fully satisfied by that partial stored response. + If the response to the forwarded request is + <x:ref>304 (Not Modified)</x:ref> and has an ETag field value with + an entity-tag that is not in the client's list, the cache &MUST; + generate a <x:ref>200 (OK)</x:ref> response for the client by reusing its + corresponding stored response, as updated by the 304 response metadata + (<xref target="freshening.responses"/>). +</t> +<t> + If an <x:ref>If-None-Match</x:ref> header field is not present, a request + containing an <x:ref>If-Modified-Since</x:ref> header field + (<xref target="field.if-modified-since"/>) indicates that the client wants to validate + one or more of its own stored responses by modification date. +</t> +<t> + If a request contains an <x:ref>If-Modified-Since</x:ref> header field and + the <x:ref>Last-Modified</x:ref> header field is not present in a selected + stored response, a cache &SHOULD; use the stored response's + <x:ref>Date</x:ref> field value (or, if no Date field is present, the time + that the stored response was received) to evaluate the conditional. +</t> +<t> + A cache that implements partial responses to range requests, as defined in + <xref target="field.range"/>, also needs to evaluate a received <x:ref>If-Range</x:ref> header + field (<xref target="field.if-range"/>) regarding its selected stored response. +</t> +</section> + +<section title="Handling a Validation Response" anchor="validation.response"> +<t> + Cache handling of a response to a conditional request depends upon its + status code: +</t> +<ul> + <li> + A <x:ref>304 (Not Modified)</x:ref> response status code indicates + that the stored response can be updated and reused; see <xref + target="freshening.responses"/>. + </li> + <li> + A full response (i.e., one containing content) indicates that none + of the stored responses nominated in the conditional request is + suitable. Instead, the cache &MUST; use the full response to + satisfy the request. The cache &MAY; store such a full response, + subject to its constraints (see <xref target="response.cacheability"/>). + </li> + <li> + However, if a cache receives a <x:ref>5xx (Server Error)</x:ref> + response while attempting to validate a response, it can either + forward this response to the requesting client, or act as if the + server failed to respond. In the latter case, the cache can send a + previously stored response, subject to its constraints on doing so (see <xref + target="serving.stale.responses" />), or retry the validation request. + </li> +</ul> +</section> + +<section title="Freshening Stored Responses upon Validation" anchor="freshening.responses"> +<t> + When a cache receives a <x:ref>304 (Not Modified)</x:ref> response and + already has one or more stored <x:ref>200 (OK)</x:ref> responses for the + applicable cache key, the cache needs to identify which (if any) are to + be updated by the new information provided, and then do so. +</t> +<iref item="strong validator" /> +<t> + The stored response(s) to update are identified by using the first match + (if any) of: +</t> +<ul> + <li> + If the new response contains one or more <x:dfn>strong validators</x:dfn> (see + <xref target="weak.and.strong.validators"/>), then each of those strong validators + identify the selected representation for update. 
All the stored + responses with one of those same strong validators are identified for update. If + none of the stored responses contain at least one of the same strong validators, then the + cache &MUST-NOT; use the new response to update any stored responses. + </li> + <li> + If the new response contains no strong validators but does contain + one or more <x:dfn>weak validators</x:dfn>, and those + validators correspond to one of the cache's stored responses, then the most + recent of those matching stored responses is identified for update. + </li> + <li> + If the new response does not include any form of validator (such as + where a client generates an <x:ref>If-Modified-Since</x:ref> request from + a source other than the <x:ref>Last-Modified</x:ref> response header + field), and there is only one stored response, and that stored response + also lacks a validator, then that stored response is identified for update. + </li> +</ul> +<t> + For each stored response identified, the cache &MUST; update + its header fields with the header fields provided in the <x:ref>304 (Not + Modified)</x:ref> response, as per <xref target="update"/>. +</t> +</section> + +<section title="Freshening Responses with HEAD" anchor="head.effects"> +<t> + A response to the HEAD method is identical to what an equivalent request + made with a GET would have been, without sending the content. This property + of HEAD responses can be used to invalidate or update a cached GET + response if the more efficient conditional GET request mechanism is not + available (due to no validators being present in the stored response) or + if transmission of the content is not desired even if it has + changed. +</t> +<t> + When a cache makes an inbound HEAD request for a target URI and + receives a <x:ref>200 (OK)</x:ref> response, the cache &SHOULD; update or + invalidate each of its stored GET responses that could have been selected + for that request (see <xref target="caching.negotiated.responses"/>). +</t> +<t> + For each of the stored responses that could have been selected, if the + stored response and HEAD response have matching values for any received + validator fields (<x:ref>ETag</x:ref> and <x:ref>Last-Modified</x:ref>) + and, if the HEAD response has a <x:ref>Content-Length</x:ref> header field, + the value of <x:ref>Content-Length</x:ref> matches that of the stored + response, the cache &SHOULD; update the stored response as described below; + otherwise, the cache &SHOULD; consider the stored response to be stale. +</t> +<t> + If a cache updates a stored response with the metadata provided in a HEAD + response, the cache &MUST; use the header fields provided in the HEAD + response to update the stored response (see <xref + target="update"/>). +</t> +</section> +</section> + +<section title="Invalidating Stored Responses" anchor="invalidation"> +<t> + Because unsafe request methods (<xref target="safe.methods"/>) such as PUT, POST or DELETE + have the potential for changing state on the origin server, intervening + caches are required to invalidate stored responses to keep their contents up to date. +</t> +<t> + A cache &MUST; invalidate the target URI + (<xref target="target.resource"/>) when a non-error status code is received in response to + an unsafe request method (including methods whose safety is unknown). +</t> +<t> + A cache &MAY; invalidate other URIs when a non-error status code is received + in response to an unsafe request method (including methods whose safety is unknown). 
+ In particular, the URI(s) in the + <x:ref>Location</x:ref> and <x:ref>Content-Location</x:ref> response header + fields (if present) are candidates for invalidation; other URIs might be discovered + through mechanisms not specified in this document. + However, a cache &MUST-NOT; trigger an invalidation under these conditions + if the origin (<xref target="origin"/>) of the URI to be invalidated differs from that of the target URI + (<xref target="target.resource"/>). This helps prevent denial-of-service attacks. +</t> +<t> + <x:dfn>Invalidate</x:dfn> means that the cache will either remove all + stored responses whose target URI matches the given URI, or will mark them + as "invalid" and in need of a mandatory validation before they can be sent + in response to a subsequent request. +</t> +<t> + A "non-error response" is one with a <x:ref>2xx (Successful)</x:ref> + or <x:ref>3xx (Redirection)</x:ref> status code. +</t> +<t> + Note that this does not guarantee that all appropriate responses are + invalidated globally; a state-changing request would only invalidate + responses in the caches it travels through. +</t> +</section> +</section> + + + +<section title="Field Definitions" anchor="header.field.definitions"> +<t> + This section defines the syntax and semantics of HTTP fields + related to caching. +</t> + +<section title="Age" anchor="field.age"> + <x:anchor-alias value="header.age"/> + <iref primary="true" item="Fields" subitem="Age" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Age" x:for-anchor=""/><iref primary="true" item="Fields" subitem="Age" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Age" x:for-anchor=""/><iref item="Age header field" primary="true" x:for-anchor=""/> + <x:anchor-alias value="Age"/> + <x:anchor-alias value="age-value"/> +<t> + The "Age" response header field conveys the sender's estimate of the + time since the response was generated or successfully validated at the + origin server. Age values are calculated as specified in <xref + target="age.calculations" />. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Age"/> + <x:ref>Age</x:ref> = <x:ref>delta-seconds</x:ref> +</sourcecode> +<t> + The Age field value is a non-negative integer, representing time in seconds + (see <xref target="delta-seconds"/>). +</t> +<t> + Although it is defined as a singleton header field, a cache encountering a + message with multiple Age field lines &SHOULD; use the + first field line, discarding subsequent ones. +</t> +<t> + If the field value (after discarding additional lines, as per above) is invalid + (e.g., it contains a list or something other than a non-negative integer), + a cache &SHOULD; consider the response to be stale. +</t> +<t> + The presence of an Age header field implies that the response was not + generated or validated by the origin server for this request. However, + lack of an Age header field does not imply the origin was contacted. 
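+</t> +<t> + As a non-normative illustration (the value shown is arbitrary), a response that a cache estimates was generated or validated by the origin server 24 seconds ago would carry: +</t> +<artwork type="example"> + Age: 24 +</artwork> +<t> + A value of 0 would indicate that the response was just generated or successfully validated at the origin server.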
+</t> +</section> + +<section title="Cache-Control" anchor="field.cache-control"> + <x:anchor-alias value="header.cache-control"/> + <iref primary="true" item="Fields" subitem="Cache-Control" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Cache-Control" x:for-anchor=""/><iref item="Cache-Control header field" primary="true" x:for-anchor=""/> + <x:anchor-alias value="Cache-Control"/> + <x:anchor-alias value="cache-directive"/> +<t> + The "Cache-Control" header field is used to list directives for caches + along the request/response chain. Such cache directives are unidirectional + in that the presence of a directive in a request does not imply that the + same directive is present in the response, or to be repeated in it. +</t> +<t> + See <xref target="cache.control.extensions"/> for information about how + Cache-Control directives defined elsewhere are handled. +</t> +<t> + A proxy, whether or not it implements a cache, &MUST; pass cache directives + through in forwarded messages, regardless of their + significance to that application, since the directives might apply + to all recipients along the request/response chain. It is not possible to + target a directive to a specific cache. +</t> +<t> + Cache directives are identified by a token, to be compared case-insensitively, + and have an optional argument that can use both token and quoted-string + syntax. For the directives defined below that define arguments, recipients + ought to accept both forms, even if a specific form is required for generation. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Cache-Control"/><iref primary="true" item="Grammar" subitem="cache-directive"/> + <x:ref>Cache-Control</x:ref> = #<x:ref>cache-directive</x:ref> + + <x:ref>cache-directive</x:ref> = <x:ref>token</x:ref> [ "=" ( <x:ref>token</x:ref> / <x:ref>quoted-string</x:ref> ) ] +</sourcecode> +<t> + For the cache directives defined below, no argument is defined (nor allowed) + unless stated otherwise. +</t> + +<section title="Request Cache-Control Directives" anchor="cache-request-directive" x:assert-sorted-by="title"> + +<t> + This section defines cache request directives. They are advisory; caches + &MAY; implement them, but are not required to. +</t> + + +<section title="max-age" anchor="cache-request-directive.max-age"> + <iref item="max-age (cache directive)" primary="true"/> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li><x:ref>delta-seconds</x:ref> (see <xref target="delta-seconds"/>)</li> +</ul> +<t> + The "max-age" request directive indicates that the client prefers a + response whose age is less than or equal to the specified number of + seconds. Unless the max-stale request directive is also present, the + client does not wish to receive a stale response. +</t> +<t> + This directive uses the token form of the argument syntax: + e.g., 'max-age=5' not 'max-age="5"'. A sender &MUST-NOT; generate the + quoted-string form. +</t> +</section> + +<section title="max-stale" anchor="cache-request-directive.max-stale"> + <iref item="max-stale (cache directive)" primary="true"/> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li><x:ref>delta-seconds</x:ref> (see <xref target="delta-seconds"/>)</li> +</ul> +<t> + The "max-stale" request directive indicates that the client will + accept a response that has exceeded its freshness lifetime. 
If a value is + present, then the client is willing to accept a response that has exceeded + its freshness lifetime by no more than the specified number of seconds. If + no value is assigned to max-stale, then the client will accept a + stale response of any age. +</t> +<t> + This directive uses the token form of the argument syntax: + e.g., 'max-stale=10' not 'max-stale="10"'. A sender &MUST-NOT; generate + the quoted-string form. +</t> +</section> + +<section title="min-fresh" anchor="cache-request-directive.min-fresh"> + <iref item="min-fresh (cache directive)" primary="true"/> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li><x:ref>delta-seconds</x:ref> (see <xref target="delta-seconds"/>)</li> +</ul> +<t> + The "min-fresh" request directive indicates that the client prefers a + response whose freshness lifetime is no less than its current age plus the + specified time in seconds. That is, the client wants a response that will + still be fresh for at least the specified number of seconds. +</t> +<t> + This directive uses the token form of the argument syntax: + e.g., 'min-fresh=20' not 'min-fresh="20"'. A sender &MUST-NOT; generate + the quoted-string form. +</t> +</section> + +<section title="no-cache" anchor="cache-request-directive.no-cache"> + <iref item="no-cache (cache directive)" primary="true" /> +<t> + The "no-cache" request directive indicates that the client prefers + a stored response not be used to satisfy the request without successful + validation on the origin server. +</t> +</section> + +<section title="no-store" anchor="cache-request-directive.no-store"> + <iref item="no-store (cache directive)" primary="true" /> +<t> + The "no-store" request directive indicates that a cache &MUST-NOT; + store any part of either this request or any response to it. This + directive applies to both private and shared caches. "MUST NOT + store" in this context means that the cache &MUST-NOT; intentionally + store the information in non-volatile storage, and &MUST; make a + best-effort attempt to remove the information from volatile storage as + promptly as possible after forwarding it. +</t> +<t> + This directive is <em>not</em> a reliable or sufficient mechanism for ensuring + privacy. In particular, malicious or compromised caches might not + recognize or obey this directive, and communications networks might be + vulnerable to eavesdropping. +</t> +<t> + Note that if a request containing this directive is satisfied from a + cache, the no-store request directive does not apply to the already + stored response. +</t> +</section> + +<section title="no-transform" anchor="cache-request-directive.no-transform"> + <iref item="no-transform (cache directive)" primary="true" /> +<t> + The "no-transform" request directive indicates that the client is asking + for intermediaries to avoid + transforming the content, as defined in <xref + target="message.transformations"/>. +</t> +</section> + +<section title="only-if-cached" anchor="cache-request-directive.only-if-cached"> + <iref item="only-if-cached (cache directive)" primary="true" /> +<t> + The "only-if-cached" request directive indicates that the client only + wishes to obtain a stored response. Caches that honor this request + directive &SHOULD;, upon receiving it, either respond using a stored + response consistent with the other constraints of the request, or + respond with a <x:ref>504 (Gateway Timeout)</x:ref> status code.
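+</t> +<t> + As a non-normative illustration, a client that does not want its request forwarded to the origin server might send: +</t> +<artwork type="example"> + Cache-Control: only-if-cached +</artwork> +<t> + Like the other request directives above, this is advisory; a cache that does not implement it can process the request as usual.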
+</t> +</section> +</section> + +<section title="Response Cache-Control Directives" anchor="cache-response-directive" x:assert-sorted-by="title"> + <x:anchor-alias value="cache-response-directive" /> + +<t> + This section defines cache response directives. A cache &MUST; obey the + Cache-Control directives defined in this section. +</t> + +<section title="max-age" anchor="cache-response-directive.max-age"> + <iref item="max-age (cache directive)" primary="true" /> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li><x:ref>delta-seconds</x:ref> (see <xref target="delta-seconds"/>)</li> +</ul> +<t> + The "max-age" response directive indicates that the response is to be + considered stale after its age is greater than the specified number of + seconds. +</t> +<t> + This directive uses the token form of the argument syntax: + e.g., 'max-age=5' not 'max-age="5"'. A sender &MUST-NOT; generate the + quoted-string form. +</t> +</section> + +<section title="must-revalidate" anchor="cache-response-directive.must-revalidate"> + <iref item="must-revalidate (cache directive)" primary="true" /> +<t> + The "must-revalidate" response directive indicates that once the response + has become stale, a cache &MUST-NOT; reuse that response to satisfy + another request until it has been successfully validated by the origin, as + defined by <xref target="validation.model"/>. +</t> +<t> + The must-revalidate directive is necessary to support reliable operation + for certain protocol features. In all circumstances a cache &MUST-NOT; ignore + the must-revalidate directive; in particular, if a cache is disconnected, + the cache &MUST; generate an error response rather than reuse the stale response. + The generated status code &SHOULD; be <x:ref>504 (Gateway Timeout)</x:ref> + unless another error status code is more applicable. +</t> +<t> + The must-revalidate directive ought to be used by servers if and only + if failure to validate a request could cause + incorrect operation, such as a silently unexecuted financial + transaction. +</t> +<t> + The must-revalidate directive also permits a shared cache to + reuse a response to a request containing an <x:ref>Authorization</x:ref> + header field (<xref target="field.authorization"/>), + subject to the above requirement on revalidation + (<xref target="caching.authenticated.responses"/>). +</t> +</section> + +<section title="must-understand" anchor="cache-response-directive.must-understand"> + <iref item="must-understand (cache directive)" primary="true" /> +<t> + The "must-understand" response directive limits caching of the response to + a cache that understands and conforms to the requirements for that + response's status code. +</t> +<t> + Responses containing "must-understand" &SHOULD; also contain the "no-store" directive; + caches that implement "must-understand" &SHOULD; ignore the "no-store" directive + in responses that contain both directives and a status code that the cache + understands and conforms to any related caching requirements. +</t> +</section> + +<section title="no-cache" anchor="cache-response-directive.no-cache"> + <iref item="no-cache (cache directive)" primary="true" /> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li>#<x:ref>field-name</x:ref></li> +</ul> +<t> + The "no-cache" response directive, in its unqualified form (without an + argument), indicates that the response &MUST-NOT; be used to satisfy any + other request without forwarding it for validation and receiving a + successful response; see <xref target="validation.model"/>. 
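+</t> +<t> + As a non-normative illustration, an origin server that requires every reuse of a response to be revalidated might send: +</t> +<artwork type="example"> + Cache-Control: no-cache +</artwork> +<t> + A cache is still permitted to store such a response, but it cannot be reused without successful validation.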
+</t> +<t> + This allows an origin server to prevent a cache from using + the response to satisfy a request without contacting it, even by caches that have + been configured to send stale responses. +</t> +<t> + The qualified form of no-cache response directive, with an argument that + lists one or more field names, indicates that a cache &MAY; use the + response to satisfy a subsequent request, subject to any other restrictions + on caching, if the listed header fields are excluded from the subsequent + response or the subsequent response has been successfully revalidated with + the origin server (updating or removing those fields). + This allows an origin server to prevent the re-use of certain header + fields in a response, while still allowing caching of the rest of the + response. +</t> +<t> + The field names given are not limited to the set of header + fields defined by this specification. Field names are case-insensitive. +</t> +<t> + This directive uses the quoted-string form of the argument syntax. + A sender &SHOULD-NOT; generate the token form (even if quoting appears not + to be needed for single-entry lists). +</t> +<aside><t> + &Note; The + qualified form of the directive is often handled by caches as if an + unqualified no-cache directive was received; i.e., the special handling + for the qualified form is not widely implemented. +</t></aside> +</section> + +<section title="no-store" anchor="cache-response-directive.no-store"> + <iref item="no-store (cache directive)" primary="true" /> +<t> + The "no-store" response directive indicates that a cache &MUST-NOT; store + any part of either the immediate request or response, and &MUST-NOT; use + the response to satisfy any other request. +</t> +<t> + This directive applies to both private and shared caches. "MUST NOT + store" in this context means that the cache &MUST-NOT; intentionally store + the information in non-volatile storage, and &MUST; make a best-effort + attempt to remove the information from volatile storage as promptly as + possible after forwarding it. +</t> +<t> + This directive is <em>not</em> a reliable or sufficient mechanism for ensuring + privacy. In particular, malicious or compromised caches might not + recognize or obey this directive, and communications networks might be + vulnerable to eavesdropping. +</t> +<t> + Note that the "must-understand" cache directive overrides "no-store" in certain + circumstances; see <xref target="cache-response-directive.must-understand"/>. +</t> +</section> + +<section title="no-transform" anchor="cache-response-directive.no-transform"> + <iref item="no-transform (cache directive)" primary="true" /> +<t> + The "no-transform" response directive indicates that an intermediary + (regardless of whether it implements a cache) &MUST-NOT; transform the + content, as defined in <xref target="message.transformations"/>. +</t> +</section> + +<section title="private" anchor="cache-response-directive.private"> + <iref item="private (cache directive)" primary="true" /> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li>#<x:ref>field-name</x:ref></li> +</ul> +<t> + The unqualified "private" response directive indicates that + a shared cache &MUST-NOT; store the response (i.e., the response is + intended for a single user). + It also indicates that a private cache &MAY; store the response, subject to + the constraints defined in <xref target="response.cacheability"/>, even if + the response would not otherwise be heuristically cacheable by a private + cache.
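+</t> +<t> + As a non-normative illustration, a response tailored to a single authenticated user might carry: +</t> +<artwork type="example"> + Cache-Control: private +</artwork> +<t> + Such a response can be stored by that user's private cache (for example, a browser cache) but not by a shared cache.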
+</t> +<t> + If a qualified private response directive is present, with an argument that + lists one or more field names, then only the listed header fields are limited to a + single user: a shared cache &MUST-NOT; store the listed header fields if they + are present in the original response, but &MAY; store the remainder of the + response message without those header fields, subject to + the constraints defined in <xref target="response.cacheability"/>. +</t> +<t> + The field names given are not limited to the set of header + fields defined by this specification. Field names are case-insensitive. +</t> +<t> + This directive uses the quoted-string form of the argument syntax. + A sender &SHOULD-NOT; generate the token form (even if quoting appears not + to be needed for single-entry lists). +</t> +<aside><t> + &Note; This usage of the word "private" only controls + where the response can be stored; it cannot ensure the privacy of the + message content. Also, the qualified form of the directive is + often handled by caches as if an unqualified private directive + was received; i.e., the special handling for the qualified form is not + widely implemented. +</t></aside> +</section> + +<section title="proxy-revalidate" anchor="cache-response-directive.proxy-revalidate"> + <iref item="proxy-revalidate (cache directive)" primary="true" /> +<t> + The "proxy-revalidate" response directive indicates that once the response + has become stale, a shared cache &MUST-NOT; reuse that response to satisfy + another request until it has been successfully validated by the origin, + as defined by <xref target="validation.model"/>. This is analogous to + must-revalidate (<xref target="cache-response-directive.must-revalidate"/>), + except that proxy-revalidate does not apply to private caches. +</t> +<t> + Note that "proxy-revalidate" on its own does not imply that a response is + cacheable. For example, it might be combined with the public directive + (<xref target="cache-response-directive.public"/>), allowing the response + to be cached while requiring only a shared cache to revalidate when stale. +</t> +</section> + +<section title="public" anchor="cache-response-directive.public"> + <iref item="public (cache directive)" primary="true" /> +<t> + The "public" response directive indicates that a cache &MAY; store the + response even if it would otherwise be prohibited, subject to the + constraints defined in <xref target="response.cacheability"/>. In other words, + public explicitly marks the response as cacheable. For example, + public permits a shared cache to reuse a response to a request containing + an Authorization header field (<xref target="caching.authenticated.responses"/>). +</t> +<t> + Note that it is unnecessary to add the public directive to a response that + is already cacheable according to <xref target="response.cacheability"/>. +</t> +<t> + If a response with the public directive has no explicit freshness information, + it is heuristically cacheable (<xref target="heuristic.freshness"/>).
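+</t> +<t> + As a non-normative illustration (the lifetime shown is arbitrary), an origin server that wants shared caches to reuse a response to an authenticated request for up to a day might send: +</t> +<artwork type="example"> + Cache-Control: public, max-age=86400 +</artwork> +<t> + Without the public directive, the Authorization header field on the request would normally prevent a shared cache from storing and reusing the response.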
+</t> +</section> + +<section title="s-maxage" anchor="cache-response-directive.s-maxage"> + <iref item="s-maxage (cache directive)" primary="true" /> +<t> + Argument syntax: +</t> +<ul empty="true"> + <li><x:ref>delta-seconds</x:ref> (see <xref target="delta-seconds"/>)</li> +</ul> +<t> + The "s-maxage" response directive indicates that, for a shared cache, the + maximum age specified by this directive overrides the maximum age + specified by either the max-age directive or the <x:ref>Expires</x:ref> + header field. +</t> +<t> + The s-maxage directive incorporates the + proxy-revalidate (<xref target="cache-response-directive.proxy-revalidate"/>) + response directive's semantics for a shared cache. + A shared cache &MUST-NOT; reuse a stale response with s-maxage to satisfy + another request until it has been successfully validated by the origin, as + defined by <xref target="validation.model"/>. + This directive also permits a shared cache to reuse a response to a + request containing an Authorization header field, subject to the above + requirements on maximum age and revalidation + (<xref target="caching.authenticated.responses"/>). +</t> +<t> + This directive uses the token form of the argument syntax: + e.g., 's-maxage=10' not 's-maxage="10"'. A sender &MUST-NOT; generate + the quoted-string form. +</t> +</section> +</section> + +<section title="Cache Control Extensions" anchor="cache.control.extensions"> +<t> + The Cache-Control header field can be extended through the use of one or + more cache-extension tokens, each with an optional value. + A cache &MUST; ignore unrecognized cache directives. +</t> +<t> + Informational extensions (those that do not require a change in cache + behavior) can be added without changing the semantics of other directives. +</t> +<t> + Behavioral extensions are designed to work by acting as modifiers to the + existing base of cache directives. + Both the new directive and the old directive are supplied, such that + applications that do not understand the new directive will default to the + behavior specified by the old directive, and those that understand the + new directive will recognize it as modifying the requirements associated + with the old directive. In this way, extensions to the existing + cache-control directives can be made without breaking deployed caches. +</t> +<t> + For example, consider a hypothetical new response directive called + "community" that acts as a modifier to the private directive: in addition + to private caches, any cache that is shared only by members of the named + community is allowed to cache the response. An origin server wishing to + allow the UCI community to use an otherwise private response in their + shared cache(s) could do so by including +</t> +<artwork type="example"> + Cache-Control: private, community="UCI" +</artwork> +<t> + A cache that recognizes such a community cache-extension could broaden its + behavior in accordance with that extension. A cache that does not + recognize the community cache-extension would ignore it and adhere to the + private directive. 
+</t> +<t> + New extension directives ought to consider defining: +</t> +<ul> + <li>What it means for a directive to be specified multiple times,</li> + <li>When the directive does not take an argument, what it means when an + argument is present,</li> + <li>When the directive requires an argument, what it means when it is + missing,</li> + <li>Whether the directive is specific to requests, responses, or able + to be used in either.</li> +</ul> +</section> + +<section title="Cache Directive Registry" anchor="cache.directive.registry"> +<t> + The "Hypertext Transfer Protocol (HTTP) Cache Directive Registry" defines the namespace for the + cache directives. It has been created and is now maintained at + <eref target="https://www.iana.org/assignments/http-cache-directives"/>. +</t> +<t> + A registration &MUST; include the following fields: +</t> +<ul> + <li>Cache Directive Name</li> + <li>Pointer to specification text</li> +</ul> +<t> + Values to be added to this namespace require IETF Review (see <xref + target="RFC8126" x:fmt="," x:sec="4.8"/>). +</t> +</section> +</section> + +<section title="Expires" anchor="field.expires"> + <x:anchor-alias value="header.expires"/> + <iref primary="true" item="Fields" subitem="Expires" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Expires" x:for-anchor=""/><iref primary="true" item="Fields" subitem="Expires" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Expires" x:for-anchor=""/><iref item="Expires header field" primary="true" x:for-anchor=""/> + <x:anchor-alias value="Expires"/> +<t> + The "Expires" response header field gives the date/time after which the + response is considered stale. See <xref target="expiration.model" /> for + further discussion of the freshness model. +</t> +<t> + The presence of an Expires header field does not imply that the original resource + will change or cease to exist at, before, or after that time. +</t> +<t> + The Expires field value is an HTTP-date timestamp, as defined in <xref target="http.date"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Expires"/> + <x:ref>Expires</x:ref> = <x:ref>HTTP-date</x:ref> +</sourcecode> +<t> + For example +</t> +<artwork type="example"> + Expires: Thu, 01 Dec 1994 16:00:00 GMT +</artwork> +<t> + A cache recipient &MUST; interpret invalid date formats, especially the + value "0", as representing a time in the past (i.e., "already expired"). +</t> +<t> + If a response includes a <x:ref>Cache-Control</x:ref> header field with + the max-age directive (<xref target="cache-response-directive.max-age"/>), + a recipient &MUST; ignore the Expires header field. + Likewise, if a response includes the s-maxage directive + (<xref target="cache-response-directive.s-maxage" />), a shared cache + recipient &MUST; ignore the Expires header field. In both these cases, the value + in Expires is only intended for recipients that have not yet implemented + the Cache-Control header field. +</t> +<t> + An origin server without a clock &MUST-NOT; generate an Expires header field + unless its value represents a fixed time in the past (always expired) + or its value has been associated with the resource by a system or user + with a reliable clock. +</t> +<t> + Historically, HTTP required the Expires field value to be no more than a + year in the future. 
While longer freshness lifetimes are no longer + prohibited, extremely large values have been demonstrated to cause + problems (e.g., clock overflows due to use of 32-bit integers for + time values), and many caches will evict a response far sooner than + that. +</t> +</section> + +<section title="Pragma" anchor="field.pragma"> + <x:anchor-alias value="header.pragma"/> + <iref primary="true" item="Fields" subitem="Pragma" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Pragma" x:for-anchor=""/><iref primary="true" item="Fields" subitem="Pragma" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Pragma" x:for-anchor=""/><iref item="Pragma header field" primary="true" x:for-anchor=""/> + <x:anchor-alias value="extension-pragma"/> + <x:anchor-alias value="Pragma"/> + <x:anchor-alias value="pragma-directive"/> +<t> + The "Pragma" request header field was defined for HTTP/1.0 caches, so that clients + could specify a "no-cache" request (as <x:ref>Cache-Control</x:ref> was + not defined until HTTP/1.1). +</t> +<t> + However, support for Cache-Control is now widespread. As a result, this + specification deprecates Pragma. +</t> + +<aside> + <t> + &Note; Because the meaning of "Pragma: no-cache" in responses was never + specified, it does not provide a reliable replacement for + "Cache-Control: no-cache" in them. + </t> +</aside> +</section> + +<section title="Warning" anchor="field.warning"> + <x:anchor-alias value="header.warning"/> + <rdf:Description> + <status xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">obsoleted</status> + </rdf:Description> + <iref primary="true" item="Fields" subitem="Warning" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Warning" x:for-anchor=""/><iref item="Warning header field" primary="true" x:for-anchor=""/> + <x:anchor-alias value="Warning"/> +<t> + The "Warning" header field was used to carry additional information + about the status or transformation of a message that might not be reflected + in the status code. This specification obsoletes it, as it is not widely + generated or surfaced to users. The information it carried can be gleaned + from examining other header fields, such as <x:ref>Age</x:ref>. +</t> +</section> +</section> + +<section title="Relationship to Applications and Other Caches" anchor="history.lists"> +<t> + Applications using HTTP often specify additional forms of caching. For + example, Web browsers often have history mechanisms such as "Back" buttons + that can be used to redisplay a representation retrieved earlier in a + session. +</t> +<t> + Likewise, some Web browsers implement caching of images and other assets + within a page view; they may or may not honor HTTP caching semantics. +</t> +<t> + The requirements in this specification do not necessarily apply to how + applications use data after it is retrieved from a HTTP cache. For example, a + history mechanism can display a previous representation even if it has + expired, and an application can use cached data in other ways beyond its + freshness lifetime. +</t> +<t> + This specification does not prohibit the application from taking HTTP caching into + account; for example, a history mechanism might tell the user that a view + is stale, or it might honor cache directives (e.g., Cache-Control: + no-store). 
+</t> +<t> + However, when an application caches data and does not make this + apparent to or easily controllable by the user, it is strongly encouraged to + define its operation with respect to HTTP cache directives, so as + not to surprise authors who expect caching semantics + to be honoured. For example, while it might be reasonable to define an + application cache "above" HTTP that allows a response containing + Cache-Control: no-store to be reused for requests that are directly related + to the request that fetched it (such as those created during the same page + load), it would likely be surprising and confusing to users and authors if it + were allowed to be reused for requests unrelated in any way to the one from + which it was obtained. +</t> +</section> + + +<section title="Security Considerations" anchor="security.considerations"> +<t> + This section is meant to inform developers, information providers, and + users of known security concerns specific to HTTP caching. + More general security considerations are addressed in "HTTP/1.1" + (<xref target="Messaging" x:rel="#security.considerations"/>) + and "HTTP Semantics" + (<xref target="Semantics" x:rel="#security.considerations"/>). +</t> +<t> + Caches expose additional potential vulnerabilities, since the contents of + the cache represent an attractive target for malicious exploitation. + Because cache contents persist after an HTTP request is complete, an attack + on the cache can reveal information long after a user believes that the + information has been removed from the network. Therefore, cache contents + need to be protected as sensitive information. +</t> + +<section title="Cache Poisoning" anchor="cache.poisoning"> +<t> + Various attacks might be amplified by being stored in a cache. Such + "cache poisoning" attacks happen when an attacker uses + implementation flaws, elevated privileges, or other techniques to insert + a response into a cache. This is especially effective when shared caches + are used to distribute malicious content to many clients. +</t> +<t> + One common attack vector for cache poisoning is to exploit differences in + message parsing on proxies and in user agents; see <xref + target="message.body.length"/> for the relevant requirements regarding + HTTP/1.1. +</t> +</section> + +<section title="Timing Attacks" anchor="security.timing"> +<t> + Because one of the primary uses of a cache is to optimise performance, + its use can "leak" information about what resources have been previously + requested. +</t> +<t> + For example, if a user visits a site and their browser caches some of its + responses, and then navigates to a second site, that site can attempt to + load responses it knows exist on the first site. If they load + quickly, it can be assumed that the user has visited that site, or even + a specific page on it. +</t> +<t> + Such "timing attacks" can be mitigated by adding more information to the + cache key, such as the identity of the referring site (to prevent the + attack described above). This is sometimes called "double keying." +</t> +</section> + +<section title="Caching of Sensitive Information" anchor="caching.of.sensitive.information"> +<t> + Implementation and deployment flaws (as well as misunderstanding of cache + operation) might lead to caching of sensitive information (e.g., + authentication credentials) that is thought to be private, exposing it to + unauthorized parties.
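+</t> +<t> + As a non-normative illustration, a response carrying such sensitive information can direct caches not to store it at all: +</t> +<artwork type="example"> + Cache-Control: no-store +</artwork> +<t> + Even then, this relies on caches implementing the directive correctly; as noted in <xref target="cache-response-directive.no-store"/>, it is not a reliable or sufficient mechanism for ensuring privacy.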
+</t> +<t> + Note that the Set-Cookie response header field <xref target="RFC6265"/> + does not inhibit caching; a cacheable response with a Set-Cookie header + field can be (and often is) used to satisfy subsequent requests to caches. + Servers who wish to control caching of these responses are encouraged to + emit appropriate Cache-Control response header fields. +</t> +</section> +</section> + + +<section title="IANA Considerations" anchor="iana.considerations"> +<t> + The change controller for the following registrations is: + "IETF (iesg@ietf.org) - Internet Engineering Task Force". +</t> + +<section title="Field Name Registration" anchor="field.name.registration"> +<t> + First, introduce the new "Hypertext Transfer Protocol (HTTP) Field + Name Registry" at <eref target="https://www.iana.org/assignments/http-fields"/> + as described in + <xref target="Semantics" x:rel="#field.name.registration"/>. +</t> +<t> + Then, please update the registry with the field names listed in the table + below: +</t> +<?BEGININC build/draft-ietf-httpbis-cache-latest.iana-headers ?> +<!--AUTOGENERATED FROM extract-header-defs.xslt, do not edit manually--> +<table align="left" anchor="iana.header.registration.table"> + <thead> + <tr> + <th>Field Name</th> + <th>Status</th> + <th>Ref.</th> + <th>Comments</th> + </tr> + </thead> + <tbody> + <tr> + <td>Age</td> + <td>standard</td> + <td> + <xref target="field.age" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Cache-Control</td> + <td>standard</td> + <td> + <xref target="field.cache-control" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Expires</td> + <td>standard</td> + <td> + <xref target="field.expires" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Pragma</td> + <td>standard</td> + <td> + <xref target="field.pragma" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Warning</td> + <td>obsoleted</td> + <td> + <xref target="field.warning" format="counter"/> + </td> + <td/> + </tr> + </tbody> +</table> +<!--(END)--> +<?ENDINC build/draft-ietf-httpbis-cache-latest.iana-headers ?> +</section> + +<section title="Cache Directive Registration" anchor="cache.directive.registration"> +<t> + Please update the + "Hypertext Transfer Protocol (HTTP) Cache Directive Registry" + at <eref target="https://www.iana.org/assignments/http-cache-directives"/> + with the registration procedure of <xref target="cache.directive.registry"/> + and the cache directive names summarized in the table below. 
+</t> +<?BEGININC build/draft-ietf-httpbis-cache-latest.cache-directives ?> +<!--AUTOGENERATED FROM extract-cache-directives-defs.xslt, do not edit manually--> +<table align="left" anchor="iana.cache.directive.registration.table"> + <thead> + <tr> + <th>Cache Directive</th> + <th>Reference</th> + </tr> + </thead> + <tbody> + <tr> + <td>max-age</td> + <td> + <xref target="cache-request-directive.max-age"/>, <xref target="cache-response-directive.max-age"/> + </td> + </tr> + <tr> + <td>max-stale</td> + <td> + <xref target="cache-request-directive.max-stale"/> + </td> + </tr> + <tr> + <td>min-fresh</td> + <td> + <xref target="cache-request-directive.min-fresh"/> + </td> + </tr> + <tr> + <td>must-revalidate</td> + <td> + <xref target="cache-response-directive.must-revalidate"/> + </td> + </tr> + <tr> + <td>must-understand</td> + <td> + <xref target="cache-response-directive.must-understand"/> + </td> + </tr> + <tr> + <td>no-cache</td> + <td> + <xref target="cache-request-directive.no-cache"/>, <xref target="cache-response-directive.no-cache"/> + </td> + </tr> + <tr> + <td>no-store</td> + <td> + <xref target="cache-request-directive.no-store"/>, <xref target="cache-response-directive.no-store"/> + </td> + </tr> + <tr> + <td>no-transform</td> + <td> + <xref target="cache-request-directive.no-transform"/>, <xref target="cache-response-directive.no-transform"/> + </td> + </tr> + <tr> + <td>only-if-cached</td> + <td> + <xref target="cache-request-directive.only-if-cached"/> + </td> + </tr> + <tr> + <td>private</td> + <td> + <xref target="cache-response-directive.private"/> + </td> + </tr> + <tr> + <td>proxy-revalidate</td> + <td> + <xref target="cache-response-directive.proxy-revalidate"/> + </td> + </tr> + <tr> + <td>public</td> + <td> + <xref target="cache-response-directive.public"/> + </td> + </tr> + <tr> + <td>s-maxage</td> + <td> + <xref target="cache-response-directive.s-maxage"/> + </td> + </tr> + </tbody> +</table> +<!--(END)--> +<?ENDINC build/draft-ietf-httpbis-cache-latest.cache-directives ?> +</section> + +<section title="Warn Code Registry" anchor="warn.code.registration"> +<t> + Please add a note to the "Hypertext Transfer Protocol (HTTP) Warn Codes" + registry at <eref + target="https://www.iana.org/assignments/http-warn-codes"/> to the effect + that Warning is obsoleted. 
+</t> +</section> +</section> +</middle> + +<back> +<references title="Normative References"> + + <reference anchor="Messaging"> + <x:source basename="draft-ietf-httpbis-messaging-latest" href="rfc9112.xml"> + <x:has anchor="message.body.length"/> + </x:source> + </reference> + + <reference anchor="Semantics"> + <x:source basename="draft-ietf-httpbis-semantics-latest" href="rfc9110.xml"> + <x:defines>Content-Length</x:defines> + <x:has anchor="GET"/> + <x:has anchor="abnf.extension"/> + <x:has anchor="combining.byte.ranges"/> + <x:has anchor="field.authorization"/> + <x:has anchor="field.connection"/> + <x:has anchor="field.date"/> + <x:has anchor="field.etag"/> + <x:has anchor="field.if-modified-since"/> + <x:has anchor="field.if-none-match"/> + <x:has anchor="field.if-range"/> + <x:has anchor="field.last-modified"/> + <x:has anchor="field.lines"/> + <x:has anchor="field.proxy-authenticate"/> + <x:has anchor="field.proxy-authentication-info"/> + <x:has anchor="field.proxy-authorization"/> + <x:has anchor="field.range"/> + <x:has anchor="field.vary"/> + <x:has anchor="http.date"/> + <x:has anchor="field-names"/> + <x:has anchor="message.transformations"/> + <x:has anchor="messages"/> + <x:has anchor="origin"/> + <x:has anchor="overview.of.status.codes"/> + <x:has anchor="precedence"/> + <x:has anchor="preconditions"/> + <x:has anchor="quoted.strings"/> + <x:has anchor="safe.methods"/> + <x:has anchor="status.codes"/> + <x:has anchor="target.resource"/> + <x:has anchor="tokens"/> + <x:has anchor="weak.and.strong.validators"/> + <x:has anchor="whitespace"/> + <x:has anchor="Semantics-acks" target="acks"/> + <x:has anchor="Semantics-conformance" target="conformance"/> + </x:source> + </reference> + + <reference anchor="RFC2119"> + <front> + <title>Key words for use in RFCs to Indicate Requirement Levels</title> + <author initials="S." surname="Bradner" fullname="Scott Bradner"/> + <date month="March" year="1997"/> + </front> + <seriesInfo name="BCP" value="14"/> + <seriesInfo name="RFC" value="2119"/> + </reference> + + <reference anchor="RFC8174"> + <front> + <title>Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words</title> + <author initials='B.' surname='Leiba' fullname='Barry Leiba'/> + <date year="2017" month="May"/> + </front> + <seriesInfo name='BCP' value='14'/> + <seriesInfo name='RFC' value='8174'/> + </reference> + + <reference anchor="RFC5234"> + <front> + <title abbrev="ABNF for Syntax Specifications">Augmented BNF for Syntax Specifications: ABNF</title> + <author initials="D." surname="Crocker" fullname="Dave Crocker" role="editor"/> + <author initials="P." surname="Overell" fullname="Paul Overell"/> + <date month="January" year="2008"/> + </front> + <seriesInfo name="STD" value="68"/> + <seriesInfo name="RFC" value="5234"/> + </reference> + + <reference anchor="RFC7405"> + <front> + <title>Case-Sensitive String Support in ABNF</title> + <author initials="P." surname="Kyzivat" fullname="Paul Kyzivat"/> + <date month="December" year="2014"/> + </front> + <seriesInfo name="RFC" value="7405"/> + </reference> + +</references> + +<references title="Informative References"> + + <reference anchor="RFC2616"> + <front> + <title>Hypertext Transfer Protocol -- HTTP/1.1</title> + <author fullname="R. Fielding" initials="R." surname="Fielding"/> + <author fullname="J. Gettys" initials="J." surname="Gettys"/> + <author fullname="J. Mogul" initials="J." surname="Mogul"/> + <author fullname="H. Frystyk" initials="H." surname="Frystyk"/> + <author fullname="L. Masinter" initials="L."
surname="Masinter"/> + <author fullname="P. Leach" initials="P." surname="Leach"/> + <author fullname="T. Berners-Lee" initials="T." surname="Berners-Lee"/> + <date month="June" year="1999" /> + </front> + <seriesInfo name="RFC" value="2616" /> + </reference> + + <reference anchor="RFC7234"> + <front> + <title>Hypertext Transfer Protocol (HTTP): Caching</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="M." surname="Nottingham" fullname="Mark Nottingham" role="editor"/> + <author initials="J. F." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7234"/> + </reference> + + <reference anchor='RFC5861'> + <front> + <title abbrev="HTTP stale controls">HTTP Cache-Control Extensions for Stale Content</title> + <author initials="M." surname="Nottingham" fullname="Mark Nottingham"/> + <date month="April" year="2010"/> + </front> + <seriesInfo name='RFC' value='5861' /> + </reference> + + <reference anchor='RFC5905'> + <front> + <title>Network Time Protocol Version 4: Protocol and Algorithms Specification</title> + <author initials='D.' surname='Mills' fullname='David L. Mills'/> + <author initials='J.' surname='Martin' fullname='Jim Martin' role="editor"/> + <author initials='J.' surname='Burbank' fullname='Jack Burbank'/> + <author initials='W.' surname='Kasch' fullname='William Kasch'/> + <date year='2010' month='June' /> + </front> + <seriesInfo name='RFC' value='5905' /> + </reference> + + <reference anchor="RFC6265"> + <front> + <title>HTTP State Management Mechanism</title> + <author initials="A." surname="Barth" fullname="Adam Barth"/> + <date year="2011" month="April" /> + </front> + <seriesInfo name="RFC" value="6265"/> + </reference> + + <reference anchor="RFC8126"> + <front> + <title>Guidelines for Writing an IANA Considerations Section in RFCs</title> + <author initials="M." surname="Cotton" fullname="M. Cotton"/> + <author initials="B." surname="Leiba" fullname="B. Leiba"/> + <author initials="T." surname="Narten" fullname="T. 
Narten"/> + <date year="2017" month="June" /> + </front> + <seriesInfo name="BCP" value="26"/> + <seriesInfo name="RFC" value="8126"/> + </reference> + +</references> + +<?BEGININC build/draft-ietf-httpbis-cache-latest.abnf-appendix ?> +<section xmlns:x="http://purl.org/net/xml2rfc/ext" title="Collected ABNF" anchor="collected.abnf"><t>In the collected ABNF below, list rules are expanded as per <xref target="Semantics" x:rel="#abnf.extension.sender"/>.</t><sourcecode type="abnf" name="draft-ietf-httpbis-cache-latest.parsed-abnf"> +<x:ref>Age</x:ref> = delta-seconds + +<x:ref>Cache-Control</x:ref> = [ cache-directive *( OWS "," OWS cache-directive ) ] + +<x:ref>Expires</x:ref> = HTTP-date + +<x:ref>HTTP-date</x:ref> = &lt;HTTP-date, see <xref target="Semantics" x:fmt="," x:sec="5.6.7"/>&gt; + +<x:ref>OWS</x:ref> = &lt;OWS, see <xref target="Semantics" x:fmt="," x:sec="5.6.3"/>&gt; + +<x:ref>cache-directive</x:ref> = token [ "=" ( token / quoted-string ) ] + +<x:ref>delta-seconds</x:ref> = 1*DIGIT + +<x:ref>field-name</x:ref> = &lt;field-name, see <xref target="Semantics" x:fmt="," x:sec="5.1"/>&gt; + +<x:ref>quoted-string</x:ref> = &lt;quoted-string, see <xref target="Semantics" x:fmt="," x:sec="5.6.4"/>&gt; + +<x:ref>token</x:ref> = &lt;token, see <xref target="Semantics" x:fmt="," x:sec="5.6.2"/>&gt; +</sourcecode> +</section> +<?ENDINC build/draft-ietf-httpbis-cache-latest.abnf-appendix ?> + +<section title="Changes from RFC 7234" anchor="changes.from.rfc.7234"> +<t> + Handling of duplicate and conflicting cache directives has been clarified. + (<xref target="calculating.freshness.lifetime"/>) +</t> +<t> + Cache invalidation of the URIs in the Location and Content-Location + header fields is no longer required, but still allowed. + (<xref target="invalidation"/>) +</t> +<t> + Cache invalidation of the URIs in the Location and Content-Location header fields is disallowed + when the origin is different; previously, it was the host. + (<xref target="invalidation"/>) +</t> +<t> + Handling invalid and multiple Age header field values has been clarified. + (<xref target="field.age"/>) +</t> +<t> + Some cache directives defined by this specification now have stronger + prohibitions against generating the quoted form of their values, since + this has been found to create interoperability problems. Consumers of + extension cache directives are no longer required to accept both token and + quoted-string forms, but they still need to parse them properly for + unknown extensions. + (<xref target="field.cache-control"/>) +</t> +<t> + The "public" and "private" cache directives were clarified, so that they + do not make responses reusable under any condition. + (<xref target="cache-response-directive"/>) +</t> +<t> + The "must-understand" cache directive was introduced; caches are no + longer required to understand the semantics of new response status codes + unless it is present. + (<xref target="cache-response-directive.must-understand"/>) +</t> +<t> + The Warning response header was obsoleted. Much of the information + supported by Warning could be gleaned by examining the response, and the + remaining warn-codes &mdash; although potentially useful &mdash; were entirely + advisory. In practice, Warning was not added by caches or intermediaries. 
+ (<xref target="field.warning"/>) +</t> +</section> + +<section title="Change Log" anchor="change.log" removeInRFC="true"> + +<section title="Between RFC7234 and draft 00" anchor="changes.since.publication.as.rfc"> +<t> + The changes were purely editorial: +</t> +<ul> + <li>Change boilerplate and abstract to indicate the "draft" status, and update references to ancestor specifications.</li> + <li>Remove version "1.1" from document title, indicating that this specification applies to all HTTP versions.</li> + <li>Adjust historical notes.</li> + <li>Update links to sibling specifications.</li> + <li>Replace sections listing changes from RFC 2616 by new empty sections referring to RFC 723x.</li> + <li>Remove acknowledgements specific to RFC 723x.</li> + <li>Move "Acknowledgements" to the very end and make them unnumbered.</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-00" anchor="changes.since.00"> +<t> + The changes are purely editorial: +</t> +<ul> + <li>Moved all extensibility tips, registration procedures, and registry + tables from the IANA considerations to normative sections, reducing the + IANA considerations to just instructions that will be removed prior to + publication as an RFC.</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-01" anchor="changes.since.01"> +<ul> + <li>Cite RFC 8126 instead of RFC 5226 (<eref target="https://github.com/httpwg/http-core/issues/75"/>)</li> + <li>In <xref target="field.pragma"/>, misleading statement about the relation between Pragma and Cache-Control (<eref target="https://github.com/httpwg/http-core/issues/92"/>, <eref target="https://www.rfc-editor.org/errata/eid4674"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-02" anchor="changes.since.02"> +<ul> + <li>In <xref target="response.cacheability"/>, explain that only final responses are cacheable (<eref target="https://github.com/httpwg/http-core/issues/29"/>)</li> + <li>In <xref target="cache-response-directive"/>, clarify what responses various directives apply to (<eref target="https://github.com/httpwg/http-core/issues/52"/>)</li> + <li>In <xref target="validation.sent"/>, clarify the source of validators in conditional requests (<eref target="https://github.com/httpwg/http-core/issues/110"/>)</li> + <li>Revise <xref target="history.lists"/> to apply to more than just History Lists (<eref target="https://github.com/httpwg/http-core/issues/126"/>)</li> + <li>In <xref target="field.warning"/>, deprecated "Warning" header field (<eref target="https://github.com/httpwg/http-core/issues/139"/>)</li> + <li>In <xref target="caching.authenticated.responses"/>, remove a spurious note (<eref target="https://github.com/httpwg/http-core/issues/141"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-03" anchor="changes.since.03"> +<ul> + <li>In <xref target="caching.overview"/>, define what a disconnected cache is (<eref target="https://github.com/httpwg/http-core/issues/5"/>)</li> + <li>In <xref target="constructing.responses.from.caches"/>, clarify language around how to select a response when more than one matches (<eref target="https://github.com/httpwg/http-core/issues/23"/>)</li> + <li>in <xref target="serving.stale.responses"/>, mention stale-while-revalidate and stale-if-error (<eref target="https://github.com/httpwg/http-core/issues/122"/>)</li> + <li>Remove requirements around cache request directives (<eref target="https://github.com/httpwg/http-core/issues/129"/>)</li> + <li>Deprecate 
Pragma (<eref target="https://github.com/httpwg/http-core/issues/140"/>)</li> + <li>In <xref target="caching.authenticated.responses"/> and <xref target="cache-response-directive"/>, note effect of some directives on authenticated requests (<eref target="https://github.com/httpwg/http-core/issues/161"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-04" anchor="changes.since.04"> +<ul> + <li>In <xref target="field.cache-control"/>, remove the registrations for stale-if-error and stale-while-revalidate which happened in RFC 7234 (<eref target="https://github.com/httpwg/http-core/issues/207"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-05" anchor="changes.since.05"> +<ul x:when-empty="None yet."> + <li>In <xref target="incomplete.responses"/>, clarify how weakly framed content is considered for purposes of completeness (<eref target="https://github.com/httpwg/http-core/issues/25"/>)</li> + <li>Throughout, describe Vary and cache key operations more clearly (<eref target="https://github.com/httpwg/http-core/issues/28"/>)</li> + <li>In <xref target="response.cacheability"/>, remove concept of "cacheable methods" in favor of prose (<eref target="https://github.com/httpwg/http-core/issues/54"/>, <eref target="https://www.rfc-editor.org/errata/eid5300"/>)</li> + <li>Refactored <xref target="security.considerations"/>, and added a section on timing attacks (<eref target="https://github.com/httpwg/http-core/issues/233"/>)</li> + <li>Changed "cacheable by default" to "heuristically cacheable" throughout (<eref target="https://github.com/httpwg/http-core/issues/242"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-06" anchor="changes.since.06"> +<ul x:when-empty="None yet."> + <li>In <xref target="response.cacheability"/> and <xref target="cache-response-directive.must-understand"/>, change response cacheability to only require understanding the response status code if the must-understand cache directive is present (<eref target="https://github.com/httpwg/http-core/issues/120"/>)</li> + <li>Change requirements for handling different forms of cache directives in <xref target="field.cache-control"/> (<eref target="https://github.com/httpwg/http-core/issues/128"/>)</li> + <li>Fix typo in <xref target="cache-response-directive.s-maxage"/> (<eref target="https://github.com/httpwg/http-core/issues/264"/>)</li> + <li>In <xref target="cache-response-directive.public"/> and <xref target="cache-response-directive.private"/>, clarify "private" and "public" so that they do not override all other cache directives (<eref target="https://github.com/httpwg/http-core/issues/268"/>)</li> + <li>In <xref target="response.cacheability"/>, distinguish between private with and without qualifying headers (<eref target="https://github.com/httpwg/http-core/issues/270"/>)</li> + <li>In <xref target="caching.negotiated.responses"/>, clarify that any "*" as a member of Vary will disable caching (<eref target="https://github.com/httpwg/http-core/issues/286"/>)</li> + <li>In <xref target="requirements.notation"/>, reference RFC 8174 as well (<eref target="https://github.com/httpwg/http-core/issues/303"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-07" anchor="changes.since.07"> +<ul x:when-empty="None yet."> + <li>Throughout, replace "effective request URI", "request-target" and similar with "target URI" (<eref target="https://github.com/httpwg/http-core/issues/259"/>)</li> + <li>In <xref 
target="cache-response-directive.public"/> and <xref target="cache-response-directive.private"/>, make it clear that these directives do not ignore other requirements for caching (<eref target="https://github.com/httpwg/http-core/issues/320"/>)</li> + <li>In <xref target="incomplete.responses"/>, move definition of "complete" into semantics (<eref target="https://github.com/httpwg/http-core/issues/334"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-08" anchor="changes.since.08"> +<ul x:when-empty="None yet."> + <li><xref target="collected.abnf"/> now uses the sender variant of the "#" list expansion (<eref target="https://github.com/httpwg/http-core/issues/192"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-09" anchor="changes.since.09"> +<ul x:when-empty="None yet."> + <li>In <xref target="field.age"/>, discuss handling of invalid and multiple Age header field values (<eref target="https://github.com/httpwg/http-core/issues/193"/>)</li> + <li>Switch to xml2rfc v3 mode for draft generation (<eref target="https://github.com/httpwg/http-core/issues/394"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-10" anchor="changes.since.10"> +<ul x:when-empty="None yet."> + <li>In <xref target="field.cache-control"/> (<x:ref>Cache-Control</x:ref>), adjust ABNF to allow empty lists (<eref target="https://github.com/httpwg/http-core/issues/210"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-11" anchor="changes.since.11"> +<ul x:when-empty="None."> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-12" anchor="changes.since.12"> +<ul x:when-empty="None yet."> + <li>In <xref target="serving.stale.responses"/>, remove 'no-store', as it won't be in cache in the first place (<eref target="https://github.com/httpwg/http-core/issues/447"/>)</li> + <li>In <xref target="storing.fields"/>, make it clear that only response headers need be stored (<eref target="https://github.com/httpwg/http-core/issues/457"/>)</li> + <li>Rewrote "Updating Stored Header Fields" <xref target="update"/> (<eref target="https://github.com/httpwg/http-core/issues/458"/>)</li> + <li>In <xref target="calculating.freshness.lifetime"/> clarify how to handle invalid and conflicting directives (<eref target="https://github.com/httpwg/http-core/issues/460"/>)</li> + <li>In <xref target="validation.response"/>, mention retry of failed validation requests (<eref target="https://github.com/httpwg/http-core/issues/462"/>)</li> + <li>In <xref target="validation.response"/>, clarify requirement on storing a full response to a conditional request (<eref target="https://github.com/httpwg/http-core/issues/463"/>)</li> + <li>In <xref target="field.age"/>, clarify error handling (<eref target="https://github.com/httpwg/http-core/issues/471"/>)</li> + <li>In <xref target="expiration.model"/>, remove spurious "UTC" (<eref target="https://github.com/httpwg/http-core/issues/472"/>)</li> + <li>In <xref target="expiration.model"/>, correct the date-related rule names to consider case-insensitive (<eref target="https://github.com/httpwg/http-core/issues/473"/>)</li> + <li>In <xref target="history.lists"/>, strengthen recommendation for application caches to pay attention to cache directives (<eref target="https://github.com/httpwg/http-core/issues/474"/>)</li> + <li>In <xref target="constructing.responses.from.caches"/>, mention collapsed requests (<eref target="https://github.com/httpwg/http-core/issues/475"/>)</li> + <li>In 
<xref target="invalidation"/>, relax requirements on Content-Location and Location invalidation (<eref target="https://github.com/httpwg/http-core/issues/478"/>)</li> + <li>In <xref target="freshening.responses"/>, refine the exceptions to update on a 304 (<eref target="https://github.com/httpwg/http-core/issues/488"/>)</li> + <li>Moved table of Cache-Control directives into <xref target="cache.directive.registration"/> (<eref target="https://github.com/httpwg/http-core/issues/506"/>)</li> + <li>In <xref target="notation"/>, remove unused core ABNF rules (<eref target="https://github.com/httpwg/http-core/issues/529"/>)</li> + <li>Changed to using "payload data" when defining requirements about the data being conveyed within a message, instead of the terms "payload body" or "response body" or "representation body", since they often get confused with the HTTP/1.1 message body (which includes transfer coding) (<eref target="https://github.com/httpwg/http-core/issues/553"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-13" anchor="changes.since.13"> +<ul x:when-empty="None yet."> + <li>In <xref target="cache-response-directive.must-revalidate"/>, clarify requirements around generating an error response (<eref target="https://github.com/httpwg/http-core/issues/608"/>)</li> + <li>Changed to using "content" instead of "payload" or "payload data" to avoid confusion with the payload of version-specific messaging frames (<eref target="https://github.com/httpwg/http-core/issues/654"/>)</li> + <li>In <xref target="freshening.responses"/>, clarify how multiple validators are handled (<eref target="https://github.com/httpwg/http-core/issues/659"/>)</li> + <li>In <xref target="age.calculations"/>, <xref target="field.cache-control"/>, and <xref target="cache-response-directive.no-cache"/>, remove notes about very old HTTP/1.0 behaviours (<eref target="https://github.com/httpwg/http-core/issues/660"/>)</li> + <li>In <xref target="cache-response-directive.must-understand"/>, modify operation to be more backwards-compatible with existing implementations (<eref target="https://github.com/httpwg/http-core/issues/661"/>)</li> + <li>In <xref target="cache.poisoning"/>, cache poisoning can affect private caches too (<eref target="https://github.com/httpwg/http-core/issues/730"/>)</li> +</ul> +</section> + +<section title="Since draft-ietf-httpbis-cache-14" anchor="changes.since.14"> +<ul x:when-empty="None yet."> + <li>Fix subsection ordering in <xref target="cache-response-directive"/> (<eref target="https://github.com/httpwg/http-core/issues/674"/>)</li> + <li>In <xref target="caching.overview"/>, define what a cache key is (<eref target="https://github.com/httpwg/http-core/issues/728"/>)</li> +</ul> +</section> +</section> + +<section title="Acknowledgments" anchor="acks" numbered="false"> +<t> + See <xref target="Semantics-acks"/>. 
+</t> +</section> +</back> +</rfc> diff --git a/test/fixtures/cache-tests/spec/rfc9112.xml b/test/fixtures/cache-tests/spec/rfc9112.xml new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/rfc9112.xml @@ -0,0 +1,2954 @@ +<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type='text/xsl' href='lib/myxml2rfc.xslt'?> +<?rfc compact="yes"?> +<?rfc subcompact="no" ?> +<?rfc linkmailto="no" ?> +<?rfc editing="no" ?> +<?rfc comments="yes"?> +<?rfc inline="yes"?> +<?rfc rfcedstyle="yes"?> +<?rfc-ext allow-markup-in-artwork="yes" ?> +<?rfc-ext map-dfn="none" ?> +<?rfc-ext html-pretty-print="prettyprint https://cdn.rawgit.com/google/code-prettify/master/loader/run_prettify.js"?> +<?rfc-ext include-references-in-index="yes" ?> +<?rfc-ext xml2rfc-backend="202007"?> +<rfc xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:x="http://purl.org/net/xml2rfc/ext" obsoletes="7230" category="std" x:maturity-level="internet" ipr="pre5378Trust200902" number="9112" seriesNo="99" version="3" tocDepth="4" sortRefs="true"> +<front> + + <title>HTTP/1.1</title> + + <author fullname="Roy T. Fielding" initials="R." surname="Fielding" role="editor"> + <organization>Adobe</organization> + <address> + <postal> + <postalLine>345 Park Ave</postalLine> + <postalLine>San Jose, CA 95110</postalLine> + <country>United States of America</country> + </postal> + <email>fielding@gbiv.com</email> + <uri>https://roy.gbiv.com/</uri> + </address> + </author> + + <author fullname="Mark Nottingham" initials="M." surname="Nottingham" role="editor"> + <organization>Fastly</organization> + <address> + <postal> + <postalLine>Prahran</postalLine> + <country>Australia</country> + </postal> + <email>mnot@mnot.net</email> + <uri>https://www.mnot.net/</uri> + </address> + </author> + + <author fullname="Julian Reschke" initials="J." surname="Reschke" role="editor"> + <organization abbrev="greenbytes">greenbytes GmbH</organization> + <address> + <postal> + <postalLine>Hafenweg 16</postalLine> + <postalLine>48155 Münster</postalLine> + <country>Germany</country> + </postal> + <email>julian.reschke@greenbytes.de</email> + <uri>https://greenbytes.de/tech/webdav/</uri> + </address> + </author> + + <date year="2022" month="June"/> + + <area>Applications and Real-Time</area> + <workgroup>HTTP</workgroup> + + <keyword>Hypertext Transfer Protocol</keyword> + <keyword>HTTP</keyword> + <keyword>HTTP message format</keyword> + +<abstract> +<t> + The Hypertext Transfer Protocol (HTTP) is a stateless application-level + protocol for distributed, collaborative, hypertext information systems. + This document specifies the HTTP/1.1 message syntax, message parsing, + connection management, and related security concerns. +</t> +<t> + This document obsoletes portions of RFC 7230. +</t> +</abstract> +</front> +<middle> +<section title="Introduction" anchor="introduction"> +<t> + The Hypertext Transfer Protocol (HTTP) is a stateless application-level + request/response protocol that uses extensible semantics and + self-descriptive messages for flexible interaction with network-based + hypertext information systems. HTTP/1.1 is defined by: +</t> +<ul> + <li>This document</li> + <li>"HTTP Semantics" <xref target="HTTP"/></li> + <li>"HTTP Caching" <xref target="CACHING"/></li> +</ul> +<t> + This document specifies how HTTP semantics are conveyed using the + HTTP/1.1 message syntax, framing, and connection management mechanisms. 
+ Its goal is to define the complete set of requirements for HTTP/1.1 + message parsers and message-forwarding intermediaries. +</t> +<t> + This document obsoletes the portions of + <xref target="RFC7230" format="none">RFC 7230</xref> related to HTTP/1.1 + messaging and connection management, with the changes being summarized in + <xref target="changes.from.rfc.7230"/>. The other parts of + <xref target="RFC7230" format="none">RFC 7230</xref> are obsoleted by + "HTTP Semantics" <xref target="HTTP"/>. +</t> + +<section title="Requirements Notation" anchor="requirements.notation"> +<t> + The key words "<bcp14>MUST</bcp14>", "<bcp14>MUST NOT</bcp14>", "<bcp14>REQUIRED</bcp14>", "<bcp14>SHALL</bcp14>", "<bcp14>SHALL NOT</bcp14>", + "<bcp14>SHOULD</bcp14>", "<bcp14>SHOULD NOT</bcp14>", "<bcp14>RECOMMENDED</bcp14>", "<bcp14>NOT RECOMMENDED</bcp14>", + "<bcp14>MAY</bcp14>", and "<bcp14>OPTIONAL</bcp14>" in this document are to be interpreted as + described in BCP 14 <xref target="RFC2119"/> <xref target="RFC8174"/> when, and only when, they + appear in all capitals, as shown here. +</t> +<t> + Conformance criteria and considerations regarding error handling + are defined in <xref target="HTTP" x:rel="#conformance"/>. +</t> +</section> + +<section title="Syntax Notation" anchor="notation"> +<iref primary="true" item="Grammar" subitem="ALPHA"/> +<iref primary="true" item="Grammar" subitem="CR"/> +<iref primary="true" item="Grammar" subitem="CRLF"/> +<iref primary="true" item="Grammar" subitem="CTL"/> +<iref primary="true" item="Grammar" subitem="DIGIT"/> +<iref primary="true" item="Grammar" subitem="DQUOTE"/> +<iref primary="true" item="Grammar" subitem="HEXDIG"/> +<iref primary="true" item="Grammar" subitem="HTAB"/> +<iref primary="true" item="Grammar" subitem="LF"/> +<iref primary="true" item="Grammar" subitem="OCTET"/> +<iref primary="true" item="Grammar" subitem="SP"/> +<iref primary="true" item="Grammar" subitem="VCHAR"/> +<t> + This specification uses the Augmented Backus-Naur Form (ABNF) notation of + <xref target="RFC5234"/>, extended with the notation for case-sensitivity + in strings defined in <xref target="RFC7405"/>. +</t> +<t> + It also uses a list extension, defined in <xref target="HTTP" x:rel="#abnf.extension"/>, + that allows for compact definition of comma-separated lists using a "#" + operator (similar to how the "*" operator indicates repetition). <xref target="collected.abnf"/> shows the collected grammar with all list + operators expanded to standard ABNF notation. +</t> +<t> + As a convention, ABNF rule names prefixed with "obs-" denote + obsolete grammar rules that appear for historical reasons. +</t> +<t anchor="core.rules"> + <x:anchor-alias value="ALPHA"/> + <x:anchor-alias value="CR"/> + <x:anchor-alias value="CRLF"/> + <x:anchor-alias value="CTL"/> + <x:anchor-alias value="DIGIT"/> + <x:anchor-alias value="DQUOTE"/> + <x:anchor-alias value="HEXDIG"/> + <x:anchor-alias value="HTAB"/> + <x:anchor-alias value="LF"/> + <x:anchor-alias value="OCTET"/> + <x:anchor-alias value="SP"/> + <x:anchor-alias value="VCHAR"/> + The following core rules are included by + reference, as defined in <xref target="RFC5234" x:fmt="," x:sec="B.1"/>: + ALPHA (letters), CR (carriage return), CRLF (CR LF), CTL (controls), + DIGIT (decimal 0-9), DQUOTE (double quote), + HEXDIG (hexadecimal 0-9/A-F/a-f), HTAB (horizontal tab), LF (line feed), + OCTET (any 8-bit sequence of data), SP (space), and + VCHAR (any visible <xref target="USASCII"/> character). 
+</t> +<t anchor="imported.rules"> + <x:anchor-alias value="absolute-path"/> + <x:anchor-alias value="comment"/> + <x:anchor-alias value="field-name"/> + <x:anchor-alias value="field-value"/> + <x:anchor-alias value="obs-text"/> + <x:anchor-alias value="quoted-string"/> + <x:anchor-alias value="token"/> + The rules below are defined in <xref target="HTTP"/>: +</t> +<sourcecode type="abnf7230"> + <x:ref>BWS</x:ref> = &lt;BWS, see <xref target="HTTP" x:rel="#whitespace"/>&gt; + <x:ref>OWS</x:ref> = &lt;OWS, see <xref target="HTTP" x:rel="#whitespace"/>&gt; + <x:ref>RWS</x:ref> = &lt;RWS, see <xref target="HTTP" x:rel="#whitespace"/>&gt; + <x:ref>absolute-path</x:ref> = &lt;absolute-path, see <xref target="HTTP" x:rel="#uri.references"/>&gt; + <x:ref>field-name</x:ref> = &lt;field-name, see <xref target="HTTP" x:rel="#fields.names"/>&gt; + <x:ref>field-value</x:ref> = &lt;field-value, see <xref target="HTTP" x:rel="#fields.values"/>&gt; + <x:ref>obs-text</x:ref> = &lt;obs-text, see <xref target="HTTP" x:rel="#quoted.strings"/>&gt; + <x:ref>quoted-string</x:ref> = &lt;quoted-string, see <xref target="HTTP" x:rel="#quoted.strings"/>&gt; + <x:ref>token</x:ref> = &lt;token, see <xref target="HTTP" x:rel="#tokens"/>&gt; + <x:ref>transfer-coding</x:ref> = + &lt;transfer-coding, see <xref target="HTTP" x:rel="#field.te"/>&gt; +</sourcecode> +<t anchor="imported.uri.rules"> + <x:anchor-alias value="absolute-URI"/> + <x:anchor-alias value="authority"/> + <x:anchor-alias value="query"/> + The rules below are defined in <xref target="URI"/>: +</t> +<sourcecode type="abnf7230"> + <x:ref>absolute-URI</x:ref> = &lt;absolute-URI, see <xref target="URI" x:fmt="," x:sec="4.3"/>&gt; + <x:ref>authority</x:ref> = &lt;authority, see <xref target="URI" x:fmt="," x:sec="3.2"/>&gt; + <x:ref>uri-host</x:ref> = &lt;host, see <xref target="URI" x:fmt="," x:sec="3.2.2"/>&gt; + <x:ref>port</x:ref> = &lt;port, see <xref target="URI" x:fmt="," x:sec="3.2.3"/>&gt; + <x:ref>query</x:ref> = &lt;query, see <xref target="URI" x:fmt="," x:sec="3.4"/>&gt; +</sourcecode> +</section> +</section> + +<section title="Message" anchor="http.message"> +<t> + HTTP/1.1 clients and servers communicate by sending messages. + See <xref target="HTTP" x:fmt="of" x:rel="#terminology"/> for + the general terminology and core concepts of HTTP. +</t> +<section title="Message Format" anchor="message.format"> +<x:anchor-alias value="generic-message"/> +<x:anchor-alias value="message.types"/> +<x:anchor-alias value="HTTP-message"/> +<x:anchor-alias value="start-line"/> +<iref item="header section"/> +<iref item="headers"/> +<iref item="header line"/> +<t> + An HTTP/1.1 message consists of a start-line followed by a CRLF and a + sequence of + octets in a format similar to the Internet Message Format + <xref target="RFC5322"/>: zero or more header field lines (collectively + referred to as the "headers" or the "header section"), an empty line + indicating the end of the header section, and an optional message body. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="HTTP-message"><!--terminal production--></iref> + <x:ref>HTTP-message</x:ref> = <x:ref>start-line</x:ref> <x:ref>CRLF</x:ref> + *( <x:ref>field-line</x:ref> <x:ref>CRLF</x:ref> ) + <x:ref>CRLF</x:ref> + [ <x:ref>message-body</x:ref> ] +</sourcecode> +<t> + A message can be either a request from client to server or a + response from server to client. 
Syntactically, the two types of messages + differ only in the start-line, which is either a request-line (for requests) + or a status-line (for responses), and in the algorithm for determining + the length of the message body (<xref target="message.body"/>). +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="start-line"/> + <x:ref>start-line</x:ref> = <x:ref>request-line</x:ref> / <x:ref>status-line</x:ref> +</sourcecode> +<t> + In theory, a client could receive requests and a server could receive + responses, distinguishing them by their different start-line formats. + In practice, servers are implemented to only expect a request + (a response is interpreted as an unknown or invalid request method), + and clients are implemented to only expect a response. +</t> +<t> + HTTP makes use of some protocol elements similar to the + Multipurpose Internet Mail Extensions (MIME) <xref target="RFC2045"/>. + See <xref target="differences.between.http.and.mime"/> for the + differences between HTTP and MIME messages. +</t> +</section> + +<section title="Message Parsing" anchor="message.parsing"> +<t> + The normal procedure for parsing an HTTP message is to read the + start-line into a structure, read each header field line into a hash + table by field name until the empty line, and then use the parsed + data to determine if a message body is expected. If a message body + has been indicated, then it is read as a stream until an amount + of octets equal to the message body length is read or the connection + is closed. +</t> +<t> + A recipient <bcp14>MUST</bcp14> parse an HTTP message as a sequence of octets in an + encoding that is a superset of US-ASCII <xref target="USASCII"/>. + Parsing an HTTP message as a stream of Unicode characters, without regard + for the specific encoding, creates security vulnerabilities due to the + varying ways that string processing libraries handle invalid multibyte + character sequences that contain the octet LF (%x0A). String-based + parsers can only be safely used within protocol elements after the element + has been extracted from the message, such as within a header field line value + after message parsing has delineated the individual field lines. +</t> +<t> + Although the line terminator for the start-line and + fields is the sequence CRLF, a recipient <bcp14>MAY</bcp14> recognize a + single LF as a line terminator and ignore any preceding CR. +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> generate a bare CR (a CR character not immediately + followed by LF) within any protocol elements other than the content. + A recipient of such a bare CR <bcp14>MUST</bcp14> consider that element to be invalid or + replace each bare CR with SP before processing the element or forwarding + the message. +</t> +<t> + Older HTTP/1.0 user agent implementations might send an extra CRLF + after a POST request as a workaround for some early server + applications that failed to read message body content that was + not terminated by a line-ending. An HTTP/1.1 user agent <bcp14>MUST NOT</bcp14> + preface or follow a request with an extra CRLF. If terminating + the request message body with a line-ending is desired, then the + user agent <bcp14>MUST</bcp14> count the terminating CRLF octets as part of the + message body length. +</t> +<t> + In the interest of robustness, a server that is expecting to receive and + parse a request-line <bcp14>SHOULD</bcp14> ignore at least one empty line (CRLF) + received prior to the request-line. 
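To make the "normal procedure" described above concrete, here is a minimal, non-normative Java sketch that reads the start-line and then the header field lines into a hash table until the empty line (body handling would follow, driven by the parsed fields). The class and method names are invented for this note, and the sketch leans on BufferedReader's lenient line-ending handling rather than enforcing strict CRLF.

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative only. Deliberately simplified: no obs-fold handling and no
// check for whitespace before the colon (those rules are covered later).
public final class Http1HeaderSectionReader {

  public static ParsedHead read(InputStream in) throws IOException {
    // ISO-8859-1 maps each octet 1:1 to a char, i.e. a superset of US-ASCII.
    // BufferedReader accepts LF, CRLF, or bare CR as a line terminator; a
    // stricter parser would treat a bare CR as invalid or replace it with SP.
    BufferedReader r = new BufferedReader(new InputStreamReader(in, StandardCharsets.ISO_8859_1));
    String startLine = r.readLine();                   // e.g. "GET / HTTP/1.1"
    if (startLine == null) throw new IOException("no start-line");
    Map<String, String> fields = new LinkedHashMap<>();
    String line;
    while ((line = r.readLine()) != null && !line.isEmpty()) {
      int colon = line.indexOf(':');
      if (colon <= 0) throw new IOException("malformed field line: " + line);
      String name = line.substring(0, colon).toLowerCase();  // field names are case-insensitive
      String value = line.substring(colon + 1).trim();       // drop optional whitespace
      // A repeated field name is combined into a comma-separated list value.
      fields.merge(name, value, (a, b) -> a + ", " + b);
    }
    return new ParsedHead(startLine, fields);
  }

  public record ParsedHead(String startLine, Map<String, String> fields) {}
}
```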
+</t> +<t> + A sender <bcp14>MUST NOT</bcp14> send whitespace between the start-line and + the first header field. +</t> +<t> + A recipient that receives whitespace between the start-line and + the first header field <bcp14>MUST</bcp14> either reject the message as invalid or + consume each whitespace-preceded line without further processing of it + (i.e., ignore the entire line, along with any subsequent lines preceded + by whitespace, until a properly formed header field is received or the + header section is terminated). + Rejection or removal of invalid whitespace-preceded lines is necessary + to prevent their misinterpretation by downstream recipients that might + be vulnerable to request smuggling (<xref target="request.smuggling"/>) + or response splitting (<xref target="response.splitting"/>) attacks. +</t> +<t> + When a server listening only for HTTP request messages, or processing + what appears from the start-line to be an HTTP request message, + receives a sequence of octets that does not match the HTTP-message + grammar aside from the robustness exceptions listed above, the + server <bcp14>SHOULD</bcp14> respond with a <x:ref>400 (Bad Request)</x:ref> response + and close the connection. +</t> +</section> + +<section title="HTTP Version" anchor="http.version"> + <x:anchor-alias value="HTTP-version"/> + <x:anchor-alias value="HTTP-name"/> +<t> + HTTP uses a "&lt;major&gt;.&lt;minor&gt;" numbering scheme to indicate + versions of the protocol. This specification defines version "1.1". + <xref target="HTTP" x:rel="#protocol.version"/> specifies the semantics of HTTP version + numbers. +</t> +<t> + The version of an HTTP/1.x message is indicated by an HTTP-version field + in the <x:ref>start-line</x:ref>. HTTP-version is case-sensitive. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="HTTP-version"/><iref primary="true" item="Grammar" subitem="HTTP-name"/> + <x:ref>HTTP-version</x:ref> = <x:ref>HTTP-name</x:ref> "/" <x:ref>DIGIT</x:ref> "." <x:ref>DIGIT</x:ref> + <x:ref>HTTP-name</x:ref> = %s"HTTP" +</sourcecode> +<t> + When an HTTP/1.1 message is sent to an HTTP/1.0 recipient + <xref target="HTTP10"/> or a recipient whose version is unknown, + the HTTP/1.1 message is constructed such that it can be interpreted + as a valid HTTP/1.0 message if all of the newer features are ignored. + This specification places recipient-version requirements on some + new features so that a conformant sender will only use compatible + features until it has determined, through configuration or the + receipt of a message, that the recipient supports HTTP/1.1. +</t> +<t> + Intermediaries that process HTTP messages (i.e., all intermediaries + other than those acting as tunnels) <bcp14>MUST</bcp14> send their own HTTP-version + in forwarded messages, unless it is purposefully downgraded as a workaround + for an upstream issue. In other words, an intermediary is not allowed to blindly + forward the <x:ref>start-line</x:ref> without ensuring that the + protocol version in that message matches a version to which that + intermediary is conformant for both the receiving and + sending of messages. Forwarding an HTTP message without rewriting + the HTTP-version might result in communication errors when downstream + recipients use the message sender's version to determine what features + are safe to use for later communication with that sender. 
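As a small, non-normative illustration of the HTTP-version grammar above (a case-sensitive HTTP-name followed by single major and minor digits), assuming an invented helper class:

```java
import java.util.regex.Pattern;

// Hypothetical helper matching HTTP-version = HTTP-name "/" DIGIT "." DIGIT,
// with "HTTP" being case-sensitive. In Java regex, \d is ASCII 0-9 here.
final class HttpVersion {
  private static final Pattern HTTP_VERSION = Pattern.compile("HTTP/(\\d)\\.(\\d)");

  static int[] parse(String s) {
    var m = HTTP_VERSION.matcher(s);
    if (!m.matches()) throw new IllegalArgumentException("not an HTTP-version: " + s);
    return new int[] { Integer.parseInt(m.group(1)), Integer.parseInt(m.group(2)) };
  }

  public static void main(String[] args) {
    System.out.println(java.util.Arrays.toString(parse("HTTP/1.1"))); // [1, 1]
    // parse("http/1.1") throws: HTTP-name is case-sensitive.
  }
}
```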
+</t> +<t> + A server <bcp14>MAY</bcp14> send an HTTP/1.0 response to an HTTP/1.1 request + if it is known or suspected that the client incorrectly implements the + HTTP specification and is incapable of correctly processing later + version responses, such as when a client fails to parse the version + number correctly or when an intermediary is known to blindly forward + the HTTP-version even when it doesn't conform to the given minor + version of the protocol. Such protocol downgrades <bcp14>SHOULD NOT</bcp14> be + performed unless triggered by specific client attributes, such as when + one or more of the request header fields (e.g., <x:ref>User-Agent</x:ref>) + uniquely match the values sent by a client known to be in error. +</t> +</section> +</section> + +<section title="Request Line" anchor="request.line"> + <x:anchor-alias value="Request"/> + <x:anchor-alias value="request-line"/> +<t> + A request-line begins with a method token, followed by a single + space (SP), the request-target, and another single space (SP), and ends + with the protocol version. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="request-line"/> + <x:ref>request-line</x:ref> = <x:ref>method</x:ref> <x:ref>SP</x:ref> <x:ref>request-target</x:ref> <x:ref>SP</x:ref> <x:ref>HTTP-version</x:ref> +</sourcecode> +<t> + Although the request-line grammar rule requires that each of the component + elements be separated by a single SP octet, recipients <bcp14>MAY</bcp14> instead parse + on whitespace-delimited word boundaries and, aside from the CRLF + terminator, treat any form of whitespace as the SP separator while + ignoring preceding or trailing whitespace; such whitespace includes one or + more of the following octets: SP, HTAB, VT (%x0B), FF (%x0C), or bare CR. + However, lenient parsing can result in request smuggling security + vulnerabilities if there are multiple recipients of the message and each + has its own unique interpretation of robustness + (see <xref target="request.smuggling"/>). +</t> +<t> + HTTP does not place a predefined limit on the length of a request-line, + as described in <xref target="HTTP" x:rel="#length.requirements"/>. + A server that receives a method longer than any that it implements + <bcp14>SHOULD</bcp14> respond with a <x:ref>501 (Not Implemented)</x:ref> status code. + A server that receives a request-target longer than any URI it wishes to + parse <bcp14>MUST</bcp14> respond with a + <x:ref>414 (URI Too Long)</x:ref> status code (see <xref target="HTTP" x:rel="#status.414"/>). +</t> +<t> + Various ad hoc limitations on request-line length are found in practice. + It is <bcp14>RECOMMENDED</bcp14> that all HTTP senders and recipients support, at a + minimum, request-line lengths of 8000 octets. +</t> + +<section title="Method" anchor="request.method"> + <iref primary="true" item="method"/> + <x:anchor-alias value="method"/> +<t> + The method token indicates the request method to be performed on the + target resource. The request method is case-sensitive. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="method"/> + <x:ref>method</x:ref> = <x:ref>token</x:ref> +</sourcecode> +<t> + The request methods defined by this specification can be found in + <xref target="HTTP" x:rel="#methods"/>, along with information regarding the HTTP method + registry and considerations for defining new methods. 
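A non-normative sketch of the stricter of the two request-line parsing behaviours described above — exactly three elements separated by single SP octets — which avoids the whitespace ambiguities that the request-smuggling note warns about. The record name is invented, and only the three-element shape is checked here (token and URI validation would follow).

```java
// Strict request-line parsing per the grammar above.
record RequestLine(String method, String target, String version) {

  static RequestLine parse(String line) {
    String[] parts = line.split(" ", -1);              // -1 keeps empty strings
    if (parts.length != 3 || parts[0].isEmpty() || parts[1].isEmpty() || parts[2].isEmpty()) {
      throw new IllegalArgumentException("malformed request-line");   // -> 400
    }
    return new RequestLine(parts[0], parts[1], parts[2]);
  }

  public static void main(String[] args) {
    System.out.println(parse("GET /where?q=now HTTP/1.1"));
    // parse("GET  /where HTTP/1.1") fails: two SPs yield an empty element.
  }
}
```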
+</t> +</section> + +<section title="Request Target" anchor="request.target"> + <iref primary="true" item="request-target"/> + <x:anchor-alias value="request-target"/> +<t> + The request-target identifies the target resource upon which to apply the + request. The client derives a request-target from its desired target URI. + There are four distinct formats for the request-target, depending on both + the method being requested and whether the request is to a proxy. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="request-target"/><iref primary="false" item="Grammar" subitem="origin-form"/><iref primary="false" item="Grammar" subitem="absolute-form"/><iref primary="false" item="Grammar" subitem="authority-form"/><iref primary="false" item="Grammar" subitem="asterisk-form"/> + <x:ref>request-target</x:ref> = <x:ref>origin-form</x:ref> + / <x:ref>absolute-form</x:ref> + / <x:ref>authority-form</x:ref> + / <x:ref>asterisk-form</x:ref> +</sourcecode> +<t> + No whitespace is allowed in the request-target. + Unfortunately, some user agents fail to properly encode or exclude + whitespace found in hypertext references, resulting in those disallowed + characters being sent as the request-target in a malformed request-line. +</t> +<t> + Recipients of an invalid request-line <bcp14>SHOULD</bcp14> respond with either a + <x:ref>400 (Bad Request)</x:ref> error or a <x:ref>301 (Moved Permanently)</x:ref> + redirect with the request-target properly encoded. A recipient <bcp14>SHOULD NOT</bcp14> + attempt to autocorrect and then process the request without a redirect, + since the invalid request-line might be deliberately crafted to bypass + security filters along the request chain. +</t> +<t> + A client <bcp14>MUST</bcp14> send a <x:ref>Host</x:ref> header field (<xref target="HTTP" x:rel="#field.host"/>) + in all HTTP/1.1 request messages. + If the target URI includes an authority component, then a client <bcp14>MUST</bcp14> + send a field value for Host that is identical to that authority + component, excluding any userinfo subcomponent and its "@" delimiter + (<xref target="HTTP" x:rel="#uri.schemes"/>). + If the authority component is missing or undefined for the target URI, + then a client <bcp14>MUST</bcp14> send a Host header field with an empty field value. +</t> +<t> + A server <bcp14>MUST</bcp14> respond with a <x:ref>400 (Bad Request)</x:ref> status code + to any HTTP/1.1 request message that lacks a Host header field and + to any request message that contains more than one Host header field line + or a Host header field with an invalid field value. +</t> + +<section title="origin-form" anchor="origin-form"> + <iref item="origin-form (of request-target)"/> +<t> + The most common form of request-target is the <x:dfn>origin-form</x:dfn>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="origin-form"/> + <x:ref>origin-form</x:ref> = <x:ref>absolute-path</x:ref> [ "?" <x:ref>query</x:ref> ] +</sourcecode> +<t> + When making a request directly to an origin server, other than a CONNECT + or server-wide OPTIONS request (as detailed below), + a client <bcp14>MUST</bcp14> send only the absolute path and query components of + the target URI as the request-target. + If the target URI's path component is empty, the client <bcp14>MUST</bcp14> send + "/" as the path within the origin-form of request-target. + A <x:ref>Host</x:ref> header field is also sent, as defined in + <xref target="HTTP" x:rel="#field.host"/>. 
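The requirements above can be illustrated with a small, non-normative Java sketch that derives the origin-form request-target and the Host field value from a target URI; it uses the same URI as the specification's worked example that follows, and the class and method names are invented.

```java
import java.net.URI;

// Illustrative: empty path becomes "/", and Host mirrors the authority
// minus any userinfo and its "@" delimiter.
final class OriginForm {
  static String requestTarget(URI target) {
    String path = target.getRawPath();
    if (path == null || path.isEmpty()) path = "/";
    String query = target.getRawQuery();
    return query == null ? path : path + "?" + query;
  }

  static String hostFieldValue(URI target) {
    String host = target.getHost();                      // excludes userinfo
    if (host == null) return "";                         // empty field value
    return target.getPort() == -1 ? host : host + ":" + target.getPort();
  }

  public static void main(String[] args) {
    URI u = URI.create("http://www.example.org/where?q=now");
    System.out.println(requestTarget(u));   // /where?q=now
    System.out.println(hostFieldValue(u));  // www.example.org
  }
}
```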
+</t> +<t> + For example, a client wishing to retrieve a representation of the resource + identified as +</t> +<artwork x:indent-with=" " type="example"> +http://www.example.org/where?q=now +</artwork> +<t> + directly from the origin server would open (or reuse) a TCP connection + to port 80 of the host "www.example.org" and send the lines: +</t> +<sourcecode type="http-message"> +GET /where?q=now HTTP/1.1 +Host: www.example.org +</sourcecode> +<t> + followed by the remainder of the request message. +</t> +</section> + +<section title="absolute-form" anchor="absolute-form"> + <iref item="absolute-form (of request-target)"/> +<t> + When making a request to a proxy, other than a CONNECT or server-wide + OPTIONS request (as detailed below), a client <bcp14>MUST</bcp14> send the target URI + in <x:dfn>absolute-form</x:dfn> as the request-target. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="absolute-form"/> + <x:ref>absolute-form</x:ref> = <x:ref>absolute-URI</x:ref> +</sourcecode> +<t> + The proxy is requested to either service that request from a valid cache, + if possible, or make the same request on the client's behalf either to + the next inbound proxy server or directly to the origin server indicated + by the request-target. Requirements on such "forwarding" of messages are + defined in <xref target="HTTP" x:rel="#message.forwarding"/>. +</t> +<t> + An example absolute-form of request-line would be: +</t> +<sourcecode type="http-message"> +GET http://www.example.org/pub/WWW/TheProject.html HTTP/1.1 +</sourcecode> +<t> + A client <bcp14>MUST</bcp14> send a Host header field in an HTTP/1.1 request even + if the request-target is in the absolute-form, since this + allows the Host information to be forwarded through ancient HTTP/1.0 + proxies that might not have implemented Host. +</t> +<t> + When a proxy receives a request with an absolute-form of + request-target, the proxy <bcp14>MUST</bcp14> ignore the received + Host header field (if any) and instead replace it with the host + information of the request-target. A proxy that forwards such a request + <bcp14>MUST</bcp14> generate a new Host field value based on the received + request-target rather than forward the received Host field value. +</t> +<t> + When an origin server receives a request with an absolute-form of + request-target, the origin server <bcp14>MUST</bcp14> ignore the received Host header + field (if any) and instead use the host information of the request-target. + Note that if the request-target does not have an authority component, an + empty Host header field will be sent in this case. +</t> +<t> + A server <bcp14>MUST</bcp14> accept the absolute-form in requests even though most + HTTP/1.1 clients will only send the absolute-form to a proxy. +</t> +</section> + +<section title="authority-form" anchor="authority-form"> + <iref item="authority-form (of request-target)"/> +<t> + The <x:dfn>authority-form</x:dfn> of request-target is only used for + CONNECT requests (<xref target="HTTP" x:rel="#CONNECT"/>). It consists of only the + <x:ref>uri-host</x:ref> and <x:ref>port</x:ref> number of the tunnel + destination, separated by a colon (":"). 
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="authority-form"/> + <x:ref>authority-form</x:ref> = <x:ref>uri-host</x:ref> ":" <x:ref>port</x:ref> +</sourcecode> +<t> + When making a CONNECT request to establish a tunnel through one or more + proxies, a client <bcp14>MUST</bcp14> send only the host and port of the tunnel + destination as the request-target. The client obtains the host and port + from the target URI's <x:ref>authority</x:ref> component, except that it + sends the scheme's default port if the target URI elides the port. + For example, a CONNECT request to "http://www.example.com" looks like the + following: +</t> +<sourcecode type="http-message"> +CONNECT www.example.com:80 HTTP/1.1 +Host: www.example.com + +</sourcecode> +</section> + +<section title="asterisk-form" anchor="asterisk-form"> + <iref item="asterisk-form (of request-target)"/> +<t> + The <x:dfn>asterisk-form</x:dfn> of request-target is only used for a server-wide + OPTIONS request (<xref target="HTTP" x:rel="#OPTIONS"/>). +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="asterisk-form"/> + <x:ref>asterisk-form</x:ref> = "*" +</sourcecode> +<t> + When a client wishes to request OPTIONS + for the server as a whole, as opposed to a specific named resource of + that server, the client <bcp14>MUST</bcp14> send only "*" (%x2A) as the request-target. + For example, +</t> +<sourcecode type="http-message"> +OPTIONS * HTTP/1.1 +</sourcecode> +<t> + If a proxy receives an OPTIONS request with an absolute-form of + request-target in which the URI has an empty path and no query component, + then the last proxy on the request chain <bcp14>MUST</bcp14> send a request-target + of "*" when it forwards the request to the indicated origin server. +</t> +<t> + For example, the request +</t> +<sourcecode type="http-message"> +OPTIONS http://www.example.org:8001 HTTP/1.1 +</sourcecode> +<t> + would be forwarded by the final proxy as +</t> +<sourcecode type="http-message"> +OPTIONS * HTTP/1.1 +Host: www.example.org:8001 +</sourcecode> +<t> + after connecting to port 8001 of host "www.example.org". +</t> +</section> +</section> + +<section title="Reconstructing the Target URI" anchor="reconstructing.target.uri"> + <x:anchor-alias value="h1.effective.request.uri"/> +<t> + The target URI is the <x:ref>request-target</x:ref> when the + request-target is in <x:ref>absolute-form</x:ref>. In that case, + a server will parse the URI into its generic components for further + evaluation. +</t> +<t> + Otherwise, the server reconstructs the target URI from the connection + context and various parts of the request message in order to identify the + target resource (<xref target="HTTP" x:rel="#target.resource"/>): +</t> +<ul> +<li> + If the server's configuration provides for a fixed URI scheme, or a scheme + is provided by a trusted outbound gateway, that scheme is used for the + target URI. This is common in large-scale deployments because a gateway + server will receive the client's connection context and replace that with + their own connection to the inbound server. + Otherwise, if the request is received over a secured connection, + the target URI's scheme is "https"; if not, the scheme is "http". +</li> +<li> + If the request-target is in <x:ref>authority-form</x:ref>, + the target URI's authority component is the request-target. + Otherwise, the target URI's authority component is the field value of the + <x:ref>Host</x:ref> header field. 
If there is no <x:ref>Host</x:ref> + header field or if its field value is empty or invalid, + the target URI's authority component is empty. +</li> +<li> + If the request-target is in <x:ref>authority-form</x:ref> or + <x:ref>asterisk-form</x:ref>, the target URI's combined + <x:ref>path</x:ref> and <x:ref>query</x:ref> component is empty. + Otherwise, the target URI's combined <x:ref>path</x:ref> and + <x:ref>query</x:ref> component is the request-target. +</li> +<li> + The components of a reconstructed target URI, once determined as above, + can be recombined into <x:ref>absolute-URI</x:ref> form by concatenating + the scheme, "://", authority, and combined path and query component. +</li> +</ul> +<t> + Example 1: The following message received over a secure connection +</t> +<sourcecode type="http-message"> +GET /pub/WWW/TheProject.html HTTP/1.1 +Host: www.example.org +</sourcecode> +<t> + has a target URI of +</t> +<artwork type="example" x:indent-with=" "> +https://www.example.org/pub/WWW/TheProject.html +</artwork> +<t> + Example 2: The following message received over an insecure connection +</t> +<sourcecode type="http-message"> +OPTIONS * HTTP/1.1 +Host: www.example.org:8080 +</sourcecode> +<t> + has a target URI of +</t> +<artwork type="example" x:indent-with=" "> +http://www.example.org:8080 +</artwork> +<t> + If the target URI's authority component is empty and its URI scheme + requires a non-empty authority (as is the case for "http" and "https"), + the server can reject the request or determine whether a configured + default applies that is consistent with the incoming connection's context. + Context might include connection details like address and port, what + security has been applied, and locally defined information specific to + that server's configuration. An empty authority is replaced with the + configured default before further processing of the request. +</t> +<t> + Supplying a default name for authority within the context of a secured + connection is inherently unsafe if there is any chance that the user + agent's intended authority might differ from the default. + A server that can uniquely identify an authority from the request + context <bcp14>MAY</bcp14> use that identity as a default without this risk. + Alternatively, it might be better to redirect the request to a safe + resource that explains how to obtain a new client. +</t> +<t> + Note that reconstructing the client's target URI is only half of the + process for identifying a target resource. The other half is determining + whether that target URI identifies a resource for which the server is + willing and able to send a response, as defined in + <xref target="HTTP" x:rel="#routing.reject"/>. +</t> +</section> +</section> + +<section title="Status Line" anchor="status.line"> + <x:anchor-alias value="response"/> + <x:anchor-alias value="status-line"/> + <x:anchor-alias value="status-code"/> + <x:anchor-alias value="status code"/> + <x:anchor-alias value="reason-phrase"/> +<t> + The first line of a response message is the status-line, consisting + of the protocol version, a space (SP), the status code, and another space + and ending with an <bcp14>OPTIONAL</bcp14> textual phrase describing the status code. 
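As a non-normative aside before the formal grammar below, the status-line structure just described can be parsed with a simple limit-3 split; the record name is invented, and a real recipient would also apply the leniency and robustness considerations that follow.

```java
// Illustrative status-line parsing: HTTP-version SP status-code SP
// [ reason-phrase ]. The reason-phrase may be absent, but the SP before it
// is still sent, so a limit-3 split keeps an empty third element.
record StatusLine(String version, int code, String reason) {

  static StatusLine parse(String line) {
    String[] parts = line.split(" ", 3);
    if (parts.length < 3 || !parts[1].matches("\\d{3}")) {
      throw new IllegalArgumentException("malformed status-line: " + line);
    }
    return new StatusLine(parts[0], Integer.parseInt(parts[1]), parts[2]);
  }

  public static void main(String[] args) {
    System.out.println(parse("HTTP/1.1 200 OK"));
    System.out.println(parse("HTTP/1.1 204 "));   // empty reason-phrase
  }
}
```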
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="status-line"/> + <x:ref>status-line</x:ref> = <x:ref>HTTP-version</x:ref> <x:ref>SP</x:ref> <x:ref>status-code</x:ref> <x:ref>SP</x:ref> [ <x:ref>reason-phrase</x:ref> ] +</sourcecode> +<t> + Although the status-line grammar rule requires that each of the component + elements be separated by a single SP octet, recipients <bcp14>MAY</bcp14> instead parse + on whitespace-delimited word boundaries and, aside from the line + terminator, treat any form of whitespace as the SP separator while + ignoring preceding or trailing whitespace; such whitespace includes one or + more of the following octets: SP, HTAB, VT (%x0B), FF (%x0C), or bare CR. + However, lenient parsing can result in response splitting security + vulnerabilities if there are multiple recipients of the message and each + has its own unique interpretation of robustness + (see <xref target="response.splitting"/>). +</t> +<t> + The status-code element is a 3-digit integer code describing the + result of the server's attempt to understand and satisfy the client's + corresponding request. A recipient parses and interprets the remainder + of the response message in light of the semantics defined for that + status code, if the status code is recognized by that recipient, + or in accordance with the class of that status code when the specific + code is unrecognized. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="status-code"/> + <x:ref>status-code</x:ref> = 3<x:ref>DIGIT</x:ref> +</sourcecode> +<t> + HTTP's core status codes are defined in <xref target="HTTP" x:rel="#status.codes"/>, + along with the classes of status codes, considerations for the + definition of new status codes, and the IANA registry for collecting + such definitions. +</t> +<t> + The reason-phrase element exists for the sole purpose of providing a + textual description associated with the numeric status code, mostly out of + deference to earlier Internet application protocols that were more + frequently used with interactive text clients. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="reason-phrase"/> + <x:ref>reason-phrase</x:ref> = 1*( <x:ref>HTAB</x:ref> / <x:ref>SP</x:ref> / <x:ref>VCHAR</x:ref> / <x:ref>obs-text</x:ref> ) +</sourcecode> +<t> + A client <bcp14>SHOULD</bcp14> ignore the reason-phrase content because it is not a + reliable channel for information (it might be translated for a given locale, + overwritten by intermediaries, or discarded when the message is forwarded + via other versions of HTTP). + A server <bcp14>MUST</bcp14> send the space that separates the status-code from the + reason-phrase even when the reason-phrase is absent (i.e., the status-line + would end with the space). +</t> +</section> + +<section title="Field Syntax" anchor="header.field.syntax"> + <x:anchor-alias value="field-line"/> +<t> + Each field line consists of a case-insensitive field name + followed by a colon (":"), optional leading whitespace, the field line value, + and optional trailing whitespace. 
+</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="field-line"/><iref primary="false" item="Grammar" subitem="field-name"/><iref primary="false" item="Grammar" subitem="field-value"/> + <x:ref>field-line</x:ref> = <x:ref>field-name</x:ref> ":" <x:ref>OWS</x:ref> <x:ref>field-value</x:ref> <x:ref>OWS</x:ref> +</sourcecode> +<t> + Rules for parsing within field values are + defined in <xref target="HTTP" x:rel="#fields.values"/>. This section covers the + generic syntax for header field inclusion within, and extraction from, + HTTP/1.1 messages. +</t> + +<section title="Field Line Parsing" anchor="field.parsing"> + <x:anchor-alias value="header.parsing"/> +<t> + Messages are parsed using a generic algorithm, independent of the + individual field names. The contents within a given field line value are + not parsed until a later stage of message interpretation (usually after the + message's entire field section has been processed). +</t> +<t> + No whitespace is allowed between the field name and colon. + In the past, differences in the handling of such whitespace have led to + security vulnerabilities in request routing and response handling. + A server <bcp14>MUST</bcp14> reject, with a response status code of + <x:ref>400 (Bad Request)</x:ref>, any received request message that contains + whitespace between a header field name and colon. + A proxy <bcp14>MUST</bcp14> remove any such whitespace + from a response message before forwarding the message downstream. +</t> +<t> + A field line value might be preceded and/or followed by optional whitespace + (OWS); a single SP preceding the field line value is preferred for consistent + readability by humans. + The field line value does not include that leading or trailing whitespace: OWS + occurring before the first non-whitespace octet of the field line value, + or after the last non-whitespace octet of the field line value, is excluded by + parsers when extracting the field line value from a field line. +</t> +</section> + +<section title="Obsolete Line Folding" anchor="line.folding"> + <x:anchor-alias value="obs-fold"/> +<t> + Historically, HTTP/1.x field values could be extended over multiple + lines by preceding each extra line with at least one space or horizontal + tab (obs-fold). This specification deprecates such line folding except + within the "message/http" media type + (<xref target="media.type.message.http"/>). +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="obs-fold"/> + <x:ref>obs-fold</x:ref> = <x:ref>OWS</x:ref> <x:ref>CRLF</x:ref> <x:ref>RWS</x:ref> + ; obsolete line folding +</sourcecode> +<t> + A sender <bcp14>MUST NOT</bcp14> generate a message that includes line folding + (i.e., that has any field line value that contains a match to the + <x:ref>obs-fold</x:ref> rule) unless the message is intended for packaging + within the "message/http" media type. +</t> +<t> + A server that receives an <x:ref>obs-fold</x:ref> in a request message that + is not within a "message/http" container <bcp14>MUST</bcp14> either reject the message by + sending a <x:ref>400 (Bad Request)</x:ref>, preferably with a + representation explaining that obsolete line folding is unacceptable, or + replace each received <x:ref>obs-fold</x:ref> with one or more + <x:ref>SP</x:ref> octets prior to interpreting the field value or + forwarding the message downstream. 
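Combining the field-line parsing and obs-fold rules above, a non-normative Java sketch might look like the following; the names are invented, and String.strip() removes slightly more than OWS (SP / HTAB) but is close enough for illustration.

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

// Illustrative field-section handling: each obs-fold (CRLF followed by
// SP/HTAB) is replaced with a single SP before interpretation, whitespace
// before the colon is rejected, and optional whitespace around the field
// line value is dropped.
final class FieldLines {
  private static final Pattern OBS_FOLD = Pattern.compile("\r\n[ \t]+");

  static Map<String, String> parse(String headerSection) {
    String unfolded = OBS_FOLD.matcher(headerSection).replaceAll(" ");
    Map<String, String> fields = new LinkedHashMap<>();
    for (String line : unfolded.split("\r\n")) {
      if (line.isEmpty()) continue;
      int colon = line.indexOf(':');
      if (colon <= 0 || Character.isWhitespace(line.charAt(colon - 1))) {
        throw new IllegalArgumentException("reject with 400: " + line);
      }
      fields.put(line.substring(0, colon).toLowerCase(), line.substring(colon + 1).strip());
    }
    return fields;
  }

  public static void main(String[] args) {
    System.out.println(parse("Host: example.org\r\nX-Note: folded\r\n value\r\n"));
    // {host=example.org, x-note=folded value}
  }
}
```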
+</t> +<t> + A proxy or gateway that receives an <x:ref>obs-fold</x:ref> in a response + message that is not within a "message/http" container <bcp14>MUST</bcp14> either discard + the message and replace it with a <x:ref>502 (Bad Gateway)</x:ref> + response, preferably with a representation explaining that unacceptable + line folding was received, or replace each received <x:ref>obs-fold</x:ref> + with one or more <x:ref>SP</x:ref> octets prior to interpreting the field + value or forwarding the message downstream. +</t> +<t> + A user agent that receives an <x:ref>obs-fold</x:ref> in a response message + that is not within a "message/http" container <bcp14>MUST</bcp14> replace each received + <x:ref>obs-fold</x:ref> with one or more <x:ref>SP</x:ref> octets prior to + interpreting the field value. +</t> +</section> +</section> + +<section title="Message Body" anchor="message.body"> + <x:anchor-alias value="message-body"/> +<t> + The message body (if any) of an HTTP/1.1 message is used to carry content + (<xref target="HTTP" x:rel="#content"/>) for the request or response. The + message body is identical to the content unless a transfer coding has + been applied, as described in <xref target="field.transfer-encoding"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="message-body"/> + <x:ref>message-body</x:ref> = *OCTET +</sourcecode> +<t> + The rules for determining when a message body is present in an HTTP/1.1 + message differ for requests and responses. +</t> +<t> + The presence of a message body in a request is signaled by a + <x:ref>Content-Length</x:ref> or <x:ref>Transfer-Encoding</x:ref> header + field. Request message framing is independent of method semantics. +</t> +<t> + The presence of a message body in a response, as detailed in + <xref target="message.body.length"/>, depends on both the request + method to which it is responding and the response status code. This + corresponds to when response content is allowed by HTTP semantics + (<xref target="HTTP" x:rel="#content.semantics"/>). +</t> + +<section title="Transfer-Encoding" anchor="field.transfer-encoding"> + <x:anchor-alias value="header.transfer-encoding"/> + <iref primary="true" item="Fields" subitem="Transfer-Encoding" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="Transfer-Encoding" x:for-anchor=""/><iref primary="true" item="Transfer-Encoding header field" x:for-anchor=""/> + <iref item="chunked (Coding Format)"/> + <x:anchor-alias value="Transfer-Encoding"/> +<t> + The Transfer-Encoding header field lists the transfer coding names + corresponding to the sequence of transfer codings that have been + (or will be) applied to the content in order to form the message body. + Transfer codings are defined in <xref target="transfer.codings"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="Transfer-Encoding"/> + <x:ref>Transfer-Encoding</x:ref> = #<x:ref>transfer-coding</x:ref> + ; defined in <xref target="HTTP" x:rel="#field.te"/> +</sourcecode> +<t> + Transfer-Encoding is analogous to the Content-Transfer-Encoding field of + MIME, which was designed to enable safe transport of binary data over a + 7-bit transport service (<xref target="RFC2045" x:fmt="," x:sec="6"/>). + However, safe transport has a different focus for an 8bit-clean transfer + protocol. In HTTP's case, Transfer-Encoding is primarily intended to + accurately delimit dynamically generated content. 
It also serves to + distinguish encodings that are only applied in transit from the encodings + that are a characteristic of the selected representation. +</t> +<t> + A recipient <bcp14>MUST</bcp14> be able to parse the chunked transfer coding + (<xref target="chunked.encoding"/>) because it plays a crucial role in + framing messages when the content size is not known in advance. + A sender <bcp14>MUST NOT</bcp14> apply the chunked transfer coding more than once to a + message body (i.e., chunking an already chunked message is not allowed). + If any transfer coding other than chunked is applied to a request's content, + the sender <bcp14>MUST</bcp14> apply chunked as the final transfer coding to + ensure that the message is properly framed. + If any transfer coding other than chunked is applied to a response's content, + the sender <bcp14>MUST</bcp14> either apply chunked as the final transfer coding + or terminate the message by closing the connection. +</t> +<t> + For example, +</t> +<sourcecode type="http-message"> +Transfer-Encoding: gzip, chunked +</sourcecode> +<t> + indicates that the content has been compressed using the gzip + coding and then chunked using the chunked coding while forming the + message body. +</t> +<t> + Unlike <x:ref>Content-Encoding</x:ref> (<xref target="HTTP" x:rel="#content.codings"/>), + Transfer-Encoding is a property of the message, not of the representation. + Any recipient along the request/response chain <bcp14>MAY</bcp14> decode the received + transfer coding(s) or apply additional transfer coding(s) to the message + body, assuming that corresponding changes are made to the Transfer-Encoding + field value. Additional information about the encoding parameters can be + provided by other header fields not defined by this specification. +</t> +<t> + Transfer-Encoding <bcp14>MAY</bcp14> be sent in a response to a HEAD request or in a + <x:ref>304 (Not Modified)</x:ref> response (<xref target="HTTP" x:rel="#status.304"/>) to a GET request, + neither of which includes a message body, + to indicate that the origin server would have applied a transfer coding + to the message body if the request had been an unconditional GET. + This indication is not required, however, because any recipient on + the response chain (including the origin server) can remove transfer + codings when they are not needed. +</t> +<t> + A server <bcp14>MUST NOT</bcp14> send a Transfer-Encoding header field in any response + with a status code of + <x:ref>1xx (Informational)</x:ref> or <x:ref>204 (No Content)</x:ref>. + A server <bcp14>MUST NOT</bcp14> send a Transfer-Encoding header field in any + <x:ref>2xx (Successful)</x:ref> response to a CONNECT request (<xref target="HTTP" x:rel="#CONNECT"/>). +</t> +<t> + A server that receives a request message with a transfer coding it does + not understand <bcp14>SHOULD</bcp14> respond with <x:ref>501 (Not Implemented)</x:ref>. +</t> +<t> + Transfer-Encoding was added in HTTP/1.1. It is generally assumed that + implementations advertising only HTTP/1.0 support will not understand + how to process transfer-encoded content, and that an HTTP/1.0 message + received with a Transfer-Encoding is likely to have been forwarded + without proper handling of the chunked transfer coding in transit. 
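The requirement above — that chunked, when used on a request, be the final transfer coding and be applied only once — can be sketched as a small, non-normative check (names invented; transfer-coding parameters are ignored for brevity):

```java
import java.util.Arrays;
import java.util.List;

// Illustrative check of the framing rule for requests.
final class TransferEncodingCheck {
  static List<String> codings(String fieldValue) {
    return Arrays.stream(fieldValue.split(","))
                 .map(s -> s.strip().toLowerCase())   // coding names are case-insensitive
                 .filter(s -> !s.isEmpty())           // the list rule tolerates empty elements
                 .toList();
  }

  static boolean validRequestFraming(String transferEncoding) {
    List<String> c = codings(transferEncoding);
    return !c.isEmpty()
        && c.get(c.size() - 1).equals("chunked")      // chunked is the final coding
        && c.indexOf("chunked") == c.size() - 1;      // and is applied only once
  }

  public static void main(String[] args) {
    System.out.println(validRequestFraming("gzip, chunked")); // true
    System.out.println(validRequestFraming("chunked, gzip")); // false -> 400 and close
  }
}
```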
+</t> +<t> + A client <bcp14>MUST NOT</bcp14> send a request containing Transfer-Encoding unless it + knows the server will handle HTTP/1.1 requests (or later minor revisions); + such knowledge might be in the form of specific user configuration or by + remembering the version of a prior received response. + A server <bcp14>MUST NOT</bcp14> send a response containing Transfer-Encoding unless + the corresponding request indicates HTTP/1.1 (or later minor revisions). +</t> +<t> + Early implementations of Transfer-Encoding would occasionally send both + a chunked transfer coding for message framing and an estimated Content-Length + header field for use by progress bars. This is why Transfer-Encoding is + defined as overriding Content-Length, as opposed to them being mutually + incompatible. Unfortunately, forwarding such a message can lead to + vulnerabilities regarding + request smuggling (<xref target="request.smuggling"/>) or + response splitting (<xref target="response.splitting"/>) attacks + if any downstream recipient fails to parse the message according to this + specification, particularly when a downstream recipient only implements + HTTP/1.0. +</t> +<t> + A server <bcp14>MAY</bcp14> reject a request that contains both Content-Length and + Transfer-Encoding or process such a request in accordance with the + Transfer-Encoding alone. Regardless, the server <bcp14>MUST</bcp14> close the connection + after responding to such a request to avoid the potential attacks. +</t> +<t> + A server or client that receives an HTTP/1.0 message containing a + Transfer-Encoding header field <bcp14>MUST</bcp14> treat the message as if the framing + is faulty, even if a Content-Length is present, and close the connection + after processing the message. The message sender might have retained a + portion of the message, in buffer, that could be misinterpreted by further + use of the connection. +</t> +</section> + +<section title="Content-Length" anchor="body.content-length"> + <iref primary="false" item="Content-Length header field" x:for-anchor=""/> + <x:anchor-alias value="Content-Length"/> +<t> + When a message does not have a <x:ref>Transfer-Encoding</x:ref> header + field, a Content-Length header field (<xref target="HTTP" x:rel="#field.content-length"/>) can provide the anticipated size, + as a decimal number of octets, for potential content. + For messages that do include content, the Content-Length field value + provides the framing information necessary for determining where the data + (and message) ends. For messages that do not include content, the + Content-Length indicates the size of the selected representation + (<xref target="HTTP" x:rel="#field.content-length"/>). +</t> +<t> + A sender <bcp14>MUST NOT</bcp14> send a Content-Length header field in any message that + contains a <x:ref>Transfer-Encoding</x:ref> header field. +</t> +<aside> + <t> + <x:h>Note:</x:h> HTTP's use of Content-Length for message framing differs + significantly from the same field's use in MIME, where it is an optional + field used only within the "message/external-body" media-type. 
+ </t> +</aside> +</section> + +<section title="Message Body Length" anchor="message.body.length"> + <iref item="chunked (Coding Format)"/> +<t> + The length of a message body is determined by one of the following + (in order of precedence): +</t> +<ol> + <li><t> + Any response to a HEAD request and any response with a + <x:ref>1xx (Informational)</x:ref>, <x:ref>204 (No Content)</x:ref>, or + <x:ref>304 (Not Modified)</x:ref> status code is always + terminated by the first empty line after the header fields, regardless of + the header fields present in the message, and thus cannot contain a + message body or trailer section. + </t></li> + <li><t> + Any <x:ref>2xx (Successful)</x:ref> response to a CONNECT request implies that the + connection will become a tunnel immediately after the empty line that + concludes the header fields. A client <bcp14>MUST</bcp14> ignore any + <x:ref>Content-Length</x:ref> or <x:ref>Transfer-Encoding</x:ref> header + fields received in such a message. + </t></li> + <li><t> + If a message is received with both a <x:ref>Transfer-Encoding</x:ref> + and a <x:ref>Content-Length</x:ref> header field, the Transfer-Encoding + overrides the Content-Length. Such a message might indicate an attempt to + perform request smuggling (<xref target="request.smuggling"/>) or + response splitting (<xref target="response.splitting"/>) and ought to be + handled as an error. + An intermediary that chooses to forward the message <bcp14>MUST</bcp14> first remove the + received Content-Length field and process the Transfer-Encoding + (as described below) prior to forwarding the message downstream. + </t></li> + <li><t> + If a <x:ref>Transfer-Encoding</x:ref> header field is present + and the chunked transfer coding (<xref target="chunked.encoding"/>) + is the final encoding, the message body length is determined by reading + and decoding the chunked data until the transfer coding indicates the + data is complete. + </t> + <t> + If a <x:ref>Transfer-Encoding</x:ref> header field is present in a + response and the chunked transfer coding is not the final encoding, the + message body length is determined by reading the connection until it is + closed by the server. + </t> + <t> + If a <x:ref>Transfer-Encoding</x:ref> header field is present in a request + and the chunked transfer coding is not the final encoding, the message body + length cannot be determined reliably; the server <bcp14>MUST</bcp14> respond with + the <x:ref>400 (Bad Request)</x:ref> status code and then close the + connection. + </t></li> + <li><t> + If a message is received without <x:ref>Transfer-Encoding</x:ref> and with + an invalid <x:ref>Content-Length</x:ref> header field, then the message + framing is invalid and the recipient <bcp14>MUST</bcp14> treat it as an unrecoverable + error, unless the field value can be successfully parsed as a + comma-separated list (<xref target="HTTP" x:rel="#abnf.extension"/>), all values in the + list are valid, and all values in the list are the same (in which case, the + message is processed with that single value used as the Content-Length field + value). + If the unrecoverable error is in a request message, + the server <bcp14>MUST</bcp14> respond with + a <x:ref>400 (Bad Request)</x:ref> status code and then close the connection. + If it is in a response message received by a proxy, + the proxy <bcp14>MUST</bcp14> close the connection to the server, discard the received + response, and send a <x:ref>502 (Bad Gateway)</x:ref> response to the + client. 
+ If it is in a response message received by a user agent, + the user agent <bcp14>MUST</bcp14> close the connection to the server and discard the + received response. + </t></li> + <li><t> + If a valid <x:ref>Content-Length</x:ref> header field is present without + <x:ref>Transfer-Encoding</x:ref>, its decimal value defines the + expected message body length in octets. + If the sender closes the connection or the recipient times out before the + indicated number of octets are received, the recipient <bcp14>MUST</bcp14> consider + the message to be incomplete and close the connection. + </t></li> + <li><t> + If this is a request message and none of the above are true, then the + message body length is zero (no message body is present). + </t></li> + <li><t> + Otherwise, this is a response message without a declared message body + length, so the message body length is determined by the number of octets + received prior to the server closing the connection. + </t></li> +</ol> +<t> + Since there is no way to distinguish a successfully completed, + close-delimited response message from a partially received message interrupted + by network failure, a server <bcp14>SHOULD</bcp14> generate encoding or + length-delimited messages whenever possible. The close-delimiting + feature exists primarily for backwards compatibility with HTTP/1.0. +</t> +<aside> + <t> + <x:h>Note:</x:h> Request messages are never close-delimited because they are always + explicitly framed by length or transfer coding, with the absence of both implying + the request ends immediately after the header section. + </t> +</aside> +<t> + A server <bcp14>MAY</bcp14> reject a request that contains a message body but + not a <x:ref>Content-Length</x:ref> by responding with + <x:ref>411 (Length Required)</x:ref>. +</t> +<t> + Unless a transfer coding other than chunked has been applied, + a client that sends a request containing a message body <bcp14>SHOULD</bcp14> + use a valid <x:ref>Content-Length</x:ref> header field if the message body + length is known in advance, rather than the chunked transfer coding, since some + existing services respond to chunked with a <x:ref>411 (Length Required)</x:ref> + status code even though they understand the chunked transfer coding. This + is typically because such services are implemented via a gateway that + requires a content length in advance of being called, and the server + is unable or unwilling to buffer the entire request before processing. +</t> +<t> + A user agent that sends a request that contains a message body <bcp14>MUST</bcp14> send + either a valid <x:ref>Content-Length</x:ref> header field or use the + chunked transfer coding. A client <bcp14>MUST NOT</bcp14> use the chunked transfer + coding unless it knows the server will handle HTTP/1.1 (or later) + requests; such knowledge can be in the form of specific user configuration + or by remembering the version of a prior received response. +</t> +<t> + If the final response to the last request on a connection has been + completely received and there remains additional data to read, a user agent + <bcp14>MAY</bcp14> discard the remaining data or attempt to determine if that data + belongs as part of the prior message body, which might be the case if the + prior message's Content-Length value is incorrect. A client <bcp14>MUST NOT</bcp14> + process, cache, or forward such extra data as a separate response, since + such behavior would be vulnerable to cache poisoning. 
+</t> +</section> +</section> + +<section title="Transfer Codings" anchor="transfer.codings"> +<t> + Transfer coding names are used to indicate an encoding + transformation that has been, can be, or might need to be applied to a + message's content in order to ensure "safe transport" through the network. + This differs from a content coding in that the transfer coding is a + property of the message rather than a property of the representation + that is being transferred. +</t> +<t> + All transfer-coding names are case-insensitive and ought to be registered + within the HTTP Transfer Coding registry, as defined in + <xref target="transfer.coding.registry"/>. + They are used in the <x:ref>Transfer-Encoding</x:ref> + (<xref target="field.transfer-encoding"/>) and <x:ref>TE</x:ref> + (<xref target="HTTP" x:rel="#field.te"/>) header fields (the latter also + defining the "transfer-coding" grammar). +</t> + +<section title="Chunked Transfer Coding" anchor="chunked.encoding"> + <iref primary="true" item="chunked (transfer coding)"/> + <x:anchor-alias value="chunk"/> + <x:anchor-alias value="chunked-body"/> + <x:anchor-alias value="chunk-data"/> + <x:anchor-alias value="chunk-size"/> + <x:anchor-alias value="last-chunk"/> +<t> + The chunked transfer coding wraps content in order to transfer it + as a series of chunks, each with its own size indicator, followed by an + <bcp14>OPTIONAL</bcp14> trailer section containing trailer fields. Chunked enables content + streams of unknown size to be transferred as a sequence of length-delimited + buffers, which enables the sender to retain connection persistence and the + recipient to know when it has received the entire message. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="chunked-body"><!--terminal production--></iref><iref primary="true" item="Grammar" subitem="chunk"/><iref primary="true" item="Grammar" subitem="chunk-size"/><iref primary="true" item="Grammar" subitem="last-chunk"/><iref primary="false" item="Grammar" subitem="trailer-section"/><iref primary="false" item="Grammar" subitem="chunk-ext"/><iref primary="true" item="Grammar" subitem="chunk-data"/> + <x:ref>chunked-body</x:ref> = *<x:ref>chunk</x:ref> + <x:ref>last-chunk</x:ref> + <x:ref>trailer-section</x:ref> + <x:ref>CRLF</x:ref> + + <x:ref>chunk</x:ref> = <x:ref>chunk-size</x:ref> [ <x:ref>chunk-ext</x:ref> ] <x:ref>CRLF</x:ref> + <x:ref>chunk-data</x:ref> <x:ref>CRLF</x:ref> + <x:ref>chunk-size</x:ref> = 1*<x:ref>HEXDIG</x:ref> + <x:ref>last-chunk</x:ref> = 1*("0") [ <x:ref>chunk-ext</x:ref> ] <x:ref>CRLF</x:ref> + + <x:ref>chunk-data</x:ref> = 1*<x:ref>OCTET</x:ref> ; a sequence of chunk-size octets +</sourcecode> +<t> + The chunk-size field is a string of hex digits indicating the size of + the chunk-data in octets. The chunked transfer coding is complete when a + chunk with a chunk-size of zero is received, possibly followed by a + trailer section, and finally terminated by an empty line. +</t> +<t> + A recipient <bcp14>MUST</bcp14> be able to parse and decode the chunked transfer coding. +</t> +<t> + HTTP/1.1 does not define any means to limit the size of a + chunked response such that an intermediary can be assured of buffering the + entire response. Additionally, very large chunk sizes may cause overflows + or loss of precision if their values are not represented accurately in a + receiving implementation. 
Therefore, recipients <bcp14>MUST</bcp14> anticipate + potentially large hexadecimal numerals and prevent parsing errors due to + integer conversion overflows or precision loss due to integer + representation. +</t> +<t> + The chunked coding does not define any parameters. Their presence + <bcp14>SHOULD</bcp14> be treated as an error. +</t> + +<section title="Chunk Extensions" anchor="chunked.extension"> + <x:anchor-alias value="chunk-ext"/> + <x:anchor-alias value="chunk-ext-name"/> + <x:anchor-alias value="chunk-ext-val"/> +<t> + The chunked coding allows each chunk to include zero or more chunk + extensions, immediately following the <x:ref>chunk-size</x:ref>, for the + sake of supplying per-chunk metadata (such as a signature or hash), + mid-message control information, or randomization of message body size. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="chunk-ext"/><iref primary="true" item="Grammar" subitem="chunk-ext-name"/><iref primary="true" item="Grammar" subitem="chunk-ext-val"/> + <x:ref>chunk-ext</x:ref> = *( <x:ref>BWS</x:ref> ";" <x:ref>BWS</x:ref> <x:ref>chunk-ext-name</x:ref> + [ <x:ref>BWS</x:ref> "=" <x:ref>BWS</x:ref> <x:ref>chunk-ext-val</x:ref> ] ) + + <x:ref>chunk-ext-name</x:ref> = <x:ref>token</x:ref> + <x:ref>chunk-ext-val</x:ref> = <x:ref>token</x:ref> / <x:ref>quoted-string</x:ref> +</sourcecode> +<t> + The chunked coding is specific to each connection and is likely to be + removed or recoded by each recipient (including intermediaries) before any + higher-level application would have a chance to inspect the extensions. + Hence, the use of chunk extensions is generally limited to specialized HTTP + services such as "long polling" (where client and server can have shared + expectations regarding the use of chunk extensions) or for padding within + an end-to-end secured connection. +</t> +<t> + A recipient <bcp14>MUST</bcp14> ignore unrecognized chunk extensions. + A server ought to limit the total length of chunk extensions received in a + request to an amount reasonable for the services provided, in the same way + that it applies length limitations and timeouts for other parts of a + message, and generate an appropriate <x:ref>4xx (Client Error)</x:ref> + response if that amount is exceeded. +</t> +</section> + +<section title="Chunked Trailer Section" anchor="chunked.trailer.section"> + <x:anchor-alias value="trailer-section"/> +<t> + A trailer section allows the sender to include additional fields at the end + of a chunked message in order to supply metadata that might be dynamically + generated while the content is sent, such as a message integrity + check, digital signature, or post-processing status. The proper use and + limitations of trailer fields are defined in <xref target="HTTP" x:rel="#trailer.fields"/>. +</t> +<sourcecode type="abnf7230"><iref primary="true" item="Grammar" subitem="trailer-section"/><iref primary="false" item="Grammar" subitem="field-line"/> + <x:ref>trailer-section</x:ref> = *( <x:ref>field-line</x:ref> <x:ref>CRLF</x:ref> ) +</sourcecode> +<t> + A recipient that removes the chunked coding from a message <bcp14>MAY</bcp14> + selectively retain or discard the received trailer fields. A recipient + that retains a received trailer field <bcp14>MUST</bcp14> either store/forward the + trailer field separately from the received header fields or merge the + received trailer field into the header section. 
+ A recipient <bcp14>MUST NOT</bcp14> merge a received trailer field into the + header section unless its corresponding header field definition + explicitly permits and instructs how the trailer field value can be + safely merged. +</t> +</section> + +<section title="Decoding Chunked" anchor="decoding.chunked"> +<t> + A process for decoding the chunked transfer coding + can be represented in pseudo-code as: +</t> +<sourcecode type="pseudocode"> + length := 0 + read chunk-size, chunk-ext (if any), and CRLF + while (chunk-size &gt; 0) { + read chunk-data and CRLF + append chunk-data to content + length := length + chunk-size + read chunk-size, chunk-ext (if any), and CRLF + } + read trailer field + while (trailer field is not empty) { + if (trailer fields are stored/forwarded separately) { + append trailer field to existing trailer fields + } + else if (trailer field is understood and defined as mergeable) { + merge trailer field with existing header fields + } + else { + discard trailer field + } + read trailer field + } + Content-Length := length + Remove "chunked" from Transfer-Encoding +</sourcecode> +</section> +</section> + +<section title="Transfer Codings for Compression" anchor="compression.codings"> + <iref primary="true" item="compress (transfer coding)"/> + <iref primary="true" item="x-compress (transfer coding)"/> + <iref primary="true" item="deflate (transfer coding)"/> + <iref primary="true" item="gzip (transfer coding)"/> + <iref primary="true" item="x-gzip (transfer coding)"/> +<t> + The following transfer coding names for compression are defined by + the same algorithm as their corresponding content coding: +</t> +<dl newline="true"> + <dt>compress (and x-compress)</dt> + <dd>See <xref target="HTTP" x:rel="#compress.coding"/>.</dd> + <dt>deflate</dt> + <dd>See <xref target="HTTP" x:rel="#deflate.coding"/>.</dd> + <dt>gzip (and x-gzip)</dt> + <dd>See <xref target="HTTP" x:rel="#gzip.coding"/>.</dd> +</dl> +<t> + The compression codings do not define any parameters. The presence + of parameters with any of these compression codings <bcp14>SHOULD</bcp14> be treated + as an error. +</t> +</section> + +<section title="Transfer Coding Registry" anchor="transfer.coding.registry"> +<t> + The "HTTP Transfer Coding Registry" defines the namespace for transfer + coding names. It is maintained at <eref target="https://www.iana.org/assignments/http-parameters"/>. +</t> +<t> + Registrations <bcp14>MUST</bcp14> include the following fields: +</t> +<ul> + <li>Name</li> + <li>Description</li> + <li>Pointer to specification text</li> +</ul> +<t> + Names of transfer codings <bcp14>MUST NOT</bcp14> overlap with names of content codings + (<xref target="HTTP" x:rel="#content.codings"/>) unless the encoding transformation is identical, as + is the case for the compression codings defined in + <xref target="compression.codings"/>. +</t> +<t> + The <x:ref>TE</x:ref> header field (<xref target="HTTP" x:rel="#field.te"/>) uses a + pseudo-parameter named "q" as the rank value when multiple transfer codings + are acceptable. Future registrations of transfer codings <bcp14>SHOULD NOT</bcp14> + define parameters called "q" (case-insensitively) in order to avoid + ambiguities. +</t> +<t> + Values to be added to this namespace require IETF Review (see + <xref target="RFC8126" x:fmt="of" x:sec="4.8"/>) and <bcp14>MUST</bcp14> + conform to the purpose of transfer coding defined in this specification. 
+</t> +<t> + Use of program names for the identification of encoding formats + is not desirable and is discouraged for future encodings. +</t> +</section> + +<section title="Negotiating Transfer Codings" anchor="transfer.coding.negotiation"> +<t> + The TE field (<xref target="HTTP" x:rel="#field.te"/>) is used in HTTP/1.1 to indicate + what transfer codings, besides chunked, the client is willing to accept + in the response and whether the client is willing to preserve + trailer fields in a chunked transfer coding. +</t> +<t> + A client <bcp14>MUST NOT</bcp14> send the chunked transfer coding name in TE; + chunked is always acceptable for HTTP/1.1 recipients. +</t> +<t> + Three examples of TE use are below. +</t> +<sourcecode type="http-message"> +TE: deflate +TE: +TE: trailers, deflate;q=0.5 +</sourcecode> +<t> + When multiple transfer codings are acceptable, the client <bcp14>MAY</bcp14> rank the + codings by preference using a case-insensitive "q" parameter (similar to + the qvalues used in content negotiation fields; see <xref target="HTTP" x:rel="#quality.values"/>). The rank value + is a real number in the range 0 through 1, where 0.001 is the least + preferred and 1 is the most preferred; a value of 0 means "not acceptable". +</t> +<t> + If the TE field value is empty or if no TE field is present, the only + acceptable transfer coding is chunked. A message with no transfer coding + is always acceptable. +</t> +<t> + The keyword "trailers" indicates that the sender will not discard trailer + fields, as described in <xref target="HTTP" x:rel="#trailer.fields"/>. +</t> +<t> + Since the TE header field only applies to the immediate connection, + a sender of TE <bcp14>MUST</bcp14> also send a "TE" connection option within the + <x:ref>Connection</x:ref> header field (<xref target="HTTP" x:rel="#field.connection"/>) + in order to prevent the TE header field from being forwarded by intermediaries + that do not support its semantics. +</t> +</section> +</section> + +<section title="Handling Incomplete Messages" anchor="incomplete.messages"> +<t> + A server that receives an incomplete request message, usually due to a + canceled request or a triggered timeout exception, <bcp14>MAY</bcp14> send an error + response prior to closing the connection. +</t> +<t> + A client that receives an incomplete response message, which can occur + when a connection is closed prematurely or when decoding a supposedly + chunked transfer coding fails, <bcp14>MUST</bcp14> record the message as incomplete. + Cache requirements for incomplete responses are defined in + <xref target="CACHING" x:rel="#incomplete.responses"/>. +</t> +<t> + If a response terminates in the middle of the header section (before the + empty line is received) and the status code might rely on header fields to + convey the full meaning of the response, then the client cannot assume + that meaning has been conveyed; the client might need to repeat the + request in order to determine what action to take next. +</t> +<t> + A message body that uses the chunked transfer coding is + incomplete if the zero-sized chunk that terminates the encoding has not + been received. A message that uses a valid <x:ref>Content-Length</x:ref> is + incomplete if the size of the message body received (in octets) is less than + the value given by Content-Length. 
A response that has neither chunked + transfer coding nor Content-Length is terminated by closure of the + connection and, if the header section was received intact, is considered + complete unless an error was indicated by the underlying connection + (e.g., an "incomplete close" in TLS would leave the response incomplete, + as described in <xref target="tls.connection.closure"/>). +</t> +</section> + +<section title="Connection Management" anchor="connection.management"> +<t> + HTTP messaging is independent of the underlying transport- or + session-layer connection protocol(s). HTTP only presumes a reliable + transport with in-order delivery of requests and the corresponding + in-order delivery of responses. The mapping of HTTP request and + response structures onto the data units of an underlying transport + protocol is outside the scope of this specification. +</t> +<t> + As described in <xref target="HTTP" x:rel="#routing.inbound"/>, the specific + connection protocols to be used for an HTTP interaction are determined by + client configuration and the <x:ref>target URI</x:ref>. + For example, the "http" URI scheme + (<xref target="HTTP" x:rel="#http.uri"/>) indicates a default connection of TCP + over IP, with a default TCP port of 80, but the client might be + configured to use a proxy via some other connection, port, or protocol. +</t> +<t> + HTTP implementations are expected to engage in connection management, + which includes maintaining the state of current connections, + establishing a new connection or reusing an existing connection, + processing messages received on a connection, detecting connection + failures, and closing each connection. + Most clients maintain multiple connections in parallel, including + more than one connection per server endpoint. + Most servers are designed to maintain thousands of concurrent connections, + while controlling request queues to enable fair use and detect + denial-of-service attacks. +</t> + +<section title="Establishment" anchor="persistent.establishment"> +<t> + It is beyond the scope of this specification to describe how connections + are established via various transport- or session-layer protocols. + Each HTTP connection maps to one underlying transport connection. +</t> +</section> + +<section title="Associating a Response to a Request" anchor="associating.response.to.request"> +<t> + HTTP/1.1 does not include a request identifier for associating a given + request message with its corresponding one or more response messages. + Hence, it relies on the order of response arrival to correspond exactly + to the order in which requests are made on the same connection. + More than one response message per request only occurs when one or more + informational responses (<x:ref>1xx</x:ref>; see <xref target="HTTP" x:rel="#status.1xx"/>) precede a + final response to the same request. +</t> +<t> + A client that has more than one outstanding request on a connection <bcp14>MUST</bcp14> + maintain a list of outstanding requests in the order sent and <bcp14>MUST</bcp14> + associate each received response message on that connection to the + first outstanding request that has not yet received a final + (non-<x:ref>1xx</x:ref>) response. 
+</t> +<t> + If a client receives data on a connection that doesn't have + outstanding requests, the client <bcp14>MUST NOT</bcp14> consider that data to be a + valid response; the client <bcp14>SHOULD</bcp14> close the connection, since message + delimitation is now ambiguous, unless the data consists only of one or + more CRLF (which can be discarded per <xref target="message.parsing"/>). +</t> +</section> + +<section title="Persistence" anchor="persistent.connections"> + <x:anchor-alias value="persistent connections"/> + <iref primary="false" item="close" x:for-anchor=""/> +<t> + HTTP/1.1 defaults to the use of <x:dfn>persistent connections</x:dfn>, + allowing multiple requests and responses to be carried over a single + connection. HTTP implementations <bcp14>SHOULD</bcp14> support persistent connections. +</t> +<t> + A recipient determines whether a connection is persistent or not based on + the protocol version and <x:ref>Connection</x:ref> header field + (<xref target="HTTP" x:rel="#field.connection"/>) in the + most recently received message, if any: +</t> +<ul> + <li>If the "<x:ref>close</x:ref>" connection option is present + (<xref target="persistent.tear-down"/>), the + connection will not persist after the current response; else,</li> + <li>If the received protocol is HTTP/1.1 (or later), the connection will + persist after the current response; else,</li> + <li>If the received protocol is HTTP/1.0, the "keep-alive" connection + option is present, either the recipient is not a proxy or the + message is a response, and the recipient wishes to honor the + HTTP/1.0 "keep-alive" mechanism, the connection will persist after + the current response; otherwise,</li> + <li>The connection will close after the current response.</li> +</ul> +<t> + A client that does not support <x:ref>persistent connections</x:ref> <bcp14>MUST</bcp14> + send the "<x:ref>close</x:ref>" connection option in every request message. +</t> +<t> + A server that does not support <x:ref>persistent connections</x:ref> <bcp14>MUST</bcp14> + send the "<x:ref>close</x:ref>" connection option in every response message + that does not have a <x:ref>1xx (Informational)</x:ref> status code. +</t> +<t> + A client <bcp14>MAY</bcp14> send additional requests on a persistent connection until it + sends or receives a "<x:ref>close</x:ref>" connection option or receives an + HTTP/1.0 response without a "keep-alive" connection option. +</t> +<t> + In order to remain persistent, all messages on a connection need to + have a self-defined message length (i.e., one not defined by closure + of the connection), as described in <xref target="message.body"/>. + A server <bcp14>MUST</bcp14> read the entire request message body or close + the connection after sending its response; otherwise, the + remaining data on a persistent connection would be misinterpreted + as the next request. Likewise, + a client <bcp14>MUST</bcp14> read the entire response message body if it intends + to reuse the same connection for a subsequent request. +</t> +<t> + A proxy server <bcp14>MUST NOT</bcp14> maintain a persistent connection with an + HTTP/1.0 client (see <xref target="compatibility.with.http.1.0.persistent.connections"/> for + information and discussion of the problems with the Keep-Alive header field + implemented by many HTTP/1.0 clients). +</t> +<t> + See <xref target="compatibility.with.http.1.0.persistent.connections"/> + for more information on backwards compatibility with HTTP/1.0 clients. 
+</t> + +<section title="Retrying Requests" anchor="persistent.retrying.requests"> +<t> + Connections can be closed at any time, with or without intention. + Implementations ought to anticipate the need to recover + from asynchronous close events. The conditions under which a client can + automatically retry a sequence of outstanding requests are defined in + <xref target="HTTP" x:rel="#idempotent.methods"/>. + </t> +</section> + +<section title="Pipelining" anchor="pipelining"> + <x:anchor-alias value="pipeline"/> +<t> + A client that supports persistent connections <bcp14>MAY</bcp14> <x:dfn>pipeline</x:dfn> + its requests (i.e., send multiple requests without waiting for each + response). A server <bcp14>MAY</bcp14> process a sequence of pipelined requests in + parallel if they all have safe methods (<xref target="HTTP" x:rel="#safe.methods"/>), but it <bcp14>MUST</bcp14> send + the corresponding responses in the same order that the requests were + received. +</t> +<t> + A client that pipelines requests <bcp14>SHOULD</bcp14> retry unanswered requests if the + connection closes before it receives all of the corresponding responses. + When retrying pipelined requests after a failed connection (a connection + not explicitly closed by the server in its last complete response), a + client <bcp14>MUST NOT</bcp14> pipeline immediately after connection establishment, + since the first remaining request in the prior pipeline might have caused + an error response that can be lost again if multiple requests are sent on a + prematurely closed connection (see the TCP reset problem described in + <xref target="persistent.tear-down"/>). +</t> +<t> + Idempotent methods (<xref target="HTTP" x:rel="#idempotent.methods"/>) are significant to pipelining + because they can be automatically retried after a connection failure. + A user agent <bcp14>SHOULD NOT</bcp14> pipeline requests after a non-idempotent method, + until the final response status code for that method has been received, + unless the user agent has a means to detect and recover from partial + failure conditions involving the pipelined sequence. +</t> +<t> + An intermediary that receives pipelined requests <bcp14>MAY</bcp14> pipeline those + requests when forwarding them inbound, since it can rely on the outbound + user agent(s) to determine what requests can be safely pipelined. If the + inbound connection fails before receiving a response, the pipelining + intermediary <bcp14>MAY</bcp14> attempt to retry a sequence of requests that have yet + to receive a response if the requests all have idempotent methods; + otherwise, the pipelining intermediary <bcp14>SHOULD</bcp14> forward any received + responses and then close the corresponding outbound connection(s) so that + the outbound user agent(s) can recover accordingly. +</t> +</section> +</section> + +<section title="Concurrency" anchor="persistent.concurrency"> +<t> + A client ought to limit the number of simultaneous open + connections that it maintains to a given server. +</t> +<t> + Previous revisions of HTTP gave a specific number of connections as a + ceiling, but this was found to be impractical for many applications. As a + result, this specification does not mandate a particular maximum number of + connections but, instead, encourages clients to be conservative when opening + multiple connections. 
+</t> +<t> + Multiple connections are typically used to avoid the "head-of-line + blocking" problem, wherein a request that takes significant server-side + processing and/or transfers very large content would block subsequent + requests on the + same connection. However, each connection consumes server resources. +</t> +<t> + Furthermore, using multiple connections can cause undesirable side effects + in congested networks. + Using larger numbers of connections can also cause side effects in + otherwise uncongested networks, because their aggregate and initially + synchronized sending behavior can cause congestion that would not have + been present if fewer parallel connections had been used. +</t> +<t> + Note that a server might reject traffic that it deems abusive or + characteristic of a denial-of-service attack, such as an excessive number + of open connections from a single client. +</t> +</section> + +<section title="Failures and Timeouts" anchor="persistent.failures"> +<t> + Servers will usually have some timeout value beyond which they will + no longer maintain an inactive connection. Proxy servers might make + this a higher value since it is likely that the client will be making + more connections through the same proxy server. The use of persistent + connections places no requirements on the length (or existence) of + this timeout for either the client or the server. +</t> +<t> + A client or server that wishes to time out <bcp14>SHOULD</bcp14> issue a graceful close + on the connection. Implementations <bcp14>SHOULD</bcp14> constantly monitor open + connections for a received closure signal and respond to it as appropriate, + since prompt closure of both sides of a connection enables allocated system + resources to be reclaimed. +</t> +<t> + A client, server, or proxy <bcp14>MAY</bcp14> close the transport connection at any + time. For example, a client might have started to send a new request + at the same time that the server has decided to close the "idle" + connection. From the server's point of view, the connection is being + closed while it was idle, but from the client's point of view, a + request is in progress. +</t> +<t> + A server <bcp14>SHOULD</bcp14> sustain persistent connections, when possible, and allow + the underlying transport's flow-control mechanisms to resolve temporary overloads rather + than terminate connections with the expectation that clients will retry. + The latter technique can exacerbate network congestion or server load. +</t> +<t> + A client sending a message body <bcp14>SHOULD</bcp14> monitor + the network connection for an error response while it is transmitting + the request. If the client sees a response that indicates the server does + not wish to receive the message body and is closing the connection, the + client <bcp14>SHOULD</bcp14> immediately cease transmitting the body and close its side + of the connection. +</t> +</section> + +<section title="Tear-down" anchor="persistent.tear-down"> + <iref primary="false" item="Connection header field" x:for-anchor=""/> + <iref primary="true" item="close" x:for-anchor=""/> + <x:anchor-alias value="close"/> +<t> + The "close" connection option is defined as a signal that the sender + will close this connection after completion of the response. + A sender <bcp14>SHOULD</bcp14> send a <x:ref>Connection</x:ref> header field + (<xref target="HTTP" x:rel="#field.connection"/>) containing the "close" connection option + when it intends to close a connection. 
For example, +</t> +<sourcecode type="http-message"> +Connection: close +</sourcecode> +<t> + as a request header field indicates that this is the last request that + the client will send on this connection, while in a response, the same + field indicates that the server is going to close this connection after + the response message is complete. +</t> +<t anchor="field.close"> + <iref primary="true" item="Fields" subitem="Close" x:for-anchor=""/> + <rdf:Description> + <comments xmlns="urn:ietf:id:draft-ietf-httpbis-p2-semantics#">(reserved)</comments> + </rdf:Description> + Note that the field name "Close" is reserved, since using that name as a + header field might conflict with the "close" connection option. +</t> +<t> + A client that sends a "close" connection option <bcp14>MUST NOT</bcp14> + send further requests on that connection (after the one containing the + "close") and <bcp14>MUST</bcp14> close the connection after reading the + final response message corresponding to this request. +</t> +<t> + A server that receives a "close" connection option <bcp14>MUST</bcp14> + initiate closure of the connection (see below) after it sends the + final response to the request that contained the "close" connection option. + The server <bcp14>SHOULD</bcp14> send a "close" connection option in its final response + on that connection. The server <bcp14>MUST NOT</bcp14> process any further requests + received on that connection. +</t> +<t> + A server that sends a "close" connection option <bcp14>MUST</bcp14> + initiate closure of the connection (see below) after it sends the + response containing the "close" connection option. The server <bcp14>MUST NOT</bcp14> process + any further requests received on that connection. +</t> +<t> + A client that receives a "close" connection option <bcp14>MUST</bcp14> + cease sending requests on that connection and close the connection + after reading the response message containing the "close" connection option; + if additional pipelined requests had been sent on the connection, + the client <bcp14>SHOULD NOT</bcp14> assume that they will be processed by the server. +</t> +<t> + If a server performs an immediate close of a TCP connection, there is a + significant risk that the client will not be able to read the last HTTP + response. If the server receives additional data from the client on a + fully closed connection, such as another request sent by the + client before receiving the server's response, the server's TCP stack will + send a reset packet to the client; unfortunately, the reset packet might + erase the client's unacknowledged input buffers before they can be read + and interpreted by the client's HTTP parser. +</t> +<t> + To avoid the TCP reset problem, servers typically close a connection in + stages. First, the server performs a half-close by closing only the write + side of the read/write connection. The server then continues to read from + the connection until it receives a corresponding close by the client, or + until the server is reasonably certain that its own TCP stack has received + the client's acknowledgement of the packet(s) containing the server's last + response. Finally, the server fully closes the connection. +</t> +<t> + It is unknown whether the reset problem is exclusive to TCP or might also + be found in other transport connection protocols. +</t> +<t> + Note that a TCP connection that is half-closed by the client does not + delimit a request message, nor does it imply that the client is no longer + interested in a response. 
In general, transport signals cannot be relied + upon to signal edge cases, since HTTP/1.1 is independent of transport. +</t> +</section> + +<section title="TLS Connection Initiation" anchor="tls.connection.initiation"> +<t> + Conceptually, HTTP/TLS is simply sending HTTP messages over a connection + secured via TLS <xref target="TLS13"/>. +</t> +<t> + The HTTP client also acts as the TLS client. It initiates a connection to + the server on the appropriate port and sends the TLS ClientHello to begin + the TLS handshake. When the TLS handshake has finished, the client may then + initiate the first HTTP request. All HTTP data <bcp14>MUST</bcp14> be sent as TLS + "application data" but is otherwise treated like a normal connection for + HTTP (including potential reuse as a persistent connection). +</t> +</section> + +<section title="TLS Connection Closure" anchor="tls.connection.closure"> +<t> + TLS uses an exchange of closure alerts prior to (non-error) connection + closure to provide secure connection closure; see <xref section="6.1" target="TLS13"/>. When a + valid closure alert is received, an implementation can be assured that no + further data will be received on that connection. +</t> +<t> + When an implementation knows that it has sent or received all the + message data that it cares about, typically by detecting HTTP message + boundaries, it might generate an "incomplete close" by sending a + closure alert and then closing the connection without waiting to + receive the corresponding closure alert from its peer. +</t> +<t> + An incomplete close does not call into question the security of the data + already received, but it could indicate that subsequent data might have been + truncated. As TLS is not directly aware of HTTP message framing, it is + necessary to examine the HTTP data itself to determine whether messages are + complete. Handling of incomplete messages is defined in + <xref target="incomplete.messages"/>. +</t> +<t> + When encountering an incomplete close, a client <bcp14>SHOULD</bcp14> treat as completed + all requests for which it has received either +</t> +<ol> + <li> + as much data as specified in the <x:ref>Content-Length</x:ref> header + field or + </li> + <li> + the terminal zero-length chunk (when <x:ref>Transfer-Encoding</x:ref> of chunked is used). + </li> +</ol> +<t> + A response that has neither chunked + transfer coding nor Content-Length is complete only if a valid closure alert + has been received. Treating an incomplete message as complete could expose + implementations to attack. +</t> +<t> + A client detecting an incomplete close <bcp14>SHOULD</bcp14> recover gracefully. +</t> +<t> + Clients <bcp14>MUST</bcp14> send a closure alert before closing the connection. + Clients that do not expect to receive any more data <bcp14>MAY</bcp14> choose not + to wait for the server's closure alert and simply close the + connection, thus generating an incomplete close on the server side. +</t> +<t> + Servers <bcp14>SHOULD</bcp14> be prepared to receive an incomplete close from the client, + since the client can often locate the end of server data. +</t> +<t> + Servers <bcp14>MUST</bcp14> attempt to initiate an exchange of closure alerts with + the client before closing the connection. Servers <bcp14>MAY</bcp14> close the + connection after sending the closure alert, thus generating an + incomplete close on the client side. 
+</t> +</section> +</section> + +<section title="Enclosing Messages as Data" anchor="enclosing.messages"> + +<section title="Media Type message/http" anchor="media.type.message.http"> +<iref item="Media Type" subitem="message/http" primary="true"/> +<iref item="message/http Media Type" primary="true"/> +<t> + The "message/http" media type can be used to enclose a single HTTP request or + response message, provided that it obeys the MIME restrictions for all + "message" types regarding line length and encodings. Because of the line + length limitations, field values within "message/http" are allowed to use + line folding (<x:ref>obs-fold</x:ref>), as described in + <xref target="line.folding"/>, to convey the field value over multiple + lines. A recipient of "message/http" data <bcp14>MUST</bcp14> replace any obsolete line + folding with one or more SP characters when the message is consumed. +</t> +<dl> + <dt>Type name:</dt> + <dd>message</dd> + <dt>Subtype name:</dt> + <dd>http</dd> + <dt>Required parameters:</dt> + <dd>N/A</dd> + <dt>Optional parameters:</dt> + <dd> + <t>version, msgtype</t> + <dl> + <dt>version:</dt> + <dd> + The HTTP-version number of the enclosed message + (e.g., "1.1"). If not present, the version can be + determined from the first line of the body. + </dd> + <dt>msgtype:</dt> + <dd> + The message type — "request" or "response". If not + present, the type can be determined from the first + line of the body. + </dd> + </dl> + </dd> + <dt>Encoding considerations:</dt> + <dd>only "7bit", "8bit", or "binary" are permitted</dd> + <dt>Security considerations:</dt> + <dd>see <xref target="security.considerations"/></dd> + <dt>Interoperability considerations:</dt> + <dd>N/A</dd> + <dt>Published specification:</dt> + <dd>RFC 9112 (see <xref target="media.type.message.http"/>).</dd> + <dt>Applications that use this media type:</dt> + <dd>N/A</dd> + <dt>Fragment identifier considerations:</dt> + <dd>N/A</dd> + <dt>Additional information:</dt> + <dd> + <dl> + <dt>Magic number(s):</dt> + <dd>N/A</dd> + <dt>Deprecated alias names for this type:</dt> + <dd>N/A</dd> + <dt>File extension(s):</dt> + <dd>N/A</dd> + <dt>Macintosh file type code(s):</dt> + <dd>N/A</dd> + </dl> + </dd> + <dt>Person and email address to contact for further information:</dt> + <dd>See Authors' Addresses section.</dd> + <dt>Intended usage:</dt> + <dd>COMMON</dd> + <dt>Restrictions on usage:</dt> + <dd>N/A</dd> + <dt>Author:</dt> + <dd>See Authors' Addresses section.</dd> + <dt>Change controller:</dt> + <dd>IESG</dd> +</dl> +</section> + +<section title="Media Type application/http" anchor="media.type.application.http"> +<iref item="Media Type" subitem="application/http" primary="true"/> +<iref item="application/http Media Type" primary="true"/> +<t> + The "application/http" media type can be used to enclose a pipeline of one or more + HTTP request or response messages (not intermixed). +</t> +<dl> + <dt>Type name:</dt> + <dd>application</dd> + <dt>Subtype name:</dt> + <dd>http</dd> + <dt>Required parameters:</dt> + <dd>N/A</dd> + <dt>Optional parameters:</dt> + <dd> + <t> + version, msgtype + </t> + <dl> + <dt>version:</dt> + <dd> + The HTTP-version number of the enclosed messages + (e.g., "1.1"). If not present, the version can be + determined from the first line of the body. + </dd> + <dt>msgtype:</dt> + <dd> + The message type — "request" or "response". If not + present, the type can be determined from the first + line of the body. 
+ </dd> + </dl> + </dd> + <dt>Encoding considerations:</dt> + <dd> + HTTP messages enclosed by this type + are in "binary" format; use of an appropriate + Content-Transfer-Encoding is required when + transmitted via email. + </dd> + <dt>Security considerations:</dt> + <dd> + see <xref target="security.considerations"/> + </dd> + <dt>Interoperability considerations:</dt> + <dd>N/A</dd> + <dt>Published specification:</dt> + <dd> + RFC 9112 (see <xref target="media.type.application.http"/>). + </dd> + <dt>Applications that use this media type:</dt> + <dd>N/A</dd> + <dt>Fragment identifier considerations:</dt> + <dd>N/A</dd> + <dt>Additional information:</dt> + <dd> + <dl> + <dt>Deprecated alias names for this type:</dt> + <dd>N/A</dd> + <dt>Magic number(s):</dt> + <dd>N/A</dd> + <dt>File extension(s):</dt> + <dd>N/A</dd> + <dt>Macintosh file type code(s):</dt> + <dd>N/A</dd> + </dl> + </dd> + <dt>Person and email address to contact for further information:</dt> + <dd>See Authors' Addresses section.</dd> + <dt>Intended usage:</dt> + <dd>COMMON</dd> + <dt>Restrictions on usage:</dt> + <dd>N/A</dd> + <dt>Author:</dt> + <dd>See Authors' Addresses section.</dd> + <dt>Change controller:</dt> + <dd>IESG</dd> +</dl> +</section> +</section> + +<section title="Security Considerations" anchor="security.considerations"> +<t> + This section is meant to inform developers, information providers, and + users about known security considerations relevant to HTTP message syntax + and parsing. Security considerations about HTTP semantics, + content, and routing are addressed in <xref target="HTTP"/>. +</t> + +<section title="Response Splitting" anchor="response.splitting"> +<t> + Response splitting (a.k.a. CRLF injection) is a common technique, used in + various attacks on Web usage, that exploits the line-based nature of HTTP + message framing and the ordered association of requests to responses on + persistent connections <xref target="Klein"/>. This technique can be + particularly damaging when the requests pass through a shared cache. +</t> +<t> + Response splitting exploits a vulnerability in servers (usually within an + application server) where an attacker can send encoded data within some + parameter of the request that is later decoded and echoed within any of the + response header fields of the response. If the decoded data is crafted to + look like the response has ended and a subsequent response has begun, the + response has been split, and the content within the apparent second response + is controlled by the attacker. The attacker can then make any other request + on the same persistent connection and trick the recipients (including + intermediaries) into believing that the second half of the split is an + authoritative answer to the second request. +</t> +<t> + For example, a parameter within the request-target might be read by an + application server and reused within a redirect, resulting in the same + parameter being echoed in the <x:ref>Location</x:ref> header field of the + response. If the parameter is decoded by the application and not properly + encoded when placed in the response field, the attacker can send encoded + CRLF octets and other content that will make the application's single + response look like two or more responses. +</t> +<t> + A common defense against response splitting is to filter requests for data + that looks like encoded CR and LF (e.g., "%0D" and "%0A"). 
However, that + assumes the application server is only performing URI decoding rather + than more obscure data transformations like charset transcoding, XML entity + translation, base64 decoding, sprintf reformatting, etc. A more effective + mitigation is to prevent anything other than the server's core protocol + libraries from sending a CR or LF within the header section, which means + restricting the output of header fields to APIs that filter for bad octets + and not allowing application servers to write directly to the protocol + stream. +</t> +</section> + +<section title="Request Smuggling" anchor="request.smuggling"> +<t> + Request smuggling (<xref target="Linhart"/>) is a technique that exploits + differences in protocol parsing among various recipients to hide additional + requests (which might otherwise be blocked or disabled by policy) within an + apparently harmless request. Like response splitting, request smuggling + can lead to a variety of attacks on HTTP usage. +</t> +<t> + This specification has introduced new requirements on request parsing, + particularly with regard to message framing in + <xref target="message.body.length"/>, to reduce the effectiveness of + request smuggling. +</t> +</section> + +<section title="Message Integrity" anchor="message.integrity"> +<t> + HTTP does not define a specific mechanism for ensuring message integrity, + instead relying on the error-detection ability of underlying transport + protocols and the use of length or chunk-delimited framing to detect + completeness. Historically, the lack of + a single integrity mechanism has been justified by the informal nature of + most HTTP communication. However, the prevalence of HTTP as an information + access mechanism has resulted in its increasing use within environments + where verification of message integrity is crucial. +</t> +<t> + The mechanisms provided with the "https" scheme, such as authenticated + encryption, provide protection against modification of messages. Care + is needed, however, to ensure that connection closure cannot be used to + truncate messages (see <xref target="tls.connection.closure"/>). User agents + might refuse to accept incomplete messages or treat them specially. For + example, a browser being used to view medical history or drug interaction + information needs to indicate to the user when such information is detected + by the protocol to be incomplete, expired, or corrupted during transfer. + Such mechanisms might be selectively enabled via user agent extensions or + the presence of message integrity metadata in a response. +</t> +<t> + The "http" scheme provides no protection against accidental or malicious + modification of messages. +</t> +<t> + Extensions to the protocol might be used to mitigate the risk of unwanted + modification of messages by intermediaries, even when the "https" scheme is + used. Integrity might be assured by using message authentication codes + or digital + signatures that are selectively added to messages via extensible metadata + fields. +</t> +</section> + +<section title="Message Confidentiality" anchor="message.confidentiality"> +<t> + HTTP relies on underlying transport protocols to provide message + confidentiality when that is desired. HTTP has been specifically designed + to be independent of the transport protocol, such that it can be used + over many forms of encrypted connection, with the selection of + such transports being identified by the choice of URI scheme or within + user agent configuration. 
+</t> +<t> + The "https" scheme can be used to identify resources that require a + confidential connection, as described in <xref target="HTTP" x:rel="#https.uri"/>. +</t> +</section> +</section> + +<section title="IANA Considerations" anchor="IANA.considerations"> +<t> + The change controller for the following registrations is: + "IETF (iesg@ietf.org) - Internet Engineering Task Force". +</t> + +<section title="Field Name Registration" anchor="field.name.registration"> +<t> + IANA has added the following field names to the "Hypertext Transfer Protocol (HTTP) Field + Name Registry" at <eref target="https://www.iana.org/assignments/http-fields"/>, + as described in <xref target="HTTP" x:rel="#field.name.registration"/>. +</t> +<?BEGININC build/draft-ietf-httpbis-messaging-latest.iana-headers ?> +<!--AUTOGENERATED FROM extract-header-defs.xslt, do not edit manually--> +<table align="left" anchor="iana.header.registration.table"> + <thead> + <tr> + <th>Field Name</th> + <th>Status</th> + <th>Section</th> + <th>Comments</th> + </tr> + </thead> + <tbody> + <tr> + <td>Close</td> + <td>permanent</td> + <td> + <xref target="persistent.tear-down" format="counter"/> + </td> + <td>(reserved)</td> + </tr> + <tr> + <td>MIME-Version</td> + <td>permanent</td> + <td> + <xref target="mime-version" format="counter"/> + </td> + <td/> + </tr> + <tr> + <td>Transfer-Encoding</td> + <td>permanent</td> + <td> + <xref target="field.transfer-encoding" format="counter"/> + </td> + <td/> + </tr> + </tbody> +</table> +<!--(END)--> +<?ENDINC build/draft-ietf-httpbis-messaging-latest.iana-headers ?> +</section> + +<section title="Media Type Registration" anchor="media.type.http"> +<t> + IANA has updated the "Media Types" registry at + <eref target="https://www.iana.org/assignments/media-types"/> + with the registration information in Sections + <xref target="media.type.message.http" format="counter"/> and + <xref target="media.type.application.http" format="counter"/> for the media types + "message/http" and "application/http", respectively. +</t> +</section> + +<section title="Transfer Coding Registration" anchor="transfer.coding.registration"> +<t> + IANA has updated the "HTTP Transfer Coding Registry" at + <eref target="https://www.iana.org/assignments/http-parameters/"/> + with the registration procedure of <xref target="transfer.coding.registry"/> + and the content coding names summarized in the table below. 
+</t> +<table align="left" anchor="iana.transfer.coding.registration.table"> + <thead> + <tr> + <th>Name</th> + <th>Description</th> + <th>Section</th> + </tr> + </thead> + <tbody> + <tr> + <td>chunked</td> + <td>Transfer in a series of chunks</td> + <td><xref target="chunked.encoding" format="counter"/></td> + </tr> + <tr> + <td>compress</td> + <td>UNIX "compress" data format <xref target="Welch"/></td> + <td><xref target="compression.codings" format="counter"/></td> + </tr> + <tr> + <td>deflate</td> + <td>"deflate" compressed data (<xref target="RFC1951"/>) inside + the "zlib" data format (<xref target="RFC1950"/>)</td> + <td><xref target="compression.codings" format="counter"/></td> + </tr> + <tr> + <td>gzip</td> + <td>GZIP file format <xref target="RFC1952"/></td> + <td><xref target="compression.codings" format="counter"/></td> + </tr> + <tr> + <td>trailers</td> + <td>(reserved)</td> + <td><xref target="transfer.coding.registration" format="counter"/></td> + </tr> + <tr> + <td>x-compress</td> + <td>Deprecated (alias for compress)</td> + <td><xref target="compression.codings" format="counter"/></td> + </tr> + <tr> + <td>x-gzip</td> + <td>Deprecated (alias for gzip)</td> + <td><xref target="compression.codings" format="counter"/></td> + </tr> + </tbody> +</table> +<aside> + <t> + <x:h>Note:</x:h> the coding name "trailers" is reserved because its use would + conflict with the keyword "trailers" in the <x:ref>TE</x:ref> + header field (<xref target="HTTP" x:rel="#field.te"/>). + </t> +</aside> +</section> + +<section title="ALPN Protocol ID Registration" anchor="alpn.registration"> +<t> + IANA has updated the + "TLS Application-Layer Protocol Negotiation (ALPN) Protocol IDs" registry at + <eref target="https://www.iana.org/assignments/tls-extensiontype-values/"/> + with the registration below: +</t> +<table> + <thead> + <tr> + <th>Protocol</th> + <th>Identification Sequence</th> + <th>Reference</th> + </tr> + </thead> + <tbody> + <tr> + <td>HTTP/1.1</td> + <td>0x68 0x74 0x74 0x70 0x2f 0x31 0x2e 0x31 ("http/1.1")</td> + <td>RFC 9112</td> + </tr> + </tbody> +</table> +</section> + +</section> + +</middle> +<back> +<displayreference target="HTTP10" to="HTTP/1.0"/> + +<references title="Normative References"> + +<reference anchor="HTTP"> + <x:source href="rfc9110.xml" basename="rfc9110"> + <x:defines>OWS</x:defines> + <x:defines>RWS</x:defines> + <x:defines>BWS</x:defines> + <x:defines>port</x:defines> + <x:defines>transfer-coding</x:defines> + <x:defines>uri-host</x:defines> + </x:source> +</reference> + +<reference anchor="CACHING"> + <x:source href="rfc9111.xml" basename="rfc9111"/> +</reference> + +<reference anchor="URI"> + <front> + <title abbrev="URI Generic Syntax">Uniform Resource Identifier (URI): Generic Syntax</title> + <author initials="T." surname="Berners-Lee" fullname="Tim Berners-Lee"/> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="L." surname="Masinter" fullname="Larry Masinter"/> + <date month="January" year="2005"/> + </front> + <seriesInfo name="STD" value="66"/> + <seriesInfo name="RFC" value="3986"/> +</reference> + +<reference anchor="RFC5234"> + <front> + <title abbrev="ABNF for Syntax Specifications">Augmented BNF for Syntax Specifications: ABNF</title> + <author initials="D." surname="Crocker" fullname="Dave Crocker" role="editor"/> + <author initials="P." 
surname="Overell" fullname="Paul Overell"/> + <date month="January" year="2008"/> + </front> + <seriesInfo name="STD" value="68"/> + <seriesInfo name="RFC" value="5234"/> +</reference> + +<reference anchor="RFC7405"> + <front> + <title>Case-Sensitive String Support in ABNF</title> + <author initials="P." surname="Kyzivat" fullname="Dave Kyzivat"/> + <date month="December" year="2014"/> + </front> + <seriesInfo name="RFC" value="7405"/> +</reference> + +<reference anchor="RFC2119"> + <front> + <title>Key words for use in RFCs to Indicate Requirement Levels</title> + <author initials="S." surname="Bradner" fullname="Scott Bradner"/> + <date month="March" year="1997"/> + </front> + <seriesInfo name="BCP" value="14"/> + <seriesInfo name="RFC" value="2119"/> +</reference> + +<reference anchor="RFC8174"> + <front> + <title>Ambiguity of Uppercase vs Lowercase in RFC 2119 Key Words</title> + <author initials="B." surname="Leiba" fullname="Barry Leiba"/> + <date year="2017" month="May"/> + </front> + <seriesInfo name="BCP" value="14"/> + <seriesInfo name="RFC" value="8174"/> +</reference> + +<reference anchor="USASCII"> + <front> + <title>Coded Character Set -- 7-bit American Standard Code for Information Interchange</title> + <author> + <organization>American National Standards Institute</organization> + </author> + <date year="1986"/> + </front> + <seriesInfo name="ANSI" value="X3.4"/> +</reference> + +<reference anchor="RFC1950"> + <front> + <title>ZLIB Compressed Data Format Specification version 3.3</title> + <author initials="P." surname="Deutsch" fullname="L. Peter Deutsch"/> + <author initials="J-L." surname="Gailly" fullname="Jean-Loup Gailly"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1950"/> +</reference> + +<reference anchor="RFC1951"> + <front> + <title>DEFLATE Compressed Data Format Specification version 1.3</title> + <author initials="P." surname="Deutsch" fullname="L. Peter Deutsch"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1951"/> +</reference> + +<reference anchor="RFC1952"> + <front> + <title>GZIP file format specification version 4.3</title> + <author initials="P." surname="Deutsch" fullname="L. Peter Deutsch"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1952"/> +</reference> + +<reference anchor="TLS13"> + <front> + <title>The Transport Layer Security (TLS) Protocol Version 1.3</title> + <author initials="E." surname="Rescorla" fullname="E. Rescorla"/> + <date year="2018" month="August"/> + </front> + <seriesInfo name="RFC" value="8446"/> +</reference> + +<reference anchor="Welch" target="https://ieeexplore.ieee.org/document/1659158/"> + <front> + <title>A Technique for High-Performance Data Compression</title> + <author initials="T." surname="Welch" fullname="Terry A. Welch"/> + <date month="June" year="1984"/> + </front> + <seriesInfo name="IEEE Computer" value="17(6)"/> + <seriesInfo name="DOI" value="10.1109/MC.1984.1659158"/> +</reference> + +</references> + +<references title="Informative References" anchor="informative.references"> + +<reference anchor="HTTP10"> + <front> + <title abbrev="HTTP/1.0">Hypertext Transfer Protocol -- HTTP/1.0</title> + <author initials="T." surname="Berners-Lee" fullname="Tim Berners-Lee"/> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="H." 
surname="Frystyk" fullname="Henrik Frystyk Nielsen"/> + <date month="May" year="1996"/> + </front> + <seriesInfo name="RFC" value="1945"/> +</reference> + +<reference anchor="RFC2045"> + <front> + <title abbrev="Internet Message Bodies">Multipurpose Internet Mail Extensions (MIME) Part One: Format of Internet Message Bodies</title> + <author initials="N." surname="Freed" fullname="Ned Freed"/> + <author initials="N." surname="Borenstein" fullname="Nathaniel S. Borenstein"/> + <date month="November" year="1996"/> + </front> + <seriesInfo name="RFC" value="2045"/> +</reference> + +<reference anchor="RFC2046"> + <front> + <title abbrev="Media Types">Multipurpose Internet Mail Extensions (MIME) Part Two: Media Types</title> + <author initials="N." surname="Freed" fullname="Ned Freed"/> + <author initials="N." surname="Borenstein" fullname="Nathaniel S. Borenstein"/> + <date month="November" year="1996"/> + </front> + <seriesInfo name="RFC" value="2046"/> +</reference> + +<reference anchor="RFC2049"> + <front> + <title abbrev="MIME Conformance">Multipurpose Internet Mail Extensions (MIME) Part Five: Conformance Criteria and Examples</title> + <author initials="N." surname="Freed" fullname="Ned Freed"/> + <author initials="N." surname="Borenstein" fullname="Nathaniel S. Borenstein"/> + <date month="November" year="1996"/> + </front> + <seriesInfo name="RFC" value="2049"/> +</reference> + +<reference anchor="RFC2068"> + <front> + <title>Hypertext Transfer Protocol -- HTTP/1.1</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding"/> + <author initials="J." surname="Gettys" fullname="Jim Gettys"/> + <author initials="J." surname="Mogul" fullname="Jeffrey C. Mogul"/> + <author initials="H." surname="Frystyk" fullname="Henrik Frystyk Nielsen"/> + <author initials="T." surname="Berners-Lee" fullname="Tim Berners-Lee"/> + <date month="January" year="1997"/> + </front> + <seriesInfo name="RFC" value="2068"/> +</reference> + +<reference anchor="RFC2557"> + <front> + <title abbrev="MIME Encapsulation of Aggregate Documents">MIME Encapsulation of Aggregate Documents, such as HTML (MHTML)</title> + <author initials="J." surname="Palme" fullname="Jacob Palme"/> + <author initials="A." surname="Hopmann" fullname="Alex Hopmann"/> + <author initials="N." surname="Shelness" fullname="Nick Shelness"/> + <date year="1999" month="March"/> + </front> + <seriesInfo name="RFC" value="2557"/> +</reference> + +<reference anchor="RFC5322"> + <front> + <title>Internet Message Format</title> + <author initials="P." surname="Resnick" fullname="P. Resnick" role="editor"/> + <date year="2008" month="October"/> + </front> + <seriesInfo name="RFC" value="5322"/> +</reference> + +<reference anchor="RFC7230"> + <front> + <title>Hypertext Transfer Protocol (HTTP/1.1): Message Syntax and Routing</title> + <author initials="R." surname="Fielding" fullname="Roy T. Fielding" role="editor"/> + <author initials="J." surname="Reschke" fullname="Julian F. Reschke" role="editor"/> + <date month="June" year="2014"/> + </front> + <seriesInfo name="RFC" value="7230"/> +</reference> + +<reference anchor="RFC8126"> + <front> + <title>Guidelines for Writing an IANA Considerations Section in RFCs</title> + <author initials="M." surname="Cotton" fullname="M. Cotton"/> + <author initials="B." surname="Leiba" fullname="B. Leiba"/> + <author initials="T." surname="Narten" fullname="T. 
Narten"/> + <date year="2017" month="June"/> + </front> + <seriesInfo name="BCP" value="26"/> + <seriesInfo name="RFC" value="8126"/> +</reference> + +<reference anchor="Klein" target="https://packetstormsecurity.com/papers/general/whitepaper_httpresponse.pdf"> + <front> + <title>Divide and Conquer - HTTP Response Splitting, Web Cache Poisoning Attacks, and Related Topics</title> + <author initials="A." surname="Klein" fullname="Amit Klein"/> + <date year="2004" month="March"/> + </front> +</reference> + +<reference anchor="Linhart" target="https://www.cgisecurity.com/lib/HTTP-Request-Smuggling.pdf"> + <front> + <title>HTTP Request Smuggling</title> + <author initials="C." surname="Linhart" fullname="Chaim Linhart"/> + <author initials="A." surname="Klein" fullname="Amit Klein"/> + <author initials="R." surname="Heled" fullname="Ronen Heled"/> + <author initials="S." surname="Orrin" fullname="Steve Orrin"/> + <date year="2005" month="June"/> + </front> +</reference> + +</references> + +<?BEGININC build/draft-ietf-httpbis-messaging-latest.abnf-appendix ?> +<section title="Collected ABNF" anchor="collected.abnf"><t>In the collected ABNF below, list rules are expanded per <xref target="HTTP" x:rel="#abnf.extension"/>.</t><sourcecode type="abnf" name="rfc9112.parsed-abnf"> +<x:ref>BWS</x:ref> = &lt;BWS, see <xref target="HTTP" x:fmt="," x:sec="5.6.3"/>&gt; + +<x:ref>HTTP-message</x:ref> = start-line CRLF *( field-line CRLF ) CRLF [ + message-body ] +<x:ref>HTTP-name</x:ref> = %x48.54.54.50 ; HTTP +<x:ref>HTTP-version</x:ref> = HTTP-name "/" DIGIT "." DIGIT + +<x:ref>OWS</x:ref> = &lt;OWS, see <xref target="HTTP" x:fmt="," x:sec="5.6.3"/>&gt; + +<x:ref>RWS</x:ref> = &lt;RWS, see <xref target="HTTP" x:fmt="," x:sec="5.6.3"/>&gt; + +<x:ref>Transfer-Encoding</x:ref> = [ transfer-coding *( OWS "," OWS transfer-coding + ) ] + +<x:ref>absolute-URI</x:ref> = &lt;absolute-URI, see <xref target="URI" x:fmt="," x:sec="4.3"/>&gt; +<x:ref>absolute-form</x:ref> = absolute-URI +<x:ref>absolute-path</x:ref> = &lt;absolute-path, see <xref target="HTTP" x:fmt="," x:sec="4.1"/>&gt; +<x:ref>asterisk-form</x:ref> = "*" +<x:ref>authority</x:ref> = &lt;authority, see <xref target="URI" x:fmt="," x:sec="3.2"/>&gt; +<x:ref>authority-form</x:ref> = uri-host ":" port + +<x:ref>chunk</x:ref> = chunk-size [ chunk-ext ] CRLF chunk-data CRLF +<x:ref>chunk-data</x:ref> = 1*OCTET +<x:ref>chunk-ext</x:ref> = *( BWS ";" BWS chunk-ext-name [ BWS "=" BWS chunk-ext-val + ] ) +<x:ref>chunk-ext-name</x:ref> = token +<x:ref>chunk-ext-val</x:ref> = token / quoted-string +<x:ref>chunk-size</x:ref> = 1*HEXDIG +<x:ref>chunked-body</x:ref> = *chunk last-chunk trailer-section CRLF + +<x:ref>field-line</x:ref> = field-name ":" OWS field-value OWS +<x:ref>field-name</x:ref> = &lt;field-name, see <xref target="HTTP" x:fmt="," x:sec="5.1"/>&gt; +<x:ref>field-value</x:ref> = &lt;field-value, see <xref target="HTTP" x:fmt="," x:sec="5.5"/>&gt; + +<x:ref>last-chunk</x:ref> = 1*"0" [ chunk-ext ] CRLF + +<x:ref>message-body</x:ref> = *OCTET +<x:ref>method</x:ref> = token + +<x:ref>obs-fold</x:ref> = OWS CRLF RWS +<x:ref>obs-text</x:ref> = &lt;obs-text, see <xref target="HTTP" x:fmt="," x:sec="5.6.4"/>&gt; +<x:ref>origin-form</x:ref> = absolute-path [ "?" 
query ] + +<x:ref>port</x:ref> = &lt;port, see <xref target="URI" x:fmt="," x:sec="3.2.3"/>&gt; + +<x:ref>query</x:ref> = &lt;query, see <xref target="URI" x:fmt="," x:sec="3.4"/>&gt; +<x:ref>quoted-string</x:ref> = &lt;quoted-string, see <xref target="HTTP" x:fmt="," x:sec="5.6.4"/>&gt; + +<x:ref>reason-phrase</x:ref> = 1*( HTAB / SP / VCHAR / obs-text ) +<x:ref>request-line</x:ref> = method SP request-target SP HTTP-version +<x:ref>request-target</x:ref> = origin-form / absolute-form / authority-form / + asterisk-form + +<x:ref>start-line</x:ref> = request-line / status-line +<x:ref>status-code</x:ref> = 3DIGIT +<x:ref>status-line</x:ref> = HTTP-version SP status-code SP [ reason-phrase ] + +<x:ref>token</x:ref> = &lt;token, see <xref target="HTTP" x:fmt="," x:sec="5.6.2"/>&gt; +<x:ref>trailer-section</x:ref> = *( field-line CRLF ) +<x:ref>transfer-coding</x:ref> = &lt;transfer-coding, see <xref target="HTTP" x:fmt="," x:sec="10.1.4"/>&gt; + +<x:ref>uri-host</x:ref> = &lt;host, see <xref target="URI" x:fmt="," x:sec="3.2.2"/>&gt; +</sourcecode> +</section> +<?ENDINC build/draft-ietf-httpbis-messaging-latest.abnf-appendix ?> + +<section title="Differences between HTTP and MIME" anchor="differences.between.http.and.mime"> +<t> + HTTP/1.1 uses many of the constructs defined for the Internet Message + Format <xref target="RFC5322"/> and Multipurpose Internet Mail Extensions (MIME) + <xref target="RFC2045"/> to allow a message body to be transmitted in an open + variety of representations and with extensible fields. However, some + of these constructs have been reinterpreted to better fit the needs + of interactive communication, leading to some differences in how MIME + constructs are used within HTTP. These differences were carefully + chosen to optimize performance over binary connections, allow + greater freedom in the use of new media types, ease date comparisons, + and accommodate common implementations. +</t> +<t> + This appendix describes specific areas where HTTP differs from MIME. + Proxies and gateways to and from strict MIME environments need to be + aware of these differences and provide the appropriate conversions + where necessary. +</t> + +<section title="MIME-Version" anchor="mime-version"> + <iref primary="true" item="Fields" subitem="MIME-Version" x:for-anchor=""/><iref primary="true" item="Header Fields" subitem="MIME-Version" x:for-anchor=""/><iref primary="true" item="MIME-Version header field" x:for-anchor=""/> + <x:anchor-alias value="MIME-Version"/> +<t> + HTTP is not a MIME-compliant protocol. However, messages can + include a single MIME-Version header field to indicate what + version of the MIME protocol was used to construct the message. Use + of the MIME-Version header field indicates that the message is in + full conformance with the MIME protocol (as defined in <xref target="RFC2045"/>). + Senders are responsible for ensuring full conformance (where + possible) when exporting HTTP messages to strict MIME environments. +</t> +</section> + +<section title="Conversion to Canonical Form" anchor="conversion.to.canonical.form"> +<t> + MIME requires that an Internet mail body part be converted to canonical + form prior to being transferred, as described in <xref target="RFC2049" x:fmt="of" x:sec="4"/>, and that content with a type of "text" represents + line breaks as CRLF, forbidding the use of CR or LF outside of line break + sequences <xref target="RFC2046"/>. 
In contrast, HTTP does not care whether + CRLF, bare CR, or bare LF are used to indicate a line break within content. +</t> +<t> + A proxy or gateway from HTTP to a strict MIME + environment ought to translate all line breaks within text media + types to the RFC 2049 canonical form of CRLF. Note, however, + this might be complicated by the presence of a <x:ref>Content-Encoding</x:ref> + and by the fact that HTTP allows the use of some charsets + that do not use octets 13 and 10 to represent CR and LF, respectively. +</t> +<t> + Conversion will break any cryptographic + checksums applied to the original content unless the original content + is already in canonical form. Therefore, the canonical form is + recommended for any content that uses such checksums in HTTP. +</t> +</section> + +<section title="Conversion of Date Formats" anchor="conversion.of.date.formats"> +<t> + HTTP/1.1 uses a restricted set of date formats (<xref target="HTTP" x:rel="#http.date"/>) to + simplify the process of date comparison. Proxies and gateways from + other protocols ought to ensure that any <x:ref>Date</x:ref> header field + present in a message conforms to one of the HTTP/1.1 formats and rewrite + the date if necessary. +</t> +</section> + +<section title="Conversion of Content-Encoding" anchor="conversion.of.content-encoding"> +<t> + MIME does not include any concept equivalent to HTTP's + <x:ref>Content-Encoding</x:ref> header field. Since this acts as a modifier + on the media type, proxies and gateways from HTTP to MIME-compliant + protocols ought to either change the value of the <x:ref>Content-Type</x:ref> + header field or decode the representation before forwarding the message. + (Some experimental applications of Content-Type for Internet mail have used + a media-type parameter of ";conversions=&lt;content-coding&gt;" to perform + a function equivalent to Content-Encoding. However, this parameter is + not part of the MIME standards.) +</t> +</section> + +<section title="Conversion of Content-Transfer-Encoding" anchor="conversion.of.content-transfer-encoding"> + <iref item="Content-Transfer-Encoding header field" x:for-anchor=""/> +<t> + HTTP does not use the Content-Transfer-Encoding field of MIME. + Proxies and gateways from MIME-compliant protocols to HTTP need to remove + any Content-Transfer-Encoding prior to delivering the response message to + an HTTP client. +</t> +<t> + Proxies and gateways from HTTP to MIME-compliant protocols are + responsible for ensuring that the message is in the correct format + and encoding for safe transport on that protocol, where "safe + transport" is defined by the limitations of the protocol being used. + Such a proxy or gateway ought to transform and label the data with an + appropriate Content-Transfer-Encoding if doing so will improve the + likelihood of safe transport over the destination protocol. +</t> +</section> + +<section title="MHTML and Line Length Limitations" anchor="mhtml.line.length"> +<t> + HTTP implementations that share code with MHTML <xref target="RFC2557"/> + implementations need to be aware of MIME line length limitations. + Since HTTP does not have this limitation, HTTP does not fold long lines. 
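As an aside, the conversion to canonical form described above can be illustrated with a short, non-normative sketch (not part of the specification). It assumes the content has already had any Content-Encoding removed and uses octets 13 and 10 for CR and LF, per the caveat noted earlier; the function name is illustrative only.

```js
// Non-normative sketch: rewrite CRLF, bare CR, or bare LF to the RFC 2049
// canonical CRLF form before exporting text content to a strict MIME
// environment. Assumes the content is already a decoded string.
function toCanonicalLineBreaks (text) {
  // CRLF is matched first so existing canonical breaks are left untouched
  return text.replace(/\r\n|\r|\n/g, '\r\n')
}

// e.g. toCanonicalLineBreaks('a\rb\nc\r\nd') returns 'a\r\nb\r\nc\r\nd'
```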
+ MHTML messages being transported by HTTP follow all conventions of MHTML, + including line length limitations and folding, canonicalization, etc., + since HTTP transfers message-bodies without modification and, aside from the + "multipart/byteranges" type (<xref target="HTTP" x:rel="#multipart.byteranges"/>), + does not interpret + the content or any MIME header lines that might be contained therein. +</t> +</section> +</section> + +<section title="Changes from Previous RFCs" anchor="changes"> +<section title="Changes from HTTP/0.9" anchor="changes.from.0.9"> +<t> + Since HTTP/0.9 did not support header fields in a request, there is no + mechanism for it to support name-based virtual hosts (selection of resource + by inspection of the <x:ref>Host</x:ref> header field). + Any server that implements name-based virtual hosts ought to disable + support for HTTP/0.9. Most requests that appear to be HTTP/0.9 are, in + fact, badly constructed HTTP/1.x requests caused by a client failing to + properly encode the request-target. +</t> +</section> + +<section title="Changes from HTTP/1.0" anchor="changes.from.1.0"> +<section title="Multihomed Web Servers" anchor="changes.to.simplify.multihomed.web.servers.and.conserve.ip.addresses"> +<t> + The requirements that clients and servers support the <x:ref>Host</x:ref> + header field (<xref target="HTTP" x:rel="#field.host"/>), report an error if it is + missing from an HTTP/1.1 request, and accept absolute URIs + (<xref target="request.target"/>) + are among the most important changes defined by HTTP/1.1. +</t> +<t> + Older HTTP/1.0 clients assumed a one-to-one relationship of IP + addresses and servers; there was no established mechanism for + distinguishing the intended server of a request other than the IP address + to which that request was directed. The <x:ref>Host</x:ref> header field was + introduced during the development of HTTP/1.1 and, though it was + quickly implemented by most HTTP/1.0 browsers, additional requirements + were placed on all HTTP/1.1 requests in order to ensure complete + adoption. At the time of this writing, most HTTP-based services + are dependent upon the Host header field for targeting requests. +</t> +</section> + +<section title="Keep-Alive Connections" anchor="compatibility.with.http.1.0.persistent.connections"> +<t> + In HTTP/1.0, each connection is established by the client prior to the + request and closed by the server after sending the response. However, some + implementations implement the explicitly negotiated ("Keep-Alive") version + of persistent connections described in <xref x:sec="19.7.1" x:fmt="of" target="RFC2068"/>. +</t> +<t> + Some clients and servers might wish to be compatible with these previous + approaches to persistent connections, by explicitly negotiating for them + with a "Connection: keep-alive" request header field. However, some + experimental implementations of HTTP/1.0 persistent connections are faulty; + for example, if an HTTP/1.0 proxy server doesn't understand + <x:ref>Connection</x:ref>, it will erroneously forward that header field + to the next inbound server, which would result in a hung connection. +</t> +<t> + One attempted solution was the introduction of a Proxy-Connection header + field, targeted specifically at proxies. In practice, this was also + unworkable, because proxies are often deployed in multiple layers, bringing + about the same problem discussed above. 
+</t> +<t> + As a result, clients are encouraged not to send the Proxy-Connection header + field in any requests. +</t> +<t> + Clients are also encouraged to consider the use of "Connection: keep-alive" + in requests carefully; while they can enable persistent connections with + HTTP/1.0 servers, clients using them will need to monitor the + connection for "hung" requests (which indicate that the client ought to stop + sending the header field), and this mechanism ought not be used by clients + at all when a proxy is being used. +</t> +</section> + +<section title="Introduction of Transfer-Encoding" anchor="introduction.of.transfer-encoding"> +<t> + HTTP/1.1 introduces the <x:ref>Transfer-Encoding</x:ref> header field + (<xref target="field.transfer-encoding"/>). + Transfer codings need to be decoded prior to forwarding an HTTP message + over a MIME-compliant protocol. +</t> +</section> +</section> + +<section title="Changes from RFC 7230" anchor="changes.from.rfc.7230"> +<t> + Most of the sections introducing HTTP's design goals, history, architecture, + conformance criteria, protocol versioning, URIs, message routing, and + header fields have been moved to <xref target="HTTP"/>. + This document has been reduced to just the messaging syntax and + connection management requirements specific to HTTP/1.1. +</t> +<t> + Bare CRs have been prohibited outside of content. + (<xref target="message.parsing"/>) +</t> +<t> + The ABNF definition of <x:ref>authority-form</x:ref> has changed from the + more general authority component of a URI (in which port is optional) to + the specific host:port format that is required by CONNECT. + (<xref target="authority-form"/>) +</t> +<t> + Recipients are required to avoid smuggling/splitting attacks when processing an + ambiguous message framing. + (<xref target="field.transfer-encoding"/>) +</t> +<t> + In the ABNF for chunked extensions, (bad) whitespace around ";" and "=" + has been reintroduced. Whitespace was removed + in <xref target="RFC7230"/>, but that change was found to break existing + implementations. + (<xref target="chunked.extension"/>) +</t> +<t> + Trailer field semantics now transcend the specifics of chunked transfer coding. + The decoding algorithm for chunked (<xref target="decoding.chunked"/>) has + been updated to encourage storage/forwarding of trailer fields separately + from the header section, to only allow merging into the header section if + the recipient knows the corresponding field definition permits and defines + how to merge, and otherwise to discard the trailer fields instead of + merging. The trailer part is now called the trailer section to be more + consistent with the header section and more distinct from a body part. + (<xref target="chunked.trailer.section"/>) +</t> +<t> + Transfer coding parameters called "q" are disallowed in order to avoid + conflicts with the use of ranks in the <x:ref>TE</x:ref> header field. + (<xref target="transfer.coding.registry"/>) +</t> +</section> +</section> + +<section title="Acknowledgements" anchor="acks" numbered="false"> +<t> + See <xref target="HTTP" x:rel="#acks"/>, which applies to this document as well. 
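The trailer-section handling summarised in "Changes from RFC 7230" above can also be sketched in code. The following is illustrative only, not the specification's normative decoding algorithm; the function and field names are assumptions, and the input is assumed to be the message body as a binary string. Chunk data is reassembled into the body, while trailer fields are kept separate from the header section rather than merged into it.

```js
// Non-normative sketch of decoding a chunked-body (see the collected ABNF above):
// chunk-data is concatenated into the body, and trailer fields are collected
// separately from the header section instead of being merged into it.
function decodeChunkedBody (raw) {
  let rest = raw
  let body = ''
  const trailerFields = []

  const readLine = () => {
    const end = rest.indexOf('\r\n')
    if (end === -1) throw new Error('truncated message')
    const line = rest.slice(0, end)
    rest = rest.slice(end + 2)
    return line
  }

  while (true) {
    const sizeLine = readLine() // chunk-size [ chunk-ext ] CRLF
    const size = parseInt(sizeLine.split(';')[0].trim(), 16)
    if (Number.isNaN(size)) throw new Error('invalid chunk-size')
    if (size === 0) break // last-chunk
    body += rest.slice(0, size) // chunk-data
    if (rest.slice(size, size + 2) !== '\r\n') throw new Error('missing CRLF after chunk-data')
    rest = rest.slice(size + 2)
  }

  while (true) { // trailer-section = *( field-line CRLF ), ended by the final CRLF
    const line = readLine()
    if (line === '') break
    const colon = line.indexOf(':')
    if (colon === -1) throw new Error('malformed field-line in trailer section')
    trailerFields.push([line.slice(0, colon), line.slice(colon + 1).trim()])
  }

  return { body, trailerFields }
}
```

A recipient along these lines would then merge a given trailer field into the header section only when that field's definition explicitly permits it, and discard it otherwise, as described above.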
+</t> +</section> +</back> +</rfc> \ No newline at end of file diff --git a/test/fixtures/cache-tests/spec/script.mjs b/test/fixtures/cache-tests/spec/script.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/script.mjs @@ -0,0 +1,79 @@ +import baseTests from '../tests/index.mjs' + +const SUITE = 'suite' +const TEST = 'id' + +function populateLinks () { + const anchors = new AnchorMap() + baseTests.forEach(suite => { + if (suite.spec_anchors) { + suite.spec_anchors.forEach(anchor => { + anchors.push(anchor, [SUITE, suite.id]) + }) + } + suite.tests.forEach(test => { + if (test.spec_anchors) { + test.spec_anchors.forEach(anchor => { + anchors.push(anchor, [TEST, test.id]) + }) + } + }) + }) + for (const [anchor, ids] of anchors.map.entries()) { + adornSpecSection(anchor, ids) + } +} + +function adornSpecSection (anchor, ids) { + const anchorNode = document.getElementById(anchor) + if (!anchorNode) { + console.log(`Anchor ${anchor} not found.`) + return + } + const headerNode = anchorNode.children[0] + const wrapper = document.createElement('span') + wrapper.classList.add('adornment') + const adornment = document.createTextNode('ℹ️') + wrapper.appendChild(adornment) + wrapper.addEventListener('click', function (event) { + event.preventDefault() + showTests(ids) + this.scrollIntoView(true) + }) + headerNode.appendChild(wrapper) +} + +function showTests (ids) { + // modify the spec HTML to make room for the test results + const specNode = document.getElementById('top') + specNode.classList.add('half') + const mainNode = document.querySelector('div[role="main"]') + mainNode.classList.remove('col-lg-8') + mainNode.classList.add('col-lg-11') + const tocNode = document.getElementById('sidebar') + tocNode.classList.remove('d-lg-block') + tocNode.classList.add('d-none') + const iframeNode = document.createElement('iframe') + iframeNode.id = 'resultsFrame' + const query = ids.map(id => `${id[0]}=${id[1]}`).join('&') + iframeNode.setAttribute('src', `/index.html?${query}&frame=1`) + document.body.appendChild(iframeNode) +} + +class AnchorMap { + constructor () { + this.map = new Map() + } + + push (k, v) { + const val = this.map.get(k) + if (val) { + val.push(v) + this.map.set(k, val) + } else { + this.map.set(k, [v]) + } + } +} + +populateLinks() diff --git a/test/fixtures/cache-tests/spec/style.css b/test/fixtures/cache-tests/spec/style.css new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/spec/style.css @@ -0,0 +1,23 @@ + +.adornment { + padding-left: 1em; +} + +#resultsFrame { + position: fixed; + bottom: 0; + right: 0; + height: 100vh; + width: 50vw; + z-index: 500; + background-color: white; + border: 0px; + border-left: 2px solid #666; + overflow-y: auto; +} + +.half { + position: absolute; + width: 48vw; + left: 0; +} diff --git a/test/fixtures/cache-tests/test-browser.html b/test/fixtures/cache-tests/test-browser.html new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-browser.html @@ -0,0 +1,52 @@ +<!doctype html> +<html> + <head> + <meta charset="utf-8"> + <title>Browser HTTP Caching Tests</title> + <link rel="stylesheet" href="/asset/style.css" type="text/css"> + <script type="module"> + import { runTests, getResults } from './test-engine/client/runner.mjs' + import { testUUIDs } from './test-engine/client/test.mjs' + import * as display from './test-engine/lib/display.mjs' + import baseTests from './tests/index.mjs' + + runTests(baseTests, fetch, true) + .then(function() { + let resultsTarget = 
document.getElementById('results') + display.renderTestResults(baseTests, getResults(), testUUIDs, resultsTarget, true) + }) + .then(function() { + let params = (new URL(location)).searchParams + let filename = 'results.json' + if (params.get('download')) { + filename = `${params.get('download')}.json` + } + let auto = false + if (params.get('auto')) { + auto = true + } + let downloadTarget = document.getElementById('download') + display.downloadTestResults(downloadTarget, filename, getResults(), auto) + }) + </script> + </head> + <body> + <h1>Browser HTTP Caching Tests</h1> + + <p class="warning">These tests are a work in progress. The reported results may be faulty, and + do not necessarily reflect the true capabilities of each cache. Furthermore, their primary + purpose is to inform revision of the <a + href="https://httpwg.org/http-core/draft-ietf-httpbis-cache-latest.html">HTTP caching + specification</a>; so, they should not be used to evaluate or compare feature support, since the + specification itself might change. <strong>This is an open source project</strong>; to make + contributions, add your implementation's results, file issues or learn more, see <a + href="https://github.com/http-tests/cache-tests">the repository</a>.</p> + + <p><i>Hover over failed tests for details. Click on <span class="hint">⚙︎</span> to copy the + test UUID (for finding it in the browser networking tab or proxy logs).</i></p> + + <p><a id="download">Download results (JSON)</a></p> + + <div id="results"></div> + </body> +</html> diff --git a/test/fixtures/cache-tests/test-browser.sh b/test/fixtures/cache-tests/test-browser.sh new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/test-browser.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +## Run tests against a local browser on OSX. + +set -euo pipefail + +PORT=8000 +DOWNLOADS=~/Downloads +PIDFILE=/tmp/http-cache-test-server.pid + +function usage { + if [[ -n "${1}" ]]; then + echo "${1}" + fi + echo "Usage: $0 [ browser-name ... ]" >&2 +} + +function run { + BROWSERS=( "$@" ) + + # start test server + npm run --silent server --port=$PORT --pidfile=$PIDFILE & + trap cleanup EXIT + sleep 2 + + for browser in "${BROWSERS[@]}" + do + test_browser "${browser}" + done +} + +function cleanup { + # stop test server + kill "$(cat $PIDFILE)" > /dev/null 2>&1 + rm $PIDFILE +} + +function test_browser { + BROWSER=${1} + URL="http://localhost:${PORT}/test-browser.html?auto=1&download=${BROWSER}" + + case ${BROWSER} in + safari) + BROWSER_CMD="/Applications/Safari.app" + ;; + firefox) + BROWSER_CMD="/Applications/Firefox.app" + ;; + chrome) + BROWSER_CMD="/Applications/Google Chrome.app" + ;; + *) + usage "Browser ${BROWSER} not recognised." + return + ;; + esac + + # remove target file + TARGET="${DOWNLOADS}/${BROWSER}.json" + rm -f "${TARGET}" + + # run tests + open -g -a "${BROWSER_CMD}" "${URL}" + + # wait for the target to be created + i=0 + while [ ! -f "${TARGET}" ] + do + sleep 1 + i=$((i+1)) + if [ "$i" -gt "60" ] ; then + echo "Timeout." >&2 + exit 1 + fi + done + + sleep 1 + if [ -f "${TARGET}" ] ; then + mv "${TARGET}" results/ + fi + +} + +OS=$(uname) +if [[ "${OS}" != "Darwin" ]]; then + usage "This script must be run on Mac OSX."
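+  # bail out: the browser launch commands above rely on macOS application paths and "open"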
+ exit 1 +fi + +if [[ $# -eq 0 ]]; then + run safari firefox chrome +else + run "$@" +fi diff --git a/test/fixtures/cache-tests/test-docker.sh b/test/fixtures/cache-tests/test-docker.sh new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/test-docker.sh @@ -0,0 +1,116 @@ +#!/bin/bash + +## Run tests against a local docker image with common proxy/caches. + +set -euo pipefail + +PIDFILE=/tmp/http-cache-test-server.pid + +ALL_PROXIES=(squid nginx apache trafficserver varnish caddy) +DOCKER_PORTS="" +for PORT in {8001..8006}; do + DOCKER_PORTS+="-p 127.0.0.1:${PORT}:${PORT} " +done + +function usage { + if [[ -n "${1}" ]]; then + echo "${1}" + fi + echo "Usage: $0 [ -i test_id ] [ proxy... ]" +} + +function run { + TEST_ID="${1}" + shift + PROXIES=( "$@" ) + # start test server + npm run --silent server --port=8000 --pidfile=${PIDFILE} & + + # run proxies container + docker run --name=tmp_proxies ${DOCKER_PORTS} -dt mnot/proxy-cache-tests host.docker.internal \ + > /dev/null + + trap cleanup EXIT + + # give docker enough time to start + sleep 10 + + for proxy in "${PROXIES[@]}" + do + test_proxy "${proxy}" "${TEST_ID}" + done +} + +function cleanup { + # stop docker containers + docker kill tmp_proxies > /dev/null + docker rm tmp_proxies > /dev/null + + # stop test server + kill "$(cat ${PIDFILE})" > /dev/null 2>&1 + rm ${PIDFILE} +} + +function test_proxy { + PROXY=$1 + PKG=$1 + TEST_ID=$2 + case ${PKG} in + squid) + PROXY_PORT=8001 + ;; + nginx) + PROXY_PORT=8002 + ;; + trafficserver) + PROXY_PORT=8003 + ;; + apache) + PROXY_PORT=8004 + PKG=apache2 + ;; + varnish) + PROXY_PORT=8005 + ;; + caddy) + PROXY_PORT=8006 + ;; + *) + echo "Proxy ${PKG} not recognised." + exit 1 + ;; + esac + + echo "* ${PKG} $(docker container exec tmp_proxies /usr/bin/apt-cache show ${PKG} | grep Version)" + + if [[ -z "${TEST_ID}" ]]; then + npm run --silent cli --base=http://localhost:${PROXY_PORT} > "results/${PROXY}.json" + else + npm run --silent cli --base=http://localhost:${PROXY_PORT} --id="${TEST_ID}" + fi +} + + +TEST_ID="" +while getopts "h?i:" opt; do + case "${opt}" in + h) + usage + exit 0 + ;; + i) + TEST_ID=$OPTARG + ;; + *) + usage + exit 1 + ;; + esac +done +shift $((OPTIND-1)) + +if [[ $# -eq 0 ]]; then + run "" "${ALL_PROXIES[@]}" +else + run "${TEST_ID}" "${@}" +fi diff --git a/test/fixtures/cache-tests/test-engine/cli.mjs b/test/fixtures/cache-tests/test-engine/cli.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/cli.mjs @@ -0,0 +1,44 @@ +import { runTests, getResults } from './client/runner.mjs' +import { determineTestResult } from './lib/results.mjs' +import { GREEN, NC } from './lib/defines.mjs' +import fetch from 'node-fetch-with-proxy' +import tests from '../tests/index.mjs' + +const baseUrl = process.env.npm_config_base || process.env.npm_package_config_base +const testId = process.env.npm_config_id || process.env.npm_package_config_id + +let testsToRun +if (testId !== '') { + console.log(`Running ${testId}`) + tests.forEach(suite => { + suite.tests.forEach(test => { + if (test.id === testId) { + test.dump = true + testsToRun = [{ + name: suite.name, + id: suite.id, + description: suite.description, + tests: [test] + }] + } + }) + }) +} else { + testsToRun = tests +} + +await runTests(testsToRun, fetch, false, baseUrl).catch(err => { + console.error(err) + process.exit(1) +}) + +const results = getResults() + +if (testId !== '') { + console.log(`${GREEN}==== Results${NC}`) + const resultSymbol = determineTestResult(tests, testId, results, 
false) + const resultDetails = results[testId][1] || '' + console.log(`${resultSymbol[2]} - ${resultDetails}`) +} else { + console.log(JSON.stringify(results, null, 2)) +} diff --git a/test/fixtures/cache-tests/test-engine/client/config.mjs b/test/fixtures/cache-tests/test-engine/client/config.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/client/config.mjs @@ -0,0 +1,22 @@ +export let fetch = null +export let useBrowserCache = false +export let baseUrl = '' +export const requestTimeout = 10 // seconds + +export function setFetch (call) { + if (call !== undefined) { + if ('bind' in call) { + fetch = call.bind(fetch) + } else { + fetch = call + } + } +} + +export function setUseBrowserCache (bool) { + if (bool !== undefined) useBrowserCache = bool +} + +export function setBaseUrl (url) { + if (url !== undefined) baseUrl = url +} diff --git a/test/fixtures/cache-tests/test-engine/client/fetching.mjs b/test/fixtures/cache-tests/test-engine/client/fetching.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/client/fetching.mjs @@ -0,0 +1,45 @@ +import * as config from './config.mjs' +import { fixupHeader } from '../lib/header-fixup.mjs' + +export function init (idx, reqConfig, prevResp) { + const init = { + headers: [] + } + if (!config.useBrowserCache) { + init.cache = 'no-store' + init.headers.push(['Pragma', 'foo']) // dirty hack for Fetch + init.headers.push(['Cache-Control', 'nothing-to-see-here']) // ditto + } + if ('request_method' in reqConfig) init.method = reqConfig.request_method + if ('request_headers' in reqConfig) init.headers = reqConfig.request_headers + if ('magic_ims' in reqConfig && reqConfig.magic_ims === true) { + for (let i = 0; i < init.headers.length; i++) { + const header = init.headers[i] + if (header[0].toLowerCase() === 'if-modified-since') { + init.headers[i] = fixupHeader(header, prevResp, reqConfig) + } + } + } + if ('name' in reqConfig) init.headers.push(['Test-Name', reqConfig.name]) + if ('request_body' in reqConfig) init.body = reqConfig.request_body + if ('mode' in reqConfig) init.mode = reqConfig.mode + if ('credentials' in reqConfig) init.credentials = reqConfig.credentials + if ('cache' in reqConfig) init.cache = reqConfig.cache + if ('redirect' in reqConfig) init.redirect = reqConfig.redirect + init.headers.push(['Test-ID', reqConfig.id]) + init.headers.push(['Req-Num', (idx + 1).toString()]) + return init +} + +export function inflateRequests (test) { + const rawRequests = test.requests + const requests = [] + for (let i = 0; i < rawRequests.length; i++) { + const reqConfig = rawRequests[i] + reqConfig.name = test.name + reqConfig.id = test.id + reqConfig.dump = test.dump + requests.push(reqConfig) + } + return requests +} diff --git a/test/fixtures/cache-tests/test-engine/client/runner.mjs b/test/fixtures/cache-tests/test-engine/client/runner.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/client/runner.mjs @@ -0,0 +1,41 @@ +import * as config from './config.mjs' +import { makeTest, testResults } from './test.mjs' + + +export async function runTests (tests, myFetch, browserCache, base, chunkSize = 50) { + const testArray = [] + config.setFetch(myFetch) + config.setBaseUrl(base) + config.setUseBrowserCache(browserCache) + tests.forEach(testSet => { + testSet.tests.forEach(test => { + if (test.id === undefined) throw new Error('Missing test id') + if (test.browser_only === true && !config.useBrowserCache === true) return + if (test.cdn_only === true &&
config.useBrowserCache === true) return + // note: still runs cdn tests on rev-proxy + if (test.browser_skip === true && config.useBrowserCache === true) return + testArray.push(test) + }) + }) + return runSome(testArray, chunkSize) +} + +export function getResults () { + const ordered = {} + Object.keys(testResults).sort().forEach(key => { + ordered[key] = testResults[key] + }) + return ordered +} + +async function runSome (tests, chunkSize) { + let index = 0 + function next () { + if (index < tests.length) { + const these = tests.slice(index, index + chunkSize).map(makeTest) + index += chunkSize + return Promise.all(these).then(next) + } + } + return next() +} diff --git a/test/fixtures/cache-tests/test-engine/client/test.mjs b/test/fixtures/cache-tests/test-engine/client/test.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/client/test.mjs @@ -0,0 +1,300 @@ +import * as defines from '../lib/defines.mjs' +import { fixupHeader } from '../lib/header-fixup.mjs' +import * as utils from '../lib/utils.mjs' +import * as config from './config.mjs' +import * as clientUtils from './utils.mjs' +import * as fetching from './fetching.mjs' +const assert = utils.assert +const setupCheck = clientUtils.setupCheck + +export const testUUIDs = {} +export const testResults = {} + +export async function makeTest (test) { + const uuid = utils.token() + testUUIDs[test.id] = uuid + const requests = fetching.inflateRequests(test) + const responses = [] + const fetchFunctions = [] + for (let i = 0; i < requests.length; ++i) { + fetchFunctions.push({ + code: idx => { + const reqConfig = requests[idx] + const reqNum = idx + 1 + const url = clientUtils.makeTestUrl(uuid, reqConfig) + let prevRes + if (i > 0) { + prevRes = Object.fromEntries(responses[i - 1].headers) + } + const init = fetching.init(idx, reqConfig, prevRes) + const controller = new AbortController() + const timeout = setTimeout(() => { + controller.abort() + }, config.requestTimeout * 1000) + init.signal = controller.signal + if (test.dump === true) clientUtils.logRequest(url, init, reqNum) + return config.fetch(url, init) + .then(response => { + responses.push(response) + return checkResponse(test, requests, idx, response) + }) + .finally(() => { + clearTimeout(timeout) + }) + }, + pauseAfter: 'pause_after' in requests[i] + }) + } + + let idx = 0 + function runNextStep () { + if (fetchFunctions.length) { + const nextFetchFunction = fetchFunctions.shift() + if (nextFetchFunction.pauseAfter === true) { + return nextFetchFunction.code(idx++) + .then(clientUtils.pause) + .then(runNextStep) + } else { + return nextFetchFunction.code(idx++) + .then(runNextStep) + } + } + } + + return clientUtils.putTestConfig(uuid, requests) + .catch(handleError) + .then(runNextStep) + .then(() => { + return clientUtils.getServerState(uuid) + }) + .then(serverState => { + checkServerRequests(requests, responses, serverState) + }) + .then(() => { // pass + if (test.id in testResults) throw new Error(`Duplicate test ${test.id}`) + testResults[test.id] = true + }) + .catch(err => { // fail + if (test.id in testResults) throw new Error(`Duplicate test ${test.id}`) + testResults[test.id] = [(err.name || 'unknown'), err.message] + }) +} + +function checkResponse (test, requests, idx, response) { + const reqNum = idx + 1 + const reqConfig = requests[idx] + const resNum = parseInt(response.headers.get('Server-Request-Count')) + if (test.dump === true) clientUtils.logResponse(response, reqNum) + + // catch retries + if 
(response.headers.has('Request-Numbers')) { + const serverRequests = response.headers.get('Request-Numbers').split(' ').map(item => parseInt(item)) + if (serverRequests.length !== new Set(serverRequests).size) { + assert(true, false, 'retry') + } + } + + // check response type + if ('expected_type' in reqConfig) { + const typeSetup = setupCheck(reqConfig, 'expected_type') + if (reqConfig.expected_type === 'cached') { + if (response.status === 304 && isNaN(resNum)) { // some caches will not include the hdr + // pass + } else { + assert(typeSetup, resNum < reqNum, `Response ${reqNum} does not come from cache`) + } + } + if (reqConfig.expected_type === 'not_cached') { + assert(typeSetup, resNum === reqNum, `Response ${reqNum} comes from cache`) + } + } + + // check response status + if ('expected_status' in reqConfig) { + assert(setupCheck(reqConfig, 'expected_status'), + response.status === reqConfig.expected_status, + `Response ${reqNum} status is ${response.status}, not ${reqConfig.expected_status}`) + } else if ('response_status' in reqConfig) { + assert(true, // response status is always setup + response.status === reqConfig.response_status[0], + `Response ${reqNum} status is ${response.status}, not ${reqConfig.response_status[0]}`) + } else if (response.status === 999) { + // special condition; the server thought it should have received a conditional request. + assert(setupCheck(reqConfig, 'expected_type'), false, + `Request ${reqNum} should have been conditional, but it was not.`) + } else { + assert(true, // default status is always setup + response.status === 200, + `Response ${reqNum} status is ${response.status}, not 200`) + } + + // check response headers + if ('expected_response_headers' in reqConfig) { + const respPresentSetup = setupCheck(reqConfig, 'expected_response_headers') + reqConfig.expected_response_headers.forEach(header => { + if (typeof header === 'string') { + assert(respPresentSetup, response.headers.has(header), + `Response ${reqNum} ${header} header not present.`) + } else if (header.length > 2) { + assert(respPresentSetup, response.headers.has(header[0]), + `Response ${reqNum} ${header[0]} header not present.`) + + const value = response.headers.get(header[0]) + let msg, condition + if (header[1] === '=') { + const expected = response.headers.get(header[2]) + condition = value === expected + msg = `match ${header[2]} (${expected})` + } else if (header[1] === '>') { + const expected = header[2] + condition = parseInt(value) > expected + msg = `be bigger than ${expected}` + } else { + throw new Error(`Unknown expected-header operator '${header[1]}'`) + } + + assert(respPresentSetup, condition, + `Response ${reqNum} header ${header[0]} is ${value}, should ${msg}`) + } else { + const expectedValue = fixupHeader( + header, Object.fromEntries(response.headers), reqConfig)[1] + assert(respPresentSetup, response.headers.get(header[0]) === expectedValue, + `Response ${reqNum} header ${header[0]} is "${response.headers.get(header[0])}", not "${expectedValue}"`) + } + }) + } + if ('expected_response_headers_missing' in reqConfig) { + const respMissingSetup = setupCheck(reqConfig, 'expected_response_headers_missing') + reqConfig.expected_response_headers_missing.forEach(header => { + if (typeof header === 'string') { + assert(respMissingSetup, !response.headers.has(header), + `Response ${reqNum} includes unexpected header ${header}: "${response.headers.get(header)}"`) + } else if (header.length === 2) { + if (response.headers.has(header[0]) && response.headers[header[0]]) { 
+ const hdrValue = response.headers[header[0]] + assert(respMissingSetup, hdrValue.indexOf(header[1]) === -1, `Response ${reqNum} header ${header[0]} still has value "${hdrValue}"`) + } + } else { + throw new Error(`Unknown unexpected-header form '${header}'`) + } + }) + } + return response.text().then(makeCheckResponseBody(test, reqConfig, response.status)) +} + +function makeCheckResponseBody (test, reqConfig, statusCode) { + return function checkResponseBody (resBody) { + if ('check_body' in reqConfig && reqConfig.check_body === false) { + return true + } else if ('expected_response_text' in reqConfig) { + if (reqConfig.expected_response_text !== null) { + assert(setupCheck(reqConfig, 'expected_response_text'), + resBody === reqConfig.expected_response_text, + `Response body is "${resBody}", not "${reqConfig.expected_response_text}"`) + } + } else if ('response_body' in reqConfig && reqConfig.response_body !== null) { + assert(true, // response_body is always setup + resBody === reqConfig.response_body, + `Response body is "${resBody}", not "${reqConfig.response_body}"`) + } else if (!defines.noBodyStatus.has(statusCode) && reqConfig.request_method !== 'HEAD') { + const uuid = testUUIDs[test.id] + assert(true, // no_body is always setup + resBody === uuid, + `Response body is "${resBody}", not "${uuid}"`) + } + } +} + +function checkServerRequests (requests, responses, serverState) { + // compare a test's requests array against the server-side serverState + let testIdx = 0 + for (let i = 0; i < requests.length; ++i) { + const expectedValidatingHeaders = [] + const reqConfig = requests[i] + const response = responses[i] + const serverRequest = serverState[testIdx] + const reqNum = i + 1 + const typeSetup = setupCheck(reqConfig, 'expected_type') + if ('expected_type' in reqConfig) { + if (reqConfig.expected_type === 'cached') continue // the server will not see the request + if (reqConfig.expected_type === 'not_cached') { + assert(typeSetup, serverRequest.request_num === reqNum, `Response ${reqNum} comes from cache (${serverRequest.request_num} on server)`) + } + if (reqConfig.expected_type === 'etag_validated') { + expectedValidatingHeaders.push('if-none-match') + } + if (reqConfig.expected_type === 'lm_validated') { + expectedValidatingHeaders.push('if-modified-since') + } + } + testIdx++ // only increment for requests the server sees + expectedValidatingHeaders.forEach(vhdr => { + assert(typeSetup, typeof (serverRequest) !== 'undefined', `request ${reqNum} wasn't sent to server`) + assert(typeSetup, Object.prototype.hasOwnProperty.call(serverRequest.request_headers, vhdr), + `request ${reqNum} doesn't have ${vhdr} header`) + }) + if ('expected_request_headers' in reqConfig) { + const reqPresentSetup = setupCheck(reqConfig, 'expected_request_headers') + reqConfig.expected_request_headers.forEach(header => { + if (typeof header === 'string') { + const headerName = header.toLowerCase() + assert(reqPresentSetup, Object.prototype.hasOwnProperty.call(serverRequest.request_headers, headerName), + `Request ${reqNum} ${header} header not present.`) + } else { + const reqValue = serverRequest.request_headers[header[0].toLowerCase()] + assert(reqPresentSetup, reqValue === header[1], + `Request ${reqNum} header ${header[0]} is "${reqValue}", not "${header[1]}"`) + } + }) + } + if ('expected_request_headers_missing' in reqConfig) { + const reqmPresentSetup = setupCheck(reqConfig, 'expected_request_headers_missing') + reqConfig.expected_request_headers_missing.forEach(header => { + if (typeof header 
=== 'string') { + const headerName = header.toLowerCase() + assert(reqmPresentSetup, !Object.prototype.hasOwnProperty.call(serverRequest.request_headers, headerName), + `Request ${reqNum} ${header} header present.`) + } else { + const reqValue = serverRequest.request_headers[header[0].toLowerCase()] + assert(reqmPresentSetup, reqValue !== header[1], + `Request ${reqNum} header ${header[0]} is "${reqValue}"`) + } + }) + } + if (typeof serverRequest !== 'undefined' && 'response_headers' in serverRequest) { + serverRequest.response_headers.forEach(header => { + if (config.useBrowserCache && defines.forbiddenResponseHeaders.has(header[0].toLowerCase())) { + // browsers prevent reading these headers through the Fetch API so we can't verify them + return + } + if (defines.skipResponseHeaders.has(header[0].toLowerCase())) { + // these just cause spurious failures + return + } + let received = response.headers.get(header[0]) + // XXX: assumes that if a proxy joins headers, it'll separate them with a comma and exactly one space + if (Array.isArray(received)) { + received = received.join(', ') + } + if (Array.isArray(header[1])) { + header[1] = header[1].join(', ') + } + assert(true, // default headers is always setup + received === header[1], + `Response ${reqNum} header ${header[0]} is "${received}", not "${header[1]}"`) + }) + } + if ('expected_method' in reqConfig) { + assert( + setupCheck(reqConfig, 'expected_method'), + serverRequest.request_method === reqConfig.expected_method, + `Request ${reqNum} had method ${serverRequest.request_method}, not ${reqConfig.expected_method}` + ) + } + } +} + +function handleError (err) { + console.log(new Error().stack) + console.error(`ERROR: ${err}`) +} diff --git a/test/fixtures/cache-tests/test-engine/client/utils.mjs b/test/fixtures/cache-tests/test-engine/client/utils.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/client/utils.mjs @@ -0,0 +1,82 @@ +import * as config from './config.mjs' +import * as utils from '../lib/utils.mjs' +import * as defines from '../lib/defines.mjs' + +export function pause () { + return new Promise(function (resolve, reject) { + setTimeout(() => { + return resolve() + }, 3000) + }) +} + +export function makeTestUrl (uuid, reqConfig) { + let extra = '' + if ('filename' in reqConfig) { + extra += `/${reqConfig.filename}` + } + if ('query_arg' in reqConfig) { + extra += `?${reqConfig.query_arg}` + } + return `${config.baseUrl}/test/${uuid}${extra}` +} + +const uninterestingHeaders = new Set(['date', 'expires', 'last-modified', 'content-length', 'content-type', 'connection', 'content-language', 'vary', 'mime-version']) + +export async function putTestConfig (uuid, requests) { + const init = { + method: 'PUT', + headers: [['content-type', 'application/json']], + body: JSON.stringify(requests) + } + return config.fetch(`${config.baseUrl}/config/${uuid}`, init) + .then(response => { + if (response.status !== 201) { + let headers = '' + response.headers.forEach((hvalue, hname) => { // for some reason, node-fetch reverses these + if (!uninterestingHeaders.has(hname.toLowerCase())) { + headers += `${hname}: ${hvalue} ` + } + }) + throw new utils.SetupError({ message: `PUT config resulted in ${response.status} ${response.statusText} - ${headers}` }) + } + }) +} + +export async function getServerState (uuid) { + return config.fetch(`${config.baseUrl}/state/${uuid}`) + .then(response => { + if (response.status === 200) { + return response.text() + } + }).then(text => { + if (text === undefined) return [] 
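+    // otherwise the body is the JSON-encoded list of requests the server observed for this test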
+ return JSON.parse(text) + }) +} + +export function setupCheck (reqConfig, memberName) { + return reqConfig.setup === true || ('setup_tests' in reqConfig && reqConfig.setup_tests.indexOf(memberName) > -1) +} + +export function logRequest (url, init, reqNum) { + console.log(`${defines.GREEN}=== Client request ${reqNum}${defines.NC}`) + if ('method' in init) { + console.log(` ${init.method} ${url}`) + } else { + console.log(` GET ${url}`) + } + init.headers.forEach(header => { + console.log(` ${header[0]}: ${header[1]}`) + }) + console.log('') +} + +export function logResponse (response, reqNum) { + console.log(`${defines.GREEN}=== Client response ${reqNum}${defines.NC}`) + console.log(` HTTP ${response.status} ${response.statusText}`) + response.headers.forEach((hvalue, hname) => { // for some reason, node-fetch reverses these + console.log(` ${hname}: ${hvalue}`) + }) + console.log('') +} diff --git a/test/fixtures/cache-tests/test-engine/export.mjs b/test/fixtures/cache-tests/test-engine/export.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/export.mjs @@ -0,0 +1,18 @@ +import fs from 'fs' + +import Ajv from 'ajv' + +import tests from '../tests/index.mjs' + +if (process.argv[2] === 'validate') { + const ajv = new Ajv() + const schema = JSON.parse(fs.readFileSync('test-engine/lib/testsuite-schema.json', 'utf8')) + const validate = ajv.compile(schema) + const valid = validate(tests) + if (!valid) { + console.log(validate.errors) + process.exit(1) + } +} else { + console.log(JSON.stringify(tests, null, 2)) +} diff --git a/test/fixtures/cache-tests/test-engine/lib/defines.mjs b/test/fixtures/cache-tests/test-engine/lib/defines.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/defines.mjs @@ -0,0 +1,28 @@ +export const noBodyStatus = new Set([204, 304]) + +export const dateHeaders = new Set(['date', 'expires', 'last-modified', 'if-modified-since', 'if-unmodified-since']) + +export const locationHeaders = new Set(['location', 'content-location']) + +// https://fetch.spec.whatwg.org/#forbidden-response-header-name +export const forbiddenResponseHeaders = new Set(['set-cookie', 'set-cookie2']) + +// headers to skip when checking response_headers (not expected) +export const skipResponseHeaders = new Set(['date']) + +// colours for console +export const RED = '\x1b[31m' +export const GREEN = '\x1b[32m' +export const BLUE = '\x1b[34m' +export const NC = '\x1b[0m' + +// mime types for server +export const mimeTypes = { + html: 'text/html', + jpeg: 'image/jpeg', + jpg: 'image/jpeg', + png: 'image/png', + js: 'application/javascript', + mjs: 'application/javascript', + css: 'text/css' +} diff --git a/test/fixtures/cache-tests/test-engine/lib/display.mjs b/test/fixtures/cache-tests/test-engine/lib/display.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/display.mjs @@ -0,0 +1,153 @@ +/* global Blob marked */ + +import '../../asset/marked.min.js' +import { Liquid } from '../../asset/liquid.browser.esm.mjs' +import { modalOpen } from './modal.mjs' + +import { determineTestResult, resultTypes } from './results.mjs' + +const templateEngine = new Liquid({ root: 'test-engine/lib/tpl', extname: '.liquid', cache: true }) +templateEngine.registerFilter('typeof', v => typeof (v)) +templateEngine.registerFilter('toLocaleString', v => v.toLocaleString()) +templateEngine.registerFilter('skipHeaders', v => { + if (v) { + return v.filter(hdr => hdr.length < 3 || hdr[2] !== false) + } else { + return [] + } 
+}) + +export function downloadTestResults (target, fileName, data, auto) { + const dataBlob = new Blob([JSON.stringify(data, null, 2)], { type: 'text/json' }) + target.setAttribute('href', window.URL.createObjectURL(dataBlob)) + target.setAttribute('download', fileName) + target.style.display = 'inherit' + if (auto) { + target.click() + } +} + +export function renderTestResults (testSuites, testResults, testUUIDs, target, useBrowserCache) { + let totalTests = 0 + let totalPassed = 0 + testSuites.forEach(testSuite => { + const headerElement = document.createElement('h3') + target.appendChild(headerElement) + const headerText = document.createTextNode(testSuite.name) + headerElement.appendChild(headerText) + const listElement = document.createElement('ul') + const resultList = target.appendChild(listElement) + let tests = 0 + let passed = 0 + testSuite.tests.forEach(test => { + if (test.browser_only === true && !useBrowserCache === true) return + if (test.cdn_only === true && useBrowserCache === true) return + if (test.browser_skip === true && useBrowserCache === true) return + test.suiteName = testSuite.name + const testElement = resultList.appendChild(document.createElement('li')) + testElement.appendChild(showTestResult(testSuites, test.id, testResults)) + testElement.appendChild(showTestName(test, testUUIDs[test.id])) + tests++ + if (testResults[test.id] === true) { + passed++ + } + }) + const summaryElement = document.createElement('p') + const suiteSummary = target.appendChild(summaryElement) + suiteSummary.appendChild(document.createTextNode(tests + ' tests, ' + passed + ' passed.')) + totalTests += tests + totalPassed += passed + }) + const totalElement = document.createElement('p') + const totalSummary = target.appendChild(totalElement) + const totalText = document.createTextNode('Total ' + totalTests + ' tests, ' + totalPassed + ' passed.') + totalSummary.appendChild(totalText) +} + +export function showTestName (test, uuid) { + const wrapper = document.createElement('span') + const span = document.createElement('span') + span.setAttribute('class', 'clickhint') + span.innerHTML = marked.parse(test.name).slice(3, -5) + span.addEventListener('click', event => { + copyTextToClipboard(test.id) + showTestDetails(test) + }) + wrapper.appendChild(span) + + if (uuid) { + const uuidLinkElement = document.createElement('a') + uuidLinkElement.appendChild(document.createTextNode('⚙︎')) + uuidLinkElement.setAttribute('class', 'uuid') + uuidLinkElement.addEventListener('click', event => { + copyTextToClipboard(uuid) + }) + uuidLinkElement.title = 'Test UUID (click to copy)' + wrapper.appendChild(uuidLinkElement) + } + return wrapper +} + +export function showKey (element) { + const spans = element.getElementsByClassName('fa') + for (const span of spans) { + const kind = span.getAttribute('data-kind') + const styling = resultTypes[kind] + const contentNode = document.createTextNode(styling[0]) + span.style.color = styling[1] + span.appendChild(contentNode) + } +} + +export function showTestResult (testSuites, testId, testResults) { + const result = testResults[testId] + const resultValue = determineTestResult(testSuites, testId, testResults) + const resultNode = document.createTextNode(` ${resultValue[0]} `) + const span = document.createElement('span') + span.className = 'fa' + span.style.color = resultValue[1] + span.appendChild(resultNode) + if (result && typeof (result[1]) === 'string') { + span.title = result[1] + } + return span +} + +export function showTestDetails (test) { + 
templateEngine + .renderFile('explain-test', { test }) + .then(result => { + console.log(result) + const html = marked.parse(result) + modalOpen(html) + }) + .catch(err => { + console.log(`Template error: ${err}`) + }) +} + +function copyTextToClipboard (text) { + const textArea = document.createElement('textarea') + textArea.style.position = 'fixed' + textArea.style.top = 0 + textArea.style.left = 0 + textArea.style.width = '2em' + textArea.style.height = '2em' + textArea.style.padding = 0 + textArea.style.border = 'none' + textArea.style.outline = 'none' + textArea.style.boxShadow = 'none' + textArea.style.background = 'transparent' + textArea.value = text + document.body.appendChild(textArea) + textArea.focus() + textArea.select() + try { + const successful = document.execCommand('copy') + const msg = successful ? 'successful' : 'unsuccessful' + console.log(`Copying text "${text}" was ${msg}`) + } catch (err) { + console.log('Unable to copy') + } + document.body.removeChild(textArea) +} diff --git a/test/fixtures/cache-tests/test-engine/lib/header-fixup.mjs b/test/fixtures/cache-tests/test-engine/lib/header-fixup.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/header-fixup.mjs @@ -0,0 +1,28 @@ +import { locationHeaders, dateHeaders } from './defines.mjs' +import { httpDate } from './utils.mjs' + +export function fixupHeader (header, respHeaders, reqConfig) { + const headerName = header[0].toLowerCase() + + // Date headers + const serverNow = parseInt(respHeaders['server-now']) + if (dateHeaders.has(headerName) && Number.isInteger(header[1])) { + let format + if ('rfc850date' in reqConfig && reqConfig.rfc850date.includes(headerName)) { + format = 'rfc850' + } + header[1] = httpDate(serverNow, header[1], format) + } + + // Location headers + const baseUrl = respHeaders['server-base-url'] + if (locationHeaders.has(headerName) && reqConfig.magic_locations) { + if (header[1]) { + header[1] = `${baseUrl}/${header[1]}` + } else { + header[1] = `${baseUrl}` + } + } + + return header +} diff --git a/test/fixtures/cache-tests/test-engine/lib/modal.mjs b/test/fixtures/cache-tests/test-engine/lib/modal.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/modal.mjs @@ -0,0 +1,27 @@ +export function modalOpen (content) { + let modal = document.getElementById('modal') + if (!modal) { + modal = document.createElement('div') + modal.classList.add('modal') + modal.id = 'modal' + } + modal.classList.add('modal-open') + modal.innerHTML = content + const closeButton = document.createElement('button') + const closeText = document.createTextNode('❎') + closeButton.appendChild(closeText) + closeButton.classList.add('modal-exit') + closeButton.addEventListener('click', function (event) { + event.preventDefault() + modal.classList.remove('modal-open') + }) + modal.appendChild(closeButton) + document.body.appendChild(modal) + document.onkeydown = function (evt) { + evt = evt || window.event + if (evt.key === 'Escape' || evt.key === 'Esc') { + modal.classList.remove('modal-open') + document.onkeydown = function () {} + } + } +} diff --git a/test/fixtures/cache-tests/test-engine/lib/results.mjs b/test/fixtures/cache-tests/test-engine/lib/results.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/results.mjs @@ -0,0 +1,73 @@ +export const resultTypes = { + untested: ['-', '', '-'], + pass: ['\uf058', '#1aa123', '✅'], + fail: ['\uf057', '#c33131', '⛔️'], + optional_fail: ['\uf05a', '#bbbd15', '⚠️'], + yes: 
['\uf055', '#999696', 'Y'], + no: ['\uf056', '#999696', 'N'], + setup_fail: ['\uf059', '#4c61ae', '🔹'], + harness_fail: ['\uf06a', '#4c61ae', '⁉️'], + dependency_fail: ['\uf192', '#b4b2b2', '⚪️'], + retry: ['\uf01e', '#4c61ae', '↻'] +} +const passTypes = [resultTypes.pass, resultTypes.yes] + +export function determineTestResult (testSuites, testId, testResults, honorDependencies = true) { + const test = testLookup(testSuites, testId) + const result = testResults[testId] + if (result === undefined) { + return resultTypes.untested + } + if (honorDependencies && test.depends_on !== undefined) { + for (const dependencyId of test.depends_on) { + if (!passTypes.includes(determineTestResult(testSuites, dependencyId, testResults))) { + return resultTypes.dependency_fail + } + } + } + if (result[0] === 'Setup') { + if (result[1] === 'retry') { + return resultTypes.retry + } else { + return resultTypes.setup_fail + } + } + if (result === false && result[0] !== 'Assertion') { + return resultTypes.harness_fail + } + if (result[0] === 'AbortError') { + return resultTypes.harness_fail + } + if (test.kind === 'required' || test.kind === undefined) { + if (result === true) { + return resultTypes.pass + } else { + return resultTypes.fail + } + } else if (test.kind === 'optimal') { + if (result === true) { + return resultTypes.pass + } else { + return resultTypes.optional_fail + } + } else if (test.kind === 'check') { + if (result === true) { + return resultTypes.yes + } else { + return resultTypes.no + } + } else { + throw new Error(`Unrecognised test kind ${test.kind}`) + } +} + +export function testLookup (testSuites, testId) { + for (const testSuite of testSuites) { + for (const test of testSuite.tests) { + if (test.id === testId) { + return test + } + } + } + throw new Error(`Cannot find test ${testId}`) +} diff --git a/test/fixtures/cache-tests/test-engine/lib/summary.mjs b/test/fixtures/cache-tests/test-engine/lib/summary.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/summary.mjs @@ -0,0 +1,178 @@ +/* global fetch marked */ + +import '../../asset/marked.min.js' +import * as display from './display.mjs' +import { testLookup } from './results.mjs' + +export function loadResults (index) { + return Promise.all(index.map(item => + fetch(`results/${item.file}`) + .then(response => { + return response.json() + }) + .then(results => { + item.results = results + return item + } + )) + ) +} + +export function showResults (target, testSuites, results, testIds, suiteIds) { + const isDefault = testIds.length === 0 && suiteIds.length === 0 + testSuites.forEach(testSuite => { + const selectedTests = [] + const suiteTestIds = [] + testSuite.tests.forEach(test => { + if (isDefault || suiteIds.includes(testSuite.id)) { + selectedTests.push(test) + suiteTestIds.push(test.id) + } + if (isDefault === 0 || testIds.includes(test.id)) { + if (!suiteTestIds.includes(test.id)) { + selectedTests.push(test) + } + } + }) + if (selectedTests.length) { + showHeader(testSuite, results).forEach(row => { + target.appendChild(row) + }) + selectedTests.forEach(test => { + const result = showTest(testSuites, test.id, results) + if (target.childElementCount % 2) { + result.setAttribute('class', 'shade') + } + target.appendChild(result) + }) + } + }) +} + +export function showToC (target, testSuites) { + testSuites.forEach(testSuite => { + const suiteLink = document.createElement('a') + suiteLink.href = '#' + testSuite.id + suiteLink.appendChild(document.createTextNode(testSuite.name)) + const 
suiteLi = document.createElement('li') + suiteLi.appendChild(suiteLink) + target.appendChild(suiteLi) + }) +} + +function showHeader (testSuite, results) { + const rows = [] + const numCols = results.length + 2 + const blankRow = tableRow() + blankRow.appendChild(emptyCell(numCols)) + rows.push(blankRow) + const headerRow = tableRow() + headerRow.appendChild(tableCell('th', '\xa0', 'name category')) + const headerLink = document.createElement('a') + headerLink.href = '#' + testSuite.id + headerLink.appendChild(document.createTextNode(testSuite.name)) + const firstHeader = tableCell('th', headerLink, 'name category') + firstHeader.id = testSuite.id + headerRow.appendChild(firstHeader) + results.forEach(implementation => { + headerRow.appendChild(tableCell('th', implementation.name, 'category', implementation.version, implementation.link)) + }) + rows.push(headerRow) + if (testSuite.description !== undefined) { + const descriptionRow = tableRow() + const drCells = emptyCell(numCols) + drCells.innerHTML = marked.parse(testSuite.description).slice(3, -5) + descriptionRow.appendChild(drCells) + rows.push(descriptionRow) + } + return rows +} + +function showTest (testSuites, testId, results) { + const test = testLookup(testSuites, testId) + const testRow = tableRow() + testRow.appendChild(tableCell('td', testSelector(test.id))) + testRow.appendChild(tableCell('th', display.showTestName(test), 'name')) + results.forEach(implementation => { + testRow.appendChild( + tableCell('th', display.showTestResult(testSuites, test.id, implementation.results))) + }) + return testRow +} + +function tableRow (CssClass) { + const rowElement = document.createElement('tr') + if (CssClass) { + rowElement.setAttribute('class', CssClass) + } + return rowElement +} + +function tableCell (cellType, content, CssClass, hint, link, colspan) { + const cellElement = document.createElement(cellType) + if (CssClass) { + cellElement.setAttribute('class', CssClass) + } + if (colspan) { + cellElement.colSpan = colspan + } + let contentNode + if (typeof (content) === 'string') { + contentNode = document.createTextNode(content) + } else { + contentNode = content + } + if (link) { + const linkElement = document.createElement('a') + linkElement.setAttribute('href', link) + linkElement.appendChild(contentNode) + cellElement.appendChild(linkElement) + } else { + cellElement.appendChild(contentNode) + } + if (hint) { + cellElement.title = hint + } + return cellElement +} + +function testSelector (testId) { + const checkbox = document.createElement('input') + checkbox.type = 'checkbox' + checkbox.name = 'id' + checkbox.value = testId + checkbox.style.display = 'none' + checkbox.setAttribute('class', 'select') + return checkbox +} + +export function selectClickListen () { + const select = document.getElementById('select') + select.addEventListener('click', selectClick, { + once: true + }) +} + +function selectClick () { + const selectBoxes = document.getElementsByClassName('select') + for (const selectBox of selectBoxes) { + selectBox.style.display = 'inherit' + } + const submit = document.createElement('input') + submit.type = 'submit' + submit.value = 'Show only selected tests' + const select = document.getElementById('select') + select.replaceWith(submit) +} + +export function selectClearShow () { + const clear = document.createElement('a') + clear.href = '?' 
+ clear.appendChild(document.createTextNode('Clear selections')) + const select = document.getElementById('select') + select.replaceWith(clear) +} + +function emptyCell (numCols = 1) { + return tableCell('td', '\xa0', undefined, undefined, undefined, numCols) +} diff --git a/test/fixtures/cache-tests/test-engine/lib/testsuite-schema.json b/test/fixtures/cache-tests/test-engine/lib/testsuite-schema.json new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/testsuite-schema.json @@ -0,0 +1,445 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://cache-tests.fyi/testsuite-schema.json", + "title": "Cache Tests", + "description": "A list of test suites", + "type": "array", + "items": { + "description": "A test suite", + "type": "object", + "required": [ "name", "id", "tests"], + "additionalProperties": false, + "properties": { + "name": { + "description": "The name of the suite", + "type": "string" + }, + "id": { + "description": "A unique identifier for the suite", + "type": "string" + }, + "description": { + "description": "A Markdown description of the suite", + "type": "string" + }, + "spec_anchors": { + "description": "Anchors in the HTTP caching spec that this suite applies to", + "type": "array", + "items": { + "type": "string" + } + }, + "tests": { + "description": "An array of tests in the suite", + "type": "array", + "items": { + "description": "A test", + "type": "object", + "additionalProperties": false, + "required": ["name", "id", "requests"], + "properties": { + "name": { + "description": "The test name; can contain Markdown.", + "type": "string" + }, + "id": { + "$ref": "#/definitions/test-id" + }, + "description": { + "description": "A longer, Markdown description of the test", + "type": "string" + }, + "kind": { + "description": "The kind of test", + "type": "string", + "enum": ["required", "optimal", "check"] + }, + "spec_anchors": { + "description": "Anchors in the HTTP caching spec that this test applies to", + "type": "array", + "items": { + "type": "string" + } + }, + "requests": { + "description": "An array of requests", + "type": "array", + "items": { + "description": "A request to send in the test", + "type": "object", + "additionalProperties": false, + "properties": { + "request_method": { + "description": "the HTTP method to be used", + "type": "string" + }, + "request_headers": { + "description": "headers to emit in the request", + "type": "array", + "items": { + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "$ref": "#/definitions/magic-field-value" + } + ] + } + }, + "request_body": { + "description": "the HTTP request body to be used", + "type": "string" + }, + "query_arg": { + "description": "query arguments to add to the URL", + "type": "string" + }, + "filename": { + "description": "filename to add to the URL", + "type": "string" + }, + "mode": { + "description": "the mode value to pass to fetch()", + "type": "string", + "enum": ["same-origin", "no-cors", "navigate", "websocket"] + }, + "credentials": { + "description": "the credentials value to pass to fetch()", + "type": "string", + "enum": ["omit", "same-origin", "include"] + }, + "cache": { + "description": "the cache value to pass to fetch()", + "type": "string", + "enum": ["default", "no-store", "reload", "no-cache", "force-cache", "only-if-cached"] + }, + "redirect": { + "description": "the redirect value to pass to fetch()", + "type": "string", + "enum": ["follow", "error", "manual"] + 
}, + "pause_after": { + "description": "Whether to pause for three seconds after the request completes", + "type": "boolean" + }, + "disconnect": { + "description": "Whether to disconnect the client when receiving thsi request", + "type": "boolean" + }, + "magic_locations": { + "description": "Whether to rewrite Location and Content-Location to full URLs", + "type": "boolean" + }, + "magic_ims": { + "description": "Whether to rewrite If-Modified-Since to a delta from the previous Last-Modified", + "type": "boolean" + }, + "rfc850date": { + "description": "Header names to use RFC850 format on when converting dates", + "type": "array", + "items": [ + { + "$ref": "#/definitions/date-headers" + } + ] + }, + "response_status": { + "description": "HTTP status code and phrase to return from origin", + "type": "array", + "items": [ + { + "$ref": "#/definitions/status-code" + }, + { + "description": "status phrase", + "type": "string" + } + ] + }, + "response_headers": { + "description": "Response header fields to be returned from origin", + "type": "array", + "items": { + "anyOf": [ + { + "description": "name and value", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "oneOf": [ + { "type": "string" }, + { "type": "integer" } + ] + } + ] + }, + { + "description": "name and value with control over checking", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "$ref": "#/definitions/magic-field-value" + }, + { + "type": "boolean" + } + ] + } + ] + } + }, + "response_body": { + "description": "Response body to be returned from origin; defaults to the test identifier", + "$ref": "#/definitions/response-or-null" + }, + "check_body": { + "description": "Whether to check the response body on the client", + "type": "boolean" + }, + "expected_type": { + "description": "What the test result is expected to be", + "type": "string", + "enum": ["cached", "not_cached", "lm_validated", "etag_validated"] + }, + "expected_method": { + "description": "Expected request method received by the server", + "type": "string" + }, + "expected_status": { + "description": "Expected response status received by the client", + "$ref": "#/definitions/status-code" + }, + "expected_request_headers": { + "description": "Request headers to check for on the server", + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "name and value", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "field value", + "type": "string" + } + ] + } + ] + } + }, + "response_pause": { + "description": "Pause the response body by the server", + "type": "integer" + }, + "expected_request_headers_missing": { + "description": "Request headers to check for absence on the server", + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "name and value", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "field value", + "type": "string" + } + ] + } + ] + } + }, + "expected_response_headers": { + "description": "Response headers to check for on the client", + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "name and value", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": 
"#/definitions/field-name" + }, + { + "$ref": "#/definitions/magic-field-value" + } + ] + }, + { + "description": "check two headers have the same value", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "const": "=" + }, + { + "description": "field name to check against", + "type": "string" + } + ] + }, + { + "description": "header value is greater than an integer", + "type": "array", + "additionalItems": false, + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "const": ">" + }, + { + "description": "integer to check against", + "type": "integer" + } + ] + } + ] + } + }, + "expected_response_headers_missing": { + "description": "Response headers to check are missing on the client", + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "name and value", + "type": "array", + "items": [ + { + "$ref": "#/definitions/field-name" + }, + { + "description": "field value substring", + "type": "string" + } + ] + } + ] + } + }, + "expected_response_text": { + "description": "Expected response body received by the client", + "$ref": "#/definitions/response-or-null" + }, + "setup": { + "description": "Whether this is a setup request; failures don't mean the actual test failed", + "type": "boolean" + }, + "setup_tests": { + "description": "List of checks that are considered setup", + "type": "array", + "items": { + "type": "string", + "enum": ["expected_type", "expected_method", "expected_status", "expected_response_headers", "expected_response_text", "expected_request_headers"] + } + } + } + } + }, + "browser_only": { + "description": "Whether the test will only run on browsers", + "type": "boolean" + }, + "cdn_only": { + "description": "Whether the test will only run on CDN caches", + "type": "boolean" + }, + "browser_skip": { + "description": "Whether the test will skip browsers", + "type": "boolean" + }, + "depends_on": { + "description": "List of Test IDs that this test depends on", + "type": "array", + "items": { + "$ref": "#/definitions/test-id" + } + } + } + } + } + } + }, + "definitions": { + "field-name": { + "description": "HTTP header field name", + "type": "string", + "pattern": "^[a-zA-Z0-9-_]+$" + }, + "magic-field-value": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "integer" + } + ] + }, + "test-id": { + "description": "A short identifer for a test", + "type": "string" + }, + "response-or-null": { + "oneOf": [ + { + "description": "The text of the response", + "type": "string" + }, + { + "description": "Do not check the response", + "type": "null" + } + ] + }, + "status-code": { + "description": "HTTP response status code", + "type": "integer", + "minimum": 100, + "maximum": 599 + }, + "date-headers": { + "type": "string", + "enum": ["date", "if-modified-since", "last-modified", "expires"] + } + } +} diff --git a/test/fixtures/cache-tests/test-engine/lib/tpl/checks.liquid b/test/fixtures/cache-tests/test-engine/lib/tpl/checks.liquid new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/tpl/checks.liquid @@ -0,0 +1,52 @@ + +{%- if request.setup -%} + {%- assign setup_prop = '' -%} +{%- else -%} + {%- assign setup_prop = ' _(Failure will be considered a test setup issue)_' -%} +{%- endif %} + +### The following checks will be performed: + +{%- if request.expected_type %} +- The client will check that this response {% case request.expected_type %}{% when "cached" %}is cached{% when "not_cached" %}is not cached{% 
when "lm_validated" %}is validated using `Last-Modified`{% when "etag_validated" %}is validated using `ETag`{% endcase %} {% if test.setup_tests contains "expected_type" %}{{ setup_prop }}{% endif %}{% endif -%} + +{%- if request.expected_method %} +- The server will check that the request method is `{{ request.expected_method }}`.{% endif -%} + +{%- if request.expected_request_headers.size > 0 %} +- The server will check that the following request headers (and values, when specified) are present{% if test.setup_tests contains "expected_request_headers" %}{{ setup_prop }}{% endif %}: +{%- render 'header-list' with request.expected_request_headers as headers %}{% endif -%} + +{%- if request.expected_request_headers_missing.size > 0 %} +- The server will check that the following request headers (and values, when specified) are absent{% if test.setup_tests contains "expected_request_headers_missing" %}{{ setup_prop }}{% endif %}: +{%- render 'header-list' with request.expected_request_headers_missing as headers %}{% endif -%} + +{%- if request.expected_status %} +- The client will check that the status code is `{{ request.expected_status }}` {% if test.setup_tests contains "expected_status" %}{{ setup_prop }}{% endif %} +{%- elsif request.response_status %} +- The client will check that the status code is `{{ request.response_status | join: " " }}`{{ setup_prop }} +{%- else %} +- The client will check that the status code is `200 OK`{{ setup_prop }} +{%- endif -%} + +{%- assign response_headers = request.response_headers | skipHeaders -%} +{%- if request.expected_response_headers.size > 0 or response_headers.size > 0 %} +- The client will check that the following response headers (and values, when specified) are present{% if test.setup_tests contains "expected_response_headers" %}{{ setup_prop }}{% endif -%}: +{%- render 'header-list' with request.expected_response_headers as headers -%} +{%- render 'header-list' with response_headers as headers %}{% endif -%} + +{%- if request.expected_response_headers_missing.size > 0 %} +- The client will check that the following response headers (and values, when specified) are missing: +{%- render 'header-list' with request.expected_response_headers_missing as headers %}{% endif -%} + +{%- if request.check_body != false %} +- The client will check the body +{%- if request.expected_response text -%} +, expecting: `{{ request.expected_response_text }}` {% if test.setup_tests contains "expected_response_text" %}{{ setup_prop }}{% endif %} +{%- else -%} +, expecting the generated response body. +{%- endif -%} +{%- endif -%} + + + diff --git a/test/fixtures/cache-tests/test-engine/lib/tpl/explain-test.liquid b/test/fixtures/cache-tests/test-engine/lib/tpl/explain-test.liquid new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/tpl/explain-test.liquid @@ -0,0 +1,90 @@ +# {{ test.name }} + +Test ID: `{{ test.id }}` + +{% case test.kind %} +{%- when "optimal" -%}This is an optional test for cache efficiency +{%- when "check" -%}This is an informational check +{%- else -%}This is a conformance test +{%- endcase -%} +{%- if test.browser_only %} run on browsers only{% endif -%} +{%- if test.cdn_only %} run on CDN caches only{% endif -%} +{%- if test.browser_skip %} not run on browsers{% endif -%} +. 
+
+{%- if test.depends_on %} It depends on the following test IDs:
+{%- for dependant in test.depends_on %}
+- `{{ dependant }}`
+{%- endfor %}{% endif %}
+
+{% for request in test.requests -%}
+## Request {{ forloop.index }}
+
+{%- if request.setup == true %}
+
+_This is a setup request; if it fails, we can't perform the test._
+{%- endif -%}
+
+{%- if request.disconnect == true %}
+
+The server will disconnect the client when receiving this request.
+{%- endif -%}
+
+
+{%- if request.mode or request.credentials or request.cache or request.redirect %}
+
+### Fetch [init](https://fetch.spec.whatwg.org/#requestinit):
+{%- if request.mode %}
+- Mode: {{ request.mode }}
+{%- endif -%}
+{%- if request.credentials %}
+- Credentials: {{ request.credentials }}
+{%- endif -%}
+{%- if request.cache %}
+- Cache: {{ request.cache }}
+{%- endif -%}
+{%- if request.redirect %}
+- Redirect: {{ request.redirect }}
+{%- endif -%}
+{%- endif %}
+
+### The client sends a request containing:
+~~~
+{{ request.request_method | default: 'GET' }} [generated test URL]{% if request.filename %}/{% endif %}{{ request.filename }}{% if request.query_arg %}?{% endif %}{{ request.query_arg }} {{ magic_locations }}
+{% for header in request.request_headers %}{% render 'header-magic' with header as header %}
+{% endfor %}
+{{ request.request_body }}
+~~~
+
+{%- if request.response_pause %}
+
+The server will pause for {{ request.response_pause }} seconds before responding.{% endif -%}
+
+{%- if request.response_status or request.response_headers or request.response_body %}
+
+### The server sends a response containing:
+~~~
+{% if request.expected_type == "lm_validated" or request.expected_type == "etag_validated" -%}
+HTTP/1.1 304 Not Modified
+{%- else -%}
+HTTP/1.1 {{ request.response_status[0] | default: 200 }} {{ request.response_status[1] | default: "OK" }}
+{%- endif %}
+{% for header in request.response_headers %}{% render 'header-magic' with header as header %}
+{% endfor %}
+{{ request.response_body | default: '[generated response body]' }}
+~~~{% endif -%}
+
+{%- if request.rfc850date %}
+All instances of the following headers will be sent and checked for being in RFC850 date format:
+{% for header in request.rfc850date %}
+ - `{{ header }}`
+{% endfor %}
+{% endif -%}
+
+{% render 'checks' with request as request %}
+
+{%- if request.pause_after == true %}
+
+The client will pause for three seconds after this request.{% endif %}
+
+{% endfor %} diff --git a/test/fixtures/cache-tests/test-engine/lib/tpl/header-list.liquid b/test/fixtures/cache-tests/test-engine/lib/tpl/header-list.liquid
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/test-engine/lib/tpl/header-list.liquid
@@ -0,0 +1,16 @@
+{%- for header in headers %}
+ - {% if header.first -%}
+ {%- if header.size == 3 -%}
+ {%- case header[1] -%}
+ {%- when ">" -%}
+ `{{ header[0] }}` is greater than `{{ header[2] }}`
+ {%- when "=" -%}
+ `{{ header[0] }}` has the same value as `{{ header[2] }}`
+ {%- endcase -%}
+ {%- else -%}
+ `{% render 'header-magic' with header as header %}`
+ {%- endif -%}
+ {%- else -%}
+ `{{ header }}`
+ {%- endif -%}
+{% endfor -%} diff --git a/test/fixtures/cache-tests/test-engine/lib/tpl/header-magic.liquid b/test/fixtures/cache-tests/test-engine/lib/tpl/header-magic.liquid
new file mode 100644
--- /dev/null
+++ b/test/fixtures/cache-tests/test-engine/lib/tpl/header-magic.liquid
@@ -0,0 +1,6 @@
+{%- assign mytype = header[1] | typeof -%}
+{{ header[0] }}: {% if mytype == 'number' -%}
+ [ {{ header[1] | 
toLocaleString }} seconds delta ] +{%- else -%} + {{ header[1] }} +{%- endif -%} diff --git a/test/fixtures/cache-tests/test-engine/lib/utils.mjs b/test/fixtures/cache-tests/test-engine/lib/utils.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/lib/utils.mjs @@ -0,0 +1,89 @@ +export function AssertionError (options) { + this.name = 'Assertion' + this.message = options.message +} + +export function SetupError (options) { + this.name = 'Setup' + this.message = options.message +} + +export function assert (isSetup, expr, message) { + if (expr) return + if (isSetup) { + throw new SetupError({ message }) + } else { + throw new AssertionError({ message }) + } +} + +export function token () { + return [toHex(randInt(32), 8), + toHex(randInt(16), 4), + toHex(0x4000 | randInt(12), 4), + toHex(0x8000 | randInt(14), 4), + toHex(randInt(48), 12)].join('-') +} + +function randInt (bits) { + if (bits < 1 || bits > 53) { + throw new TypeError() + } else { + if (bits >= 1 && bits <= 30) { + return 0 | ((1 << bits) * Math.random()) + } else { + const high = (0 | ((1 << (bits - 30)) * Math.random())) * (1 << 30) + const low = 0 | ((1 << 30) * Math.random()) + return high + low + } + } +} + +function toHex (x, length) { + let rv = x.toString(16) + while (rv.length < length) { + rv = '0' + rv + } + return rv +} + +const rfc850day = { + 0: 'Sunday', + 1: 'Monday', + 2: 'Tuesday', + 3: 'Wednesday', + 4: 'Thursday', + 5: 'Friday', + 6: 'Saturday' +} + +const rfc850month = { + 0: 'Jan', + 1: 'Feb', + 2: 'Mar', + 3: 'Apr', + 4: 'May', + 5: 'Jun', + 6: 'Jul', + 7: 'Aug', + 8: 'Sep', + 9: 'Oct', + 10: 'Nov', + 11: 'Dec' +} + +export function httpDate (now, deltaSecs, format) { + const instant = new Date(now + (deltaSecs * 1000)) + if (format && format === 'rfc850') { + const day = rfc850day[instant.getUTCDay()] + const date = instant.getUTCDate().toString().padStart(2, '0') + const month = rfc850month[instant.getUTCMonth()] + const year = instant.getUTCFullYear().toString().slice(2) + const hours = instant.getUTCHours().toString().padStart(2, '0') + const mins = instant.getUTCMinutes().toString().padStart(2, '0') + const secs = instant.getUTCSeconds().toString().padStart(2, '0') + // Sunday, 06-Nov-94 08:49:37 GMT + return `${day}, ${date}-${month}-${year} ${hours}:${mins}:${secs} GMT` + } + return instant.toGMTString() +} diff --git a/test/fixtures/cache-tests/test-engine/server/handle-config.mjs b/test/fixtures/cache-tests/test-engine/server/handle-config.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/server/handle-config.mjs @@ -0,0 +1,22 @@ +import { sendResponse, configs, setConfig } from './utils.mjs' + +export default function handleConfig (pathSegs, request, response) { + const uuid = pathSegs[0] + if (request.method !== 'PUT') { + sendResponse(response, 405, `${request.method} request to config for ${uuid}`) + return + } + if (configs.has(uuid)) { + sendResponse(response, 409, `Config already exists for ${uuid}`) + return + } + let body = '' + request.on('data', chunk => { + body += chunk + }) + request.on('end', () => { + setConfig(uuid, JSON.parse(body)) + response.statusCode = 201 + response.end('OK') + }) +} diff --git a/test/fixtures/cache-tests/test-engine/server/handle-file.mjs b/test/fixtures/cache-tests/test-engine/server/handle-file.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/server/handle-file.mjs @@ -0,0 +1,24 @@ +import fs from 'fs' +import path from 'path' +import process from 'process' 
+ +import { sendResponse } from './utils.mjs' +import { mimeTypes } from '../lib/defines.mjs' + +export default function handleFile (url, request, response) { + let urlPath = path.normalize(url.pathname) + if (urlPath === '/') urlPath = '/index.html' + const filename = path.join(process.cwd(), urlPath) + let stat + try { + stat = fs.statSync(filename) + } catch {} + if (!stat || !stat.isFile()) { + sendResponse(response, 404, `${urlPath} Not Found`) + return + } + const mimeType = mimeTypes[path.extname(filename).split('.')[1]] || 'application/octet-stream' + const fileStream = fs.createReadStream(filename) + response.writeHead(200, { 'Content-Type': mimeType }) + fileStream.pipe(response) +} diff --git a/test/fixtures/cache-tests/test-engine/server/handle-state.mjs b/test/fixtures/cache-tests/test-engine/server/handle-state.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/server/handle-state.mjs @@ -0,0 +1,12 @@ +import { sendResponse, stash } from './utils.mjs' + +export default function handleState (pathSegs, request, response) { + const uuid = pathSegs[0] + const state = stash.get(uuid) + if (state === undefined) { + sendResponse(response, 404, `State not found for ${uuid}`) + return + } + response.setHeader('Content-Type', 'text/plain') + response.end(JSON.stringify(state)) +} diff --git a/test/fixtures/cache-tests/test-engine/server/handle-test.mjs b/test/fixtures/cache-tests/test-engine/server/handle-test.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/server/handle-test.mjs @@ -0,0 +1,118 @@ +import { noBodyStatus } from '../lib/defines.mjs' +import { fixupHeader } from '../lib/header-fixup.mjs' +import { sendResponse, getHeader, configs, stash, setStash, logRequest, logResponse } from './utils.mjs' + +export default function handleTest (pathSegs, request, response) { + // identify the desired configuration for this request + const uuid = pathSegs[0] + if (!uuid) { + sendResponse(response, 404, `Config Not Found for ${uuid}`) + return + } + const requests = configs.get(uuid) + if (!requests) { + sendResponse(response, 409, `Requests not found for ${uuid}`) + return + } + + const serverState = stash.get(uuid) || [] + const srvReqNum = serverState.length + 1 + const cliReqNum = parseInt(request.headers['req-num']) + const reqNum = cliReqNum || srvReqNum + const reqConfig = requests[reqNum - 1] + + if (!reqConfig) { + sendResponse(response, 409, `${requests[0].id} config not found for request ${srvReqNum} (anticipating ${requests.length})`) + return + } + if (reqConfig.dump) logRequest(request, srvReqNum) + + // response_pause + if ('response_pause' in reqConfig) { + setTimeout(continueHandleTest, reqConfig.response_pause * 1000, uuid, request, response, requests, serverState) + } else { + continueHandleTest(uuid, request, response, requests, serverState) + } +} + +function continueHandleTest (uuid, request, response, requests, serverState) { + const srvReqNum = serverState.length + 1 + const cliReqNum = parseInt(request.headers['req-num']) + const reqNum = cliReqNum || srvReqNum + const reqConfig = requests[reqNum - 1] + const previousConfig = requests[reqNum - 2] + const now = Date.now() + + // Determine what the response status should be + let httpStatus = reqConfig.response_status || [200, 'OK'] + if ('expected_type' in reqConfig && reqConfig.expected_type.endsWith('validated')) { + const previousLm = getHeader(previousConfig.response_headers, 'Last-Modified') + if (previousLm && 
request.headers['if-modified-since'] === previousLm) { + httpStatus = [304, 'Not Modified'] + } + const previousEtag = getHeader(previousConfig.response_headers, 'ETag') + if (previousEtag && request.headers['if-none-match'] === previousEtag) { + httpStatus = [304, 'Not Modified'] + } + if (httpStatus[0] !== 304) { + httpStatus = [999, '304 Not Generated'] + } + } + response.statusCode = httpStatus[0] + response.statusPhrase = httpStatus[1] + + // header manipulation + const responseHeaders = reqConfig.response_headers || [] + const savedHeaders = new Map() + response.setHeader('Server-Base-Url', request.url) + response.setHeader('Server-Request-Count', srvReqNum) + response.setHeader('Client-Request-Count', cliReqNum) + response.setHeader('Server-Now', now, 0) + responseHeaders.forEach(header => { + header = fixupHeader(header, response.getHeaders(), reqConfig) + if (response.hasHeader(header[0])) { + const currentVal = response.getHeader(header[0]) + if (typeof currentVal === 'string') { + response.setHeader(header[0], [currentVal, header[1]]) + } else if (Array.isArray(currentVal)) { + response.setHeader(header[0], currentVal.concat(header[1])) + } else { + console.log(`ERROR: Unanticipated header type of ${typeof currentVal} for ${header[0]}`) + } + } else { + response.setHeader(header[0], header[1]) + } + if (header.length < 3 || header[2] === true) { + savedHeaders.set(header[0], response.getHeader(header[0])) + } + }) + + if (!response.hasHeader('content-type')) { + response.setHeader('Content-Type', 'text/plain') + } + + // stash information about this request for the client + serverState.push({ + request_num: cliReqNum, + request_method: request.method, + request_headers: request.headers, + response_headers: Array.from(savedHeaders.entries()) + }) + response.setHeader('Request-Numbers', serverState.map(item => item.request_num).join(' ')) + setStash(uuid, serverState) + + // Response body generation + if ('disconnect' in reqConfig && reqConfig.disconnect) { + // disconnect now because we want the state + response.socket.destroy() + response = 'disconnect' + } else if (noBodyStatus.has(response.statusCode)) { + response.end() + } else { + const content = reqConfig.response_body || uuid + response.end(content) + } + + // logging + if (reqConfig.dump) logResponse(response, srvReqNum) +} diff --git a/test/fixtures/cache-tests/test-engine/server/server.mjs b/test/fixtures/cache-tests/test-engine/server/server.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/server/server.mjs @@ -0,0 +1,54 @@ +/* global URL */ + +import fs from 'fs' +import http from 'http' +import https from 'https' +import process from 'process' + +import handleConfig from './handle-config.mjs' +import handleFile from './handle-file.mjs' +import handleState from './handle-state.mjs' +import handleTest from './handle-test.mjs' + +function handleMain (request, response) { + const url = new URL(request.url, baseUrl) + const pathSegs = url.pathname.split('/') + pathSegs.shift() + const dispatch = pathSegs.shift() + if (dispatch === 'config') { + handleConfig(pathSegs, request, response) + } else if (dispatch === 'test') { + handleTest(pathSegs, request, response) + } else if (dispatch === 'state') { + handleState(pathSegs, request, response) + } else { + handleFile(url, request, response) + } +} + +const protocol = process.env.npm_config_protocol || process.env.npm_package_config_protocol +const port = process.env.npm_config_port || process.env.npm_package_config_port +const baseUrl = 
`${protocol}://localhost:${port}/` +const pidfile = process.env.npm_config_pidfile || process.env.npm_package_config_pidfile + +fs.writeFile(pidfile, process.pid.toString(), 'ascii', function (err) { + if (err) { console.log(`PID file write error: ${err.message}`) } +}) + +let server +if (protocol.toLowerCase() === 'https') { + const options = { + key: fs.readFileSync(process.env.npm_config_keyfile), + cert: fs.readFileSync(process.env.npm_config_certfile) + } + server = https.createServer(options, handleMain) +} else { + server = http.createServer(handleMain) +} +server.on('listening', () => { + const host = (server.address().family === 'IPv6') + ? `[${server.address().address}]` + : server.address().address + console.log(`Listening on ${protocol.toLowerCase()}://${host}:${server.address().port}/`) +}) +server.listen(port) diff --git a/test/fixtures/cache-tests/test-engine/server/utils.mjs b/test/fixtures/cache-tests/test-engine/server/utils.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/test-engine/server/utils.mjs @@ -0,0 +1,54 @@ +import { BLUE, NC } from '../lib/defines.mjs' + +export function sendResponse (response, statusCode, message) { + console.log(`SERVER WARNING: ${message}`) + response.writeHead(statusCode, { 'Content-Type': 'text/plain' }) + response.write(`${message}\n`) + response.end() +} + +export function getHeader (headers, headerName) { + let result + headers.forEach(header => { + if (header[0].toLowerCase() === headerName.toLowerCase()) { + result = header[1] + } + }) + return result +} + +// stash for server state +export const stash = new Map() + +export function setStash (key, value) { + stash.set(key, value) +} + +// configurations +export const configs = new Map() + +export function setConfig (key, value) { + configs.set(key, value) +} + +export function logRequest (request, reqNum) { + console.log(`${BLUE}=== Server request ${reqNum}${NC}`) + console.log(` ${request.method} ${request.url}`) + for (const [key, value] of Object.entries(request.headers)) { + console.log(` ${key}: ${value}`) + } + console.log('') +} + +export function logResponse (response, resNum) { + console.log(`${BLUE}=== Server response ${resNum}${NC}`) + if (response === 'disconnect') { + console.log(' [ server disconnect ]') + } else { + console.log(` HTTP ${response.statusCode} ${response.statusPhrase}`) + for (const [key, value] of Object.entries(response.getHeaders())) { + console.log(` ${key}: ${value}`) + } + } + console.log('') +} diff --git a/test/fixtures/cache-tests/test-host.sh b/test/fixtures/cache-tests/test-host.sh new file mode 100755 --- /dev/null +++ b/test/fixtures/cache-tests/test-host.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +## Run tests against a host/port combination. + +set -euo pipefail + +function usage { + if [[ -n "${1}" ]]; then + echo "${1}" + fi + echo "Usage: ${0} [ -i test-id ] host[:port]" +} + +function run { + TEST_ID="$1" + HOST="$2" + if [[ -z $TEST_ID ]]; then + npm run --silent cli --base="http://${HOST}" + else + npm run --silent cli --base="http://${HOST}" --id="${TEST_ID}" + fi + +} + +TEST_ID="" +while getopts "h?i:" opt; do + case "${opt}" in + h) + usage + exit 0 + ;; + i) + TEST_ID=$OPTARG + ;; + *) + usage + exit 1 + ;; + esac +done +shift $((OPTIND-1)) + +if [[ $# -ne 1 ]]; then + usage "Please specify a host:port." 
+ exit 1 +fi + +run "$TEST_ID" "$1" diff --git a/test/fixtures/cache-tests/tests/age-parse.mjs b/test/fixtures/cache-tests/tests/age-parse.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/age-parse.mjs @@ -0,0 +1,301 @@ +export default + +{ + name: 'Age Parsing', + id: 'age-parse', + description: 'These tests check how caches parse the `Age` response header.', + spec_anchors: ['field.age', 'expiration.model'], + tests: [ + { + name: 'HTTP cache should ignore an `Age` header with a non-numeric value', + id: 'age-parse-nonnumeric', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', 'abc', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should ignore an `Age` header with a negative value', + id: 'age-parse-negative', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '-7200', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should ignore an `Age` header with a float value', + id: 'age-parse-float', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '7200.0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with a `Age` value of 2147483647 to be stale', + id: 'age-parse-large-minus-one', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '2147483647', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with a `Age` value of 2147483648 to be stale', + id: 'age-parse-large', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '2147483648', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with a `Age` value of 2147483649 to be stale', + id: 'age-parse-larger', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '2147483649', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with a single `Age` header line `old, 0` to be stale', + id: 'age-parse-suffix', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '7200, 0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with a single `Age` header line `0, old` to be fresh', + id: 'age-parse-prefix', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '0, 7200', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should use the first line in a response with multiple `Age` header lines: `old`, `0`', + id: 
'age-parse-suffix-twoline', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '7200', false], + ['Age', '0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache should use the first line in a response with multiple `Age` header lines: `0`, `old`', + id: 'age-parse-prefix-twoline', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '0', false], + ['Age', '7200', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with a single line `Age: 0, 0` to be fresh', + id: 'age-parse-dup-0', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '0, 0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with two `Age: 0` header lines to be fresh', + id: 'age-parse-dup-0-twoline', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '0', false], + ['Age', '0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache should consider a response with two `Age: not_old` header lines to be fresh', + id: 'age-parse-dup-old', + depends_on: ['freshness-max-age-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=10000'], + ['Age', '3600', false], + ['Age', '3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache consider an alphabetic parameter on `Age` header to be valid?', + id: 'age-parse-parameter', + depends_on: ['freshness-max-age-age'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '7200;foo=bar', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache should consider a numeric parameter on `Age` header to be valid?', + id: 'age-parse-numeric-parameter', + depends_on: ['freshness-max-age-age'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '7200;foo=111', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/authorization.mjs b/test/fixtures/cache-tests/tests/authorization.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/authorization.mjs @@ -0,0 +1,110 @@ +import * as templates from './lib/templates.mjs' + +export default + +{ + name: 'Storing Respones to Authenticated Requests', + id: 'auth', + description: 'These tests check for behaviours regarding authenticated HTTP responses.', + spec_anchors: ['caching.authenticated.responses'], + tests: [ + { + name: 'HTTP shared cache must not reuse a response to a request that contained `Authorization`, even with explicit freshness', + id: 'other-authorization', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({ + request_headers: [ + ['Authorization', 'FOO'] + ], + expected_request_headers: [ + 
['Authorization', 'FOO'] + ] + }), + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP shared cache reuses a response to a request that contained `Authorization`, if it has `Cache-Control: public`', + id: 'other-authorization-public', + kind: 'optimal', + browser_skip: true, + depends_on: ['other-authorization'], + spec_anchors: ['cache-response-directive.public'], + requests: [ + { + request_headers: [ + ['Authorization', 'FOO'] + ], + expected_request_headers: [ + ['Authorization', 'FOO'] + ], + response_headers: [ + ['Cache-Control', 'max-age=3600, public'], + ['Date', 0] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP shared cache reuses a response to a request that contained `Authorization`, if it has `Cache-Control: must-revalidate`', + id: 'other-authorization-must-revalidate', + kind: 'optimal', + browser_skip: true, + depends_on: ['other-authorization'], + requests: [ + { + request_headers: [ + ['Authorization', 'FOO'] + ], + expected_request_headers: [ + ['Authorization', 'FOO'] + ], + response_headers: [ + ['Cache-Control', 'max-age=3600, must-revalidate'], + ['Date', 0] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP shared cache reuses a response to a request that contained `Authorization`, if it has `Cache-Control: s-maxage`', + id: 'other-authorization-smaxage', + kind: 'optimal', + browser_skip: true, + depends_on: ['other-authorization'], + requests: [ + { + request_headers: [ + ['Authorization', 'FOO'] + ], + expected_request_headers: [ + ['Authorization', 'FOO'] + ], + response_headers: [ + ['Cache-Control', 's-maxage=3600'], + ['Date', 0] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/cc-freshness.mjs b/test/fixtures/cache-tests/tests/cc-freshness.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/cc-freshness.mjs @@ -0,0 +1,468 @@ +import * as templates from './lib/templates.mjs' + +export default + +{ + name: 'Cache-Control Freshness', + id: 'cc-freshness', + description: 'These tests check how caches calculate freshness using `Cache-Control`.', + spec_anchors: ['expiration.model', 'cache-response-directive'], + tests: [ + { + name: 'Does HTTP cache avoid reusing a response without explict freshness information or a validator (reuse is allowed, but not common, and many tests rely upon a cache _not_ doing it)?', + id: 'freshness-none', + kind: 'check', + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with positive `Cache-Control: max-age`', + id: 'freshness-max-age', + kind: 'optimal', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with `Cache-Control: max-age` after it becomes stale', + id: 'freshness-max-age-stale', + kind: 'optimal', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + templates.becomeStale({}), + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with `Cache-Control: 
max-age=0`', + id: 'freshness-max-age-0', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=0'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with `Cache-Control: max-age: 2147483647`', + id: 'freshness-max-age-max-minus-1', + kind: 'optimal', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2147483647'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with `Cache-Control: max-age: 2147483648`', + id: 'freshness-max-age-max', + kind: 'optimal', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2147483648'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with `Cache-Control: max-age: 2147483649`', + id: 'freshness-max-age-max-plus-1', + kind: 'optimal', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2147483649'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with `Cache-Control: max-age: 99999999999`', + id: 'freshness-max-age-max-plus', + kind: 'optimal', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=99999999999'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response when the `Age` header is greater than its `Cache-Control: max-age` freshness lifetime', + id: 'freshness-max-age-age', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-response-directive.max-age', 'field.age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Cache-Control', 'max-age=3600'], + ['Age', '7200'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache consider `Date` when applying `Cache-Control: max-age` (i.e., is `apparent_age` used)?', + id: 'freshness-max-age-date', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-response-directive.max-age'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Date', -7200], + ['Cache-Control', 'max-age=3600'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with positive `Cache-Control: max-age` and a past `Expires`', + id: 'freshness-max-age-expires', + depends_on: ['freshness-max-age'], + kind: 'optimal', + spec_anchors: ['cache-response-directive.max-age', 'field.expires'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Expires', -7200], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with positive `Cache-Control: max-age` and an invalid `Expires`', + id: 'freshness-max-age-expires-invalid', + depends_on: ['freshness-max-age'], + kind: 'optimal', + 
spec_anchors: ['cache-response-directive.max-age', 'field.expires'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Expires', '0', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with `Cache-Control: max-age=0` and a future `Expires`', + id: 'freshness-max-age-0-expires', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age', 'field.expires'], + requests: [ + { + response_headers: [ + ['Expires', 3600], + ['Cache-Control', 'max-age=0'], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with positive `Cache-Control: max-age` and a CC extension present', + id: 'freshness-max-age-extension', + kind: 'optimal', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache.control.extensions'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'foobar, max-age=3600'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with positive `Cache-Control: MaX-AgE`', + id: 'freshness-max-age-case-insenstive', + kind: 'optimal', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'MaX-aGe=3600'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with negative `Cache-Control: max-age`', + id: 'freshness-max-age-negative', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=-3600'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Private HTTP cache must not prefer `Cache-Control: s-maxage` over shorter `Cache-Control: max-age`', + id: 'freshness-max-age-s-maxage-private', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 's-maxage=3600, max-age=1'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'not_cached' + } + ], + browser_only: true + }, + { + name: 'Private HTTP cache must not prefer `Cache-Control: s-maxage` over shorter `Cache-Control: max-age` (multiple headers)', + id: 'freshness-max-age-s-maxage-private-multiple', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 's-maxage=3600'], + ['Cache-Control', 'max-age=1'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'not_cached' + } + ], + browser_only: true + }, + { + name: 'An optimal shared HTTP cache reuses a response with positive `Cache-Control: s-maxage`', + id: 'freshness-s-maxage-shared', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 's-maxage=3600'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ], + browser_skip: true + }, + { + name: 'Shared HTTP cache must prefer short `Cache-Control: s-maxage` over a longer `Cache-Control: max-age`', + id: 
'freshness-max-age-s-maxage-shared-longer', + depends_on: ['freshness-s-maxage-shared'], + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600, s-maxage=1'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'not_cached' + } + ], + browser_skip: true + }, + { + name: 'Shared HTTP cache must prefer short `Cache-Control: s-maxage` over a longer `Cache-Control: max-age` (reversed)', + id: 'freshness-max-age-s-maxage-shared-longer-reversed', + depends_on: ['freshness-s-maxage-shared'], + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 's-maxage=1, max-age=3600'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'not_cached' + } + ], + browser_skip: true + }, + { + name: 'Shared HTTP cache must prefer short `Cache-Control: s-maxage` over a longer `Cache-Control: max-age` (multiple headers)', + id: 'freshness-max-age-s-maxage-shared-longer-multiple', + depends_on: ['freshness-s-maxage-shared'], + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Cache-Control', 's-maxage=1'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'not_cached' + } + ], + browser_skip: true + }, + { + name: 'An optimal shared HTTP cache prefers long `Cache-Control: s-maxage` over a shorter `Cache-Control: max-age`', + id: 'freshness-max-age-s-maxage-shared-shorter', + depends_on: ['freshness-s-maxage-shared'], + kind: 'optimal', + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1, s-maxage=3600'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ], + browser_skip: true + }, + { + name: 'An optimal shared HTTP cache prefers long `Cache-Control: s-maxage` over `Cache-Control: max-age=0`, even with a past `Expires`', + id: 'freshness-max-age-s-maxage-shared-shorter-expires', + depends_on: ['freshness-s-maxage-shared'], + kind: 'optimal', + spec_anchors: ['cache-response-directive.max-age', 'cache-response-directive.s-maxage'], + requests: [ + { + response_headers: [ + ['Expires', -10], + ['Cache-Control', 'max-age=0, s-maxage=3600'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ], + browser_skip: true + } + ] +} diff --git a/test/fixtures/cache-tests/tests/cc-parse.mjs b/test/fixtures/cache-tests/tests/cc-parse.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/cc-parse.mjs @@ -0,0 +1,278 @@ +export default + +{ + name: 'Cache-Control Parsing', + id: 'cc-parse', + description: 'These tests check how caches parse the `Cache-Control` response header.', + spec_anchors: ['field.cache-control'], + tests: [ + { + name: 'Does HTTP cache reuse a response when first `Cache-Control: max-age` is fresh, but second is stale (same line)?', + id: 'freshness-max-age-two-fresh-stale-sameline', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1800, max-age=1', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a response when first `Cache-Control: max-age` is fresh, but second is stale (separate lines)?', + id: 
'freshness-max-age-two-fresh-stale-sepline', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1800', false], + ['Cache-Control', 'max-age=1', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a response when first `Cache-Control: max-age` is stale, but second is fresh (same line)?', + id: 'freshness-max-age-two-stale-fresh-sameline', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1, max-age=1800', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a response when first `Cache-Control: max-age` is stale, but second is fresh (separate lines)?', + id: 'freshness-max-age-two-stale-fresh-sepline', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1', false], + ['Cache-Control', 'max-age=1800', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a response with a quoted `Cache-Control: max-age`?', + id: 'freshness-max-age-quoted', + kind: 'check', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age="3600"', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with `max-age` in a quoted string (before the "real" `max-age`)', + id: 'freshness-max-age-ignore-quoted', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'extension="max-age=3600", max-age=1', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with `max-age` in a quoted string (after the "real" `max-age`)', + id: 'freshness-max-age-ignore-quoted-rev', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1, extension="max-age=3600"', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache ignore max-age with space before the `=`?', + id: 'freshness-max-age-space-before-equals', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age =3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache ignore max-age with space after the `=`?', + id: 'freshness-max-age-space-after-equals', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age= 3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses max-age with the value `003600`', + id: 'freshness-max-age-leading-zero', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=003600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with a single-quoted `Cache-Control: max-age`', + id: 'freshness-max-age-single-quoted', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 
'max-age=\'3600\'', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache reuse max-age with `3600.0` value?', + id: 'freshness-max-age-decimal-zero', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600.0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse max-age with `3600.5` value?', + id: 'freshness-max-age-decimal-five', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600.5', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a response with an invalid `Cache-Control: max-age` (leading alpha)?', + id: 'freshness-max-age-a100', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=a3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a response with an invalid `Cache-Control: max-age` (trailing alpha)?', + id: 'freshness-max-age-100a', + kind: 'check', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600a', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/cc-request.mjs b/test/fixtures/cache-tests/tests/cc-request.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/cc-request.mjs @@ -0,0 +1,241 @@ +import * as templates from './lib/templates.mjs' +import * as utils from './lib/utils.mjs' + +export default { + name: 'Cache-Control Request Directives', + id: 'cc-request', + description: 'These tests check to see if caches respect `Cache-Control` request directives. 
Note that HTTP does not require them to be supported.', + spec_anchors: ['cache-request-directive'], + tests: [ + { + name: 'Does HTTP cache honor request `Cache-Control: max-age=0` when it holds a fresh response?', + id: 'ccreq-ma0', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.max-age'], + requests: [ + templates.fresh({}), + { + request_headers: [ + ['Cache-Control', 'max-age=0'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: max-age=1` when it holds a fresh response?', + id: 'ccreq-ma1', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.max-age'], + requests: [ + templates.fresh({}), + { + request_headers: [ + ['Cache-Control', 'max-age=1'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: max-age` when it holds a fresh but `Age`d response that is not fresh enough?', + id: 'ccreq-magreaterage', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.max-age'], + requests: [ + templates.fresh({ + response_headers: [ + ['Age', '1800'] + ] + }), + { + request_headers: [ + ['Cache-Control', 'max-age=600'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a stale response when request `Cache-Control: max-stale` allows it?', + id: 'ccreq-max-stale', + kind: 'check', + depends_on: ['freshness-max-age-stale'], + spec_anchors: ['cache-request-directive.max-stale'], + requests: [ + templates.becomeStale({}), + { + request_headers: [ + ['Cache-Control', 'max-stale=1000'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a stale `Age`d response when request `Cache-Control: max-stale` allows it?', + id: 'ccreq-max-stale-age', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.max-stale'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1500'], + ['Age', '2000'] + ], + setup: true + }, + { + request_headers: [ + ['Cache-Control', 'max-stale=1000'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: min-fresh` when the response it holds is not fresh enough?', + id: 'ccreq-min-fresh', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.min-fresh'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1500'] + ], + setup: true + }, + { + request_headers: [ + ['Cache-Control', 'min-fresh=2000'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: min-fresh` when the `Age`d response it holds is not fresh enough?', + id: 'ccreq-min-fresh-age', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.min-fresh'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1500'], + ['Age', '1000'] + ], + setup: true + }, + { + request_headers: [ + ['Cache-Control', 'min-fresh=1000'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: no-cache` when it holds a fresh response?', + id: 'ccreq-no-cache', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'] + ], + setup: true + }, + { + request_headers: [ + ['Cache-Control', 'no-cache'] + 
], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: no-cache` by validating a response with `Last-Modified`?', + id: 'ccreq-no-cache-lm', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Last-Modified', -10000], + ['Date', 0] + ], + setup: true + }, + { + request_headers: [ + ['Cache-Control', 'no-cache'] + ], + expected_type: 'lm_validated' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: no-cache` by validating a response with an `ETag`?', + id: 'ccreq-no-cache-etag', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['ETag', utils.httpContent('abc')] + ], + setup: true + }, + { + request_headers: [ + ['Cache-Control', 'no-cache'] + ], + expected_type: 'etag_validated' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: no-store` when it holds a fresh response?', + id: 'ccreq-no-store', + kind: 'check', + depends_on: ['freshness-max-age'], + spec_anchors: ['cache-request-directive.no-store'], + requests: [ + templates.fresh({}), + { + request_headers: [ + ['Cache-Control', 'no-store'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache honour request `Cache-Control: only-if-cached` by generating a `504` response when it does not have a stored response?', + id: 'ccreq-oic', + kind: 'check', + spec_anchors: ['cache-request-directive.only-if-cached'], + requests: [ + { + request_headers: [ + ['Cache-Control', 'only-if-cached'] + ], + expected_status: 504, + expected_response_text: null + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/cc-response.mjs b/test/fixtures/cache-tests/tests/cc-response.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/cc-response.mjs @@ -0,0 +1,375 @@ +export default + +{ + name: 'Cache-Control Response Directives', + id: 'cc-response', + description: 'These tests check how caches handle response `Cache-Control` directives other than those related to freshness, like `no-cache` and `no-store`.', + spec_anchors: ['cache-response-directive'], + tests: [ + { + name: 'Shared HTTP cache must not store a response with `Cache-Control: private`', + id: 'cc-resp-private-shared', + browser_skip: true, + spec_anchors: ['cache-response-directive.private'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'private, max-age=3600'] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal private HTTP cache reuses a fresh response with `Cache-Control: private`', + id: 'cc-resp-private-private', + browser_only: true, + kind: 'optimal', + spec_anchors: ['cache-response-directive.private'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'private, max-age=3600'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not store a response with `Cache-Control: no-store`', + id: 'cc-resp-no-store', + spec_anchors: ['cache-response-directive.no-store'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'no-store'] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not store a response with `Cache-Control: nO-StOrE`', + id: 'cc-resp-no-store-case-insensitive', + depends_on: ['cc-resp-no-store'], + 
spec_anchors: ['cache-response-directive.no-store'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'No-StOrE'] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not store a response with `Cache-Control: no-store`, even with `max-age` and `Expires`', + id: 'cc-resp-no-store-fresh', + depends_on: ['cc-resp-no-store'], + spec_anchors: ['cache-response-directive.no-store'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000, no-store'], + ['Expires', 10000], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does HTTP cache use older stored response when newer one came with `Cache-Control: no-store`?', + id: 'cc-resp-no-store-old-new', + depends_on: ['cc-resp-no-store'], + spec_anchors: ['cache-response-directive.no-store'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000'], + ['Expires', 10000], + ['Date', 0], + ['A', '1'] + ], + setup: true, + pause_after: true + }, + { + response_headers: [ + ['Cache-Control', 'no-store'], + ['Date', 0], + ['A', '2'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached', + expected_response_headers: [['a', '1']] + } + ] + }, + { + name: 'Does HTTP cache use older stored response when newer one came with `Cache-Control: no-store, max-age=0`?', + id: 'cc-resp-no-store-old-max-age', + depends_on: ['cc-resp-no-store'], + spec_anchors: ['cache-response-directive.no-store'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000'], + ['Expires', 10000], + ['Date', 0], + ['A', '1'] + ], + setup: true, + pause_after: true + }, + { + response_headers: [ + ['Cache-Control', 'no-store, max-age=0'], + ['Date', 0], + ['A', '2'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached', + expected_response_headers: [['a', '1']] + } + ] + }, + { + name: 'HTTP cache must not use a cached response with `Cache-Control: no-cache`, even with `max-age` and `Expires`', + id: 'cc-resp-no-cache', + spec_anchors: ['cache-response-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000, no-cache'], + ['Expires', 10000], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not use a cached response with `Cache-Control: No-CaChE`, even with `max-age` and `Expires`', + id: 'cc-resp-no-cache-case-insensitive', + depends_on: ['cc-resp-no-cache'], + spec_anchors: ['cache-response-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000, No-CaChE'], + ['Expires', 10000], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache stores a response with `Cache-Control: no-cache`, but revalidates it upon use', + id: 'cc-resp-no-cache-revalidate', + kind: 'optimal', + depends_on: ['cc-resp-no-cache'], + spec_anchors: ['cache-response-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'no-cache'], + ['ETag', '"abcd"'] + ], + setup: true + }, + { + expected_type: 'etag_validated' + } + ] + }, + { + name: 'An optimal HTTP cache stores a response with `Cache-Control: no-cache`, but revalidates it upon use, even with `max-age` and `Expires`', + id: 'cc-resp-no-cache-revalidate-fresh', + kind: 'optimal', + depends_on: ['cc-resp-no-cache'], + spec_anchors: ['cache-response-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 
'max-age=10000, no-cache'], + ['Expires', 10000], + ['Date', 0], + ['ETag', '"abcd"'] + ], + setup: true + }, + { + expected_type: 'etag_validated' + } + ] + }, + { + name: 'Does `Cache-Control: no-cache` inhibit storing a listed header?', + id: 'headers-omit-headers-listed-in-Cache-Control-no-cache-single', + kind: 'check', + depends_on: ['cc-resp-no-cache-revalidate'], + spec_anchors: ['cache-response-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'no-cache="a"'], + ['a', '1'], + ['b', '2'], + ['Cache-Control', 'max-age=3600'], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached', + expected_response_headers: [['b', '2']], + expected_response_headers_missing: ['a'], + setup_tests: ['expected_type'] + } + ] + }, + { + name: 'Does `Cache-Control: no-cache` inhibit storing multiple listed headers?', + id: 'headers-omit-headers-listed-in-Cache-Control-no-cache', + kind: 'check', + depends_on: ['cc-resp-no-cache-revalidate'], + spec_anchors: ['cache-response-directive.no-cache'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'no-cache="a, b"'], + ['a', '1'], + ['b', '2'], + ['c', '3'], + ['Cache-Control', 'max-age=3600'], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached', + expected_response_headers: [['c', '3']], + expected_response_headers_missing: ['a', 'b'], + setup_tests: ['expected_type'] + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with positive `Cache-Control: max-age, must-revalidate`', + id: 'cc-resp-must-revalidate-fresh', + kind: 'optimal', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.must-revalidate'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000, must-revalidate'], + ['ETag', '"abcd"'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must revalidate a stale response with positive `Cache-Control: max-age, must-revalidate`', + id: 'cc-resp-must-revalidate-stale', + depends_on: ['freshness-none'], + spec_anchors: ['cache-response-directive.must-revalidate'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2, must-revalidate'], + ['ETag', '"abcd"'] + ], + setup: true + }, + { + expected_type: 'cached', + setup: true, + pause_after: true, + response_headers: [ + ['Cache-Control', 'max-age=2, must-revalidate'], + ['ETag', '"abcd"'] + ] + }, + { + expected_type: 'etag_validated' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a fresh response with `Cache-Control: immutable` without revalidation.', + id: 'cc-resp-immutable-fresh', + kind: 'optimal', + browser_only: true, + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000, immutable'], + ['ETag', '"abcd"'] + ], + setup: true, + pause_after: true + }, + { + cache: 'no-cache', + expected_type: 'cached' + } + ] + }, + { + name: 'An HTTP cache must revalidate a stale response with `Cache-Control: immutable`', + id: 'cc-resp-immutable-stale', + browser_only: true, + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2, immutable'], + ['ETag', '"abcd"'] + ], + setup: true, + pause_after: true + }, + { + cache: 'no-cache', + expected_type: 'etag_validated', + expected_request_headers: [['cache-control', 'max-age=0']] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/cdn-cache-control.mjs b/test/fixtures/cache-tests/tests/cdn-cache-control.mjs new file mode 100644 --- /dev/null +++ 
b/test/fixtures/cache-tests/tests/cdn-cache-control.mjs @@ -0,0 +1,491 @@ +export default + +{ + name: 'CDN-Cache-Control', + id: 'cdn-cache-control', + description: 'These tests check non-browser caches for behaviours around the [`CDN-Cache-Control` response header](https://httpwg.org/specs/rfc9213.html).', + tests: [ + { + name: 'An optimal CDN reuses a response with positive `CDN-Cache-Control: max-age`', + id: 'cdn-max-age', + cdn_only: true, + depends_on: ['freshness-none'], + kind: 'optimal', + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal CDN reuses a response with `CDN-Cache-Control: max-age: 2147483648`', + id: 'cdn-max-age-max', + kind: 'optimal', + cdn_only: true, + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=2147483648', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal CDN reuses a response with `CDN-Cache-Control: max-age: 99999999999`', + id: 'cdn-max-age-max-plus', + kind: 'optimal', + cdn_only: true, + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=99999999999', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'CDN must not reuse a response when the `Age` header is greater than its `CDN-Cache-Control: max-age` freshness lifetime', + id: 'cdn-max-age-age', + cdn_only: true, + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['CDN-Cache-Control', 'max-age=3600', false], + ['Age', '7200'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does CDN ignore `CDN-Cache-Control: max-age` with space before the `=`?', + id: 'cdn-max-age-space-before-equals', + cdn_only: true, + kind: 'check', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1'], + ['CDN-Cache-Control', 'max-age =100', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does CDN ignore `CDN-Cache-Control: max-age` with space after the `=`?', + id: 'cdn-max-age-space-after-equals', + cdn_only: true, + kind: 'check', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1'], + ['CDN-Cache-Control', 'max-age= 100', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'CDN must not reuse a response with `CDN-Cache-Control: max-age=0`', + id: 'cdn-max-age-0', + cdn_only: true, + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=0', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal CDN reuses a response with a positive `CDN-Cache-Control: max-age` and an extension cache directive', + id: 'cdn-max-age-extension', + cdn_only: true, + kind: 'optimal', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'foobar, max-age=3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does CDN reuse a response with a positive `CDN-Cache-Control: MaX-aGe`?', + id: 'cdn-max-age-case-insensitive', + cdn_only: true, + kind: 
'check', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'MaX-aGe=3600', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + + { + name: 'An optimal CDN reuses a response with a positive `CDN-Cache-Control: max-age` and a past `Expires`', + id: 'cdn-max-age-expires', + cdn_only: true, + kind: 'optimal', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=3600', false], + ['Expires', -10000], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal CDN reuses a response with a positive `CDN-Cache-Control: max-age` and an invalid `Expires`', + id: 'cdn-max-age-cc-max-age-invalid-expires', + cdn_only: true, + kind: 'optimal', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=3600', false], + ['Expires', '0', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'CDN must not reuse a response with a `CDN-Cache-Control: max-age=0` and a future `Expires`', + id: 'cdn-max-age-0-expires', + cdn_only: true, + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=0', false], + ['Expires', 10000], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal CDN prefers a long `CDN-Cache-Control: max-age` over a short `Cache-Control: max-age`', + id: 'cdn-max-age-short-cc-max-age', + cdn_only: true, + kind: 'optimal', + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1'], + ['CDN-Cache-Control', 'max-age=3600', false] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'CDN must prefer a short `CDN-Cache-Control: max-age` over a long `Cache-Control: max-age`', + id: 'cdn-max-age-long-cc-max-age', + cdn_only: true, + depends_on: ['cdn-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['CDN-Cache-Control', 'max-age=1', false] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'CDN must not reuse a cached response with `CDN-Cache-Control: private`, even with `Cache-Control: max-age` and `Expires`', + id: 'cdn-private', + cdn_only: true, + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'private'], + ['Cache-Control', 'max-age=10000'], + ['Expires', 10000], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'CDN must not reuse a cached response with `CDN-Cache-Control: no-cache`, even with `Cache-Control: max-age` and `Expires`', + id: 'cdn-no-cache', + cdn_only: true, + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'no-cache'], + ['Cache-Control', 'max-age=10000'], + ['Expires', 10000], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'CDN must not store a response with `CDN-Cache-Control: no-store`, even with `Cache-Control: max-age` and `Expires`', + id: 'cdn-no-store-cc-fresh', + cdn_only: true, + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=10000'], + ['CDN-Cache-Control', 'no-store', false], + ['Expires', 10000], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 
'not_cached' + } + ] + }, + { + name: 'An optimal CDN stores a response with a fresh `CDN-Cache-Control: max-age`, even with `Cache-Control: no-store`', + id: 'cdn-fresh-cc-nostore', + depends_on: ['freshness-none'], + cdn_only: true, + requests: [ + { + response_headers: [ + ['Cache-Control', 'no-store'], + ['CDN-Cache-Control', 'max-age=10000', false] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'CDN should ignore a `CDN-Cache-Control` that\'s an invalid Structured Field (unknown type)', + id: 'cdn-cc-invalid-sh-type-unknown', + depends_on: ['cdn-max-age'], + cdn_only: true, + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age=10000, &&&&&', false], + ['Cache-Control', 'no-store'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'CDN should ignore a `CDN-Cache-Control` that\'s an invalid Structured Field (wrong type)', + id: 'cdn-cc-invalid-sh-type-wrong', + depends_on: ['cdn-max-age'], + cdn_only: true, + requests: [ + { + response_headers: [ + ['CDN-Cache-Control', 'max-age="10000"', false], + ['Cache-Control', 'no-store'] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'Does the CDN forward the `CDN-Cache-Control` response header?', + id: 'cdn-remove-header', + cdn_only: true, + kind: 'check', + requests: [ + { + // only check for the header in expected_response_headers, so failing + // this is an assertion failure and not a setup error + response_headers: [ + ['Cache-Control', 'max-age=10000'], + ['CDN-Cache-Control', 'foo', false], + ['Expires', 10000], + ['Date', 0] + ], + expected_response_headers: [ + ['CDN-Cache-Control', 'foo'] + ] + } + ] + }, + { + name: 'Does the CDN send `Age` when `CDN-Cache-Control: max-age` exceeds `Cache-Control: max-age`?', + id: 'cdn-remove-age-exceed', + cdn_only: true, + depends_on: ['cdn-max-age'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1'], + ['CDN-Cache-Control', 'max-age=10000'], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_response_headers: [ + 'Age' + ] + } + ] + }, + { + name: 'Does the CDN preserve `Date` when `CDN-Cache-Control: max-age` exceeds `Cache-Control: max-age`?', + id: 'cdn-date-update-exceed', + cdn_only: true, + depends_on: ['cdn-max-age'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1'], + ['CDN-Cache-Control', 'max-age=10000'], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_response_headers: [ + ['Date', 0] + ] + } + ] + }, + { + name: 'Does the CDN preserve `Expires` when `CDN-Cache-Control: max-age` exceeds `Cache-Control: max-age`?', + id: 'cdn-expires-update-exceed', + cdn_only: true, + depends_on: ['cdn-max-age'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=1'], + ['Expires', 1], + ['CDN-Cache-Control', 'max-age=10000'], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_response_headers: [ + ['Expires', 1] + ] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/conditional-etag.mjs b/test/fixtures/cache-tests/tests/conditional-etag.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/conditional-etag.mjs @@ -0,0 +1,456 @@ +import * as templates from './lib/templates.mjs' + +export default { + name: 'Conditional Requests: If-None-Match and ETag', + id: 'conditional-inm', + description: 'These tests 
check handling of conditional requests using `If-None-Match` and `ETag`.', + spec_anchors: ['validation.model'], + tests: [ + { + name: 'An optimal HTTP cache responds to `If-None-Match` with a `304` when holding a fresh response with a matching strong `ETag`', + id: 'conditional-etag-strong-respond', + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"abcdef"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache must include `ETag` in a `304 Not Modified`', + id: 'conditional-304-etag', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"abcdef"'] + ], + expected_type: 'cached', + expected_status: 304, + expected_response_headers: [ + ['ETag', '"abcdef"'] + ] + } + ] + }, + { + name: 'HTTP cache must give precedence to `If-None-Match` over `If-Modified-Since`', + id: 'conditional-etag-precedence', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['Last-Modified', -5000], + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"abcdef"'], + ['If-Modified-Since', -1] + ], + magic_ims: true, + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'Does HTTP cache respond to `If-None-Match` with a `304` when holding a fresh response with a matching strong `ETag` containing obs-text?', + id: 'conditional-etag-strong-respond-obs-text', + kind: 'check', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdefü"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"abcdefü"'] + ], + expected_type: 'cached', + expected_status: 304, + expected_response_headers: [ + ['ETag', '"abcdefü"'] + ] + } + ] + }, + { + name: 'HTTP cache responds to unquoted `If-None-Match` with a `304` when holding a fresh response with a matching strong `ETag` that is quoted', + id: 'conditional-etag-quoted-respond-unquoted', + kind: 'check', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', 'abcdef'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache responds to unquoted `If-None-Match` with a `304` when holding a fresh response with a matching strong `ETag` that is unquoted', + id: 'conditional-etag-unquoted-respond-unquoted', + kind: 'check', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', 'abcdef'] + ] + }), + { + request_headers: [ + ['If-None-Match', 'abcdef'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache responds to quoted `If-None-Match` with a `304` when holding a fresh response with a matching strong `ETag` that is unquoted', + id: 'conditional-etag-unquoted-respond-quoted', + kind: 'check', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', 'abcdef'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"abcdef"'] + ], + 
expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-None-Match` with a `304` when holding a fresh response with a matching weak `ETag`', + id: 'conditional-etag-weak-respond', + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', 'W/"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', 'W/"abcdef"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache responds to `If-None-Match` with a `304` when holding a fresh response with a matching weak `ETag`, and the entity-tag weakness flag is lowercase', + id: 'conditional-etag-weak-respond-lowercase', + kind: 'check', + depends_on: ['conditional-etag-weak-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', 'w/"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', 'w/"abcdef"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache responds to `If-None-Match` with a `304` when holding a fresh response with a matching weak `ETag`, and the entity-tag weakness flag uses `\\` instead of `/`', + id: 'conditional-etag-weak-respond-backslash', + kind: 'check', + depends_on: ['conditional-etag-weak-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', 'W\\"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', 'W\\"abcdef"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache responds to `If-None-Match` with a `304` when holding a fresh response with a matching weak `ETag`, and the entity-tag weakness flag omits `/`', + id: 'conditional-etag-weak-respond-omit-slash', + depends_on: ['conditional-etag-weak-respond'], + kind: 'check', + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', 'W"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', 'W"abcdef"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-None-Match` with a `304` when it contains multiple entity-tags (first one)', + id: 'conditional-etag-strong-respond-multiple-first', + kind: 'optimal', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"abcdef", "1234", "5678"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-None-Match` with a `304` when it contains multiple entity-tags (middle one)', + id: 'conditional-etag-strong-respond-multiple-second', + kind: 'optimal', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"1234", "abcdef", "5678"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-None-Match` with a `304` when it contains multiple entity-tags (last one)', + id: 'conditional-etag-strong-respond-multiple-last', + kind: 'optimal', + depends_on: ['conditional-etag-strong-respond'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + request_headers: [ + ['If-None-Match', '"1234", 
"5678", "abcdef"'] + ], + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'HTTP cache must include stored response headers identified by `Vary` in a conditional request it generates', + id: 'conditional-etag-vary-headers', + requests: [ + { + request_headers: [ + ['Abc', '123'] + ], + response_headers: [ + ['Expires', 1], + ['ETag', '"abcdef"'], + ['Date', 0], + ['Vary', 'Abc'] + ], + setup: true, + pause_after: true + }, + { + request_headers: [ + ['Abc', '123'] + ], + expected_type: 'etag_validated', + expected_request_headers: [ + ['Abc', '123'] + ], + setup_tests: ['expected_type'] + } + ] + }, + { + name: 'HTTP cache must not use a stored `ETag` to validate when the presented `Vary`ing request header differs', + id: 'conditional-etag-vary-headers-mismatch', + depends_on: ['conditional-etag-vary-headers', 'vary-no-match'], + requests: [ + { + request_headers: [ + ['Abc', '123'] + ], + response_headers: [ + ['Expires', 10000], + ['ETag', '"abcdef"'], + ['Date', 0], + ['Vary', 'Abc'] + ], + setup: true, + pause_after: true + }, + { + request_headers: [ + ['Abc', '456'] + ], + expected_request_headers_missing: [ + ['If-None-Match', '"abcdef"'] + ] + } + ] + }, + { + name: 'An optimal HTTP cache generates a `If-None-Match` request when holding a stale response with a matching strong `ETag`', + id: 'conditional-etag-strong-generate', + kind: 'optimal', + depends_on: ['freshness-max-age-stale'], + requests: [ + templates.becomeStale({ + response_headers: [ + ['ETag', '"abcdef"'] + ] + }), + { + expected_request_headers: [ + ['If-None-Match', '"abcdef"'] + ], + expected_type: 'etag_validated' + } + ] + }, + { + name: 'An optimal HTTP cache generates a `If-None-Match` request when holding a stale response with a matching weak `ETag`', + id: 'conditional-etag-weak-generate-weak', + kind: 'optimal', + depends_on: ['freshness-max-age-stale'], + requests: [ + templates.becomeStale({ + response_headers: [ + ['ETag', 'W/"abcdef"'] + ] + }), + { + expected_request_headers: [ + ['If-None-Match', 'W/"abcdef"'] + ], + expected_type: 'etag_validated' + } + ] + }, + { + name: 'Does HTTP cache generate a quoted `If-None-Match` request when holding a stale response with a matching, unquoted strong `ETag`?', + id: 'conditional-etag-strong-generate-unquoted', + kind: 'check', + depends_on: ['conditional-etag-strong-generate'], + requests: [ + templates.becomeStale({ + response_headers: [ + ['ETag', 'abcdef'] + ] + }), + { + expected_request_headers: [ + ['If-None-Match', '"abcdef"'] + ], + expected_type: 'etag_validated' + } + ] + }, + { + name: 'Does HTTP cache forward `If-None-Match` request header when no stored response is available?', + id: 'conditional-etag-forward', + kind: 'check', + requests: [ + { + request_headers: [ + ['If-None-Match', '"abcdef"'] + ], + expected_request_headers: [ + ['If-None-Match', '"abcdef"'] + ] + } + ] + }, + { + name: 'Does HTTP cache add quotes to an unquoted `If-None-Match` request when forwarding it?', + id: 'conditional-etag-forward-unquoted', + depends_on: ['conditional-etag-forward'], + kind: 'check', + requests: [ + { + request_headers: [ + ['If-None-Match', 'abcdef'] + ], + expected_request_headers: [ + ['If-None-Match', '"abcdef"'] + ] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/conditional-lm.mjs b/test/fixtures/cache-tests/tests/conditional-lm.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/conditional-lm.mjs @@ -0,0 +1,119 @@ +import * as templates from './lib/templates.mjs' + +export 
default { + name: 'Conditional Requests: If-Modified-Since and Last-Modified', + id: 'conditional-lm', + description: 'These tests check handling of conditional requests using `If-Modified-Since` and `Last-Modified`.', + spec_anchors: ['validation.model'], + tests: [ + { + name: 'An optimal HTTP cache responds to `If-Modified-Since` with a `304` when holding a fresh response with a matching `Last-Modified`', + id: 'conditional-lm-fresh', + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['Last-Modified', -3000] + ] + }), + { + request_headers: [ + ['If-Modified-Since', -3000] + ], + magic_ims: true, + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-Modified-Since` with a `304` when holding a fresh response with an earlier `Last-Modified`', + id: 'conditional-lm-fresh-earlier', + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['Last-Modified', -3000] + ] + }), + { + request_headers: [ + ['If-Modified-Since', -2000] + ], + magic_ims: true, + expected_type: 'cached', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-Modified-Since` with a `304` when holding a stale response with a matching `Last-Modified`, after validation', + id: 'conditional-lm-stale', + kind: 'optimal', + depends_on: ['freshness-max-age-stale'], + browser_skip: true, + requests: [ + templates.becomeStale({ + response_headers: [ + ['Last-Modified', -3000] + ] + }), + { + request_headers: [ + ['If-Modified-Since', -3000] + ], + magic_ims: true, + expected_type: 'lm_validated', + expected_status: 304 + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-Modified-Since` with a `304` when holding a newer fresh response with no `Last-Modified`', + id: 'conditional-lm-fresh-no-lm', + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({}), + { + request_headers: [ + ['If-Modified-Since', -3000] + ], + magic_ims: true, + expected_type: 'cached', + expected_status: 304, + setup_tests: ['expected_type'] + } + ] + }, + { + name: 'An optimal HTTP cache responds to `If-Modified-Since` with a `304` when holding a newer fresh response when IMS uses an equivalent rfc850 date', + id: 'conditional-lm-fresh-rfc850', + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: true, + requests: [ + templates.fresh({ + response_headers: [ + ['Last-Modified', -3000] + ] + }), + { + request_headers: [ + ['If-Modified-Since', -3000] + ], + magic_ims: true, + rfc850date: ['if-modified-since'], + expected_type: 'cached', + expected_status: 304, + setup_tests: ['expected_type'] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/expires-freshness.mjs b/test/fixtures/cache-tests/tests/expires-freshness.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/expires-freshness.mjs @@ -0,0 +1,154 @@ +export default + +{ + name: 'Expires Freshness', + id: 'expires', + description: 'These tests check how caches calculate freshness using `Expires`.', + spec_anchors: ['expiration.model', 'field.expires'], + tests: [ + { + name: 'An optimal HTTP cache reuses a response with a future `Expires`', + id: 'freshness-expires-future', + kind: 'optimal', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Expires', 30 * 24 * 60 * 60], + ['Date', 0] + ], + setup: 
true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with a past `Expires`', + id: 'freshness-expires-past', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', -30 * 24 * 60 * 60], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with a present `Expires`', + id: 'freshness-expires-present', + depends_on: ['freshness-none'], + requests: [ + { + response_headers: [ + ['Expires', 0], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an `Expires` older than `Date`, both fast', + id: 'freshness-expires-old-date', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 300], + ['Date', 400] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (0)', + id: 'freshness-expires-invalid', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', '0', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with `Expires`, even if `Date` is invalid', + id: 'freshness-expires-invalid-date', + depends_on: ['freshness-expires-future'], + kind: 'optimal', + requests: [ + { + response_headers: [ + ['Date', 'foo', false], + ['Expires', 10] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response when the `Age` header is greater than its `Expires` minus `Date`, and `Date` is slow', + id: 'freshness-expires-age-slow-date', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Date', -10], + ['Expires', 10], + ['Age', '25'] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response when the `Age` header is greater than its `Expires` minus `Date`, and `Date` is fast', + id: 'freshness-expires-age-fast-date', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Date', 10], + ['Expires', 20], + ['Age', '15'] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/expires-parse.mjs b/test/fixtures/cache-tests/tests/expires-parse.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/expires-parse.mjs @@ -0,0 +1,301 @@ +export default + +{ + name: 'Expires Parsing', + id: 'expires-parse', + description: 'These tests check how caches parse the `Expires` response header.', + spec_anchors: ['field.expires'], + tests: [ + { + name: 'An optimal HTTP cache reuses a response with an `Expires` that is exactly 32 bits', + id: 'freshness-expires-32bit', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Tue, 19 Jan 2038 14:14:08 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with an `Expires` that is far in the future', + id: 'freshness-expires-far-future', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: 
[ + ['Expires', 'Sun, 21 Nov 2286 04:46:39 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with a future `Expires` in obsolete RFC 850 format', + id: 'freshness-expires-rfc850', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thursday, 18-Aug-50 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with a future `Expires` in ANSI C\'s asctime() format', + id: 'freshness-expires-ansi-c', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu Aug 8 02:01:18 2050', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with a future `Expires` using wrong case (weekday)', + id: 'freshness-expires-wrong-case-weekday', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'THU, 18 Aug 2050 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with a future `Expires` using wrong case (month)', + id: 'freshness-expires-wrong-case-month', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 AUG 2050 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a response with a future `Expires` using wrong case (tz)', + id: 'freshness-expires-wrong-case-tz', + kind: 'optimal', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 02:01:18 gMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (UTC)', + id: 'freshness-expires-invalid-utc', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 02:01:18 UTC', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (other tz)', + id: 'freshness-expires-invalid-aest', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 02:01:18 AEST', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (two-digit year)', + id: 'freshness-expires-invalid-2-digit-year', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 50 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (missing comma)', + id: 'freshness-expires-invalid-no-comma', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu 18 
Aug 2050 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (multiple spaces)', + id: 'freshness-expires-invalid-multiple-spaces', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (date dashes)', + id: 'freshness-expires-invalid-date-dashes', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18-Aug-2050 02:01:18 GMT', false], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (time periods)', + id: 'freshness-expires-invalid-time-periods', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 02.01.18 GMT', false], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (1-digit hour)', + id: 'freshness-expires-invalid-1-digit-hour', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 2:01:18 GMT', false], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse a response with an invalid `Expires` (multiple lines)', + id: 'freshness-expires-invalid-multiple-lines', + depends_on: ['freshness-expires-future'], + requests: [ + { + response_headers: [ + ['Expires', 'Thu, 18 Aug 2050 2:01:18 GMT', false], + ['Expires', 'Thu, 18 Aug 2050 2:01:19 GMT', false], + ['Date', 0] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/headers.mjs b/test/fixtures/cache-tests/tests/headers.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/headers.mjs @@ -0,0 +1,77 @@ +import * as templates from './lib/templates.mjs' +import * as utils from './lib/utils.mjs' +import headerList from './lib/header-list.mjs' + +const tests = [] + +tests.push({ + name: '`Connection` header must inhibit an HTTP cache from storing listed headers', + id: 'headers-omit-headers-listed-in-Connection', + kind: 'required', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({ + response_headers: [ + ['Connection', 'a, b', false], + ['a', '1', false], + ['b', '2', false], + ['c', '3', false] + ] + }), + { + expected_type: 'cached', + expected_response_headers: [['c', '3']], + expected_response_headers_missing: ['a', 'b'], + setup_tests: ['expected_type', 'expected_response_headers'] + } + ] +}) + +function checkStoreHeader (config) { + const id = `store-${config.name}` + const value = 'valB' in config ? config.valB : utils.httpContent(`${config.name}-store-value`) + const storeHeader = 'noStore' in config ? !config.noStore : true + const requirement = storeHeader ? 'must' : 'must not' + const expectedHeaders = storeHeader ? [[config.name, value]] : [] + const unexpectedHeaders = storeHeader ? 
[] : [[config.name, value]] + + const respHeaders = [ + ['Date', 0], + [config.name, value, storeHeader] + ] + if (config.name !== 'Cache-Control') { + respHeaders.push(['Cache-Control', 'max-age=3600']) + } + + tests.push({ + name: `HTTP cache ${requirement} store \`${config.name}\` header field`, + id: `headers-${id}`, + kind: 'required', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: respHeaders, + setup: true, + pause_after: true, + check_body: 'checkBody' in config ? config.checkBody : true + }, + { + expected_type: 'cached', + expected_response_headers: expectedHeaders, + expected_response_headers_missing: unexpectedHeaders, + setup_tests: ['expected_type'], + check_body: 'checkBody' in config ? config.checkBody : true + } + ] + }) +} + +headerList.forEach(checkStoreHeader) + +export default { + name: 'Storing Header Fields', + id: 'headers', + description: 'These tests examine how caches store headers in responses.', + spec_anchors: ['storing.fields'], + tests +} diff --git a/test/fixtures/cache-tests/tests/heuristic-freshness.mjs b/test/fixtures/cache-tests/tests/heuristic-freshness.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/heuristic-freshness.mjs @@ -0,0 +1,95 @@ +import * as utils from './lib/utils.mjs' + +const tests = [] + +function checkStatus (status) { + const succeed = status[0] + const code = status[1] + const phrase = status[2] + let body = status[3] + if (body === undefined) { + body = utils.httpContent(code) + } + const extra = status[4] || '' + const extraHdr = status[5] + const specAnchors = status[6] || [] + let expectedType = 'not_cached' + let desired = 'HTTP cache must not reuse' + if (succeed === true) { + expectedType = 'cached' + desired = 'An optimal HTTP cache should reuse' + } + const responseHeaders = [ + ['Last-Modified', -24 * 60 * 60], + ['Date', 0] + ] + if (extraHdr) { + responseHeaders.push(extraHdr) + } + tests.push({ + name: `${desired} a \`${code} ${phrase}\` response with \`Last-Modified\` based upon heuristic freshness ${extra}`, + id: `heuristic-${code}-${expectedType}`, + kind: succeed ? 
'optimal' : 'required', + spec_anchors: specAnchors, + requests: [{ + response_status: [code, phrase], + response_headers: responseHeaders, + response_body: body, + setup: true + }, { + expected_type: expectedType, + response_status: [code, phrase], + response_body: body + }] + }) +} + +[ + [true, 200, 'OK'], + [false, 201, 'Created'], + [false, 202, 'Accepted'], + [true, 203, 'Non-Authoritative Information'], + [true, 204, 'No Content', null], + [false, 403, 'Forbidden'], + [true, 404, 'Not Found'], + [true, 405, 'Method Not Allowed'], + [true, 410, 'Gone'], + [true, 414, 'URI Too Long'], + [true, 501, 'Not Implemented'], + [false, 502, 'Bad Gateway'], + [false, 503, 'Service Unavailable'], + [false, 504, 'Gateway Timeout'], + [false, 599, 'Unknown', undefined, 'when `Cache-Control: public` is not present', undefined, ['cache-response-directive.public']], + [true, 599, 'Unknown', undefined, 'when `Cache-Control: public` is present', ['Cache-Control', 'public'], ['cache-response-directive.public']] +].forEach(checkStatus) + +function checkHeuristic (delta) { + tests.push({ + name: `Does HTTP cache consider a \`Last-Modified\` ${delta} seconds ago heuristically fresh?`, + id: `heuristic-delta-${delta}`, + kind: 'check', + requests: [{ + response_headers: [ + ['Last-Modified', -delta], + ['Date', 0] + ], + setup: true, + pause_after: true + }, + { + expected_type: 'cached' + }] + }) +} + +[ + 5, 10, 30, 60, 300, 600, 1200, 1800, 3600, 3600 * 12, 3600 * 24 +].forEach(checkHeuristic) + +export default { + name: 'Heuristic Freshness', + id: 'heuristic', + description: 'These tests check how caches handle heuristic freshness.', + spec_anchors: ['heuristic.freshness'], + tests +} diff --git a/test/fixtures/cache-tests/tests/index.mjs b/test/fixtures/cache-tests/tests/index.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/index.mjs @@ -0,0 +1,26 @@ +import ccParse from './cc-parse.mjs' +import ccRequest from './cc-request.mjs' +import ccResponse from './cc-response.mjs' +import ccFreshness from './cc-freshness.mjs' +import ageParse from './age-parse.mjs' +import pragma from './pragma.mjs' +import expiresParse from './expires-parse.mjs' +import expires from './expires-freshness.mjs' +import stale from './stale.mjs' +import heuristic from './heuristic-freshness.mjs' +import methods from './method.mjs' +import statuses from './status.mjs' +import vary from './vary.mjs' +import varyParse from './vary-parse.mjs' +import conditionalLm from './conditional-lm.mjs' +import conditionalEtag from './conditional-etag.mjs' +import headers from './headers.mjs' +import update304 from './update304.mjs' +import updateHead from './updateHead.mjs' +import invalidation from './invalidation.mjs' +import partial from './partial.mjs' +import auth from './authorization.mjs' +import other from './other.mjs' +import cdncc from './cdn-cache-control.mjs' + +export default [ccFreshness, ccParse, ageParse, expires, expiresParse, ccResponse, stale, heuristic, methods, statuses, ccRequest, pragma, vary, varyParse, conditionalLm, conditionalEtag, headers, update304, updateHead, invalidation, partial, auth, other, cdncc] diff --git a/test/fixtures/cache-tests/tests/invalidation.mjs b/test/fixtures/cache-tests/tests/invalidation.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/invalidation.mjs @@ -0,0 +1,121 @@ +import { makeTemplate, fresh } from './lib/templates.mjs' + +const contentLocation = makeTemplate({ + filename: 'content_location_target', + response_headers: [ + 
['Cache-Control', 'max-age=100000'], + ['Last-Modified', 0], + ['Date', 0] + ] +}) + +const location = makeTemplate({ + filename: 'location_target', + response_headers: [ + ['Cache-Control', 'max-age=100000'], + ['Last-Modified', 0], + ['Date', 0] + ] +}) + +const lclResponse = makeTemplate({ + response_headers: [ + ['Location', 'location_target'], + ['Content-Location', 'content_location_target'] + ], + magic_locations: true +}) + +const tests = [] + +function checkInvalidation (method) { + tests.push({ + name: `HTTP cache must invalidate the URL after a successful response to a \`${method}\` request`, + id: `invalidate-${method}`, + depends_on: ['freshness-max-age'], + requests: [ + fresh({}), { + request_method: method, + request_body: 'abc', + setup: true + }, { + expected_type: 'not_cached' + } + ] + }) + tests.push({ + name: `An optimal HTTP cache does not invalidate the URL after a failed response to a \`${method}\` request`, + id: `invalidate-${method}-failed`, + kind: 'optimal', + depends_on: [`invalidate-${method}`], + requests: [ + fresh({}), { + request_method: method, + request_body: 'abc', + response_status: [500, 'Internal Server Error'], + setup: true + }, { + expected_type: 'cached' + } + ] + }) +} + +function checkLocationInvalidation (method) { + tests.push({ + name: `Does HTTP cache invalidate \`Location\` URL after a successful response to a \`${method}\` request?`, + id: `invalidate-${method}-location`, + kind: 'check', + depends_on: [`invalidate-${method}`], + requests: [ + location({ + setup: true + }), lclResponse({ + request_method: 'POST', + request_body: 'abc', + setup: true + }), location({ + expected_type: 'not_cached' + }) + ] + }) +} + +function checkClInvalidation (method) { + tests.push({ + name: `Does HTTP cache must invalidate \`Content-Location\` URL after a successful response to a \`${method}\` request?`, + id: `invalidate-${method}-cl`, + kind: 'check', + depends_on: [`invalidate-${method}`], + requests: [ + contentLocation({ + setup: true + }), lclResponse({ + request_method: method, + request_body: 'abc', + setup: true + }), contentLocation({ + expected_type: 'not_cached' + }) + ] + }) +} + +const methods = [ + 'POST', + 'PUT', + 'DELETE', + 'M-SEARCH' +] + +methods.forEach(checkInvalidation) +methods.forEach(checkLocationInvalidation) +methods.forEach(checkClInvalidation) + +export default { + name: 'Cache Invalidation', + id: 'invalidation', + description: 'These tests check how caches support invalidation, including when it is triggered by the `Location` and `Content-Location` response headers.', + spec_anchors: ['invalidation'], + tests +} diff --git a/test/fixtures/cache-tests/tests/lib/header-list.mjs b/test/fixtures/cache-tests/tests/lib/header-list.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/lib/header-list.mjs @@ -0,0 +1,135 @@ +export default [ + { + name: 'Test-Header', + reqUpdate: true + }, + { + name: 'X-Test-Header', + reqUpdate: true + }, + { + name: 'Content-Foo', + reqUpdate: true + }, + { + name: 'X-Content-Foo', + reqUpdate: true + }, + { + name: 'Cache-Control', + valA: 'max-age=1', + valB: 'max-age=3600', + reqUpdate: true + }, + { + name: 'Connection', + noStore: true + }, + { + name: 'Content-Encoding' + }, + { + name: 'Content-Length', + valA: '36', + valB: '10', + noUpdate: true, + checkBody: false + }, + { + name: 'Content-Location', + valA: '/foo', + valB: '/bar' + }, + { + name: 'Content-MD5', + valA: 'rL0Y20zC+Fzt72VPzMSk2A==', + valB: 'N7UdGUp1E+RbVvZSTy1R8g==' + }, + { + name: 
'Content-Range' + }, + { + name: 'Content-Security-Policy', + valA: 'default-src \'self\'', + valB: 'default-src \'self\' cdn.example.com' + }, + { + name: 'Content-Type', + valA: 'text/plain', + valB: 'text/plain;charset=utf-8' + }, + { + name: 'Clear-Site-Data', + valA: 'cache', + valB: 'cookies' + }, + { + name: 'ETag', + valA: '"abcdef"', + valB: '"ghijkl"' + }, + { + name: 'Expires', + valA: 'Fri, 01 Jan 2038 01:01:01 GMT', + valB: 'Mon, 11 Jan 2038 11:11:11 GMT' + }, + { + name: 'Keep-Alive', + noStore: true + }, + { + name: 'Proxy-Authenticate', + noStore: true + }, + { + name: 'Proxy-Authentication-Info', + noStore: true + }, + { + name: 'Proxy-Authorization', + noStore: true + }, + { + name: 'Proxy-Connection', + noStore: true + }, + { + name: 'Public-Key-Pins' + }, + { + name: 'Set-Cookie', + valA: 'a=b', + valB: 'a=c' + }, + { + name: 'Set-Cookie2', + valA: 'a=b', + valB: 'a=c' + }, + { + name: 'TE', + noStore: true + }, + // { + // name: 'Trailer', + // noStore: true + // }, + { + name: 'Transfer-Encoding', + noStore: true + }, + { + name: 'Upgrade', + noStore: true + }, + { + name: 'X-Frame-Options', + valA: 'deny', + valB: 'sameorigin' + }, + { + name: 'X-XSS-Protection', + valA: '1', + valB: '1; mode=block' + } +] diff --git a/test/fixtures/cache-tests/tests/lib/templates.mjs b/test/fixtures/cache-tests/tests/lib/templates.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/lib/templates.mjs @@ -0,0 +1,74 @@ +/* +makeTemplate(template) + +templates take an optional request object; the template +will be updated with the request object in the following manner: + +- Object members will be assigned from the request +- Array members will be concatonated from the request +- Other members will be updated from the request +*/ +export function makeTemplate (template) { + return function (request) { + return mergeDeep({}, template, request) + } +} + +function isObject (item) { + return (item && typeof item === 'object' && !Array.isArray(item)) +} + +function mergeDeep (target, ...sources) { + if (!sources.length) return target + const source = sources.shift() + + if (isObject(target) && isObject(source)) { + for (const key in source) { + if (isObject(source[key])) { + if (!target[key]) Object.assign(target, { [key]: {} }) + mergeDeep(target[key], source[key]) + } else if (Array.isArray(source[key])) { + if (!target[key]) Object.assign(target, { [key]: [] }) + Object.assign(target, { [key]: target[key].concat(source[key]) }) + } else { + Object.assign(target, { [key]: source[key] }) + } + } + } + + return mergeDeep(target, ...sources) +} + +/* + Templates below are shared between multiple suites; + suite-specific tests should go in that file. 
+*/ + +export const fresh = makeTemplate({ + response_headers: [ + ['Cache-Control', 'max-age=100000'], + ['Date', 0] + ], + setup: true, + pause_after: true +}) + +export const stale = makeTemplate({ + response_headers: [ + ['Expires', -5000], + ['Last-Modified', -100000], + ['Date', 0] + ], + setup: true, + pause_after: true +}) + +export const becomeStale = makeTemplate({ + response_headers: [ + ['Cache-Control', 'max-age=2'], + ['Date', 0], + ['Template-A', '1'] + ], + setup: true, + pause_after: true +}) diff --git a/test/fixtures/cache-tests/tests/lib/utils.mjs b/test/fixtures/cache-tests/tests/lib/utils.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/lib/utils.mjs @@ -0,0 +1,20 @@ +const contentSeed = 1 +const contentStore = {} +export function httpContent (csKey, contentLength = 15) { + if (csKey in contentStore) { + return contentStore[csKey] + } else { + let keySeed = 0 + for (let i = 0; i < csKey.length; i++) { + keySeed += csKey.charCodeAt(i) + } + const contents = [] + for (let i = 0; i < contentLength; ++i) { + const idx = ((i * keySeed * contentSeed) % 26) + 97 + contents.push(String.fromCharCode(idx)) + } + const content = contents.join('') + contentStore[csKey] = content + return content + } +} diff --git a/test/fixtures/cache-tests/tests/method.mjs b/test/fixtures/cache-tests/tests/method.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/method.mjs @@ -0,0 +1,35 @@ +export default + +{ + name: 'Method-related Caching Requirements', + id: 'method', + description: 'These tests check how caches handle different HTTP methods.', + spec_anchors: ['response.cacheability'], + tests: [ + { + name: 'An optimal HTTP cache reuses a stored `POST` response (that has `Content-Location` with the same URL and explicit freshness) for subsequent `GET` requests', + id: 'method-POST', + kind: 'optimal', + requests: [ + { + request_method: 'POST', + request_body: '12345', + request_headers: [ + ['Content-Type', 'text/plain'] + ], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Content-Location', ''], + ['Date', 0] + ], + magic_locations: true, + pause_after: true, + setup: true + }, + { + expected_type: 'cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/other.mjs b/test/fixtures/cache-tests/tests/other.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/other.mjs @@ -0,0 +1,239 @@ +import * as templates from './lib/templates.mjs' +import * as utils from './lib/utils.mjs' + +export default + +{ + name: 'Other Caching Requirements', + id: 'other', + description: 'These tests check miscellaneous HTTP cache behaviours. 
', + tests: [ + { + name: 'HTTP cache must generate an `Age` header field when using a stored response.', + id: 'other-age-gen', + depends_on: ['freshness-max-age'], + spec_anchors: ['field.age', 'constructing.responses.from.caches'], + requests: [ + templates.fresh({}), + { + expected_type: 'cached', + expected_response_headers: [ + ['Age', '>', 2] + ] + } + ] + }, + { + name: 'Does HTTP cache insert an `Age` header field when there is delay generating the response?', + id: 'other-age-delay', + kind: 'check', + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Date', 0] + ], + response_pause: 5, + expected_response_headers: [['age', '>', 0]] + } + ] + }, + { + name: 'HTTP cache must update the `Age` header field when freshness is based upon `Expires`', + id: 'other-age-update-expires', + depends_on: ['freshness-expires-future'], + spec_anchors: ['constructing.responses.from.caches', 'field.age'], + requests: [ + { + response_headers: [ + ['Expires', 30 * 24 * 60 * 60], + ['Date', 0], + ['Age', '30'] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached', + expected_response_headers: [ + ['Age', '>', 32] + ] + } + ] + }, + { + name: 'HTTP cache must update the `Age` header field when freshness is based upon `CC: max-age`', + id: 'other-age-update-max-age', + depends_on: ['freshness-max-age'], + spec_anchors: ['constructing.responses.from.caches', 'field.age'], + requests: [ + templates.fresh({ + response_headers: [ + ['Age', '30'] + ] + }), + { + expected_type: 'cached', + expected_response_headers: [ + ['Age', '>', 32] + ] + } + ] + }, + { + name: 'HTTP cache must not update the `Date` header field', + id: 'other-date-update', + depends_on: ['freshness-max-age'], + spec_anchors: ['field.date'], + requests: [ + templates.fresh({}), + { + expected_type: 'cached', + expected_response_headers: [ + ['Date', 0] + ] + } + ] + }, + { + name: 'HTTP cache must not update the `Date` header field when `Expires` is present', + id: 'other-date-update-expires', + depends_on: ['freshness-expires-future'], + spec_anchors: ['field.date'], + requests: [ + { + response_headers: [ + ['Expires', 30 * 24 * 60 * 60], + ['Date', 0] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached', + expected_response_headers: [ + ['Date', 0] + ] + } + ] + }, + { + name: 'Does HTTP cache leave the `Expires` header field alone?', + id: 'other-date-update-expires-update', + kind: 'check', + depends_on: ['freshness-expires-future'], + spec_anchors: ['field.date'], + requests: [ + { + response_headers: [ + ['Expires', 30 * 24 * 60 * 60], + ['Date', 0] + ], + pause_after: true, + setup: true + }, + { + expected_type: 'cached', + expected_response_headers: [ + ['Expires', 30 * 24 * 60 * 60] + ] + } + ] + }, + { + name: 'Different query arguments must be different cache keys', + id: 'query-args-different', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({ + query_arg: 'test=' + utils.httpContent('query-args-different-1') + }), + { + query_arg: 'test=' + utils.httpContent('query-args-different-2'), + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache should not be affected by the presence of a URL query', + id: 'query-args-same', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({ + query_arg: 'test=' + utils.httpContent('query-args-same') + }), + { + query_arg: 'test=' + utils.httpContent('query-args-same'), + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP 
heuristically cache a response with a `Content-Disposition: attachment` header?', + id: 'other-heuristic-content-disposition-attachment', + kind: 'check', + depends_on: ['heuristic-200-cached'], + requests: [ + { + response_headers: [ + ['Last-Modified', -100000], + ['Date', 0], + ['Content-Disposition', 'attachment; filename=example.txt'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP reuse a fresh response with a `Content-Disposition: attachment` header?', + id: 'other-fresh-content-disposition-attachment', + kind: 'check', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({ + response_headers: [ + ['Content-Disposition', 'attachment; filename=example.txt'] + ] + }), + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a fresh response with a `Set-Cookie` header', + id: 'other-set-cookie', + depends_on: ['freshness-max-age'], + kind: 'optimal', + requests: [ + templates.fresh({ + response_headers: [ + ['Set-Cookie', 'a=b'] + ] + }), + { + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a fresh response when the request has a `Cookie` header', + id: 'other-cookie', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({}), + { + request_headers: [ + ['Cookie', 'a=b'] + ], + expected_type: 'cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/partial.mjs b/test/fixtures/cache-tests/tests/partial.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/partial.mjs @@ -0,0 +1,271 @@ +export default { + name: 'Combining Partial Content', + id: 'partial', + description: 'These tests check how caches handle partial content (also known as `Range` requests).', + spec_anchors: ['combining.responses'], + tests: [ + { + name: 'An optimal HTTP cache stores partial content and reuses it', + id: 'partial-store-partial-reuse-partial', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + request_headers: [ + ['Range', 'bytes=-5'] + ], + response_status: [206, 'Partial Content'], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Content-Range', 'bytes 4-9/10'] + ], + response_body: '01234', + expected_request_headers: [ + ['Range', 'bytes=-5'] + ], + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=-5'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '01234' + } + ] + }, + { + name: 'An optimal HTTP cache stores complete responses and serves smaller ranges from them (byte-range-spec)', + id: 'partial-store-complete-reuse-partial', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'] + ], + response_body: '01234567890', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=0-1'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '01' + } + ] + }, + { + name: 'An optimal HTTP cache stores complete responses and serves smaller ranges from them (absent last-byte-pos)', + id: 'partial-store-complete-reuse-partial-no-last', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'] + ], + response_body: '01234567890', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=1-'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '1234567890' + } + ] + }, + { + name: 'An optimal HTTP cache stores complete 
responses and serves smaller ranges from them (suffix-byte-range-spec)', + id: 'partial-store-complete-reuse-partial-suffix', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'] + ], + response_body: '0123456789A', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=-1'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: 'A' + } + ] + }, + { + name: 'An optimal HTTP cache stores partial responses and serves smaller ranges from them (byte-range-spec)', + id: 'partial-store-partial-reuse-partial-byterange', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + request_headers: [ + ['Range', 'bytes=-5'] + ], + response_status: [206, 'Partial Content'], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Content-Range', 'bytes 4-9/10'] + ], + response_body: '01234', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=6-8'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '234' + } + ] + }, + { + name: 'An optimal HTTP cache stores partial responses and serves smaller ranges from them (absent last-byte-pos)', + id: 'partial-store-partial-reuse-partial-absent', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + request_headers: [ + ['Range', 'bytes=-5'] + ], + response_status: [206, 'Partial Content'], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Content-Range', 'bytes 4-9/10'] + ], + response_body: '01234', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=6-'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '234' + } + ] + }, + { + name: 'An optimal HTTP cache stores partial responses and serves smaller ranges from them (suffix-byte-range-spec)', + id: 'partial-store-partial-reuse-partial-suffix', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + request_headers: [ + ['Range', 'bytes=-5'] + ], + response_status: [206, 'Partial Content'], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Content-Range', 'bytes 4-9/10'] + ], + response_body: '01234', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=-1'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '4' + } + ] + }, + { + name: 'An optimal HTTP cache stores partial content and completes it', + id: 'partial-store-partial-complete', + kind: 'optimal', + depends_on: ['freshness-max-age'], + requests: [ + { + request_headers: [ + ['Range', 'bytes=-5'] + ], + response_status: [206, 'Partial Content'], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Content-Range', 'bytes 0-4/10'] + ], + response_body: '01234', + setup: true + }, + { + expected_request_headers: [ + ['range', 'bytes=5-'] + ] + } + ] + }, + { + name: 'HTTP cache must use header fields from the new response', + id: 'partial-use-headers', + depends_on: ['partial-store-complete-reuse-partial'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['A', '1'] + ], + response_body: '01234567890', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=0-1'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '01', + setup_tests: ['expected_type', 'expected_status', 'expected_response_text'], + response_headers: [ + ['A', '2'] + ] + } + ] + }, + { + name: 'HTTP cache must preserve unupdated header fields from the stored response', + id: 
'partial-use-stored-headers', + depends_on: ['partial-store-complete-reuse-partial'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['A', '1'] + ], + response_body: '01234567890', + setup: true + }, + { + request_headers: [ + ['Range', 'bytes=0-1'] + ], + expected_type: 'cached', + expected_status: 206, + expected_response_text: '01', + setup_tests: ['expected_type', 'expected_status', 'expected_response_text'], + expected_response_headers: [ + ['A', '1'] + ] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/pragma.mjs b/test/fixtures/cache-tests/tests/pragma.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/pragma.mjs @@ -0,0 +1,97 @@ +import * as templates from './lib/templates.mjs' + +export default + +{ + name: 'Pragma', + id: 'pragma', + description: 'These tests check how caches handle the deprecated `Pragma` header in reqeusts and responses. Note that This field is deprecated - it is not required to be supported.', + spec_anchors: ['field.pragma'], + tests: [ + { + name: 'Does HTTP cache use a stored fresh response when request contains `Pragma: no-cache`?', + id: 'pragma-request-no-cache', + kind: 'check', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({}), + { + request_headers: [ + ['Pragma', 'no-cache'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a stored fresh response when request contains `Pragma: unrecognised-extension`?', + id: 'pragma-request-extension', + kind: 'check', + depends_on: ['freshness-max-age'], + requests: [ + templates.fresh({}), + { + request_headers: [ + ['Pragma', 'unrecognised-extension'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a stored and otherwise fresh response when it contains `Pragma: no-cache`?', + id: 'pragma-response-no-cache', + kind: 'check', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Pragma', 'no-cache'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache reuse a stored and heuristically fresh response when it contains `Pragma: no-cache`?', + id: 'pragma-response-no-cache-heuristic', + kind: 'check', + depends_on: ['heuristic-200-cached'], + requests: [ + { + response_headers: [ + ['Date', 0], + ['Last-Modified', -10000], + ['Pragma', 'no-cache'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache use a stored and otherwise fresh response when it contains `Pragma: unrecognised-extension`?', + id: 'pragma-response-extension', + kind: 'check', + depends_on: ['freshness-max-age'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ['Pragma', 'unrecognised-extension'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/stale.mjs b/test/fixtures/cache-tests/tests/stale.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/stale.mjs @@ -0,0 +1,177 @@ +import * as templates from './lib/templates.mjs' + +function makeStaleCheckCC (cc, sharedOnly, value) { + const shared = sharedOnly === true ? 
'Shared ' : '' + return { + name: `${shared}HTTP cache must not serve stale stored response when prohibited by \`Cache-Control: ${cc}\``, + id: `stale-close-${cc}${value || ''}`, + browser_skip: sharedOnly, + depends_on: ['stale-close'], + spec_anchors: [`cache-response-directive.${cc}`], + requests: [ + { + response_headers: [ + ['Cache-Control', `max-age=2, ${cc}${value || ''}`] + ], + setup: true, + pause_after: true + }, + { + disconnect: true, + expected_type: 'not_cached' + } + ] + } +} + +export default { + name: 'Serving Stale', + id: 'stale', + description: 'These tests check how caches serve stale content.', + spec_anchors: ['serving.stale.responses'], + tests: [ + { + name: 'Does HTTP cache serve stale stored response when server closes the connection?', + id: 'stale-close', + depends_on: ['freshness-max-age-stale'], + kind: 'check', + requests: [ + templates.becomeStale({}), + { + disconnect: true, + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache serve stale stored response when server sends a `503 Service Unavailable`?', + id: 'stale-503', + depends_on: ['freshness-max-age-stale'], + kind: 'check', + requests: [ + templates.becomeStale({}), + { + response_status: [503, 'Service Unavailable'], + expected_status: 200, + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal cache serves stale stored response with [`Cache-Control: stale-while-revalidate`](https://httpwg.org/specs/rfc5861.html)', + id: 'stale-while-revalidate', + depends_on: ['freshness-max-age-stale'], + kind: 'optimal', + requests: [ + { + setup: true, + pause_after: true, + response_headers: [ + ['Cache-Control', 'max-age=1, stale-while-revalidate=3600'], + ['ETag', '"abc"'] + ] + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not serve stale stored response after the [`stale-while-revalidate`](https://httpwg.org/specs/rfc5861.html) window', + id: 'stale-while-revalidate-window', + depends_on: ['stale-while-revalidate'], + requests: [ + { + setup: true, + pause_after: true, + response_headers: [ + ['Cache-Control', 'max-age=1, stale-while-revalidate=4'], + ['ETag', '"abc"'] + ] + }, + { + setup: true, + pause_after: true, + expected_type: 'cached' + }, + { + expected_response_headers: [ + ['client-request-count', '3'] + ] + } + ] + }, + { + name: 'Does HTTP cache serve stale stored response when server sends `Cache-Control: stale-if-error` and subsequently closes the connection?', + id: 'stale-sie-close', + depends_on: ['freshness-max-age-stale'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2, stale-if-error=60'] + ], + setup: true, + pause_after: true + }, + { + disconnect: true, + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache serve stale stored response when server sends `Cache-Control: stale-if-error` and subsequently a `503 Service Unavailable`?', + id: 'stale-sie-503', + depends_on: ['freshness-max-age-stale'], + kind: 'check', + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2, stale-if-error=60'] + ], + setup: true, + pause_after: true + }, + { + disconnect: true, + expected_type: 'cached' + } + ] + }, + makeStaleCheckCC('must-revalidate', false), + makeStaleCheckCC('proxy-revalidate', true), + makeStaleCheckCC('no-cache', false), + makeStaleCheckCC('s-maxage', true, '=2'), + { + name: 'Does HTTP cache generate a `Warning` header when using a response that was stored already stale?', + id: 'stale-warning-stored', + kind: 'check', + depends_on: ['stale-close'], + requests: 
[ + templates.stale({}), + { + disconnect: true, + expected_type: 'cached', + expected_response_headers: ['warning'], + setup_tests: ['expected_type'] + } + ] + }, + { + name: 'Does HTTP cache generate a `Warning` header when using a stored response that became stale?', + id: 'stale-warning-become', + kind: 'check', + depends_on: ['stale-close'], + requests: [ + templates.becomeStale({}), + { + disconnect: true, + expected_type: 'cached', + expected_response_headers: ['warning'], + setup_tests: ['expected_type'] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/status.mjs b/test/fixtures/cache-tests/tests/status.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/status.mjs @@ -0,0 +1,118 @@ +import * as templates from './lib/templates.mjs' +import * as utils from './lib/utils.mjs' + +const tests = [] + +function checkStatus (status) { + const code = status[0] + const phrase = status[1] + let body = status[2] + if (body === undefined) { + body = utils.httpContent(code) + } + const is3xx = code > 299 && code < 400 + tests.push({ + name: 'An optimal HTTP cache reuses a fresh `' + code + '` response with explict freshness', + id: `status-${code}-fresh`, + kind: 'optimal', + depends_on: ['freshness-max-age'], + browser_skip: is3xx, + requests: [ + templates.fresh({ + response_status: [code, phrase], + response_body: body, + redirect: 'manual' + }), { + expected_type: 'cached', + response_status: [code, phrase], + redirect: 'manual', + response_body: body + } + ] + }) + tests.push({ + name: 'HTTP cache must not reuse a stale `' + code + '` response with explicit freshness', + id: `status-${code}-stale`, + depends_on: [`status-${code}-fresh`], + browser_skip: is3xx, + requests: [ + templates.stale({ + response_status: [code, phrase], + response_body: body, + redirect: 'manual', + setup: true + }), { + expected_type: 'not_cached', + redirect: 'manual', + response_body: body + } + ] + }) +} +[ + [200, 'OK'], + [203, 'Non-Authoritative Information'], + [204, 'No Content', null], + [299, 'Whatever'], + [301, 'Moved Permanently'], + [302, 'Found'], + [303, 'See Other'], + [307, 'Temporary Redirect'], + [308, 'Permanent Redirect'], + [400, 'Bad Request'], + [404, 'Not Found'], + [410, 'Gone'], + [499, 'Whatever'], + [500, 'Internal Server Error'], + [502, 'Bad Gateway'], + [503, 'Service Unavailable'], + [504, 'Gateway Timeout'], + [599, 'Whatever'] +].forEach(checkStatus) + +tests.push({ + name: 'HTTP cache must not reuse a fresh response with an unrecognised status code and `Cache-Control: no-store, must-understand`', + id: 'status-599-must-understand', + depends_on: ['status-599-fresh'], + spec_anchors: ['cache-response-directive.must-understand'], + requests: [ + { + response_status: [599, 'Whatever'], + response_headers: [ + ['Cache-Control', 'max-age=3600, no-store, must-understand'] + ], + setup: true + }, + { + expected_type: 'not_cached' + } + ] +}) + +tests.push({ + name: 'An optimal HTTP cache reuses a fresh response with a recognised status code and `Cache-Control: no-store, must-understand`', + id: 'status-200-must-understand', + kind: 'optimal', + depends_on: ['status-200-fresh', 'cc-resp-no-store-fresh'], + spec_anchors: ['cache-response-directive.must-understand'], + requests: [ + { + response_status: [200, 'OK'], + response_headers: [ + ['Cache-Control', 'max-age=3600, no-store, must-understand'] + ], + setup: true + }, + { + expected_type: 'cached' + } + ] +}) + +export default { + name: 'Status Code Cacheability', + id: 'status', + description: 
'These tests check to see if a cache will store and reuse various status codes when they have explicit freshness information associated with them.', + spec_anchors: ['response.cacheability'], + tests +} diff --git a/test/fixtures/cache-tests/tests/update304.mjs b/test/fixtures/cache-tests/tests/update304.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/update304.mjs @@ -0,0 +1,124 @@ +import * as utils from './lib/utils.mjs' +import headerList from './lib/header-list.mjs' + +const tests = [] + +// first, check to see that the cache actually returns a stored header +const storedHeader = 'Test-Header' +const valueA = utils.httpContent(`${storedHeader}-value-A`) +const lm1 = 'Wed, 01 Jan 2020 00:00:00 GMT' +tests.push({ + name: `HTTP cache must return stored \`${storedHeader}\` from a \`304\` that omits it`, + id: `304-lm-use-stored-${storedHeader}`, + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=2'], + ['Last-Modified', lm1], + ['Date', 0], + [storedHeader, valueA] + ], + setup: true, + pause_after: true + }, + { + response_headers: [ + ['Last-Modified', lm1], + ['Date', 0] + ], + expected_type: 'lm_validated', + expected_response_headers: [ + [storedHeader, valueA] + ], + setup_tests: ['expected_type'] + } + ] +}) + +// now check headers in the list +function check304 (config) { + if (config.noStore) return + config.valueA = config.valA || utils.httpContent(`${config.name}-value-A`) + config.valueB = config.valB || utils.httpContent(`${config.name}-value-B`) + if (config.noUpdate === true) { + config.expectedValue = config.valueA + config.requirement = 'HTTP cache must not' + config.punctuation = '' + config.kind = 'required' + } else if (config.reqUpdate === true) { + config.expectedValue = config.valueB + config.requirement = 'HTTP cache must' + config.punctuation = '' + config.kind = 'required' + } else { + config.expectedValue = config.valueB + config.requirement = 'Does HTTP cache' + config.punctuation = '?' + config.kind = 'check' + } + config.etagVal = utils.httpContent(`${config.name}-etag-1`) + config.etag = `"${config.etagVal}"` + config.lm = 'Wed, 01 Jan 2020 00:00:00 GMT' + + tests.push({ + name: `${config.requirement} update and return \`${config.name}\` from a \`304\`${config.punctuation}`, + id: `304-etag-update-response-${config.name}`, + kind: config.kind, + depends_on: [`304-lm-use-stored-${storedHeader}`], + requests: makeRequests(config, 'ETag', config.etag) + }) +} + +function makeRequests (config, validatorType, validatorValue) { + return [ + { + response_headers: makeResponse(config, config.valueA, validatorType, validatorValue), + setup: true, + pause_after: true, + check_body: 'checkBody' in config ? config.checkBody : true + }, + { + response_headers: makeResponse(config, config.valueB, validatorType, validatorValue), + expected_type: validatorType === 'ETag' ? 'etag_validated' : 'lm_validated', + setup_tests: ['expected_type'], + expected_response_headers: [ + [config.name, config.expectedValue] + ], + check_body: 'checkBody' in config ? config.checkBody : true + }, + { + response_headers: makeResponse(config, config.expectedValue), + expected_type: 'cached', + setup_tests: ['expected_type'], + expected_response_headers: [ + [config.name, config.expectedValue] + ], + check_body: 'checkBody' in config ? config.checkBody : true + } + ] +} + +function makeResponse (config, value, validatorType, validatorValue) { + const checkHeader = 'noUpdate' in config ? 
!config.noUpdate : true + const responseHeaders = [ + ['Date', 0], + [config.name, value, checkHeader] + ] + if (config.name !== 'Cache-Control') { + responseHeaders.push(['Cache-Control', 'max-age=2']) + } + if (validatorType && validatorType !== config.name) { + responseHeaders.push([validatorType, validatorValue]) + } + return responseHeaders +} + +headerList.forEach(check304) + +export default { + name: 'Update Headers Upon a 304', + id: 'update304', + description: 'These tests check cache behaviour upon receiving a `304 Not Modified` response.', + spec_anchors: ['freshening.responses'], + tests +} diff --git a/test/fixtures/cache-tests/tests/updateHead.mjs b/test/fixtures/cache-tests/tests/updateHead.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/updateHead.mjs @@ -0,0 +1,109 @@ +import * as templates from './lib/templates.mjs' + +export default + +{ + name: 'HEAD updates', + id: 'updateHEAD', + description: 'These tests check how a cache updates stored responses when receiving a `HEAD` response.', + spec_anchors: ['head.effects'], + tests: [ + { + name: 'Does HTTP cache write through a HEAD when stored response is stale?', + id: 'head-writethrough', + kind: 'check', + depends_on: ['freshness-max-age-stale'], + requests: [ + templates.becomeStale({}), + { + request_method: 'HEAD', + expected_method: 'HEAD' + } + ] + }, + { + name: 'Does HTTP cache preserve stored fields not received in a `200` response to a `HEAD`?', + id: 'head-200-retain', + kind: 'check', + depends_on: ['head-writethrough'], + requests: [ + templates.becomeStale({}), + { + request_method: 'HEAD', + expected_method: 'HEAD', + expected_response_headers: [ + ['Template-A', '1'] + ] + } + ] + }, + { + name: 'Does HTTP cache update freshness lifetime recieved in a `200` response to a `HEAD`?', + id: 'head-200-freshness-update', + kind: 'check', + depends_on: ['head-writethrough'], + requests: [ + templates.becomeStale({}), + { + request_method: 'HEAD', + expected_method: 'HEAD', + response_headers: [ + ['Cache-Control', 'max-age=1000'] + ] + }, + { + expected_type: 'cached' + } + ] + }, + { + name: 'Does HTTP cache update stored fields recieved in a `200` response to a `HEAD`?', + id: 'head-200-update', + kind: 'check', + depends_on: ['head-200-freshness-update'], + requests: [ + templates.becomeStale({}), + { + request_method: 'HEAD', + expected_method: 'HEAD', + response_headers: [ + ['Template-A', '2'], + ['Cache-Control', 'max-age=1000'] + ] + }, + { + expected_type: 'cached', + setup_tests: ['expected_type'], + expected_response_headers: [ + ['Template-A', '2'] + ] + } + ] + }, + { + name: 'Does HTTP cache update stored fields recieved in a `410` response to a `HEAD`?', + id: 'head-410-update', + kind: 'check', + depends_on: ['head-200-freshness-update'], + requests: [ + templates.becomeStale({}), + { + request_method: 'HEAD', + expected_method: 'HEAD', + response_status: [410, 'Gone'], + response_headers: [ + ['Template-A', '2'], + ['Cache-Control', 'max-age=1000'] + ] + }, + { + expected_type: 'cached', + setup_tests: ['expected_type'], + expected_response_headers: [ + ['Template-A', '2'] + ] + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/vary-parse.mjs b/test/fixtures/cache-tests/tests/vary-parse.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/vary-parse.mjs @@ -0,0 +1,157 @@ +import { makeTemplate } from './lib/templates.mjs' + +const varyParseSetup = makeTemplate({ + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + 
response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0] + ], + setup: true +}) + +export default { + name: 'Vary Parsing', + id: 'vary-parse', + description: 'These tests check how caches parse the `Vary` response header.', + spec_anchors: ['caching.negotiated.responses'], + tests: [ + { + name: 'HTTP cache must not reuse `Vary` response with a value of `*`', + id: 'vary-syntax-star', + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', '*', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `*, *`', + id: 'vary-syntax-star-star', + depends_on: ['freshness-max-age'], + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', '*, *', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `*, *` on different lines', + id: 'vary-syntax-star-star-lines', + depends_on: ['freshness-max-age'], + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', '*', false], + ['Vary', '*', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `, *`', + id: 'vary-syntax-empty-star', + depends_on: ['freshness-max-age'], + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', ', *', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `, *` on different lines', + id: 'vary-syntax-empty-star-lines', + depends_on: ['freshness-max-age'], + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', '', false], + ['Vary', '*', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `*, Foo`', + id: 'vary-syntax-star-foo', + depends_on: ['freshness-max-age'], + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', '*, Foo', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `Foo, *`', + id: 'vary-syntax-foo-star', + depends_on: ['freshness-max-age'], + requests: [ + varyParseSetup({ + response_headers: [ + ['Vary', 'Foo, *', false] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + } + ] +} diff --git a/test/fixtures/cache-tests/tests/vary.mjs b/test/fixtures/cache-tests/tests/vary.mjs new file mode 100644 --- /dev/null +++ b/test/fixtures/cache-tests/tests/vary.mjs @@ -0,0 +1,470 @@ +import { makeTemplate } from './lib/templates.mjs' +import * as utils from './lib/utils.mjs' + +const varySetup = makeTemplate({ + request_headers: [ + ['Foo', '1'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo'] + ], + setup: true +}) + +const vary2Setup = makeTemplate({ + request_headers: [ + ['Foo', '1'], + ['Bar', 'abc'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo, Bar', false] + ], + setup: true 
+}) + +const vary3Setup = makeTemplate({ + request_headers: [ + ['Foo', '1'], + ['Bar', 'abc'], + ['Baz', '789'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo, Bar, Baz', false] + ], + setup: true +}) + +export default { + name: 'Vary and Cache Keys', + id: 'vary', + description: 'These tests check how caches calculate a cache key using `Vary`.', + spec_anchors: ['caching.negotiated.responses'], + tests: [ + { + name: 'An optimal HTTP cache reuses a `Vary` response when the request matches', + id: 'vary-match', + depends_on: ['freshness-max-age'], + kind: 'optimal', + requests: [ + varySetup({}), + { + request_headers: [ + ['Foo', '1'] + ], + expected_type: 'cached' + } + ] + }, + { + name: "HTTP cache must not reuse `Vary` response when request doesn't match", + id: 'vary-no-match', + depends_on: ['vary-match'], + requests: [ + varySetup({}), + { + request_headers: [ + ['Foo', '2'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response when stored request omits variant request header', + id: 'vary-omit-stored', + depends_on: ['vary-match'], + requests: [ + { + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo'] + ], + setup: true + }, + { + request_headers: [ + ['Foo', '1'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response when presented request omits variant request header', + id: 'vary-omit', + depends_on: ['vary-match'], + requests: [ + varySetup({}), + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache can store two different variants', + id: 'vary-invalidate', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + varySetup({ + response_body: utils.httpContent('foo_1') + }), + { + request_headers: [ + ['Foo', '2'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo'] + ], + expected_type: 'not_cached', + response_body: utils.httpContent('foo_2'), + setup: true + }, + { + request_headers: [ + ['Foo', '1'] + ], + response_body: utils.httpContent('foo_1'), + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache should not include headers not listed in `Vary` in the cache key', + id: 'vary-cache-key', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + varySetup({ + request_headers: [ + ['Other', '2'] + ] + }), + { + request_headers: [ + ['Foo', '1'], + ['Other', '3'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a two-way `Vary` response when request matches', + id: 'vary-2-match', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + vary2Setup({}), + { + request_headers: [ + ['Foo', '1'], + ['Bar', 'abc'] + ], + expected_type: 'cached' + } + ] + }, + { + name: "HTTP cache must not reuse two-way `Vary` response when request doesn't match", + id: 'vary-2-no-match', + depends_on: ['vary-2-match'], + requests: [ + vary2Setup({}), + { + request_headers: [ + ['Foo', '2'], + ['Bar', 'abc'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'HTTP cache must not reuse two-way `Vary` response when request omits variant request header', + id: 'vary-2-match-omit', + depends_on: ['vary-2-match'], + requests: [ + vary2Setup({}), + { + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a three-way `Vary` response when request 
matches', + id: 'vary-3-match', + depends_on: ['vary-2-match'], + kind: 'optimal', + requests: [ + vary3Setup({}), + { + request_headers: [ + ['Foo', '1'], + ['Bar', 'abc'], + ['Baz', '789'] + ], + expected_type: 'cached' + } + ] + }, + { + name: "HTTP cache must not reuse three-way `Vary` response when request doesn't match", + id: 'vary-3-no-match', + depends_on: ['vary-3-match'], + requests: [ + vary3Setup({}), + { + request_headers: [ + ['Foo', '2'], + ['Bar', 'abc'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: "HTTP cache must not reuse three-way `Vary` response when request doesn't match, regardless of header order", + id: 'vary-3-order', + depends_on: ['vary-3-match'], + requests: [ + vary3Setup({}), + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'], + ['Bar', 'abcde'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache reuses a three-way `Vary` response when both request and the original request omited a variant header', + id: 'vary-3-omit', + depends_on: ['vary-3-match'], + kind: 'optimal', + requests: [ + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Date', 0], + ['Last-Modified', -3000], + ['Vary', 'Foo, Bar, Baz', false] // FIXME: allow whitespace changes + ], + setup: true + }, + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'HTTP cache must not reuse `Vary` response with a value of `*`', + id: 'vary-star', + requests: [ + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', '*'] + ], + setup: true + }, + { + request_headers: [ + ['Foo', '1'], + ['Baz', '789'] + ], + expected_type: 'not_cached' + } + ] + }, + { + name: 'An optimal HTTP cache normalises unknown selecting headers by combining fields', + id: 'vary-normalise-combine', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + { + request_headers: [ + ['Foo', '1, 2'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo'] + ], + setup: true + }, + { + request_headers: [ + ['Foo', '1'], + ['Foo', '2'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache normalises `Accept-Language` by ignoring language order', + id: 'vary-normalise-lang-order', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + { + request_headers: [ + ['Accept-Language', 'en, de'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Accept-Language'] + ], + setup: true + }, + { + request_headers: [ + ['Accept-Language', 'de, en'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache normalises `Accept-Language` by ignoring language case', + id: 'vary-normalise-lang-case', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + { + request_headers: [ + ['Accept-Language', 'en, de'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Accept-Language'] + ], + setup: true + }, + { + request_headers: [ + ['Accept-Language', 'eN, De'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache normalises `Accept-Language` by ignoring whitespace', + id: 'vary-normalise-lang-space', + depends_on: ['vary-match'], + kind: 'optimal', 
+ requests: [ + { + request_headers: [ + ['Accept-Language', 'en, de'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Accept-Language'] + ], + setup: true + }, + { + request_headers: [ + ['Accept-Language', ' en , de'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache selects `Content-Language` by using the qvalue on `Accept-Language`', + id: 'vary-normalise-lang-select', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + { + request_headers: [ + ['Accept-Language', 'en, de'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Accept-Language'], + ['Content-Language', 'de'] + ], + setup: true + }, + { + request_headers: [ + ['Accept-Language', 'fr;q=0.5, de;q=1.0'] + ], + expected_type: 'cached' + } + ] + }, + { + name: 'An optimal HTTP cache normalises unknown selecting headers by removing whitespace', + id: 'vary-normalise-space', + depends_on: ['vary-match'], + kind: 'optimal', + requests: [ + { + request_headers: [ + ['Foo', '1,2'] + ], + response_headers: [ + ['Cache-Control', 'max-age=5000'], + ['Last-Modified', -3000], + ['Date', 0], + ['Vary', 'Foo'] + ], + setup: true + }, + { + request_headers: [ + ['Foo', ' 1, 2 '] + ], + expected_type: 'cached' + } + ] + } + ] +} diff --git a/test/interceptors/cache.js b/test/interceptors/cache.js --- a/test/interceptors/cache.js +++ b/test/interceptors/cache.js @@ -154,6 +154,73 @@ describe('Cache Interceptor', () => { strictEqual(await response.body.text(), 'asd') }) + test('vary headers are present in revalidation request', async () => { + const clock = FakeTimers.install({ + shouldClearNativeTimers: true + }) + + let requestsToOrigin = 0 + let revalidationRequests = 0 + const server = createServer((req, res) => { + res.setHeader('date', 0) + res.setHeader('cache-control', 's-maxage=1, stale-while-revalidate=10') + + if (requestsToOrigin === 0) { + requestsToOrigin++ + res.setHeader('vary', 'a, b') + res.setHeader('etag', '"asd"') + res.end('asd') + } else { + revalidationRequests++ + notEqual(req.headers['if-none-match'], undefined) + notEqual(req.headers['a'], undefined) + notEqual(req.headers['b'], undefined) + + res.statusCode = 304 + res.end() + } + }).listen(0) + + const client = new Client(`http://localhost:${server.address().port}`) + .compose(interceptors.cache()) + + after(async () => { + server.close() + await client.close() + clock.uninstall() + }) + + await once(server, 'listening') + + strictEqual(requestsToOrigin, 0) + strictEqual(revalidationRequests, 0) + + const request = { + origin: 'localhost', + path: '/', + method: 'GET', + headers: { + a: 'asd', + b: 'asd' + } + } + + { + const response = await client.request(request) + strictEqual(requestsToOrigin, 1) + strictEqual(await response.body.text(), 'asd') + } + + clock.tick(1500) + + { + const response = await client.request(request) + strictEqual(requestsToOrigin, 1) + strictEqual(revalidationRequests, 1) + strictEqual(await response.body.text(), 'asd') + } + }) + test('revalidates request when needed', async () => { let requestsToOrigin = 0 @@ -162,6 +229,7 @@ describe('Cache Interceptor', () => { }) const server = createServer((req, res) => { + res.setHeader('date', 0) res.setHeader('cache-control', 'public, s-maxage=1, stale-while-revalidate=10') requestsToOrigin++ @@ -228,6 +296,7 @@ describe('Cache Interceptor', () => { }) const server = createServer((req, res) => { + res.setHeader('date', 
0) res.setHeader('cache-control', 'public, s-maxage=1, stale-while-revalidate=10') requestsToOrigin++ @@ -373,8 +442,10 @@ describe('Cache Interceptor', () => { }) test('necessary headers are stripped', async () => { + let requestsToOrigin = 0 const server = createServer((req, res) => { - res.setHeader('cache-control', 'public, s-maxage=1, stale-while-revalidate=10, no-cache=should-be-stripped') + requestsToOrigin++ + res.setHeader('cache-control', 'public, s-maxage=10, no-cache=should-be-stripped') res.setHeader('should-be-stripped', 'hello world') res.setHeader('should-not-be-stripped', 'dsa321') @@ -398,8 +469,77 @@ describe('Cache Interceptor', () => { } // Send initial request. This should reach the origin - const response = await client.request(request) - strictEqual(await response.body.text(), 'asd') + { + const response = await client.request(request) + equal(requestsToOrigin, 1) + strictEqual(await response.body.text(), 'asd') + equal(response.headers['should-be-stripped'], 'hello world') + equal(response.headers['should-not-be-stripped'], 'dsa321') + } + + // Send second request, this should hit the cache + { + const response = await client.request(request) + equal(requestsToOrigin, 1) + strictEqual(await response.body.text(), 'asd') + equal(response.headers['should-be-stripped'], undefined) + equal(response.headers['should-not-be-stripped'], 'dsa321') + } + }) + + test('necessary headers are stripped (quotes)', async () => { + let requestsToOrigin = 0 + const server = createServer((_, res) => { + requestsToOrigin++ + res.setHeader('connection', 'a, b') + res.setHeader('a', '123') + res.setHeader('b', '123') + res.setHeader('cache-control', 's-maxage=3600, no-cache="should-be-stripped, should-be-stripped2"') + res.setHeader('should-be-stripped', 'hello world') + res.setHeader('should-be-stripped2', 'hello world') + res.setHeader('should-not-be-stripped', 'dsa321') + + res.end('asd') + }).listen(0) + + const client = new Client(`http://localhost:${server.address().port}`) + .compose(interceptors.cache()) + + after(async () => { + server.close() + await client.close() + }) + + await once(server, 'listening') + + const request = { + origin: 'localhost', + method: 'GET', + path: '/' + } + + // Send initial request. 
This should reach the origin + { + const response = await client.request(request) + equal(requestsToOrigin, 1) + strictEqual(await response.body.text(), 'asd') + equal(response.headers['a'], '123') + equal(response.headers['b'], '123') + equal(response.headers['should-be-stripped'], 'hello world') + equal(response.headers['should-be-stripped2'], 'hello world') + equal(response.headers['should-not-be-stripped'], 'dsa321') + } + + // Send second request, this should hit the cache + { + const response = await client.request(request) + equal(requestsToOrigin, 1) + strictEqual(await response.body.text(), 'asd') + equal(response.headers['a'], undefined) + equal(response.headers['b'], undefined) + equal(response.headers['should-be-stripped'], undefined) + equal(response.headers['should-be-stripped2'], undefined) + } }) test('requests w/ unsafe methods never get cached', async () => { @@ -441,6 +581,8 @@ describe('Cache Interceptor', () => { let requestsToOrigin = 0 let revalidationRequests = 0 const server = createServer((req, res) => { + res.setHeader('date', 0) + if (req.headers['if-none-match']) { revalidationRequests++ if (req.headers['if-none-match'] !== '"asd"') { @@ -536,6 +678,8 @@ describe('Cache Interceptor', () => { let requestsToOrigin = 0 const server = createServer((_, res) => { + res.setHeader('date', 0) + requestsToOrigin++ if (requestsToOrigin === 1) { // First request @@ -615,6 +759,45 @@ describe('Cache Interceptor', () => { } }) + test('cacheByDefault', async () => { + let requestsToOrigin = 0 + const server = createServer((_, res) => { + requestsToOrigin++ + res.end('asd') + }).listen(0) + + after(() => server.close()) + + const client = new Client(`http://localhost:${server.address().port}`) + .compose(interceptors.cache({ + cacheByDefault: 3600 + })) + + equal(requestsToOrigin, 0) + + // Should hit the origin + { + const res = await client.request({ + origin: 'localhost', + path: '/', + method: 'GET' + }) + equal(requestsToOrigin, 1) + equal(await res.body.text(), 'asd') + } + + // Should hit the cache + { + const res = await client.request({ + origin: 'localhost', + path: '/', + method: 'GET' + }) + equal(requestsToOrigin, 1) + equal(await res.body.text(), 'asd') + } + }) + describe('Client-side directives', () => { test('max-age', async () => { const clock = FakeTimers.install({ @@ -696,6 +879,7 @@ describe('Cache Interceptor', () => { }) const server = createServer((req, res) => { + res.setHeader('date', 0) res.setHeader('cache-control', 'public, s-maxage=1, stale-while-revalidate=10') if (requestsToOrigin === 1) { @@ -768,6 +952,7 @@ describe('Cache Interceptor', () => { const server = createServer((req, res) => { requestsToOrigin++ + res.setHeader('date', 0) res.setHeader('cache-control', 'public, s-maxage=10') res.end('asd') }).listen(0) @@ -988,6 +1173,8 @@ describe('Cache Interceptor', () => { let requestsToOrigin = 0 const server = createServer((_, res) => { + res.setHeader('date', 0) + requestsToOrigin++ if (requestsToOrigin === 1) { // First request, send stale-while-revalidate to keep the value in the cache
SQL cache should always have an entry size limit. We can't store more than 2GB.
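The report above is terse, so here is a minimal sketch of what a per-entry size guard could look like when a cache store buffers a response body. This is illustrative only, not the store's actual implementation; the `maxEntrySize` option name and the 5 MB limit are assumptions made for the example.

```js
'use strict'

const { Writable } = require('node:stream')

// Illustrative sketch only: stop buffering a response body once it exceeds a
// configured per-entry limit, so a single entry can never approach the ~2GB
// Buffer ceiling. `maxEntrySize` is a hypothetical option name.
function createGuardedWriteStream (maxEntrySize, onTooLarge) {
  let written = 0
  const chunks = []

  return new Writable({
    write (chunk, _encoding, callback) {
      written += chunk.byteLength
      if (written > maxEntrySize) {
        onTooLarge() // the caller decides to skip caching this response
        callback(new Error('cache entry exceeds maxEntrySize'))
        return
      }
      chunks.push(chunk)
      callback()
    }
  })
}

// Usage: refuse to cache anything larger than 5 MB.
const guarded = createGuardedWriteStream(5 * 1024 * 1024, () => {
  console.log('entry too large, skipping cache')
})
guarded.write(Buffer.from('hello world'))
```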
2024-11-18T21:05:55Z
7
nodejs/undici
3,758
nodejs__undici-3758
[ "3718" ]
24df4a5cb0d8718bdb179531e04b869f0d62a03f
diff --git a/lib/cache/memory-cache-store.js b/lib/cache/memory-cache-store.js --- a/lib/cache/memory-cache-store.js +++ b/lib/cache/memory-cache-store.js @@ -91,6 +91,7 @@ class MemoryCacheStore { statusCode: entry.statusCode, rawHeaders: entry.rawHeaders, body: entry.body, + etag: entry.etag, cachedAt: entry.cachedAt, staleAt: entry.staleAt, deleteAt: entry.deleteAt diff --git a/lib/handler/cache-handler.js b/lib/handler/cache-handler.js --- a/lib/handler/cache-handler.js +++ b/lib/handler/cache-handler.js @@ -4,7 +4,8 @@ const util = require('../core/util') const DecoratorHandler = require('../handler/decorator-handler') const { parseCacheControlHeader, - parseVaryHeader + parseVaryHeader, + isEtagUsable } = require('../util/cache') const { nowAbsolute } = require('../util/timers.js') @@ -136,7 +137,10 @@ class CacheHandler extends DecoratorHandler { cacheControlDirectives ) - this.#writeStream = this.#store.createWriteStream(this.#cacheKey, { + /** + * @type {import('../../types/cache-interceptor.d.ts').default.CacheValue} + */ + const value = { statusCode, statusMessage, rawHeaders: strippedHeaders, @@ -144,7 +148,13 @@ class CacheHandler extends DecoratorHandler { cachedAt: now, staleAt, deleteAt - }) + } + + if (typeof headers.etag === 'string' && isEtagUsable(headers.etag)) { + value.etag = headers.etag + } + + this.#writeStream = this.#store.createWriteStream(this.#cacheKey, value) if (this.#writeStream) { const handler = this diff --git a/lib/interceptor/cache.js b/lib/interceptor/cache.js --- a/lib/interceptor/cache.js +++ b/lib/interceptor/cache.js @@ -152,7 +152,8 @@ module.exports = (opts = {}) => { ...opts, headers: { ...opts.headers, - 'if-modified-since': new Date(result.cachedAt).toUTCString() + 'if-modified-since': new Date(result.cachedAt).toUTCString(), + etag: result.etag } }, new CacheRevalidationHandler( diff --git a/lib/util/cache.js b/lib/util/cache.js --- a/lib/util/cache.js +++ b/lib/util/cache.js @@ -201,6 +201,40 @@ function parseVaryHeader (varyHeader, headers) { return output } +/** + * Note: this deviates from the spec a little. Empty etags ("", W/"") are valid, + * however, including them in cached resposnes serves little to no purpose. + * + * @see https://www.rfc-editor.org/rfc/rfc9110.html#name-etag + * + * @param {string} etag + * @returns {boolean} + */ +function isEtagUsable (etag) { + if (etag.length <= 2) { + // Shortest an etag can be is two chars (just ""). This is where we deviate + // from the spec requiring a min of 3 chars however + return false + } + + if (etag[0] === '"' && etag[etag.length - 1] === '"') { + // ETag: ""asd123"" or ETag: "W/"asd123"", kinda undefined behavior in the + // spec. Some servers will accept these while others don't. 
+ // ETag: "asd123" + return !(etag[1] === '"' || etag.startsWith('"W/')) + } + + if (etag.startsWith('W/"') && etag[etag.length - 1] === '"') { + // ETag: W/"", also where we deviate from the spec & require a min of 3 + // chars + // ETag: for W/"", W/"asd123" + return etag.length !== 4 + } + + // Anything else + return false +} + /** * @param {unknown} store * @returns {asserts store is import('../../types/cache-interceptor.d.ts').default.CacheStore} @@ -244,6 +278,7 @@ module.exports = { makeCacheKey, parseCacheControlHeader, parseVaryHeader, + isEtagUsable, assertCacheMethods, assertCacheStore } diff --git a/types/cache-interceptor.d.ts b/types/cache-interceptor.d.ts --- a/types/cache-interceptor.d.ts +++ b/types/cache-interceptor.d.ts @@ -30,6 +30,7 @@ declare namespace CacheHandler { statusMessage: string rawHeaders: Buffer[] vary?: Record<string, string | string[]> + etag?: string cachedAt: number staleAt: number deleteAt: number
diff --git a/test/cache-interceptor/cache-stores.js b/test/cache-interceptor/cache-stores.js --- a/test/cache-interceptor/cache-stores.js +++ b/test/cache-interceptor/cache-stores.js @@ -58,6 +58,7 @@ function cacheStoreTests (CacheStore) { deepStrictEqual(await readResponse(readResult), { ...requestValue, + etag: undefined, body: requestBody }) @@ -94,6 +95,7 @@ function cacheStoreTests (CacheStore) { notEqual(readResult, undefined) deepStrictEqual(await readResponse(readResult), { ...anotherValue, + etag: undefined, body: anotherBody }) }) @@ -127,6 +129,7 @@ function cacheStoreTests (CacheStore) { const readResult = store.get(request) deepStrictEqual(await readResponse(readResult), { ...requestValue, + etag: undefined, body: requestBody }) }) @@ -198,6 +201,7 @@ function cacheStoreTests (CacheStore) { const { vary, ...responseValue } = requestValue deepStrictEqual(await readResponse(readStream), { ...responseValue, + etag: undefined, body: requestBody }) diff --git a/test/cache-interceptor/utils.js b/test/cache-interceptor/utils.js --- a/test/cache-interceptor/utils.js +++ b/test/cache-interceptor/utils.js @@ -1,8 +1,8 @@ 'use strict' const { describe, test } = require('node:test') -const { deepStrictEqual } = require('node:assert') -const { parseCacheControlHeader, parseVaryHeader } = require('../../lib/util/cache') +const { deepStrictEqual, equal } = require('node:assert') +const { parseCacheControlHeader, parseVaryHeader, isEtagUsable } = require('../../lib/util/cache') describe('parseCacheControlHeader', () => { test('all directives are parsed properly when in their correct format', () => { @@ -215,3 +215,28 @@ describe('parseVaryHeader', () => { }) }) }) + +describe('isEtagUsable', () => { + const valuesToTest = { + // Invalid etags + '': false, + asd: false, + '"W/"asd""': false, + '""asd""': false, + + // Valid etags + '"asd"': true, + 'W/"ads"': true, + + // Spec deviations + '""': false, + 'W/""': false + } + + for (const key in valuesToTest) { + const expectedValue = valuesToTest[key] + test(`\`${key}\` = ${expectedValue}`, () => { + equal(isEtagUsable(key), expectedValue) + }) + } +}) diff --git a/test/interceptors/cache.js b/test/interceptors/cache.js --- a/test/interceptors/cache.js +++ b/test/interceptors/cache.js @@ -223,6 +223,74 @@ describe('Cache Interceptor', () => { strictEqual(await response.body.text(), 'asd123') }) + test('revalidates request w/ etag when provided', async (t) => { + let requestsToOrigin = 0 + + const clock = FakeTimers.install({ + shouldClearNativeTimers: true + }) + tick(0) + + const server = createServer((req, res) => { + res.setHeader('cache-control', 'public, s-maxage=1, stale-while-revalidate=10') + requestsToOrigin++ + + if (requestsToOrigin > 1) { + equal(req.headers['etag'], '"asd123"') + + if (requestsToOrigin === 3) { + res.end('asd123') + } else { + res.statusCode = 304 + res.end() + } + } else { + res.setHeader('etag', '"asd123"') + res.end('asd') + } + }).listen(0) + + const client = new Client(`http://localhost:${server.address().port}`) + .compose(interceptors.cache()) + + after(async () => { + server.close() + await client.close() + clock.uninstall() + }) + + await once(server, 'listening') + + strictEqual(requestsToOrigin, 0) + + const request = { + origin: 'localhost', + method: 'GET', + path: '/' + } + + // Send initial request. 
This should reach the origin + let response = await client.request(request) + strictEqual(requestsToOrigin, 1) + strictEqual(await response.body.text(), 'asd') + + clock.tick(1500) + tick(1500) + + // Now we send two more requests. Both of these should reach the origin, + // but now with a conditional header asking if the resource has been + // updated. These need to be ran after the response is stale. + // No update for the second request + response = await client.request(request) + strictEqual(requestsToOrigin, 2) + strictEqual(await response.body.text(), 'asd') + + // This should be updated, even though the value isn't expired. + response = await client.request(request) + strictEqual(requestsToOrigin, 3) + strictEqual(await response.body.text(), 'asd123') + }) + test('respects cache store\'s isFull property', async () => { const server = createServer((_, res) => { res.end('asd')
Easier way to attach `ETag` to `If-None-Match` after #3562 ## This would solve... #3562 introduces HTTP Caching (RFC9110) to undici, which also implements HTTP 304 with `If-Modified-Since`. But it seems that `ETag` support is missing. cc @flakey5 ## The implementation should look like... IMHO #3562 should not be delayed anymore; this could be implemented after #3562 is merged. undici could check the stored `ETag` header from the cache store and use it as the `If-None-Match` value in future requests. If the server returns HTTP 304, undici would then return the cached (transparent) HTTP 200 response to the client (the same behavior as `fetch` in browsers). ## Additional context #3562 https://github.com/JakeChampion/fetch/issues/241 `ETag` is described in RFC9110 here: https://httpwg.org/specs/rfc9110.html#field.etag `If-None-Match` is described in RFC9110 here: https://httpwg.org/specs/rfc9110.html#field.if-none-match HTTP 304 is described in RFC9110 here: https://httpwg.org/specs/rfc9110.html#status.304 The handling of HTTP 304 is described in RFC9111 here: https://httpwg.org/specs/rfc9111.html#freshening.responses
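To make the requested behavior concrete, here is a minimal sketch of the revalidation handshake the issue describes: store the origin's `ETag`, replay it as `If-None-Match`, and serve the cached body when the origin answers 304. The single-entry in-memory object below stands in for a real cache store and is an assumption for illustration only, not the interceptor's actual mechanism.

```js
'use strict'

const { request } = require('undici')

// Toy in-memory "cache" holding one entry, for illustration only.
const cache = { etag: null, body: null }

async function getWithRevalidation (url) {
  const headers = {}
  if (cache.etag) {
    // Replay the stored validator so the origin can answer with 304.
    headers['if-none-match'] = cache.etag
  }

  const res = await request(url, { headers })

  if (res.statusCode === 304) {
    // Origin confirms the cached copy is still valid: drain the empty body
    // and hand the cached representation back as a transparent "200".
    await res.body.dump()
    return cache.body
  }

  cache.etag = typeof res.headers.etag === 'string' ? res.headers.etag : null
  cache.body = await res.body.text()
  return cache.body
}

module.exports = { getWithRevalidation }
```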
I have a question. In the fetch spec, 304 (along with some other status codes like 101 and 204) are null body statuses (https://fetch.spec.whatwg.org/#concept-status). undici could return a "transparent" cached `Response` object with 200 OKs when the server returns 304 (which is also what browsers do), which is consistent with what I mentioned. But in the context of Node.js, some people might also want to know whether the response came from the cache or not. We haven't really assessed if/how this would integrate with fetch. > But in the context of Node.js, some people might also want to know whether the response came from the cache or not. We currently use make-fetch-happen and their `X-Local-Cache-Status` header to see if the response is stale or not (https://github.com/PREreview/prereview.org/blob/2fc7a676df9951494d67aa1fa3d275818fd6796d/src/fetch.ts#L27). While in this case it should be possible to use the `Age` header, it's generally far easier to have custom headers for this. > > But in the context of Node.js, some people might also want to know whether the response came from the cache or not. > > We currently use make-fetch-happen and their `X-Local-Cache-Status` header to see if the response is stale or not (https://github.com/PREreview/prereview.org/blob/2fc7a676df9951494d67aa1fa3d275818fd6796d/src/fetch.ts#L27). While in this case it should be possible to use the `Age` header, it's generally far easier to have custom headers for this. I am also using `make-fetch-happen` and `X-Local-Cache-Status`. But with undici, it is possible to use `undici.request` over `undici.fetch`, and undici could add a `cacheStatus` property on the return value. We shall see!
2024-10-21T23:46:20Z
7
nodejs/undici
3,631
nodejs__undici-3631
[ "3630" ]
54fd2df4e359ecde137f382af4c4590aea5f0555
diff --git a/lib/web/cache/cache.js b/lib/web/cache/cache.js --- a/lib/web/cache/cache.js +++ b/lib/web/cache/cache.js @@ -514,6 +514,7 @@ class Cache { for (const request of requests) { const requestObject = fromInnerRequest( request, + undefined, new AbortController().signal, 'immutable' ) diff --git a/lib/web/fetch/request.js b/lib/web/fetch/request.js --- a/lib/web/fetch/request.js +++ b/lib/web/fetch/request.js @@ -775,7 +775,7 @@ class Request { } // 4. Return clonedRequestObject. - return fromInnerRequest(clonedRequest, ac.signal, getHeadersGuard(this[kHeaders])) + return fromInnerRequest(clonedRequest, this[kDispatcher], ac.signal, getHeadersGuard(this[kHeaders])) } [nodeUtil.inspect.custom] (depth, options) { @@ -875,13 +875,15 @@ function cloneRequest (request) { /** * @see https://fetch.spec.whatwg.org/#request-create * @param {any} innerRequest + * @param {import('../../dispatcher/agent')} dispatcher * @param {AbortSignal} signal * @param {'request' | 'immutable' | 'request-no-cors' | 'response' | 'none'} guard * @returns {Request} */ -function fromInnerRequest (innerRequest, signal, guard) { +function fromInnerRequest (innerRequest, dispatcher, signal, guard) { const request = new Request(kConstruct) request[kState] = innerRequest + request[kDispatcher] = dispatcher request[kSignal] = signal request[kHeaders] = new Headers(kConstruct) setHeadersList(request[kHeaders], innerRequest.headersList)
diff --git a/test/fetch/issue-3630.js b/test/fetch/issue-3630.js new file mode 100644 --- /dev/null +++ b/test/fetch/issue-3630.js @@ -0,0 +1,12 @@ +'use strict' + +const { test } = require('node:test') +const assert = require('node:assert') +const { Request, Agent } = require('../..') +const { kDispatcher } = require('../../lib/web/fetch/symbols') + +test('Cloned request should inherit its dispatcher', () => { + const agent = new Agent() + const request = new Request('https://a', { dispatcher: agent }) + assert.strictEqual(request[kDispatcher], agent) +}) diff --git a/test/fetch/request.js b/test/fetch/request.js --- a/test/fetch/request.js +++ b/test/fetch/request.js @@ -10,9 +10,6 @@ const { Headers, fetch } = require('../../') -const { fromInnerRequest, makeRequest } = require('../../lib/web/fetch/request') -const { kState, kSignal, kHeaders } = require('../../lib/web/fetch/symbols') -const { getHeadersGuard, getHeadersList } = require('../../lib/web/fetch/headers') const hasSignalReason = 'reason' in AbortSignal.prototype @@ -462,17 +459,3 @@ test('Issue#2465', async (t) => { const request = new Request('http://localhost', { body: new SharedArrayBuffer(0), method: 'POST' }) strictEqual(await request.text(), '[object SharedArrayBuffer]') }) - -test('fromInnerRequest', () => { - const innerRequest = makeRequest({ - urlList: [new URL('http://asd')] - }) - const signal = new AbortController().signal - const request = fromInnerRequest(innerRequest, signal, 'immutable') - - // check property - assert.strictEqual(request[kState], innerRequest) - assert.strictEqual(request[kSignal], signal) - assert.strictEqual(getHeadersList(request[kHeaders]), innerRequest.headersList) - assert.strictEqual(getHeadersGuard(request[kHeaders]), 'immutable') -})
undici.Request.clone does not copy dispatcher ## Bug Description <!-- A clear and concise description of what the bug is. --> ## Reproducible By Consider this code: ``` async function test() { const undici = require('undici'); class CustomAgent extends undici.Agent { constructor() { super({ keepAliveTimeout: 10, keepAliveMaxTimeout: 10 }); } dispatch(options, handler) { console.log("DISPATCH"); return super.dispatch(options, handler); } } const request = new undici.Request("https://example.org", { dispatcher: new CustomAgent() }); // will make an HTTP call and print "DISPATCH" await undici.fetch(request); // will make an HTTP call, but not print "DISPATCH" await undici.fetch(request.clone()); } test(); ``` ## Expected Behavior In the code above, I would expect both `fetch` calls to use the dispatcher. ## Logs & Screenshots none. ## Environment NodeJS, Version v22.9.0 ### Additional context
2024-09-20T14:45:57Z
6.19
nodejs/undici
3,566
nodejs__undici-3566
[ "3546" ]
dca0aa0998cbdef28916b23d6300beb2fd979140
diff --git a/lib/web/websocket/websocket.js b/lib/web/websocket/websocket.js --- a/lib/web/websocket/websocket.js +++ b/lib/web/websocket/websocket.js @@ -537,6 +537,14 @@ class WebSocket extends EventTarget { message: reason }) } + + if (!this.#parser && !this.#handler.receivedClose) { + fireEvent('close', this, (type, init) => new CloseEvent(type, init), { + wasClean: false, + code: 1006, + reason + }) + } } #onMessage (type, data) {
diff --git a/test/websocket/issue-3546.js b/test/websocket/issue-3546.js --- a/test/websocket/issue-3546.js +++ b/test/websocket/issue-3546.js @@ -5,7 +5,7 @@ const { WebSocket } = require('../..') const { tspl } = require('@matteo.collina/tspl') test('first error than close event is fired on failed connection', async (t) => { - const { completed, strictEqual } = tspl(t, { plan: 2 }) + const { completed, strictEqual } = tspl(t, { plan: 4 }) const ws = new WebSocket('ws://localhost:1') let orderOfEvents = 0
WebSockets do not fire 'close' event if the connection failed to be established Consider the following code: ```js const webSocket = new undici.WebSocket("wss://invalid-domain.example.com/"); webSocket.onopen = () => { console.log("open"); }; webSocket.onclose = () => { console.log("close"); }; webSocket.onerror = () => { console.log("error"); }; ``` It outputs: ``` error ``` However, a standard-compliant WebSocket implementation would output: ``` error close ``` This is because [establish a WebSocket connection](https://websockets.spec.whatwg.org/#concept-websocket-establish) algorithm would invoke [fail the WebSocket Connection](https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.7) algorithm in step 11.1, which would [Close the WebSocket Connection](https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.1). The WebSocket standard states: > When [the WebSocket connection is closed](https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4), possibly cleanly, the user agent must [queue a task](https://html.spec.whatwg.org/multipage/webappapis.html#queue-a-task) to run the following substeps: > ... > 3. [Fire an event](https://dom.spec.whatwg.org/#concept-event-fire) named close at the [WebSocket](https://websockets.spec.whatwg.org/#websocket) object. This bug is reproducible on commit [4b8958a](https://github.com/nodejs/undici/commit/4b8958a228aae55366dfba1dc70f03f08bc2d48d).
Is it connected to #3506 and maybe already fixed in main but not released yet? The `close` event is never fired even though the `readyState` is set to 3 (`WebSocket.CLOSED`). Tested on latest commit (4b8958a228aae55366dfba1dc70f03f08bc2d48d) on main. Code ```typescript import { WebSocket } from 'undici'; const webSocket = new WebSocket("wss://invalid-domain.example.com/"); webSocket.onopen = () => { console.log("open"); }; webSocket.onclose = () => { console.log("close"); }; webSocket.onerror = () => { console.log("error"); console.log("readyState", webSocket.readyState); }; ``` Console output ``` ➜ test-issue git:(main) ✗ yarn ts-node src/index.ts yarn run v1.22.22 $ /Users/exhumer/test-issue/node_modules/.bin/ts-node src/index.ts error readyState 3 ✨ Done in 1.00s. ``` On `v6.x` branch (latest stable release branch), the `readyState` is instead at 0 (`WebSocket.CONNECTING`) at error. Still no close event is fired. Trying the code in a local HTML page on a browser, a `close` event is fired after the `error` event. `test-issue-browser.html` ``` <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8" /> <meta http-equiv="X-UA-Compatible" content="IE=edge" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /> <title>Document</title> </head> <body></body> <script> const webSocket = new WebSocket('ws://localhost:8080') webSocket.onopen = () => { console.log("open"); }; webSocket.onclose = () => { console.log("close"); }; webSocket.onerror = () => { console.log("error"); console.log("readyState", webSocket.readyState); }; </script> </html> ``` Console output <img width="1440" alt="Screenshot 2024-09-04 at 3 07 40 PM" src="https://github.com/user-attachments/assets/7be3661d-7749-4201-acf6-ff0b44e034c7"> This can be fixed by changing the the following. https://github.com/nodejs/undici/blob/4b8958a228aae55366dfba1dc70f03f08bc2d48d/lib/web/websocket/websocket.js#L527-L533 First, adding a `this.#handler.onSocketClose()` or `this.#onSocketClose()` after the `if` block to fire the `error` event. And then changing this line https://github.com/nodejs/undici/blob/4b8958a228aae55366dfba1dc70f03f08bc2d48d/lib/web/websocket/websocket.js#L669 to `const result = this.#parser?.closingInfo` as `this.#parser` is `undefined` when it is accessed before a connection is established. I can open a PR to fix with the mentioned changes if it is considered acceptable fix.
2024-09-08T07:40:34Z
6.19
nodejs/undici
3,495
nodejs__undici-3495
[ "3410" ]
b7254574e54d135d41baa5193f89007e78a3c710
diff --git a/benchmarks/timers/compare-timer-getters.mjs b/benchmarks/timers/compare-timer-getters.mjs new file mode 100644 --- /dev/null +++ b/benchmarks/timers/compare-timer-getters.mjs @@ -0,0 +1,18 @@ +import { bench, group, run } from 'mitata' + +group('timers', () => { + bench('Date.now()', () => { + Date.now() + }) + bench('performance.now()', () => { + performance.now() + }) + bench('Math.trunc(performance.now())', () => { + Math.trunc(performance.now()) + }) + bench('process.uptime()', () => { + process.uptime() + }) +}) + +await run() diff --git a/lib/core/connect.js b/lib/core/connect.js --- a/lib/core/connect.js +++ b/lib/core/connect.js @@ -4,6 +4,7 @@ const net = require('node:net') const assert = require('node:assert') const util = require('./util') const { InvalidArgumentError, ConnectTimeoutError } = require('./errors') +const timers = require('../util/timers') let tls // include tls conditionally since it is not always available @@ -130,12 +131,12 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess socket.setKeepAlive(true, keepAliveInitialDelay) } - const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + const cancelConnectTimeout = setupConnectTimeout(new WeakRef(socket), timeout) socket .setNoDelay(true) .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { - cancelTimeout() + cancelConnectTimeout() if (callback) { const cb = callback @@ -144,7 +145,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess } }) .on('error', function (err) { - cancelTimeout() + cancelConnectTimeout() if (callback) { const cb = callback @@ -157,30 +158,44 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess } } -function setupTimeout (onConnectTimeout, timeout) { - if (!timeout) { - return () => {} - } +const setupConnectTimeout = process.platform === 'win32' + ? 
(socket, timeout) => { + if (!timeout) { + return () => { } + } - let s1 = null - let s2 = null - const timeoutId = setTimeout(() => { - // setImmediate is added to make sure that we prioritize socket error events over timeouts - s1 = setImmediate(() => { - if (process.platform === 'win32') { + let s1 = null + let s2 = null + const timer = timers.setTimeout(() => { + // setImmediate is added to make sure that we prioritize socket error events over timeouts + s1 = setImmediate(() => { // Windows needs an extra setImmediate probably due to implementation differences in the socket logic - s2 = setImmediate(() => onConnectTimeout()) - } else { - onConnectTimeout() + s2 = setImmediate(() => onConnectTimeout(socket.deref())) + }) + }, timeout) + return () => { + timers.clearTimeout(timer) + clearImmediate(s1) + clearImmediate(s2) } - }) - }, timeout) - return () => { - clearTimeout(timeoutId) - clearImmediate(s1) - clearImmediate(s2) - } -} + } + : (socket, timeout) => { + if (!timeout) { + return () => { } + } + + let s1 = null + const timer = timers.setTimeout(() => { + // setImmediate is added to make sure that we prioritize socket error events over timeouts + s1 = setImmediate(() => { + onConnectTimeout(socket.deref()) + }) + }, timeout) + return () => { + timers.clearTimeout(timer) + clearImmediate(s1) + } + } function onConnectTimeout (socket) { let message = 'Connect Timeout Error' diff --git a/lib/dispatcher/client-h1.js b/lib/dispatcher/client-h1.js --- a/lib/dispatcher/client-h1.js +++ b/lib/dispatcher/client-h1.js @@ -162,12 +162,12 @@ class Parser { this.maxResponseSize = client[kMaxResponseSize] } - setTimeout (value, type) { + setTimeout (delay, type) { this.timeoutType = type - if (value !== this.timeoutValue) { - timers.clearTimeout(this.timeout) - if (value) { - this.timeout = timers.setTimeout(onParserTimeout, value, new WeakRef(this)) + if (delay !== this.timeoutValue) { + this.timeout && timers.clearTimeout(this.timeout) + if (delay) { + this.timeout = timers.setTimeout(onParserTimeout, delay, new WeakRef(this)) // istanbul ignore else: only for jest if (this.timeout.unref) { this.timeout.unref() @@ -175,7 +175,7 @@ class Parser { } else { this.timeout = null } - this.timeoutValue = value + this.timeoutValue = delay } else if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { @@ -286,7 +286,7 @@ class Parser { this.llhttp.llhttp_free(this.ptr) this.ptr = null - timers.clearTimeout(this.timeout) + this.timeout && timers.clearTimeout(this.timeout) this.timeout = null this.timeoutValue = null this.timeoutType = null diff --git a/lib/util/timers.js b/lib/util/timers.js --- a/lib/util/timers.js +++ b/lib/util/timers.js @@ -1,99 +1,379 @@ 'use strict' -const TICK_MS = 499 +/** + * This module offers an optimized timer implementation designed for scenarios + * where high precision is not critical. + * + * The timer achieves faster performance by using a low-resolution approach, + * with an accuracy target of within 500ms. This makes it particularly useful + * for timers with delays of 1 second or more, where exact timing is less + * crucial. + * + * It's important to note that Node.js timers are inherently imprecise, as + * delays can occur due to the event loop being blocked by other operations. + * Consequently, timers may trigger later than their scheduled time. 
+ */ -let fastNow = Date.now() +const nativeSetTimeout = global.setTimeout +const nativeClearTimeout = global.clearTimeout + +/** + * The fastNow variable contains the internal fast timer clock value. + * + * @type {number} + */ +let fastNow = 0 + +/** + * RESOLUTION_MS represents the target resolution time in milliseconds. + * + * @type {number} + * @default 1000 + */ +const RESOLUTION_MS = 1e3 + +/** + * TICK_MS defines the desired interval in milliseconds between each tick. + * The target value is set to half the resolution time, minus 1 ms, to account + * for potential event loop overhead. + * + * @type {number} + * @default 499 + */ +const TICK_MS = (RESOLUTION_MS >> 1) - 1 + +/** + * fastNowTimeout is a Node.js timer used to manage and process + * the FastTimers stored in the `fastTimers` array. + * + * @type {NodeJS.Timeout} + */ let fastNowTimeout +/** + * The kFastTimer symbol is used to identify FastTimer instances. + * + * @type {Symbol} + */ +const kFastTimer = Symbol('kFastTimer') + +/** + * The fastTimers array contains all active FastTimers. + * + * @type {FastTimer[]} + */ const fastTimers = [] -function onTimeout () { - fastNow = Date.now() +/** + * These constants represent the various states of a FastTimer. + */ - let len = fastTimers.length +/** + * The `NOT_IN_LIST` constant indicates that the FastTimer is not included + * in the `fastTimers` array. Timers with this status will not be processed + * during the next tick by the `onTick` function. + * + * A FastTimer can be re-added to the `fastTimers` array by invoking the + * `refresh` method on the FastTimer instance. + * + * @type {-2} + */ +const NOT_IN_LIST = -2 + +/** + * The `TO_BE_CLEARED` constant indicates that the FastTimer is scheduled + * for removal from the `fastTimers` array. A FastTimer in this state will + * be removed in the next tick by the `onTick` function and will no longer + * be processed. + * + * This status is also set when the `clear` method is called on the FastTimer instance. + * + * @type {-1} + */ +const TO_BE_CLEARED = -1 + +/** + * The `PENDING` constant signifies that the FastTimer is awaiting processing + * in the next tick by the `onTick` function. Timers with this status will have + * their `_idleStart` value set and their status updated to `ACTIVE` in the next tick. + * + * @type {0} + */ +const PENDING = 0 + +/** + * The `ACTIVE` constant indicates that the FastTimer is active and waiting + * for its timer to expire. During the next tick, the `onTick` function will + * check if the timer has expired, and if so, it will execute the associated callback. + * + * @type {1} + */ +const ACTIVE = 1 + +/** + * The onTick function processes the fastTimers array. + * + * @returns {void} + */ +function onTick () { + /** + * Increment the fastNow value by the TICK_MS value, despite the actual time + * that has passed since the last tick. This approach ensures independence + * from the system clock and delays caused by a blocked event loop. + * + * @type {number} + */ + fastNow += TICK_MS + + /** + * The `idx` variable is used to iterate over the `fastTimers` array. + * Expired timers are removed by replacing them with the last element in the array. + * Consequently, `idx` is only incremented when the current element is not removed. + * + * @type {number} + */ let idx = 0 + + /** + * The len variable will contain the length of the fastTimers array + * and will be decremented when a FastTimer should be removed from the + * fastTimers array. 
+ * + * @type {number} + */ + let len = fastTimers.length + while (idx < len) { + /** + * @type {FastTimer} + */ const timer = fastTimers[idx] - if (timer.state === 0) { - timer.state = fastNow + timer.delay - TICK_MS - } else if (timer.state > 0 && fastNow >= timer.state) { - timer.state = -1 - timer.callback(timer.opaque) + // If the timer is in the ACTIVE state and the timer has expired, it will + // be processed in the next tick. + if (timer._state === PENDING) { + // Set the _idleStart value to the fastNow value minus the TICK_MS value + // to account for the time the timer was in the PENDING state. + timer._idleStart = fastNow - TICK_MS + timer._state = ACTIVE + } else if ( + timer._state === ACTIVE && + fastNow >= timer._idleStart + timer._idleTimeout + ) { + timer._state = TO_BE_CLEARED + timer._idleStart = -1 + timer._onTimeout(timer._timerArg) } - if (timer.state === -1) { - timer.state = -2 - if (idx !== len - 1) { - fastTimers[idx] = fastTimers.pop() - } else { - fastTimers.pop() + if (timer._state === TO_BE_CLEARED) { + timer._state = NOT_IN_LIST + + // Move the last element to the current index and decrement len if it is + // not the only element in the array. + if (--len !== 0) { + fastTimers[idx] = fastTimers[len] } - len -= 1 } else { - idx += 1 + ++idx } } - if (fastTimers.length > 0) { + // Set the length of the fastTimers array to the new length and thus + // removing the excess FastTimers elements from the array. + fastTimers.length = len + + // If there are still active FastTimers in the array, refresh the Timer. + // If there are no active FastTimers, the timer will be refreshed again + // when a new FastTimer is instantiated. + if (fastTimers.length !== 0) { refreshTimeout() } } function refreshTimeout () { - if (fastNowTimeout?.refresh) { + // If the fastNowTimeout is already set, refresh it. + if (fastNowTimeout) { fastNowTimeout.refresh() + // fastNowTimeout is not instantiated yet, create a new Timer. } else { clearTimeout(fastNowTimeout) - fastNowTimeout = setTimeout(onTimeout, TICK_MS) + fastNowTimeout = setTimeout(onTick, TICK_MS) + + // If the Timer has an unref method, call it to allow the process to exit if + // there are no other active handles. if (fastNowTimeout.unref) { fastNowTimeout.unref() } } } -class Timeout { - constructor (callback, delay, opaque) { - this.callback = callback - this.delay = delay - this.opaque = opaque +/** + * The `FastTimer` class is a data structure designed to store and manage + * timer information. + */ +class FastTimer { + [kFastTimer] = true + + /** + * The state of the timer, which can be one of the following: + * - NOT_IN_LIST (-2) + * - TO_BE_CLEARED (-1) + * - PENDING (0) + * - ACTIVE (1) + * + * @type {-2|-1|0|1} + * @private + */ + _state = NOT_IN_LIST + + /** + * The number of milliseconds to wait before calling the callback. + * + * @type {number} + * @private + */ + _idleTimeout = -1 + + /** + * The time in milliseconds when the timer was started. This value is used to + * calculate when the timer should expire. + * + * @type {number} + * @default -1 + * @private + */ + _idleStart = -1 + + /** + * The function to be executed when the timer expires. + * @type {Function} + * @private + */ + _onTimeout + + /** + * The argument to be passed to the callback when the timer expires. 
+ * + * @type {*} + * @private + */ + _timerArg - // -2 not in timer list - // -1 in timer list but inactive - // 0 in timer list waiting for time - // > 0 in timer list waiting for time to expire - this.state = -2 + /** + * @constructor + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds that the timer should wait + * before the specified function or code is executed. + * @param {*} arg + */ + constructor (callback, delay, arg) { + this._onTimeout = callback + this._idleTimeout = delay + this._timerArg = arg this.refresh() } + /** + * Sets the timer's start time to the current time, and reschedules the timer + * to call its callback at the previously specified duration adjusted to the + * current time. + * Using this on a timer that has already called its callback will reactivate + * the timer. + * + * @returns {void} + */ refresh () { - if (this.state === -2) { + // In the special case that the timer is not in the list of active timers, + // add it back to the array to be processed in the next tick by the onTick + // function. + if (this._state === NOT_IN_LIST) { fastTimers.push(this) - if (!fastNowTimeout || fastTimers.length === 1) { - refreshTimeout() - } } - this.state = 0 + // If the timer is the only active timer, refresh the fastNowTimeout for + // better resolution. + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + + // Setting the state to PENDING will cause the timer to be reset in the + // next tick by the onTick function. + this._state = PENDING } + /** + * The `clear` method cancels the timer, preventing it from executing. + * + * @returns {void} + * @private + */ clear () { - this.state = -1 + // Set the state to TO_BE_CLEARED to mark the timer for removal in the next + // tick by the onTick function. + this._state = TO_BE_CLEARED + + // Reset the _idleStart value to -1 to indicate that the timer is no longer + // active. + this._idleStart = -1 } } +/** + * This module exports a setTimeout and clearTimeout function that can be + * used as a drop-in replacement for the native functions. + */ module.exports = { - setTimeout (callback, delay, opaque) { - return delay <= 1e3 - ? setTimeout(callback, delay, opaque) - : new Timeout(callback, delay, opaque) + /** + * The setTimeout() method sets a timer which executes a function once the + * timer expires. + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds that the timer should + * wait before the specified function or code is executed. + * @param {*} [arg] An optional argument to be passed to the callback function + * when the timer expires. + * @returns {NodeJS.Timeout|FastTimer} + */ + setTimeout (callback, delay, arg) { + // If the delay is less than or equal to the RESOLUTION_MS value return a + // native Node.js Timer instance. + return delay <= RESOLUTION_MS + ? nativeSetTimeout(callback, delay, arg) + : new FastTimer(callback, delay, arg) }, + /** + * The clearTimeout method cancels an instantiated Timer previously created + * by calling setTimeout. + * + * @param {FastTimer} timeout + */ clearTimeout (timeout) { - if (timeout instanceof Timeout) { + // If the timeout is a FastTimer, call its own clear method. + if (timeout[kFastTimer]) { + /** + * @type {FastTimer} + */ timeout.clear() + // Otherwise it is an instance of a native NodeJS.Timeout, so call the + // Node.js native clearTimeout function. 
} else { - clearTimeout(timeout) + nativeClearTimeout(timeout) } - } + }, + /** + * The now method returns the value of the internal fast timer clock. + * + * @returns {number} + */ + now () { + return fastNow + }, + /** + * Exporting for testing purposes only. + * Marking as deprecated to discourage any use outside of testing. + * @deprecated + */ + kFastTimer }
diff --git a/test/issue-3410.js b/test/issue-3410.js new file mode 100644 --- /dev/null +++ b/test/issue-3410.js @@ -0,0 +1,88 @@ +'use strict' + +const { tspl } = require('@matteo.collina/tspl') +const { fork } = require('node:child_process') +const { resolve: pathResolve } = require('node:path') +const { describe, test } = require('node:test') +const { Agent, fetch, setGlobalDispatcher } = require('..') +const { eventLoopBlocker } = require('./utils/event-loop-blocker') + +describe('https://github.com/nodejs/undici/issues/3410', () => { + test('FastTimers', async (t) => { + t = tspl(t, { plan: 1 }) + + // Spawn a server in a new process to avoid effects from the blocking event loop + const { + serverProcess, + address + } = await new Promise((resolve, reject) => { + const childProcess = fork( + pathResolve(__dirname, './utils/hello-world-server.js'), + [], + { windowsHide: true } + ) + + childProcess.on('message', (address) => { + resolve({ + serverProcess: childProcess, + address + }) + }) + childProcess.on('error', err => { + reject(err) + }) + }) + + const connectTimeout = 2000 + setGlobalDispatcher(new Agent({ connectTimeout })) + + const fetchPromise = fetch(address) + + eventLoopBlocker(3000) + + const response = await fetchPromise + + t.equal(await response.text(), 'Hello World') + + serverProcess.kill('SIGKILL') + }) + + test('native Timers', async (t) => { + t = tspl(t, { plan: 1 }) + + // Spawn a server in a new process to avoid effects from the blocking event loop + const { + serverProcess, + address + } = await new Promise((resolve, reject) => { + const childProcess = fork( + pathResolve(__dirname, './utils/hello-world-server.js'), + [], + { windowsHide: true } + ) + + childProcess.on('message', (address) => { + resolve({ + serverProcess: childProcess, + address + }) + }) + childProcess.on('error', err => { + reject(err) + }) + }) + + const connectTimeout = 900 + setGlobalDispatcher(new Agent({ connectTimeout })) + + const fetchPromise = fetch(address) + + eventLoopBlocker(1500) + + const response = await fetchPromise + + t.equal(await response.text(), 'Hello World') + + serverProcess.kill('SIGKILL') + }) +}) diff --git a/test/socket-timeout.js b/test/socket-timeout.js --- a/test/socket-timeout.js +++ b/test/socket-timeout.js @@ -3,7 +3,6 @@ const { tspl } = require('@matteo.collina/tspl') const { test, after } = require('node:test') const { Client, errors } = require('..') -const timers = require('../lib/util/timers') const { createServer } = require('node:http') const FakeTimers = require('@sinonjs/fake-timers') @@ -68,12 +67,6 @@ test('Disable socket timeout', async (t) => { const clock = FakeTimers.install() after(clock.uninstall.bind(clock)) - const orgTimers = { ...timers } - Object.assign(timers, { setTimeout, clearTimeout }) - after(() => { - Object.assign(timers, orgTimers) - }) - server.once('request', (req, res) => { setTimeout(() => { res.end('hello') diff --git a/test/timers.js b/test/timers.js new file mode 100644 --- /dev/null +++ b/test/timers.js @@ -0,0 +1,198 @@ +'use strict' + +const { tspl } = require('@matteo.collina/tspl') +const { describe, test } = require('node:test') + +const timers = require('../lib/util/timers') +const { eventLoopBlocker } = require('./utils/event-loop-blocker') + +describe('timers', () => { + test('timers exports a clearTimeout', (t) => { + t = tspl(t, { plan: 1 }) + + t.ok(typeof timers.clearTimeout === 'function') + }) + + test('timers exports a setTimeout', (t) => { + t = tspl(t, { plan: 1 }) + + t.ok(typeof timers.setTimeout 
=== 'function') + }) + + test('setTimeout instantiates a native NodeJS.Timeout when delay is lower or equal 1e3 ms', (t) => { + t = tspl(t, { plan: 2 }) + + t.strictEqual(timers.setTimeout(() => { }, 999)[timers.kFastTimer], undefined) + t.strictEqual(timers.setTimeout(() => { }, 1e3)[timers.kFastTimer], undefined) + }) + + test('setTimeout instantiates a FastTimer when delay is smaller than 1e3 ms', (t) => { + t = tspl(t, { plan: 1 }) + + const timeout = timers.setTimeout(() => { }, 1001) + t.strictEqual(timeout[timers.kFastTimer], true) + }) + + test('clearTimeout can clear a node native Timeout', (t) => { + t = tspl(t, { plan: 3 }) + + const nativeTimeoutId = setTimeout(() => { }, 1e6) + t.equal(nativeTimeoutId._idleTimeout, 1e6) + t.ok(timers.clearTimeout(nativeTimeoutId) === undefined) + t.equal(nativeTimeoutId._idleTimeout, -1) + }) + + test('a FastTimer will get a _idleStart value after short time', async (t) => { + t = tspl(t, { plan: 3 }) + + const timer = timers.setTimeout(() => { + t.fail('timer should not have fired') + }, 1e4) + + t.strictEqual(timer[timers.kFastTimer], true) + t.strictEqual(timer._idleStart, -1) + await new Promise((resolve) => setTimeout(resolve, 750)) + t.notStrictEqual(timer._idleStart, -1) + + timers.clearTimeout(timer) + }) + + test('a cleared FastTimer will reset the _idleStart value to -1', async (t) => { + t = tspl(t, { plan: 4 }) + + const timer = timers.setTimeout(() => { + t.fail('timer should not have fired') + }, 1e4) + + t.strictEqual(timer[timers.kFastTimer], true) + t.strictEqual(timer._idleStart, -1) + await new Promise((resolve) => setTimeout(resolve, 750)) + t.notStrictEqual(timer._idleStart, -1) + timers.clearTimeout(timer) + t.strictEqual(timer._idleStart, -1) + }) + + test('a FastTimer can be cleared', async (t) => { + t = tspl(t, { plan: 3 }) + + const timer = timers.setTimeout(() => { + t.fail('timer should not have fired') + }, 1001) + + t.strictEqual(timer[timers.kFastTimer], true) + timers.clearTimeout(timer) + + t.strictEqual(timer._idleStart, -1) + await new Promise((resolve) => setTimeout(resolve, 750)) + t.strictEqual(timer._idleStart, -1) + }) + + test('a cleared FastTimer can be refreshed', async (t) => { + t = tspl(t, { plan: 2 }) + + const timer = timers.setTimeout(() => { + t.ok('pass') + }, 1001) + + t.strictEqual(timer[timers.kFastTimer], true) + timers.clearTimeout(timer) + timer.refresh() + await new Promise((resolve) => setTimeout(resolve, 2000)) + timers.clearTimeout(timer) + }) + + const getDelta = (start, target) => { + const end = process.hrtime.bigint() + const actual = (end - start) / 1_000_000n + return actual - BigInt(target) + } + + // timers.setTimeout implements a low resolution timer with a 500 ms granularity + // It is expected that in the worst case, a timer will fire about 500 ms after the + // intended amount of time, an extra 200 ms is added to account event loop overhead + // Timers should never fire excessively early, 1ms early is tolerated + const ACCEPTABLE_DELTA = 700n + + test('meet acceptable resolution time', async (t) => { + const testTimeouts = [0, 1, 499, 500, 501, 990, 999, 1000, 1001, 1100, 1400, 1499, 1500, 4000, 5000] + + t = tspl(t, { plan: 1 + testTimeouts.length * 2 }) + + const start = process.hrtime.bigint() + + for (const target of testTimeouts) { + timers.setTimeout(() => { + const delta = getDelta(start, target) + + t.ok(delta >= -1n, `${target}ms fired early`) + t.ok(delta < ACCEPTABLE_DELTA, `${target}ms fired late, got difference of ${delta}ms`) + }, target) + } + + 
setTimeout(() => t.ok(true), 6000) + await t.completed + }) + + test('refresh correctly with timeout < TICK_MS', async (t) => { + t = tspl(t, { plan: 3 }) + + const start = process.hrtime.bigint() + + const timeout = timers.setTimeout(() => { + // 400 ms timer was refreshed after 600ms; total target is 1000 + const delta = getDelta(start, 1000) + + t.ok(delta >= -1n, 'refreshed timer fired early') + t.ok(delta < ACCEPTABLE_DELTA, 'refreshed timer fired late') + }, 400) + + setTimeout(() => timeout.refresh(), 200) + setTimeout(() => timeout.refresh(), 400) + setTimeout(() => timeout.refresh(), 600) + + setTimeout(() => t.ok(true), 1500) + await t.completed + }) + + test('refresh correctly with timeout > TICK_MS', async (t) => { + t = tspl(t, { plan: 3 }) + + const start = process.hrtime.bigint() + + const timeout = timers.setTimeout(() => { + // 501ms timer was refreshed after 1250ms; total target is 1751 + const delta = getDelta(start, 1751) + + t.ok(delta >= -1n, 'refreshed timer fired early') + t.ok(delta < ACCEPTABLE_DELTA, 'refreshed timer fired late') + }, 501) + + setTimeout(() => timeout.refresh(), 250) + setTimeout(() => timeout.refresh(), 750) + setTimeout(() => timeout.refresh(), 1250) + + setTimeout(() => t.ok(true), 3000) + await t.completed + }) + + test('a FastTimer will only increment by the defined TICK_MS value', async (t) => { + t = tspl(t, { plan: 2 }) + + const startInternalClock = timers.now() + + // The long running FastTimer will ensure that the internal clock is + // incremented by the TICK_MS value in the onTick function + const longRunningFastTimer = timers.setTimeout(() => {}, 1e10) + + eventLoopBlocker(1000) + + // wait to ensure the timer has fired in the next loop + await new Promise((resolve) => setTimeout(resolve, 1)) + + t.strictEqual(timers.now() - startInternalClock, 499) + await new Promise((resolve) => setTimeout(resolve, 1000)) + t.ok(timers.now() - startInternalClock <= 1497) + + timers.clearTimeout(longRunningFastTimer) + }) +}) diff --git a/test/util.js b/test/util.js --- a/test/util.js +++ b/test/util.js @@ -1,12 +1,10 @@ 'use strict' -const { tspl } = require('@matteo.collina/tspl') const { strictEqual, throws, doesNotThrow } = require('node:assert') const { test, describe } = require('node:test') const { isBlobLike, parseURL, isHttpOrHttpsPrefixed, isValidPort } = require('../lib/core/util') const { Blob, File } = require('node:buffer') const { InvalidArgumentError } = require('../lib/core/errors') -const timers = require('../lib/util/timers') describe('isBlobLike', () => { test('buffer', () => { @@ -255,79 +253,3 @@ describe('parseURL', () => { }) }) }) - -describe('timers', () => { - const getDelta = (start, target) => { - const end = process.hrtime.bigint() - const actual = (end - start) / 1_000_000n - return actual - BigInt(target) - } - - // timers.setTimeout implements a low resolution timer with a 500 ms granularity - // It is expected that in the worst case, a timer will fire about 500 ms after the - // intended amount of time, an extra 200 ms is added to account event loop overhead - // Timers should never fire excessively early, 1ms early is tolerated - const ACCEPTABLE_DELTA = 700n - - test('meet acceptable resolution time', async (t) => { - const testTimeouts = [0, 1, 499, 500, 501, 990, 999, 1000, 1001, 1100, 1400, 1499, 1500, 4000, 5000] - - t = tspl(t, { plan: 1 + testTimeouts.length * 2 }) - - const start = process.hrtime.bigint() - - for (const target of testTimeouts) { - timers.setTimeout(() => { - const delta = getDelta(start, 
target) - - t.ok(delta >= -1n, `${target}ms fired early`) - t.ok(delta < ACCEPTABLE_DELTA, `${target}ms fired late`) - }, target) - } - - setTimeout(() => t.ok(true), 6000) - await t.completed - }) - - test('refresh correctly with timeout < TICK_MS', async (t) => { - t = tspl(t, { plan: 3 }) - - const start = process.hrtime.bigint() - - const timeout = timers.setTimeout(() => { - // 400 ms timer was refreshed after 600ms; total target is 1000 - const delta = getDelta(start, 1000) - - t.ok(delta >= -1n, 'refreshed timer fired early') - t.ok(delta < ACCEPTABLE_DELTA, 'refreshed timer fired late') - }, 400) - - setTimeout(() => timeout.refresh(), 200) - setTimeout(() => timeout.refresh(), 400) - setTimeout(() => timeout.refresh(), 600) - - setTimeout(() => t.ok(true), 1500) - await t.completed - }) - - test('refresh correctly with timeout > TICK_MS', async (t) => { - t = tspl(t, { plan: 3 }) - - const start = process.hrtime.bigint() - - const timeout = timers.setTimeout(() => { - // 501ms timer was refreshed after 1250ms; total target is 1751 - const delta = getDelta(start, 1751) - - t.ok(delta >= -1n, 'refreshed timer fired early') - t.ok(delta < ACCEPTABLE_DELTA, 'refreshed timer fired late') - }, 501) - - setTimeout(() => timeout.refresh(), 250) - setTimeout(() => timeout.refresh(), 750) - setTimeout(() => timeout.refresh(), 1250) - - setTimeout(() => t.ok(true), 3000) - await t.completed - }) -}) diff --git a/test/utils/event-loop-blocker.js b/test/utils/event-loop-blocker.js new file mode 100644 --- /dev/null +++ b/test/utils/event-loop-blocker.js @@ -0,0 +1,10 @@ +'use strict' + +function eventLoopBlocker (ms) { + const nil = new Int32Array(new SharedArrayBuffer(4)) + Atomics.wait(nil, 0, 0, ms) +} + +module.exports = { + eventLoopBlocker +} diff --git a/test/utils/hello-world-server.js b/test/utils/hello-world-server.js new file mode 100644 --- /dev/null +++ b/test/utils/hello-world-server.js @@ -0,0 +1,30 @@ +'use strict' + +const { createServer } = require('node:http') +const hostname = '127.0.0.1' + +const server = createServer(async (req, res) => { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + + await sendInDelayedChunks(res, 'Hello World', 125) + res.end() +}) + +async function sendInDelayedChunks (res, payload, delay) { + const chunks = payload.split('') + + for (const chunk of chunks) { + await new Promise(resolve => setTimeout(resolve, delay)) + + res.write(chunk) + } +} + +server.listen(0, hostname, () => { + if (process.send) { + process.send(`http://${hostname}:${server.address().port}/`) + } else { + console.log(`http://${hostname}:${server.address().port}/`) + } +})
`UND_ERR_CONNECT_TIMEOUT` errors thrown when there is CPU intensive code on the event loop ## Bug Description Work on the event loop can interrupt the Undici lifecycle for making requests, causing errors to be thrown even when there is no problem with the underlying connection. For example, if a fetch request is started and then work on the event loop takes more than 10 seconds (default connect timeout), Undici will throw a `UND_ERR_CONNECT_TIMEOUT` error even if the connection could be established very quickly. I believe what is happening is: 1. When the fetch request is started, Undici starts the work to make a connection. Undici calls `setTimeout` with the value of the `connectTimeoutMs` to throw an error and cancel the connection if it takes too long (https://github.com/nodejs/undici/blob/main/lib/core/connect.js). It makes a call to `GetAddrInfoReqWrap` (https://github.com/nodejs/node/blob/main/lib/dns.js#L221), but this is asynchronous and processing of the callback will be delayed until the next event loop. 2. User tasks block the event loop for a long period of time. 3. The `onConnectTimeout` timer is run because the previous task took longer than the timeout. `onConnectTimeout` calls `setImmediate` with a function to destroy the socket and throw the error. https://github.com/nodejs/undici/blob/main/lib/core/connect.js 4. The `GetAddrInfoReq` lookup callback (`emitLookup` in `node:net`) is run. This code begins the TCP connection (`internalConnect` is called in https://github.com/nodejs/node/blob/main/lib/net.js#L1032) but that is also asynchronous, so it won't finish in this round of the event loop. 5. The `setImmediate` function is run in the next phase which destroys the socket and throws the `UND_ERR_CONNECT_TIMEOUT` error. 6. Undici never gets a chance to handle the TCP connection response. Internally at Vercel, we have been seeing a high number of these `UND_ERR_CONNECT_TIMEOUT` issues while pre-rendering pages in our Next.js application. I can't run this task on my local machine so it's harder to debug, but it's a CPU intensive task and moving fetch requests to a worker thread eliminated the Undici errors. We tried other suggestions (like `--dns-result-order=ipv4first` and verified that we were not seeing any packet loss) that did not resolve the issue. Increasing the connect timeout resolves the issue in the reproduction but _not_ the issue in our Next.js build (which I can't explain). ## Reproducible By A minimal reproduction is available at https://github.com/mknichel/undici-connect-timeout-errors. We can reproduce the behavior on Node 18.x and 20.x and with the `5.24.0` and the latest version of Undici (`6.19.2`) ## Expected Behavior The Undici request lifecycle could operate on a separate thread that does not get blocked by user code. By separating it out from the user code, this would remove impact of any user code on requests. To test this theory, we created a dispatcher that proxied the fetch request to a dedicated worker thread (`new Worker` from `worker_threads`). This eliminated all the Undici errors that we were seeing in our Next.js build. 
## Logs & Screenshots In the minimal reproduction, the error is: ``` TypeError: fetch failed at fetch (/Users/mknichel/code/tmp/undici-connect-timeout-errors/node_modules/.pnpm/undici@6.19.2/node_modules/undici/index.js:112:13) at process.processTicksAndRejections (node:internal/process/task_queues:95:5) at fetchExample (/Users/mknichel/code/tmp/undici-connect-timeout-errors/index.ts:21:20) at main (/Users/mknichel/code/tmp/undici-connect-timeout-errors/index.ts:66:3) { [cause]: ConnectTimeoutError: Connect Timeout Error at onConnectTimeout (/Users/mknichel/code/tmp/undici-connect-timeout-errors/node_modules/.pnpm/undici@6.19.2/node_modules/undici/lib/core/connect.js:190:24) at /Users/mknichel/code/tmp/undici-connect-timeout-errors/node_modules/.pnpm/undici@6.19.2/node_modules/undici/lib/core/connect.js:133:46 at Immediate._onImmediate (/Users/mknichel/code/tmp/undici-connect-timeout-errors/node_modules/.pnpm/undici@6.19.2/node_modules/undici/lib/core/connect.js:174:9) at process.processImmediate (node:internal/timers:478:21) { code: 'UND_ERR_CONNECT_TIMEOUT' ``` In our Next.js builds, the error is: ``` TypeError: fetch failed at node:internal/deps/undici/undici:12618:11 at process.processTicksAndRejections (node:internal/process/task_queues:95:5) at async s (elided path) at async elided path { cause: ConnectTimeoutError: Connect Timeout Error at onConnectTimeout (node:internal/deps/undici/undici:7760:28) at node:internal/deps/undici/undici:7716:50 at Immediate._onImmediate (node:internal/deps/undici/undici:7748:13) at process.processImmediate (node:internal/timers:478:21) at process.callbackTrampoline (node:internal/async_hooks:130:17) { code: 'UND_ERR_CONNECT_TIMEOUT' } } ``` ## Environment The reproduction repo was erroring for me on Mac OS 14.4, while internally we are seeing issues on AWS EC2 Intel machines. ### Additional context Vercel/Next.js users have reported `UND_ERR_CONNECT_TIMEOUT` issues to us: - https://github.com/vercel/vercel/issues/11692 - https://github.com/vercel/next.js/issues/66373
I think something like this would also do the trick: ```diff diff --git a/lib/util/timers.js b/lib/util/timers.js index d0091cc1..fa56be8b 100644 --- a/lib/util/timers.js +++ b/lib/util/timers.js @@ -2,13 +2,13 @@ const TICK_MS = 499 -let fastNow = Date.now() +let fastNow = 0 let fastNowTimeout const fastTimers = [] function onTimeout () { - fastNow = Date.now() + fastNow += TICK_MS let len = fastTimers.length let idx = 0 ``` Though it needs some work to update tests. @mknichel any progress or updates on this one? My Vercel/Next.js based projects are facing bad experiences. I am currently working on this. Any updates? I am on it ;)
2024-08-23T14:56:16Z
6.19
nodejs/undici
3,505
nodejs__undici-3505
[ "1125" ]
ba8c0739e4a6cc20c8f00aa78b14548486f416a8
diff --git a/lib/api/readable.js b/lib/api/readable.js --- a/lib/api/readable.js +++ b/lib/api/readable.js @@ -201,6 +201,17 @@ class BodyReadable extends Readable { .resume() }) } + + /** + * @param {BufferEncoding} encoding + * @returns {BodyReadable} + */ + setEncoding (encoding) { + if (Buffer.isEncoding(encoding)) { + this._readableState.encoding = encoding + } + return this + } } // https://streams.spec.whatwg.org/#readablestream-locked @@ -278,10 +289,10 @@ function consumeStart (consume) { } if (state.endEmitted) { - consumeEnd(this[kConsume]) + consumeEnd(this[kConsume], this._readableState.encoding) } else { consume.stream.on('end', function () { - consumeEnd(this[kConsume]) + consumeEnd(this[kConsume], this._readableState.encoding) }) } @@ -295,8 +306,10 @@ function consumeStart (consume) { /** * @param {Buffer[]} chunks * @param {number} length + * @param {BufferEncoding} encoding + * @returns {string} */ -function chunksDecode (chunks, length) { +function chunksDecode (chunks, length, encoding) { if (chunks.length === 0 || length === 0) { return '' } @@ -311,7 +324,11 @@ function chunksDecode (chunks, length) { buffer[2] === 0xbf ? 3 : 0 - return buffer.utf8Slice(start, bufferLength) + if (!encoding || encoding === 'utf8' || encoding === 'utf-8') { + return buffer.utf8Slice(start, bufferLength) + } else { + return buffer.subarray(start, bufferLength).toString(encoding) + } } /** @@ -339,14 +356,14 @@ function chunksConcat (chunks, length) { return buffer } -function consumeEnd (consume) { +function consumeEnd (consume, encoding) { const { type, body, resolve, stream, length } = consume try { if (type === 'text') { - resolve(chunksDecode(body, length)) + resolve(chunksDecode(body, length, encoding)) } else if (type === 'json') { - resolve(JSON.parse(chunksDecode(body, length))) + resolve(JSON.parse(chunksDecode(body, length, encoding))) } else if (type === 'arrayBuffer') { resolve(chunksConcat(body, length).buffer) } else if (type === 'blob') {
diff --git a/test/client-request.js b/test/client-request.js --- a/test/client-request.js +++ b/test/client-request.js @@ -220,7 +220,7 @@ test('request body destroyed on invalid callback', async (t) => { after(() => client.destroy()) const body = new Readable({ - read () {} + read () { } }) try { client.request({ @@ -1252,3 +1252,88 @@ test('request post body DataView', async (t) => { await t.completed }) + +test('request multibyte json with setEncoding', async (t) => { + t = tspl(t, { plan: 1 }) + + const asd = Buffer.from('あいうえお') + const data = JSON.stringify({ asd }) + const server = createServer((req, res) => { + res.write(data.slice(0, 1)) + setTimeout(() => { + res.write(data.slice(1)) + res.end() + }, 100) + }) + after(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + after(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('utf8') + t.deepStrictEqual(JSON.parse(data), await body.json()) + }) + + await t.completed +}) + +test('request multibyte text with setEncoding', async (t) => { + t = tspl(t, { plan: 1 }) + + const data = Buffer.from('あいうえお') + const server = createServer((req, res) => { + res.write(data.slice(0, 1)) + setTimeout(() => { + res.write(data.slice(1)) + res.end() + }, 100) + }) + after(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + after(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('utf8') + t.deepStrictEqual(data.toString('utf8'), await body.text()) + }) + + await t.completed +}) + +test('request multibyte text with setEncoding', async (t) => { + t = tspl(t, { plan: 1 }) + + const data = Buffer.from('あいうえお') + const server = createServer((req, res) => { + res.write(data.slice(0, 1)) + setTimeout(() => { + res.write(data.slice(1)) + res.end() + }, 100) + }) + after(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + after(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('hex') + t.deepStrictEqual(data.toString('hex'), await body.text()) + }) + + await t.completed +})
client.request throws Error with setEncoding ## Bug Description `client.request` throws an error when `body.json()` & `body.text()` are called with `setEncoding`. ## Reproducible By ```javascript import { createServer } from 'http' import { Client } from 'undici' import { once } from 'events' import assert from 'assert' const data = 'a'.repeat(100) const server = createServer((request, response) => { response.end(JSON.stringify({ data })) }).listen() await once(server, 'listening') const client = new Client(`http://localhost:${server.address().port}`) try { const { body, headers, statusCode } = await client.request({ path: '/', method: 'GET' }) console.log(`response received ${statusCode}`) console.log('headers', headers) body.setEncoding('utf8') const json = await body.json() assert.strictEqual(json.data, data, 'multi byte') } catch (error) { console.error('error!!:', error) } finally { client.close() server.close() } ``` ```bash $ node index.mjs error!!: TypeError [ERR_INVALID_ARG_TYPE]: The "list[0]" argument must be an instance of Buffer or Uint8Array. Received type string ('{"data":"aaaaaaaaaaaaaaa...) at new NodeError (node:internal/errors:371:5) at Function.concat (node:buffer:559:13) at consumeEnd (/Users/kohtaito/dev/tmp/test_undici/node_modules/undici/lib/api/readable.js:238:33) at BodyReadable.<anonymous> (/Users/kohtaito/dev/tmp/test_undici/node_modules/undici/lib/api/readable.js:220:7) at BodyReadable.emit (node:events:390:28) at BodyReadable.emit (/Users/kohtaito/dev/tmp/test_undici/node_modules/undici/lib/api/readable.js:66:18) at endReadableNT (node:internal/streams/readable:1343:12) at processTicksAndRejections (node:internal/process/task_queues:83:21) { code: 'ERR_INVALID_ARG_TYPE' } ``` ## Expected Behavior <!-- A clear and concise description of what you expected to happen. --> ## Logs & Screenshots <!-- If applicable, add screenshots to help explain your problem, or alternatively add your console logs here. --> ## Environment Node.js v16.13.1 undici v4.11.0 ### Additional context `chunk` is converted to a string by StringDecoder only when consumeStart is executed. ```diff $ git diff diff --git a/lib/api/readable.js b/lib/api/readable.js index 3bb454e..8ea758e 100644 --- a/lib/api/readable.js +++ b/lib/api/readable.js @@ -94,6 +94,7 @@ module.exports = class BodyReadable extends Readable { push (chunk) { if (this[kConsume] && chunk !== null && !this[kReading]) { + console.log('BodyReadable: push', typeof chunk, this.readableEncoding) consumePush(this[kConsume], chunk) return true } else { @@ -196,6 +197,7 @@ function consumeStart (consume) { const { _readableState: state } = consume.stream for (const chunk of state.buffer) { + console.log('consumeStart', typeof chunk, consume.stream.readableEncoding) consumePush(consume, chunk) } ``` ```bash $ node index.mjs response received 200 headers { date: 'Mon, 06 Dec 2021 09:08:48 GMT', connection: 'keep-alive', 'keep-alive': 'timeout=5', 'content-length': '10000011' } consumeStart string utf8 BodyReadable: push object utf8 BodyReadable: push object utf8 BodyReadable: push object utf8 BodyReadable: push object utf8 BodyReadable: push object utf8 ``` I think that this code needs to use `consume.stream._readableState.decoder.end()` in `consumeEnd` instead of `Buffer.concat(body)` if the value is in `readableEncoding`. Is this the right way to fix it? I also considered a method to overwrite StringDecoder so that it is not created in setEncoding. 
However, it seems that it would be difficult to keep working across upgrades because it depends too much on the internal structure of Stream. Ref: https://github.com/nodejs/undici/issues/1119 CC: @mcollina
> I think that this code needs to use consume.stream._readableState.decoder.end() in consumeEnd instead of Buffer.concat(body) if the value is in readableEncoding. Is this the right way to fix it? Just check if elements in body are string or Buffer and concat accordingly. I think that mixing strings and buffers could result in data loss at chunk boundaries. Isn't this a problem? Then it seems that it can be made simple. ```javascript if (typeof chunk === 'str') { chunk = Buffer.from(chunk) } ``` I think that would work. okey. I'll do test & create PR. Thanks. @ronag I tested this code. ```diff diff --git a/lib/api/readable.js b/lib/api/readable.js index 3bb454e..c1a80db 100644 --- a/lib/api/readable.js +++ b/lib/api/readable.js @@ -196,6 +196,10 @@ function consumeStart (consume) { const { _readableState: state } = consume.stream for (const chunk of state.buffer) { + if (consume.stream.readableEncoding) { + consumePush(consume, Buffer.from(chunk, consume.stream.readableEncoding)) + continue + } consumePush(consume, chunk) } ``` The first reproduction code now works with this fix. But this reproduction code doesn't work. ```javascript import { createServer } from 'http' import { Client } from 'undici' import { once } from 'events' import assert from 'assert' const data = Buffer.from('あいうえお') const server = createServer((request, response) => { response.write(data.slice(0, 1)) setTimeout(() => { response.write(data.slice(1)) response.end() }, 100) }).listen() await once(server, 'listening') const client = new Client(`http://localhost:${server.address().port}`) try { const { body, headers, statusCode } = await client.request({ path: '/', method: 'GET' }) console.log(`response received ${statusCode}`) console.log('headers', headers) body.setEncoding('utf8') const text = await body.text() console.log(Buffer.from(text)) console.log(Buffer.from(data)) assert.strictEqual(text, data, 'multi byte') } catch (error) { console.error('error!!:', error) } finally { client.close() server.close() } ``` ```bash $ node test-first-byte.mjs response received 200 headers { date: 'Tue, 07 Dec 2021 05:31:38 GMT', connection: 'keep-alive', 'keep-alive': 'timeout=5', 'transfer-encoding': 'chunked' } <Buffer ef bf bd ef bf bd e3 81 84 e3 81 86 e3 81 88 e3 81 8a> <Buffer e3 81 82 e3 81 84 e3 81 86 e3 81 88 e3 81 8a> error!!: AssertionError [ERR_ASSERTION]: multi byte at file:///tmp/test_undici/index2.mjs:31:10 at processTicksAndRejections (node:internal/process/task_queues:96:5) { generatedMessage: false, code: 'ERR_ASSERTION', actual: '��いうえお', expected: [Buffer [Uint8Array]], operator: 'strictEqual' } ``` It looks like the first few bytes are lost. So I think we need another way. I you open a PR with the tests then we can collaborate on it. OK. Thanks. I'll open PR.
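As a standalone illustration of the byte loss discussed above (this is not undici's internal code path, just the general hazard): once a partial multibyte chunk has been decoded to a string, re-encoding it cannot recover the original bytes.

```js
// 'あいうえお' is 15 bytes of UTF-8 (3 bytes per character).
const data = Buffer.from('あいうえお')

// Decode two chunks independently, splitting the first character.
const chunk1 = data.subarray(0, 1).toString('utf8') // incomplete sequence -> U+FFFD
const chunk2 = data.subarray(1).toString('utf8')

// Converting the decoded strings back to Buffers cannot restore the split character.
const roundTripped = Buffer.concat([Buffer.from(chunk1), Buffer.from(chunk2)])
console.log(roundTripped.equals(data)) // false – bytes at the chunk boundary are lost
```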
2024-08-25T02:12:43Z
6.19
nodejs/undici
3,294
nodejs__undici-3294
[ "3288" ]
18af4b07c3602c09a432c07e0c305deed5fbb08c
diff --git a/lib/core/symbols.js b/lib/core/symbols.js --- a/lib/core/symbols.js +++ b/lib/core/symbols.js @@ -20,6 +20,7 @@ module.exports = { kHost: Symbol('host'), kNoRef: Symbol('no ref'), kBodyUsed: Symbol('used'), + kBody: Symbol('abstracted request body'), kRunning: Symbol('running'), kBlocking: Symbol('blocking'), kPending: Symbol('pending'), diff --git a/lib/core/util.js b/lib/core/util.js --- a/lib/core/util.js +++ b/lib/core/util.js @@ -1,19 +1,72 @@ 'use strict' const assert = require('node:assert') -const { kDestroyed, kBodyUsed, kListeners } = require('./symbols') +const { kDestroyed, kBodyUsed, kListeners, kBody } = require('./symbols') const { IncomingMessage } = require('node:http') const stream = require('node:stream') const net = require('node:net') -const { InvalidArgumentError } = require('./errors') const { Blob } = require('node:buffer') const nodeUtil = require('node:util') const { stringify } = require('node:querystring') +const { EventEmitter: EE } = require('node:events') +const { InvalidArgumentError } = require('./errors') const { headerNameLowerCasedRecord } = require('./constants') const { tree } = require('./tree') const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false + } + + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] + } +} + +function wrapRequestBody (body) { + if (isStream(body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? + if (bodyLength(body) === 0) { + body + .on('data', function () { + assert(false) + }) + } + + if (typeof body.readableDidRead !== 'boolean') { + body[kBodyUsed] = false + EE.prototype.on.call(body, 'data', function () { + this[kBodyUsed] = true + }) + } + + return body + } else if (body && typeof body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. + return new BodyAsyncIterable(body) + } else if ( + body && + typeof body !== 'string' && + !ArrayBuffer.isView(body) && + isIterable(body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? 
+ return new BodyAsyncIterable(body) + } else { + return body + } +} + function nop () {} function isStream (obj) { @@ -634,5 +687,6 @@ module.exports = { isHttpOrHttpsPrefixed, nodeMajor, nodeMinor, - safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'], + wrapRequestBody } diff --git a/lib/handler/retry-handler.js b/lib/handler/retry-handler.js --- a/lib/handler/retry-handler.js +++ b/lib/handler/retry-handler.js @@ -3,7 +3,12 @@ const assert = require('node:assert') const { kRetryHandlerDefaultRetry } = require('../core/symbols') const { RequestRetryError } = require('../core/errors') -const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util') +const { + isDisturbed, + parseHeaders, + parseRangeHeader, + wrapRequestBody +} = require('../core/util') function calculateRetryAfterHeader (retryAfter) { const current = Date.now() @@ -29,7 +34,7 @@ class RetryHandler { this.dispatch = handlers.dispatch this.handler = handlers.handler - this.opts = dispatchOpts + this.opts = { ...dispatchOpts, body: wrapRequestBody(opts.body) } this.abort = null this.aborted = false this.retryOpts = { @@ -174,7 +179,9 @@ class RetryHandler { this.abort( new RequestRetryError('Request failed', statusCode, { headers, - count: this.retryCount + data: { + count: this.retryCount + } }) ) return false @@ -278,7 +285,7 @@ class RetryHandler { const err = new RequestRetryError('Request failed', statusCode, { headers, - count: this.retryCount + data: { count: this.retryCount } }) this.abort(err)
diff --git a/test/retry-handler.js b/test/retry-handler.js --- a/test/retry-handler.js +++ b/test/retry-handler.js @@ -4,6 +4,7 @@ const { tspl } = require('@matteo.collina/tspl') const { test, after } = require('node:test') const { createServer } = require('node:http') const { once } = require('node:events') +const { Readable } = require('node:stream') const { RetryHandler, Client } = require('..') const { RequestHandler } = require('../lib/api/api-request') @@ -204,6 +205,74 @@ test('Should account for network and response errors', async t => { await t.completed }) +test('Issue #3288 - request with body (asynciterable)', async t => { + t = tspl(t, { plan: 6 }) + const server = createServer() + const dispatchOptions = { + method: 'POST', + path: '/', + headers: { + 'content-type': 'application/json' + }, + body: (function * () { + yield 'hello' + yield 'world' + })() + } + + server.on('request', (req, res) => { + res.writeHead(500, { + 'content-type': 'application/json' + }) + + res.end('{"message": "failed"}') + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect () { + t.ok(true, 'pass') + }, + onBodySent () { + t.ok(true, 'pass') + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.strictEqual(status, 500) + return true + }, + onData (chunk) { + return true + }, + onComplete () { + t.fail() + }, + onError (err) { + t.equal(err.message, 'Request failed') + t.equal(err.statusCode, 500) + t.equal(err.data.count, 1) + } + } + }) + + after(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + dispatchOptions, + handler + ) + }) + + await t.completed +}) + test('Should use retry-after header for retries', async t => { t = tspl(t, { plan: 4 }) @@ -734,6 +803,145 @@ test('retrying a request with a body', async t => { await t.completed }) +test('retrying a request with a body (stream)', async t => { + let counter = 0 + const server = createServer() + const dispatchOptions = { + retryOptions: { + retry: (err, { state, opts }, done) => { + counter++ + + if ( + err.statusCode === 500 || + err.message.includes('other side closed') + ) { + setTimeout(done, 500) + return + } + + return done(err) + } + }, + method: 'POST', + path: '/', + headers: { + 'content-type': 'application/json' + }, + body: Readable.from(Buffer.from(JSON.stringify({ hello: 'world' }))) + } + + t = tspl(t, { plan: 3 }) + + server.on('request', (req, res) => { + switch (counter) { + case 0: + res.writeHead(500) + res.end('failed') + return + default: + t.fail() + } + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: new RequestHandler(dispatchOptions, (err, data) => { + t.equal(err.statusCode, 500) + t.equal(err.data.count, 1) + t.equal(err.code, 'UND_ERR_REQ_RETRY') + }) + }) + + after(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + dispatchOptions, + handler + ) + }) + + await t.completed +}) + +test('retrying a request with a body (buffer)', async t => { + let counter = 0 + const server = createServer() + const dispatchOptions = { + retryOptions: { + retry: (err, { state, opts }, done) => { + counter++ + + if ( + err.statusCode === 500 || + 
err.message.includes('other side closed') + ) { + setTimeout(done, 500) + return + } + + return done(err) + } + }, + method: 'POST', + path: '/', + headers: { + 'content-type': 'application/json' + }, + body: Buffer.from(JSON.stringify({ hello: 'world' })) + } + + t = tspl(t, { plan: 1 }) + + server.on('request', (req, res) => { + switch (counter) { + case 0: + req.destroy() + return + case 1: + res.writeHead(500) + res.end('failed') + return + case 2: + res.writeHead(200) + res.end('hello world!') + return + default: + t.fail() + } + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: new RequestHandler(dispatchOptions, (err, data) => { + t.ifError(err) + }) + }) + + after(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + dispatchOptions, + handler + ) + }) + + await t.completed +}) + test('should not error if request is not meant to be retried', async t => { t = tspl(t, { plan: 3 }) @@ -777,8 +985,7 @@ test('should not error if request is not meant to be retried', async t => { t.strictEqual(Buffer.concat(chunks).toString('utf-8'), 'Bad request') }, onError (err) { - console.log({ err }) - t.fail() + t.fail(err) } } })
Retry Handler Fails When Request Has a Body ## Bug Description When a body is provided to a request, such as in a `PUT` request, the retry handler fails with `UND_ERR_REQ_CONTENT_LENGTH_MISMATCH`. ## Reproducible By repro.ts: ``` import http from 'http'; import { Agent, RetryAgent } from 'undici'; const myDispatcher = new RetryAgent(new Agent()); async function failListener(req, res) { res.setHeader('content-type', 'application/json'); res.writeHead(500); res.end(`{"message": "failure"}`); } async function makeRetryRequest() { const result = await fetch('http://localhost:3333/fail', { method: 'PUT', body: JSON.stringify({ foo: 'bar', baz: 'bat' }), headers: { 'content-type': 'application/json', }, dispatcher: myDispatcher, }); console.log(result); } async function main() { http.createServer(failListener).listen(3333, makeRetryRequest); } main(); ``` ``` npx tsx repro.ts ``` ## Expected Behavior The request should be retried and should not fail because `undici` fails to handle re-use of the `body` correctly. ## Logs & Screenshots ``` > npx tsx repro.ts node:internal/deps/undici/undici:11372 Error.captureStackTrace(err, this); ^ TypeError: fetch failed at Object.fetch (node:internal/deps/undici/undici:11372:11) at Server.makeRetryRequest REDACTED/repro.ts:13:18) { cause: RequestContentLengthMismatchError: Request body length does not match content-length header at AsyncWriter.end REDACTED/node_modules/undici/lib/dispatcher/client-h1.js:1319:15) at writeIterable (REDACTED/node_modules/undici/lib/dispatcher/client-h1.js:1201:12) { code: 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' } } Node.js v20.8.1 ``` ## Environment * macOS 14.4.1 * nodejs v20.8.1 * undici 6.18.0 ### Additional context None
This is indeed tricky. On one hand, state-altering verbs should not be automatically retried; on the other hand, it's a pretty common case. It might be better to scope this to the safe methods only (`GET`, `HEAD`, and `OPTIONS`) instead of the idempotent ones. The issue seems to be rooted in the `AsyncIterable` that `fetch` builds for the request body. Since the `AsyncIterable` has already been consumed by the first request, when the `RetryHandler` attempts a new request the `content-length` mismatch surfaces because `undici` cannot iterate over the body again. Not sure what the course of action will be here; maybe the handler can make a best-effort attempt to detect specific bodies (Stream, AsyncIterable), clone them, and free them after a successful request. That would just add the overhead of duplicated bodies. I think the fix here is to avoid the use of an AsyncIterable in fetch if the full body was specified. We can save quite a bit of overhead. FWIW I tested directly with `Streams` and they are handled properly by the `RetryHandler`, but I faced a similar issue as with fetch when using an `AsyncIterator` (once consumed it does not loop). So this is definitely something the handler might need to be aware of.
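A small sketch of the underlying limitation described above (shown with a plain generator for brevity; fetch uses an async iterable internally): the body can only be iterated once, so a retry has nothing left to send and the declared content-length no longer matches.

```js
function * body () {
  yield 'hello'
  yield 'world'
}

const iterable = body()

console.log([...iterable].join('')) // 'helloworld' – the first attempt consumes the body
console.log([...iterable].join('')) // '' – a retry would send an empty body
```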
2024-05-22T10:41:18Z
6.18
nodejs/undici
3,251
nodejs__undici-3251
[ "3249" ]
033530df602d32bfe1dec22c97215f75a9afc433
diff --git a/lib/web/websocket/connection.js b/lib/web/websocket/connection.js --- a/lib/web/websocket/connection.js +++ b/lib/web/websocket/connection.js @@ -261,13 +261,9 @@ function closeWebSocketConnection (ws, code, reason, reasonByteLength) { /** @type {import('stream').Duplex} */ const socket = ws[kResponse].socket - socket.write(frame.createFrame(opcodes.CLOSE), (err) => { - if (!err) { - ws[kSentClose] = sentCloseFrameState.SENT - } - }) + socket.write(frame.createFrame(opcodes.CLOSE)) - ws[kSentClose] = sentCloseFrameState.PROCESSING + ws[kSentClose] = sentCloseFrameState.SENT // Upon either sending or receiving a Close control frame, it is said // that _The WebSocket Closing Handshake is Started_ and that the diff --git a/lib/web/websocket/receiver.js b/lib/web/websocket/receiver.js --- a/lib/web/websocket/receiver.js +++ b/lib/web/websocket/receiver.js @@ -5,7 +5,16 @@ const assert = require('node:assert') const { parserStates, opcodes, states, emptyBuffer, sentCloseFrameState } = require('./constants') const { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols') const { channels } = require('../../core/diagnostics') -const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived, utf8Decode, isControlFrame, isContinuationFrame } = require('./util') +const { + isValidStatusCode, + isValidOpcode, + failWebsocketConnection, + websocketMessageReceived, + utf8Decode, + isControlFrame, + isContinuationFrame, + isTextBinaryFrame +} = require('./util') const { WebsocketFrameSend } = require('./frame') const { CloseEvent } = require('./events') @@ -58,19 +67,45 @@ class ByteParser extends Writable { const opcode = buffer[0] & 0x0F const masked = (buffer[1] & 0x80) === 0x80 + if (!isValidOpcode(opcode)) { + failWebsocketConnection(this.ws, 'Invalid opcode received') + return callback() + } + if (masked) { failWebsocketConnection(this.ws, 'Frame cannot be masked') return callback() } + const rsv1 = (buffer[0] & 0x40) !== 0 + const rsv2 = (buffer[0] & 0x20) !== 0 + const rsv3 = (buffer[0] & 0x10) !== 0 + + // MUST be 0 unless an extension is negotiated that defines meanings + // for non-zero values. If a nonzero value is received and none of + // the negotiated extensions defines the meaning of such a nonzero + // value, the receiving endpoint MUST _Fail the WebSocket + // Connection_. + if (rsv1 || rsv2 || rsv3) { + failWebsocketConnection(this.ws, 'RSV1, RSV2, RSV3 must be clear') + return + } + const fragmented = !fin && opcode !== opcodes.CONTINUATION - if (fragmented && opcode !== opcodes.BINARY && opcode !== opcodes.TEXT) { + if (fragmented && !isTextBinaryFrame(opcode)) { // Only text and binary frames can be fragmented failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') return } + // If we are already parsing a text/binary frame and do not receive either + // a continuation frame or close frame, fail the connection. 
+ if (isTextBinaryFrame(opcode) && this.#info.opcode !== undefined) { + failWebsocketConnection(this.ws, 'Expected continuation frame') + return + } + const payloadLength = buffer[1] & 0x7F if (isControlFrame(opcode)) { @@ -269,7 +304,7 @@ class ByteParser extends Writable { if (info.payloadLength > 125) { // Control frames can have a payload length of 125 bytes MAX - callback(new Error('Payload length for control frame exceeded 125 bytes.')) + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') return false } else if (this.#byteOffset < info.payloadLength) { callback() @@ -375,7 +410,7 @@ class ByteParser extends Writable { parseContinuationFrame (callback, info) { // If we received a continuation frame before we started parsing another frame. if (this.#info.opcode === undefined) { - callback(new Error('Received unexpected continuation frame.')) + failWebsocketConnection(this.ws, 'Received unexpected continuation frame.') return false } else if (this.#byteOffset < info.payloadLength) { callback() diff --git a/lib/web/websocket/util.js b/lib/web/websocket/util.js --- a/lib/web/websocket/util.js +++ b/lib/web/websocket/util.js @@ -226,6 +226,14 @@ function isContinuationFrame (opcode) { return opcode === opcodes.CONTINUATION } +function isTextBinaryFrame (opcode) { + return opcode === opcodes.TEXT || opcode === opcodes.BINARY +} + +function isValidOpcode (opcode) { + return isTextBinaryFrame(opcode) || isContinuationFrame(opcode) || isControlFrame(opcode) +} + // https://nodejs.org/api/intl.html#detecting-internationalization-support const hasIntl = typeof process.versions.icu === 'string' const fatalDecoder = hasIntl ? new TextDecoder('utf-8', { fatal: true }) : undefined @@ -255,5 +263,7 @@ module.exports = { websocketMessageReceived, utf8Decode, isControlFrame, - isContinuationFrame + isContinuationFrame, + isTextBinaryFrame, + isValidOpcode } diff --git a/lib/web/websocket/websocket.js b/lib/web/websocket/websocket.js --- a/lib/web/websocket/websocket.js +++ b/lib/web/websocket/websocket.js @@ -26,7 +26,7 @@ const { ByteParser } = require('./receiver') const { kEnumerableProperty, isBlobLike } = require('../../core/util') const { getGlobalDispatcher } = require('../../global') const { types } = require('node:util') -const { ErrorEvent } = require('./events') +const { ErrorEvent, CloseEvent } = require('./events') let experimentalWarned = false @@ -594,9 +594,19 @@ function onParserDrain () { } function onParserError (err) { - fireEvent('error', this, () => new ErrorEvent('error', { error: err, message: err.reason })) + let message + let code + + if (err instanceof CloseEvent) { + message = err.reason + code = err.code + } else { + message = err.message + } + + fireEvent('error', this, () => new ErrorEvent('error', { error: err, message })) - closeWebSocketConnection(this, err.code) + closeWebSocketConnection(this, code) } module.exports = {
diff --git a/test/autobahn/.gitignore b/test/autobahn/.gitignore new file mode 100644 --- /dev/null +++ b/test/autobahn/.gitignore @@ -0,0 +1 @@ +reports/clients diff --git a/test/autobahn/client.js b/test/autobahn/client.js new file mode 100644 --- /dev/null +++ b/test/autobahn/client.js @@ -0,0 +1,47 @@ +'use strict' + +const { WebSocket } = require('../..') + +let currentTest = 1 +let testCount + +const autobahnFuzzingserverUrl = process.env.FUZZING_SERVER_URL || 'ws://localhost:9001' + +function nextTest () { + let ws + + if (currentTest > testCount) { + ws = new WebSocket(`${autobahnFuzzingserverUrl}/updateReports?agent=undici`) + return + } + + console.log(`Running test case ${currentTest}/${testCount}`) + + ws = new WebSocket( + `${autobahnFuzzingserverUrl}/runCase?case=${currentTest}&agent=undici` + ) + ws.addEventListener('message', (data) => { + ws.send(data.data) + }) + ws.addEventListener('close', () => { + currentTest++ + process.nextTick(nextTest) + }) + ws.addEventListener('error', (e) => { + console.error(e.error) + }) +} + +const ws = new WebSocket(`${autobahnFuzzingserverUrl}/getCaseCount`) +ws.addEventListener('message', (data) => { + testCount = parseInt(data.data) +}) +ws.addEventListener('close', () => { + if (testCount > 0) { + nextTest() + } +}) +ws.addEventListener('error', (e) => { + console.error(e.error) + process.exit(1) +}) diff --git a/test/autobahn/config/fuzzingserver.json b/test/autobahn/config/fuzzingserver.json new file mode 100644 --- /dev/null +++ b/test/autobahn/config/fuzzingserver.json @@ -0,0 +1,7 @@ +{ + "url": "ws://127.0.0.1:9001", + "outdir": "./reports/clients", + "cases": ["*"], + "exclude-cases": [], + "exclude-agent-cases": {} +} diff --git a/test/autobahn/report.js b/test/autobahn/report.js new file mode 100644 --- /dev/null +++ b/test/autobahn/report.js @@ -0,0 +1,142 @@ +'use strict' + +const result = require('./reports/clients/index.json').undici + +const failOnError = process.env.FAIL_ON_ERROR === 'true' +const reporter = process.env.REPORTER || 'table' +let runFailed = false + +let okTests = 0 +let failedTests = 0 +let nonStrictTests = 0 +let wrongCodeTests = 0 +let uncleanTests = 0 +let failedByClientTests = 0 +let informationalTests = 0 +let unimplementedTests = 0 + +let totalTests = 0 + +function testCaseIdToWeight (testCaseId) { + const [major, minor, sub] = testCaseId.split('.') + return sub + ? 
parseInt(major, 10) * 10000 + parseInt(minor, 10) * 100 + parseInt(sub, 10) + : parseInt(major, 10) * 10000 + parseInt(minor, 10) * 100 +} + +function isFailedTestCase (testCase) { + return ( + testCase.behavior === 'FAILED' || + testCase.behavior === 'WRONG CODE' || + testCase.behavior === 'UNCLEAN' || + testCase.behavior === 'FAILED BY CLIENT' || + testCase.behaviorClose === 'FAILED' || + testCase.behaviorClose === 'WRONG CODE' || + testCase.behaviorClose === 'UNCLEAN' || + testCase.behaviorClose === 'FAILED BY CLIENT' + ) +} + +const keys = Object.keys(result).sort((a, b) => { + a = testCaseIdToWeight(a) + b = testCaseIdToWeight(b) + return a - b +}) + +const reorderedResult = {} +for (const key of keys) { + reorderedResult[key] = result[key] + delete reorderedResult[key].reportfile + + totalTests++ + + if ( + failOnError && + !runFailed && + isFailedTestCase(result[key]) + ) { + runFailed = true + } + + switch (result[key].behavior) { + case 'OK': + okTests++ + break + case 'FAILED': + failedTests++ + break + case 'NON-STRICT': + nonStrictTests++ + break + case 'WRONG CODE': + wrongCodeTests++ + break + case 'UNCLEAN': + uncleanTests++ + break + case 'FAILED BY CLIENT': + failedByClientTests++ + break + case 'INFORMATIONAL': + informationalTests++ + break + case 'UNIMPLEMENTED': + unimplementedTests++ + break + } +} + +if ( + reporter === 'table' +) { + console.log('Autobahn Test Report\n\nSummary:') + + console.table({ + OK: okTests, + Failed: failedTests, + 'Non-Strict': nonStrictTests, + 'Wrong Code': wrongCodeTests, + Unclean: uncleanTests, + 'Failed By Client': failedByClientTests, + Informational: informationalTests, + Unimplemented: unimplementedTests, + 'Total Tests': totalTests + }) + + console.log('Details:') + + console.table(reorderedResult) +} + +if (reporter === 'markdown') { + console.log(`## Autobahn Test Report + +### Summary + +| Type | Count | +|---|---| +| OK | ${okTests} | +| Failed | ${failedTests} | +| Non-Strict | ${nonStrictTests} | +| Wrong Code | ${wrongCodeTests} | +| Unclean | ${uncleanTests} | +| Failed By Client | ${failedByClientTests} | +| Informational | ${informationalTests} | +| Unimplemented | ${unimplementedTests} | +| Total Tests | ${totalTests} | + +<details> +<summary>Details</summary> + +| Test Case | Behavior | Close Behavior | Duration | Remote Close Code | +|---|---|---|---|---| +${keys.map(key => { + const testCase = reorderedResult[key] + return `| ${key} | ${testCase.behavior} | ${testCase.behaviorClose} | ${testCase.duration} | ${testCase.remoteCloseCode} |` +}).join('\n')} + +</details> +`) +} + +process.exit(runFailed ? 1 : 0) diff --git a/test/autobahn/run.sh b/test/autobahn/run.sh new file mode 100755 --- /dev/null +++ b/test/autobahn/run.sh @@ -0,0 +1,6 @@ +docker run -it --rm \ + -v "${PWD}/config:/config" \ + -v "${PWD}/reports:/reports" \ + -p 9001:9001 \ + --name fuzzingserver \ + crossbario/autobahn-testsuite
running autobahn test suite We cannot move WebSocket out of experimental until we run the Autobahn test suite. https://github.com/crossbario/autobahn-testsuite
2024-05-12T19:45:58Z
6.16
nodejs/undici
3,206
nodejs__undici-3206
[ "3080" ]
6866d2992b1d6bef05f8d503f1bcba9f6ddc1c7e
diff --git a/lib/web/websocket/connection.js b/lib/web/websocket/connection.js --- a/lib/web/websocket/connection.js +++ b/lib/web/websocket/connection.js @@ -1,13 +1,14 @@ 'use strict' -const { uid, states, sentCloseFrameState } = require('./constants') +const { uid, states, sentCloseFrameState, emptyBuffer, opcodes } = require('./constants') const { kReadyState, kSentClose, kByteParser, - kReceivedClose + kReceivedClose, + kResponse } = require('./symbols') -const { fireEvent, failWebsocketConnection } = require('./util') +const { fireEvent, failWebsocketConnection, isClosing, isClosed, isEstablished } = require('./util') const { channels } = require('../../core/diagnostics') const { CloseEvent } = require('./events') const { makeRequest } = require('../fetch/request') @@ -15,6 +16,7 @@ const { fetching } = require('../fetch/index') const { Headers } = require('../fetch/headers') const { getDecodeSplit } = require('../fetch/util') const { kHeadersList } = require('../../core/symbols') +const { WebsocketFrameSend } = require('./frame') /** @type {import('crypto')} */ let crypto @@ -211,6 +213,72 @@ function establishWebSocketConnection (url, protocols, ws, onEstablish, options) return controller } +function closeWebSocketConnection (ws, code, reason, reasonByteLength) { + if (isClosing(ws) || isClosed(ws)) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(ws)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). + failWebsocketConnection(ws, 'Connection was closed before it was established.') + ws[kReadyState] = states.CLOSING + } else if (ws[kSentClose] === sentCloseFrameState.NOT_SENT) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + + ws[kSentClose] = sentCloseFrameState.PROCESSING + + const frame = new WebsocketFrameSend() + + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. 
+ frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + + /** @type {import('stream').Duplex} */ + const socket = ws[kResponse].socket + + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + ws[kSentClose] = sentCloseFrameState.SENT + } + }) + + ws[kSentClose] = sentCloseFrameState.PROCESSING + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + ws[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + ws[kReadyState] = states.CLOSING + } +} + /** * @param {Buffer} chunk */ @@ -237,10 +305,10 @@ function onSocketClose () { const result = ws[kByteParser].closingInfo - if (result) { + if (result && !result.error) { code = result.code ?? 1005 reason = result.reason - } else if (ws[kSentClose] !== sentCloseFrameState.SENT) { + } else if (!ws[kReceivedClose]) { // If _The WebSocket // Connection is Closed_ and no Close control frame was received by the // endpoint (such as could occur if the underlying transport connection @@ -293,5 +361,6 @@ function onSocketError (error) { } module.exports = { - establishWebSocketConnection + establishWebSocketConnection, + closeWebSocketConnection } diff --git a/lib/web/websocket/receiver.js b/lib/web/websocket/receiver.js --- a/lib/web/websocket/receiver.js +++ b/lib/web/websocket/receiver.js @@ -6,6 +6,7 @@ const { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbol const { channels } = require('../../core/diagnostics') const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived, utf8Decode } = require('./util') const { WebsocketFrameSend } = require('./frame') +const { CloseEvent } = require('./events') // This code was influenced by ws released under the MIT license. // Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com> @@ -55,6 +56,12 @@ class ByteParser extends Writable { this.#info.fin = (buffer[0] & 0x80) !== 0 this.#info.opcode = buffer[0] & 0x0F + this.#info.masked = (buffer[1] & 0x80) === 0x80 + + if (this.#info.masked) { + failWebsocketConnection(this.ws, 'Frame cannot be masked') + return callback() + } // If we receive a fragmented message, we use the type of the first // frame to parse the full message as binary/text, when it's terminated @@ -102,6 +109,13 @@ class ByteParser extends Writable { this.#info.closeInfo = this.parseCloseBody(body) + if (this.#info.closeInfo.error) { + const { code, reason } = this.#info.closeInfo + + callback(new CloseEvent('close', { wasClean: false, reason, code })) + return + } + if (this.ws[kSentClose] !== sentCloseFrameState.SENT) { // If an endpoint receives a Close frame and did not previously send a // Close frame, the endpoint MUST send a Close frame in response. 
(When @@ -310,16 +324,16 @@ class ByteParser extends Writable { } if (code !== undefined && !isValidStatusCode(code)) { - return null + return { code: 1002, reason: 'Invalid status code', error: true } } try { reason = utf8Decode(reason) } catch { - return null + return { code: 1007, reason: 'Invalid UTF-8', error: true } } - return { code, reason } + return { code, reason, error: false } } get closingInfo () { diff --git a/lib/web/websocket/util.js b/lib/web/websocket/util.js --- a/lib/web/websocket/util.js +++ b/lib/web/websocket/util.js @@ -197,7 +197,8 @@ function failWebsocketConnection (ws, reason) { if (reason) { // TODO: process.nextTick fireEvent('error', ws, (type, init) => new ErrorEvent(type, init), { - error: new Error(reason) + error: new Error(reason), + message: reason }) } } diff --git a/lib/web/websocket/websocket.js b/lib/web/websocket/websocket.js --- a/lib/web/websocket/websocket.js +++ b/lib/web/websocket/websocket.js @@ -3,7 +3,7 @@ const { webidl } = require('../fetch/webidl') const { URLSerializer } = require('../fetch/data-url') const { getGlobalOrigin } = require('../fetch/global') -const { staticPropertyDescriptors, states, sentCloseFrameState, opcodes, emptyBuffer } = require('./constants') +const { staticPropertyDescriptors, states, sentCloseFrameState, opcodes } = require('./constants') const { kWebSocketURL, kReadyState, @@ -16,18 +16,17 @@ const { const { isConnecting, isEstablished, - isClosed, isClosing, isValidSubprotocol, - failWebsocketConnection, fireEvent } = require('./util') -const { establishWebSocketConnection } = require('./connection') +const { establishWebSocketConnection, closeWebSocketConnection } = require('./connection') const { WebsocketFrameSend } = require('./frame') const { ByteParser } = require('./receiver') const { kEnumerableProperty, isBlobLike } = require('../../core/util') const { getGlobalDispatcher } = require('../../global') const { types } = require('node:util') +const { ErrorEvent } = require('./events') let experimentalWarned = false @@ -197,67 +196,7 @@ class WebSocket extends EventTarget { } // 3. Run the first matching steps from the following list: - if (isClosing(this) || isClosed(this)) { - // If this's ready state is CLOSING (2) or CLOSED (3) - // Do nothing. - } else if (!isEstablished(this)) { - // If the WebSocket connection is not yet established - // Fail the WebSocket connection and set this's ready state - // to CLOSING (2). - failWebsocketConnection(this, 'Connection was closed before it was established.') - this[kReadyState] = WebSocket.CLOSING - } else if (this[kSentClose] === sentCloseFrameState.NOT_SENT) { - // If the WebSocket closing handshake has not yet been started - // Start the WebSocket closing handshake and set this's ready - // state to CLOSING (2). - // - If neither code nor reason is present, the WebSocket Close - // message must not have a body. - // - If code is present, then the status code to use in the - // WebSocket Close message must be the integer given by code. - // - If reason is also present, then reasonBytes must be - // provided in the Close message after the status code. - - this[kSentClose] = sentCloseFrameState.PROCESSING - - const frame = new WebsocketFrameSend() - - // If neither code nor reason is present, the WebSocket Close - // message must not have a body. - - // If code is present, then the status code to use in the - // WebSocket Close message must be the integer given by code. 
- if (code !== undefined && reason === undefined) { - frame.frameData = Buffer.allocUnsafe(2) - frame.frameData.writeUInt16BE(code, 0) - } else if (code !== undefined && reason !== undefined) { - // If reason is also present, then reasonBytes must be - // provided in the Close message after the status code. - frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) - frame.frameData.writeUInt16BE(code, 0) - // the body MAY contain UTF-8-encoded data with value /reason/ - frame.frameData.write(reason, 2, 'utf-8') - } else { - frame.frameData = emptyBuffer - } - - /** @type {import('stream').Duplex} */ - const socket = this[kResponse].socket - - socket.write(frame.createFrame(opcodes.CLOSE), (err) => { - if (!err) { - this[kSentClose] = sentCloseFrameState.SENT - } - }) - - // Upon either sending or receiving a Close control frame, it is said - // that _The WebSocket Closing Handshake is Started_ and that the - // WebSocket connection is in the CLOSING state. - this[kReadyState] = states.CLOSING - } else { - // Otherwise - // Set this's ready state to CLOSING (2). - this[kReadyState] = WebSocket.CLOSING - } + closeWebSocketConnection(this, code, reason, reasonByteLength) } /** @@ -521,9 +460,8 @@ class WebSocket extends EventTarget { this[kResponse] = response const parser = new ByteParser(this) - parser.on('drain', function onParserDrain () { - this.ws[kResponse].socket.resume() - }) + parser.on('drain', onParserDrain) + parser.on('error', onParserError.bind(this)) response.socket.ws = this this[kByteParser] = parser @@ -647,6 +585,16 @@ webidl.converters.WebSocketSendData = function (V) { return webidl.converters.USVString(V) } +function onParserDrain () { + this.ws[kResponse].socket.resume() +} + +function onParserError (err) { + fireEvent('error', this, () => new ErrorEvent('error', { error: err, message: err.reason })) + + closeWebSocketConnection(this, err.code) +} + module.exports = { WebSocket }
diff --git a/test/websocket/client-received-masked-frame.js b/test/websocket/client-received-masked-frame.js new file mode 100644 --- /dev/null +++ b/test/websocket/client-received-masked-frame.js @@ -0,0 +1,45 @@ +'use strict' + +const { test } = require('node:test') +const { once } = require('node:events') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') +const { tspl } = require('@matteo.collina/tspl') +const { WebsocketFrameSend } = require('../../lib/web/websocket/frame') + +test('Client fails the connection if receiving a masked frame', async (t) => { + const assert = tspl(t, { plan: 2 }) + + const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(1006, 0) + + const frame = new WebsocketFrameSend(body) + const buffer = frame.createFrame(0x8) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + const socket = ws._socket + + socket.write(buffer, () => ws.close()) + }) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('close', (e) => { + assert.deepStrictEqual(e.code, 1006) + }) + + ws.addEventListener('error', () => { + assert.ok(true) + }) + + t.after(() => { + server.close() + ws.close() + }) + + await once(ws, 'close') + + await assert.completed +}) diff --git a/test/websocket/close-invalid-status-code.js b/test/websocket/close-invalid-status-code.js new file mode 100644 --- /dev/null +++ b/test/websocket/close-invalid-status-code.js @@ -0,0 +1,39 @@ +'use strict' + +const { test } = require('node:test') +const { once } = require('node:events') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') +const { tspl } = require('@matteo.collina/tspl') + +test('Client fails the connection if receiving a masked frame', async (t) => { + const assert = tspl(t, { plan: 2 }) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + const socket = ws._socket + + // 1006 status code + socket.write(Buffer.from([0x88, 0x02, 0x03, 0xee]), () => ws.close()) + }) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('close', (e) => { + assert.deepStrictEqual(e.code, 1006) + }) + + ws.addEventListener('error', () => { + assert.ok(true) + }) + + t.after(() => { + server.close() + ws.close() + }) + + await once(ws, 'close') + + await assert.completed +}) diff --git a/test/websocket/close-invalid-utf-8.js b/test/websocket/close-invalid-utf-8.js new file mode 100644 --- /dev/null +++ b/test/websocket/close-invalid-utf-8.js @@ -0,0 +1,49 @@ +'use strict' + +const { test } = require('node:test') +const { once } = require('node:events') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') +const { tspl } = require('@matteo.collina/tspl') + +test('Receiving a close frame with invalid utf-8', async (t) => { + const assert = tspl(t, { plan: 2 }) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.close(1000, Buffer.from([0xFF, 0xFE])) + + ws.on('close', (code) => { + assert.equal(code, 1007) + }) + }) + + const events = [] + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('close', (e) => { + events.push({ type: 'close', code: e.code }) + }) + + ws.addEventListener('error', () => { + events.push({ type: 'error' }) + }) + + t.after(() => { + server.close() + ws.close() + }) + + await once(ws, 'close') + + // An error event should be propagated immediately, then we should receive 
+ // a close event with a 1006 code. The code is 1006, and not 1007 (as we send + // the server) because the connection is closed before the server responds. + assert.deepStrictEqual(events, [ + { type: 'error' }, + { type: 'close', code: 1006 } + ]) + + await assert.completed +})
websocket: fix ByteParser.run and parseCloseBody parseCloseBody can return null. In that case this.#info.closeInfo would be null, and the following line would result in a crash: https://github.com/nodejs/undici/blob/5d5454380b72889706146a9c4d5f65225a99640f/lib/web/websocket/receiver.js#L111 This change also reduces the number of Buffers created.
## [Codecov](https://app.codecov.io/gh/nodejs/undici/pull/3080?dropdown=coverage&src=pr&el=h1&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs) Report Attention: Patch coverage is `87.50000%` with `2 lines` in your changes are missing coverage. Please review. > Project coverage is 94.19%. Comparing base [(`48af032`)](https://app.codecov.io/gh/nodejs/undici/commit/48af0320a35214316b4ecdfaed0194af5d1d13d5?dropdown=coverage&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs) to head [(`3def0c3`)](https://app.codecov.io/gh/nodejs/undici/pull/3080?dropdown=coverage&src=pr&el=desc&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs). > Report is 76 commits behind head on main. | [Files](https://app.codecov.io/gh/nodejs/undici/pull/3080?dropdown=coverage&src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs) | Patch % | Lines | |---|---|---| | [lib/web/websocket/receiver.js](https://app.codecov.io/gh/nodejs/undici/pull/3080?src=pr&el=tree&filepath=lib%2Fweb%2Fwebsocket%2Freceiver.js&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs#diff-bGliL3dlYi93ZWJzb2NrZXQvcmVjZWl2ZXIuanM=) | 87.50% | [2 Missing :warning: ](https://app.codecov.io/gh/nodejs/undici/pull/3080?src=pr&el=tree&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs) | <details><summary>Additional details and impacted files</summary> ```diff @@ Coverage Diff @@ ## main #3080 +/- ## ========================================== + Coverage 94.00% 94.19% +0.18% ========================================== Files 89 90 +1 Lines 24314 24436 +122 ========================================== + Hits 22856 23017 +161 + Misses 1458 1419 -39 ``` </details> [:umbrella: View full report in Codecov by Sentry](https://app.codecov.io/gh/nodejs/undici/pull/3080?dropdown=coverage&src=pr&el=continue&utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs). :loudspeaker: Have feedback on the report? [Share it here](https://about.codecov.io/codecov-pr-comment-feedback/?utm_medium=referral&utm_source=github&utm_content=comment&utm_campaign=pr+comments&utm_term=nodejs). tests are rightfully failing, the PR assumes that having no closing code is invalid when it's optional @KhafraDev PTAL I think there is a bug in the original logic. `parseCloseBody` is returning in two places null. This will actually result in code breaking. @KhafraDev What do you think now about this? I'm going to go ahead and close this. The main change is creating a singular less Buffer in a very cold case. As lpinca said in my original PR implementing websocket (paraphrasing): "the performance of the opening handshake doesn't matter because it rarely happens". It's the same for closing the connection. I disagree. At first it was about a performance improvement. Then I found a bug when I implemented this PR, which could lead to a crash. There isn't a bug being fixed without a test to cover it. The test that you added passes in main. The logic is mostly flawed as I pointed out. well, lets see if i can write a rogue websocket server. @KhafraDev added unit test ;) This diff makes that test pass. There is quite a lot of extraneous stuff here. 
```diff diff --git a/lib/web/websocket/receiver.js b/lib/web/websocket/receiver.js index 4b35ceb5..b1ef4720 100644 --- a/lib/web/websocket/receiver.js +++ b/lib/web/websocket/receiver.js @@ -316,7 +316,7 @@ class ByteParser extends Writable { try { reason = utf8Decode(reason) } catch { - return null + reason = undefined } return { code, reason } ``` I'm not sure if ignoring the reason is correct though. Maybe failing the connection (basically, force closing it) is a better idea. In that case, nothing would need to be changed in parseCloseBody. edit: Yeah, I don't think the test case makes sense. We're receiving an invalid frame, why are we handling it gracefully? same test against main: ![image](https://github.com/nodejs/undici/assets/5059100/2b42025c-6f75-44a1-8a3b-7aacb56fb051) I understand, but the way this PR is fixing this is wrong. ``` 1007 1007 indicates that an endpoint is terminating the connection because it has received data within a message that was not consistent with the type of the message (e.g., non-UTF-8 [[RFC3629]] data within a text message). ```
2024-05-05T22:57:41Z
6.15
nodejs/undici
3,169
nodejs__undici-3169
[ "2227" ]
3f927b8ef17791109cbb4f427b3e98ec4db9df25
diff --git a/lib/web/fetch/request.js b/lib/web/fetch/request.js --- a/lib/web/fetch/request.js +++ b/lib/web/fetch/request.js @@ -38,6 +38,8 @@ const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { signal.removeEventListener('abort', abort) }) +const dependentControllerMap = new WeakMap() + function buildAbort (acRef) { return abort @@ -57,6 +59,21 @@ function buildAbort (acRef) { this.removeEventListener('abort', abort) ac.abort(this.reason) + + const controllerList = dependentControllerMap.get(ac.signal) + + if (controllerList !== undefined) { + if (controllerList.size !== 0) { + for (const ref of controllerList) { + const ctrl = ref.deref() + if (ctrl !== undefined) { + ctrl.abort(this.reason) + } + } + controllerList.clear() + } + dependentControllerMap.delete(ac.signal) + } } } } @@ -754,11 +771,16 @@ class Request { if (this.signal.aborted) { ac.abort(this.signal.reason) } else { + let list = dependentControllerMap.get(this.signal) + if (list === undefined) { + list = new Set() + dependentControllerMap.set(this.signal, list) + } + const acRef = new WeakRef(ac) + list.add(acRef) util.addAbortListener( - this.signal, - () => { - ac.abort(this.signal.reason) - } + ac.signal, + buildAbort(acRef) ) }
diff --git a/test/wpt/status/fetch.status.json b/test/wpt/status/fetch.status.json --- a/test/wpt/status/fetch.status.json +++ b/test/wpt/status/fetch.status.json @@ -2,13 +2,11 @@ "api": { "abort": { "general.any.js": { - "note": "TODO(@KhafraDev): Clone aborts with original controller can probably be fixed", "fail": [ "Already aborted signal rejects immediately", "Underlying connection is closed when aborting after receiving response - no-cors", "Stream errors once aborted. Underlying connection closed.", - "Readable stream synchronously cancels with AbortError if aborted before reading", - "Clone aborts with original controller" + "Readable stream synchronously cancels with AbortError if aborted before reading" ] }, "cache.https.any.js": {
abortcontroller & request.clone issue with fetch

```js
import { Request } from 'undici'
import { ok, strictEqual } from 'node:assert'

const controller = new AbortController();
const signal = controller.signal;

const request = new Request('http://a', { signal });
const clonedRequest = request.clone();

const log = [];
request.signal.addEventListener('abort', () => log.push('original-aborted'));
clonedRequest.signal.addEventListener('abort', () => log.push('clone-aborted'));

controller.abort();

strictEqual(log, ['original-aborted', 'clone-aborted'], "Abort events fired in correct order");
ok(request.signal.aborted, 'Signal aborted');
ok(clonedRequest.signal.aborted, 'Signal aborted');
```

cc @atlowChemi since you recently made a PR regarding abortcontroller, hopefully you can help. I don't know enough about EventTarget and order of events to fix this
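The patch above wires cloned requests up through weakly held "dependent" controllers. As a simplified, hedged sketch of that pattern (names are illustrative, not undici's internals): each clone gets its own AbortController held via a WeakRef, so aborting the source signal propagates to every live clone without keeping otherwise garbage-collectable clones alive.

```js
// Sketch of the dependent-controller pattern, for illustration only.
const dependentControllers = new WeakMap()

function makeDependentSignal (sourceSignal) {
  const controller = new AbortController()

  if (sourceSignal.aborted) {
    controller.abort(sourceSignal.reason)
    return controller.signal
  }

  let refs = dependentControllers.get(sourceSignal)
  if (refs === undefined) {
    refs = new Set()
    dependentControllers.set(sourceSignal, refs)
    sourceSignal.addEventListener('abort', () => {
      // Propagate the abort to every dependent controller that is still alive.
      for (const ref of refs) {
        ref.deref()?.abort(sourceSignal.reason)
      }
      refs.clear()
    }, { once: true })
  }
  refs.add(new WeakRef(controller))

  return controller.signal
}

// Usage: both signals fire when the source controller aborts.
const source = new AbortController()
const dependent = makeDependentSignal(source.signal)
dependent.addEventListener('abort', () => console.log('dependent aborted'))
source.abort()
```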
2024-04-26T12:53:22Z
6.14
nodejs/undici
3,105
nodejs__undici-3105
[ "3097" ]
8f192e3748f045e07d1cd8cc892a8433dd31bc32
diff --git a/lib/web/fetch/body.js b/lib/web/fetch/body.js
--- a/lib/web/fetch/body.js
+++ b/lib/web/fetch/body.js
@@ -406,7 +406,7 @@ async function consumeBody (object, convertBytesToJSValue, instance) {
   // 1. If object is unusable, then return a promise rejected
   //    with a TypeError.
   if (bodyUnusable(object[kState].body)) {
-    throw new TypeError('Body is unusable')
+    throw new TypeError('Body is unusable: Body has already been read')
   }
 
   throwIfAborted(object[kState])
diff --git a/test/fetch/client-fetch.js b/test/fetch/client-fetch.js
--- a/test/fetch/client-fetch.js
+++ b/test/fetch/client-fetch.js
@@ -370,7 +370,7 @@ test('locked blob body', (t, done) => {
     const res = await fetch(`http://localhost:${server.address().port}`)
     const reader = res.body.getReader()
     res.blob().catch(err => {
-      strictEqual(err.message, 'Body is unusable')
+      strictEqual(err.message, 'Body is unusable: Body has already been read')
      reader.cancel()
    }).finally(done)
  })
@@ -390,7 +390,7 @@ test('disturbed blob body', (t, done) => {
       ok(true)
     })
     await res.blob().catch(err => {
-      strictEqual(err.message, 'Body is unusable')
+      strictEqual(err.message, 'Body is unusable: Body has already been read')
     })
     done()
   })
diff --git a/test/node-fetch/main.js b/test/node-fetch/main.js
--- a/test/node-fetch/main.js
+++ b/test/node-fetch/main.js
@@ -1385,7 +1385,7 @@ describe('node-fetch', () => {
       assert.strictEqual(res.headers.get('content-type'), 'text/plain')
       return res.text().then(() => {
         assert.strictEqual(res.bodyUsed, true)
-        return assert.rejects(res.text(), new TypeError('Body is unusable'))
+        return assert.rejects(res.text(), new TypeError('Body is unusable: Body has already been read'))
       })
     })
   })
Change "Body is unusable" error to something more helpful

## This would solve...

Undici throws an error with the message "Body is unusable" when the body of a response is non-null and the stream is either locked or disturbed. In most cases, this situation occurs because someone has attempted to read the body twice, such as:

```js
const response = await fetch(url);
console.log(await response.text());
const body = await response.json(); // throws "Body is unusable"
```

The message "Body is unusable" is opaque and requires searching to understand what it means and what the problem is. A more descriptive message would save developers a lot of time.

## The implementation should look like...

Changing the string on this line: https://github.com/nodejs/undici/blob/fe44b9b36718ff2c171568fc4a239ecb8eba038d/lib/web/fetch/body.js#L411

Suggestion: "Body is unusable: Body is either null or the stream has already been read."

The spec only specifies that a `TypeError` must be thrown, but it does not specify the text contained within that error, so it seems like changing the message would still be in line with the spec.

## I have also considered...

Not filing this issue. :smile:

## Additional context

<!-- Add any other context, screenshots or ideas about the feature request here. -->
Thanks for reporting! Would you like to send a Pull Request to address this issue? Remember to add unit tests.

I am not certain that the body can be null there.

@mcollina happy to! :tada:

If I had to implement it, I would do it like this:

```js
// symbols.js
const kBodyOk = Symbol('body_ok')
const kBodyLocked = Symbol('body_locked')
const kBodyDisturbed = Symbol('body_disturbed')

// https://fetch.spec.whatwg.org/#body-unusable
function bodyUnusable (body) {
  // An object including the Body interface mixin is
  // said to be unusable if its body is non-null and
  // its body’s stream is disturbed or locked.
  if (body != null) {
    if (body.stream.locked) {
      return kBodyLocked
    } else if (util.isDisturbed(body.stream)) {
      return kBodyDisturbed
    }
  }
  return kBodyOk
}
```

then throw it properly in the linked code place, or directly throw in bodyUnusable. Maybe rename it to assertBodyUsable. But probably @KhafraDev has a strong opinion regarding such a change.

@KhafraDev ah gotcha, I see that [`bodyUsable()` returns `false` if body is `null`](https://github.com/nodejs/undici/blob/fe44b9b36718ff2c171568fc4a239ecb8eba038d/lib/web/fetch/body.js#L448-L453). 👍 So maybe just "Body is unusable: The stream has already been read."?

That message isn't much better; it assumes that you have some knowledge of fetch internals (that the body mixin methods are helper methods for consuming .body, which is a ReadableStream). This example doesn't use any streams outwardly:

```js
const response = await fetch('https://www.google.com')
response.text()
response.text() // TypeError: Body is unusable
```

I don't think "read" is the correct verbiage here either, as it might imply that the entire body has been read, which also may not be true. Also, "read" is something you can do to a ReadableStream, so it may confuse people who do not directly access .body.

```js
const response = await fetch('https://www.goolge.com')
const reader = response.body.getReader()
await reader.read() // a single chunk
reader.releaseLock()
await response.text() // TypeError: Body is unusable
```

Instead, it should be something along the lines of "Body has already been consumed". Still, I don't necessarily think that the message is entirely clear, and I suspect people will still have to look up what it actually means. Suggestions are welcome.
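For completeness, the "unusable" condition the thread keeps referring to can be checked from the caller's side with nothing but public fetch API properties. A hedged, caller-side sketch (the helper name and URL are placeholders, this is not undici code):

```js
// Sketch: refuse to consume a Response body that is already used or locked,
// so the failure is explicit instead of surfacing as "Body is unusable".
async function readJsonOnce (response) {
  const unusable = response.body !== null && (response.bodyUsed || response.body.locked)
  if (unusable) {
    throw new TypeError('Body is unusable: it was already consumed or its stream is locked')
  }
  return response.json()
}

// Example: the second call is rejected up front.
;(async () => {
  const response = await fetch('https://example.com/data.json')
  await readJsonOnce(response)
  await readJsonOnce(response).catch(err => console.error(err.message))
})()
```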
2024-04-12T22:11:28Z
6.13
nodejs/undici
3,047
nodejs__undici-3047
[ "3046" ]
dde070b53206022051da9dffb2df220b75759d5c
diff --git a/lib/dispatcher/client-h2.js b/lib/dispatcher/client-h2.js --- a/lib/dispatcher/client-h2.js +++ b/lib/dispatcher/client-h2.js @@ -55,14 +55,20 @@ const { } = http2 function parseH2Headers (headers) { - // set-cookie is always an array. Duplicates are added to the array. - // For duplicate cookie headers, the values are joined together with '; '. - headers = Object.entries(headers).flat(2) - const result = [] - for (const header of headers) { - result.push(Buffer.from(header)) + for (const [name, value] of Object.entries(headers)) { + // h2 may concat the header value by array + // e.g. Set-Cookie + if (Array.isArray(value)) { + for (const subvalue of value) { + // we need to provide each header value of header name + // because the headers handler expect name-value pair + result.push(Buffer.from(name), Buffer.from(subvalue)) + } + } else { + result.push(Buffer.from(name), Buffer.from(value)) + } } return result
diff --git a/test/fetch/http2.js b/test/fetch/http2.js --- a/test/fetch/http2.js +++ b/test/fetch/http2.js @@ -462,3 +462,48 @@ test('Issue #2386', async (t) => { controller.abort() ok(true) }) + +test('Issue #3046', async (t) => { + const server = createSecureServer(pem) + + const { strictEqual, deepStrictEqual } = tspl(t, { plan: 6 }) + + server.on('stream', async (stream, headers) => { + strictEqual(headers[':method'], 'GET') + strictEqual(headers[':path'], '/') + strictEqual(headers[':scheme'], 'https') + + stream.respond({ + 'set-cookie': ['hello=world', 'foo=bar'], + 'content-type': 'text/html; charset=utf-8', + ':status': 200 + }) + + stream.end('<h1>Hello World</h1>') + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.after(closeClientAndServerAsPromise(client, server)) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + method: 'GET', + dispatcher: client + } + ) + + strictEqual(response.status, 200) + strictEqual(response.headers.get('content-type'), 'text/html; charset=utf-8') + deepStrictEqual(response.headers.getSetCookie(), ['hello=world', 'foo=bar']) +})
Exception parsing HTTP/2 Headers when multiple set-cookie is sent

## Bug Description

<!-- A clear and concise description of what the bug is. -->

When making requests to websites that return multiple set-cookie headers with HTTP/2 enabled, an exception is triggered while processing the headers.

## Reproducible By

<!-- A step by step list on how the bug can be reproduced for examination. -->

https://replit.com/@gava97/H2-headers

## Expected Behavior

No exception thrown, headers parsed correctly.

## Logs & Screenshots

(node:1160) [UNDICI-H2] Warning: H2 support is experimental, expect them to change at any time.
(Use `node --trace-warnings ...` to show where the warning was created)
TypeError: fetch failed
    at fetch (/home/runner/H2-headers/node_modules/undici/index.js:109:13)
    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
    at async file:///home/runner/H2-headers/index.js:8:13 {
  [cause]: TypeError: Cannot read properties of undefined (reading 'toString')
    at Object.onHeaders (/home/runner/H2-headers/node_modules/undici/lib/web/fetch/index.js:2135:97)
    at Request.onHeaders (/home/runner/H2-headers/node_modules/undici/lib/core/request.js:243:29)
    at ClientHttp2Stream.<anonymous> (/home/runner/H2-headers/node_modules/undici/lib/dispatcher/client-h2.js:420:17)
    at Object.onceWrapper (node:events:633:26)
    at ClientHttp2Stream.emit (node:events:518:28)
    at emit (node:internal/http2/core:331:3)
    at process.processTicksAndRejections (node:internal/process/task_queues:85:22)

![image](https://github.com/nodejs/undici/assets/4953714/46e8ee08-4bd0-44ba-81d2-ebf8937ddd3d)

## Environment

Node v20.11.1
undici 6.11.1

### Additional context

This bug is present only in undici >= 6.11
Yes, we changed the way we parse the response headers; it seems the root cause lies in the way we parse `set-cookie`. With `node:http2`, multiple set-cookie headers are consolidated into an array of values, so we need to change that handling so that `fetch` can parse them according to the spec. Working on a fix.
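The fix in the patch above does exactly that: it re-expands array-valued headers from the `node:http2` headers object into individual name/value pairs before the header handler sees them. Roughly (a sketch close to, but not byte-for-byte, the patched `parseH2Headers`):

```js
// node:http2 delivers repeated headers such as set-cookie as an array,
// so each array entry must become its own name/value pair.
function parseH2Headers (headers) {
  const result = []
  for (const [name, value] of Object.entries(headers)) {
    if (Array.isArray(value)) {
      // e.g. { 'set-cookie': ['hello=world', 'foo=bar'] }
      for (const subvalue of value) {
        result.push(Buffer.from(name), Buffer.from(subvalue))
      }
    } else {
      result.push(Buffer.from(name), Buffer.from(value))
    }
  }
  return result
}

// Example: two cookies become two separate set-cookie pairs.
console.log(parseH2Headers({
  'content-type': 'text/html; charset=utf-8',
  'set-cookie': ['hello=world', 'foo=bar']
}).map(buf => buf.toString()))
```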
2024-04-03T09:05:32Z
6.11
nodejs/undici
3,005
nodejs__undici-3005
[ "2364" ]
7485cd9b4cf9a86cb76b1597df527eba15755bfc
diff --git a/lib/dispatcher/client-h2.js b/lib/dispatcher/client-h2.js --- a/lib/dispatcher/client-h2.js +++ b/lib/dispatcher/client-h2.js @@ -391,6 +391,18 @@ function writeH2 (client, request) { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers request.onResponseStarted() + // Due to the stream nature, it is possible we face a race condition + // where the stream has been assigned, but the request has been aborted + // the request remains in-flight and headers hasn't been received yet + // for those scenarios, best effort is to destroy the stream immediately + // as there's no value to keep it open. + if (request.aborted || request.completed) { + const err = new RequestAbortedError() + errorRequest(client, request, err) + util.destroy(stream, err) + return + } + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { stream.pause() } @@ -426,7 +438,6 @@ function writeH2 (client, request) { stream.once('close', () => { session[kOpenStreams] -= 1 - // TODO(HTTP/2): unref only if current streams count is 0 if (session[kOpenStreams] === 0) { session.unref() }
diff --git a/test/http2.js b/test/http2.js --- a/test/http2.js +++ b/test/http2.js @@ -1294,3 +1294,88 @@ test('Should throw informational error on half-closed streams (remote)', async t t.strictEqual(err.code, 'UND_ERR_INFO') }) }) + +test('#2364 - Concurrent aborts', async t => { + const server = createSecureServer(pem) + + server.on('stream', (stream, headers, _flags, rawHeaders) => { + t.strictEqual(headers['x-my-header'], 'foo') + t.strictEqual(headers[':method'], 'GET') + setTimeout(() => { + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end('hello h2!') + }, 100) + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t = tspl(t, { plan: 18 }) + after(() => server.close()) + after(() => client.close()) + const controller = new AbortController() + + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }, (err, response) => { + t.ifError(err) + t.strictEqual(response.headers['content-type'], 'text/plain; charset=utf-8') + t.strictEqual(response.headers['x-custom-h2'], 'hello') + t.strictEqual(response.statusCode, 200) + response.body.dump() + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + }, + signal: controller.signal + }, (err, response) => { + t.strictEqual(err.name, 'AbortError') + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }, (err, response) => { + t.ifError(err) + t.strictEqual(response.headers['content-type'], 'text/plain; charset=utf-8') + t.strictEqual(response.headers['x-custom-h2'], 'hello') + t.strictEqual(response.statusCode, 200) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + }, + signal: controller.signal + }, (err, response) => { + t.strictEqual(err.name, 'AbortError') + }) + + controller.abort() + + await t.completed +})
[HTTP/2] Aborted requests for same domain might fail unexpectedly

## Bug Description

As the title says, pipelined requests might fail unexpectedly if another request toward the same domain has been aborted:

```
node:assert:399
    throw err;
    ^

AssertionError [ERR_ASSERTION]: The expression evaluated to a falsy value:

  assert(!this.aborted)

    at Request.onHeaders (/home/runner/undici-h2-promise-any/node_modules/undici/lib/core/request.js:235:5)
    at ClientHttp2Stream.<anonymous> (/home/runner/undici-h2-promise-any/node_modules/undici/lib/client.js:1804:17)
    at Object.onceWrapper (node:events:628:26)
    at ClientHttp2Stream.emit (node:events:513:28)
    at emit (node:internal/http2/core:330:3)
    at process.processTicksAndRejections (node:internal/process/task_queues:85:22) {
  generatedMessage: true,
  code: 'ERR_ASSERTION',
  actual: false,
  expected: true,
  operator: '=='
}

Node.js v18.16.1
```

## Reproducible By

https://replit.com/@isukkaw/undici-h2-promise-any

```js
const undici = require('undici');
undici.setGlobalDispatcher(new undici.Agent({ allowH2: true, pipelining: 10 }));

const { fetch } = undici;

const fetchUbuntuXenialReleaseGpg = () => {
  const controller = new AbortController();

  return Promise.any(
    [
      'https://mirror-cdn.xtom.com/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.com/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.com.hk/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.de/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.nl/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.ee/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.jp/ubuntu/dists/xenial/Release.gpg',
      'https://mirrors.xtom.au/ubuntu/dists/xenial/Release.gpg'
    ].map(async url => {
      try {
        const resp = await fetch(url, { signal: controller.signal });
        const text = await resp.text();
        // The body should have been consumed, abort the requests
        controller.abort();
        return text;
      } catch(e) {
        if (e.name !== 'AbortError') {
          console.error('Request Fail:', url)
          throw e;
        }
      }
    })
  );
};

const fetchDebianBookwormReleaseGpg = () => {
  const controller = new AbortController();

  return Promise.any(
    [
      'https://mirror-cdn.xtom.com/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.com/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.com.hk/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.de/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.nl/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.ee/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.jp/debian/dists/bookworm/Release.gpg',
      'https://mirrors.xtom.au/debian/dists/bookworm/Release.gpg'
    ].map(async url => {
      try {
        const resp = await fetch(url, { signal: controller.signal });
        const text = await resp.text();
        // The body should have been consumed, abort the requests
        controller.abort();
        return text;
      } catch(e) {
        if (e.name !== 'AbortError') {
          console.error('Request Fail:', url)
          throw e;
        }
      }
    })
  );
};

(async () => {
  const [ubuntu, debian] = await Promise.all([
    fetchUbuntuXenialReleaseGpg(),
    fetchDebianBookwormReleaseGpg()
  ]);

  console.log({ ubuntu: ubuntu.length, debian: debian.length })
})();
```

## Expected Behavior

`AssertionError [ERR_ASSERTION]: The expression evaluated to a falsy value: assert(!this.aborted)` should never happen.

## Logs & Screenshots

```
(node:3251) [UNDICI-H2] Warning: H2 support is experimental, expect them to change at any time.
(Use `node --trace-warnings ...` to show where the warning was created)
node:assert:399
    throw err;
    ^

AssertionError [ERR_ASSERTION]: The expression evaluated to a falsy value:

  assert(!this.aborted)

    at Request.onHeaders (/home/runner/undici-h2-promise-any/node_modules/undici/lib/core/request.js:235:5)
    at ClientHttp2Stream.<anonymous> (/home/runner/undici-h2-promise-any/node_modules/undici/lib/client.js:1804:17)
    at Object.onceWrapper (node:events:628:26)
    at ClientHttp2Stream.emit (node:events:513:28)
    at emit (node:internal/http2/core:330:3)
    at process.processTicksAndRejections (node:internal/process/task_queues:85:22) {
  generatedMessage: true,
  code: 'ERR_ASSERTION',
  actual: false,
  expected: true,
  operator: '=='
}

Node.js v18.16.1
```

## Environment

Node.js 18.16.0 on Ubuntu 22.04.1
@metcoder95 Started doing some research on this, and the bug is in fact quite complex; it seems that somewhere we are reusing a `request` instance that was already aborted to handle a second request. Haven't found the root cause yet. I'll do more research later on.

The repro SukkaW provided still reproduces even with pipelining disabled, maybe this issue should be re-titled?

Trying to swap to undici in our software, I'm able to get this stack trace. We only issue one request to a dummy /debug/stream endpoint which pushes one SSE per second, and abort it after a few seconds, but we receive exactly one more chunk afterwards, resulting in this assertion failure:

```
[10/25/2023, 6:25:20 PM] [PLATFORM_FETCH] Sending GET: https://localhost:8020/debug/stream undefined (GET /debug/stream YR+xTDLhXJa6jIE13bX3WWaFzDQx)
undici init path /debug/stream
undici SUCCESS connected {"host":"localhost:8020","hostname":"localhost","protocol":"https:","port":"8020","servername":null,"localAddress":null}
(node:3957036) [UNDICI-H2] Warning: H2 support is experimental, expect them to change at any time.
(Use `node --trace-warnings ...` to show where the warning was created)
[10/25/2023, 6:25:20 PM] [PLATFORM_FETCH] Started response (200) for request GET (https://localhost:8020/debug/stream) undefined (GET /debug/stream YR+xTDLhXJa6jIE13bX3WWaFzDQx)
data from in client /debug/stream <Buffer 7b 22 30 22 3a 22 31 36 39 38 32 37 32 37 32 31 34 39 34 22 7d 0a 0a>
data from in client /debug/stream <Buffer 7b 22 31 22 3a 22 31 36 39 38 32 37 32 37 32 32 34 39 34 22 7d 0a 0a>
data from in client /debug/stream <Buffer 7b 22 32 22 3a 22 31 36 39 38 32 37 32 37 32 33 34 39 35 22 7d 0a 0a>
aborting
undici request error This operation was aborted /debug/stream
[10/25/2023, 6:25:24 PM] [PLATFORM_FETCH] Finished response (closed stream via abort controller) for request (GET https://localhost:8020/debug/stream YR+xTDLhXJa6jIE13bX3WWaFzDQx)
data from in client /debug/stream <Buffer 7b 22 33 22 3a 22 31 36 39 38 32 37 32 37 32 34 34 39 34 22 7d 0a 0a>
Trace: Aborted! /debug/stream <Buffer 7b 22 33 22 3a 22 31 36 39 38 32 37 32 37 32 34 34 39 34 22 7d 0a 0a>
node:internal/event_target:912
  process.nextTick(() => { throw err; });
                                 ^

AssertionError [ERR_ASSERTION]: The expression evaluated to a falsy value:

  assert2(!this.aborted)

    at Request2.onData (/nix/store/wvxs5i3mvh1lbvay44i13d24yy9rn95k-esbuild_node/depengine_worker.js:4815:9)
    at ClientHttp2Stream.<anonymous> (/nix/store/wvxs5i3mvh1lbvay44i13d24yy9rn95k-esbuild_node/depengine_worker.js:7177:21)
    at ClientHttp2Stream.emit (node:events:527:28)
    at addChunk (node:internal/streams/readable:324:12)
    at readableAddChunk (node:internal/streams/readable:297:9)
    at ClientHttp2Stream.Readable.push (node:internal/streams/readable:234:10)
    at Http2Stream.onStreamRead (node:internal/stream_base_commons:190:23)
Emitted 'error' event on Worker instance at:
    at Worker.[kOnErrorMessage] (node:internal/worker:289:10)
    at Worker.[kOnMessage] (node:internal/worker:300:37)
    at MessagePort.<anonymous> (node:internal/worker:201:57)
    at MessagePort.[nodejs.internal.kHybridDispatch] (node:internal/event_target:643:20)
    at MessagePort.exports.emitMessage (node:internal/per_context/messageport:23:28)

Node.js v17.9.0
```
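The patch for this entry addresses the race by re-checking the request's state when the HTTP/2 'response' event finally arrives, instead of asserting that an abort cannot have happened in the meantime. A runnable sketch of that guard with simplified stand-in objects (the names and shapes are illustrative, not undici's real `Request`/stream classes):

```js
const { EventEmitter } = require('node:events')

// If the request was aborted or completed while the stream was in flight,
// bail out and tear the stream down rather than feeding it headers.
function handleResponse (request, stream, headers) {
  if (request.aborted || request.completed) {
    const err = new Error('Request aborted')
    request.onError(err)
    stream.destroy(err)
    return
  }
  request.onHeaders(Number(headers[':status']), headers)
}

// Tiny stand-ins for the real objects, for illustration only.
const stream = Object.assign(new EventEmitter(), {
  destroy (err) { console.log('stream destroyed:', err.message) }
})
const request = {
  aborted: true, // a concurrent AbortController.abort() already ran
  completed: false,
  onError (err) { console.log('request errored:', err.message) },
  onHeaders () { throw new Error('should not be reached for an aborted request') }
}

stream.on('response', (headers) => handleResponse(request, stream, headers))
stream.emit('response', { ':status': 200 })
```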
2024-03-27T11:43:24Z
6.1
nodejs/undici
2,992
nodejs__undici-2992
[ "2986" ]
83f36b73eedf60a9eadb0020db29a38f4e727980
diff --git a/lib/handler/retry-handler.js b/lib/handler/retry-handler.js --- a/lib/handler/retry-handler.js +++ b/lib/handler/retry-handler.js @@ -242,14 +242,12 @@ class RetryHandler { } const { start, size, end = size } = range - assert( - start != null && Number.isFinite(start) && this.start !== start, + start != null && Number.isFinite(start), 'content-range mismatch' ) - assert(Number.isFinite(start)) assert( - end != null && Number.isFinite(end) && this.end !== end, + end != null && Number.isFinite(end), 'invalid content-length' )
diff --git a/test/retry-handler.js b/test/retry-handler.js --- a/test/retry-handler.js +++ b/test/retry-handler.js @@ -876,3 +876,106 @@ test('Should be able to properly pass the minTimeout to the RetryContext when co await t.completed }) +test('Issue#2986 - Handle custom 206', { only: true }, async t => { + t = tspl(t, { plan: 8 }) + + const chunks = [] + let counter = 0 + + // Took from: https://github.com/nxtedition/nxt-lib/blob/4b001ebc2f22cf735a398f35ff800dd553fe5933/test/undici/retry.js#L47 + let x = 0 + const server = createServer((req, res) => { + if (x === 0) { + t.deepStrictEqual(req.headers.range, 'bytes=0-3') + res.setHeader('etag', 'asd') + res.write('abc') + setTimeout(() => { + res.destroy() + }, 1e2) + } else if (x === 1) { + t.deepStrictEqual(req.headers.range, 'bytes=3-') + res.setHeader('content-range', 'bytes 3-6/6') + res.setHeader('etag', 'asd') + res.statusCode = 206 + res.end('def') + } + x++ + }) + + const dispatchOptions = { + retryOptions: { + retry: function (err, _, done) { + counter++ + + if (err.code && err.code === 'UND_ERR_DESTROYED') { + return done(false) + } + + if (err.statusCode === 206) return done(err) + + setTimeout(done, 800) + } + }, + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: (...args) => { + return client.dispatch(...args) + }, + handler: { + onRequestSent () { + t.ok(true, 'pass') + }, + onConnect () { + t.ok(true, 'pass') + }, + onBodySent () { + t.ok(true, 'pass') + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.strictEqual(status, 200) + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + t.strictEqual(Buffer.concat(chunks).toString('utf-8'), 'abcdef') + t.strictEqual(counter, 1) + }, + onError () { + t.fail() + } + } + }) + + client.dispatch( + { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json', + Range: 'bytes=0-3' + } + }, + handler + ) + + after(async () => { + await client.close() + + server.close() + await once(server, 'close') + }) + }) + + await t.completed +})
Fails to retrieve the first piece of data when downloading partial content with `RetryAgent`

## Bug Description

<!-- A clear and concise description of what the bug is. -->

When trying to download the file in parts (using `RetryAgent`), the download of the first part (when the first byte of the range is equal to zero) fails due to a `content-range mismatch` error inside the `RetryHandler` `onHeaders` method.

## Reproducible By

<!-- A step by step list on how the bug can be reproduced for examination. -->

Here's a simple example of reproducing the issue:

```js
const { setGlobalDispatcher, RetryAgent, Agent, request } = require('undici');

setGlobalDispatcher(new RetryAgent(new Agent()));

const start = 0;
const end = 26664011;
const size = end - start + 1;

const url = 'https://cdn.bitmovin.com/content/assets/art-of-motion-dash-hls-progressive/MI201109210084_mpeg-4_hd_high_1080p25_10mbits.mp4';
const options = { headers: { Range: `bytes=${start}-${end}` } };

request(url, options)
  .then((response) => {
    const contentLength = Number(response.headers['content-length']);
    console.log(contentLength === size);
  })
  .catch((e) => {
    console.log(e.message === 'content-range mismatch'); // true
  });
```

## Expected Behavior

<!-- A clear and concise description of what you expected to happen. -->

I expect the request to successfully execute and return the first piece of data. If you remove `RetryAgent` from the code example above, the request will succeed:

```js
const { setGlobalDispatcher, Agent, request } = require('undici');

setGlobalDispatcher(new Agent());

const start = 0;
const end = 26664011;
const size = end - start + 1;

const url = 'https://cdn.bitmovin.com/content/assets/art-of-motion-dash-hls-progressive/MI201109210084_mpeg-4_hd_high_1080p25_10mbits.mp4';
const options = { headers: { Range: `bytes=${start}-${end}` } };

request(url, options)
  .then((response) => {
    const contentLength = Number(response.headers['content-length']);
    console.log(contentLength === size); // true
  })
  .catch((e) => {
    console.log(e.message === 'content-range mismatch');
  });
```

## Logs & Screenshots

<!-- If applicable, add screenshots to help explain your problem, or alternatively add your console logs here. -->

```
AssertionError [ERR_ASSERTION]: content-range mismatch
    at RetryHandler.onHeaders (/Users/user/project/node_modules/undici/lib/handler/retry-handler.js:248:9)
    at Request.onHeaders (/Users/user/project/node_modules/undici/lib/core/request.js:243:29)
    at Parser.onHeadersComplete (/Users/user/project/node_modules/undici/lib/dispatcher/client-h1.js:507:27)
    at wasm_on_headers_complete (/Users/user/project/node_modules/undici/lib/dispatcher/client-h1.js:121:30)
    at wasm://wasm/0003626a:wasm-function[11]:0x494
    at wasm://wasm/0003626a:wasm-function[51]:0x1003
    at wasm://wasm/0003626a:wasm-function[68]:0x6e8e
    at wasm://wasm/0003626a:wasm-function[67]:0x1568
    at wasm://wasm/0003626a:wasm-function[21]:0x552
    at Parser.execute (/Users/user/project/node_modules/undici/lib/dispatcher/client-h1.js:262:22) {
  generatedMessage: false,
  code: 'ERR_ASSERTION',
  actual: false,
  expected: true,
  operator: '=='
}
```

## Environment

<!-- This is just your OS and environment information [e.g. Ubuntu 18.04 LTS, Node v14.14.0] -->

macOS Sonoma 14.3.1 (23D60)
Node v20.11.1
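The patch for this entry relaxes the assertions so that a Content-Range whose start happens to equal the requested start (including 0) is accepted. As a hedged, standalone sketch of what validating a Content-Range against the requested range looks like (illustrative helper, not the RetryHandler itself):

```js
// Parse "bytes <start>-<end>/<size>" and check it against the requested start.
// The key point: a first-part download legitimately starts at byte 0 and must
// not be treated as a mismatch.
function checkContentRange (header, requestedStart) {
  const match = /^bytes (\d+)-(\d+)\/(\d+|\*)$/.exec(header)
  if (match === null) {
    throw new TypeError(`malformed content-range: ${header}`)
  }

  const start = Number(match[1])
  const end = Number(match[2])
  if (!Number.isFinite(start) || !Number.isFinite(end) || end < start) {
    throw new RangeError('invalid content-range bounds')
  }
  if (start !== requestedStart) {
    throw new RangeError(`content-range mismatch: got ${start}, requested ${requestedStart}`)
  }
  return { start, end }
}

// Example from the issue: requesting bytes=0-26664011 and getting it back.
console.log(checkContentRange('bytes 0-26664011/26664012', 0))
```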
2024-03-25T09:49:08Z
6.1
nodejs/undici
2,939
nodejs__undici-2939
[ "2926" ]
219da8b7b3fea7e38a7644b8bc35fe6fec97d66e
diff --git a/lib/core/errors.js b/lib/core/errors.js --- a/lib/core/errors.js +++ b/lib/core/errors.js @@ -195,6 +195,16 @@ class RequestRetryError extends UndiciError { } } +class SecureProxyConnectionError extends UndiciError { + constructor (cause, message, options) { + super(message, { cause, ...(options ?? {}) }) + this.name = 'SecureProxyConnectionError' + this.message = message || 'Secure Proxy Connection failed' + this.code = 'UND_ERR_PRX_TLS' + this.cause = cause + } +} + module.exports = { AbortError, HTTPParserError, @@ -216,5 +226,6 @@ module.exports = { ResponseContentLengthMismatchError, BalancedPoolMissingUpstreamError, ResponseExceededMaxSizeError, - RequestRetryError + RequestRetryError, + SecureProxyConnectionError } diff --git a/lib/core/request.js b/lib/core/request.js --- a/lib/core/request.js +++ b/lib/core/request.js @@ -40,7 +40,8 @@ class Request { bodyTimeout, reset, throwOnError, - expectContinue + expectContinue, + servername }, handler) { if (typeof path !== 'string') { throw new InvalidArgumentError('path must be a string') @@ -181,7 +182,7 @@ class Request { validateHandler(handler, method, upgrade) - this.servername = getServerName(this.host) + this.servername = servername || getServerName(this.host) this[kHandler] = handler diff --git a/lib/dispatcher/proxy-agent.js b/lib/dispatcher/proxy-agent.js --- a/lib/dispatcher/proxy-agent.js +++ b/lib/dispatcher/proxy-agent.js @@ -5,7 +5,7 @@ const { URL } = require('node:url') const Agent = require('./agent') const Pool = require('./pool') const DispatcherBase = require('./dispatcher-base') -const { InvalidArgumentError, RequestAbortedError } = require('../core/errors') +const { InvalidArgumentError, RequestAbortedError, SecureProxyConnectionError } = require('../core/errors') const buildConnector = require('../core/connect') const kAgent = Symbol('proxy agent') @@ -37,7 +37,7 @@ class ProxyAgent extends DispatcherBase { } const url = this.#getUrl(opts) - const { href, origin, port, protocol, username, password } = url + const { href, origin, port, protocol, username, password, hostname: proxyHostname } = url this[kProxy] = { uri: href, protocol } this[kAgent] = new Agent(opts) @@ -78,7 +78,8 @@ class ProxyAgent extends DispatcherBase { headers: { ...this[kProxyHeaders], host: requestedHost - } + }, + servername: this[kProxyTls]?.servername || proxyHostname }) if (statusCode !== 200) { socket.on('error', () => {}).destroy() @@ -96,7 +97,12 @@ class ProxyAgent extends DispatcherBase { } this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) } catch (err) { - callback(err) + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + // Throw a custom error to avoid loop in client.js#connect + callback(new SecureProxyConnectionError(err)) + } else { + callback(err) + } } } })
diff --git a/test/fixtures/client-ca-crt.pem b/test/fixtures/client-ca-crt.pem deleted file mode 100644 --- a/test/fixtures/client-ca-crt.pem +++ /dev/null @@ -1,17 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICqDCCAZACCQC0Hman8CosTDANBgkqhkiG9w0BAQsFADAVMRMwEQYDVQQDDApu -b2RlanMub3JnMCAXDTIyMDcxOTE2MzQwMloYDzIxMjIwNzIwMTYzNDAyWjAVMRMw -EQYDVQQDDApub2RlanMub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAyrmvIOhsVJAinUZ0Np4o5cPz09arWAZnnDsMnU0d+NtI0lWOFCnpzJbER9eB -gJpRkOdkcsQFr0OcalExG4lQrj+yGdtLGSXVcE0aNsVSBNbNgaLbOFWfpA4c7pTF -SBLJdJ7pZ2LDrM2mXaQA30di3INsZOvuTnDSAEE8bwxnM7jDnTCOGD4asgzgknHa -NqYWJqrfEPoMcEtThX9XjBLlRq5X3YFAR8SRbMQDt2xbDLWO8mGo/y4Ezp+ol9dP -OdkX3f728EIgfk8fM7rpvHzJb8E6NPdKK/kqCjQxRJ4RMsRqKwiTgPcEqut0L6Kg -jGoDvOnc3dZ2QBrxGTYPrgZF2QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQA2DC4n -GNqQIABC82e3CovVH/LYB8M/PaqMwmXDI8kAKwk3j3lTHYD0WIyaFtCL4z/2GyDs -sgRmMlx5xVgXNv+8e793TMOqJ/0zixijguatR8r9GWdPAPhqCyCNrmUA26eyHEUV -Hx9mU7RNjv+qVe7fNXBkDorsyecclnDcxUd9k2C+RbjitnSKvhP64XqxAGk49HUH -3gw5uZw9uVlmD/dPSeKeSO4TX1HECH+WmPBKrBrcFGXNwGNzst8pFe3YVLLuseIq -4d5ngaOThGzVDJdsGIxhDfDBfH5FzDTMgEJxQQ3yXYwPR3zF4Ntn13oDkIu/vgbH -4n1eYIau6/1Y9OLX ------END CERTIFICATE----- diff --git a/test/fixtures/client-crt-2048.pem b/test/fixtures/client-crt-2048.pem deleted file mode 100644 --- a/test/fixtures/client-crt-2048.pem +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDkzCCAnugAwIBAgIUF2CLbUCxPnxARRlO7pANiXtZoLIwDQYJKoZIhvcNAQEL -BQAwWTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM -GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MB4X -DTIyMDYwOTE0Mzc0N1oXDTI1MDMwNDE0Mzc0N1owWTELMAkGA1UEBhMCQVUxEzAR -BgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5 -IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEA4PbcFnMY0FC1wzsyMf04GhOx/KNcOalHu4Wy76Wys+WoJ6hO5z87 -ZIcmsg0hbys1l6DGxloTXeZwcBDoOndUg3FBZvAXRKimhXA7Qf31a9efq9GXic2W -7Kyn1jPa724Vkr/zzlWb5I/Qkk6xcQmEFCDhilbMtpnPz/BwOwn/2vbcbiHNirUk -Dn+s0pUcQlin1f2AR4Jq7/K1xsqjjB6cU0chuzrwzwrglQS7jpXQxCsRaAAIZQJB -DTVQBEo/skqWwv8xABlVQgolxABIX3Wc3RUk7xRItdWCMe92/BJCGhWVXb2hUCBu -y/yz5hX9p353JlxmXEKQlhfPzhcdDv2sdwIDAQABo1MwUTAdBgNVHQ4EFgQUQ0di -dFnBDLhSDgHpM+/KBn+WmI4wHwYDVR0jBBgwFoAUQ0didFnBDLhSDgHpM+/KBn+W -mI4wDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAoCQJci8G+cUF -n030frY/OgnLJXUGC2ed9Tu+dYhEE7XQkX9WO3IK8As+jGY0kKzX7ZsvWAHHbSa3 -8qmHh1vWflU9HEc0MO0Toy6Ale2/BCjs0Oy3q2vd6t9pl3Pq2JTHyJNYu44h45we -ufQ+ttylHGZSmAqeHz4yGp1xVvjbfriDYuc0kW9UTwMpdpzR9RmqQEVD4ySxpuYV -FTj/ZiY89GdIJvsz1pmAhTUcUfuMgSlWS1nt0YR4yMkFS8KqQ1iKEApjrdDCU48W -eABaPeTCUlBCFEDuKxFVPduYVVvOHtkX/8LPH3CO7EDMoSZ1iCDZ7b2+AZbwh9j+ -dXqw+WFi7w== ------END CERTIFICATE----- diff --git a/test/fixtures/client-crt.pem b/test/fixtures/client-crt.pem deleted file mode 100644 --- a/test/fixtures/client-crt.pem +++ /dev/null @@ -1,17 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICpDCCAYwCCQCWvC2NnLEpZjANBgkqhkiG9w0BAQUFADAVMRMwEQYDVQQDDApu -b2RlanMub3JnMCAXDTIyMDcxOTE2NDE1OFoYDzIxMjIwNzIwMTY0MTU4WjARMQ8w -DQYDVQQLDAZVbmRpY2kwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDR -SJvCSXTHrmnGz/CN94nxgmnUD17jYzfJH+lbcJkw4RDHpb6KZ85LEijeKoYoGw+c -Z7a4LfmpIR4rcN3sJWGvafJyFx4DtLYPZiNrCaMsdMWiHbbMwrpvSsf5Fq3vVeUz -Py7wxzSRiM4VOwZ7fhCJdj2YIeQJgeIZh+NN/4mpyWehS4hQSHG+cbS4c44vkET0 -Hv48G7m+4ULFCZzmG2AIW8Drh73Wymmm3kymD3kDCAY4SDSJDArxNt6lJ3sGJGO6 -jobefLFyqvLj5544Lvk4C8hD3O+e9M3OHcdyqRXf55dZ8SIWgpoGVGXb5V5g3WL/ -ncXF87jm05pMZXqOz0wdAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAK2YxxGEDgqG -tp8uX/n0nFAj1p8sfkuD+FqYg7+PN/HYqCq6Ibrz/vVABL5Khb4qQzZN/ckJhY3k 
-bfwEjRTOoXMhPv+IkShMDdbTunwSQUXqeLe+qmPbLt5ZccxcYVIzEhJMlnjeJ4nk -NHg3BXt8y6mIIfY0Sv4znTkV995GHLK3Ax/Fd/2aio6aRCzkBCdaXY8j0SOzFHVy -+AvgRj04K2yBEEHd4bQTdLCJQR/gFQnGj37gXQp9I4qq+/1qj4sTs8BufnGKTDVT -/jYeycIY3l4A8/72NmDSIohaJTPwFUoXNBYywOnW71+Y05PXT45lJuaOJUf2s9iH -p/eTiEsfHsk= ------END CERTIFICATE----- diff --git a/test/fixtures/client-key-2048.pem b/test/fixtures/client-key-2048.pem deleted file mode 100644 --- a/test/fixtures/client-key-2048.pem +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEpAIBAAKCAQEA4PbcFnMY0FC1wzsyMf04GhOx/KNcOalHu4Wy76Wys+WoJ6hO -5z87ZIcmsg0hbys1l6DGxloTXeZwcBDoOndUg3FBZvAXRKimhXA7Qf31a9efq9GX -ic2W7Kyn1jPa724Vkr/zzlWb5I/Qkk6xcQmEFCDhilbMtpnPz/BwOwn/2vbcbiHN -irUkDn+s0pUcQlin1f2AR4Jq7/K1xsqjjB6cU0chuzrwzwrglQS7jpXQxCsRaAAI -ZQJBDTVQBEo/skqWwv8xABlVQgolxABIX3Wc3RUk7xRItdWCMe92/BJCGhWVXb2h -UCBuy/yz5hX9p353JlxmXEKQlhfPzhcdDv2sdwIDAQABAoIBAFVfeaCPZ2BO8Nu5 -UFBGP48t4EL3H93GDzHsCD8IC+xXgFwkdGUvyvNYkufJMeIFbN4xJp5JusXM2Oi+ -kdL2TD1hsqdFAB+PPTqwn9xoa0XU24SSEsc6HUeOMleI8FIi3c8GR5kLRhEUPtv3 -P0GdkeEtpUohrKizcHkCTyUoo09N35MFoH3Nb1iyMd10uq0iQlusljkTuukcHstK -MZQAYYcslqzyz9468O/cvsk23Ynd5FfjLgYKmdJ09qaxm4ptnF9NNJ2cLqwElbUF -xI3H5L/t1zxdwI0xZFFgDA4Ccpeq9QsRhRJGAOV94tN+4PxWXEPeQk4PM1EFDrNU -yysi/XkCgYEA+ElKG6cWQZydsb5Tk1vdJ/k18gZa5sv+WUGXkfm9EVecftGjtKQO -c7GwHO1IsLoZkhKfPpa/oifBR97DZRzw1ManEQPS980TZYei3Y9/8uPEpvgvRmm9 -MCHA5wp6YMlkZ5VN0SBRWnPhLtZ8L2/cqHOUCQf6YsIJU9/fewufrbUCgYEA5/QU -/tDBDl/f4A2R1HlIkGd1jS//CJLCc3riy0SQxcWIq6/cqflyfvRWiax5DwcO7qfh -3WbJldu9H0IWZjBCqX0v/jHvWBzaKNQCKbFFcL76Lr8bJCwlUMTH9MOhHf3uCOHD -J7YSTVJdvgzLN8K6yFhc0gI4VYQtnQTWJENObPsCgYEAlawAq6jO5uCVw3dbhGKF -cDpwBaVFGQpyGrZKu6nUCudIpL6VtCiNubqs0tNL1ZVqIr9tFdrkTMkwX7XvDj4j -A/F49u3aOJ18iuD4Eh4WYIJjos/MF+NYM/K1CdIsMbpV94dusJmN0Tw3y/dqR2Jk -n3uFCuivTOdxngk//DnmmV0CgYEA1CXNUiZSfLg5xe4DVEc9lD3cKS8d3pSEXySk -6+8hTpHV59moRJpPG0iVIcRq0NDO2n8YOOy7MWJSPpWucPZw8h362E6Jr5hr/G20 -MLffYDh8EGdgBpyN4Kqqi/allQ3cOalrWhXP9YKBFMMU10I2nekbtESti6GiKnvy -9CXPRCMCgYBZ2w+VVdhUUBA/elbuEdfbPwIYVDAk31PYg0c9jvQVusmfD1CuY/51 -JVsF5oJSosiN7WdDIETkklth0q3lAsQBKoYYMUw54RBf6FawoumB6MVdc3u4y9Ko -l9JC9czdEqb/e0LBqFiWsrtPk9WQf2gyN1mIXQPbyTT1O1J+DvUIbQ== ------END RSA PRIVATE KEY----- diff --git a/test/fixtures/client-key.pem b/test/fixtures/client-key.pem deleted file mode 100644 --- a/test/fixtures/client-key.pem +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEA0Uibwkl0x65pxs/wjfeJ8YJp1A9e42M3yR/pW3CZMOEQx6W+ -imfOSxIo3iqGKBsPnGe2uC35qSEeK3Dd7CVhr2nychceA7S2D2YjawmjLHTFoh22 -zMK6b0rH+Rat71XlMz8u8Mc0kYjOFTsGe34QiXY9mCHkCYHiGYfjTf+JqclnoUuI -UEhxvnG0uHOOL5BE9B7+PBu5vuFCxQmc5htgCFvA64e91spppt5Mpg95AwgGOEg0 -iQwK8TbepSd7BiRjuo6G3nyxcqry4+eeOC75OAvIQ9zvnvTNzh3HcqkV3+eXWfEi -FoKaBlRl2+VeYN1i/53FxfO45tOaTGV6js9MHQIDAQABAoIBACOp2+Ef42ajsiLP -DI8kv70IHECm3eSh47/CUGHkrjZGJDXhaLbtOZpRXeV+GZ57/g0JH3oDW6gWnK2K -bkbvl9XsmAQZLGQ1R1EYdrCm08efno4hwiTiiiKs+6bW1o0Sdhxlh/o/+BVU2smD -ZXdl5CuImrZyEAoOuBjhrzp7cVodSOYYK2RIAL35oAtKLR6NE40XGcxQSCdm+1eU -PzRo8TimQxujyIHrd1QV2FirmLfDFGg3LN8DS72n26bhvDg3PF6PVMF20BKTDqiu -xAyKg3weBsee2QoyegDRdgTD1PvjwWqqnsntPbvY5V8PR1DDmssfotYToNPVuJd2 -6usmBAECgYEA/21NZPZJdxRKwCiWXoqBUIY0VFajxihVxZ9pIZPXOFhpGmyj/jf6 -jBiHAqtucRdABtNxqsztGbEzJsMyNv7MqEVTAWUPH804OwW/C6Z2011GZ1AUN05n -zTxPR4eCYlxvSM+wwC8q+4mSo7hAZj5HltUI0kfEahZnGXqG4FRC1TUCgYEA0cDO -DuTrytk6EoYYCsS7ps87MYUlU97RHFrRGwf+V1Rz2RCz+XAkYCI1/tOpb0VeF1de -fX1mlM3edkLX2ooylYxv5HKPpICzPXeGK/u/HaJBRyZEq6Ms0HK8XyJOdG/UyuiZ -p9nc8eaZYvco24bT4dWe5oZ43mnydAwyK2tOgEkCgYEA/blJg9zSJSNXDYJDvC3B 
-PofRO2XE0XYHnYM4H06IH0RTQxhf3oskqj1C/3fjARujUiR/aLafX0ISGZMUMmTw -TsZuKZiFaYWlMZwHpj75EgQ5hy6YpkeP/OLHrboB3ksLkDweywkPnUWPEGpaLjX3 -TvDXDmqTxP3z8+8uQ2/v43ECgYB5/3BaTV+vviT+vSuip8aVQRcmuFB7ta9elJvm -4wFV/fLbn9FuFYGywHMzYhy8cVZGsTRuPM+7YPoxQrOVkqfVP7ec4d0WSxz1dV1+ -m5APRl49ac6rHd9k5jcWBjgnlRvpYNxuOlM+B2fTnfoPpR37zmn7nt8STgEM6kML -6f/gsQKBgFJH95hEgqhfEHmP23+ZWH0Dl7zD5sJJe4CYTgYriNeKKzpz2G6OVv+U -xNc8eGbnr4raPTxCCLKz6XJhuQuPQDpkoHvkhjOqZ5Tbb4fCaLdcVE0vwqBE1gGk -ryKSvahgHIykq3+RYpL4u2xypx81IBOMk7EM++Z6gdYMq0ZTN/fL ------END RSA PRIVATE KEY----- diff --git a/test/proxy-agent.js b/test/proxy-agent.js --- a/test/proxy-agent.js +++ b/test/proxy-agent.js @@ -3,15 +3,98 @@ const { tspl } = require('@matteo.collina/tspl') const { test, after } = require('node:test') const { request, fetch, setGlobalDispatcher, getGlobalDispatcher } = require('..') -const { InvalidArgumentError } = require('../lib/core/errors') -const { readFileSync } = require('node:fs') -const { join } = require('node:path') +const { InvalidArgumentError, SecureProxyConnectionError } = require('../lib/core/errors') const ProxyAgent = require('../lib/dispatcher/proxy-agent') const Pool = require('../lib/dispatcher/pool') const { createServer } = require('node:http') const https = require('node:https') const { createProxy } = require('proxy') +const certs = (() => { + const forge = require('node-forge') + const createCert = (cn, issuer, keyLength = 2048) => { + const keys = forge.pki.rsa.generateKeyPair(keyLength) + const cert = forge.pki.createCertificate() + cert.publicKey = keys.publicKey + cert.serialNumber = '' + Date.now() + cert.validity.notBefore = new Date() + cert.validity.notAfter = new Date() + cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 10) + + const attrs = [{ + name: 'commonName', + value: cn + }] + cert.setSubject(attrs) + const isCa = issuer === undefined + cert.setExtensions([{ + name: 'basicConstraints', + cA: isCa + }, { + name: 'keyUsage', + keyCertSign: true, + digitalSignature: true, + nonRepudiation: true, + keyEncipherment: true, + dataEncipherment: true + }, { + name: 'extKeyUsage', + serverAuth: true, + clientAuth: true, + codeSigning: true, + emailProtection: true, + timeStamping: true + }, { + name: 'nsCertType', + client: true, + server: true, + email: true, + objsign: true, + sslCA: isCa, + emailCA: isCa, + objCA: isCa + }]) + + const alg = forge.md.sha256.create() + if (issuer !== undefined) { + cert.setIssuer(issuer.certificate.subject.attributes) + cert.sign(issuer.privateKey, alg) + } else { + cert.setIssuer(attrs) + cert.sign(keys.privateKey, alg) + } + return { + privateKey: keys.privateKey, + publicKey: keys.publicKey, + certificate: cert + } + } + + const root = createCert('CA') + const server = createCert('agent1', root) + const client = createCert('client', root) + const proxy = createCert('proxy', root) + + return { + root: { + key: forge.pki.privateKeyToPem(root.privateKey), + crt: forge.pki.certificateToPem(root.certificate) + }, + server: { + key: forge.pki.privateKeyToPem(server.privateKey), + crt: forge.pki.certificateToPem(server.certificate) + }, + client: { + key: forge.pki.privateKeyToPem(client.privateKey), + crt: forge.pki.certificateToPem(client.certificate) + }, + proxy: { + key: forge.pki.privateKeyToPem(proxy.privateKey), + crt: forge.pki.certificateToPem(proxy.certificate) + } + } +})() + test('should throw error when no uri is provided', (t) => { t = tspl(t, { plan: 2 }) t.throws(() => new ProxyAgent(), InvalidArgumentError) @@ -527,10 +610,8 @@ 
test('Proxy via HTTP to HTTPS endpoint', async (t) => { uri: proxyUrl, requestTls: { ca: [ - readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + certs.root.crt ], - key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), - cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), servername: 'agent1' } }) @@ -579,19 +660,14 @@ test('Proxy via HTTPS to HTTPS endpoint', async (t) => { uri: proxyUrl, proxyTls: { ca: [ - readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + certs.root.crt ], - key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), - cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), - servername: 'agent1', - rejectUnauthorized: false + servername: 'proxy' }, requestTls: { ca: [ - readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + certs.root.crt ], - key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), - cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), servername: 'agent1' } }) @@ -640,12 +716,9 @@ test('Proxy via HTTPS to HTTP endpoint', async (t) => { uri: proxyUrl, proxyTls: { ca: [ - readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + certs.root.crt ], - key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), - cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), - servername: 'agent1', - rejectUnauthorized: false + servername: 'proxy' } }) @@ -720,6 +793,55 @@ test('Proxy via HTTP to HTTP endpoint', async (t) => { proxyAgent.close() }) +test('Proxy via HTTPS to HTTP fails on wrong SNI', async (t) => { + t = tspl(t, { plan: 2 }) + const server = await buildServer() + const proxy = await buildSSLProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `https://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + uri: proxyUrl, + proxyTls: { + ca: [ + certs.root.crt + ] + } + }) + + server.on('request', function (req, res) { + t.ok(!req.connection.encrypted) + res.end(JSON.stringify(req.headers)) + }) + + server.on('secureConnection', () => { + t.fail('server is http') + }) + + proxy.on('secureConnection', () => { + t.fail('proxy is http') + }) + + proxy.on('connect', () => { + t.ok(true, 'connect to proxy') + }) + + proxy.on('request', function () { + t.fail('proxy should never receive requests') + }) + + try { + await request(serverUrl, { dispatcher: proxyAgent }) + } catch (e) { + t.ok(e instanceof SecureProxyConnectionError) + t.ok(e.cause.code === 'ERR_TLS_CERT_ALTNAME_INVALID') + } + + server.close() + proxy.close() + proxyAgent.close() +}) + function buildServer () { return new Promise((resolve) => { const server = createServer() @@ -730,10 +852,10 @@ function buildServer () { function buildSSLServer () { const serverOptions = { ca: [ - readFileSync(join(__dirname, 'fixtures', 'client-ca-crt.pem'), 'utf8') + certs.root.crt ], - key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'), - cert: readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8') + key: certs.server.key, + cert: certs.server.crt } return new Promise((resolve) => { const server = https.createServer(serverOptions) @@ -753,10 +875,10 @@ function buildProxy (listener) { function buildSSLProxy () { const serverOptions = { ca: [ - readFileSync(join(__dirname, 'fixtures', 'client-ca-crt.pem'), 'utf8') + certs.root.crt ], - key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'), - cert: 
readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8') + key: certs.proxy.key, + cert: certs.proxy.crt } return new Promise((resolve) => {
ProxyAgent sends wrong SNI to proxy ## Bug Description When using an encrypted connection for a proxy with the `ProxyAgent` the hostname from the actual request is used instead of the hostname from the proxy URI. ## Reproducible By Run the following script and observe that the first client hello contains the hostname of the request URL. ```typescript import { ProxyAgent, fetch } from 'undici' import fs from 'fs/promises' const proxyHostname = 'proxy.internal' const requestHostname = 'example.com' const caPath = '~/ca.pem' const enableTrace = true const test = async (): Promise<unknown> => { const ca = await fs.readFile(caPath) const response = fetch('https://'+requestHostname, { dispatcher: new ProxyAgent({ uri: 'https://'+proxyHostname+':8080', connect: { ca, servername: proxyHostname, host: proxyHostname, enableTrace }, proxyTls: { ca, servername: proxyHostname, host: proxyHostname, enableTrace }, requestTls: { ca, servername: requestHostname, host: requestHostname, enableTrace } }) }) return (await response).text() } test().then(r => console.log('Respone', r)).catch(e => console.error('fetch failed', e)) ``` ## Expected Behavior Expect is that the first client hello contains the SNI name from proxy. ## Logs & Screenshots <details> <summary>Example Output</summary> <pre> Sent Record Header: Version = TLS 1.0 (0x301) Content Type = Handshake (22) Length = 379 ClientHello, Length=375 client_version=0x303 (TLS 1.2) Random: gmt_unix_time=0x0DA529B7 random_bytes (len=28): DA63174BCBFEF26D17359C3A9E7CC94C93DDBCD4A754763905798CB8 session_id (len=32): 17331EF37F92BB7BD08EF7E3CFD80501113B73D680E82EE9F2DB861DD00CC0BA cipher_suites (len=118) {0x13, 0x02} TLS_AES_256_GCM_SHA384 {0x13, 0x03} TLS_CHACHA20_POLY1305_SHA256 {0x13, 0x01} TLS_AES_128_GCM_SHA256 {0xC0, 0x2F} TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 {0xC0, 0x2B} TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 {0xC0, 0x30} TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 {0xC0, 0x2C} TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 {0x00, 0x9E} TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 {0xC0, 0x27} TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 {0x00, 0x67} TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 {0xC0, 0x28} TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 {0x00, 0x6B} TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 {0x00, 0xA3} TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 {0x00, 0x9F} TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 {0xCC, 0xA9} TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 {0xCC, 0xA8} TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 {0xCC, 0xAA} TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256 {0xC0, 0xAF} TLS_ECDHE_ECDSA_WITH_AES_256_CCM_8 {0xC0, 0xAD} TLS_ECDHE_ECDSA_WITH_AES_256_CCM {0xC0, 0xA3} TLS_DHE_RSA_WITH_AES_256_CCM_8 {0xC0, 0x9F} TLS_DHE_RSA_WITH_AES_256_CCM {0xC0, 0x5D} TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384 {0xC0, 0x61} TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384 {0xC0, 0x57} TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384 {0xC0, 0x53} TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384 {0x00, 0xA2} TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 {0xC0, 0xAE} TLS_ECDHE_ECDSA_WITH_AES_128_CCM_8 {0xC0, 0xAC} TLS_ECDHE_ECDSA_WITH_AES_128_CCM {0xC0, 0xA2} TLS_DHE_RSA_WITH_AES_128_CCM_8 {0xC0, 0x9E} TLS_DHE_RSA_WITH_AES_128_CCM {0xC0, 0x5C} TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256 {0xC0, 0x60} TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256 {0xC0, 0x56} TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256 {0xC0, 0x52} TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256 {0xC0, 0x24} TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 {0x00, 0x6A} TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 {0xC0, 0x23} TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 {0x00, 0x40} TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 {0xC0, 0x0A} 
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA {0xC0, 0x14} TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA {0x00, 0x39} TLS_DHE_RSA_WITH_AES_256_CBC_SHA {0x00, 0x38} TLS_DHE_DSS_WITH_AES_256_CBC_SHA {0xC0, 0x09} TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA {0xC0, 0x13} TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA {0x00, 0x33} TLS_DHE_RSA_WITH_AES_128_CBC_SHA {0x00, 0x32} TLS_DHE_DSS_WITH_AES_128_CBC_SHA {0x00, 0x9D} TLS_RSA_WITH_AES_256_GCM_SHA384 {0xC0, 0xA1} TLS_RSA_WITH_AES_256_CCM_8 {0xC0, 0x9D} TLS_RSA_WITH_AES_256_CCM {0xC0, 0x51} TLS_RSA_WITH_ARIA_256_GCM_SHA384 {0x00, 0x9C} TLS_RSA_WITH_AES_128_GCM_SHA256 {0xC0, 0xA0} TLS_RSA_WITH_AES_128_CCM_8 {0xC0, 0x9C} TLS_RSA_WITH_AES_128_CCM {0xC0, 0x50} TLS_RSA_WITH_ARIA_128_GCM_SHA256 {0x00, 0x3D} TLS_RSA_WITH_AES_256_CBC_SHA256 {0x00, 0x3C} TLS_RSA_WITH_AES_128_CBC_SHA256 {0x00, 0x35} TLS_RSA_WITH_AES_256_CBC_SHA {0x00, 0x2F} TLS_RSA_WITH_AES_128_CBC_SHA {0x00, 0xFF} TLS_EMPTY_RENEGOTIATION_INFO_SCSV compression_methods (len=1) No Compression (0x00) extensions, length = 184 extension_type=server_name(0), length=16 << I expect 0000 - 00 0e 00 00 0b 65 78 61-6d 70 6c 65 2e 63 6f .....example.co << proxy.internal 000f - 6d m << here extension_type=ec_point_formats(11), length=4 uncompressed (0) ansiX962_compressed_prime (1) ansiX962_compressed_char2 (2) extension_type=supported_groups(10), length=22 ecdh_x25519 (29) secp256r1 (P-256) (23) ecdh_x448 (30) secp521r1 (P-521) (25) secp384r1 (P-384) (24) ffdhe2048 (256) ffdhe3072 (257) ffdhe4096 (258) ffdhe6144 (259) ffdhe8192 (260) extension_type=session_ticket(35), length=0 extension_type=application_layer_protocol_negotiation(16), length=11 http/1.1 extension_type=encrypt_then_mac(22), length=0 extension_type=extended_master_secret(23), length=0 extension_type=signature_algorithms(13), length=42 ecdsa_secp256r1_sha256 (0x0403) ecdsa_secp384r1_sha384 (0x0503) ecdsa_secp521r1_sha512 (0x0603) ed25519 (0x0807) ed448 (0x0808) rsa_pss_pss_sha256 (0x0809) rsa_pss_pss_sha384 (0x080a) rsa_pss_pss_sha512 (0x080b) rsa_pss_rsae_sha256 (0x0804) rsa_pss_rsae_sha384 (0x0805) rsa_pss_rsae_sha512 (0x0806) rsa_pkcs1_sha256 (0x0401) rsa_pkcs1_sha384 (0x0501) rsa_pkcs1_sha512 (0x0601) ecdsa_sha224 (0x0303) rsa_pkcs1_sha224 (0x0301) dsa_sha224 (0x0302) dsa_sha256 (0x0402) dsa_sha384 (0x0502) dsa_sha512 (0x0602) extension_type=supported_versions(43), length=5 TLS 1.3 (772) TLS 1.2 (771) extension_type=psk_key_exchange_modes(45), length=2 psk_dhe_ke (1) extension_type=key_share(51), length=38 NamedGroup: ecdh_x25519 (29) key_exchange: (len=32): 98B6544EAFE554DF8119970FE13A5555895668249400330F5DDDD4B821590214 Received Record Header: Version = TLS 1.2 (0x303) Content Type = Handshake (22) Length = 122 ServerHello, Length=118 server_version=0x303 (TLS 1.2) Random: gmt_unix_time=0x06CBA8C8 random_bytes (len=28): A02145755E1FE3E03A7C9E25D10A59AD74540932172C2682B6CDA595 session_id (len=32): 17331EF37F92BB7BD08EF7E3CFD80501113B73D680E82EE9F2DB861DD00CC0BA cipher_suite {0x13, 0x01} TLS_AES_128_GCM_SHA256 compression_method: No Compression (0x00) extensions, length = 46 extension_type=supported_versions(43), length=2 TLS 1.3 (772) extension_type=key_share(51), length=36 NamedGroup: ecdh_x25519 (29) key_exchange: (len=32): 9B70EE06D06FAD20478885A21F07491341C211A129D9B51D850116AF8F80A903 Received Record Header: Version = TLS 1.2 (0x303) Content Type = ChangeCipherSpec (20) Length = 1 Received Record Header: Version = TLS 1.2 (0x303) Content Type = ApplicationData (23) Length = 38 Inner Content Type = Handshake (22) EncryptedExtensions, Length=17 extensions, length = 
15 extension_type=application_layer_protocol_negotiation(16), length=11 http/1.1 Received Record Header: Version = TLS 1.2 (0x303) Content Type = ApplicationData (23) Length = 896 Inner Content Type = Handshake (22) Certificate, Length=875 context (len=0): certificate_list, length=871 ASN.1Cert, length=866 ------details----- Certificate: Data: Version: 3 (0x2) Serial Number: e1:a1:b2:9b:cb:f7:7f:6c:8d:19:b7:79:24:d4:d7:5b Signature Algorithm: sha256WithRSAEncryption Issuer: CN = TRAEFIK DEFAULT CERT Validity Not Before: Mar 5 20:38:57 2024 GMT Not After : Mar 5 20:38:57 2025 GMT Subject: CN = TRAEFIK DEFAULT CERT Subject Public Key Info: Public Key Algorithm: rsaEncryption Public-Key: (2048 bit) Modulus: 00:ab:45:e5:b7:3e:4c:44:28:cf:be:ec:c5:27:fb: 98:83:a6:1d:d2:33:86:16:ca:56:2f:4a:86:5f:7c: ef:22:c5:3d:ba:15:8a:bb:8d:23:81:e7:96:24:87: 08:4b:aa:be:4a:1a:e1:14:38:61:a6:27:ec:55:79: 5f:ba:ec:f5:9e:ca:19:f9:3e:68:1e:76:89:6f:42: 8c:0a:19:ca:5e:d8:80:20:5f:ef:08:49:b7:1b:b4: 73:e5:81:df:02:bd:a3:47:2d:dd:71:d1:c1:f9:4d: 4c:0e:b0:e6:e0:f6:f2:41:5e:53:8a:2a:52:41:3f: 61:1e:4e:78:1d:f4:14:ef:2a:50:e8:88:b8:b8:9c: c2:a5:d5:f9:b5:8e:bd:ea:2f:7d:fb:1f:33:03:96: 7c:3e:e0:6b:3e:c7:02:54:01:e8:0d:16:16:98:f6: e4:0c:60:0c:1e:0a:66:64:6f:c6:d8:be:5d:63:fa: 3f:89:d3:22:cc:ec:c9:77:8b:2a:0d:56:1b:ae:92: c4:f6:7a:08:e4:de:4e:9e:bd:d9:65:a3:b5:86:10: a5:68:cc:dc:ce:94:70:11:5b:85:ac:7f:6b:b3:dc: 6d:0e:71:4c:28:d1:b9:46:c9:50:42:10:a4:22:20: c8:5c:9c:d1:af:3e:0d:41:8e:d6:fd:a0:db:b1:24: 37:e3 Exponent: 65537 (0x10001) X509v3 extensions: X509v3 Key Usage: critical Digital Signature, Key Encipherment, Data Encipherment, Key Agreement X509v3 Extended Key Usage: TLS Web Server Authentication X509v3 Basic Constraints: critical CA:FALSE X509v3 Subject Alternative Name: DNS:5ee1ac8c84cad765fe44ef7ccd47dec3.74c4aef601a7ad8758f94566a17ae0b7.traefik.default Signature Algorithm: sha256WithRSAEncryption Signature Value: a0:3d:91:02:dd:ae:82:ab:53:42:36:35:e1:d3:cd:a2:af:a9: 3c:23:59:f3:8b:82:6d:54:04:e5:92:49:e0:78:fb:ad:23:36: 0d:82:10:f0:d8:17:5f:6c:9c:84:20:bb:38:32:fd:aa:ab:ef: 13:67:d5:23:82:bd:5f:6f:3b:77:66:1c:d9:18:fc:66:64:95: 21:ca:74:54:4e:a4:94:2e:b5:ed:88:f6:5b:4a:af:3b:8f:d5: 7b:09:a4:91:e1:b1:10:c9:b4:ee:71:d9:d1:ae:e1:d7:70:dd: 8c:c7:dd:fa:25:de:50:14:17:91:92:53:49:ac:86:32:e9:14: 94:6b:55:da:58:23:11:16:e0:e2:8c:6a:69:32:d2:04:93:27: f9:28:67:7d:e4:56:56:b8:e1:08:00:df:97:97:e4:6c:7f:0f: 95:c7:82:9d:69:ec:0a:78:38:36:f4:19:c3:d6:c7:9b:a7:e8: 64:20:10:c7:46:00:e7:7e:db:b8:36:8a:bb:4c:4d:f7:78:e1: f1:1b:cd:bf:c2:3c:9f:14:25:de:f9:d6:36:78:00:90:13:e5: 28:02:2a:b3:d7:5e:be:79:db:ec:9c:b9:a3:c7:7b:79:95:c5: 99:af:1f:1a:3b:19:62:e7:a4:11:25:d1:4a:14:d3:a7:79:ac: d4:bb:65:0a -----BEGIN CERTIFICATE----- MIIDXjCCAkagAwIBAgIRAOGhspvL939sjRm3eSTU11swDQYJKoZIhvcNAQELBQAw HzEdMBsGA1UEAxMUVFJBRUZJSyBERUZBVUxUIENFUlQwHhcNMjQwMzA1MjAzODU3 WhcNMjUwMzA1MjAzODU3WjAfMR0wGwYDVQQDExRUUkFFRklLIERFRkFVTFQgQ0VS VDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKtF5bc+TEQoz77sxSf7 mIOmHdIzhhbKVi9Khl987yLFPboViruNI4HnliSHCEuqvkoa4RQ4YaYn7FV5X7rs 9Z7KGfk+aB52iW9CjAoZyl7YgCBf7whJtxu0c+WB3wK9o0ct3XHRwflNTA6w5uD2 8kFeU4oqUkE/YR5OeB30FO8qUOiIuLicwqXV+bWOveovffsfMwOWfD7gaz7HAlQB 6A0WFpj25AxgDB4KZmRvxti+XWP6P4nTIszsyXeLKg1WG66SxPZ6COTeTp692WWj tYYQpWjM3M6UcBFbhax/a7PcbQ5xTCjRuUbJUEIQpCIgyFyc0a8+DUGO1v2g27Ek N+MCAwEAAaOBlDCBkTAOBgNVHQ8BAf8EBAMCA7gwEwYDVR0lBAwwCgYIKwYBBQUH AwEwDAYDVR0TAQH/BAIwADBcBgNVHREEVTBTglE1ZWUxYWM4Yzg0Y2FkNzY1ZmU0 NGVmN2NjZDQ3ZGVjMy43NGM0YWVmNjAxYTdhZDg3NThmOTQ1NjZhMTdhZTBiNy50 
cmFlZmlrLmRlZmF1bHQwDQYJKoZIhvcNAQELBQADggEBAKA9kQLdroKrU0I2NeHT zaKvqTwjWfOLgm1UBOWSSeB4+60jNg2CEPDYF19snIQguzgy/aqr7xNn1SOCvV9v O3dmHNkY/GZklSHKdFROpJQute2I9ltKrzuP1XsJpJHhsRDJtO5x2dGu4ddw3YzH 3fol3lAUF5GSU0mshjLpFJRrVdpYIxEW4OKMamky0gSTJ/koZ33kVla44QgA35eX 5Gx/D5XHgp1p7Ap4ODb0GcPWx5un6GQgEMdGAOd+27g2irtMTfd44fEbzb/CPJ8U Jd751jZ4AJAT5SgCKrPXXr552+ycuaPHe3mVxZmvHxo7GWLnpBEl0UoU06d5rNS7 ZQo= -----END CERTIFICATE----- ------------------ No extensions Received Record Header: Version = TLS 1.2 (0x303) Content Type = ApplicationData (23) Length = 281 Inner Content Type = Handshake (22) CertificateVerify, Length=260 Signature Algorithm: rsa_pss_rsae_sha256 (0x0804) Signature (len=256): 063E55F5F6FA34450CF12DDECB3AF0DA37670FC69B60AEBFB6A5A809A39F4D28CC8E8B0115A53F3335A399E53D4B5CDD2B25AFA55E988A34D41FC7A3A4772DC97A83B0A0C05E672CBD0EAD09022ED3BA32940FFB46B5E0784B3608C83EABFDB45C161F6E96A38D01611F0C31AB5E993BEAC6C6036DA5E0748A5945F2DBE3F82A098A25A41386807A532986D100EE582D25B82273839E7BE6D02141FBD1F1C728EC2EE5DB1B195281035101E5E88E3B5677832837FCACD27B8342305E35CD5BC4A35EC8A1E9C6B115172AD65CC4383C97DBB86690286B4D0CE2BE8038BB9FE961920A92FC4466E0C4A15B1252E9BBFAA39F2FD135435DA21EF366931263C0A0AB Received Record Header: Version = TLS 1.2 (0x303) Content Type = ApplicationData (23) Length = 53 Inner Content Type = Handshake (22) Finished, Length=32 verify_data (len=32): 5E8F9ACE5560F2D1FCF05B383BCBA0D9629472B2E9FEC13FCCDD64002E48F394 Sent Record Header: Version = TLS 1.2 (0x303) Content Type = ChangeCipherSpec (20) Length = 1 change_cipher_spec (1) Sent Record Header: Version = TLS 1.2 (0x303) Content Type = ApplicationData (23) Length = 53 Inner Content Type = Handshake (22) Finished, Length=32 verify_data (len=32): 2F0ED8A1804E61E620C7B2B98F2ACCB02C195F4019B6A5B5A0F11003C48C5685 fetch failed TypeError: fetch failed at fetch (/home/user/git/pld/node_modules/.pnpm/undici@6.6.2/node_modules/undici/index.js:103:13) at process.processTicksAndRejections (node:internal/process/task_queues:95:5) { [cause]: Error: self-signed certificate at TLSSocket.onConnectSecure (node:_tls_wrap:1674:34) at TLSSocket.emit (node:events:518:28) at TLSSocket.emit (node:domain:488:12) at TLSSocket._finishInit (node:_tls_wrap:1085:8) at ssl.onhandshakedone (node:_tls_wrap:871:12) { code: 'DEPTH_ZERO_SELF_SIGNED_CERT' } } Received Record Header: Version = TLS 1.2 (0x303) Content Type = ApplicationData (23) Length = 139 </pre> </details> ## Environment undici@6.6.2 node v20.11.1 Pop!_OS 22.04 LTS ### Additional context A hacky workaround that allowed my setup to work, is to ignore the servername from line 91 in the _connect.js_ if the httpSocket is not defined ```typescript if(httpSocket === undefined){ servername = options.servername || util.getServerName(host) || null } ``` https://github.com/nodejs/undici/blob/03a2d439dce5a3821f459e490d5edb691cd590a5/lib/core/connect.js#L91-L101
Yeah, sounds fair, that can cause potential side effects on TLS negotiations. Though, I'll try to scope this within `ProxyAgent` instead of directly pointing to `connect.js`. Would you like to send a PR to fix that? Do not forget the unit testing 🙂
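For illustration, here is roughly the shape such a `ProxyAgent`-scoped fix could take — a sketch only, assuming the connector options carry the `httpSocket` of an already-established CONNECT tunnel (as in the `connect.js` excerpt above). The wrapper name and where `ProxyAgent` would plug it in are hypothetical, not undici's actual internals:

```typescript
import { buildConnector } from "undici";

type ConnectCallback = (err: Error | null, socket: unknown) => void;

// Hypothetical wrapper (illustration, not undici's real ProxyAgent internals):
// keep the forced SNI servername for direct connections, but drop it once the
// socket has already been tunnelled through the proxy (httpSocket is present),
// letting TLS derive the name from the target host — the same condition as the
// connect.js workaround quoted above.
export function tunnelAwareConnector(opts?: buildConnector.BuildOptions) {
  const connect = buildConnector(opts) as (
    options: Record<string, unknown>,
    callback: ConnectCallback
  ) => void;

  return (options: Record<string, unknown>, callback: ConnectCallback) => {
    if (options.httpSocket != null) {
      const { servername, ...rest } = options;
      return connect(rest, callback); // tunnelled: let TLS infer SNI from the host
    }
    return connect(options, callback); // direct connection: keep existing behaviour
  };
}
```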
2024-03-10T08:54:56Z
6.7
apollographql/apollo-client
12,450
apollographql__apollo-client-12450
[ "12192" ]
925548a9baadb19b7bb78ee84c6f9e64b55c39e9
diff --git a/integration-tests/next/src/libs/apolloClient.ts b/integration-tests/next/src/libs/apolloClient.ts --- a/integration-tests/next/src/libs/apolloClient.ts +++ b/integration-tests/next/src/libs/apolloClient.ts @@ -16,7 +16,7 @@ import { schemaLink } from "./schemaLink.ts"; export const APOLLO_STATE_PROP_NAME = "__APOLLO_STATE__"; -let apolloClient: ApolloClient<NormalizedCacheObject>; +let apolloClient: ApolloClient; const errorLink = onError(({ graphQLErrors, networkError }) => { if (graphQLErrors) @@ -64,7 +64,7 @@ export function initializeApollo( // the initial state gets hydrated here if (initialState) { // Get existing cache, loaded during client side data fetching - const existingCache = _apolloClient.extract(); + const existingCache = _apolloClient.extract() as NormalizedCacheObject; // Merge the initialState from getStaticProps/getServerSideProps // in the existing cache @@ -92,20 +92,21 @@ interface ApolloProps { } export function addApolloState( - client: ApolloClient<NormalizedCacheObject>, + client: ApolloClient, pageProps: GetServerSidePropsResult<Partial<ApolloProps>> & { props: Partial<ApolloProps>; } ) { if (pageProps?.props) { - pageProps.props[APOLLO_STATE_PROP_NAME] = client.cache.extract(); + pageProps.props[APOLLO_STATE_PROP_NAME] = + client.cache.extract() as NormalizedCacheObject; } return pageProps; } export function useApollo(pageProps?: ApolloProps) { const state = pageProps?.[APOLLO_STATE_PROP_NAME]; - const storeRef = React.useRef<ApolloClient<NormalizedCacheObject>>(); + const storeRef = React.useRef<ApolloClient>(); if (!storeRef.current) { storeRef.current = initializeApollo(state); } diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts --- a/src/cache/core/cache.ts +++ b/src/cache/core/cache.ts @@ -39,7 +39,7 @@ import type { Cache } from "./types/Cache.js"; import type { MissingTree } from "./types/common.js"; import type { DataProxy } from "./types/DataProxy.js"; -export type Transaction<T> = (c: ApolloCache<T>) => void; +export type Transaction = (c: ApolloCache) => void; /** * Watched fragment options. @@ -101,7 +101,7 @@ export type WatchFragmentResult<TData> = missing: MissingTree; }; -export abstract class ApolloCache<TSerialized> implements DataProxy { +export abstract class ApolloCache implements DataProxy { public readonly assumeImmutableResults: boolean = false; // required to implement @@ -148,14 +148,12 @@ export abstract class ApolloCache<TSerialized> implements DataProxy { * Called when hydrating a cache (server side rendering, or offline storage), * and also (potentially) during hot reloads. */ - public abstract restore( - serializedState: TSerialized - ): ApolloCache<TSerialized>; + public abstract restore(serializedState: unknown): this; /** * Exposes the cache's complete state, in a serializable format for later restoration. 
*/ - public abstract extract(optimistic?: boolean): TSerialized; + public abstract extract(optimistic?: boolean): unknown; // Optimistic API @@ -202,7 +200,7 @@ export abstract class ApolloCache<TSerialized> implements DataProxy { } public abstract performTransaction( - transaction: Transaction<TSerialized>, + transaction: Transaction, // Although subclasses may implement recordOptimisticTransaction // however they choose, the default implementation simply calls // performTransaction with a string as the second argument, allowing @@ -214,7 +212,7 @@ export abstract class ApolloCache<TSerialized> implements DataProxy { ): void; public recordOptimisticTransaction( - transaction: Transaction<TSerialized>, + transaction: Transaction, optimisticId: string ) { this.performTransaction(transaction, optimisticId); diff --git a/src/cache/core/types/Cache.ts b/src/cache/core/types/Cache.ts --- a/src/cache/core/types/Cache.ts +++ b/src/cache/core/types/Cache.ts @@ -64,7 +64,7 @@ export namespace Cache { } export interface BatchOptions< - TCache extends ApolloCache<any>, + TCache extends ApolloCache, TUpdateResult = void, > { // Same as the first parameter of performTransaction, except the cache diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -42,7 +42,7 @@ type BroadcastOptions = Pick< "optimistic" | "onWatchUpdated" >; -export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { +export class InMemoryCache extends ApolloCache { private data!: EntityStore; private optimisticData!: EntityStore; diff --git a/src/cache/inmemory/reactiveVars.ts b/src/cache/inmemory/reactiveVars.ts --- a/src/cache/inmemory/reactiveVars.ts +++ b/src/cache/inmemory/reactiveVars.ts @@ -8,25 +8,25 @@ import type { InMemoryCache } from "./inMemoryCache.js"; export interface ReactiveVar<T> { (newValue?: T): T; onNextChange(listener: ReactiveListener<T>): () => void; - attachCache(cache: ApolloCache<any>): this; - forgetCache(cache: ApolloCache<any>): boolean; + attachCache(cache: ApolloCache): this; + forgetCache(cache: ApolloCache): boolean; } type ReactiveListener<T> = (value: T) => any; // Contextual Slot that acquires its value when custom read functions are // called in Policies#readField. -export const cacheSlot = new Slot<ApolloCache<any>>(); +export const cacheSlot = new Slot<ApolloCache>(); const cacheInfoMap = new WeakMap< - ApolloCache<any>, + ApolloCache, { vars: Set<ReactiveVar<any>>; dep: OptimisticDependencyFunction<ReactiveVar<any>>; } >(); -function getCacheInfo(cache: ApolloCache<any>) { +function getCacheInfo(cache: ApolloCache) { let info = cacheInfoMap.get(cache)!; if (!info) { cacheInfoMap.set( @@ -40,7 +40,7 @@ function getCacheInfo(cache: ApolloCache<any>) { return info; } -export function forgetCache(cache: ApolloCache<any>) { +export function forgetCache(cache: ApolloCache) { getCacheInfo(cache).vars.forEach((rv) => rv.forgetCache(cache)); } @@ -52,12 +52,12 @@ export function forgetCache(cache: ApolloCache<any>) { // garbage collected in the meantime, because it is no longer reachable, // you won't be able to call recallCache(cache), and the cache will // automatically disappear from the varsByCache WeakMap. 
-export function recallCache(cache: ApolloCache<any>) { +export function recallCache(cache: ApolloCache) { getCacheInfo(cache).vars.forEach((rv) => rv.attachCache(cache)); } export function makeVar<T>(value: T): ReactiveVar<T> { - const caches = new Set<ApolloCache<any>>(); + const caches = new Set<ApolloCache>(); const listeners = new Set<ReactiveListener<T>>(); const rv: ReactiveVar<T> = function (newValue) { @@ -110,7 +110,7 @@ export function makeVar<T>(value: T): ReactiveVar<T> { return rv; } -type Broadcastable = ApolloCache<any> & { +type Broadcastable = ApolloCache & { // This method is protected in InMemoryCache, which we are ignoring, but // we still want some semblance of type safety when we call it. broadcastWatches?: InMemoryCache["broadcastWatches"]; diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -106,7 +106,7 @@ export interface NormalizedCacheObject { export type OptimisticStoreItem = { id: string; data: NormalizedCacheObject; - transaction: Transaction<NormalizedCacheObject>; + transaction: Transaction; }; export type ReadQueryOptions = { diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -69,7 +69,7 @@ interface DevtoolsOptions { let hasSuggestedDevtools = false; -export interface ApolloClientOptions<TCacheShape> { +export interface ApolloClientOptions { /** * The URI of the GraphQL endpoint that Apollo Client will communicate with. * @@ -94,7 +94,7 @@ export interface ApolloClientOptions<TCacheShape> { * * For more information, see [Configuring the cache](https://www.apollographql.com/docs/react/caching/cache-configuration/). */ - cache: ApolloCache<TCacheShape>; + cache: ApolloCache; /** * The time interval (in milliseconds) before Apollo Client force-fetches queries after a server-side render. * @@ -175,21 +175,21 @@ export { mergeOptions }; * receive results from the server and cache the results in a store. It also delivers updates * to GraphQL queries through `Observable` instances. */ -export class ApolloClient<TCacheShape> implements DataProxy { +export class ApolloClient implements DataProxy { public link: ApolloLink; - public cache: ApolloCache<TCacheShape>; + public cache: ApolloCache; public disableNetworkFetches: boolean; public version: string; public queryDeduplication: boolean; public defaultOptions: DefaultOptions; - public readonly typeDefs: ApolloClientOptions<TCacheShape>["typeDefs"]; + public readonly typeDefs: ApolloClientOptions["typeDefs"]; public readonly devtoolsConfig: DevtoolsOptions; - private queryManager: QueryManager<TCacheShape>; + private queryManager: QueryManager; private devToolsHookCb?: Function; private resetStoreCallbacks: Array<() => Promise<any>> = []; private clearStoreCallbacks: Array<() => Promise<any>> = []; - private localState: LocalState<TCacheShape>; + private localState: LocalState; /** * Constructs an instance of `ApolloClient`. 
@@ -217,7 +217,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { * }); * ``` */ - constructor(options: ApolloClientOptions<TCacheShape>) { + constructor(options: ApolloClientOptions) { if (!options.cache) { throw newInvariantError( "To initialize Apollo Client, you must specify a 'cache' property " + @@ -330,11 +330,11 @@ export class ApolloClient<TCacheShape> implements DataProxy { } type DevToolsConnector = { - push(client: ApolloClient<any>): void; + push(client: ApolloClient): void; }; const windowWithDevTools = window as Window & { [devtoolsSymbol]?: DevToolsConnector; - __APOLLO_CLIENT__?: ApolloClient<any>; + __APOLLO_CLIENT__?: ApolloClient; }; const devtoolsSymbol = Symbol.for("apollo.devtools"); (windowWithDevTools[devtoolsSymbol] = @@ -483,7 +483,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { TData = any, TVariables extends OperationVariables = OperationVariables, TContext extends Record<string, any> = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, >( options: MutationOptions<TData, TVariables, TContext> ): Promise<FetchResult<MaybeMasked<TData>>> { @@ -735,13 +735,13 @@ export class ApolloClient<TCacheShape> implements DataProxy { * active queries. */ public refetchQueries< - TCache extends ApolloCache<any> = ApolloCache<TCacheShape>, + TCache extends ApolloCache = ApolloCache, TResult = Promise<ApolloQueryResult<any>>, >( options: RefetchQueriesOptions<TCache, TResult> ): RefetchQueriesResult<TResult> { const map = this.queryManager.refetchQueries( - options as RefetchQueriesOptions<ApolloCache<TCacheShape>, TResult> + options as RefetchQueriesOptions<ApolloCache, TResult> ); const queries: ObservableQuery<any>[] = []; const results: InternalRefetchQueriesResult<TResult>[] = []; @@ -793,7 +793,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { /** * Exposes the cache's complete state, in a serializable format for later restoration. */ - public extract(optimistic?: boolean): TCacheShape { + public extract(optimistic?: boolean) { return this.cache.extract(optimistic); } @@ -804,7 +804,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { * Called when hydrating a cache (server side rendering, or offline storage), * and also (potentially) during hot reloads. 
*/ - public restore(serializedState: TCacheShape): ApolloCache<TCacheShape> { + public restore(serializedState: unknown) { return this.cache.restore(serializedState); } diff --git a/src/core/LocalState.ts b/src/core/LocalState.ts --- a/src/core/LocalState.ts +++ b/src/core/LocalState.ts @@ -66,16 +66,16 @@ type ExecContext = { selectionsToResolve: Set<SelectionNode>; }; -type LocalStateOptions<TCacheShape> = { - cache: ApolloCache<TCacheShape>; - client?: ApolloClient<TCacheShape>; +type LocalStateOptions = { + cache: ApolloCache; + client?: ApolloClient; resolvers?: Resolvers | Resolvers[]; fragmentMatcher?: FragmentMatcher; }; -export class LocalState<TCacheShape> { - private cache: ApolloCache<TCacheShape>; - private client?: ApolloClient<TCacheShape>; +export class LocalState { + private cache: ApolloCache; + private client?: ApolloClient; private resolvers?: Resolvers; private fragmentMatcher?: FragmentMatcher; private selectionsToResolveCache = new WeakMap< @@ -88,7 +88,7 @@ export class LocalState<TCacheShape> { client, resolvers, fragmentMatcher, - }: LocalStateOptions<TCacheShape>) { + }: LocalStateOptions) { this.cache = cache; if (client) { diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -99,7 +99,7 @@ export class ObservableQuery< private initialResult: ApolloQueryResult<MaybeMasked<TData>>; private isTornDown: boolean; - private queryManager: QueryManager<any>; + private queryManager: QueryManager; private subscriptions = new Set<Subscription>(); private waitForOwnResult: boolean; @@ -123,7 +123,7 @@ export class ObservableQuery< queryInfo, options, }: { - queryManager: QueryManager<any>; + queryManager: QueryManager; queryInfo: QueryInfo; options: WatchQueryOptions<TVariables, TData>; }) { diff --git a/src/core/QueryInfo.ts b/src/core/QueryInfo.ts --- a/src/core/QueryInfo.ts +++ b/src/core/QueryInfo.ts @@ -24,10 +24,10 @@ export const enum CacheWriteBehavior { MERGE, } -const destructiveMethodCounts = new WeakMap<ApolloCache<any>, number>(); +const destructiveMethodCounts = new WeakMap<ApolloCache, number>(); function wrapDestructiveCacheMethod( - cache: ApolloCache<any>, + cache: ApolloCache, methodName: "evict" | "modify" | "reset" ) { const original = cache[methodName]; @@ -74,10 +74,10 @@ export class QueryInfo { variables?: Record<string, any>; stopped = false; - private cache: ApolloCache<any>; + private cache: ApolloCache; constructor( - queryManager: QueryManager<any>, + queryManager: QueryManager, public readonly queryId = queryManager.generateQueryId() ) { const cache = (this.cache = queryManager.cache); diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -137,8 +137,8 @@ interface MaskOperationOptions<TData> { fetchPolicy?: WatchQueryFetchPolicy; } -interface QueryManagerOptions<TStore> { - cache: ApolloCache<TStore>; +interface QueryManagerOptions { + cache: ApolloCache; link: ApolloLink; defaultOptions: DefaultOptions; documentTransform: DocumentTransform | null | undefined; @@ -146,14 +146,14 @@ interface QueryManagerOptions<TStore> { onBroadcast: undefined | (() => void); ssrMode: boolean; clientAwareness: Record<string, string>; - localState: LocalState<TStore>; + localState: LocalState; assumeImmutableResults: boolean; defaultContext: Partial<DefaultContext> | undefined; dataMasking: boolean; } -export class QueryManager<TStore> { - public cache: ApolloCache<TStore>; +export class 
QueryManager { + public cache: ApolloCache; public link: ApolloLink; public defaultOptions: DefaultOptions; @@ -165,7 +165,7 @@ export class QueryManager<TStore> { private queryDeduplication: boolean; private clientAwareness: Record<string, string> = {}; - private localState: LocalState<TStore>; + private localState: LocalState; private onBroadcast?: () => void; public mutationStore?: { @@ -182,7 +182,7 @@ export class QueryManager<TStore> { // @apollo/experimental-nextjs-app-support can access type info. protected fetchCancelFns = new Map<string, (error: any) => any>(); - constructor(options: QueryManagerOptions<TStore>) { + constructor(options: QueryManagerOptions) { const defaultDocumentTransform = new DocumentTransform( (document) => this.cache.transformDocument(document), // Allow the apollo cache to manage its own transform caches @@ -239,7 +239,7 @@ export class QueryManager<TStore> { TData, TVariables extends OperationVariables, TContext extends Record<string, any>, - TCache extends ApolloCache<any>, + TCache extends ApolloCache, >({ mutation, variables, @@ -413,7 +413,7 @@ export class QueryManager<TStore> { TData, TVariables, TContext, - TCache extends ApolloCache<any>, + TCache extends ApolloCache, >( mutation: { mutationId: string; @@ -624,7 +624,7 @@ export class QueryManager<TStore> { TData, TVariables, TContext, - TCache extends ApolloCache<any>, + TCache extends ApolloCache, >( optimisticResponse: any, mutation: { @@ -1106,7 +1106,7 @@ export class QueryManager<TStore> { this.queries.forEach((info) => info.notify()); } - public getLocalState(): LocalState<TStore> { + public getLocalState() { return this.localState; } @@ -1399,7 +1399,7 @@ export class QueryManager<TStore> { removeOptimistic = optimistic ? makeUniqueId("refetchQueries") : void 0, onQueryUpdated, }: InternalRefetchQueriesOptions< - ApolloCache<TStore>, + ApolloCache, TResult >): InternalRefetchQueriesMap<TResult> { const includedQueriesById = new Map< diff --git a/src/core/types.ts b/src/core/types.ts --- a/src/core/types.ts +++ b/src/core/types.ts @@ -79,10 +79,7 @@ export type InternalRefetchQueriesInclude = // Used by ApolloClient["refetchQueries"] // TODO Improve documentation comments for this public type. -export interface RefetchQueriesOptions< - TCache extends ApolloCache<any>, - TResult, -> { +export interface RefetchQueriesOptions<TCache extends ApolloCache, TResult> { updateCache?: (cache: TCache) => void; // The client.refetchQueries method discourages passing QueryOptions, by // restricting the public type of options.include to exclude QueryOptions as @@ -142,7 +139,7 @@ export interface RefetchQueriesResult<TResult> // Used by QueryManager["refetchQueries"] export interface InternalRefetchQueriesOptions< - TCache extends ApolloCache<any>, + TCache extends ApolloCache, TResult, > extends Omit<RefetchQueriesOptions<TCache, TResult>, "include"> { // Just like the refetchQueries option for a mutation, an array of strings, @@ -214,7 +211,7 @@ export type MutationUpdaterFn<T = { [key: string]: any }> = ( // The MutationUpdaterFn type is broken because it mistakenly uses the same // type parameter T for both the cache and the mutationResult. Do not use this // type unless you absolutely need it for backwards compatibility. 
- cache: ApolloCache<T>, + cache: ApolloCache, mutationResult: FetchResult<T> ) => void; @@ -222,7 +219,7 @@ export type MutationUpdaterFunction< TData, TVariables, TContext, - TCache extends ApolloCache<any>, + TCache extends ApolloCache, > = ( cache: TCache, result: Omit<FetchResult<Unmasked<TData>>, "context">, diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -270,7 +270,7 @@ interface MutationBaseOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > { /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#optimisticResponse:member} */ optimisticResponse?: @@ -311,7 +311,7 @@ export interface MutationOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > extends MutationSharedOptions<TData, TVariables, TContext, TCache> { /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#mutation:member} */ mutation: DocumentNode | TypedDocumentNode<TData, TVariables>; @@ -320,7 +320,7 @@ export interface MutationSharedOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > extends MutationBaseOptions<TData, TVariables, TContext, TCache> { /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#fetchPolicy:member} */ fetchPolicy?: MutationFetchPolicy; diff --git a/src/masking/maskDefinition.ts b/src/masking/maskDefinition.ts --- a/src/masking/maskDefinition.ts +++ b/src/masking/maskDefinition.ts @@ -17,7 +17,7 @@ interface MaskingContext { operationType: "query" | "mutation" | "subscription" | "fragment"; operationName: string | undefined; fragmentMap: FragmentMap; - cache: ApolloCache<unknown>; + cache: ApolloCache; mutableTargets: WeakMap<any, any>; knownChanged: WeakSet<any>; } diff --git a/src/masking/maskFragment.ts b/src/masking/maskFragment.ts --- a/src/masking/maskFragment.ts +++ b/src/masking/maskFragment.ts @@ -21,7 +21,7 @@ import { warnOnImproperCacheImplementation } from "./utils.js"; export function maskFragment<TData = unknown>( data: TData, document: TypedDocumentNode<TData> | DocumentNode, - cache: ApolloCache<unknown>, + cache: ApolloCache, fragmentName?: string ): TData { if (!cache.fragmentMatches) { diff --git a/src/masking/maskOperation.ts b/src/masking/maskOperation.ts --- a/src/masking/maskOperation.ts +++ b/src/masking/maskOperation.ts @@ -18,7 +18,7 @@ import { warnOnImproperCacheImplementation } from "./utils.js"; export function maskOperation<TData = unknown>( data: TData, document: DocumentNode | TypedDocumentNode<TData>, - cache: ApolloCache<unknown> + cache: ApolloCache ): TData { if (!cache.fragmentMatches) { if (__DEV__) { diff --git a/src/react/context/ApolloContext.ts b/src/react/context/ApolloContext.ts --- a/src/react/context/ApolloContext.ts +++ b/src/react/context/ApolloContext.ts @@ -6,7 +6,7 @@ import type { RenderPromises } from "@apollo/client/react/ssr"; import { invariant } from "@apollo/client/utilities/invariant"; export interface ApolloContextValue { - client?: ApolloClient<object>; + client?: ApolloClient; renderPromises?: RenderPromises; } diff --git a/src/react/hooks/internal/wrapHook.ts b/src/react/hooks/internal/wrapHook.ts --- 
a/src/react/hooks/internal/wrapHook.ts +++ b/src/react/hooks/internal/wrapHook.ts @@ -37,7 +37,7 @@ export type HookWrappers = { ) => WrappableHooks[K]; }; -interface QueryManagerWithWrappers<T> extends QueryManager<T> { +interface QueryManagerWithWrappers extends QueryManager { [wrapperSymbol]?: HookWrappers; } @@ -79,13 +79,13 @@ interface QueryManagerWithWrappers<T> extends QueryManager<T> { export function wrapHook<Hook extends (...args: any[]) => any>( hookName: keyof WrappableHooks, useHook: Hook, - clientOrObsQuery: ObservableQuery<any> | ApolloClient<any> + clientOrObsQuery: ObservableQuery<any> | ApolloClient ): Hook { const queryManager = ( clientOrObsQuery as unknown as { // both `ApolloClient` and `ObservableQuery` have a `queryManager` property // but they're both `private`, so we have to cast around for a bit here. - queryManager: QueryManagerWithWrappers<any>; + queryManager: QueryManagerWithWrappers; } )["queryManager"]; const wrappers = queryManager && queryManager[wrapperSymbol]; diff --git a/src/react/hooks/useApolloClient.ts b/src/react/hooks/useApolloClient.ts --- a/src/react/hooks/useApolloClient.ts +++ b/src/react/hooks/useApolloClient.ts @@ -19,9 +19,7 @@ import { invariant } from "@apollo/client/utilities/invariant"; * @since 3.0.0 * @returns The `ApolloClient` instance being used by the application. */ -export function useApolloClient( - override?: ApolloClient<object> -): ApolloClient<object> { +export function useApolloClient(override?: ApolloClient): ApolloClient { const context = React.useContext(getApolloContext()); const client = override || context.client; invariant( diff --git a/src/react/hooks/useFragment.ts b/src/react/hooks/useFragment.ts --- a/src/react/hooks/useFragment.ts +++ b/src/react/hooks/useFragment.ts @@ -36,7 +36,7 @@ export interface UseFragmentOptions<TData, TVars> * * @docGroup 1. 
Operation options */ - client?: ApolloClient<any>; + client?: ApolloClient; } // TODO: Update this to return `null` when there is no data returned from the diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -73,7 +73,7 @@ export interface LazyQueryHookOptions< skipPollAttempt?: () => boolean; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#client:member} */ - client?: ApolloClient<any>; + client?: ApolloClient; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ context?: DefaultContext; @@ -117,7 +117,7 @@ export interface LazyQueryResult<TData, TVariables extends OperationVariables> { } ) => Promise<ApolloQueryResult<MaybeMasked<TFetchData>>>; /** {@inheritDoc @apollo/client!QueryResultDocumentation#client:member} */ - client: ApolloClient<any>; + client: ApolloClient; /** {@inheritDoc @apollo/client!QueryResultDocumentation#observable:member} */ observable: ObservableQuery<TData, TVariables>; /** {@inheritDoc @apollo/client!QueryResultDocumentation#data:member} */ diff --git a/src/react/hooks/useMutation.ts b/src/react/hooks/useMutation.ts --- a/src/react/hooks/useMutation.ts +++ b/src/react/hooks/useMutation.ts @@ -74,7 +74,7 @@ export function useMutation< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, >( mutation: DocumentNode | TypedDocumentNode<TData, TVariables>, options?: MutationHookOptions< diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -148,7 +148,7 @@ function useInternalState< TData = any, TVariables extends OperationVariables = OperationVariables, >( - client: ApolloClient<object>, + client: ApolloClient, query: DocumentNode | TypedDocumentNode<any, any>, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>>, renderPromises: RenderPromises | undefined, @@ -258,7 +258,7 @@ function useObservableSubscriptionResult< >( resultData: InternalResult<TData, TVariables>, observable: ObservableQuery<TData, TVariables>, - client: ApolloClient<object>, + client: ApolloClient, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>>, watchQueryOptions: Readonly<WatchQueryOptions<TVariables, TData>>, disableNetworkFetches: boolean, @@ -381,7 +381,7 @@ function useResubscribeIfNecessary< resultData: InternalResult<TData, TVariables>, /** this hook will mutate properties on `observable` */ observable: ObsQueryWithMeta<TData, TVariables>, - client: ApolloClient<object>, + client: ApolloClient, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>>, watchQueryOptions: Readonly<WatchQueryOptions<TVariables, TData>> ) { @@ -420,7 +420,7 @@ function createMakeWatchQueryOptions< TData = any, TVariables extends OperationVariables = OperationVariables, >( - client: ApolloClient<object>, + client: ApolloClient, query: DocumentNode | TypedDocumentNode<TData, TVariables>, { skip, @@ -476,7 +476,7 @@ function createMakeWatchQueryOptions< function getObsQueryOptions<TData, TVariables extends OperationVariables>( observable: ObservableQuery<TData, TVariables> | undefined, - client: ApolloClient<object>, + client: ApolloClient, queryHookOptions: QueryHookOptions<TData, TVariables>, watchQueryOptions: Partial<WatchQueryOptions<TVariables, TData>> ): WatchQueryOptions<TVariables, TData> { @@ -508,7 +508,7 @@ function 
setResult<TData, TVariables extends OperationVariables>( nextResult: ApolloQueryResult<MaybeMasked<TData>>, resultData: InternalResult<TData, TVariables>, observable: ObservableQuery<TData, TVariables>, - client: ApolloClient<object>, + client: ApolloClient, forceUpdate: () => void ) { const previousResult = resultData.current; @@ -530,7 +530,7 @@ function setResult<TData, TVariables extends OperationVariables>( function getCurrentResult<TData, TVariables extends OperationVariables>( resultData: InternalResult<TData, TVariables>, observable: ObservableQuery<TData, TVariables>, - client: ApolloClient<object> + client: ApolloClient ): InternalQueryResult<TData, TVariables> { // Using this.result as a cache ensures getCurrentResult continues returning // the same (===) result object, unless state.setResult has been called, or @@ -562,7 +562,7 @@ function toQueryResult<TData, TVariables extends OperationVariables>( result: ApolloQueryResult<MaybeMasked<TData>>, previousData: MaybeMasked<TData> | undefined, observable: ObservableQuery<TData, TVariables>, - client: ApolloClient<object> + client: ApolloClient ): InternalQueryResult<TData, TVariables> { const { data, partial, ...resultWithoutPartial } = result; const queryResult: InternalQueryResult<TData, TVariables> = { diff --git a/src/react/hooks/useQueryRefHandlers.ts b/src/react/hooks/useQueryRefHandlers.ts --- a/src/react/hooks/useQueryRefHandlers.ts +++ b/src/react/hooks/useQueryRefHandlers.ts @@ -62,7 +62,7 @@ export function useQueryRefHandlers< // return any truthy value that is passed in as an override so we cast the result (unwrapped["observable"] as any) : undefined - ) as ApolloClient<any> | ObservableQuery<TData>; + ) as ApolloClient | ObservableQuery<TData>; return wrapHook( "useQueryRefHandlers", diff --git a/src/react/hooks/useReadQuery.ts b/src/react/hooks/useReadQuery.ts --- a/src/react/hooks/useReadQuery.ts +++ b/src/react/hooks/useReadQuery.ts @@ -54,7 +54,7 @@ export function useReadQuery<TData>( // return any truthy value that is passed in as an override so we cast the result (unwrapped["observable"] as any) : undefined - ) as ApolloClient<any> | ObservableQuery<TData>; + ) as ApolloClient | ObservableQuery<TData>; return wrapHook( "useReadQuery", diff --git a/src/react/hooks/useSubscription.ts b/src/react/hooks/useSubscription.ts --- a/src/react/hooks/useSubscription.ts +++ b/src/react/hooks/useSubscription.ts @@ -320,7 +320,7 @@ function createSubscription< TData = any, TVariables extends OperationVariables = OperationVariables, >( - client: ApolloClient<any>, + client: ApolloClient, query: TypedDocumentNode<TData, TVariables>, variables: TVariables | undefined, fetchPolicy: FetchPolicy | undefined, diff --git a/src/react/hooks/useSuspenseFragment.ts b/src/react/hooks/useSuspenseFragment.ts --- a/src/react/hooks/useSuspenseFragment.ts +++ b/src/react/hooks/useSuspenseFragment.ts @@ -55,7 +55,7 @@ export type UseSuspenseFragmentOptions< * * @docGroup 1. 
Operation options */ - client?: ApolloClient<any>; + client?: ApolloClient; } & VariablesOption<NoInfer<TVariables>>; export type UseSuspenseFragmentResult<TData> = { data: MaybeMasked<TData> }; diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts --- a/src/react/hooks/useSuspenseQuery.ts +++ b/src/react/hooks/useSuspenseQuery.ts @@ -36,7 +36,7 @@ export interface UseSuspenseQueryResult< TData = unknown, TVariables extends OperationVariables = OperationVariables, > { - client: ApolloClient<any>; + client: ApolloClient; data: MaybeMasked<TData>; error: ErrorLike | undefined; fetchMore: FetchMoreFunction<TData, TVariables>; @@ -338,7 +338,7 @@ interface UseWatchQueryOptionsHookOptions< TData, TVariables extends OperationVariables, > { - client: ApolloClient<unknown>; + client: ApolloClient; query: DocumentNode | TypedDocumentNode<TData, TVariables>; options: SkipToken | SuspenseQueryHookOptions<TData, TVariables>; } diff --git a/src/react/internal/cache/FragmentReference.ts b/src/react/internal/cache/FragmentReference.ts --- a/src/react/internal/cache/FragmentReference.ts +++ b/src/react/internal/cache/FragmentReference.ts @@ -41,7 +41,7 @@ export class FragmentReference< private references = 0; constructor( - client: ApolloClient<any>, + client: ApolloClient, watchFragmentOptions: WatchFragmentOptions<TData, TVariables> & { from: string; }, @@ -173,7 +173,7 @@ export class FragmentReference< } private getDiff<TData, TVariables>( - client: ApolloClient<any>, + client: ApolloClient, options: WatchFragmentOptions<TData, TVariables> & { from: string } ) { const { cache } = client; diff --git a/src/react/internal/cache/SuspenseCache.ts b/src/react/internal/cache/SuspenseCache.ts --- a/src/react/internal/cache/SuspenseCache.ts +++ b/src/react/internal/cache/SuspenseCache.ts @@ -56,7 +56,7 @@ export class SuspenseCache { getFragmentRef<TData, TVariables>( cacheKey: FragmentCacheKey, - client: ApolloClient<any>, + client: ApolloClient, options: WatchFragmentOptions<TData, TVariables> & { from: string } ) { const ref = this.fragmentRefs.lookupArray(cacheKey) as { diff --git a/src/react/internal/cache/getSuspenseCache.ts b/src/react/internal/cache/getSuspenseCache.ts --- a/src/react/internal/cache/getSuspenseCache.ts +++ b/src/react/internal/cache/getSuspenseCache.ts @@ -14,7 +14,7 @@ declare module "@apollo/client/core" { const suspenseCacheSymbol = Symbol.for("apollo.suspenseCache"); export function getSuspenseCache( - client: ApolloClient<object> & { + client: ApolloClient & { [suspenseCacheSymbol]?: SuspenseCache; } ) { diff --git a/src/react/query-preloader/createQueryPreloader.ts b/src/react/query-preloader/createQueryPreloader.ts --- a/src/react/query-preloader/createQueryPreloader.ts +++ b/src/react/query-preloader/createQueryPreloader.ts @@ -153,7 +153,7 @@ export interface PreloadQueryFunction { * @since 3.9.0 */ export function createQueryPreloader( - client: ApolloClient<any> + client: ApolloClient ): PreloadQueryFunction { return wrapHook( "createQueryPreloader", diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -43,7 +43,7 @@ export type { export type { DefaultContext as Context } from "../../core/index.js"; export type CommonOptions<TOptions> = TOptions & { - client?: ApolloClient<object>; + client?: ApolloClient; }; /* Query types */ @@ -55,7 +55,7 @@ export interface BaseQueryOptions< /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#ssr:member} */ ssr?: boolean; 
/** {@inheritDoc @apollo/client!QueryOptionsDocumentation#client:member} */ - client?: ApolloClient<any>; + client?: ApolloClient; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ context?: DefaultContext; } @@ -120,7 +120,7 @@ export interface QueryResult< TVariables extends OperationVariables = OperationVariables, > extends ObservableQueryFields<TData, TVariables> { /** {@inheritDoc @apollo/client!QueryResultDocumentation#client:member} */ - client: ApolloClient<any>; + client: ApolloClient; /** {@inheritDoc @apollo/client!QueryResultDocumentation#observable:member} */ observable: ObservableQuery<TData, TVariables>; /** {@inheritDoc @apollo/client!QueryResultDocumentation#data:member} */ @@ -161,7 +161,7 @@ export interface SuspenseQueryHookOptions< TVariables extends OperationVariables = OperationVariables, > { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#client:member} */ - client?: ApolloClient<any>; + client?: ApolloClient; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ context?: DefaultContext; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#variables:member} */ @@ -230,7 +230,7 @@ export type LoadableQueryHookFetchPolicy = Extract< export interface LoadableQueryHookOptions { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#client:member} */ - client?: ApolloClient<any>; + client?: ApolloClient; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ context?: DefaultContext; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#errorPolicy:member} */ @@ -255,10 +255,10 @@ export interface BaseMutationOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > extends MutationSharedOptions<TData, TVariables, TContext, TCache> { /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#client:member} */ - client?: ApolloClient<object>; + client?: ApolloClient; /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#notifyOnNetworkStatusChange:member} */ notifyOnNetworkStatusChange?: boolean; /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#onCompleted:member} */ @@ -274,7 +274,7 @@ export interface MutationFunctionOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > extends BaseMutationOptions<TData, TVariables, TContext, TCache> { /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#mutation:member} */ mutation?: DocumentNode | TypedDocumentNode<TData, TVariables>; @@ -290,7 +290,7 @@ export interface MutationResult<TData = any> { /** {@inheritDoc @apollo/client!MutationResultDocumentation#called:member} */ called: boolean; /** {@inheritDoc @apollo/client!MutationResultDocumentation#client:member} */ - client: ApolloClient<object>; + client: ApolloClient; /** {@inheritDoc @apollo/client!MutationResultDocumentation#reset:member} */ reset: () => void; } @@ -299,7 +299,7 @@ export declare type MutationFunction< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > = ( options?: MutationFunctionOptions<TData, TVariables, TContext, TCache> ) => Promise<FetchResult<MaybeMasked<TData>>>; @@ -308,14 +308,14 @@ export interface MutationHookOptions< TData = any, TVariables = 
OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > extends BaseMutationOptions<TData, TVariables, TContext, TCache> {} export interface MutationDataOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > extends BaseMutationOptions<TData, TVariables, TContext, TCache> { mutation: DocumentNode | TypedDocumentNode<TData, TVariables>; } @@ -324,7 +324,7 @@ export type MutationTuple< TData, TVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache = ApolloCache, > = [ mutate: ( options?: MutationFunctionOptions<TData, TVariables, TContext, TCache> @@ -337,12 +337,12 @@ export type MutationTuple< /* Subscription types */ export interface OnDataOptions<TData = any> { - client: ApolloClient<object>; + client: ApolloClient; data: SubscriptionResult<TData>; } export interface OnSubscriptionDataOptions<TData = any> { - client: ApolloClient<object>; + client: ApolloClient; subscriptionData: SubscriptionResult<TData>; } @@ -361,7 +361,7 @@ export interface BaseSubscriptionOptions< | boolean | ((options: BaseSubscriptionOptions<TData, TVariables>) => boolean); /** {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#client:member} */ - client?: ApolloClient<object>; + client?: ApolloClient; /** {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#skip:member} */ skip?: boolean; /** {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#context:member} */ diff --git a/src/utilities/internal/getMemoryInternals.ts b/src/utilities/internal/getMemoryInternals.ts --- a/src/utilities/internal/getMemoryInternals.ts +++ b/src/utilities/internal/getMemoryInternals.ts @@ -131,7 +131,7 @@ function getCurrentCacheSizes() { ); } -function _getApolloClientMemoryInternals(this: ApolloClient<any>) { +function _getApolloClientMemoryInternals(this: ApolloClient) { if (!__DEV__) throw new Error("only supported in development mode"); return { @@ -155,7 +155,7 @@ function _getApolloClientMemoryInternals(this: ApolloClient<any>) { }; } -function _getApolloCacheMemoryInternals(this: ApolloCache<any>) { +function _getApolloCacheMemoryInternals(this: ApolloCache) { return { cache: { fragmentQueryDocuments: getWrapperInformation(this["getFragmentDoc"]),
diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -15,6 +15,7 @@ import { createFragmentRegistry, InMemoryCache, makeVar, + NormalizedCacheObject, PossibleTypesMap, } from "@apollo/client/cache"; import { @@ -3735,7 +3736,7 @@ describe("custom document transforms", () => { ], }); - const cache = client.cache.extract(); + const cache = client.cache.extract() as NormalizedCacheObject; expect(cache["Dog:1"]).toEqual({ id: 1, @@ -4227,7 +4228,7 @@ describe("custom document transforms", () => { }, }); - const cache = client.cache.extract(); + const cache = client.cache.extract() as NormalizedCacheObject; expect(cache["User:1"]).toEqual({ __typename: "User", @@ -4458,7 +4459,7 @@ describe("custom document transforms", () => { }, }); - const cache = client.cache.extract(); + const cache = client.cache.extract() as NormalizedCacheObject; expect(cache["Profile:1"]).toEqual({ __typename: "Profile", diff --git a/src/__tests__/dataMasking.ts b/src/__tests__/dataMasking.ts --- a/src/__tests__/dataMasking.ts +++ b/src/__tests__/dataMasking.ts @@ -5888,7 +5888,7 @@ describe("client.mutate", () => { }); }); -class TestCache extends ApolloCache<unknown> { +class TestCache extends ApolloCache { public diff<T>(query: Cache.DiffOptions): DataProxy.DiffResult<T> { return { result: null, complete: false }; } @@ -5901,9 +5901,7 @@ class TestCache extends ApolloCache<unknown> { return undefined; } - public performTransaction( - transaction: <TSerialized>(c: ApolloCache<TSerialized>) => void - ): void { + public performTransaction(transaction: (c: ApolloCache) => void): void { transaction(this); } @@ -5919,7 +5917,7 @@ class TestCache extends ApolloCache<unknown> { return new Promise<void>(() => null); } - public restore(serializedState: unknown): ApolloCache<unknown> { + public restore(serializedState: unknown): this { return this; } diff --git a/src/__tests__/fetchMore.ts b/src/__tests__/fetchMore.ts --- a/src/__tests__/fetchMore.ts +++ b/src/__tests__/fetchMore.ts @@ -519,7 +519,7 @@ describe("fetchMore on an observable query", () => { `; function makeClient(): { - client: ApolloClient<any>; + client: ApolloClient; linkRequests: Array<{ operationName: string; offset: number; @@ -573,7 +573,7 @@ describe("fetchMore on an observable query", () => { }; } - function checkCacheExtract1234678(cache: ApolloCache<any>) { + function checkCacheExtract1234678(cache: ApolloCache) { expect(cache.extract()).toEqual({ ROOT_QUERY: { __typename: "Query", diff --git a/src/__tests__/local-state/general.ts b/src/__tests__/local-state/general.ts --- a/src/__tests__/local-state/general.ts +++ b/src/__tests__/local-state/general.ts @@ -343,7 +343,7 @@ describe("Cache manipulation", () => { start: ( _1: any, variables: { field: string }, - { cache }: { cache: ApolloCache<any> } + { cache }: { cache: ApolloCache } ) => { cache.writeQuery({ query, data: { field: variables.field } }); return { @@ -582,7 +582,7 @@ describe("Sample apps", () => { return ( _result: {}, variables: { amount: number }, - { cache }: { cache: ApolloCache<any> } + { cache }: { cache: ApolloCache } ): null => { const read = client.readQuery<{ count: number }>({ query, @@ -669,7 +669,7 @@ describe("Sample apps", () => { return ( _result: {}, variables: Todo, - { cache }: { cache: ApolloCache<any> } + { cache }: { cache: ApolloCache } ): null => { const data = updater(client.readQuery({ query, variables }), variables); cache.writeQuery({ query, variables, data }); diff --git 
a/src/__tests__/optimistic.ts b/src/__tests__/optimistic.ts --- a/src/__tests__/optimistic.ts +++ b/src/__tests__/optimistic.ts @@ -3,7 +3,11 @@ import { assign, cloneDeep } from "lodash"; import { firstValueFrom, from, lastValueFrom, Observable } from "rxjs"; import { map, take, toArray } from "rxjs/operators"; -import { Cache, InMemoryCache } from "@apollo/client/cache"; +import { + Cache, + InMemoryCache, + NormalizedCacheObject, +} from "@apollo/client/cache"; import { ApolloCache, ApolloClient, @@ -19,7 +23,6 @@ import { } from "@apollo/client/testing"; import { addTypenameToDocument } from "@apollo/client/utilities"; -import { QueryManager } from "../core/QueryManager.js"; import { ObservableStream } from "../testing/internal/index.js"; describe("optimistic mutation results", () => { @@ -228,7 +231,9 @@ describe("optimistic mutation results", () => { }); { - const dataInStore = client.cache.extract(true); + const dataInStore = client.cache.extract( + true + ) as NormalizedCacheObject; expect((dataInStore["TodoList5"] as any).todos.length).toBe(4); expect((dataInStore["Todo99"] as any).text).toBe( "Optimistically generated" @@ -240,7 +245,9 @@ describe("optimistic mutation results", () => { ); { - const dataInStore = client.cache.extract(true); + const dataInStore = client.cache.extract( + true + ) as NormalizedCacheObject; expect((dataInStore["TodoList5"] as any).todos.length).toBe(3); expect(dataInStore).not.toHaveProperty("Todo99"); } @@ -345,7 +352,7 @@ describe("optimistic mutation results", () => { await expect(stream).toEmitNext(); - const queryManager: QueryManager<any> = (client as any).queryManager; + const queryManager = client["queryManager"]; const promise = client .mutate({ @@ -919,7 +926,9 @@ describe("optimistic mutation results", () => { "todos" in initialList && Array.isArray(initialList.todos); - const initialList = client.cache.extract(true)[id]; + const initialList = (client.cache.extract(true) as NormalizedCacheObject)[ + id + ]; if (!isTodoList(initialList)) { throw new Error("Expected TodoList"); @@ -960,7 +969,7 @@ describe("optimistic mutation results", () => { }, }); - const list = client.cache.extract(true)[id]; + const list = (client.cache.extract(true) as NormalizedCacheObject)[id]; if (!isTodoList(list)) { throw new Error("Expected TodoList"); @@ -1910,7 +1919,7 @@ describe("optimistic mutation results", () => { type Item = ReturnType<typeof makeItem>; type Data = { items: Item[] }; - function append(cache: ApolloCache<any>, item: Item) { + function append(cache: ApolloCache, item: Item) { const data = cache.readQuery<Data>({ query }); cache.writeQuery({ query, diff --git a/src/__tests__/resultCacheCleaning.ts b/src/__tests__/resultCacheCleaning.ts --- a/src/__tests__/resultCacheCleaning.ts +++ b/src/__tests__/resultCacheCleaning.ts @@ -1,6 +1,6 @@ import { makeExecutableSchema } from "@graphql-tools/schema"; -import { InMemoryCache, NormalizedCacheObject } from "@apollo/client/cache"; +import { InMemoryCache } from "@apollo/client/cache"; import { ApolloClient, gql, Resolvers } from "@apollo/client/core"; import { SchemaLink } from "@apollo/client/link/schema"; @@ -135,7 +135,7 @@ describe("resultCache cleaning", () => { }, }; - let client: ApolloClient<NormalizedCacheObject>; + let client: ApolloClient; beforeEach(() => { client = new ApolloClient({ diff --git a/src/cache/core/__tests__/cache.ts b/src/cache/core/__tests__/cache.ts --- a/src/cache/core/__tests__/cache.ts +++ b/src/cache/core/__tests__/cache.ts @@ -6,7 +6,7 @@ import { Cache, DataProxy } 
from "@apollo/client/cache"; import { Reference } from "../../../utilities/graphql/storeUtils.js"; import { ApolloCache } from "../cache.js"; -class TestCache extends ApolloCache<unknown> { +class TestCache extends ApolloCache { constructor() { super(); } @@ -23,9 +23,7 @@ class TestCache extends ApolloCache<unknown> { return undefined; } - public performTransaction( - transaction: <TSerialized>(c: ApolloCache<TSerialized>) => void - ): void { + public performTransaction(transaction: (c: ApolloCache) => void): void { transaction(this); } @@ -36,7 +34,7 @@ class TestCache extends ApolloCache<unknown> { } public recordOptimisticTransaction( - transaction: <TSerialized>(c: ApolloCache<TSerialized>) => void, + transaction: (c: ApolloCache) => void, id: string ): void {} @@ -46,7 +44,7 @@ class TestCache extends ApolloCache<unknown> { return new Promise<void>(() => null); } - public restore(serializedState: unknown): ApolloCache<unknown> { + public restore(serializedState: unknown) { return this; } diff --git a/src/cache/inmemory/__tests__/entityStore.ts b/src/cache/inmemory/__tests__/entityStore.ts --- a/src/cache/inmemory/__tests__/entityStore.ts +++ b/src/cache/inmemory/__tests__/entityStore.ts @@ -1690,7 +1690,7 @@ describe("EntityStore", () => { } `; - function writeInitialData(cache: ApolloCache<any>) { + function writeInitialData(cache: ApolloCache) { cache.writeQuery({ query, data: { diff --git a/src/core/__tests__/ApolloClient/general.test.ts b/src/core/__tests__/ApolloClient/general.test.ts --- a/src/core/__tests__/ApolloClient/general.test.ts +++ b/src/core/__tests__/ApolloClient/general.test.ts @@ -38,9 +38,9 @@ import { // This was originally imported from the ObservableQuery test, but the import // causes that test file to run when trying to run just this file so this is now // inlined. 
-const mockFetchQuery = (queryManager: QueryManager<any>) => { +const mockFetchQuery = (queryManager: QueryManager) => { const fetchObservableWithInfo = queryManager["fetchObservableWithInfo"]; - const fetchQueryByPolicy: QueryManager<any>["fetchQueryByPolicy"] = ( + const fetchQueryByPolicy: QueryManager["fetchQueryByPolicy"] = ( queryManager as any ).fetchQueryByPolicy; @@ -1748,10 +1748,12 @@ describe("ApolloClient", () => { }, }; + const cache = new InMemoryCache({ + dataIdFromObject: getIdField, + }); + const client = new ApolloClient({ - cache: new InMemoryCache({ - dataIdFromObject: getIdField, - }), + cache, link: new MockLink([ { request: { query: mutation }, @@ -1765,7 +1767,7 @@ describe("ApolloClient", () => { expect(result).toEqualFetchResult({ data }); // Make sure we updated the store with the new data - expect(client.cache.extract()["5"]).toEqual({ + expect(cache.extract()["5"]).toEqual({ id: "5", isPrivate: true, }); @@ -1786,10 +1788,11 @@ describe("ApolloClient", () => { isPrivate: true, }, }; + const cache = new InMemoryCache({ + dataIdFromObject: getIdField, + }); const client = new ApolloClient({ - cache: new InMemoryCache({ - dataIdFromObject: getIdField, - }), + cache, link: new MockLink([ { request: { query: mutation }, @@ -1803,7 +1806,7 @@ describe("ApolloClient", () => { expect(result).toEqualFetchResult({ data }); // Make sure we updated the store with the new data - expect(client.cache.extract()["5"]).toEqual({ + expect(cache.extract()["5"]).toEqual({ id: "5", isPrivate: true, }); @@ -1826,10 +1829,11 @@ describe("ApolloClient", () => { }, }; + const cache = new InMemoryCache({ + dataIdFromObject: getIdField, + }); const client = new ApolloClient({ - cache: new InMemoryCache({ - dataIdFromObject: getIdField, - }), + cache, link: new MockLink([ { request: { query: mutation }, @@ -1843,7 +1847,7 @@ describe("ApolloClient", () => { expect(result).toEqualFetchResult({ data }); // Make sure we updated the store with the new data - expect(client.cache.extract()["5"]).toEqual({ + expect(cache.extract()["5"]).toEqual({ id: "5", isPrivate: true, }); @@ -2170,8 +2174,9 @@ describe("ApolloClient", () => { lastName: "Pandya", }, }; + const cache = new InMemoryCache(); const client = new ApolloClient({ - cache: new InMemoryCache(), + cache, link: new MockLink([ { request: { query }, @@ -2196,7 +2201,7 @@ describe("ApolloClient", () => { client.query({ query, fetchPolicy: "network-only" }) ).rejects.toThrow(); - expect(client.cache.extract().ROOT_QUERY!.author).toEqual(data.author); + expect(cache.extract().ROOT_QUERY!.author).toEqual(data.author); }); it("should be able to unsubscribe from a polling query subscription", async () => { @@ -2260,8 +2265,9 @@ describe("ApolloClient", () => { lastName: "Smith", }, }; + const cache = new InMemoryCache(); const client = new ApolloClient({ - cache: new InMemoryCache(), + cache, link: new MockLink([ { request: { query }, @@ -2286,7 +2292,7 @@ describe("ApolloClient", () => { networkStatus: NetworkStatus.ready, partial: false, }); - expect(client.cache.extract().ROOT_QUERY!.author).toEqual(data.author); + expect(cache.extract().ROOT_QUERY!.author).toEqual(data.author); await expect(stream).toEmitApolloQueryResult({ data, @@ -2295,7 +2301,7 @@ describe("ApolloClient", () => { networkStatus: NetworkStatus.error, partial: false, }); - expect(client.cache.extract().ROOT_QUERY!.author).toEqual(data.author); + expect(cache.extract().ROOT_QUERY!.author).toEqual(data.author); }); it("should not fire next on an observer if there is no change 
in the result", async () => { @@ -3034,7 +3040,7 @@ describe("ApolloClient", () => { const mocks = mockFetchQuery(queryManager); const queryId = "1"; - const getQuery: QueryManager<any>["getQuery"] = ( + const getQuery: QueryManager["getQuery"] = ( queryManager as any ).getQuery.bind(queryManager); diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -42,15 +42,15 @@ import { import type { ObservableAndInfo } from "../QueryManager.js"; import { QueryManager } from "../QueryManager.js"; -export const mockFetchQuery = (queryManager: QueryManager<any>) => { +export const mockFetchQuery = (queryManager: QueryManager) => { const mocks = { fetchObservableWithInfo: jest.fn< ObservableAndInfo<unknown>, - Parameters<QueryManager<any>["fetchObservableWithInfo"]> + Parameters<QueryManager["fetchObservableWithInfo"]> >(queryManager["fetchObservableWithInfo"].bind(queryManager)), fetchQueryByPolicy: jest.fn< ObservableAndInfo<unknown>, - Parameters<QueryManager<any>["fetchQueryByPolicy"]> + Parameters<QueryManager["fetchQueryByPolicy"]> >(queryManager["fetchQueryByPolicy"].bind(queryManager)), }; diff --git a/src/react/context/__tests__/ApolloProvider.test.tsx b/src/react/context/__tests__/ApolloProvider.test.tsx --- a/src/react/context/__tests__/ApolloProvider.test.tsx +++ b/src/react/context/__tests__/ApolloProvider.test.tsx @@ -31,7 +31,7 @@ describe("<ApolloProvider /> Component", () => { it("should support the 2.0", () => { render( - <ApolloProvider client={{} as ApolloClient<any>}> + <ApolloProvider client={{} as ApolloClient}> <div className="unique">Test</div> </ApolloProvider> ); @@ -114,8 +114,8 @@ describe("<ApolloProvider /> Component", () => { describe.each< [ string, - Omit<ApolloProviderProps<any>, "children">, - Omit<ApolloProviderProps<any>, "children">, + Omit<ApolloProviderProps, "children">, + Omit<ApolloProviderProps, "children">, ] >([["client", { client }, { client: anotherClient }]])( "context value stability, %s prop", diff --git a/src/react/hooks/__tests__/useFragment.test.tsx b/src/react/hooks/__tests__/useFragment.test.tsx --- a/src/react/hooks/__tests__/useFragment.test.tsx +++ b/src/react/hooks/__tests__/useFragment.test.tsx @@ -2438,7 +2438,7 @@ describe.skip("Type Tests", () => { fragmentName?: string; optimistic?: boolean; variables?: TVars; - client?: ApolloClient<any>; + client?: ApolloClient; }>(); }); }); diff --git a/src/react/hooks/__tests__/useLoadableQuery.test.tsx b/src/react/hooks/__tests__/useLoadableQuery.test.tsx --- a/src/react/hooks/__tests__/useLoadableQuery.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery.test.tsx @@ -240,7 +240,7 @@ async function renderWithMocks( async function renderWithClient( ui: React.ReactElement, - options: { client: ApolloClient<any> }, + options: { client: ApolloClient }, { render: doRender }: { render: AsyncRenderFn | typeof renderAsync } ) { const { client } = options; diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -1707,7 +1707,7 @@ describe("useMutation Hook", () => { expect(data).toBeUndefined(); void createTodo({ variables }); - const dataInStore = client.cache.extract(true); + const dataInStore = cache.extract(true); expect(dataInStore["Todo:1"]).toEqual( optimisticResponse.createTodo ); diff --git 
a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -72,13 +72,13 @@ import { const IS_REACT_19 = React.version.startsWith("19"); -type RenderSuspenseHookOptions<Props, TSerializedCache = {}> = Omit< +type RenderSuspenseHookOptions<Props> = Omit< RenderHookOptions<Props>, "wrapper" > & { - client?: ApolloClient<TSerializedCache>; + client?: ApolloClient; link?: ApolloLink; - cache?: ApolloCache<TSerializedCache>; + cache?: ApolloCache; mocks?: MockedResponse[]; strictMode?: boolean; }; diff --git a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx --- a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx @@ -63,7 +63,7 @@ async function renderDefaultTestApp<TData>({ client, queryRef, }: { - client: ApolloClient<any>; + client: ApolloClient; queryRef: QueryRef<TData>; }) { const renderStream = createRenderStream({ diff --git a/src/testing/core/mocking/mockClient.ts b/src/testing/core/mocking/mockClient.ts --- a/src/testing/core/mocking/mockClient.ts +++ b/src/testing/core/mocking/mockClient.ts @@ -1,6 +1,5 @@ import type { DocumentNode } from "graphql"; -import type { NormalizedCacheObject } from "@apollo/client/cache"; import { InMemoryCache } from "@apollo/client/cache"; import { ApolloClient } from "@apollo/client/core"; @@ -10,7 +9,7 @@ export function createMockClient<TData>( data: TData, query: DocumentNode, variables = {} -): ApolloClient<NormalizedCacheObject> { +): ApolloClient { return new ApolloClient({ link: mockSingleLink({ request: { query, variables }, diff --git a/src/testing/internal/renderHelpers.tsx b/src/testing/internal/renderHelpers.tsx --- a/src/testing/internal/renderHelpers.tsx +++ b/src/testing/internal/renderHelpers.tsx @@ -6,7 +6,7 @@ import type { MockedProviderProps } from "@apollo/client/testing/react"; import { MockedProvider } from "@apollo/client/testing/react"; export function createClientWrapper( - client: ApolloClient<any>, + client: ApolloClient, Wrapper: React.JSXElementConstructor<{ children: React.ReactNode; }> = React.Fragment @@ -23,7 +23,7 @@ export function createClientWrapper( } export function createMockWrapper( - renderOptions: MockedProviderProps<any>, + renderOptions: MockedProviderProps, Wrapper: React.JSXElementConstructor<{ children: React.ReactNode; }> = React.Fragment diff --git a/src/testing/react/MockedProvider.tsx b/src/testing/react/MockedProvider.tsx --- a/src/testing/react/MockedProvider.tsx +++ b/src/testing/react/MockedProvider.tsx @@ -10,10 +10,10 @@ import { ApolloProvider } from "@apollo/client/react/context"; import type { MockedResponse } from "@apollo/client/testing/core"; import { MockLink } from "@apollo/client/testing/core"; -export interface MockedProviderProps<TSerializedCache = {}> { +export interface MockedProviderProps { mocks?: ReadonlyArray<MockedResponse<any, any>>; defaultOptions?: DefaultOptions; - cache?: ApolloCache<TSerializedCache>; + cache?: ApolloCache; resolvers?: Resolvers; childProps?: object; children?: any; @@ -27,7 +27,7 @@ export interface MockedProviderProps<TSerializedCache = {}> { } interface MockedProviderState { - client: ApolloClient<any>; + client: ApolloClient; } export class MockedProvider extends React.Component<
[4.0] Remove the `TCacheShape` generic argument on `ApolloClient`. `TCacheShape` hasn't provided much type-safety value, so we plan to remove it in 4.0.
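A minimal sketch of the client-side effect described above, assuming the 4.0 surface shown in this row's diffs (the `@apollo/client/core` and `@apollo/client/cache` entry points used there); the client annotation no longer takes a cache-shape argument:

```ts
import { InMemoryCache } from "@apollo/client/cache";
import { ApolloClient } from "@apollo/client/core";

// Before this change the annotation carried the serialized cache shape,
// e.g. ApolloClient<NormalizedCacheObject>; afterwards it takes no type
// argument at all.
const client: ApolloClient = new ApolloClient({
  // A link or uri would normally be configured as well; omitted here to
  // keep the sketch focused on the type change.
  cache: new InMemoryCache(),
});
```

The `mockClient.ts` hunk shown earlier makes the same move: the `NormalizedCacheObject` import goes away and the return type becomes plain `ApolloClient`.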
2025-03-17T19:09:24Z
4
apollographql/apollo-client
12451
apollographql__apollo-client-12451
[ "12306" ]
876d07042cf3348b3769b2bb415aa5f70ce9844c
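The patch that follows also moves the default `TData`/`TVariables` generics across the cache and client APIs from `any` to `unknown`/`OperationVariables`. A small sketch of a cache round trip under those defaults; the `GreetingData` type and `GREETING` document are hypothetical, for illustration only:

```ts
import { InMemoryCache } from "@apollo/client/cache";
import { TypedDocumentNode } from "@apollo/client/core";
import { gql } from "graphql-tag";

// Hypothetical result shape, used only for this sketch.
interface GreetingData {
  greeting: string;
}

const GREETING: TypedDocumentNode<GreetingData> = gql`
  query Greeting {
    greeting
  }
`;

const cache = new InMemoryCache();

// writeQuery/readQuery now default TData to unknown rather than any, so
// the data type flows from the TypedDocumentNode (or from an explicit
// type argument) instead of silently widening to any.
cache.writeQuery({ query: GREETING, data: { greeting: "hello" } });
const data = cache.readQuery({ query: GREETING }); // inferred as GreetingData | null
console.log(data?.greeting);
```

Because `TData` no longer widens to `any`, an untyped `DocumentNode` here would surface `unknown` and force the caller to supply an explicit type argument.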
diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts --- a/src/cache/core/cache.ts +++ b/src/cache/core/cache.ts @@ -106,10 +106,10 @@ export abstract class ApolloCache implements DataProxy { // required to implement // core API - public abstract read<TData = any, TVariables = any>( + public abstract read<TData = unknown, TVariables = OperationVariables>( query: Cache.ReadOptions<TVariables, TData> ): Unmasked<TData> | null; - public abstract write<TData = any, TVariables = any>( + public abstract write<TData = unknown, TVariables = OperationVariables>( write: Cache.WriteOptions<TData, TVariables> ): Reference | undefined; @@ -124,8 +124,11 @@ export abstract class ApolloCache implements DataProxy { * returned if it contains at least one field that can be fulfilled from the * cache. */ - public abstract diff<T>(query: Cache.DiffOptions): Cache.DiffResult<T>; - public abstract watch<TData = any, TVariables = any>( + public abstract diff< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >(query: Cache.DiffOptions<TData, TVariables>): Cache.DiffResult<TData>; + public abstract watch<TData = unknown, TVariables = OperationVariables>( watch: Cache.WatchOptions<TData, TVariables> ): () => void; @@ -247,10 +250,10 @@ export abstract class ApolloCache implements DataProxy { } // DataProxy API - public readQuery<QueryType, TVariables = any>( - options: Cache.ReadQueryOptions<QueryType, TVariables>, + public readQuery<TData = unknown, TVariables = OperationVariables>( + options: Cache.ReadQueryOptions<TData, TVariables>, optimistic = !!options.optimistic - ): Unmasked<QueryType> | null { + ): Unmasked<TData> | null { return this.read({ ...options, rootId: options.id || "ROOT_QUERY", @@ -259,7 +262,7 @@ export abstract class ApolloCache implements DataProxy { } /** {@inheritDoc @apollo/client!ApolloClient#watchFragment:member(1)} */ - public watchFragment<TData = any, TVars = OperationVariables>( + public watchFragment<TData = unknown, TVars = OperationVariables>( options: WatchFragmentOptions<TData, TVars> ): Observable<WatchFragmentResult<TData>> { const { @@ -360,10 +363,10 @@ export abstract class ApolloCache implements DataProxy { cache: WeakCache, }); - public readFragment<FragmentType, TVariables = any>( - options: Cache.ReadFragmentOptions<FragmentType, TVariables>, + public readFragment<TData = unknown, TVariables = OperationVariables>( + options: Cache.ReadFragmentOptions<TData, TVariables>, optimistic = !!options.optimistic - ): Unmasked<FragmentType> | null { + ): Unmasked<TData> | null { return this.read({ ...options, query: this.getFragmentDoc(options.fragment, options.fragmentName), @@ -372,7 +375,7 @@ export abstract class ApolloCache implements DataProxy { }); } - public writeQuery<TData = any, TVariables = any>({ + public writeQuery<TData = unknown, TVariables = OperationVariables>({ id, data, ...options @@ -385,7 +388,7 @@ export abstract class ApolloCache implements DataProxy { ); } - public writeFragment<TData = any, TVariables = any>({ + public writeFragment<TData = unknown, TVariables = OperationVariables>({ id, data, fragment, @@ -401,7 +404,7 @@ export abstract class ApolloCache implements DataProxy { ); } - public updateQuery<TData = any, TVariables = any>( + public updateQuery<TData = unknown, TVariables = OperationVariables>( options: Cache.UpdateQueryOptions<TData, TVariables>, update: (data: Unmasked<TData> | null) => Unmasked<TData> | null | void ): Unmasked<TData> | null { @@ -416,7 +419,7 @@ export abstract class ApolloCache 
implements DataProxy { }); } - public updateFragment<TData = any, TVariables = any>( + public updateFragment<TData = unknown, TVariables = OperationVariables>( options: Cache.UpdateFragmentOptions<TData, TVariables>, update: (data: Unmasked<TData> | null) => Unmasked<TData> | null | void ): Unmasked<TData> | null { diff --git a/src/cache/core/types/Cache.ts b/src/cache/core/types/Cache.ts --- a/src/cache/core/types/Cache.ts +++ b/src/cache/core/types/Cache.ts @@ -1,3 +1,4 @@ +import type { OperationVariables } from "@apollo/client/core"; import type { Unmasked } from "@apollo/client/masking"; import type { ApolloCache } from "../cache.js"; @@ -6,12 +7,12 @@ import type { AllFieldsModifier, Modifiers } from "./common.js"; import { DataProxy } from "./DataProxy.js"; export namespace Cache { - export type WatchCallback<TData = any> = ( + export type WatchCallback<TData = unknown> = ( diff: Cache.DiffResult<TData>, lastDiff?: Cache.DiffResult<TData> ) => void; - export interface ReadOptions<TVariables = any, TData = any> + export interface ReadOptions<TVariables = OperationVariables, TData = unknown> extends DataProxy.Query<TVariables, TData> { rootId?: string; previousResult?: any; @@ -19,22 +20,26 @@ export namespace Cache { returnPartialData?: boolean; } - export interface WriteOptions<TResult = any, TVariables = any> - extends Omit<DataProxy.Query<TVariables, TResult>, "id">, - Omit<DataProxy.WriteOptions<TResult>, "data"> { + export interface WriteOptions< + TData = unknown, + TVariables = OperationVariables, + > extends Omit<DataProxy.Query<TVariables, TData>, "id">, + Omit<DataProxy.WriteOptions<TData>, "data"> { dataId?: string; - result: Unmasked<TResult>; + result: Unmasked<TData>; } - export interface DiffOptions<TData = any, TVariables = any> + export interface DiffOptions<TData = unknown, TVariables = OperationVariables> extends Omit<ReadOptions<TVariables, TData>, "rootId"> { // The DiffOptions interface is currently just an alias for // ReadOptions, though DiffOptions used to be responsible for // declaring the returnPartialData option. } - export interface WatchOptions<TData = any, TVariables = any> - extends DiffOptions<TData, TVariables> { + export interface WatchOptions< + TData = unknown, + TVariables = OperationVariables, + > extends DiffOptions<TData, TVariables> { watcher?: object; immediate?: boolean; callback: WatchCallback<TData>; diff --git a/src/cache/core/types/DataProxy.ts b/src/cache/core/types/DataProxy.ts --- a/src/cache/core/types/DataProxy.ts +++ b/src/cache/core/types/DataProxy.ts @@ -1,6 +1,7 @@ import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; import type { DocumentNode } from "graphql"; // ignore-comment eslint-disable-line import/no-extraneous-dependencies, import/no-unresolved +import type { OperationVariables } from "@apollo/client/core"; import type { Unmasked } from "@apollo/client/masking"; import type { DeepPartial, Reference } from "@apollo/client/utilities"; @@ -125,15 +126,15 @@ export namespace DataProxy { "data" > {} - export type DiffResult<T> = + export type DiffResult<TData> = | { - result: T; + result: TData; complete: true; missing?: never; fromOptimisticTransaction?: boolean; } | { - result: DeepPartial<T> | null; + result: DeepPartial<TData> | null; complete: false; missing?: MissingFieldError; fromOptimisticTransaction?: boolean; @@ -150,25 +151,25 @@ export interface DataProxy { /** * Reads a GraphQL query from the root query id. 
*/ - readQuery<QueryType, TVariables = any>( - options: DataProxy.ReadQueryOptions<QueryType, TVariables>, + readQuery<TData = unknown, TVariables = OperationVariables>( + options: DataProxy.ReadQueryOptions<TData, TVariables>, optimistic?: boolean - ): Unmasked<QueryType> | null; + ): Unmasked<TData> | null; /** * Reads a GraphQL fragment from any arbitrary id. If there is more than * one fragment in the provided document then a `fragmentName` must be * provided to select the correct fragment. */ - readFragment<FragmentType, TVariables = any>( - options: DataProxy.ReadFragmentOptions<FragmentType, TVariables>, + readFragment<TData = unknown, TVariables = OperationVariables>( + options: DataProxy.ReadFragmentOptions<TData, TVariables>, optimistic?: boolean - ): Unmasked<FragmentType> | null; + ): Unmasked<TData> | null; /** * Writes a GraphQL query to the root query id. */ - writeQuery<TData = any, TVariables = any>( + writeQuery<TData = unknown, TVariables = OperationVariables>( options: DataProxy.WriteQueryOptions<TData, TVariables> ): Reference | undefined; @@ -177,7 +178,7 @@ export interface DataProxy { * one fragment in the provided document then a `fragmentName` must be * provided to select the correct fragment. */ - writeFragment<TData = any, TVariables = any>( + writeFragment<TData = unknown, TVariables = OperationVariables>( options: DataProxy.WriteFragmentOptions<TData, TVariables> ): Reference | undefined; } diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -47,16 +47,16 @@ export class InMemoryCache extends ApolloCache { private optimisticData!: EntityStore; protected config: InMemoryCacheConfig; - private watches = new Set<Cache.WatchOptions>(); + private watches = new Set<Cache.WatchOptions<any, any>>(); private storeReader!: StoreReader; private storeWriter!: StoreWriter; private addTypenameTransform = new DocumentTransform(addTypenameToDocument); private maybeBroadcastWatch!: OptimisticWrapperFunction< - [Cache.WatchOptions, BroadcastOptions?], + [Cache.WatchOptions<any, any>, BroadcastOptions?], any, - [Cache.WatchOptions] + [Cache.WatchOptions<any, any>] >; // Override the default value, since InMemoryCache result objects are frozen @@ -171,13 +171,19 @@ export class InMemoryCache extends ApolloCache { return (optimistic ? this.optimisticData : this.data).extract(); } - public read<T>( - options: Cache.ReadOptions & { returnPartialData: true } - ): T | DeepPartial<T> | null; + public read<TData = unknown>( + options: Cache.ReadOptions<OperationVariables, TData> & { + returnPartialData: true; + } + ): TData | DeepPartial<TData> | null; - public read<T>(options: Cache.ReadOptions): T | null; + public read<TData = unknown>( + options: Cache.ReadOptions<OperationVariables, TData> + ): TData | null; - public read<T>(options: Cache.ReadOptions): T | DeepPartial<T> | null { + public read<TData = unknown>( + options: Cache.ReadOptions<OperationVariables, TData> + ): TData | DeepPartial<TData> | null { const { // Since read returns data or null, without any additional metadata // about whether/where there might have been missing fields, the @@ -189,7 +195,7 @@ export class InMemoryCache extends ApolloCache { returnPartialData = false, } = options; - return this.storeReader.diffQueryAgainstStore<T>({ + return this.storeReader.diffQueryAgainstStore<TData>({ ...options, store: options.optimistic ? 
this.optimisticData : this.data, config: this.config, @@ -197,7 +203,9 @@ export class InMemoryCache extends ApolloCache { }).result; } - public write(options: Cache.WriteOptions): Reference | undefined { + public write<TData = unknown, TVariables = OperationVariables>( + options: Cache.WriteOptions<TData, TVariables> + ): Reference | undefined { try { ++this.txCount; return this.storeWriter.writeToStore(this.data, options); @@ -239,9 +247,10 @@ export class InMemoryCache extends ApolloCache { } } - public diff<TData, TVariables extends OperationVariables = any>( - options: Cache.DiffOptions<TData, TVariables> - ): Cache.DiffResult<TData> { + public diff< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, + >(options: Cache.DiffOptions<TData, TVariables>): Cache.DiffResult<TData> { return this.storeReader.diffQueryAgainstStore({ ...options, store: options.optimistic ? this.optimisticData : this.data, @@ -250,7 +259,7 @@ export class InMemoryCache extends ApolloCache { }); } - public watch<TData = any, TVariables = any>( + public watch<TData = unknown, TVariables = OperationVariables>( watch: Cache.WatchOptions<TData, TVariables> ): () => void { if (!this.watches.size) { diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -1,5 +1,6 @@ import type { DocumentNode, FieldNode } from "graphql"; +import type { OperationVariables } from "@apollo/client/core"; import type { Reference, StoreObject, @@ -157,7 +158,7 @@ export interface MergeTree { export interface ReadMergeModifyContext { store: NormalizedCache; - variables?: Record<string, any>; + variables?: OperationVariables; // A JSON.stringify-serialized version of context.variables. varString?: string; } diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -3,7 +3,7 @@ import { Trie } from "@wry/trie"; import type { FieldNode, SelectionSetNode } from "graphql"; import { Kind } from "graphql"; -import type { Cache } from "@apollo/client/core"; +import type { Cache, OperationVariables } from "@apollo/client/core"; import type { FragmentMap, FragmentMapFunction, @@ -127,9 +127,15 @@ export class StoreWriter { private fragments?: InMemoryCacheConfig["fragments"] ) {} - public writeToStore( + public writeToStore<TData = unknown, TVariables = OperationVariables>( store: NormalizedCache, - { query, result, dataId, variables, overwrite }: Cache.WriteOptions + { + query, + result, + dataId, + variables, + overwrite, + }: Cache.WriteOptions<TData, TVariables> ): Reference | undefined { const operationDefinition = getOperationDefinition(query)!; const merger = makeProcessedFieldsMerger(); @@ -145,7 +151,7 @@ export class StoreWriter { merge<T>(existing: T, incoming: T) { return merger.merge(existing, incoming) as T; }, - variables, + variables: variables as OperationVariables, varString: canonicalStringify(variables), ...extractFragmentContext(query, this.fragments), overwrite: !!overwrite, diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -418,9 +418,11 @@ export class ApolloClient implements DataProxy { * a description of store reactivity. 
*/ public watchQuery< - T = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, - >(options: WatchQueryOptions<TVariables, T>): ObservableQuery<T, TVariables> { + >( + options: WatchQueryOptions<TVariables, TData> + ): ObservableQuery<TData, TVariables> { if (this.defaultOptions.watchQuery) { options = mergeOptions(this.defaultOptions.watchQuery, options); } @@ -434,7 +436,7 @@ export class ApolloClient implements DataProxy { options = { ...options, fetchPolicy: "cache-first" }; } - return this.queryManager.watchQuery<T, TVariables>(options); + return this.queryManager.watchQuery<TData, TVariables>(options); } /** @@ -447,11 +449,11 @@ export class ApolloClient implements DataProxy { * server at all or just resolve from the cache, etc. */ public query< - T = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( - options: QueryOptions<TVariables, T> - ): Promise<ApolloQueryResult<MaybeMasked<T>>> { + options: QueryOptions<TVariables, TData> + ): Promise<ApolloQueryResult<MaybeMasked<TData>>> { if (this.defaultOptions.query) { options = mergeOptions(this.defaultOptions.query, options); } @@ -468,7 +470,7 @@ export class ApolloClient implements DataProxy { options = { ...options, fetchPolicy: "cache-first" }; } - return this.queryManager.query<T, TVariables>(options); + return this.queryManager.query<TData, TVariables>(options); } /** @@ -480,7 +482,7 @@ export class ApolloClient implements DataProxy { * It takes options as an object with the following keys and values: */ public mutate< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, TContext extends Record<string, any> = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -500,14 +502,14 @@ export class ApolloClient implements DataProxy { * `Observable` which either emits received data or an error. */ public subscribe< - T = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( - options: SubscriptionOptions<TVariables, T> - ): Observable<FetchResult<MaybeMasked<T>>> { + options: SubscriptionOptions<TVariables, TData> + ): Observable<FetchResult<MaybeMasked<TData>>> { const id = this.queryManager.generateQueryId(); - return this.queryManager.startGraphQLSubscription<T>(options).pipe( + return this.queryManager.startGraphQLSubscription<TData>(options).pipe( map((result) => ({ ...result, data: this.queryManager.maskOperation({ @@ -529,11 +531,11 @@ export class ApolloClient implements DataProxy { * @param optimistic - Set to `true` to allow `readQuery` to return * optimistic results. Is `false` by default. */ - public readQuery<T = any, TVariables = OperationVariables>( - options: DataProxy.Query<TVariables, T>, + public readQuery<TData = unknown, TVariables = OperationVariables>( + options: DataProxy.Query<TVariables, TData>, optimistic: boolean = false - ): Unmasked<T> | null { - return this.cache.readQuery<T, TVariables>(options, optimistic); + ): Unmasked<TData> | null { + return this.cache.readQuery<TData, TVariables>(options, optimistic); } /** @@ -553,12 +555,9 @@ export class ApolloClient implements DataProxy { * to optimistic updates. 
*/ - public watchFragment< - TFragmentData = unknown, - TVariables = OperationVariables, - >( - options: WatchFragmentOptions<TFragmentData, TVariables> - ): Observable<WatchFragmentResult<TFragmentData>> { + public watchFragment<TData = unknown, TVariables = OperationVariables>( + options: WatchFragmentOptions<TData, TVariables> + ): Observable<WatchFragmentResult<TData>> { return this.cache.watchFragment({ ...options, [Symbol.for("apollo.dataMasking")]: this.queryManager.dataMasking, @@ -579,7 +578,7 @@ export class ApolloClient implements DataProxy { * @param optimistic - Set to `true` to allow `readFragment` to return * optimistic results. Is `false` by default. */ - public readFragment<T = any, TVariables = OperationVariables>( + public readFragment<T = unknown, TVariables = OperationVariables>( options: DataProxy.Fragment<TVariables, T>, optimistic: boolean = false ): Unmasked<T> | null { @@ -591,7 +590,7 @@ export class ApolloClient implements DataProxy { * the store. This method will start at the root query. To start at a * specific id returned by `dataIdFromObject` then use `writeFragment`. */ - public writeQuery<TData = any, TVariables = OperationVariables>( + public writeQuery<TData = unknown, TVariables = OperationVariables>( options: DataProxy.WriteQueryOptions<TData, TVariables> ): Reference | undefined { const ref = this.cache.writeQuery<TData, TVariables>(options); @@ -614,7 +613,7 @@ export class ApolloClient implements DataProxy { * in a document with multiple fragments then you must also specify a * `fragmentName`. */ - public writeFragment<TData = any, TVariables = OperationVariables>( + public writeFragment<TData = unknown, TVariables = OperationVariables>( options: DataProxy.WriteFragmentOptions<TData, TVariables> ): Reference | undefined { const ref = this.cache.writeFragment<TData, TVariables>(options); diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -45,7 +45,7 @@ import type { const { assign, hasOwnProperty } = Object; export interface FetchMoreOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, > { updateQuery?: ( @@ -64,7 +64,7 @@ interface Last<TData, TVariables> { } export class ObservableQuery< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > implements diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -309,7 +309,7 @@ export class QueryManager { this.broadcastQueries(); return new Promise((resolve, reject) => { - return this.getObservableFromLink( + return this.getObservableFromLink<TData>( mutation, { ...context, @@ -411,7 +411,7 @@ export class QueryManager { public markMutationResult< TData, - TVariables, + TVariables extends OperationVariables, TContext, TCache extends ApolloCache, >( @@ -622,7 +622,7 @@ export class QueryManager { public markMutationOptimistic< TData, - TVariables, + TVariables extends OperationVariables, TContext, TCache extends ApolloCache, >( @@ -1008,9 +1008,9 @@ export class QueryManager { this.getQuery(observableQuery.queryId).setObservableQuery(observableQuery); } - public startGraphQLSubscription<T = any>( + public startGraphQLSubscription<TData = unknown>( options: SubscriptionOptions - ): Observable<FetchResult<T>> { + ): Observable<FetchResult<TData>> { let { query, variables } = options; const { fetchPolicy, @@ -1023,7 +1023,12 @@ export class QueryManager { variables = 
this.getVariables(query, variables); const makeObservable = (variables: OperationVariables) => - this.getObservableFromLink<T>(query, context, variables, extensions).pipe( + this.getObservableFromLink<TData>( + query, + context, + variables, + extensions + ).pipe( map((result) => { if (fetchPolicy !== "no-cache") { // the subscription interface should handle not sending us results we no longer subscribe to. @@ -1070,7 +1075,7 @@ export class QueryManager { .addExportedVariables(query, variables, context) .then(makeObservable); - return new Observable<FetchResult<T>>((observer) => { + return new Observable<FetchResult<TData>>((observer) => { let sub: Subscription | null = null; observablePromise.then( (observable) => (sub = observable.subscribe(observer)), @@ -1116,7 +1121,7 @@ export class QueryManager { observable?: Observable<FetchResult<any>>; }>(false); - private getObservableFromLink<T = any>( + private getObservableFromLink<TData = unknown>( query: DocumentNode, context: any, variables?: OperationVariables, @@ -1124,8 +1129,8 @@ export class QueryManager { // Prefer context.queryDeduplication if specified. deduplication: boolean = context?.queryDeduplication ?? this.queryDeduplication - ): Observable<FetchResult<T>> { - let observable: Observable<FetchResult<T>> | undefined; + ): Observable<FetchResult<TData>> { + let observable: Observable<FetchResult<TData>> | undefined; const { serverQuery, clientQuery } = this.getDocumentInfo(query); if (serverQuery) { @@ -1171,10 +1176,10 @@ export class QueryManager { ); } } else { - observable = execute(link, operation) as Observable<FetchResult<T>>; + observable = execute(link, operation) as Observable<FetchResult<TData>>; } } else { - observable = of({ data: {} } as FetchResult<T>); + observable = of({ data: {} } as FetchResult<TData>); context = this.prepareContext(context); } @@ -1211,7 +1216,7 @@ export class QueryManager { // through the link chain. const linkDocument = this.cache.transformForLink(options.query); - return this.getObservableFromLink( + return this.getObservableFromLink<TData>( linkDocument, options.context, options.variables @@ -1253,7 +1258,7 @@ export class QueryManager { } const aqr: ApolloQueryResult<TData> = { - data: result.data, + data: result.data as TData, loading: false, networkStatus: NetworkStatus.ready, partial: !result.data, diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -55,7 +55,10 @@ export type ErrorPolicy = "none" | "ignore" | "all"; /** * Query options. 
*/ -export interface QueryOptions<TVariables = OperationVariables, TData = any> { +export interface QueryOptions< + TVariables = OperationVariables, + TData = unknown, +> { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#query:member} */ query: DocumentNode | TypedDocumentNode<TData, TVariables>; @@ -86,7 +89,7 @@ export interface QueryOptions<TVariables = OperationVariables, TData = any> { */ export interface WatchQueryOptions< TVariables extends OperationVariables = OperationVariables, - TData = any, + TData = unknown, > extends SharedWatchQueryOptions<TVariables, TData> { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#query:member} */ query: DocumentNode | TypedDocumentNode<TData, TVariables>; @@ -146,7 +149,7 @@ export interface NextFetchPolicyContext< initialFetchPolicy: WatchQueryFetchPolicy; } -export interface FetchMoreQueryOptions<TVariables, TData = any> { +export interface FetchMoreQueryOptions<TVariables, TData = unknown> { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#query:member} */ query?: DocumentNode | TypedDocumentNode<TData, TVariables>; /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#variables:member} */ @@ -175,7 +178,7 @@ export type UpdateQueryOptions<TData, TVariables> = { ); export interface UpdateQueryMapFn< - TData = any, + TData = unknown, TVariables = OperationVariables, > { ( @@ -190,7 +193,7 @@ export interface UpdateQueryMapFn< } export type SubscribeToMoreUpdateQueryFn< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, TSubscriptionData = TData, > = { @@ -208,7 +211,7 @@ export type SubscribeToMoreUpdateQueryFn< }; export interface SubscribeToMoreOptions< - TData = any, + TData = unknown, TSubscriptionVariables extends OperationVariables = OperationVariables, TSubscriptionData = TData, TVariables extends OperationVariables = TSubscriptionVariables, @@ -245,7 +248,7 @@ export interface SubscribeToMoreFunction< export interface SubscriptionOptions< TVariables = OperationVariables, - TData = any, + TData = unknown, > { /** {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#query:member} */ query: DocumentNode | TypedDocumentNode<TData, TVariables>; @@ -267,7 +270,7 @@ export interface SubscriptionOptions< } interface MutationBaseOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -308,7 +311,7 @@ interface MutationBaseOptions< } export interface MutationOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -317,7 +320,7 @@ export interface MutationOptions< mutation: DocumentNode | TypedDocumentNode<TData, TVariables>; } export interface MutationSharedOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, diff --git a/src/react/hooks/useFragment.ts b/src/react/hooks/useFragment.ts --- a/src/react/hooks/useFragment.ts +++ b/src/react/hooks/useFragment.ts @@ -16,13 +16,13 @@ import { useDeepMemo, wrapHook } from "./internal/index.js"; import { useApolloClient } from "./useApolloClient.js"; import { useSyncExternalStore } from "./useSyncExternalStore.js"; -export interface UseFragmentOptions<TData, TVars> +export interface UseFragmentOptions<TData, TVariables> extends Omit< - Cache.DiffOptions<NoInfer<TData>, NoInfer<TVars>>, + Cache.DiffOptions<NoInfer<TData>, NoInfer<TVariables>>, "id" | 
"query" | "optimistic" | "previousResult" | "returnPartialData" >, Omit< - Cache.ReadFragmentOptions<TData, TVars>, + Cache.ReadFragmentOptions<TData, TVariables>, "id" | "variables" | "returnPartialData" > { from: StoreObject | Reference | FragmentType<NoInfer<TData>> | string | null; @@ -53,9 +53,10 @@ export type UseFragmentResult<TData> = missing?: MissingTree; }; -export function useFragment<TData = any, TVars = OperationVariables>( - options: UseFragmentOptions<TData, TVars> -): UseFragmentResult<TData> { +export function useFragment< + TData = unknown, + TVariables extends OperationVariables = OperationVariables, +>(options: UseFragmentOptions<TData, TVariables>): UseFragmentResult<TData> { return wrapHook( "useFragment", // eslint-disable-next-line react-compiler/react-compiler @@ -64,8 +65,8 @@ export function useFragment<TData = any, TVars = OperationVariables>( )(options); } -function useFragment_<TData = any, TVars = OperationVariables>( - options: UseFragmentOptions<TData, TVars> +function useFragment_<TData, TVariables extends OperationVariables>( + options: UseFragmentOptions<TData, TVariables> ): UseFragmentResult<TData> { const client = useApolloClient(options.client); const { cache } = client; @@ -100,7 +101,7 @@ function useFragment_<TData = any, TVars = OperationVariables>( } const { cache } = client; - const diff = cache.diff<TData>({ + const diff = cache.diff<TData, TVariables>({ ...stableOptions, returnPartialData: true, id: from, diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -39,7 +39,7 @@ import { useApolloClient } from "./useApolloClient.js"; import { useSyncExternalStore } from "./useSyncExternalStore.js"; export interface LazyQueryHookOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#fetchPolicy:member} */ @@ -200,7 +200,7 @@ const EAGER_METHODS = [ * @returns A tuple in the form of `[execute, result]` */ export function useLazyQuery< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( query: DocumentNode | TypedDocumentNode<TData, TVariables>, diff --git a/src/react/hooks/useMutation.ts b/src/react/hooks/useMutation.ts --- a/src/react/hooks/useMutation.ts +++ b/src/react/hooks/useMutation.ts @@ -71,7 +71,7 @@ import { useApolloClient } from "./useApolloClient.js"; * @returns A tuple in the form of `[mutate, result]` */ export function useMutation< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -86,7 +86,9 @@ export function useMutation< ): MutationTuple<TData, TVariables, TContext, TCache> { const client = useApolloClient(options?.client); verifyDocumentType(mutation, DocumentType.Mutation); - const [result, setResult] = React.useState<Omit<MutationResult, "reset">>({ + const [result, setResult] = React.useState< + Omit<MutationResult<TData>, "reset"> + >({ called: false, loading: false, client, @@ -147,10 +149,7 @@ export function useMutation< executeOptions.onError || ref.current.options?.onError; if (error && onError) { - onError( - error, - clientOptions as MutationOptions<TData, OperationVariables> - ); + onError(error, clientOptions); } if (mutationId === ref.current.mutationId) { @@ -171,10 +170,7 @@ export function useMutation< executeOptions.onCompleted || ref.current.options?.onCompleted; if 
(!error) { - onCompleted?.( - response.data!, - clientOptions as MutationOptions<TData, OperationVariables> - ); + onCompleted?.(response.data!, clientOptions); } return response; @@ -201,10 +197,7 @@ export function useMutation< executeOptions.onError || ref.current.options?.onError; if (onError) { - onError( - error, - clientOptions as MutationOptions<TData, OperationVariables> - ); + onError(error, clientOptions); // TODO(brian): why are we returning this here??? return { data: void 0, errors: error }; diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -111,7 +111,7 @@ interface InternalState<TData, TVariables extends OperationVariables> { * @returns Query result object */ export function useQuery< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( query: DocumentNode | TypedDocumentNode<TData, TVariables>, @@ -125,10 +125,7 @@ export function useQuery< )(query, options); } -function useQuery_< - TData = any, - TVariables extends OperationVariables = OperationVariables, ->( +function useQuery_<TData, TVariables extends OperationVariables>( query: DocumentNode | TypedDocumentNode<TData, TVariables>, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> ) { @@ -144,10 +141,7 @@ function useQuery_< ); } -function useInternalState< - TData = any, - TVariables extends OperationVariables = OperationVariables, ->( +function useInternalState<TData, TVariables extends OperationVariables>( client: ApolloClient, query: DocumentNode | TypedDocumentNode<any, any>, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>>, @@ -197,10 +191,7 @@ function useInternalState< return internalState; } -function useQueryInternals< - TData = any, - TVariables extends OperationVariables = OperationVariables, ->( +function useQueryInternals<TData, TVariables extends OperationVariables>( query: DocumentNode | TypedDocumentNode<TData, TVariables>, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> ) { @@ -253,8 +244,8 @@ function useQueryInternals< } function useObservableSubscriptionResult< - TData = any, - TVariables extends OperationVariables = OperationVariables, + TData, + TVariables extends OperationVariables, >( resultData: InternalResult<TData, TVariables>, observable: ObservableQuery<TData, TVariables>, @@ -374,8 +365,8 @@ function useRegisterSSRObservable( // this hook is not compatible with any rules of React, and there's no good way to rewrite it. // it should stay a separate hook that will not be optimized by the compiler function useResubscribeIfNecessary< - TData = any, - TVariables extends OperationVariables = OperationVariables, + TData, + TVariables extends OperationVariables, >( /** this hook will mutate properties on `resultData` */ resultData: InternalResult<TData, TVariables>, @@ -417,8 +408,8 @@ function useResubscribeIfNecessary< * but the `observable` might differ between calls to `make`. 
*/ function createMakeWatchQueryOptions< - TData = any, - TVariables extends OperationVariables = OperationVariables, + TData, + TVariables extends OperationVariables, >( client: ApolloClient, query: DocumentNode | TypedDocumentNode<TData, TVariables>, diff --git a/src/react/hooks/useSubscription.ts b/src/react/hooks/useSubscription.ts --- a/src/react/hooks/useSubscription.ts +++ b/src/react/hooks/useSubscription.ts @@ -109,7 +109,7 @@ import { useSyncExternalStore } from "./useSyncExternalStore.js"; * @returns Query result object */ export function useSubscription< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( subscription: DocumentNode | TypedDocumentNode<TData, TVariables>, @@ -317,7 +317,7 @@ export function useSubscription< } function createSubscription< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( client: ApolloClient, diff --git a/src/react/internal/cache/FragmentReference.ts b/src/react/internal/cache/FragmentReference.ts --- a/src/react/internal/cache/FragmentReference.ts +++ b/src/react/internal/cache/FragmentReference.ts @@ -5,7 +5,7 @@ import type { WatchFragmentOptions, WatchFragmentResult, } from "@apollo/client/cache"; -import type { ApolloClient } from "@apollo/client/core"; +import type { ApolloClient, OperationVariables } from "@apollo/client/core"; import type { MaybeMasked } from "@apollo/client/masking"; import type { PromiseWithState } from "@apollo/client/utilities"; import { @@ -25,7 +25,7 @@ interface FragmentReferenceOptions { export class FragmentReference< TData = unknown, - TVariables = Record<string, unknown>, + TVariables extends OperationVariables = OperationVariables, > { public readonly observable: Observable<WatchFragmentResult<TData>>; public readonly key: FragmentKey = {}; @@ -172,14 +172,14 @@ export class FragmentReference< ); } - private getDiff<TData, TVariables>( + private getDiff<TData, TVariables extends OperationVariables>( client: ApolloClient, options: WatchFragmentOptions<TData, TVariables> & { from: string } ) { const { cache } = client; const { from, fragment, fragmentName } = options; - const diff = cache.diff({ + const diff = cache.diff<TData, TVariables>({ ...options, query: cache["getFragmentDoc"](fragment, fragmentName), returnPartialData: true, diff --git a/src/react/internal/cache/SuspenseCache.ts b/src/react/internal/cache/SuspenseCache.ts --- a/src/react/internal/cache/SuspenseCache.ts +++ b/src/react/internal/cache/SuspenseCache.ts @@ -3,6 +3,7 @@ import { Trie } from "@wry/trie"; import type { ApolloClient, ObservableQuery, + OperationVariables, WatchFragmentOptions, } from "@apollo/client/core"; @@ -34,7 +35,7 @@ export class SuspenseCache { this.options = options; } - getQueryRef<TData = any>( + getQueryRef<TData = unknown>( cacheKey: CacheKey, createObservable: () => ObservableQuery<TData> ) { @@ -54,7 +55,7 @@ export class SuspenseCache { return ref.current; } - getFragmentRef<TData, TVariables>( + getFragmentRef<TData, TVariables extends OperationVariables>( cacheKey: FragmentCacheKey, client: ApolloClient, options: WatchFragmentOptions<TData, TVariables> & { from: string } diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -50,7 +50,7 @@ export type CommonOptions<TOptions> = TOptions & { export interface BaseQueryOptions< TVariables extends OperationVariables = OperationVariables, - TData = any, + TData = unknown, > extends 
SharedWatchQueryOptions<TVariables, TData> { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#ssr:member} */ ssr?: boolean; @@ -61,7 +61,7 @@ export interface BaseQueryOptions< } export interface QueryFunctionOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > extends BaseQueryOptions<TVariables, TData> { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#skip:member} */ @@ -116,7 +116,7 @@ export interface ObservableQueryFields< } export interface QueryResult< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > extends ObservableQueryFields<TData, TVariables> { /** {@inheritDoc @apollo/client!QueryResultDocumentation#client:member} */ @@ -138,7 +138,7 @@ export interface QueryResult< } export interface QueryDataOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > extends QueryFunctionOptions<TData, TVariables> { children?: (result: QueryResult<TData, TVariables>) => ReactTypes.ReactNode; @@ -147,7 +147,7 @@ export interface QueryDataOptions< } export interface QueryHookOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > extends QueryFunctionOptions<TData, TVariables> {} @@ -252,7 +252,7 @@ export type RefetchQueriesFunction = ( ) => InternalRefetchQueriesInclude; export interface BaseMutationOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -264,14 +264,17 @@ export interface BaseMutationOptions< /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#onCompleted:member} */ onCompleted?: ( data: MaybeMasked<TData>, - clientOptions?: BaseMutationOptions + clientOptions?: BaseMutationOptions<TData, TVariables, TContext, TCache> ) => void; /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#onError:member} */ - onError?: (error: ErrorLike, clientOptions?: BaseMutationOptions) => void; + onError?: ( + error: ErrorLike, + clientOptions?: BaseMutationOptions<TData, TVariables, TContext, TCache> + ) => void; } export interface MutationFunctionOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -280,7 +283,7 @@ export interface MutationFunctionOptions< mutation?: DocumentNode | TypedDocumentNode<TData, TVariables>; } -export interface MutationResult<TData = any> { +export interface MutationResult<TData = unknown> { /** {@inheritDoc @apollo/client!MutationResultDocumentation#data:member} */ data?: MaybeMasked<TData> | null; /** {@inheritDoc @apollo/client!MutationResultDocumentation#error:member} */ @@ -296,7 +299,7 @@ export interface MutationResult<TData = any> { } export declare type MutationFunction< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -305,14 +308,14 @@ export declare type MutationFunction< ) => Promise<FetchResult<MaybeMasked<TData>>>; export interface MutationHookOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, > extends BaseMutationOptions<TData, TVariables, TContext, TCache> {} export interface MutationDataOptions< - TData = any, + TData = unknown, TVariables = OperationVariables, TContext = DefaultContext, TCache extends ApolloCache = ApolloCache, @@ -336,18 +339,18 @@ 
export type MutationTuple< /* Subscription types */ -export interface OnDataOptions<TData = any> { +export interface OnDataOptions<TData = unknown> { client: ApolloClient; data: SubscriptionResult<TData>; } -export interface OnSubscriptionDataOptions<TData = any> { +export interface OnSubscriptionDataOptions<TData = unknown> { client: ApolloClient; subscriptionData: SubscriptionResult<TData>; } export interface BaseSubscriptionOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > { /** {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#variables:member} */ @@ -385,7 +388,10 @@ export interface BaseSubscriptionOptions< ignoreResults?: boolean; } -export interface SubscriptionResult<TData = any, TVariables = any> { +export interface SubscriptionResult< + TData = unknown, + TVariables = OperationVariables, +> { /** {@inheritDoc @apollo/client!SubscriptionResultDocumentation#loading:member} */ loading: boolean; /** {@inheritDoc @apollo/client!SubscriptionResultDocumentation#data:member} */ @@ -401,7 +407,7 @@ export interface SubscriptionResult<TData = any, TVariables = any> { } export interface SubscriptionHookOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > extends BaseSubscriptionOptions<TData, TVariables> {} @@ -409,7 +415,7 @@ export interface SubscriptionHookOptions< * @deprecated This type is not used anymore. It will be removed in the next major version of Apollo Client */ export interface SubscriptionDataOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, > extends BaseSubscriptionOptions<TData, TVariables> { subscription: DocumentNode | TypedDocumentNode<TData, TVariables>; diff --git a/src/utilities/types/NoInfer.ts b/src/utilities/types/NoInfer.ts --- a/src/utilities/types/NoInfer.ts +++ b/src/utilities/types/NoInfer.ts @@ -6,7 +6,7 @@ This type was first suggested [in this Github discussion](https://github.com/mic Example usage: ```ts export function useQuery< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables, >( query: DocumentNode | TypedDocumentNode<TData, TVariables>,
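The same `unknown` defaults run through the React hooks touched above (`useQuery`, `useMutation`, `useSubscription`, `useFragment`). A short sketch, assuming the hook entry point used in these diffs, of how a `TypedDocumentNode` keeps `useQuery` results typed without explicit type arguments; `HelloQuery` and `HELLO_QUERY` are made up for illustration:

```ts
import { TypedDocumentNode } from "@apollo/client/core";
import { useQuery } from "@apollo/client/react/hooks";
import { gql } from "graphql-tag";

// Hypothetical operation type, used only for this sketch.
interface HelloQuery {
  hello: string;
}

const HELLO_QUERY: TypedDocumentNode<HelloQuery, Record<string, never>> = gql`
  query Hello {
    hello
  }
`;

function Greeting(): string | null {
  // With TData defaulting to unknown, an untyped DocumentNode would leave
  // `data` as unknown; the TypedDocumentNode above keeps it typed.
  const { data, loading } = useQuery(HELLO_QUERY);
  if (loading || !data) return null;
  return data.hello;
}
```

The test diffs that follow take the other route: call sites that leaned on the old implicit `any` now opt into it explicitly, e.g. `client.query<any>({ query })` and `useQuery<any>(...)`.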
diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -2785,7 +2785,7 @@ describe("ApolloClient", () => { client.setLink(newLink); - const { data } = await client.query({ + const { data } = await client.query<any>({ query: gql` { widgets diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -696,7 +696,7 @@ describe("client", () => { cache: new InMemoryCache(), }); - return client.query({ query }).then((result: FormattedExecutionResult) => { + return client.query({ query }).then((result) => { expect(result.data).toEqual(data); }); }); diff --git a/src/__tests__/dataMasking.ts b/src/__tests__/dataMasking.ts --- a/src/__tests__/dataMasking.ts +++ b/src/__tests__/dataMasking.ts @@ -12,11 +12,12 @@ import { gql, InMemoryCache, NetworkStatus, + OperationVariables, Reference, TypedDocumentNode, } from "@apollo/client/core"; import { CombinedGraphQLErrors } from "@apollo/client/errors"; -import { MaskedDocumentNode } from "@apollo/client/masking"; +import { MaskedDocumentNode, Unmasked } from "@apollo/client/masking"; import { MockedResponse, MockLink, @@ -5889,7 +5890,7 @@ describe("client.mutate", () => { }); class TestCache extends ApolloCache { - public diff<T>(query: Cache.DiffOptions): DataProxy.DiffResult<T> { + public diff<T>(query: Cache.DiffOptions<T>): DataProxy.DiffResult<T> { return { result: null, complete: false }; } @@ -5905,9 +5906,9 @@ class TestCache extends ApolloCache { transaction(this); } - public read<T, TVariables = any>( - query: Cache.ReadOptions<TVariables> - ): T | null { + public read<T = unknown, TVariables = OperationVariables>( + query: Cache.ReadOptions<TVariables, T> + ): Unmasked<T> | null { return null; } @@ -5921,11 +5922,13 @@ class TestCache extends ApolloCache { return this; } - public watch(watch: Cache.WatchOptions): () => void { + public watch<T, TVariables>( + watch: Cache.WatchOptions<T, TVariables> + ): () => void { return function () {}; } - public write<TResult = any, TVariables = any>( + public write<TResult = unknown, TVariables = OperationVariables>( _: Cache.WriteOptions<TResult, TVariables> ): Reference | undefined { return; diff --git a/src/__tests__/local-state/export.ts b/src/__tests__/local-state/export.ts --- a/src/__tests__/local-state/export.ts +++ b/src/__tests__/local-state/export.ts @@ -159,7 +159,7 @@ describe("@client @export tests", () => { const { data } = await client.query({ query }); - expect({ ...data }).toMatchObject({ + expect(data).toMatchObject({ currentAuthor: testAuthor, postCount: testPostCount, }); diff --git a/src/__tests__/local-state/general.ts b/src/__tests__/local-state/general.ts --- a/src/__tests__/local-state/general.ts +++ b/src/__tests__/local-state/general.ts @@ -45,7 +45,7 @@ describe("General functionality", () => { }); return client.query({ query }).then(({ data }) => { - expect({ ...data }).toMatchObject({ field: 1 }); + expect(data).toMatchObject({ field: 1 }); }); }); @@ -95,7 +95,7 @@ describe("General functionality", () => { }); return client.query({ query }).then(({ data }) => { - expect({ ...data }).toMatchObject({ field: 1 }); + expect(data).toMatchObject({ field: 1 }); }); }); @@ -159,14 +159,14 @@ describe("General functionality", () => { return client .query({ query }) .then(({ data }) => { - expect({ ...data }).toMatchObject({ field: 1 }); + expect(data).toMatchObject({ field: 1 }); expect(count).toBe(1); }) .then(() => client 
.query({ query, fetchPolicy: "network-only" }) .then(({ data }) => { - expect({ ...data }).toMatchObject({ field: 1 }); + expect(data).toMatchObject({ field: 1 }); expect(count).toBe(2); }) ); @@ -245,7 +245,7 @@ describe("Cache manipulation", () => { client .query({ query }) - .then(({ data }) => expect({ ...data }).toMatchObject({ field: "yo" })); + .then(({ data }) => expect(data).toMatchObject({ field: "yo" })); } ); @@ -281,7 +281,7 @@ describe("Cache manipulation", () => { .mutate({ mutation }) .then(() => client.query({ query })) .then(({ data }) => { - expect({ ...data }).toMatchObject({ field: 1 }); + expect(data).toMatchObject({ field: 1 }); }); }); @@ -363,13 +363,13 @@ describe("Cache manipulation", () => { return client .mutate({ mutation, variables: { id: "1234" } }) .then(({ data }) => { - expect({ ...data }).toEqual({ + expect(data).toEqual({ start: { field: "1234", __typename: "Field" }, }); }) .then(() => client.query({ query })) .then(({ data }) => { - expect({ ...data }).toMatchObject({ field: "1234" }); + expect(data).toMatchObject({ field: "1234" }); }); }); @@ -506,7 +506,7 @@ describe("Cache manipulation", () => { }, }); const stream = new ObservableStream( - client.watchQuery({ query, variables: { id: entityId } }) + client.watchQuery<any>({ query, variables: { id: entityId } }) ); { @@ -689,7 +689,7 @@ describe("Sample apps", () => { }; client.addResolvers(resolvers); - const stream = new ObservableStream(client.watchQuery({ query })); + const stream = new ObservableStream(client.watchQuery<any>({ query })); { const { data } = await stream.takeNext(); @@ -1123,7 +1123,7 @@ describe("Combining client and server state/operations", () => { }, }); - await client.mutate({ + await client.mutate<any>({ mutation, update(proxy, { data: { updateUser } }) { proxy.writeQuery({ diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -212,7 +212,7 @@ describe("mutation results", () => { await firstValueFrom(from(obsQuery)); await client.mutate({ mutation }); - const newResult = await client.query({ query }); + const newResult = await client.query<any>({ query }); expect(newResult.data.todoList.todos[0].completed).toBe(true); }); @@ -528,7 +528,7 @@ describe("mutation results", () => { }); return client - .mutate({ + .mutate<any>({ mutation, update( cache, @@ -606,7 +606,7 @@ describe("mutation results", () => { }); return client - .mutate({ + .mutate<any>({ mutation, keepRootFields: true, update( @@ -687,7 +687,7 @@ describe("mutation results", () => { }); return client - .mutate({ + .mutate<any>({ mutation, fetchPolicy: "no-cache", update( @@ -776,7 +776,7 @@ describe("mutation results", () => { }, }); - const newResult = await client.query({ query }); + const newResult = await client.query<any>({ query }); // There should be one more todo item than before expect(newResult.data.todoList.todos.length).toBe(4); @@ -840,7 +840,7 @@ describe("mutation results", () => { }, }, }); - const newResult = await client.query({ query }); + const newResult = await client.query<any>({ query }); // There should be one more todo item than before expect(newResult.data.todoList.todos.length).toBe(4); @@ -1384,7 +1384,7 @@ describe("mutation results", () => { }, }); - const newResult = await client.query({ query }); + const newResult = await client.query<any>({ query }); // There should be one more todo item than before expect(newResult.data.todoList.todos.length).toBe(4); @@ -1466,7 +1466,7 
@@ describe("mutation results", () => { }); }, }); - const newResult = await client.query({ query }); + const newResult = await client.query<any>({ query }); // There should be one more todo item than before expect(newResult.data.todoList.todos.length).toBe(4); diff --git a/src/__tests__/optimistic.ts b/src/__tests__/optimistic.ts --- a/src/__tests__/optimistic.ts +++ b/src/__tests__/optimistic.ts @@ -979,7 +979,7 @@ describe("optimistic mutation results", () => { await promise; - const result = await client.query({ query }); + const result = await client.query<any>({ query }); stream.unsubscribe(); @@ -1393,7 +1393,7 @@ describe("optimistic mutation results", () => { // wrap the QueryObservable with an rxjs observable const promise = lastValueFrom( - client.watchQuery({ query }).pipe( + client.watchQuery<any>({ query }).pipe( map((value) => value.data.todoList.todos), take(5), toArray() @@ -1543,7 +1543,7 @@ describe("optimistic mutation results", () => { "Optimistically generated" ); await promise; - const newResult = await client.query({ query }); + const newResult = await client.query<any>({ query }); stream.unsubscribe(); // There should be one more todo item than before @@ -1839,7 +1839,7 @@ describe("optimistic mutation results", () => { }); const promise = lastValueFrom( - client.watchQuery({ query }).pipe( + client.watchQuery<any>({ query }).pipe( map((value) => value.data.todoList.todos), take(5), toArray() @@ -1900,7 +1900,7 @@ describe("optimistic mutation results", () => { ), }); - const query = gql` + const query: TypedDocumentNode<Data> = gql` query { items { text @@ -1920,7 +1920,7 @@ describe("optimistic mutation results", () => { type Data = { items: Item[] }; function append(cache: ApolloCache, item: Item) { - const data = cache.readQuery<Data>({ query }); + const data = cache.readQuery({ query }); cache.writeQuery({ query, data: { diff --git a/src/cache/core/__tests__/cache.ts b/src/cache/core/__tests__/cache.ts --- a/src/cache/core/__tests__/cache.ts +++ b/src/cache/core/__tests__/cache.ts @@ -2,6 +2,7 @@ import { expectTypeOf } from "expect-type"; import { gql } from "graphql-tag"; import { Cache, DataProxy } from "@apollo/client/cache"; +import { OperationVariables, Unmasked } from "@apollo/client/core"; import { Reference } from "../../../utilities/graphql/storeUtils.js"; import { ApolloCache } from "../cache.js"; @@ -27,9 +28,9 @@ class TestCache extends ApolloCache { transaction(this); } - public read<T, TVariables = any>( - query: Cache.ReadOptions<TVariables> - ): T | null { + public read<T = unknown, TVariables = OperationVariables>( + query: Cache.ReadOptions<TVariables, T> + ): Unmasked<T> | null { return null; } @@ -48,11 +49,13 @@ class TestCache extends ApolloCache { return this; } - public watch(watch: Cache.WatchOptions): () => void { + public watch<T = unknown, TVariables = OperationVariables>( + watch: Cache.WatchOptions<T, TVariables> + ): () => void { return function () {}; } - public write<TResult = any, TVariables = any>( + public write<TResult = unknown, TVariables = OperationVariables>( _: Cache.WriteOptions<TResult, TVariables> ): Reference | undefined { return; diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -1403,7 +1403,7 @@ describe("Cache", () => { name: "Ben Newman", }); - cache.updateFragment( + cache.updateFragment<any>( { id: bnId, fragment: usernameFragment, diff --git 
a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -5,6 +5,7 @@ import { ApolloClient, DocumentNode, NetworkStatus, + OperationVariables, Reference, StoreObject, TypedDocumentNode, @@ -1194,7 +1195,10 @@ describe("type policies", function () { }, }; - function check<TData extends typeof data, TVars>( + function check< + TData extends typeof data, + TVars extends OperationVariables, + >( query: DocumentNode | TypedDocumentNode<TData, TVars>, variables?: TVars ) { diff --git a/src/cache/inmemory/__tests__/writeToStore.ts b/src/cache/inmemory/__tests__/writeToStore.ts --- a/src/cache/inmemory/__tests__/writeToStore.ts +++ b/src/cache/inmemory/__tests__/writeToStore.ts @@ -2895,7 +2895,14 @@ describe("writing to the store", () => { }, }); - const query = gql` + type Data = { + counter: { + __typename: "Counter"; + count: number; + }; + }; + + const query: TypedDocumentNode<Data> = gql` query { counter { count @@ -2903,14 +2910,14 @@ describe("writing to the store", () => { } `; - const results: number[] = []; + const results: Data[] = []; const promise = new Promise<void>((resolve) => { cache.watch({ query, optimistic: true, callback(diff) { - results.push(diff.result); + results.push(diff.result as Data); expect(diff.result).toEqual({ counter: { __typename: "Counter", diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -3378,7 +3378,7 @@ describe("ObservableQuery", () => { cache, }); - const observable = client.watchQuery(queryOptions); + const observable = client.watchQuery<any>(queryOptions); const values: any[] = []; return new Promise<any[]>((resolve, reject) => { diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -1294,7 +1294,7 @@ describe("useMutation Hook", () => { }) ); - const mutation = gql` + const mutation: TypedDocumentNode<any> = gql` mutation DoSomething { doSomething { time @@ -2677,8 +2677,8 @@ describe("useMutation Hook", () => { ); const Test = () => { - const { data } = useQuery(QUERY_1); - const [mutate] = useMutation(MUTATION_1, { + const { data } = useQuery<any>(QUERY_1); + const [mutate] = useMutation<any>(MUTATION_1, { awaitRefetchQueries: true, refetchQueries: [QUERY_1], }); @@ -2909,7 +2909,7 @@ describe("useMutation Hook", () => { }); const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION_DEFER, { update }), + () => useMutation<any>(CREATE_TODO_MUTATION_DEFER, { update }), { wrapper: ({ children }) => ( <ApolloProvider client={client}>{children}</ApolloProvider> diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -892,7 +892,7 @@ describe("useQuery Hook", () => { setName = setName1; return [ useQuery(query, { variables: { name } }), - useMutation(mutation, { + useMutation<any>(mutation, { update(cache, { data }) { cache.writeQuery({ query, @@ -4559,7 +4559,7 @@ describe("useQuery Hook", () => { using _disabledAct = disableActEnvironment(); const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( - () => useQuery(query, { variables: { limit: 2 } }), + () 
=> useQuery<any>(query, { variables: { limit: 2 } }), { wrapper } ); @@ -4628,7 +4628,7 @@ describe("useQuery Hook", () => { const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( () => - useQuery(query, { + useQuery<any>(query, { variables: { limit: 2 }, notifyOnNetworkStatusChange: true, }), @@ -7263,7 +7263,7 @@ describe("useQuery Hook", () => { const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( () => ({ - mutation: useMutation(mutation, { + mutation: useMutation<any>(mutation, { optimisticResponse: { addCar: carData }, update(cache, { data }) { cache.modify({ @@ -9174,7 +9174,10 @@ describe("useQuery Hook", () => { // TODO: See if we can rewrite this with renderHookToSnapshotStream and // check output of hook to ensure its a stable object it("should handle a simple query", async () => { - const query = gql` + const query: TypedDocumentNode< + { hello: string }, + Record<string, never> + > = gql` { hello } @@ -9186,7 +9189,11 @@ describe("useQuery Hook", () => { }, ]; - const Component = ({ query }: any) => { + const Component = ({ + query, + }: { + query: TypedDocumentNode<{ hello: string }, Record<string, never>>; + }) => { const [counter, setCounter] = useState(0); const result = useQuery(query); @@ -9206,7 +9213,7 @@ describe("useQuery Hook", () => { return ( <div> - {result.data.hello} + {result.data!.hello} {counter} </div> ); diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -9270,7 +9270,7 @@ describe("useSuspenseQuery", () => { partial: false, }); - cache.updateQuery({ query }, (data) => ({ + cache.updateQuery<any>({ query }, (data) => ({ hero: { ...data.hero, name: "C3PO", diff --git a/src/react/ssr/__tests__/useQuery.test.tsx b/src/react/ssr/__tests__/useQuery.test.tsx --- a/src/react/ssr/__tests__/useQuery.test.tsx +++ b/src/react/ssr/__tests__/useQuery.test.tsx @@ -1,11 +1,10 @@ /** @jest-environment node */ import type { Trie } from "@wry/trie"; -import { DocumentNode } from "graphql"; import { gql } from "graphql-tag"; import React from "react"; import { InMemoryCache } from "@apollo/client/cache"; -import { ApolloClient } from "@apollo/client/core"; +import { ApolloClient, TypedDocumentNode } from "@apollo/client/core"; import { ApolloProvider, getApolloContext } from "@apollo/client/react/context"; import { useApolloClient, useQuery } from "@apollo/client/react/hooks"; import { renderToStringWithData } from "@apollo/client/react/ssr"; @@ -13,7 +12,7 @@ import { MockedResponse, mockSingleLink } from "@apollo/client/testing"; import { MockedProvider } from "@apollo/client/testing/react"; describe("useQuery Hook SSR", () => { - const CAR_QUERY: DocumentNode = gql` + const CAR_QUERY: TypedDocumentNode<typeof CAR_RESULT_DATA> = gql` query { cars { make @@ -48,7 +47,7 @@ describe("useQuery Hook SSR", () => { const { loading, data } = useQuery(CAR_QUERY); if (!loading) { expect(data).toEqual(CAR_RESULT_DATA); - const { make, model, vin } = data.cars[0]; + const { make, model, vin } = data!.cars[0]; return ( <div> {make}, {model}, {vin} @@ -96,7 +95,7 @@ describe("useQuery Hook SSR", () => { expect(loading).toBeTruthy(); if (!loading) { - const { make } = data.cars[0]; + const { make } = data!.cars[0]; return <div>{make}</div>; } return null; @@ -220,7 +219,7 @@ describe("useQuery Hook SSR", () => { if (!loading) { 
expect(data).toEqual(CAR_RESULT_DATA); - const { make, model, vin } = data.cars[0]; + const { make, model, vin } = data!.cars[0]; return ( <div> {make}, {model}, {vin} @@ -263,7 +262,7 @@ describe("useQuery Hook SSR", () => { }, }); - const query = gql` + const query: TypedDocumentNode<typeof initialData> = gql` query GetSearchResults { getSearchResults @client { locale @@ -318,7 +317,7 @@ describe("useQuery Hook SSR", () => { getSearchResults: { pagination: { pageLimit }, }, - } = data; + } = data!; return <div>{pageLimit}</div>; } return null; @@ -360,7 +359,7 @@ describe("useQuery Hook SSR", () => { React.useContext(getApolloContext()).renderPromises!["queryInfoTrie"]; if (!loading) { expect(data).toEqual(CAR_RESULT_DATA); - const { make, model, vin } = data.cars[0]; + const { make, model, vin } = data!.cars[0]; return ( <div> {make}, {model}, {vin} diff --git a/src/testing/react/__tests__/MockedProvider.test.tsx b/src/testing/react/__tests__/MockedProvider.test.tsx --- a/src/testing/react/__tests__/MockedProvider.test.tsx +++ b/src/testing/react/__tests__/MockedProvider.test.tsx @@ -1210,7 +1210,7 @@ describe("@client testing", () => { }); function Component() { - const { loading, data } = useQuery(gql` + const { loading, data } = useQuery<any>(gql` { networkStatus @client { isOnline @@ -1254,7 +1254,7 @@ describe("@client testing", () => { }); function Component() { - const { loading, data } = useQuery(gql` + const { loading, data } = useQuery<any>(gql` { networkStatus @client { isOnline
[4.0] Default `TData` types in query APIs to `unknown` instead of `any`

Switching from `any` to `unknown` should provide better type safety in cases where we don't know the type of `TData`. Using `any` is largely considered a bad practice if you can avoid it.
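The problem statement above argues for `unknown` over `any` as the `TData` default. The following is a minimal illustrative sketch, not Apollo Client's real hook signatures: it uses two stub functions whose only difference is the generic default, to show the type-safety gap the change closes.

// Minimal sketch, not Apollo Client's real hook signatures: two stub "hooks"
// whose only difference is the default for TData, showing why `unknown` is
// safer than `any`.
type QueryResult<TData> = { data?: TData };

function useQueryAnyDefault<TData = any>(_query: string): QueryResult<TData> {
  return { data: undefined }; // stub; a real hook would fetch and cache
}

function useQueryUnknownDefault<TData = unknown>(
  _query: string
): QueryResult<TData> {
  return { data: undefined }; // stub
}

const loose = useQueryAnyDefault("{ hello }");
// With `any` this compiles even though no result type was ever declared,
// so typos and wrong shape assumptions slip through silently:
console.log(loose.data?.helo);

const strict = useQueryUnknownDefault("{ hello }");
// With `unknown` the same access is a compile error until the caller narrows
// the value or supplies TData explicitly:
// console.log(strict.data?.hello); // error: 'strict.data' is of type 'unknown'
console.log(strict.data === undefined);

const typed = useQueryUnknownDefault<{ hello: string }>("{ hello }");
console.log(typed.data?.hello);

With `unknown`, the compiler forces callers either to pass `TData` or to narrow the value before using it, which is the extra safety the switch is after.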
2025-03-17T21:36:21Z
4
apollographql/apollo-client
12,497
apollographql__apollo-client-12497
[ "12483" ]
d3383033d306b7d66e90f5f3170c24453cd76464
diff --git a/src/config/jest/areCombinedGraphQLErrorsEqual.ts b/src/config/jest/areCombinedGraphQLErrorsEqual.ts --- a/src/config/jest/areCombinedGraphQLErrorsEqual.ts +++ b/src/config/jest/areCombinedGraphQLErrorsEqual.ts @@ -12,7 +12,9 @@ export const areCombinedGraphQLErrorsEqual: Tester = function ( if (isACombinedGraphQLErrors && isBCombinedGraphQLErrors) { return ( - a.message === b.message && this.equals(a.errors, b.errors, customTesters) + a.message === b.message && + this.equals(a.errors, b.errors, customTesters) && + this.equals(a.data, b.data, customTesters) ); } else if (isACombinedGraphQLErrors === isBCombinedGraphQLErrors) { return undefined; diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -339,9 +339,7 @@ export class QueryManager { mergeMap((result) => { const hasErrors = graphQLResultHasError(result); if (hasErrors && errorPolicy === "none") { - throw new CombinedGraphQLErrors( - getGraphQLErrorsFromResult(result) - ); + throw new CombinedGraphQLErrors(result); } if (mutationStoreValue) { @@ -401,9 +399,7 @@ export class QueryManager { }; if (graphQLResultHasError(storeResult)) { - result.error = new CombinedGraphQLErrors( - getGraphQLErrorsFromResult(storeResult) - ); + result.error = new CombinedGraphQLErrors(storeResult); } if (storeResult.extensions) { @@ -1065,7 +1061,7 @@ export class QueryManager { }; if (graphQLResultHasError(rawResult)) { - result.error = new CombinedGraphQLErrors(rawResult.errors!); + result.error = new CombinedGraphQLErrors(rawResult); } else if (graphQLResultHasProtocolErrors(rawResult)) { result.error = rawResult.extensions[PROTOCOL_ERRORS_SYMBOL]; // Don't emit protocol errors added by HttpLink @@ -1262,7 +1258,7 @@ export class QueryManager { queryInfo.resetLastWrite(); queryInfo.reset(); // Throwing here effectively calls observer.error. - throw new CombinedGraphQLErrors(graphQLErrors); + throw new CombinedGraphQLErrors(result); } // Use linkDocument rather than queryInfo.document so the // operation/fragments used to write the result are the same as the @@ -1291,7 +1287,7 @@ export class QueryManager { } if (hasErrors && errorPolicy !== "ignore") { - aqr.error = new CombinedGraphQLErrors(graphQLErrors); + aqr.error = new CombinedGraphQLErrors(result); aqr.networkStatus = NetworkStatus.error; } diff --git a/src/errors/CombinedGraphQLErrors.ts b/src/errors/CombinedGraphQLErrors.ts --- a/src/errors/CombinedGraphQLErrors.ts +++ b/src/errors/CombinedGraphQLErrors.ts @@ -1,5 +1,8 @@ import type { GraphQLFormattedError } from "graphql"; +import type { FetchResult } from "@apollo/client/core"; +import { getGraphQLErrorsFromResult } from "@apollo/client/utilities"; + /** * Represents the combined list of GraphQL errors returned from the server in a * GraphQL response. @@ -8,13 +11,19 @@ export class CombinedGraphQLErrors extends Error { /** * The raw list of GraphQL errors returned in a GraphQL response. */ - errors: ReadonlyArray<GraphQLFormattedError>; + readonly errors: ReadonlyArray<GraphQLFormattedError>; + + /** + * Partial data returned in the GraphQL response. 
+ */ + readonly data: Record<string, unknown> | null | undefined; + + constructor(result: FetchResult<unknown>) { + const errors = getGraphQLErrorsFromResult(result); - constructor( - errors: Array<GraphQLFormattedError> | ReadonlyArray<GraphQLFormattedError> - ) { super(formatMessage(errors)); this.errors = errors; + this.data = result.data as Record<string, unknown>; this.name = "CombinedGraphQLErrors"; Object.setPrototypeOf(this, CombinedGraphQLErrors.prototype); diff --git a/src/link/subscriptions/index.ts b/src/link/subscriptions/index.ts --- a/src/link/subscriptions/index.ts +++ b/src/link/subscriptions/index.ts @@ -77,7 +77,9 @@ export class GraphQLWsLink extends ApolloLink { } return observer.error( - new CombinedGraphQLErrors(Array.isArray(err) ? err : [err]) + new CombinedGraphQLErrors({ + errors: Array.isArray(err) ? err : [err], + }) ); }, // casting around a wrong type in graphql-ws, which incorrectly expects `Sink<ExecutionResult>`
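The patch above reworks `CombinedGraphQLErrors` to accept the whole fetch result instead of a bare errors array, so the error can also expose the response's partial `data`. Below is a hedged, self-contained sketch of that shape; it is simplified (the real class in src/errors/CombinedGraphQLErrors.ts takes a `FetchResult<unknown>` and uses `getGraphQLErrorsFromResult` and `formatMessage`), and the class name is changed to make clear it is only an illustration.

// Hedged sketch of the API change shown in the patch above, with simplified
// types and a renamed class so it is not mistaken for the real implementation.
import type { GraphQLFormattedError } from "graphql";

interface ResultLike {
  data?: unknown;
  errors?: ReadonlyArray<GraphQLFormattedError>;
}

class CombinedGraphQLErrorsSketch extends Error {
  readonly errors: ReadonlyArray<GraphQLFormattedError>;
  readonly data: Record<string, unknown> | null | undefined;

  constructor(result: ResultLike) {
    // The real class extracts errors via getGraphQLErrorsFromResult and
    // formats the message via formatMessage; this sketch keeps it simple.
    const errors = result.errors ?? [];
    super(errors.map((e) => e.message).join("\n"));
    this.errors = errors;
    this.data = result.data as Record<string, unknown> | null | undefined;
    this.name = "CombinedGraphQLErrors";
    Object.setPrototypeOf(this, CombinedGraphQLErrorsSketch.prototype);
  }
}

// Call sites move from passing an errors array to passing the result object:
const error = new CombinedGraphQLErrorsSketch({
  data: { currentUser: null },
  errors: [{ message: "User not logged in" }],
});
console.log(error.message, error.data);

This is why the test diffs that follow switch call sites from `new CombinedGraphQLErrors(errors)` to `new CombinedGraphQLErrors({ data, errors })`.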
diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -593,7 +593,7 @@ describe("client", () => { }); await expect(client.query({ query })).rejects.toEqual( - new CombinedGraphQLErrors(errors) + new CombinedGraphQLErrors({ errors }) ); }); @@ -638,7 +638,7 @@ describe("client", () => { }); await expect(client.query({ query })).rejects.toEqual( - new CombinedGraphQLErrors(errors) + new CombinedGraphQLErrors({ data, errors }) ); }); @@ -1950,7 +1950,9 @@ describe("client", () => { await expect(stream).toEmitApolloQueryResult({ data: initialData, - error: new CombinedGraphQLErrors([{ message: "network failure" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "network failure" }], + }), loading: false, networkStatus: NetworkStatus.error, partial: false, @@ -2189,7 +2191,7 @@ describe("client", () => { }); await expect(client.mutate({ mutation })).rejects.toEqual( - new CombinedGraphQLErrors(errors) + new CombinedGraphQLErrors({ data, errors }) ); }); @@ -2205,9 +2207,11 @@ describe("client", () => { } `; const data = { - person: { - firstName: "John", - lastName: "Smith", + newPerson: { + person: { + firstName: "John", + lastName: "Smith", + }, }, }; const errors = [{ message: "Some kind of GraphQL error." }]; @@ -2216,9 +2220,7 @@ describe("client", () => { request: { query: mutation }, result: { errors, - data: { - newPerson: data, - }, + data, }, }), cache: new InMemoryCache(), @@ -2227,8 +2229,8 @@ describe("client", () => { await expect( client.mutate({ mutation, errorPolicy: "all" }) ).resolves.toEqualStrictTyped({ - data: { newPerson: data }, - error: new CombinedGraphQLErrors(errors), + data, + error: new CombinedGraphQLErrors({ data, errors }), }); }); @@ -2782,9 +2784,10 @@ describe("client", () => { client.query({ query, errorPolicy: "all" }) ).resolves.toEqualStrictTyped({ data: { posts: null }, - error: new CombinedGraphQLErrors([ - { message: 'Cannot query field "foo" on type "Post".' }, - ]), + error: new CombinedGraphQLErrors({ + data: { posts: null }, + errors: [{ message: 'Cannot query field "foo" on type "Post".' 
}], + }), }); }); @@ -3771,7 +3774,7 @@ describe("@connection", () => { expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors(errors), + error: new CombinedGraphQLErrors({ errors }), }); }); diff --git a/src/__tests__/dataMasking.ts b/src/__tests__/dataMasking.ts --- a/src/__tests__/dataMasking.ts +++ b/src/__tests__/dataMasking.ts @@ -978,7 +978,17 @@ describe("client.watchQuery", () => { name: null, }, }, - error: new CombinedGraphQLErrors([{ message: "Couldn't get name" }]), + error: new CombinedGraphQLErrors({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: null, + age: 34, + }, + }, + errors: [{ message: "Couldn't get name" }], + }), loading: false, networkStatus: NetworkStatus.error, partial: false, @@ -3832,7 +3842,7 @@ describe("client.query", () => { }); await expect(client.query({ query, errorPolicy: "none" })).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "User not logged in" }]) + new CombinedGraphQLErrors({ errors: [{ message: "User not logged in" }] }) ); }); @@ -3871,7 +3881,10 @@ describe("client.query", () => { expect(result).toEqualStrictTyped({ data: { currentUser: null }, - error: new CombinedGraphQLErrors([{ message: "User not logged in" }]), + error: new CombinedGraphQLErrors({ + data: { currentUser: null }, + errors: [{ message: "User not logged in" }], + }), }); }); @@ -3923,9 +3936,17 @@ describe("client.query", () => { name: "Test User", }, }, - error: new CombinedGraphQLErrors([ - { message: "Could not determine age" }, - ]), + error: new CombinedGraphQLErrors({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: null, + }, + }, + errors: [{ message: "Could not determine age" }], + }), }); }); @@ -4384,7 +4405,10 @@ describe("client.subscribe", () => { await expect(stream).toEmitStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Something went wrong" }]), + error: new CombinedGraphQLErrors({ + data: { addedComment: null }, + errors: [{ message: "Something went wrong" }], + }), }); }); @@ -4428,7 +4452,10 @@ describe("client.subscribe", () => { await expect(stream).toEmitStrictTyped({ data: { addedComment: null }, - error: new CombinedGraphQLErrors([{ message: "Something went wrong" }]), + error: new CombinedGraphQLErrors({ + data: { addedComment: null }, + errors: [{ message: "Something went wrong" }], + }), }); }); @@ -4477,7 +4504,17 @@ describe("client.subscribe", () => { await expect(stream).toEmitStrictTyped({ data: { addedComment: { __typename: "Comment", id: 1 } }, - error: new CombinedGraphQLErrors([{ message: "Could not get author" }]), + error: new CombinedGraphQLErrors({ + data: { + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: null, + }, + }, + errors: [{ message: "Could not get author" }], + }), }); }); @@ -5471,7 +5508,7 @@ describe("client.mutate", () => { await expect( client.mutate({ mutation, errorPolicy: "none" }) ).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "User not logged in" }]) + new CombinedGraphQLErrors({ errors: [{ message: "User not logged in" }] }) ); }); @@ -5529,7 +5566,10 @@ describe("client.mutate", () => { }) ).resolves.toEqualStrictTyped({ data: { updateUser: null }, - error: new CombinedGraphQLErrors([{ message: "User not logged in" }]), + error: new CombinedGraphQLErrors({ + data: { updateUser: null }, + errors: [{ message: "User not logged in" }], + }), }); }); @@ -5598,9 +5638,17 @@ describe("client.mutate", () => { name: "Test User", }, }, - 
error: new CombinedGraphQLErrors([ - { message: "Could not determine age" }, - ]), + error: new CombinedGraphQLErrors({ + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: null, + }, + }, + errors: [{ message: "Could not determine age" }], + }), }); }); diff --git a/src/__tests__/graphqlSubscriptions.ts b/src/__tests__/graphqlSubscriptions.ts --- a/src/__tests__/graphqlSubscriptions.ts +++ b/src/__tests__/graphqlSubscriptions.ts @@ -171,18 +171,21 @@ describe("GraphQL Subscriptions", () => { await expect(stream).toEmitStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([ - { - message: "This is an error", - locations: [ - { - column: 3, - line: 2, - }, - ], - path: ["result"], - }, - ]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [ + { + message: "This is an error", + locations: [ + { + column: 3, + line: 2, + }, + ], + path: ["result"], + }, + ], + }), }); }); @@ -216,18 +219,21 @@ describe("GraphQL Subscriptions", () => { await expect(stream).toEmitStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([ - { - message: "This is an error", - locations: [ - { - column: 3, - line: 2, - }, - ], - path: ["result"], - }, - ]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [ + { + message: "This is an error", + locations: [ + { + column: 3, + line: 2, + }, + ], + path: ["result"], + }, + ], + }), }); link.simulateResult(results[0]); @@ -288,7 +294,10 @@ describe("GraphQL Subscriptions", () => { await expect(stream).toEmitStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "This is an error" }]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "This is an error" }], + }), }); await expect(stream).toComplete(); diff --git a/src/__tests__/local-state/general.ts b/src/__tests__/local-state/general.ts --- a/src/__tests__/local-state/general.ts +++ b/src/__tests__/local-state/general.ts @@ -1179,7 +1179,10 @@ describe("Combining client and server state/operations", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ + data: { user: null }, + errors: [error], + }), loading: false, networkStatus: NetworkStatus.error, partial: true, diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -346,7 +346,12 @@ describe("mutation results", () => { newName: "Hugh Willson", }, }) - ).rejects.toThrow(new CombinedGraphQLErrors([expectedFakeError])); + ).rejects.toThrow( + new CombinedGraphQLErrors({ + data: { newPerson: { __typename: "Person", name: "Hugh Willson" } }, + errors: [expectedFakeError], + }) + ); expect(client.cache.extract()).toMatchSnapshot(); @@ -384,7 +389,10 @@ describe("mutation results", () => { name: "Ellen Shapiro", }, }, - error: new CombinedGraphQLErrors([expectedFakeError]), + error: new CombinedGraphQLErrors({ + data: { newPerson: { __typename: "Person", name: "Ellen Shapiro" } }, + errors: [expectedFakeError], + }), }); expect(client.cache.extract()).toMatchSnapshot(); @@ -529,7 +537,10 @@ describe("mutation results", () => { }) ).resolves.toEqualStrictTyped({ data: { newPerson: null }, - error: new CombinedGraphQLErrors([{ message: "Oops" }]), + error: new CombinedGraphQLErrors({ + data: { newPerson: null }, + errors: [{ message: "Oops" }], + }), extensions: { requestLimit: 10, }, @@ -1143,7 +1154,9 @@ describe("mutation results", () => { }, 
}, }) - ).rejects.toThrow(new CombinedGraphQLErrors([{ message: "mock error" }])); + ).rejects.toThrow( + new CombinedGraphQLErrors({ errors: [{ message: "mock error" }] }) + ); await expect( client.mutate({ @@ -1157,7 +1170,9 @@ describe("mutation results", () => { }, }, }) - ).rejects.toThrow(new CombinedGraphQLErrors([{ message: "mock error" }])); + ).rejects.toThrow( + new CombinedGraphQLErrors({ errors: [{ message: "mock error" }] }) + ); await obsQuery.refetch(); }); @@ -1739,7 +1754,9 @@ describe("mutation results", () => { }); }, }) - ).rejects.toThrow(new CombinedGraphQLErrors([{ message: "mock error" }])); + ).rejects.toThrow( + new CombinedGraphQLErrors({ errors: [{ message: "mock error" }] }) + ); await expect( client.mutate({ @@ -1774,7 +1791,9 @@ describe("mutation results", () => { }); }, }) - ).rejects.toThrow(new CombinedGraphQLErrors([{ message: "mock error" }])); + ).rejects.toThrow( + new CombinedGraphQLErrors({ errors: [{ message: "mock error" }] }) + ); await obsQuery.refetch(); }); diff --git a/src/core/__tests__/ApolloClient/general.test.ts b/src/core/__tests__/ApolloClient/general.test.ts --- a/src/core/__tests__/ApolloClient/general.test.ts +++ b/src/core/__tests__/ApolloClient/general.test.ts @@ -111,9 +111,9 @@ describe("ApolloClient", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([ - { message: "This is an error message." }, - ]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "This is an error message." }], + }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -145,9 +145,9 @@ describe("ApolloClient", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, loading: false, - error: new CombinedGraphQLErrors([ - { message: "This is an error message." }, - ]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "This is an error message." }], + }), networkStatus: 8, partial: true, }); @@ -178,9 +178,10 @@ describe("ApolloClient", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([ - { message: "This is an error message." }, - ]), + error: new CombinedGraphQLErrors({ + data: { allPeople: { people: { name: "Ada Lovelace" } } }, + errors: [{ message: "This is an error message." 
}], + }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -246,7 +247,7 @@ describe("ApolloClient", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([null as any]), + error: new CombinedGraphQLErrors({ errors: [null as any] }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -1748,7 +1749,7 @@ describe("ApolloClient", () => { }); await expect(client.mutate({ mutation })).rejects.toThrow( - new CombinedGraphQLErrors(errors) + new CombinedGraphQLErrors({ errors }) ); }); @@ -2198,7 +2199,7 @@ describe("ApolloClient", () => { }, ]), }).query({ query }) - ).rejects.toEqual(new CombinedGraphQLErrors(graphQLErrors)); + ).rejects.toEqual(new CombinedGraphQLErrors({ errors: graphQLErrors })); }); it("should not empty the store when a non-polling query fails due to a network error", async () => { @@ -2944,7 +2945,9 @@ describe("ApolloClient", () => { partial: false, }); - const expectedError = new CombinedGraphQLErrors(secondResult.errors); + const expectedError = new CombinedGraphQLErrors({ + errors: secondResult.errors, + }); await expect(handle.refetch()).rejects.toThrow(expectedError); await expect(stream).toEmitApolloQueryResult({ diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -81,7 +81,10 @@ describe("ObservableQuery", () => { const error = new GraphQLError("is offline.", undefined, null, null, [ "people_one", ]); - const wrappedError = new CombinedGraphQLErrors([error]); + const wrappedError = new CombinedGraphQLErrors({ + data: dataOne, + errors: [error], + }); describe("reobserve", () => { describe("to change pollInterval", () => { @@ -449,7 +452,7 @@ describe("ObservableQuery", () => { }, { request: { query, variables }, - result: { errors: [error] }, + result: { data: dataOne, errors: [error] }, }, { request: { query, variables }, @@ -468,12 +471,12 @@ describe("ObservableQuery", () => { }); await expect(observable.refetch()).rejects.toThrow( - new CombinedGraphQLErrors([error]) + new CombinedGraphQLErrors({ data: dataOne, errors: [error] }) ); await expect(stream).toEmitApolloQueryResult({ data: dataOne, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ data: dataOne, errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: false, @@ -1028,14 +1031,14 @@ describe("ObservableQuery", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: true, }); expect(observable.getCurrentResult()).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -2300,7 +2303,7 @@ describe("ObservableQuery", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -2308,7 +2311,7 @@ describe("ObservableQuery", () => { expect(observable.getCurrentResult()).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), loading: false, 
networkStatus: NetworkStatus.error, partial: true, @@ -2331,7 +2334,7 @@ describe("ObservableQuery", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -2342,7 +2345,7 @@ describe("ObservableQuery", () => { expect(currentResult).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -2371,14 +2374,14 @@ describe("ObservableQuery", () => { await expect(stream).toEmitApolloQueryResult({ data: dataOne, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ data: dataOne, errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: false, }); expect(observable.getCurrentResult()).toEqualStrictTyped({ data: dataOne, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ data: dataOne, errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: false, @@ -2435,7 +2438,7 @@ describe("ObservableQuery", () => { await expect(stream).toEmitApolloQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ data: dataOne, errors: [error] }), loading: false, networkStatus: NetworkStatus.error, partial: true, diff --git a/src/core/__tests__/equalByQuery.ts b/src/core/__tests__/equalByQuery.ts --- a/src/core/__tests__/equalByQuery.ts +++ b/src/core/__tests__/equalByQuery.ts @@ -290,7 +290,10 @@ describe("equalByQuery", () => { { data: data123 }, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), } ) ).toBe(false); @@ -300,7 +303,10 @@ describe("equalByQuery", () => { query, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), }, { data: data123 } ) @@ -311,11 +317,17 @@ describe("equalByQuery", () => { query, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), }, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), } ) ).toBe(true); @@ -325,11 +337,17 @@ describe("equalByQuery", () => { query, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), }, { data: data123, - error: new CombinedGraphQLErrors([differentError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [differentError], + }), } ) ).toBe(false); @@ -339,11 +357,17 @@ describe("equalByQuery", () => { query, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), }, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), } ) ).toBe(true); @@ -353,11 +377,17 @@ describe("equalByQuery", () => { query, { data: data123, - error: new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: data123, + errors: [oopsError], + }), }, { data: { ...data123, b: 100 }, - error: 
new CombinedGraphQLErrors([oopsError]), + error: new CombinedGraphQLErrors({ + data: { ...data123, b: 100 }, + errors: [oopsError], + }), } ) ).toBe(true); @@ -365,18 +395,30 @@ describe("equalByQuery", () => { expect( equalByQuery( query, - { data: data123, error: new CombinedGraphQLErrors([]) }, - { data: data123, error: new CombinedGraphQLErrors([]) } + { + data: data123, + error: new CombinedGraphQLErrors({ data: data123, errors: [] }), + }, + { + data: data123, + error: new CombinedGraphQLErrors({ data: data123, errors: [] }), + } ) ).toBe(true); expect( equalByQuery( query, - { data: data123, error: new CombinedGraphQLErrors([]) }, + { + data: data123, + error: new CombinedGraphQLErrors({ data: data123, errors: [] }), + }, { data: { ...data123, b: 100 }, - error: new CombinedGraphQLErrors([]), + error: new CombinedGraphQLErrors({ + data: { ...data123, b: 100 }, + errors: [], + }), } ) ).toBe(true); diff --git a/src/link/subscriptions/__tests__/graphqlWsLink.ts b/src/link/subscriptions/__tests__/graphqlWsLink.ts --- a/src/link/subscriptions/__tests__/graphqlWsLink.ts +++ b/src/link/subscriptions/__tests__/graphqlWsLink.ts @@ -169,7 +169,7 @@ describe("GraphQLWSlink", () => { const obs = execute(link, { query: subscription }); await expect(observableToArray(obs)).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "Foo bar." }]) + new CombinedGraphQLErrors({ errors: [{ message: "Foo bar." }] }) ); }); }); diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -2550,7 +2550,7 @@ it("applies `errorPolicy` on next fetch when it changes between renders", async error: null, result: { data: { greeting: "Hello" }, - error: new CombinedGraphQLErrors([{ message: "oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "oops" }] }), networkStatus: NetworkStatus.error, }, }); @@ -3197,7 +3197,7 @@ it("properly handles changing options along with changing `variables`", async () name: "Doctor Strangecache", }, }, - error: new CombinedGraphQLErrors([{ message: "oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "oops" }] }), networkStatus: NetworkStatus.error, }, }); @@ -4817,7 +4817,17 @@ it("masks partial data returned from data on errors with errorPolicy `all`", asy name: null, }, }, - error: new CombinedGraphQLErrors([{ message: "Couldn't get name" }]), + error: new CombinedGraphQLErrors({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: null, + age: 34, + }, + }, + errors: [{ message: "Couldn't get name" }], + }), networkStatus: NetworkStatus.error, }); } @@ -5206,7 +5216,9 @@ describe("refetch", () => { expect(renderedComponents).toStrictEqual(["ErrorFallback"]); expect(snapshot.error).toEqual( - new CombinedGraphQLErrors([{ message: "Something went wrong" }]) + new CombinedGraphQLErrors({ + errors: [{ message: "Something went wrong" }], + }) ); } @@ -5400,9 +5412,9 @@ describe("refetch", () => { name: "Spider-Man", }, }, - error: new CombinedGraphQLErrors([ - { message: "Something went wrong" }, - ]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "Something went wrong" }], + }), networkStatus: NetworkStatus.error, }, }); @@ -5501,9 +5513,16 @@ describe("refetch", () => { name: null, }, }, - error: new CombinedGraphQLErrors([ - { message: "Something went wrong" }, - ]), + error: new CombinedGraphQLErrors({ + data: { + character: { + 
__typename: "Character", + id: "1", + name: null, + }, + }, + errors: [{ message: "Something went wrong" }], + }), networkStatus: NetworkStatus.error, }, }); @@ -5608,7 +5627,10 @@ describe("refetch", () => { expect(renderedComponents).toStrictEqual([ErrorFallback]); expect(snapshot).toEqual({ - error: new CombinedGraphQLErrors([{ message: "Oops couldn't fetch" }]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Oops couldn't fetch" }], + }), result: null, }); } @@ -5629,7 +5651,10 @@ describe("refetch", () => { // TODO: We should reset the snapshot between renders to better capture // the actual result. This makes it seem like the error is rendered, but // in this is just leftover from the previous snapshot. - error: new CombinedGraphQLErrors([{ message: "Oops couldn't fetch" }]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Oops couldn't fetch" }], + }), result: { data: { todo: { id: "1", name: "Clean room", completed: true } }, error: undefined, @@ -5735,7 +5760,10 @@ describe("refetch", () => { expect(renderedComponents).toStrictEqual([ErrorFallback]); expect(snapshot).toEqual({ - error: new CombinedGraphQLErrors([{ message: "Oops couldn't fetch" }]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Oops couldn't fetch" }], + }), result: null, }); } @@ -5753,9 +5781,10 @@ describe("refetch", () => { expect(renderedComponents).toStrictEqual([ErrorFallback]); expect(snapshot).toEqual({ - error: new CombinedGraphQLErrors([ - { message: "Oops couldn't fetch again" }, - ]), + error: new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Oops couldn't fetch again" }], + }), result: null, }); } diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -1695,7 +1695,7 @@ describe("useLazyQuery Hook", () => { } await expect(execute()).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "error 1" }]) + new CombinedGraphQLErrors({ errors: [{ message: "error 1" }] }) ); { @@ -1707,13 +1707,13 @@ describe("useLazyQuery Hook", () => { loading: false, networkStatus: NetworkStatus.error, previousData: undefined, - error: new CombinedGraphQLErrors([{ message: "error 1" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "error 1" }] }), variables: {}, }); } await expect(execute()).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "error 2" }]) + new CombinedGraphQLErrors({ errors: [{ message: "error 2" }] }) ); { @@ -1725,7 +1725,7 @@ describe("useLazyQuery Hook", () => { loading: false, networkStatus: NetworkStatus.error, previousData: undefined, - error: new CombinedGraphQLErrors([{ message: "error 2" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "error 2" }] }), variables: {}, }); } @@ -1788,7 +1788,10 @@ describe("useLazyQuery Hook", () => { await expect(execute()).resolves.toEqualStrictTyped({ data: { currentUser: null }, - error: new CombinedGraphQLErrors([{ message: "Not logged in" }]), + error: new CombinedGraphQLErrors({ + data: { currentUser: null }, + errors: [{ message: "Not logged in" }], + }), }); { @@ -1800,14 +1803,20 @@ describe("useLazyQuery Hook", () => { loading: false, networkStatus: NetworkStatus.error, previousData: undefined, - error: new CombinedGraphQLErrors([{ message: "Not logged in" }]), + error: new CombinedGraphQLErrors({ + data: { currentUser: null }, + errors: [{ message: "Not 
logged in" }], + }), variables: {}, }); } await expect(execute()).resolves.toEqualStrictTyped({ data: { currentUser: null }, - error: new CombinedGraphQLErrors([{ message: "Not logged in 2" }]), + error: new CombinedGraphQLErrors({ + data: { currentUser: null }, + errors: [{ message: "Not logged in 2" }], + }), }); { @@ -1819,7 +1828,10 @@ describe("useLazyQuery Hook", () => { loading: false, networkStatus: NetworkStatus.error, previousData: undefined, - error: new CombinedGraphQLErrors([{ message: "Not logged in 2" }]), + error: new CombinedGraphQLErrors({ + data: { currentUser: null }, + errors: [{ message: "Not logged in 2" }], + }), variables: {}, }); } @@ -2411,7 +2423,7 @@ describe("useLazyQuery Hook", () => { expect(execute).toBe(originalExecute); await expect(execute({ variables: { id: "2" } })).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "Oops" }]) + new CombinedGraphQLErrors({ errors: [{ message: "Oops" }] }) ); { @@ -2419,7 +2431,7 @@ describe("useLazyQuery Hook", () => { expect(result).toEqualLazyQueryResult({ data: { user: { id: "1", name: "John Doe" } }, - error: new CombinedGraphQLErrors([{ message: "Oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "Oops" }] }), called: true, loading: false, networkStatus: NetworkStatus.error, @@ -2446,7 +2458,7 @@ describe("useLazyQuery Hook", () => { expect(result).toEqualLazyQueryResult({ data: { user: { id: "1", name: "John Doe" } }, - error: new CombinedGraphQLErrors([{ message: "Oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "Oops" }] }), called: true, loading: false, networkStatus: NetworkStatus.error, @@ -4271,9 +4283,10 @@ test("applies `errorPolicy` on next fetch when it changes between renders", asyn data: { character: null, }, - error: new CombinedGraphQLErrors([ - { message: "Could not find character 1" }, - ]), + error: new CombinedGraphQLErrors({ + data: { character: null }, + errors: [{ message: "Could not find character 1" }], + }), called: true, loading: false, networkStatus: NetworkStatus.error, diff --git a/src/react/hooks/__tests__/useLoadableQuery.test.tsx b/src/react/hooks/__tests__/useLoadableQuery.test.tsx --- a/src/react/hooks/__tests__/useLoadableQuery.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery.test.tsx @@ -1804,7 +1804,7 @@ it("applies `errorPolicy` on next fetch when it changes between renders", async expect(renderedComponents).not.toContain(ErrorFallback); expect(snapshot.result).toEqual({ data: { greeting: "Hello" }, - error: new CombinedGraphQLErrors([{ message: "oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "oops" }] }), networkStatus: NetworkStatus.error, }); } @@ -2723,7 +2723,9 @@ it("throws errors when errors are returned after calling `refetch`", async () => expect(renderedComponents).toStrictEqual([ErrorFallback]); expect(snapshot.error).toEqual( - new CombinedGraphQLErrors([{ message: "Something went wrong" }]) + new CombinedGraphQLErrors({ + errors: [{ message: "Something went wrong" }], + }) ); } }); @@ -2895,7 +2897,9 @@ it('returns errors after calling `refetch` when errorPolicy is set to "all"', as expect(snapshot.error).toBeNull(); expect(snapshot.result).toEqual({ data: { character: { id: "1", name: "Captain Marvel" } }, - error: new CombinedGraphQLErrors([{ message: "Something went wrong" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "Something went wrong" }], + }), networkStatus: NetworkStatus.error, }); } @@ -2983,7 +2987,10 @@ it('handles partial data results after calling `refetch` 
when errorPolicy is set expect(snapshot.error).toBeNull(); expect(snapshot.result).toEqual({ data: { character: { id: "1", name: null } }, - error: new CombinedGraphQLErrors([{ message: "Something went wrong" }]), + error: new CombinedGraphQLErrors({ + data: { character: { id: "1", name: null } }, + errors: [{ message: "Something went wrong" }], + }), networkStatus: NetworkStatus.error, }); } diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -430,12 +430,18 @@ describe("useMutation Hook", () => { const [createTodo] = getCurrentSnapshot(); await expect(createTodo({ variables })).rejects.toThrow( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]) + new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }) ); expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenLastCalledWith( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), expect.anything() ); }); @@ -483,7 +489,7 @@ describe("useMutation Hook", () => { const [createTodo] = getCurrentSnapshot(); await expect(createTodo({ variables })).rejects.toThrow( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]) + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) ); { @@ -502,7 +508,9 @@ describe("useMutation Hook", () => { expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: CREATE_TODO_ERROR }], + }), loading: false, called: true, }); @@ -555,7 +563,7 @@ describe("useMutation Hook", () => { const [createTodo] = getCurrentSnapshot(); await expect(createTodo({ variables })).rejects.toThrow( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]) + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) ); { @@ -574,7 +582,10 @@ describe("useMutation Hook", () => { expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), loading: false, called: true, }); @@ -628,7 +639,10 @@ describe("useMutation Hook", () => { await expect(createTodo({ variables })).resolves.toEqualStrictTyped({ data: CREATE_TODO_RESULT, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), }); { @@ -647,7 +661,10 @@ describe("useMutation Hook", () => { expect(result).toEqualStrictTyped({ data: CREATE_TODO_RESULT, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), loading: false, called: true, }); @@ -709,7 +726,10 @@ describe("useMutation Hook", () => { await expect(createTodo({ variables })).resolves.toEqualStrictTyped({ data: CREATE_TODO_RESULT, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), }); { @@ -728,7 +748,10 @@ describe("useMutation Hook", () => { 
expect(result).toEqualStrictTyped({ data: CREATE_TODO_RESULT, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), loading: false, called: true, }); @@ -736,7 +759,10 @@ describe("useMutation Hook", () => { expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenLastCalledWith( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + new CombinedGraphQLErrors({ + data: CREATE_TODO_RESULT, + errors: [{ message: CREATE_TODO_ERROR }], + }), expect.anything() ); expect(onCompleted).not.toHaveBeenCalled(); @@ -1473,7 +1499,7 @@ describe("useMutation Hook", () => { await expect( createTodo({ variables, onCompleted, onError }) - ).rejects.toThrow(new CombinedGraphQLErrors(errors)); + ).rejects.toThrow(new CombinedGraphQLErrors({ errors })); { const [, result] = await takeSnapshot(); @@ -1491,7 +1517,7 @@ describe("useMutation Hook", () => { expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors(errors), + error: new CombinedGraphQLErrors({ errors }), loading: false, called: true, }); @@ -1502,7 +1528,7 @@ describe("useMutation Hook", () => { expect(onCompleted).toHaveBeenCalledTimes(0); expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenCalledWith( - new CombinedGraphQLErrors(errors), + new CombinedGraphQLErrors({ errors }), expect.objectContaining({ variables }) ); }); @@ -1552,7 +1578,7 @@ describe("useMutation Hook", () => { const [createTodo] = getCurrentSnapshot(); const onError = jest.fn(); await expect(createTodo({ variables, onError })).rejects.toThrow( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]) + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) ); { @@ -1571,7 +1597,9 @@ describe("useMutation Hook", () => { expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: CREATE_TODO_ERROR }], + }), loading: false, called: true, }); @@ -1579,7 +1607,7 @@ describe("useMutation Hook", () => { expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenCalledWith( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }), expect.objectContaining({ variables }) ); expect(hookOnError).not.toHaveBeenCalled(); @@ -1652,7 +1680,7 @@ describe("useMutation Hook", () => { } await expect(createTodo({ variables })).rejects.toThrow( - new CombinedGraphQLErrors(errors) + new CombinedGraphQLErrors({ errors }) ); { @@ -1671,7 +1699,7 @@ describe("useMutation Hook", () => { expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors(errors), + error: new CombinedGraphQLErrors({ errors }), loading: false, called: true, }); @@ -1683,7 +1711,7 @@ describe("useMutation Hook", () => { expect(onError).toHaveBeenCalledTimes(0); expect(onError1).toHaveBeenCalledTimes(1); expect(onError1).toHaveBeenCalledWith( - new CombinedGraphQLErrors(errors), + new CombinedGraphQLErrors({ errors }), expect.objectContaining({ variables }) ); }); @@ -4000,7 +4028,7 @@ describe("useMutation Hook", () => { ); await expect(promise).rejects.toThrow( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]) + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }) ); { @@ -4008,7 +4036,9 @@ describe("useMutation Hook", () => { 
expect(result).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: CREATE_TODO_ERROR }], + }), loading: false, called: true, }); @@ -4018,7 +4048,7 @@ describe("useMutation Hook", () => { expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenLastCalledWith( - new CombinedGraphQLErrors([{ message: CREATE_TODO_ERROR }]), + new CombinedGraphQLErrors({ errors: [{ message: CREATE_TODO_ERROR }] }), expect.anything() ); expect(consoleSpies.error).not.toHaveBeenCalled(); diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -2913,7 +2913,7 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "error" }] }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -2968,9 +2968,10 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([ - { message: 'Could not fetch "hello"' }, - ]), + error: new CombinedGraphQLErrors({ + data: { hello: null }, + errors: [{ message: 'Could not fetch "hello"' }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3137,9 +3138,10 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: { hello: null }, - error: new CombinedGraphQLErrors([ - { message: 'Could not fetch "hello"' }, - ]), + error: new CombinedGraphQLErrors({ + data: { hello: null }, + errors: [{ message: 'Could not fetch "hello"' }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3251,7 +3253,7 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "error" }] }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3266,7 +3268,7 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "error" }] }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3562,7 +3564,7 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "error" }] }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3678,7 +3680,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error 1" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "error 1" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3687,7 +3691,7 @@ describe("useQuery Hook", () => { } await expect(getCurrentSnapshot().refetch()).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "error 2" }]) + new CombinedGraphQLErrors({ errors: [{ message: "error 2" }] }) ); { @@ -3707,7 +3711,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: 
new CombinedGraphQLErrors([{ message: "error 2" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "error 2" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3771,7 +3777,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error 1" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "error 1" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3780,7 +3788,7 @@ describe("useQuery Hook", () => { } await expect(getCurrentSnapshot().refetch()).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "error 2" }]) + new CombinedGraphQLErrors({ errors: [{ message: "error 2" }] }) ); { @@ -3788,7 +3796,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "error 2" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "error 2" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3852,7 +3862,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "same error" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "same error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3861,7 +3873,7 @@ describe("useQuery Hook", () => { } await expect(getCurrentSnapshot().refetch()).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "same error" }]) + new CombinedGraphQLErrors({ errors: [{ message: "same error" }] }) ); { @@ -3881,7 +3893,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "same error" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "same error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3951,7 +3965,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "same error" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "same error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -3985,7 +4001,7 @@ describe("useQuery Hook", () => { } await expect(getCurrentSnapshot().refetch()).rejects.toEqual( - new CombinedGraphQLErrors([{ message: "same error" }]) + new CombinedGraphQLErrors({ errors: [{ message: "same error" }] }) ); { @@ -4004,7 +4020,9 @@ describe("useQuery Hook", () => { expect(result).toEqualQueryResult({ // TODO: Is this correct behavior here? 
data: { hello: "world" }, - error: new CombinedGraphQLErrors([{ message: "same error" }]), + error: new CombinedGraphQLErrors({ + errors: [{ message: "same error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: { hello: "world" }, @@ -4947,7 +4965,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -4972,7 +4993,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -4994,7 +5018,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5007,7 +5034,10 @@ describe("useQuery Hook", () => { snapshot.useQueryResult?.observable.getCurrentResult(false)! ).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -5051,7 +5081,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5064,7 +5097,10 @@ describe("useQuery Hook", () => { snapshot.useQueryResult?.observable.getCurrentResult(false)! ).toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, partial: true, @@ -5218,7 +5254,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5243,7 +5282,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5268,7 +5310,10 @@ describe("useQuery Hook", () => { // the other query has finished and re-rendered. 
expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5464,7 +5509,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5489,7 +5537,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -5511,7 +5562,10 @@ describe("useQuery Hook", () => { expect(snapshot.useQueryResult!).toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Intentional error" }]), + error: new CombinedGraphQLErrors({ + data: { person: null }, + errors: [{ message: "Intentional error" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -9368,13 +9422,15 @@ describe("useQuery Hook", () => { name: "R2-D2", }, }, - error: new CombinedGraphQLErrors([ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ]), + error: new CombinedGraphQLErrors({ + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), loading: false, networkStatus: NetworkStatus.error, previousData: { @@ -9542,13 +9598,24 @@ describe("useQuery Hook", () => { name: "R2-D2", }, }, - error: new CombinedGraphQLErrors([ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { homeWorld: null, id: "1000", name: "Luke Skywalker" }, + { homeWorld: "Alderaan", id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, }, - ]), + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), loading: false, networkStatus: NetworkStatus.error, previousData: { @@ -10002,7 +10069,10 @@ describe("useQuery Hook", () => { await expect(takeSnapshot()).resolves.toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([graphQLError]), + error: new CombinedGraphQLErrors({ + data: { user: { __typename: "User", id: "1", name: null } }, + errors: [graphQLError], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -10024,7 +10094,10 @@ describe("useQuery Hook", () => { await expect(takeSnapshot()).resolves.toEqualQueryResult({ data: undefined, - error: new CombinedGraphQLErrors([graphQLError]), + error: new CombinedGraphQLErrors({ + data: { user: { __typename: "User", id: "1", name: null } }, + errors: [graphQLError], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, @@ -10999,7 +11072,12 @@ 
describe("useQuery Hook", () => { name: null, }, }, - error: new CombinedGraphQLErrors([{ message: "Couldn't get name" }]), + error: new CombinedGraphQLErrors({ + data: { + currentUser: { __typename: "User", id: 1, name: null, age: 34 }, + }, + errors: [{ message: "Couldn't get name" }], + }), loading: false, networkStatus: NetworkStatus.error, previousData: undefined, diff --git a/src/react/hooks/__tests__/useSubscription.test.tsx b/src/react/hooks/__tests__/useSubscription.test.tsx --- a/src/react/hooks/__tests__/useSubscription.test.tsx +++ b/src/react/hooks/__tests__/useSubscription.test.tsx @@ -154,7 +154,10 @@ describe("useSubscription Hook", () => { await expect(takeSnapshot()).resolves.toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "test" }]), + error: new CombinedGraphQLErrors({ + data: errorResult.result.data, + errors: [{ message: "test" }], + }), loading: false, }); @@ -162,7 +165,10 @@ describe("useSubscription Hook", () => { expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenCalledWith( - new CombinedGraphQLErrors([{ message: "test" }]) + new CombinedGraphQLErrors({ + data: errorResult.result.data, + errors: [{ message: "test" }], + }) ); }); @@ -233,14 +239,20 @@ describe("useSubscription Hook", () => { await expect(takeSnapshot()).resolves.toEqualStrictTyped({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "test" }]), + error: new CombinedGraphQLErrors({ + data: errorResult.result.data, + errors: [{ message: "test" }], + }), loading: false, }); expect(onData).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenLastCalledWith( - new CombinedGraphQLErrors([{ message: "test" }]) + new CombinedGraphQLErrors({ + data: errorResult.result.data, + errors: [{ message: "test" }], + }) ); expect(onComplete).toHaveBeenCalledTimes(0); @@ -1241,16 +1253,14 @@ followed by new in-flight setup", async () => { const snapshot = await takeSnapshot(); expect(snapshot).toEqualStrictTyped({ loading: false, - error: new CombinedGraphQLErrors( - graphQlErrorResult.result!.errors as any - ), + error: new CombinedGraphQLErrors(graphQlErrorResult.result!), data: undefined, }); } expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenCalledWith( - new CombinedGraphQLErrors(graphQlErrorResult.result!.errors!) + new CombinedGraphQLErrors(graphQlErrorResult.result!) ); expect(onData).toHaveBeenCalledTimes(0); expect(errorBoundaryOnError).toHaveBeenCalledTimes(0); @@ -1275,16 +1285,14 @@ followed by new in-flight setup", async () => { const snapshot = await takeSnapshot(); expect(snapshot).toEqualStrictTyped({ loading: false, - error: new CombinedGraphQLErrors( - graphQlErrorResult.result!.errors! - ), + error: new CombinedGraphQLErrors(graphQlErrorResult.result!), data: { totalLikes: 42 }, }); } expect(onError).toHaveBeenCalledTimes(1); expect(onError).toHaveBeenCalledWith( - new CombinedGraphQLErrors(graphQlErrorResult.result!.errors!) + new CombinedGraphQLErrors(graphQlErrorResult.result!) 
); expect(onData).toHaveBeenCalledTimes(0); expect(errorBoundaryOnError).toHaveBeenCalledTimes(0); @@ -1754,7 +1762,7 @@ describe("`restart` callback", () => { expect(snapshot).toEqualStrictTyped({ loading: false, data: undefined, - error: new CombinedGraphQLErrors([error]), + error: new CombinedGraphQLErrors({ errors: [error] }), }); } diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -3560,7 +3560,9 @@ describe("useSuspenseQuery", () => { expect(error).toBeInstanceOf(CombinedGraphQLErrors); expect(error).toEqual( - new CombinedGraphQLErrors([{ message: "`id` should not be null" }]) + new CombinedGraphQLErrors({ + errors: [{ message: "`id` should not be null" }], + }) ); }); @@ -3697,7 +3699,9 @@ describe("useSuspenseQuery", () => { expect(error).toBeInstanceOf(CombinedGraphQLErrors); expect(error).toEqual( - new CombinedGraphQLErrors([{ message: "`id` should not be null" }]) + new CombinedGraphQLErrors({ + errors: [{ message: "`id` should not be null" }], + }) ); }); @@ -3725,7 +3729,7 @@ describe("useSuspenseQuery", () => { const [error] = renders.errors; expect(error).toBeInstanceOf(CombinedGraphQLErrors); - expect(error).toEqual(new CombinedGraphQLErrors(graphQLErrors)); + expect(error).toEqual(new CombinedGraphQLErrors({ errors: graphQLErrors })); }); it('does not throw or return network errors when errorPolicy is set to "ignore"', async () => { @@ -3952,7 +3956,7 @@ describe("useSuspenseQuery", () => { expect(result.current).toEqualStrictTyped({ data: undefined, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([graphQLError]), + error: new CombinedGraphQLErrors({ errors: [graphQLError] }), }); }); @@ -3964,14 +3968,16 @@ describe("useSuspenseQuery", () => { { data: undefined, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([graphQLError]), + error: new CombinedGraphQLErrors({ errors: [graphQLError] }), }, ]); const { error } = result.current; expect(error).toBeInstanceOf(CombinedGraphQLErrors); - expect(error).toEqual(new CombinedGraphQLErrors([graphQLError])); + expect(error).toEqual( + new CombinedGraphQLErrors({ errors: [graphQLError] }) + ); }); it('responds to cache updates and clears errors after an error returns when errorPolicy is set to "all"', async () => { @@ -3993,7 +3999,7 @@ describe("useSuspenseQuery", () => { expect(result.current).toEqualStrictTyped({ data: undefined, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([graphQLError]), + error: new CombinedGraphQLErrors({ errors: [graphQLError] }), }); }); @@ -4027,7 +4033,7 @@ describe("useSuspenseQuery", () => { { data: undefined, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([graphQLError]), + error: new CombinedGraphQLErrors({ errors: [graphQLError] }), }, { data: { currentUser: { id: "1", name: "Cache User" } }, @@ -4050,7 +4056,7 @@ describe("useSuspenseQuery", () => { { mocks } ); - const expectedError = new CombinedGraphQLErrors(graphQLErrors); + const expectedError = new CombinedGraphQLErrors({ errors: graphQLErrors }); await waitFor(() => { expect(result.current).toEqualStrictTyped({ @@ -4091,7 +4097,10 @@ describe("useSuspenseQuery", () => { { mocks } ); - const expectedError = new CombinedGraphQLErrors([graphQLError]); + const expectedError = new CombinedGraphQLErrors({ + data: { currentUser: { id: "1", name: null } }, + errors: 
[graphQLError], + }); await waitFor(() => { expect(result.current).toEqualStrictTyped({ @@ -4122,7 +4131,7 @@ describe("useSuspenseQuery", () => { { mocks } ); - const expectedError = new CombinedGraphQLErrors([graphQLError]); + const expectedError = new CombinedGraphQLErrors({ errors: [graphQLError] }); await waitFor(() => { expect(result.current.error).toEqual(expectedError); @@ -4168,7 +4177,7 @@ describe("useSuspenseQuery", () => { { mocks, initialProps: { id: "1" } } ); - const expectedError = new CombinedGraphQLErrors(graphQLErrors); + const expectedError = new CombinedGraphQLErrors({ errors: graphQLErrors }); await waitFor(() => { expect(result.current).toEqualStrictTyped({ @@ -4584,7 +4593,9 @@ describe("useSuspenseQuery", () => { }); expect(renders.errors).toEqual([ - new CombinedGraphQLErrors([{ message: "Something went wrong" }]), + new CombinedGraphQLErrors({ + errors: [{ message: "Something went wrong" }], + }), ]); expect(renders.frames).toEqualStrictTyped([ { @@ -4695,9 +4706,9 @@ describe("useSuspenseQuery", () => { { mocks } ); - const expectedError = new CombinedGraphQLErrors([ - { message: "Something went wrong" }, - ]); + const expectedError = new CombinedGraphQLErrors({ + errors: [{ message: "Something went wrong" }], + }); await waitFor(() => { expect(result.current).toEqualStrictTyped({ @@ -4772,9 +4783,10 @@ describe("useSuspenseQuery", () => { { mocks } ); - const expectedError = new CombinedGraphQLErrors([ - { message: "Something went wrong" }, - ]); + const expectedError = new CombinedGraphQLErrors({ + data: { user: { id: "1", name: null } }, + errors: [{ message: "Something went wrong" }], + }); await waitFor(() => { expect(result.current).toEqualStrictTyped({ @@ -6089,7 +6101,7 @@ describe("useSuspenseQuery", () => { expect(result.current).toEqualStrictTyped({ ...successMock.result, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([{ message: "oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "oops" }] }), }); }); @@ -6114,7 +6126,7 @@ describe("useSuspenseQuery", () => { { ...successMock.result, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([{ message: "oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "oops" }] }), }, ]); }); @@ -6664,7 +6676,9 @@ describe("useSuspenseQuery", () => { void result.current.refetch(); }); - const expectedError = new CombinedGraphQLErrors([{ message: "oops" }]); + const expectedError = new CombinedGraphQLErrors({ + errors: [{ message: "oops" }], + }); await waitFor(() => { expect(result.current).toEqualStrictTyped({ @@ -8577,7 +8591,9 @@ describe("useSuspenseQuery", () => { expect(error).toBeInstanceOf(CombinedGraphQLErrors); expect(error).toEqual( - new CombinedGraphQLErrors([{ message: "Could not fetch greeting" }]) + new CombinedGraphQLErrors({ + errors: [{ message: "Could not fetch greeting" }], + }) ); }); @@ -8623,7 +8639,10 @@ describe("useSuspenseQuery", () => { expect(error).toBeInstanceOf(CombinedGraphQLErrors); expect(error).toEqual( - new CombinedGraphQLErrors([{ message: "Could not fetch greeting" }]) + new CombinedGraphQLErrors({ + data: { greeting: null }, + errors: [{ message: "Could not fetch greeting" }], + }) ); }); @@ -8754,12 +8773,15 @@ describe("useSuspenseQuery", () => { expect(error).toBeInstanceOf(CombinedGraphQLErrors); expect(error).toEqual( - new CombinedGraphQLErrors([ - { - message: "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], - }, - ]) + new 
CombinedGraphQLErrors({ + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }) ); }); @@ -8877,13 +8899,32 @@ describe("useSuspenseQuery", () => { }, }, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, }, - ]), + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), }); }); @@ -8928,13 +8969,32 @@ describe("useSuspenseQuery", () => { }, }, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, }, - ]), + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), }, ]); }); @@ -9198,13 +9258,24 @@ describe("useSuspenseQuery", () => { }, }, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, }, - ]), + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), }); }); @@ -9345,13 +9416,24 @@ describe("useSuspenseQuery", () => { }, }, networkStatus: NetworkStatus.error, - error: new CombinedGraphQLErrors([ - { - message: - "homeWorld for character with ID 1000 could not be fetched.", - path: ["hero", "heroFriends", 0, "homeWorld"], + error: new CombinedGraphQLErrors({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: null }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, }, - ]), + errors: [ + { + message: + "homeWorld for character with ID 1000 could not be fetched.", + path: ["hero", "heroFriends", 0, "homeWorld"], + }, + ], + }), }, { data: { @@ -10670,7 +10752,10 @@ describe("useSuspenseQuery", () => { expect(renderedComponents).toStrictEqual([ErrorFallback]); expect(snapshot.error).toEqual( - new CombinedGraphQLErrors([{ message: "Could not fetch letters" }]) + new CombinedGraphQLErrors({ + data: null, + errors: [{ message: "Could not fetch letters" }], + }) ); } @@ -11611,7 +11696,17 @@ describe("useSuspenseQuery", () => { }); expect(result?.error).toEqual( - new CombinedGraphQLErrors([{ message: "Couldn't get name" }]) + new CombinedGraphQLErrors({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: null, + age: 34, + }, + }, + errors: [{ message: "Couldn't get name" }], + }) ); } }); 
diff --git a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx --- a/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx +++ b/src/react/query-preloader/__tests__/createQueryPreloader.test.tsx @@ -1499,7 +1499,7 @@ test("throws when error is returned", async () => { expect(renderedComponents).toStrictEqual(["ErrorFallback"]); expect(snapshot.error).toEqual( - new CombinedGraphQLErrors([{ message: "Oops" }]) + new CombinedGraphQLErrors({ errors: [{ message: "Oops" }] }) ); } }); @@ -1532,7 +1532,7 @@ test("returns error when error policy is 'all'", async () => { expect(renderedComponents).toStrictEqual(["ReadQueryHook"]); expect(snapshot.result).toEqual({ data: undefined, - error: new CombinedGraphQLErrors([{ message: "Oops" }]), + error: new CombinedGraphQLErrors({ errors: [{ message: "Oops" }] }), networkStatus: NetworkStatus.error, }); expect(snapshot.error).toEqual(null); diff --git a/src/react/ssr/__tests__/getDataFromTree.test.tsx b/src/react/ssr/__tests__/getDataFromTree.test.tsx --- a/src/react/ssr/__tests__/getDataFromTree.test.tsx +++ b/src/react/ssr/__tests__/getDataFromTree.test.tsx @@ -125,7 +125,10 @@ describe("SSR", () => { expect(data).toMatchObject({ allPeople: { people: null } }); expect(error).toBeDefined(); expect(error).toEqual( - new CombinedGraphQLErrors([{ message: "this is an error" }]) + new CombinedGraphQLErrors({ + data: { allPeople: { people: null } }, + errors: [{ message: "this is an error" }], + }) ); } diff --git a/src/testing/experimental/__tests__/createTestSchema.test.tsx b/src/testing/experimental/__tests__/createTestSchema.test.tsx --- a/src/testing/experimental/__tests__/createTestSchema.test.tsx +++ b/src/testing/experimental/__tests__/createTestSchema.test.tsx @@ -743,9 +743,12 @@ describe("schema proxy", () => { const { snapshot } = await renderStream.takeRender(); expect(snapshot.error).toEqual( - new CombinedGraphQLErrors([ - { message: "Could not resolve type", path: ["viewer", "book"] }, - ]) + new CombinedGraphQLErrors({ + data: null, + errors: [ + { message: "Could not resolve type", path: ["viewer", "book"] }, + ], + }) ); } }); @@ -818,9 +821,11 @@ describe("schema proxy", () => { const { snapshot } = await renderStream.takeRender(); expect(snapshot.error).toEqual( - new CombinedGraphQLErrors([ - { message: 'Expected { foo: "bar" } to be a GraphQL schema.' }, - ]) + new CombinedGraphQLErrors({ + errors: [ + { message: 'Expected { foo: "bar" } to be a GraphQL schema.' }, + ], + }) ); } });
[4.0] Add `data` property to `CombinedGraphQLErrors` to access partial data

When using `errorPolicy: 'none'` (the default), data returned from the server is discarded when errors are also returned. We'd like to make any partial data returned from the server available on the `CombinedGraphQLErrors` instance.
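A minimal sketch of what the changed API looks like from a caller's point of view, based on the constructor calls visible in the updated tests above. The `@apollo/client/errors` entry point and the `errors`/`data` property names on the instance are assumptions inferred from those tests, not a definitive implementation:

```ts
import { CombinedGraphQLErrors } from "@apollo/client/errors";

// The constructor now receives the whole execution result ({ data, errors })
// instead of just the errors array, matching the updated test expectations.
const error = new CombinedGraphQLErrors({
  data: { currentUser: { __typename: "User", id: 1, name: null } },
  errors: [{ message: "Couldn't get name" }],
});

// With errorPolicy: 'none' the partial data is no longer thrown away; it is
// exposed on the error instance alongside the GraphQL errors.
console.log(error.errors[0].message); // "Couldn't get name"
console.log(error.data); // { currentUser: { __typename: "User", id: 1, name: null } }
```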
2025-03-28T22:24:53Z
4
apollographql/apollo-client
12,533
apollographql__apollo-client-12533
[ "12529" ]
ae0dcad89924e6b8090ca3182df30e528589b562
diff --git a/src/link/core/ApolloLink.ts b/src/link/core/ApolloLink.ts --- a/src/link/core/ApolloLink.ts +++ b/src/link/core/ApolloLink.ts @@ -1,4 +1,4 @@ -import type { Observable, Subscriber } from "rxjs"; +import type { Observable } from "rxjs"; import { EMPTY } from "rxjs"; import { @@ -138,29 +138,6 @@ export class ApolloLink { throw newInvariantError("request is not implemented"); } - protected onError( - error: any, - observer?: Subscriber<FetchResult> - ): false | void { - if (observer && observer.error) { - observer.error(error); - // Returning false indicates that observer.error does not need to be - // called again, since it was already called (on the previous line). - // Calling observer.error again would not cause any real problems, - // since only the first call matters, but custom onError functions - // might have other reasons for wanting to prevent the default - // behavior by returning false. - return false; - } - // Throw errors will be passed to observer.error. - throw error; - } - - public setOnError(fn: ApolloLink["onError"]): this { - this.onError = fn; - return this; - } - /** * @internal * Used to iterate through all links that are concatenations or `split` links.
diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -164,9 +164,7 @@ describe("mutation results", () => { delay, }, ...mockedResponses - ).setOnError((error) => { - throw error; - }), + ), cache: new InMemoryCache({ dataIdFromObject: (obj: any) => { if (obj.id && obj.__typename) { diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -3654,9 +3654,7 @@ describe("type policies", function () { }, }, }, - ]).setOnError((error) => { - throw new Error(error); - }); + ]); const client = new ApolloClient({ link, cache }); @@ -4095,9 +4093,7 @@ describe("type policies", function () { }, }, }, - ]).setOnError((error) => { - throw new Error(error); - }); + ]); const client = new ApolloClient({ link, cache }); diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -823,7 +823,11 @@ describe("useQuery Hook", () => { }, { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks} cache={cache}> + <MockedProvider + mocks={mocks} + cache={cache} + mockLinkDefaultOptions={{ delay: 0 }} + > {children} </MockedProvider> ), @@ -2106,8 +2110,6 @@ describe("useQuery Hook", () => { const link = new MockLink(mocks); const requestSpy = jest.spyOn(link, "request"); - const onErrorFn = jest.fn(); - link.setOnError(onErrorFn); const wrapper = ({ children }: any) => ( <MockedProvider link={link} cache={cache}> {children} @@ -2162,10 +2164,6 @@ describe("useQuery Hook", () => { ) ).rejects.toThrow(); - await waitFor(() => { - expect(onErrorFn).toHaveBeenCalledTimes(0); - }); - requestSpy.mockRestore(); }); @@ -2201,8 +2199,6 @@ describe("useQuery Hook", () => { const link = new MockLink(mocks); const requestSpy = jest.spyOn(link, "request"); - const onErrorFn = jest.fn(); - link.setOnError(onErrorFn); const client = new ApolloClient({ queryDeduplication: false, @@ -2275,7 +2271,6 @@ describe("useQuery Hook", () => { jest.advanceTimersByTime(200); expect(requestSpy).toHaveBeenCalledTimes(2); - expect(onErrorFn).toHaveBeenCalledTimes(0); jest.useRealTimers(); }); @@ -2307,8 +2302,6 @@ describe("useQuery Hook", () => { const cache = new InMemoryCache(); const link = new MockLink(mocks); const requestSpy = jest.spyOn(link, "request"); - const onErrorFn = jest.fn(); - link.setOnError(onErrorFn); const wrapper = ({ children }: any) => ( <React.StrictMode> <MockedProvider link={link} cache={cache}> @@ -2362,7 +2355,6 @@ describe("useQuery Hook", () => { ) ).rejects.toThrow(); expect(requestSpy).toHaveBeenCalledTimes(requestSpyCallCount); - expect(onErrorFn).toHaveBeenCalledTimes(0); requestSpy.mockRestore(); }); @@ -2398,8 +2390,6 @@ describe("useQuery Hook", () => { const link = new MockLink(mocks); const requestSpy = jest.spyOn(link, "request"); - const onErrorFn = jest.fn(); - link.setOnError(onErrorFn); const client = new ApolloClient({ link, cache }); @@ -2464,7 +2454,6 @@ describe("useQuery Hook", () => { await expect(takeSnapshot).not.toRerender({ timeout: 50 }); // TODO rarely seeing 3 here investigate further expect(requestSpy).toHaveBeenCalledTimes(2); - expect(onErrorFn).toHaveBeenCalledTimes(0); }); it("should start and stop polling in Strict Mode", async () => { @@ -2495,8 +2484,6 @@ describe("useQuery Hook", () => { 
const cache = new InMemoryCache(); const link = new MockLink(mocks); const requestSpy = jest.spyOn(link, "request"); - const onErrorFn = jest.fn(); - link.setOnError(onErrorFn); const wrapper = ({ children }: any) => ( <React.StrictMode> <MockedProvider link={link} cache={cache}> @@ -2555,7 +2542,6 @@ describe("useQuery Hook", () => { getCurrentSnapshot().startPolling(20); expect(requestSpy).toHaveBeenCalledTimes(2); - expect(onErrorFn).toHaveBeenCalledTimes(0); { const result = await takeSnapshot(); @@ -2582,7 +2568,6 @@ describe("useQuery Hook", () => { } expect(requestSpy).toHaveBeenCalledTimes(4); - expect(onErrorFn).toHaveBeenCalledTimes(0); requestSpy.mockRestore(); }); diff --git a/src/testing/core/mocking/mockClient.ts b/src/testing/core/mocking/mockClient.ts --- a/src/testing/core/mocking/mockClient.ts +++ b/src/testing/core/mocking/mockClient.ts @@ -5,6 +5,7 @@ import { InMemoryCache } from "@apollo/client/cache"; import { mockSingleLink } from "./mockLink.js"; +// TODO: Deprecate this function export function createMockClient<TData>( data: TData, query: DocumentNode, @@ -14,8 +15,6 @@ export function createMockClient<TData>( link: mockSingleLink({ request: { query, variables }, result: { data }, - }).setOnError((error) => { - throw error; }), cache: new InMemoryCache(), }); diff --git a/src/testing/core/mocking/mockLink.ts b/src/testing/core/mocking/mockLink.ts --- a/src/testing/core/mocking/mockLink.ts +++ b/src/testing/core/mocking/mockLink.ts @@ -172,14 +172,7 @@ export class MockLink extends ApolloLink { ); } - return throwError(() => { - const error = new Error(message); - - // TODO: Remove this once `onError` and `setOnError` is removed. - if (this.onError(error) !== false) { - return error; - } - }); + return throwError(() => new Error(message)); } if (matched.maxUsageCount > 1) { diff --git a/src/testing/react/__tests__/MockedProvider.test.tsx b/src/testing/react/__tests__/MockedProvider.test.tsx --- a/src/testing/react/__tests__/MockedProvider.test.tsx +++ b/src/testing/react/__tests__/MockedProvider.test.tsx @@ -998,61 +998,6 @@ describe("General use", () => { consoleSpy.mockRestore(); }); - it("should support custom error handling using setOnError", async () => { - let finished = false; - function Component({ ...variables }: Variables) { - useQuery<Data, Variables>(query, { variables }); - return null; - } - - const mockLink = new MockLink([], { showWarnings: false }); - mockLink.setOnError((error) => { - expect(error).toMatchSnapshot(); - finished = true; - }); - const link = ApolloLink.from([errorLink, mockLink]); - - render( - <MockedProvider link={link}> - <Component {...variables} /> - </MockedProvider> - ); - - await waitFor(() => { - expect(finished).toBe(true); - }); - }); - - it("should pipe exceptions thrown in custom onError functions through the link chain", async () => { - let finished = false; - function Component({ ...variables }: Variables) { - const { loading, error } = useQuery<Data, Variables>(query, { - variables, - }); - if (!loading) { - expect(error).toMatchSnapshot(); - finished = true; - } - return null; - } - - const mockLink = new MockLink([], { showWarnings: false }); - mockLink.setOnError(() => { - throw new Error("oh no!"); - }); - const link = ApolloLink.from([errorLink, mockLink]); - - render( - <MockedProvider link={link}> - <Component {...variables} /> - </MockedProvider> - ); - - await waitFor(() => { - expect(finished).toBe(true); - }); - }); - it("should support loading state testing with delay", async () => { jest.useFakeTimers(); 
diff --git a/src/testing/react/__tests__/__snapshots__/MockedProvider.test.tsx.snap b/src/testing/react/__tests__/__snapshots__/MockedProvider.test.tsx.snap --- a/src/testing/react/__tests__/__snapshots__/MockedProvider.test.tsx.snap +++ b/src/testing/react/__tests__/__snapshots__/MockedProvider.test.tsx.snap @@ -84,8 +84,6 @@ Object { } `; -exports[`General use should pipe exceptions thrown in custom onError functions through the link chain 1`] = `[Error: oh no!]`; - exports[`General use should return "Mocked response should contain" errors in response 1`] = ` [Error: Mocked response should contain either \`result\`, \`error\` or a \`delay\` of \`Infinity\`: { @@ -111,32 +109,6 @@ Request variables: {} ] `; -exports[`General use should support custom error handling using setOnError 1`] = ` -[Error: No more mocked responses for the query: -query GetUser($username: String!) { - user(username: $username) { - id - __typename - } -} - -Request variables: {"username":"mock_username"} -] -`; - -exports[`General use should support custom error handling using setOnError 2`] = ` -[Error: No more mocked responses for the query: -query GetUser($username: String!) { - user(username: $username) { - id - __typename - } -} - -Request variables: {"username":"mock_username"} -] -`; - exports[`General use should use the mock if the \`variables\` callback function returns true 1`] = ` Object { "__typename": "User",
[4.0] Remove `setOnError` and `onError` from `ApolloLink`

These APIs are only used by `MockLink` and appear to exist only to rewrite errors. This code is not called anywhere else. We'd prefer to remove these APIs.
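A minimal sketch, assuming the rxjs-based request handler shown in the `ApolloLink` patch above, of how a link signals a terminal error once the protected `onError`/`setOnError` hooks are gone: it emits the error on the returned observable itself, which is all the removed default hook did. Names other than `ApolloLink` and `Observable` are illustrative:

```ts
import { ApolloLink } from "@apollo/client";
import { Observable } from "rxjs";

const failingLink = new ApolloLink(() => {
  return new Observable((observer) => {
    // MockLink previously routed this through this.onError(error); the error
    // is now emitted directly and reaches whoever subscribed to the request.
    observer.error(new Error("No more mocked responses for the query"));
  });
});
```

Correspondingly, the tests above stop asserting on `setOnError` callbacks and assert on the rejected query or render result instead.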
2025-04-04T21:57:23Z
4
apollographql/apollo-client
12,379
apollographql__apollo-client-12379
[ "12184" ]
9ed45c503a55c777138b60ce5fc905749dc538aa
diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -49,7 +49,6 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { protected config: InMemoryCacheConfig; private watches = new Set<Cache.WatchOptions>(); - private addTypename: boolean; private storeReader!: StoreReader; private storeWriter!: StoreWriter; @@ -75,7 +74,6 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { constructor(config: InMemoryCacheConfig = {}) { super(); this.config = normalizeConfig(config); - this.addTypename = !!this.config.addTypename; this.policies = new Policies({ cache: this, @@ -117,7 +115,6 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { this, (this.storeReader = new StoreReader({ cache: this, - addTypename: this.addTypename, resultCacheMaxSize: this.config.resultCacheMaxSize, canonizeResults: shouldCanonizeResults(this.config), canon: @@ -525,7 +522,9 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { } public transformDocument(document: DocumentNode): DocumentNode { - return this.addTypenameToDocument(this.addFragmentsToDocument(document)); + return this.addTypenameTransform.transformDocument( + this.addFragmentsToDocument(document) + ); } public fragmentMatches( @@ -550,13 +549,6 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { return fragments ? fragments.transform(document) : document; } - private addTypenameToDocument(document: DocumentNode) { - if (this.addTypename) { - return this.addTypenameTransform.transformDocument(document); - } - return document; - } - // This method is wrapped by maybeBroadcastWatch, which is called by // broadcastWatches, so that we compute and broadcast results only when // the data that would be broadcast might have changed. It would be diff --git a/src/cache/inmemory/readFromStore.ts b/src/cache/inmemory/readFromStore.ts --- a/src/cache/inmemory/readFromStore.ts +++ b/src/cache/inmemory/readFromStore.ts @@ -84,7 +84,6 @@ type ExecSubSelectedArrayOptions = { interface StoreReaderConfig { cache: InMemoryCache; - addTypename?: boolean; resultCacheMaxSize?: number; canonizeResults?: boolean; canon?: ObjectCanon; @@ -129,7 +128,6 @@ export class StoreReader { private config: { cache: InMemoryCache; - addTypename: boolean; resultCacheMaxSize?: number; canonizeResults: boolean; fragments?: InMemoryCacheConfig["fragments"]; @@ -147,7 +145,6 @@ export class StoreReader { constructor(config: StoreReaderConfig) { this.config = compact(config, { - addTypename: config.addTypename !== false, canonizeResults: shouldCanonizeResults(config), }); @@ -349,14 +346,10 @@ export class StoreReader { let missing: MissingTree | undefined; const missingMerger = new DeepMerger(); - if ( - this.config.addTypename && - typeof typename === "string" && - !policies.rootIdsByTypename[typename] - ) { + if (typeof typename === "string" && !policies.rootIdsByTypename[typename]) { // Ensure we always include a default value for the __typename - // field, if we have one, and this.config.addTypename is true. Note - // that this field can be overridden by other merged objects. + // field, if we have one. Note that this field can be overridden by other + // merged objects. 
objectsToMerge.push({ __typename: typename }); } diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -137,7 +137,6 @@ export type DiffQueryAgainstStoreOptions = ReadQueryOptions & { export type ApolloReducerConfig = { dataIdFromObject?: KeyFieldsFunction; - addTypename?: boolean; }; export interface InMemoryCacheConfig extends ApolloReducerConfig {
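A hedged sketch of the behavioral change this patch makes: the `addTypename` flag is removed from `InMemoryCacheConfig` and the add-typename document transform always runs, which is why the test expectations below gain `__typename` fields. `transformDocument` is taken from the diff itself; the query text is illustrative:

```ts
import { InMemoryCache, gql } from "@apollo/client";
import { print } from "graphql";

// `new InMemoryCache({ addTypename: false })` is no longer accepted; the
// option was removed from InMemoryCacheConfig.
const cache = new InMemoryCache();

// transformDocument() now unconditionally applies the add-typename transform,
// so every selection set in the transformed document selects __typename.
const transformed = cache.transformDocument(gql`
  query GetFoo {
    foo {
      bar
    }
  }
`);

console.log(print(transformed));
```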
diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -1178,7 +1178,6 @@ describe("ApolloClient", () => { return result.__typename + result.id; } }, - addTypename: true, }), }); } @@ -1612,9 +1611,7 @@ describe("ApolloClient", () => { it("will not use a default id getter if __typename is not present", () => { const client = new ApolloClient({ link: ApolloLink.empty(), - cache: new InMemoryCache({ - addTypename: false, - }), + cache: new InMemoryCache(), }); client.writeQuery({ @@ -1832,9 +1829,7 @@ describe("ApolloClient", () => { it("will not use a default id getter if id is present and __typename is not present", () => { const client = new ApolloClient({ link: ApolloLink.empty(), - cache: new InMemoryCache({ - addTypename: false, - }), + cache: new InMemoryCache(), }); client.writeQuery({ @@ -1881,9 +1876,7 @@ describe("ApolloClient", () => { it("will not use a default id getter if _id is present but __typename is not present", () => { const client = new ApolloClient({ link: ApolloLink.empty(), - cache: new InMemoryCache({ - addTypename: false, - }), + cache: new InMemoryCache(), }); client.writeQuery({ @@ -1930,9 +1923,7 @@ describe("ApolloClient", () => { it("will not use a default id getter if either _id or id is present when __typename is not also present", () => { const client = new ApolloClient({ link: ApolloLink.empty(), - cache: new InMemoryCache({ - addTypename: false, - }), + cache: new InMemoryCache(), }); client.writeQuery({ diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -165,7 +165,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ query, variables }); @@ -210,7 +210,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); { @@ -276,7 +276,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); { @@ -434,9 +434,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }).restore( - initialState.data - ), + cache: new InMemoryCache().restore(initialState.data), }); const result = await client.query({ query }); @@ -460,6 +458,7 @@ describe("client", () => { allPeople: { people: [ { + __typename: "Person", name: "Luke Skywalker", }, ], @@ -477,6 +476,7 @@ describe("client", () => { 'allPeople({"first":1})': { people: [ { + __typename: "Person", name: "Luke Skywalker", }, ], @@ -486,13 +486,11 @@ describe("client", () => { }, }; - const finalState = assign({}, initialState, {}); + const finalState = Object.assign({}, initialState); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }).restore( - initialState.data - ), + cache: new InMemoryCache().restore(initialState.data), }); const result = await client.query({ query }); @@ -554,9 +552,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }).restore( - initialState.data - ), + cache: new InMemoryCache().restore(initialState.data), }); expect(client.restore(initialState.data)).toEqual( @@ -588,7 +584,7 @@ describe("client", () => { const client = 
new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await expect(client.query({ query })).rejects.toEqual( @@ -633,7 +629,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await expect(client.query({ query })).rejects.toEqual( @@ -664,7 +660,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await expect(client.query({ query })).rejects.toThrow( @@ -697,7 +693,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); return client.query({ query }).then((result: FormattedExecutionResult) => { @@ -748,7 +744,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const handle = client.watchQuery({ query }); @@ -791,7 +787,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const handle = client.watchQuery({ query }); @@ -833,7 +829,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const handle = client.watchQuery({ query }); @@ -861,12 +857,6 @@ describe("client", () => { } `; - const result = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; const transformedResult = { author: { firstName: "John", @@ -875,21 +865,14 @@ describe("client", () => { }, }; - const link = mockSingleLink( - { - request: { query }, - result: { data: result }, - }, - { - request: { query: transformedQuery }, - result: { data: transformedResult }, - }, - false - ); + const link = mockSingleLink({ + request: { query: transformedQuery }, + result: { data: transformedResult }, + }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: true }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ query }); @@ -915,12 +898,6 @@ describe("client", () => { } } `; - const result = { - author: { - firstName: "John", - lastName: "Smith", - }, - }; const transformedResult = { author: { firstName: "John", @@ -928,21 +905,14 @@ describe("client", () => { __typename: "Author", }, }; - const link = mockSingleLink( - { - request: { query }, - result: { data: result }, - }, - { - request: { query: transformedQuery }, - result: { data: transformedResult }, - }, - false - ); + const link = mockSingleLink({ + request: { query: transformedQuery }, + result: { data: transformedResult }, + }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: true }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ @@ -973,6 +943,7 @@ describe("client", () => { author { firstName lastName + __typename } } `; @@ -993,7 +964,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query }); @@ -1032,7 +1003,7 @@ describe("client", () => { }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const actualResult = await client.mutate({ mutation }); @@ -1069,7 
+1040,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ @@ -1114,7 +1085,7 @@ describe("client", () => { }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ query }); @@ -1150,7 +1121,7 @@ describe("client", () => { }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ query }); @@ -1357,7 +1328,7 @@ describe("client", () => { ]); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); return client.query({ query }).then((actualResult) => { @@ -1382,7 +1353,7 @@ describe("client", () => { ]); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); return client.mutate({ mutation }).then((actualResult) => { @@ -1425,7 +1396,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), queryDeduplication: false, }); @@ -1473,7 +1444,7 @@ describe("client", () => { ); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const q1 = client.query({ query: queryDoc }); @@ -1519,7 +1490,7 @@ describe("client", () => { ); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), queryDeduplication: false, }); @@ -1574,7 +1545,7 @@ describe("client", () => { ); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); // The first query gets tracked in the dedup logic, the second one ignores it and runs anyways @@ -1701,7 +1672,6 @@ describe("client", () => { cache: new InMemoryCache({ dataIdFromObject: (obj: any) => obj.id, - addTypename: false, }), }); @@ -1786,7 +1756,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); client.writeQuery({ query, data: initialData }); @@ -1811,7 +1781,7 @@ describe("client", () => { }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obs = client.watchQuery({ @@ -1832,7 +1802,7 @@ describe("client", () => { const link = mockSingleLink(); // no queries = no replies. 
const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obs = client.watchQuery({ @@ -1854,7 +1824,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); client.writeQuery({ query, data: initialData }); @@ -1974,7 +1944,7 @@ describe("client", () => { it("forces the query to rerun", async () => { const client = new ApolloClient({ link: makeLink(), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); // Run a query first to initialize the store @@ -1989,7 +1959,7 @@ describe("client", () => { const client = new ApolloClient({ link: makeLink(), ssrMode: true, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const options: QueryOptions = { query, fetchPolicy: "network-only" }; @@ -2011,7 +1981,7 @@ describe("client", () => { const client = new ApolloClient({ link: makeLink(), ssrForceFetchDelay: 100, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); // Run a query first to initialize the store @@ -2056,7 +2026,7 @@ describe("client", () => { result: { data }, error: networkError, }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await expect(client.mutate({ mutation })).rejects.toThrow( @@ -2087,7 +2057,7 @@ describe("client", () => { request: { query: mutation }, result: { data, errors }, }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await expect(client.mutate({ mutation })).rejects.toEqual( @@ -2123,7 +2093,7 @@ describe("client", () => { }, }, }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const result = await client.mutate({ mutation, errorPolicy: "all" }); @@ -2161,7 +2131,7 @@ describe("client", () => { request: { query: mutation }, result: { data, errors }, }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const result = await client.mutate({ mutation, errorPolicy: "ignore" }); @@ -2195,7 +2165,7 @@ describe("client", () => { request: { query: mutation }, result: { data, errors }, }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const mutatePromise = client.mutate({ mutation, @@ -2494,7 +2464,7 @@ describe("client", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const handle = client.watchQuery({ @@ -2534,7 +2504,7 @@ describe("client", () => { ); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const observable = client.watchQuery({ @@ -3305,7 +3275,7 @@ describe("@connection", () => { }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), defaultOptions: { watchQuery: { fetchPolicy: "cache-and-network", @@ -3530,7 +3500,7 @@ describe("@connection", () => { }); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), defaultOptions: { query: { errorPolicy: "all" }, }, @@ -3561,7 +3531,7 @@ describe("@connection", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), defaultOptions: { mutate: { variables: { id: 1 } }, }, diff --git 
a/src/__tests__/graphqlSubscriptions.ts b/src/__tests__/graphqlSubscriptions.ts --- a/src/__tests__/graphqlSubscriptions.ts +++ b/src/__tests__/graphqlSubscriptions.ts @@ -50,7 +50,7 @@ describe("GraphQL Subscriptions", () => { // This test calls directly through Apollo Client const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const stream = new ObservableStream(client.subscribe(defaultOptions)); @@ -66,7 +66,7 @@ describe("GraphQL Subscriptions", () => { // This test calls directly through Apollo Client const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const stream = new ObservableStream(client.subscribe(options)); @@ -82,7 +82,7 @@ describe("GraphQL Subscriptions", () => { const link = mockObservableLink(); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obs = client.subscribe(options); @@ -99,7 +99,7 @@ describe("GraphQL Subscriptions", () => { const link = mockObservableLink(); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const stream = new ObservableStream(client.subscribe(options)); @@ -117,7 +117,7 @@ describe("GraphQL Subscriptions", () => { it("should not cache subscription data if a `no-cache` fetch policy is used", async () => { const link = mockObservableLink(); - const cache = new InMemoryCache({ addTypename: false }); + const cache = new InMemoryCache(); const client = new ApolloClient({ link, cache, @@ -138,7 +138,7 @@ describe("GraphQL Subscriptions", () => { const link = mockObservableLink(); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obs = client.subscribe(options); @@ -372,7 +372,7 @@ describe("GraphQL Subscriptions", () => { const link = mockObservableLink(); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const stream = new ObservableStream(client.subscribe(defaultOptions)); @@ -404,7 +404,7 @@ describe("GraphQL Subscriptions", () => { const link = mockObservableLink(); const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obs = client.subscribe(options); diff --git a/src/__tests__/local-state/export.ts b/src/__tests__/local-state/export.ts --- a/src/__tests__/local-state/export.ts +++ b/src/__tests__/local-state/export.ts @@ -616,9 +616,7 @@ describe("@client @export tests", () => { expect(print(request.query)).toBe(print(expectedServerQuery)); return Observable.of({ data }); }), - cache: new InMemoryCache({ - addTypename: true, - }), + cache: new InMemoryCache(), resolvers: { Query: { currentFilter() { diff --git a/src/__tests__/local-state/resolvers.ts b/src/__tests__/local-state/resolvers.ts --- a/src/__tests__/local-state/resolvers.ts +++ b/src/__tests__/local-state/resolvers.ts @@ -36,7 +36,7 @@ const setupTestWithResolvers = ({ delay?: number; }) => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: serverQuery || query, variables }, @@ -169,7 +169,7 @@ describe("Basic resolver capabilities", () => { await expect(stream).toEmitApolloQueryResult({ data: { foo: { bar: true, __typename: "ClientData" }, - bar: 
{ baz: true }, + bar: { baz: true, __typename: "Bar" }, }, loading: false, networkStatus: NetworkStatus.ready, @@ -266,7 +266,7 @@ describe("Basic resolver capabilities", () => { }); await expect(stream).toEmitApolloQueryResult({ - data: { foo: { bar: 1 } }, + data: { foo: { __typename: "Foo", bar: 1 } }, loading: false, networkStatus: NetworkStatus.ready, partial: false, @@ -300,7 +300,7 @@ describe("Basic resolver capabilities", () => { }); await expect(stream).toEmitApolloQueryResult({ - data: { foo: { bar: 1 } }, + data: { foo: { __typename: "Foo", bar: 1 } }, loading: false, networkStatus: NetworkStatus.ready, partial: false, @@ -360,8 +360,10 @@ describe("Basic resolver capabilities", () => { await expect(stream).toEmitApolloQueryResult({ data: { author: { + __typename: "Author", name: "John Smith", stats: { + __typename: "Stats", totalPosts: 100, postsToday: 10, }, @@ -559,11 +561,14 @@ describe("Basic resolver capabilities", () => { resolvers, query, serverQuery, - serverResult: { data: { bar: { baz: true } } }, + serverResult: { data: { bar: { __typename: "Bar", baz: true } } }, }); await expect(stream).toEmitApolloQueryResult({ - data: { foo: { bar: true }, bar: { baz: true } }, + data: { + foo: { __typename: "Foo", bar: true }, + bar: { __typename: "Bar", baz: true }, + }, loading: false, networkStatus: NetworkStatus.ready, partial: false, diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -1073,7 +1073,7 @@ describe("mutation results", () => { result: resetMutationResult, } ), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const watchedQuery = client.watchQuery({ @@ -1121,7 +1121,7 @@ describe("mutation results", () => { let count = 0; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: ApolloLink.from([ ({ variables }: any) => new Observable((observer) => { @@ -1198,7 +1198,7 @@ describe("mutation results", () => { let count = 0; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: ApolloLink.from([ ({ variables }: any) => new Observable((observer) => { @@ -1273,7 +1273,7 @@ describe("mutation results", () => { let count = 0; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: ApolloLink.from([ ({ variables }: any) => new Observable((observer) => { @@ -1643,7 +1643,7 @@ describe("mutation results", () => { request: { query: mutation } as any, result: result1, }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const result = await client.mutate<{ foo: { bar: string } }>({ diff --git a/src/__tests__/subscribeToMore.ts b/src/__tests__/subscribeToMore.ts --- a/src/__tests__/subscribeToMore.ts +++ b/src/__tests__/subscribeToMore.ts @@ -65,7 +65,7 @@ describe("subscribeToMore", () => { const link = ApolloLink.split(isSub, wSLink, httpLink); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -117,7 +117,7 @@ describe("subscribeToMore", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obsHandle = client.watchQuery<(typeof req1)["result"]["data"]>({ @@ -172,7 +172,7 @@ describe("subscribeToMore", () => { const client = new 
ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const obsHandle = client.watchQuery({ @@ -219,7 +219,7 @@ describe("subscribeToMore", () => { const link = ApolloLink.split(isSub, wSLink, httpLink); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }).restore({ + cache: new InMemoryCache().restore({ ROOT_QUERY: { entry: [ { @@ -335,7 +335,7 @@ describe("subscribeToMore", () => { const link = ApolloLink.split(isSub, wSLink, httpLink); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); diff --git a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap --- a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap @@ -818,6 +818,7 @@ Object { "ROOT_QUERY": Object { "__typename": "Query", "todos": Object { + "__typename": "TodosConnection", "edges": Array [ Object { "__ref": "TodoEdge:edge1", @@ -854,6 +855,7 @@ Object { "ROOT_QUERY": Object { "__typename": "Query", "todos": Object { + "__typename": "TodosConnection", "edges": Array [ Object { "__ref": "TodoEdge:edge1", diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -31,13 +31,10 @@ describe("Cache", () => { ) { const cachesList: InMemoryCache[][] = [ initialDataForCaches.map((data) => - new InMemoryCache({ - addTypename: false, - }).restore(cloneDeep(data)) + new InMemoryCache().restore(cloneDeep(data)) ), initialDataForCaches.map((data) => new InMemoryCache({ - addTypename: false, resultCaching: false, }).restore(cloneDeep(data)) ), @@ -56,12 +53,10 @@ describe("Cache", () => { ) { const caches = [ new InMemoryCache({ - addTypename: false, ...config, resultCaching: true, }), new InMemoryCache({ - addTypename: false, ...config, resultCaching: false, }), @@ -402,7 +397,7 @@ describe("Cache", () => { } `, }) - ).toEqual({ e: 4, h: { i: 7 } }); + ).toEqual({ __typename: "Foo", e: 4, h: { __typename: "Bar", i: 7 } }); expect( proxy.readFragment({ id: "foo", @@ -419,7 +414,13 @@ describe("Cache", () => { } `, }) - ).toEqual({ e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } }); + ).toEqual({ + __typename: "Foo", + e: 4, + f: 5, + g: 6, + h: { __typename: "Bar", i: 7, j: 8, k: 9 }, + }); expect( proxy.readFragment({ id: "bar", @@ -429,7 +430,7 @@ describe("Cache", () => { } `, }) - ).toEqual({ i: 7 }); + ).toEqual({ __typename: "Bar", i: 7 }); expect( proxy.readFragment({ id: "bar", @@ -441,7 +442,7 @@ describe("Cache", () => { } `, }) - ).toEqual({ i: 7, j: 8, k: 9 }); + ).toEqual({ __typename: "Bar", i: 7, j: 8, k: 9 }); expect( proxy.readFragment({ id: "foo", @@ -465,7 +466,13 @@ describe("Cache", () => { `, fragmentName: "fragmentFoo", }) - ).toEqual({ e: 4, f: 5, g: 6, h: { i: 7, j: 8, k: 9 } }); + ).toEqual({ + __typename: "Foo", + e: 4, + f: 5, + g: 6, + h: { __typename: "Bar", i: 7, j: 8, k: 9 }, + }); expect( proxy.readFragment({ id: "bar", @@ -489,7 +496,7 @@ describe("Cache", () => { `, fragmentName: "fragmentBar", }) - ).toEqual({ i: 7, j: 8, k: 9 }); + ).toEqual({ __typename: "Bar", i: 7, j: 8, k: 9 }); } ); @@ -519,7 +526,7 @@ describe("Cache", () => { value: 42, }, }) - ).toEqual({ a: 1, b: 2 }); + ).toEqual({ __typename: "Foo", a: 1, b: 2 }); } ); @@ -573,7 +580,7 @@ describe("Cache", () => { } `, }) - 
).toEqual({ a: 1, b: 2, c: 3 }); + ).toEqual({ __typename: "Foo", a: 1, b: 2, c: 3 }); } ); @@ -1129,7 +1136,6 @@ describe("Cache", () => { "will write some deeply nested data into the store at any id", { dataIdFromObject: (o: any) => o.id, - addTypename: false, }, (proxy) => { proxy.writeFragment({ @@ -1248,42 +1254,34 @@ describe("Cache", () => { } ); - itWithCacheConfig( - "writes data that can be read back", - { - addTypename: true, - }, - (proxy) => { - const readWriteFragment = gql` - fragment aFragment on query { - getSomething { - id - } + itWithCacheConfig("writes data that can be read back", {}, (proxy) => { + const readWriteFragment = gql` + fragment aFragment on query { + getSomething { + id } - `; - const data = { - __typename: "query", - getSomething: { id: "123", __typename: "Something" }, - }; - proxy.writeFragment({ - data, - id: "query", - fragment: readWriteFragment, - }); + } + `; + const data = { + __typename: "query", + getSomething: { id: "123", __typename: "Something" }, + }; + proxy.writeFragment({ + data, + id: "query", + fragment: readWriteFragment, + }); - const result = proxy.readFragment({ - fragment: readWriteFragment, - id: "query", - }); - expect(result).toEqual(data); - } - ); + const result = proxy.readFragment({ + fragment: readWriteFragment, + id: "query", + }); + expect(result).toEqual(data); + }); itWithCacheConfig( "will write some data to the store with variables", - { - addTypename: true, - }, + {}, (proxy) => { proxy.writeFragment({ data: { diff --git a/src/cache/inmemory/__tests__/fragmentMatcher.ts b/src/cache/inmemory/__tests__/fragmentMatcher.ts --- a/src/cache/inmemory/__tests__/fragmentMatcher.ts +++ b/src/cache/inmemory/__tests__/fragmentMatcher.ts @@ -6,12 +6,9 @@ import { hasOwn } from "../helpers"; describe("fragment matching", () => { it("can match exact types with or without possibleTypes", () => { - const cacheWithoutPossibleTypes = new InMemoryCache({ - addTypename: true, - }); + const cacheWithoutPossibleTypes = new InMemoryCache(); const cacheWithPossibleTypes = new InMemoryCache({ - addTypename: true, possibleTypes: { Animal: ["Cat", "Dog"], }, @@ -57,7 +54,6 @@ describe("fragment matching", () => { it("can match interface subtypes", () => { const cache = new InMemoryCache({ - addTypename: true, possibleTypes: { Animal: ["Cat", "Dog"], }, @@ -89,7 +85,6 @@ describe("fragment matching", () => { it("can match union member types", () => { const cache = new InMemoryCache({ - addTypename: true, possibleTypes: { Status: ["PASSING", "FAILING", "SKIPPED"], }, @@ -139,7 +134,6 @@ describe("fragment matching", () => { it("can match indirect subtypes while avoiding cycles", () => { const cache = new InMemoryCache({ - addTypename: true, possibleTypes: { Animal: ["Animal", "Bug", "Mammal"], Bug: ["Ant", "Spider", "RolyPoly"], @@ -186,9 +180,7 @@ describe("fragment matching", () => { }); it("can match against the root Query", () => { - const cache = new InMemoryCache({ - addTypename: true, - }); + const cache = new InMemoryCache(); const query = gql` query AllPeople { diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -3527,7 +3527,6 @@ describe("type policies", function () { it("can handle Relay-style pagination without args", async () => { const cache = new InMemoryCache({ - addTypename: false, typePolicies: { Query: { fields: { @@ -3606,6 +3605,7 @@ describe("type policies", function () { result: { 
data: { todos: { + __typename: "TodosConnection", totalCount: 1292, }, }, @@ -3619,6 +3619,7 @@ describe("type policies", function () { result: { data: { todos: { + __typename: "TodosConnection", edges: secondEdges, pageInfo: secondPageInfo, totalCount: 1292, @@ -3633,6 +3634,7 @@ describe("type policies", function () { result: { data: { todos: { + __typename: "TodosConnection", totalCount: 1293, extraMetaData: "extra", }, @@ -3652,6 +3654,7 @@ describe("type policies", function () { networkStatus: NetworkStatus.ready, data: { todos: { + __typename: "TodosConnection", totalCount: 1292, }, }, @@ -3662,6 +3665,7 @@ describe("type policies", function () { ROOT_QUERY: { __typename: "Query", todos: { + __typename: "TodosConnection", edges: [], pageInfo: { endCursor: "", @@ -3684,6 +3688,7 @@ describe("type policies", function () { networkStatus: NetworkStatus.ready, data: { todos: { + __typename: "TodosConnection", edges: secondEdges, pageInfo: secondPageInfo, totalCount: 1292, @@ -3700,6 +3705,7 @@ describe("type policies", function () { networkStatus: NetworkStatus.ready, data: { todos: { + __typename: "TodosConnection", totalCount: 1293, extraMetaData: "extra", }, @@ -3711,7 +3717,6 @@ describe("type policies", function () { it("can handle Relay-style pagination", async () => { const cache = new InMemoryCache({ - addTypename: false, typePolicies: { Query: { fields: { diff --git a/src/core/__tests__/ApolloClient/general.test.ts b/src/core/__tests__/ApolloClient/general.test.ts --- a/src/core/__tests__/ApolloClient/general.test.ts +++ b/src/core/__tests__/ApolloClient/general.test.ts @@ -37,7 +37,7 @@ import { waitFor } from "@testing-library/react"; import { wait } from "../../../testing/core"; import { ApolloClient, ApolloQueryResult } from "../../../core"; import { mockFetchQuery } from "../ObservableQuery"; -import { Concast, print } from "../../../utilities"; +import { addTypenameToDocument, Concast, print } from "../../../utilities"; import { mockDeferStream, ObservableStream, @@ -61,7 +61,7 @@ describe("ApolloClient", () => { delay?: number; }) => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, result, error, delay }, ]), @@ -253,7 +253,7 @@ describe("ApolloClient", () => { const client = new ApolloClient({ link: new MockLink([{ request: { query }, error }]), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const observable = client.watchQuery({ query }); @@ -347,7 +347,7 @@ describe("ApolloClient", () => { const client = new ApolloClient({ link: mockedSingleLink, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const observableQuery = client.watchQuery({ @@ -420,7 +420,7 @@ describe("ApolloClient", () => { const client = new ApolloClient({ link: mockedSingleLink, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), defaultOptions: { watchQuery: { fetchPolicy: "cache-and-network", @@ -487,7 +487,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([{ request: { query }, result: expResult }]), }); const handle = client.watchQuery({ @@ -548,7 +548,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, @@ -673,7 
+673,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, @@ -749,7 +749,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, result: { data: data1 } }, { request, result: { data: data2 } }, @@ -813,7 +813,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, result: { data: data1 } }, { request, result: { data: data2 } }, @@ -875,7 +875,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([{ request, result: { data: data1 } }]), }); @@ -1076,7 +1076,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, result: { data: data1 } }, { request, result: { data: data2 } }, @@ -1120,7 +1120,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, result: { data: data1 } }, { request, result: { data: data2 } }, @@ -1189,7 +1189,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query }, @@ -1273,7 +1273,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query }, @@ -1343,7 +1343,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -1424,7 +1424,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -1475,7 +1475,7 @@ describe("ApolloClient", () => { `; const data = { list: [null, { value: 1 }] }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([{ request: { query }, result: { data } }]), }); const observable = client.watchQuery({ query }); @@ -1517,7 +1517,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: primeQuery }, result: { data: data1 } }, ]), @@ -1551,7 +1551,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: mutation }, @@ -1573,7 +1573,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: mutation }, @@ -1599,7 +1599,7 @@ describe("ApolloClient", () => { const errors = [new GraphQLError("foo")]; const client = 
new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: mutation }, @@ -1621,7 +1621,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: mutation, variables: { listId: "1" } }, @@ -1659,7 +1659,6 @@ describe("ApolloClient", () => { const client = new ApolloClient({ cache: new InMemoryCache({ dataIdFromObject: getIdField, - addTypename: false, }), link: new MockLink([ { @@ -1697,7 +1696,6 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ cache: new InMemoryCache({ - addTypename: false, dataIdFromObject: getIdField, }), link: new MockLink([ @@ -1738,7 +1736,6 @@ describe("ApolloClient", () => { const client = new ApolloClient({ cache: new InMemoryCache({ - addTypename: false, dataIdFromObject: getIdField, }), link: new MockLink([ @@ -1790,7 +1787,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, @@ -1868,7 +1865,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, @@ -1913,7 +1910,7 @@ describe("ApolloClient", () => { it("warns if you forget the template literal tag", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); expect(() => { void client.query({ @@ -2037,7 +2034,7 @@ describe("ApolloClient", () => { await expect( new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([{ request: { query }, error: networkError }]), }).query({ query }) ).rejects.toEqual(new ApolloError({ networkError })); @@ -2055,7 +2052,7 @@ describe("ApolloClient", () => { const graphQLErrors = [new GraphQLError("GraphQL error")]; await expect( new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -2082,7 +2079,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -2127,7 +2124,7 @@ describe("ApolloClient", () => { }; const observable = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -2172,7 +2169,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -2222,7 +2219,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -2299,7 +2296,6 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ cache: new InMemoryCache({ - addTypename: false, dataIdFromObject: (object) => { if (object.__typename && object.id) { return object.__typename + "__" + object.id; @@ -2390,7 +2386,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new 
InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, @@ -2707,7 +2703,6 @@ describe("ApolloClient", () => { let mergeCount = 0; const client = new ApolloClient({ cache: new InMemoryCache({ - addTypename: false, typePolicies: { Query: { fields: { @@ -2791,7 +2786,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request, result: firstResult }, { request, result: secondResult }, @@ -2921,7 +2916,7 @@ describe("ApolloClient", () => { ]; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink(mockedResponses), }); const queryManager = client["queryManager"]; @@ -2990,7 +2985,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3051,7 +3046,7 @@ describe("ApolloClient", () => { const client = new ApolloClient({ ssrMode: true, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3136,7 +3131,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, @@ -3228,7 +3223,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3299,7 +3294,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3372,7 +3367,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3433,7 +3428,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3490,7 +3485,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3587,7 +3582,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -3645,7 +3640,7 @@ describe("ApolloClient", () => { it("should change the store state to an empty state", () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -3696,7 +3691,7 @@ describe("ApolloClient", () => { }) ); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); const observable = client.watchQuery({ query }); @@ -3751,7 +3746,7 @@ describe("ApolloClient", () => { ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new 
InMemoryCache(), link, }); const observable = client.watchQuery({ query }); @@ -3804,7 +3799,7 @@ describe("ApolloClient", () => { ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -3858,7 +3853,7 @@ describe("ApolloClient", () => { ); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -3890,7 +3885,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -3932,7 +3927,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -3962,7 +3957,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -3998,7 +3993,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -4034,7 +4029,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -4142,7 +4137,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -4236,7 +4231,7 @@ describe("ApolloClient", () => { }) ); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); const observable = client.watchQuery({ query }); @@ -4289,7 +4284,7 @@ describe("ApolloClient", () => { ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); const observable = client.watchQuery({ query }); @@ -4338,7 +4333,7 @@ describe("ApolloClient", () => { ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -4385,7 +4380,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -4419,7 +4414,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -4450,7 +4445,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -4486,7 +4481,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -4522,7 +4517,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); @@ -4559,7 +4554,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + 
cache: new InMemoryCache(), link: new MockLink([]), }); @@ -4609,7 +4604,7 @@ describe("ApolloClient", () => { ); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -4666,7 +4661,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -4736,7 +4731,7 @@ describe("ApolloClient", () => { fortuneCookie: "Buy it", }; const result = await new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -4775,7 +4770,7 @@ describe("ApolloClient", () => { const fullData = { fortuneCookie, author }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -4861,7 +4856,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: testQuery }, @@ -4933,7 +4928,7 @@ describe("ApolloClient", () => { b: { x2: 3, y2: 2, z2: 1 }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, @@ -5024,7 +5019,7 @@ describe("ApolloClient", () => { }; const variables = { id: "1234" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -5106,7 +5101,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -5188,7 +5183,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -5263,7 +5258,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -5339,7 +5334,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -5417,7 +5412,7 @@ describe("ApolloClient", () => { const variables = { id: "1234" }; const mutationVariables = { id: "2345" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -5510,7 +5505,7 @@ describe("ApolloClient", () => { const variables = { id: "1234" }; const mutationVariables = { id: "2345" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -5603,7 +5598,7 @@ describe("ApolloClient", () => { const variables = { id: "1234" }; const mutationVariables = { id: "2345" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -5694,7 +5689,7 @@ describe("ApolloClient", () => { }, }; 
const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -5765,7 +5760,7 @@ describe("ApolloClient", () => { }, }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -5844,7 +5839,7 @@ describe("ApolloClient", () => { }; const variables = { id: "1234" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -5925,7 +5920,7 @@ describe("ApolloClient", () => { }; const variables = { id: "1234" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -6020,7 +6015,7 @@ describe("ApolloClient", () => { function makeClient() { return new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -6249,7 +6244,7 @@ describe("ApolloClient", () => { const variables = { id: "1234" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -6349,7 +6344,7 @@ describe("ApolloClient", () => { const variables = { id: "1234" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -6445,7 +6440,7 @@ describe("ApolloClient", () => { const variables = { id: "1234" }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -6546,7 +6541,7 @@ describe("ApolloClient", () => { const refetchError = new Error("Refetch failed"); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -6659,7 +6654,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -6721,7 +6716,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, ...clientAwareness, }); @@ -6750,13 +6745,13 @@ describe("ApolloClient", () => { } `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, result: { data: { - author: { firstName: "John" }, + author: { __typename: "Author", firstName: "John" }, }, }, }, @@ -6769,7 +6764,7 @@ describe("ApolloClient", () => { // without reaching into internal state expect( client["queryManager"]["inFlightLinkObservables"].peek( - print(query), + print(addTypenameToDocument(query)), "{}" ) ).toEqual({ @@ -6787,7 +6782,7 @@ describe("ApolloClient", () => { `; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query }, @@ -6807,7 +6802,7 @@ describe("ApolloClient", () => { // without checking internal state expect( 
client["queryManager"]["inFlightLinkObservables"].peek( - print(query), + print(addTypenameToDocument(query)), "{}" ) ).toBeUndefined(); @@ -6985,7 +6980,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, @@ -7057,7 +7052,7 @@ describe("ApolloClient", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query: query1 }, diff --git a/src/core/__tests__/ApolloClient/links.test.ts b/src/core/__tests__/ApolloClient/links.test.ts --- a/src/core/__tests__/ApolloClient/links.test.ts +++ b/src/core/__tests__/ApolloClient/links.test.ts @@ -49,7 +49,7 @@ describe("Link interactions", () => { const mockLink = new MockSubscriptionLink(); const link = ApolloLink.from([evictionLink, mockLink]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -93,7 +93,7 @@ describe("Link interactions", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -165,7 +165,7 @@ describe("Link interactions", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -236,7 +236,7 @@ describe("Link interactions", () => { const mockLink = new MockSubscriptionLink(); const link = ApolloLink.from([evictionLink, mockLink]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -265,7 +265,7 @@ describe("Link interactions", () => { const mockLink = new MockSubscriptionLink(); const link = ApolloLink.from([evictionLink, mockLink]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -361,6 +361,7 @@ describe("Link interactions", () => { books { id title + __typename } } `; @@ -380,7 +381,7 @@ describe("Link interactions", () => { const client = new ApolloClient({ link, - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query }); diff --git a/src/core/__tests__/ApolloClient/multiple-results.test.ts b/src/core/__tests__/ApolloClient/multiple-results.test.ts --- a/src/core/__tests__/ApolloClient/multiple-results.test.ts +++ b/src/core/__tests__/ApolloClient/multiple-results.test.ts @@ -35,7 +35,7 @@ describe("mutiple results", () => { }; const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -93,7 +93,7 @@ describe("mutiple results", () => { }; const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -164,7 +164,7 @@ describe("mutiple results", () => { }; const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -229,7 +229,7 @@ describe("mutiple results", () => { }; const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), 
link, }); @@ -294,7 +294,7 @@ describe("mutiple results", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -91,7 +91,7 @@ describe("ObservableQuery", () => { describe("to change pollInterval", () => { it("starts polling if goes from 0 -> something", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -139,7 +139,7 @@ describe("ObservableQuery", () => { it("stops polling if goes from something -> 0", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -178,7 +178,7 @@ describe("ObservableQuery", () => { it("can change from x>0 to y>0", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -245,7 +245,7 @@ describe("ObservableQuery", () => { const variables2 = { first: 1 }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { @@ -316,7 +316,7 @@ describe("ObservableQuery", () => { const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { @@ -388,7 +388,7 @@ describe("ObservableQuery", () => { const variables2 = { first: 1 }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -450,7 +450,7 @@ describe("ObservableQuery", () => { it("if query is refetched, and an error is returned, no other observer callbacks will be called", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -496,7 +496,7 @@ describe("ObservableQuery", () => { it("does a network request if fetchPolicy becomes networkOnly", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -558,7 +558,7 @@ describe("ObservableQuery", () => { ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); // fetch first data from server @@ -620,7 +620,7 @@ describe("ObservableQuery", () => { ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -680,7 +680,7 @@ describe("ObservableQuery", () => { }, ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); const observable = client.watchQuery({ @@ -734,7 +734,7 @@ describe("ObservableQuery", () => { }, ]); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link, }); @@ -766,7 
+766,7 @@ describe("ObservableQuery", () => { it("returns a promise which eventually returns data", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -820,7 +820,7 @@ describe("ObservableQuery", () => { describe("setVariables", () => { it("reruns query if the variables change", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -869,7 +869,7 @@ describe("ObservableQuery", () => { it("does invalidate the currentResult data if the variables change", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -952,7 +952,7 @@ describe("ObservableQuery", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -1001,7 +1001,7 @@ describe("ObservableQuery", () => { it("does not invalidate the currentResult errors if the variables change", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -1058,7 +1058,7 @@ describe("ObservableQuery", () => { it("does not perform a query when unsubscribed if variables change", async () => { // Note: no responses, will throw if a query is made const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([]), }); const observable = client.watchQuery({ query, variables }); @@ -1079,7 +1079,7 @@ describe("ObservableQuery", () => { ]; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink(mockedResponses), }); const firstRequest = mockedResponses[0].request; @@ -1130,7 +1130,7 @@ describe("ObservableQuery", () => { ]; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink(mockedResponses), }); const firstRequest = mockedResponses[0].request; @@ -1170,7 +1170,7 @@ describe("ObservableQuery", () => { it("does not rerun query if variables do not change", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -1199,7 +1199,7 @@ describe("ObservableQuery", () => { it("handles variables changing while a query is in-flight", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -1246,7 +1246,7 @@ describe("ObservableQuery", () => { ]; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink(mockedResponses), }); const firstRequest = mockedResponses[0].request; @@ -1398,7 +1398,7 @@ describe("ObservableQuery", () => { ]; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink(mockedResponses), }); const firstRequest = mockedResponses[0].request; @@ -1448,7 +1448,7 @@ 
describe("ObservableQuery", () => { const variables2 = { first: 1 }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { @@ -1544,7 +1544,7 @@ describe("ObservableQuery", () => { const variables2 = { first: 1 }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { @@ -1825,7 +1825,7 @@ describe("ObservableQuery", () => { } const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([makeMock("a", "b", "c"), makeMock("d", "e")]), }); const observableWithoutVariables = client.watchQuery({ @@ -1913,8 +1913,8 @@ describe("ObservableQuery", () => { const mocks = [makeMock("a", "b", "c"), makeMock("d", "e")]; const firstRequest = mocks[0].request; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), - link: new MockLink(mocks, true, { showWarnings: false }), + cache: new InMemoryCache(), + link: new MockLink(mocks, { showWarnings: false }), }); const observableWithVarsVar = client.watchQuery({ @@ -2009,7 +2009,7 @@ describe("ObservableQuery", () => { } const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([makeMock("a", "b", "c"), makeMock("d", "e")]), }); @@ -2196,7 +2196,7 @@ describe("ObservableQuery", () => { it("returns the current query status immediately", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2236,7 +2236,7 @@ describe("ObservableQuery", () => { it("returns results from the store immediately", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2266,7 +2266,7 @@ describe("ObservableQuery", () => { it("returns errors from the store immediately", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2293,7 +2293,7 @@ describe("ObservableQuery", () => { it("returns referentially equal errors", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2325,7 +2325,7 @@ describe("ObservableQuery", () => { it("returns errors with data if errorPolicy is all", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2363,7 +2363,7 @@ describe("ObservableQuery", () => { it("errors out if errorPolicy is none", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2385,7 +2385,7 @@ describe("ObservableQuery", () => { it("errors out if errorPolicy is none and the observable has completed", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2408,7 +2408,7 @@ describe("ObservableQuery", () 
=> { it("ignores errors with data if errorPolicy is ignore", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2458,7 +2458,7 @@ describe("ObservableQuery", () => { }; const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2519,7 +2519,7 @@ describe("ObservableQuery", () => { it("returns loading even if full data is available when using network-only fetchPolicy", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -2575,7 +2575,7 @@ describe("ObservableQuery", () => { it("returns loading on no-cache fetchPolicy queries when calling getCurrentResult", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3131,7 +3131,7 @@ describe("ObservableQuery", () => { it("returns optimistic mutation results from the store", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3300,7 +3300,7 @@ describe("ObservableQuery", () => { const graphQLError = new GraphQLError("oh no!"); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3330,7 +3330,7 @@ describe("ObservableQuery", () => { const networkError = new Error("oh no!"); const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3481,7 +3481,7 @@ describe("ObservableQuery", () => { describe("updateQuery", () => { it("should be able to determine if the previous result is complete", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3543,7 +3543,7 @@ describe("ObservableQuery", () => { it("QueryInfo does not notify for !== but deep-equal results", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, @@ -3620,7 +3620,7 @@ describe("ObservableQuery", () => { it("ObservableQuery#map respects Symbol.species", async () => { const client = new ApolloClient({ - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), link: new MockLink([ { request: { query, variables }, diff --git a/src/core/__tests__/fetchPolicies.ts b/src/core/__tests__/fetchPolicies.ts --- a/src/core/__tests__/fetchPolicies.ts +++ b/src/core/__tests__/fetchPolicies.ts @@ -110,7 +110,7 @@ describe("network-only", () => { const client = new ApolloClient({ link: inspector.concat(createLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query }); @@ -135,7 +135,7 @@ describe("network-only", () => { const client = new ApolloClient({ link: inspector.concat(createLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await 
client.query({ query, fetchPolicy: "network-only" }); @@ -157,7 +157,7 @@ describe("network-only", () => { const client = new ApolloClient({ link: inspector.concat(createFailureLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); let didFail = false; @@ -183,7 +183,7 @@ describe("network-only", () => { const client = new ApolloClient({ link: inspector.concat(createMutationLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query }); @@ -210,7 +210,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); const actualResult = await client.query({ fetchPolicy: "no-cache", query }); @@ -231,7 +231,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query }); @@ -253,7 +253,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query, fetchPolicy: "no-cache" }); @@ -276,7 +276,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createFailureLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); let didFail = false; @@ -302,7 +302,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createMutationLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ query }); @@ -325,7 +325,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); await client.query({ @@ -352,7 +352,7 @@ describe("no-cache", () => { const client = new ApolloClient({ link: inspector.concat(createFailureLink()), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); let didFail = false; @@ -834,9 +834,7 @@ describe("nextFetchPolicy", () => { } (${args.useDefaultOptions ? "" : "not "}using defaults)`, async () => { const client = new ApolloClient({ link: makeLink(), - cache: new InMemoryCache({ - addTypename: true, - }), + cache: new InMemoryCache(), defaultOptions: { watchQuery: args.useDefaultOptions ? 
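For reference, the behavioral change driving the test updates above and below: InMemoryCache no longer accepts an addTypename option, so __typename is always added to query and fragment documents and shows up in read results. A minimal sketch of that round-trip, assuming the public @apollo/client entry point (the repo's own tests use relative imports); the fragment and data mirror the "writes data that can be read back" case earlier in this patch:

import { gql, InMemoryCache } from "@apollo/client";

// `addTypename: false` is no longer a valid option; __typename is always added.
const cache = new InMemoryCache();

const fragment = gql`
  fragment aFragment on query {
    getSomething {
      id
    }
  }
`;

const data = {
  __typename: "query",
  getSomething: { id: "123", __typename: "Something" },
};

cache.writeFragment({ id: "query", fragment, data });

// The fragment document is normalized with __typename fields, so the read
// result deep-equals `data`, including both __typename values.
const result = cache.readFragment({ fragment, id: "query" });

This is why expectations that previously omitted __typename, such as the readFragment results in cache.ts and the Relay-pagination results in policies.ts, now include it.
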
diff --git a/src/link/persisted-queries/__tests__/react.test.tsx b/src/link/persisted-queries/__tests__/react.test.tsx --- a/src/link/persisted-queries/__tests__/react.test.tsx +++ b/src/link/persisted-queries/__tests__/react.test.tsx @@ -14,6 +14,7 @@ import { getDataFromTree } from "../../../react/ssr/getDataFromTree"; import { createPersistedQueryLink as createPersistedQuery, VERSION } from ".."; import { useQuery } from "../../../react"; import { OperationVariables } from "../../../core"; +import { addTypenameToDocument } from "../../../utilities"; function sha256(data: string) { const hash = crypto.createHash("sha256"); @@ -54,7 +55,7 @@ const data2 = { }; const response = JSON.stringify({ data }); const response2 = JSON.stringify({ data: data2 }); -const queryString = print(query); +const queryString = print(addTypenameToDocument(query)); const hash = sha256(queryString); @@ -85,7 +86,7 @@ describe("react application", () => { const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), ssrMode: true, }); @@ -134,7 +135,7 @@ describe("react application", () => { // reset client and try with different input object const client2 = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), ssrMode: true, }); diff --git a/src/react/hooks/__tests__/useSubscription.test.tsx b/src/react/hooks/__tests__/useSubscription.test.tsx --- a/src/react/hooks/__tests__/useSubscription.test.tsx +++ b/src/react/hooks/__tests__/useSubscription.test.tsx @@ -46,7 +46,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result } = renderHook(() => useSubscription(subscription), { @@ -113,7 +113,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onError = jest.fn(); @@ -165,7 +165,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onComplete = jest.fn(); @@ -204,7 +204,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn(); @@ -260,7 +260,7 @@ describe("useSubscription Hook", () => { link.onSetup(onSetup); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn(); @@ -324,7 +324,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result, rerender } = renderHook( ({ skip }) => useSubscription(subscription, { skip }), @@ -420,7 +420,7 @@ describe("useSubscription Hook", () => { }); const client = new ApolloClient({ link: concat(contextLink, link), - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result } = renderHook( @@ -488,7 +488,7 @@ describe("useSubscription Hook", () => { }); const client = new ApolloClient({ link: concat(extensionsLink, link), - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result } = renderHook( @@ -551,7 +551,7 @@ 
describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result } = renderHook( @@ -620,7 +620,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result } = renderHook(() => useSubscription(subscription), { @@ -668,7 +668,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result } = renderHook( @@ -746,7 +746,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); renderHook( @@ -789,7 +789,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn(); @@ -837,7 +837,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onSubscriptionData = jest.fn(); @@ -876,7 +876,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { rerender } = renderHook( @@ -909,7 +909,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); renderHook( @@ -952,7 +952,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onComplete = jest.fn(); @@ -1002,7 +1002,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onSubscriptionComplete = jest.fn(); @@ -1043,7 +1043,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { rerender } = renderHook( @@ -1098,7 +1098,7 @@ describe("useSubscription Hook", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); let renderCount = 0; @@ -1147,7 +1147,7 @@ followed by new in-flight setup", async () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const { result, unmount, rerender } = renderHook( @@ -1798,7 +1798,7 @@ describe("ignoreResults", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); @@ -1872,7 +1872,7 @@ describe("ignoreResults", () => { const link = new 
MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); @@ -1942,7 +1942,7 @@ describe("ignoreResults", () => { link.onSetup(subscriptionCreated); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); @@ -2016,7 +2016,7 @@ describe("ignoreResults", () => { link.onSetup(subscriptionCreated); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); diff --git a/src/react/ssr/__tests__/getDataFromTree.test.tsx b/src/react/ssr/__tests__/getDataFromTree.test.tsx --- a/src/react/ssr/__tests__/getDataFromTree.test.tsx +++ b/src/react/ssr/__tests__/getDataFromTree.test.tsx @@ -48,7 +48,7 @@ describe("SSR", () => { }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); interface Data { @@ -103,7 +103,7 @@ describe("SSR", () => { const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); function App() { diff --git a/src/react/ssr/__tests__/useQuery.test.tsx b/src/react/ssr/__tests__/useQuery.test.tsx --- a/src/react/ssr/__tests__/useQuery.test.tsx +++ b/src/react/ssr/__tests__/useQuery.test.tsx @@ -324,7 +324,7 @@ describe("useQuery Hook SSR", () => { }; const app = ( - <MockedProvider addTypename cache={cache}> + <MockedProvider cache={cache}> <Component /> </MockedProvider> ); diff --git a/src/testing/core/mocking/__tests__/mockLink.ts b/src/testing/core/mocking/__tests__/mockLink.ts --- a/src/testing/core/mocking/__tests__/mockLink.ts +++ b/src/testing/core/mocking/__tests__/mockLink.ts @@ -164,7 +164,7 @@ describe("mockLink", () => { // in the operation before calling the Link, so we have to do the same here // when we call `execute` const defaults = { done: true }; - const link = new MockLink(mocks, false, { showWarnings: false }); + const link = new MockLink(mocks, { showWarnings: false }); { // Non-optional variable is missing, should not match. const stream = new ObservableStream( diff --git a/src/testing/core/mocking/mockClient.ts b/src/testing/core/mocking/mockClient.ts --- a/src/testing/core/mocking/mockClient.ts +++ b/src/testing/core/mocking/mockClient.ts @@ -17,6 +17,6 @@ export function createMockClient<TData>( }).setOnError((error) => { throw error; }), - cache: new InMemoryCache({ addTypename: false }), + cache: new InMemoryCache(), }); } diff --git a/src/testing/core/mocking/mockLink.ts b/src/testing/core/mocking/mockLink.ts --- a/src/testing/core/mocking/mockLink.ts +++ b/src/testing/core/mocking/mockLink.ts @@ -56,27 +56,23 @@ export interface MockLinkOptions { showWarnings?: boolean; } -function requestToKey(request: GraphQLRequest, addTypename: Boolean): string { +function requestToKey(request: GraphQLRequest): string { const queryString = - request.query && - print(addTypename ? 
addTypenameToDocument(request.query) : request.query); + request.query && print(addTypenameToDocument(request.query)); const requestKey = { query: queryString }; return JSON.stringify(requestKey); } export class MockLink extends ApolloLink { public operation!: Operation; - public addTypename: Boolean = true; public showWarnings: boolean = true; private mockedResponsesByKey: { [key: string]: MockedResponse[] } = {}; constructor( mockedResponses: ReadonlyArray<MockedResponse<any, any>>, - addTypename: Boolean = true, options: MockLinkOptions = Object.create(null) ) { super(); - this.addTypename = addTypename; this.showWarnings = options.showWarnings ?? true; if (mockedResponses) { @@ -89,10 +85,7 @@ export class MockLink extends ApolloLink { public addMockedResponse(mockedResponse: MockedResponse) { const normalizedMockedResponse = this.normalizeMockedResponse(mockedResponse); - const key = requestToKey( - normalizedMockedResponse.request, - this.addTypename - ); + const key = requestToKey(normalizedMockedResponse.request); let mockedResponses = this.mockedResponsesByKey[key]; if (!mockedResponses) { mockedResponses = []; @@ -103,7 +96,7 @@ export class MockLink extends ApolloLink { public request(operation: Operation): Observable<FetchResult> | null { this.operation = operation; - const key = requestToKey(operation, this.addTypename); + const key = requestToKey(operation); const unmatchedVars: Array<Record<string, any>> = []; const requestVariables = operation.variables || {}; const mockedResponses = this.mockedResponsesByKey[key]; @@ -266,17 +259,8 @@ export interface MockApolloLink extends ApolloLink { // Pass in multiple mocked responses, so that you can test flows that end up // making multiple queries to the server. -// NOTE: The last arg can optionally be an `addTypename` arg. -export function mockSingleLink(...mockedResponses: Array<any>): MockApolloLink { - // To pull off the potential typename. If this isn't a boolean, we'll just - // set it true later. 
- let maybeTypename = mockedResponses[mockedResponses.length - 1]; - let mocks = mockedResponses.slice(0, mockedResponses.length - 1); - - if (typeof maybeTypename !== "boolean") { - mocks = mockedResponses; - maybeTypename = true; - } - - return new MockLink(mocks, maybeTypename); +export function mockSingleLink( + ...mockedResponses: Array<MockedResponse<any, any>> +): MockApolloLink { + return new MockLink(mockedResponses); } diff --git a/src/testing/react/MockedProvider.tsx b/src/testing/react/MockedProvider.tsx --- a/src/testing/react/MockedProvider.tsx +++ b/src/testing/react/MockedProvider.tsx @@ -12,7 +12,6 @@ import type { ApolloCache } from "../../cache/index.js"; export interface MockedProviderProps<TSerializedCache = {}> { mocks?: ReadonlyArray<MockedResponse<any, any>>; - addTypename?: boolean; defaultOptions?: DefaultOptions; cache?: ApolloCache<TSerializedCache>; resolvers?: Resolvers; @@ -35,16 +34,11 @@ export class MockedProvider extends React.Component< MockedProviderProps, MockedProviderState > { - public static defaultProps: MockedProviderProps = { - addTypename: true, - }; - constructor(props: MockedProviderProps) { super(props); const { mocks, - addTypename, defaultOptions, cache, resolvers, @@ -53,10 +47,10 @@ export class MockedProvider extends React.Component< connectToDevTools = false, } = this.props; const client = new ApolloClient({ - cache: cache || new Cache({ addTypename }), + cache: cache || new Cache(), defaultOptions, connectToDevTools, - link: link || new MockLink(mocks || [], addTypename, { showWarnings }), + link: link || new MockLink(mocks || [], { showWarnings }), resolvers, }); diff --git a/src/testing/react/__tests__/MockedProvider.test.tsx b/src/testing/react/__tests__/MockedProvider.test.tsx --- a/src/testing/react/__tests__/MockedProvider.test.tsx +++ b/src/testing/react/__tests__/MockedProvider.test.tsx @@ -575,7 +575,7 @@ describe("General use", () => { const link = ApolloLink.from([ errorLink, - new MockLink([], true, { showWarnings: false }), + new MockLink([], { showWarnings: false }), ]); render( @@ -637,7 +637,7 @@ describe("General use", () => { }, ]; - const mockLink = new MockLink(mocks, true, { showWarnings: false }); + const mockLink = new MockLink(mocks, { showWarnings: false }); const link = ApolloLink.from([errorLink, mockLink]); const Wrapper = ({ children }: { children: React.ReactNode }) => ( <MockedProvider link={link}>{children}</MockedProvider> @@ -688,7 +688,7 @@ describe("General use", () => { }, ]; - const mockLink = new MockLink(mocks, true, { showWarnings: false }); + const mockLink = new MockLink(mocks, { showWarnings: false }); const link = ApolloLink.from([errorLink, mockLink]); const Wrapper = ({ children }: { children: React.ReactNode }) => ( <MockedProvider link={link}>{children}</MockedProvider> @@ -746,7 +746,7 @@ describe("General use", () => { }, ]; - const mockLink = new MockLink(mocks, true, { showWarnings: false }); + const mockLink = new MockLink(mocks, { showWarnings: false }); const link = ApolloLink.from([errorLink, mockLink]); const Wrapper = ({ children }: { children: React.ReactNode }) => ( <MockedProvider link={link}>{children}</MockedProvider> @@ -811,7 +811,7 @@ describe("General use", () => { }, ]; - const mockLink = new MockLink(mocks, true, { showWarnings: false }); + const mockLink = new MockLink(mocks, { showWarnings: false }); const link = ApolloLink.from([errorLink, mockLink]); const Wrapper = ({ children }: { children: React.ReactNode }) => ( <MockedProvider 
link={link}>{children}</MockedProvider> @@ -982,7 +982,7 @@ describe("General use", () => { }, ]; - const link = new MockLink(mocksDifferentQuery, false, { + const link = new MockLink(mocksDifferentQuery, { showWarnings: false, }); @@ -1008,7 +1008,7 @@ describe("General use", () => { return null; } - const mockLink = new MockLink([], true, { showWarnings: false }); + const mockLink = new MockLink([], { showWarnings: false }); mockLink.setOnError((error) => { expect(error).toMatchSnapshot(); finished = true; @@ -1039,7 +1039,7 @@ describe("General use", () => { return null; } - const mockLink = new MockLink([], true, { showWarnings: false }); + const mockLink = new MockLink([], { showWarnings: false }); mockLink.setOnError(() => { throw new Error("oh no!"); }); diff --git a/src/testing/react/__tests__/mockSubscriptionLink.test.tsx b/src/testing/react/__tests__/mockSubscriptionLink.test.tsx --- a/src/testing/react/__tests__/mockSubscriptionLink.test.tsx +++ b/src/testing/react/__tests__/mockSubscriptionLink.test.tsx @@ -21,7 +21,7 @@ describe("mockSubscriptionLink", () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }), + cache: new Cache(), }); let renderCountA = 0;
[4.0] Deprecate `addTypename` option in `InMemoryCache`
`__typename` is incredibly important to Apollo Client, and especially to `InMemoryCache`, where it allows normalization to work correctly. We'd like to deprecate and remove the `addTypename` option in 4.0.
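A minimal sketch, not taken from the PR itself, of what this deprecation means in practice, assuming the current Apollo Client 3 API (the endpoint URL and the `GetCurrentUser` query are placeholders):

```ts
import { ApolloClient, InMemoryCache, gql } from "@apollo/client";

// Apollo Client 3 (deprecated): tests and demos sometimes turn off automatic
// __typename insertion, which also prevents the cache from identifying and
// normalizing the affected objects.
const legacyCache = new InMemoryCache({ addTypename: false });

// Post-deprecation shape: no option, __typename is always added.
const cache = new InMemoryCache();

const client = new ApolloClient({
  uri: "https://example.com/graphql", // placeholder endpoint
  cache,
});

// Normalization keys off __typename + id, so this entry is stored as "User:1".
cache.writeQuery({
  query: gql`
    query GetCurrentUser {
      currentUser {
        id
        name
      }
    }
  `,
  data: {
    currentUser: { __typename: "User", id: 1, name: "Test User" },
  },
});
```

Without `__typename`, the cache cannot compute an identifier for an object and falls back to storing it unnormalized under its parent field, which is the behavior the removal is meant to eliminate.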
2025-02-14T23:19:51Z
3.13
apollographql/apollo-client
12,300
apollographql__apollo-client-12300
[ "8958" ]
af3a90a05dbdabac9365297478bfa5f0382bcf9c
diff --git a/config/entryPoints.js b/config/entryPoints.js --- a/config/entryPoints.js +++ b/config/entryPoints.js @@ -27,6 +27,7 @@ const entryPoints = [ { dirs: ["testing"], extensions: [".js", ".jsx"] }, { dirs: ["testing", "core"] }, { dirs: ["testing", "experimental"] }, + { dirs: ["testing", "react"] }, { dirs: ["utilities"] }, { dirs: ["utilities", "subscriptions", "relay"] }, { dirs: ["utilities", "subscriptions", "urql"] }, diff --git a/src/index.ts b/src/index.ts --- a/src/index.ts +++ b/src/index.ts @@ -1,2 +1 @@ export * from "./core/index.js"; -export * from "./react/index.js";
diff --git a/docs/source/api/react/testing.md b/docs/source/api/react/testing.md --- a/docs/source/api/react/testing.md +++ b/docs/source/api/react/testing.md @@ -11,7 +11,7 @@ api_reference: true ```js -import { MockedProvider } from "@apollo/client/testing"; +import { MockedProvider } from "@apollo/client/testing/react"; ``` The `MockedProvider` component is a mocked version of [`ApolloProvider`](./hooks/#the-apolloprovider-component) that doesn't send network requests to your API. Instead, it allows you to specify the exact response payload for a given GraphQL operation. This enables you to test your application's operations without communicating with a server. diff --git a/docs/source/development-testing/testing.mdx b/docs/source/development-testing/testing.mdx --- a/docs/source/development-testing/testing.mdx +++ b/docs/source/development-testing/testing.mdx @@ -21,7 +21,8 @@ Let's say we want to test the following `Dog` component, which executes a basic ```jsx title="dog.jsx" import React from "react"; -import { gql, useQuery } from "@apollo/client"; +import { gql } from "@apollo/client"; +import { useQuery } from "@apollo/client/react"; // Make sure that both the query and the component are exported export const GET_DOG_QUERY = gql` @@ -55,7 +56,7 @@ A basic rendering test for the component looks like this (minus mocked responses ```jsx title="dog.test.js" import "@testing-library/jest-dom"; import { render, screen } from "@testing-library/react"; -import { MockedProvider } from "@apollo/client/testing"; +import { MockedProvider } from "@apollo/client/testing/react"; import { GET_DOG_QUERY, Dog } from "./dog"; const mocks = []; // We'll fill this in next @@ -119,7 +120,7 @@ Combining our code above, we get the following complete test: ```jsx title="dog.test.js" import "@testing-library/jest-dom"; import { render, screen } from "@testing-library/react"; -import { MockedProvider } from "@apollo/client/testing"; +import { MockedProvider } from "@apollo/client/testing/react"; import { GET_DOG_QUERY, Dog } from "./dog"; const mocks = [ @@ -317,7 +318,8 @@ The following `DeleteButton` component executes the `DELETE_DOG_MUTATION` to del ```jsx title="delete-dog.jsx" import React from "react"; -import { gql, useMutation } from "@apollo/client"; +import { gql } from "@apollo/client"; +import { useMutation } from "@apollo/client/react"; export const DELETE_DOG_MUTATION = gql` mutation deleteDog($name: String!) { @@ -350,7 +352,7 @@ We can test the initial rendering of this component just like we [tested our `Do import '@testing-library/jest-dom'; import userEvent from '@testing-library/user-event'; import { render, screen } from '@testing-library/react'; -import { MockedProvider } from "@apollo/client/testing"; +import { MockedProvider } from "@apollo/client/testing/react"; import { DeleteButton, DELETE_DOG_MUTATION } from "./delete-dog"; it("should render without error", () => { diff --git a/scripts/memory/tests.js b/scripts/memory/tests.js --- a/scripts/memory/tests.js +++ b/scripts/memory/tests.js @@ -59,10 +59,6 @@ describe("@apollo/client/apollo-client.cjs", () => { assert.strictEqual(typeof bundle.ApolloClient, "function"); assert.strictEqual(typeof bundle.InMemoryCache, "function"); - // TODO This will change in AC4 when we move all React exports to the - // @apollo/client/react entry point (see issue #8190). 
- assert.strictEqual(typeof bundle.ApolloProvider, "function"); - // The CommonJS bundles referred to by the "main" fields in the various // package.json files that we generate during `npm run build` are all // independent, non-overlapping bundles, but apollo-client.cjs is its own diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -4,13 +4,10 @@ exports[`exports of public entry points @apollo/client 1`] = ` Array [ "ApolloCache", "ApolloClient", - "ApolloConsumer", "ApolloError", "ApolloLink", - "ApolloProvider", "Cache", "DocumentTransform", - "DocumentType", "HttpLink", "InMemoryCache", "MissingFieldError", @@ -20,7 +17,6 @@ Array [ "checkFetcher", "concat", "createHttpLink", - "createQueryPreloader", "createSignalIfSupported", "defaultDataIdFromObject", "defaultPrinter", @@ -33,7 +29,6 @@ Array [ "from", "fromError", "fromPromise", - "getApolloContext", "gql", "isApolloError", "isNetworkRequestSettled", @@ -41,10 +36,7 @@ Array [ "makeReference", "makeVar", "mergeOptions", - "operationName", "parseAndCheckHttpResponse", - "parser", - "resetApolloContext", "resetCaches", "rewriteURIForGET", "selectHttpOptionsAndBody", @@ -52,22 +44,9 @@ Array [ "selectURI", "serializeFetchParameter", "setLogVerbosity", - "skipToken", "split", "throwServerError", "toPromise", - "useApolloClient", - "useBackgroundQuery", - "useFragment", - "useLazyQuery", - "useLoadableQuery", - "useMutation", - "useQuery", - "useQueryRefHandlers", - "useReactiveVar", - "useReadQuery", - "useSubscription", - "useSuspenseQuery", ] `; @@ -358,7 +337,6 @@ exports[`exports of public entry points @apollo/client/testing 1`] = ` Array [ "MockLink", "MockSubscriptionLink", - "MockedProvider", "createMockClient", "mockObservableLink", "mockSingleLink", @@ -392,6 +370,12 @@ Array [ ] `; +exports[`exports of public entry points @apollo/client/testing/react 1`] = ` +Array [ + "MockedProvider", +] +`; + exports[`exports of public entry points @apollo/client/utilities 1`] = ` Array [ "AutoCleanedStrongCache", diff --git a/src/__tests__/exports.ts b/src/__tests__/exports.ts --- a/src/__tests__/exports.ts +++ b/src/__tests__/exports.ts @@ -31,6 +31,7 @@ import * as reactSSR from "../react/ssr"; import * as testing from "../testing"; import * as testingCore from "../testing/core"; import * as testingExperimental from "../testing/experimental"; +import * as testingReact from "../testing/react"; import * as utilities from "../utilities"; import * as utilitiesGlobals from "../utilities/globals"; import * as urqlUtilities from "../utilities/subscriptions/urql"; @@ -77,6 +78,7 @@ describe("exports of public entry points", () => { check("@apollo/client/testing", testing); check("@apollo/client/testing/core", testingCore); check("@apollo/client/testing/experimental", testingExperimental); + check("@apollo/client/testing/react", testingReact); check("@apollo/client/utilities", utilities); check("@apollo/client/utilities/globals", utilitiesGlobals); check("@apollo/client/utilities/subscriptions/urql", urqlUtilities); diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -23,7 +23,6 @@ import { MockLink, MockSubscriptionLink, mockSingleLink, - MockedProvider, wait, } from "../../../testing"; import { @@ -70,6 
+69,7 @@ import { disableActEnvironment, useTrackRenders, } from "@testing-library/react-render-stream"; +import { MockedProvider } from "../../../testing/react"; afterEach(() => { jest.useRealTimers(); diff --git a/src/react/hooks/__tests__/useFragment.test.tsx b/src/react/hooks/__tests__/useFragment.test.tsx --- a/src/react/hooks/__tests__/useFragment.test.tsx +++ b/src/react/hooks/__tests__/useFragment.test.tsx @@ -14,7 +14,6 @@ import { UseFragmentResult, useFragment, } from "../useFragment"; -import { MockedProvider } from "../../../testing"; import { ApolloProvider } from "../../context"; import { InMemoryCache, @@ -41,6 +40,7 @@ import { renderHookToSnapshotStream, useTrackRenders, } from "@testing-library/react-render-stream"; +import { MockedProvider } from "../../../testing/react"; describe("useFragment", () => { it("is importable and callable", () => { diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -15,7 +15,6 @@ import { import { Observable } from "../../../utilities"; import { ApolloProvider } from "../../../react"; import { - MockedProvider, mockSingleLink, wait, tick, @@ -31,6 +30,7 @@ import { disableActEnvironment, renderHookToSnapshotStream, } from "@testing-library/react-render-stream"; +import { MockedProvider } from "../../../testing/react"; const IS_REACT_17 = React.version.startsWith("17"); const IS_REACT_18 = React.version.startsWith("18"); diff --git a/src/react/hooks/__tests__/useLoadableQuery.test.tsx b/src/react/hooks/__tests__/useLoadableQuery.test.tsx --- a/src/react/hooks/__tests__/useLoadableQuery.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery.test.tsx @@ -19,8 +19,6 @@ import { split, } from "../../../core"; import { - MockedProvider, - MockedProviderProps, MockedResponse, MockLink, MockSubscriptionLink, @@ -60,6 +58,7 @@ import { useTrackRenders, AsyncRenderFn, } from "@testing-library/react-render-stream"; +import { MockedProvider, MockedProviderProps } from "../../../testing/react"; const IS_REACT_19 = React.version.startsWith("19"); afterEach(() => { diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -19,7 +19,6 @@ import { } from "../../../core"; import { InMemoryCache } from "../../../cache"; import { - MockedProvider, MockSubscriptionLink, mockSingleLink, MockedResponse, @@ -40,6 +39,7 @@ import { } from "@testing-library/react-render-stream"; import { MutationTuple, QueryResult } from "../../types/types"; import { invariant } from "../../../utilities/globals"; +import { MockedProvider } from "../../../testing/react"; describe("useMutation Hook", () => { interface Todo { diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -21,7 +21,6 @@ import { Observable, Reference, concatPagination } from "../../../utilities"; import { ApolloLink } from "../../../link/core"; import { MockLink, - MockedProvider, MockSubscriptionLink, mockSingleLink, tick, @@ -41,6 +40,7 @@ import { renderHookToSnapshotStream, disableActEnvironment, } from "@testing-library/react-render-stream"; +import { MockedProvider } from "../../../testing/react"; const IS_REACT_17 = 
React.version.startsWith("17"); const IS_REACT_18 = React.version.startsWith("18"); diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -38,7 +38,6 @@ import { offsetLimitPagination, } from "../../../utilities"; import { - MockedProvider, MockedResponse, MockSubscriptionLink, MockLink, @@ -66,6 +65,7 @@ import { disableActEnvironment, useTrackRenders, } from "@testing-library/react-render-stream"; +import { MockedProvider } from "../../../testing/react"; const IS_REACT_19 = React.version.startsWith("19"); diff --git a/src/react/ssr/__tests__/useQuery.test.tsx b/src/react/ssr/__tests__/useQuery.test.tsx --- a/src/react/ssr/__tests__/useQuery.test.tsx +++ b/src/react/ssr/__tests__/useQuery.test.tsx @@ -2,17 +2,14 @@ import React from "react"; import { DocumentNode } from "graphql"; import gql from "graphql-tag"; -import { - MockedProvider, - MockedResponse, - mockSingleLink, -} from "../../../testing"; +import { MockedResponse, mockSingleLink } from "../../../testing"; import { ApolloClient } from "../../../core"; import { InMemoryCache } from "../../../cache"; import { ApolloProvider, getApolloContext } from "../../context"; import { useApolloClient, useQuery } from "../../hooks"; import { renderToStringWithData } from ".."; import type { Trie } from "@wry/trie"; +import { MockedProvider } from "../../../testing/react"; describe("useQuery Hook SSR", () => { const CAR_QUERY: DocumentNode = gql` diff --git a/src/testing/index.ts b/src/testing/index.ts --- a/src/testing/index.ts +++ b/src/testing/index.ts @@ -1,4 +1,2 @@ import "../utilities/globals/index.js"; -export type { MockedProviderProps } from "./react/MockedProvider.js"; -export { MockedProvider } from "./react/MockedProvider.js"; export * from "./core/index.js"; diff --git a/src/testing/react/index.ts b/src/testing/react/index.ts new file mode 100644 --- /dev/null +++ b/src/testing/react/index.ts @@ -0,0 +1,2 @@ +export { MockedProvider } from "./MockedProvider.js"; +export type { MockedProviderProps } from "./MockedProvider.js";
Apollo client requires react?!?
Hi guys! Thank you for the great project, it opens up many possibilities for using GraphQL! I have been an early-bird user since the moment Apollo introduced apollo-client! I created a package called `@rxdi/graphql-client` which wraps all of the `apollo-client` logic and initializes it inside a dependency injection container called @rxdi. So far so good, but when I decided to upgrade `apollo-client` to the new `3.*.*` version using the `@apollo/client` package, I got a bundle error that says `Cannot resolve dependency 'react'`:
```
  1 | import { invariant } from "../../utilities/globals/index.js";
> 2 | import * as React from 'react';
    |                        ^
  3 | import { getApolloContext } from "./ApolloContext.js";
  4 | export var ApolloProvider = function (_a) {
  5 |     var client = _a.client, children = _a.children;
```
I investigated a little and found that the barrel export ['index.ts'](https://github.com/apollographql/apollo-client/blob/main/src/index.ts) contains the line `export * from './react';`. I was shocked to see this, so I followed it and found this file: https://github.com/apollographql/apollo-client/blob/main/src/react/context/ApolloProvider.tsx
1. I am confused: why is react not listed as a dependency even though it is required inside the code?
2. Why is there framework-specific code at all, which tightly couples `@apollo/client` to `react`?
3. Do I now need to fork and remove all of this code in order to get the new version of `@apollo/client`? Does that sound right?
4. Can `@apollo/client` now be called a `react` framework for GraphQL, so I can skip it next time I want to install something framework agnostic?
I am not a React user; I have my own framework built with Web Components. If you want, you can check it out using this starter: https://github.com/rxdi/starter-client-side-lit-html By the way, this starter also uses `apollo-client` for interacting with Elon Musk's SpaceX API.
One way to mitigate this issue is to set up `@apollo/client` without the barrel export, using a specific entry point by path instead. I found that `@apollo/client/core` suits me well, so wish me luck! :)
Please don't get me wrong, I like every framework, but I decided to invest time and build my own stack, and I rely on framework-agnostic packages.
Cheers, Kristiyan Tachev
This is something we're trying to fix, but it will have to wait until a major version release, as it would be a breaking change. For now, you will have to continue importing the client from the `@apollo/client/core` entry point. At some point, we do hope to isolate the different entry points so that users of other frameworks do not have to import or download the React layer.

This could be an interesting consideration for AC 4.0, so I'll add it to our milestone for consideration. Cc @bignimbus

Just ran into this while trying to use Apollo with Solid.

Same here when using Apollo with Svelte. It is very annoying to have a peerDependency on React. Importing from `@apollo/client/core` does not really solve the problem, since that module still lives under the `@apollo/client` package, which has a React dependency. The correct way to solve this would be to split `@apollo/client/core` into a dedicated, React-agnostic `@apollo/client-core` library.

@andreasonny83 `react` is only an [optional peer dependency](https://github.com/apollographql/apollo-client/blob/0aecf3e6fbe4fb371f4596305dc2a1cf82022f14/package.json#L77C1-L82C7) - your bundler should not force you to install it; it should only install it correctly in your `node_modules` folder structure, in a way that is discoverable by Apollo Client, if you already have `react` in your dependencies.

@phryneas that is true; however, `peerDependenciesMeta` was only introduced in npm v7, meaning that older versions of npm will raise an error when trying to consume a library with a dependency on Apollo Client. Can I ask why Apollo keeps the Core package under `@apollo/client` rather than exporting it as a separate npm package?

@andreasonny83 npm v7 shipped with Node 15, which went End of Life 2 years ago. Node 16 (the oldest officially supported Node version, which will receive support for one more month at this point) already ships with npm v8. In one month, the oldest officially supported Node version still receiving security updates will be Node 18, which ships with npm 9.5. I know things can't always be super up-to-date, but there has to be a limit to how far back we can support things :/ Also, note that the two worst-case scenarios here are that you either get a warning message that doesn't stop you from doing anything, or install a package of a few kilobytes that will not make it into your bundle in the end.

> Can I ask why Apollo keeps the Core package under @apollo/client rather than exporting it as a separate npm package?

I'm sorry, but that decision predates me. I would guess it was mainly to help users keep their packages up to date, since before `@apollo/client` you had to install about 10 different packages and keep all of them in sync, which was very cumbersome and led to a lot of bugs and frustration.
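To make the suggested workaround concrete, here is a hedged sketch of a framework-agnostic setup that uses the `@apollo/client/core` entry point mentioned above, so the React layer is never imported; the endpoint URL and the query are placeholders:

```ts
import {
  ApolloClient,
  InMemoryCache,
  HttpLink,
  gql,
} from "@apollo/client/core"; // core entry point: no React import anywhere

const client = new ApolloClient({
  link: new HttpLink({ uri: "https://example.com/graphql" }), // placeholder
  cache: new InMemoryCache(),
});

// Usable from Web Components, Svelte, Solid, or any other non-React stack.
client
  .query({ query: gql`query { company { name } }` })
  .then(({ data }) => console.log(data));
```

The patch above takes the first step in the direction the maintainers describe: the root `@apollo/client` entry point stops re-exporting `./react`, and React-specific testing utilities such as `MockedProvider` move to a new `@apollo/client/testing/react` entry point.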
2025-01-22T21:45:52Z
3.12
apollographql/apollo-client
12,224
apollographql__apollo-client-12224
[ "12190" ]
69c1cb6f831941598987185238a299b050a364bd
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -308,7 +308,6 @@ export class ObservableQuery< if ( __DEV__ && !diff.complete && - !this.options.partialRefetch && !result.loading && !result.data && !result.error diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -78,9 +78,6 @@ export interface QueryOptions<TVariables = OperationVariables, TData = any> { /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#returnPartialData:member} */ returnPartialData?: boolean; - /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#partialRefetch:member} */ - partialRefetch?: boolean; - /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#canonizeResults:member} */ canonizeResults?: boolean; } @@ -136,9 +133,6 @@ export interface SharedWatchQueryOptions< /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#returnPartialData:member} */ returnPartialData?: boolean; - /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#partialRefetch:member} */ - partialRefetch?: boolean; - /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#canonizeResults:member} */ canonizeResults?: boolean; diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -11,7 +11,6 @@ * makeWatchQueryOptions * isSSRAllowed * disableNetworkFetches - * partialRefetch * renderPromises * isSyncSSR * callbacks @@ -260,7 +259,6 @@ export function useQueryInternals< const isSyncSSR = !!renderPromises; const disableNetworkFetches = client.disableNetworkFetches; const ssrAllowed = options.ssr !== false && !options.skip; - const partialRefetch = options.partialRefetch; const makeWatchQueryOptions = createMakeWatchQueryOptions( client, @@ -301,7 +299,6 @@ export function useQueryInternals< options, watchQueryOptions, disableNetworkFetches, - partialRefetch, isSyncSSR, { onCompleted: options.onCompleted || noop, @@ -329,7 +326,6 @@ function useObservableSubscriptionResult< options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>>, watchQueryOptions: Readonly<WatchQueryOptions<TVariables, TData>>, disableNetworkFetches: boolean, - partialRefetch: boolean | undefined, isSyncSSR: boolean, callbacks: { onCompleted: (data: MaybeMasked<TData>) => void; @@ -410,7 +406,6 @@ function useObservableSubscriptionResult< resultData, observable, client, - partialRefetch, handleStoreChange, callbackRef.current ); @@ -445,7 +440,6 @@ function useObservableSubscriptionResult< resultData, observable, client, - partialRefetch, handleStoreChange, callbackRef.current ); @@ -468,33 +462,14 @@ function useObservableSubscriptionResult< }; }, - [ - disableNetworkFetches, - isSyncSSR, - observable, - resultData, - partialRefetch, - client, - ] + [disableNetworkFetches, isSyncSSR, observable, resultData, client] ), () => currentResultOverride || - getCurrentResult( - resultData, - observable, - callbackRef.current, - partialRefetch, - client - ), + getCurrentResult(resultData, observable, callbackRef.current, client), () => currentResultOverride || - getCurrentResult( - resultData, - observable, - callbackRef.current, - partialRefetch, - client - ) + getCurrentResult(resultData, observable, callbackRef.current, client) ); } @@ -656,7 +631,6 @@ function setResult<TData, TVariables extends OperationVariables>( resultData: InternalResult<TData, TVariables>, observable: 
ObservableQuery<TData, TVariables>, client: ApolloClient<object>, - partialRefetch: boolean | undefined, forceUpdate: () => void, callbacks: Callbacks<TData> ) { @@ -674,7 +648,7 @@ function setResult<TData, TVariables extends OperationVariables>( } resultData.current = toQueryResult( - unsafeHandlePartialRefetch(nextResult, observable, partialRefetch), + nextResult, resultData.previousData, observable, client @@ -716,21 +690,17 @@ function getCurrentResult<TData, TVariables extends OperationVariables>( resultData: InternalResult<TData, TVariables>, observable: ObservableQuery<TData, TVariables>, callbacks: Callbacks<TData>, - partialRefetch: boolean | undefined, client: ApolloClient<object> ): InternalQueryResult<TData, TVariables> { // Using this.result as a cache ensures getCurrentResult continues returning // the same (===) result object, unless state.setResult has been called, or // we're doing server rendering and therefore override the result below. if (!resultData.current) { - // WARNING: SIDE-EFFECTS IN THE RENDER FUNCTION - // this could call unsafeHandlePartialRefetch setResult( observable.getCurrentResult(), resultData, observable, client, - partialRefetch, () => {}, callbacks ); @@ -779,34 +749,6 @@ export function toQueryResult<TData, TVariables extends OperationVariables>( return queryResult; } -function unsafeHandlePartialRefetch< - TData, - TVariables extends OperationVariables, ->( - result: ApolloQueryResult<MaybeMasked<TData>>, - observable: ObservableQuery<TData, TVariables>, - partialRefetch: boolean | undefined -): ApolloQueryResult<MaybeMasked<TData>> { - // TODO: This code should be removed when the partialRefetch option is - // removed. I was unable to get this hook to behave reasonably in certain - // edge cases when this block was put in an effect. - if ( - result.partial && - partialRefetch && - !result.loading && - (!result.data || Object.keys(result.data).length === 0) && - observable.options.fetchPolicy !== "cache-only" - ) { - observable.refetch(); - return { - ...result, - loading: true, - networkStatus: NetworkStatus.refetch, - }; - } - return result; -} - const ssrDisabledResult = maybeDeepFreeze({ loading: true, data: void 0 as any, diff --git a/src/react/types/types.documentation.ts b/src/react/types/types.documentation.ts --- a/src/react/types/types.documentation.ts +++ b/src/react/types/types.documentation.ts @@ -106,16 +106,6 @@ export interface QueryOptionsDocumentation { */ refetchWritePolicy_suspense: unknown; - /** - * If `true`, causes a query refetch if the query result is detected as partial. - * - * The default value is `false`. - * - * @deprecated - * Setting this option is unnecessary in Apollo Client 3, thanks to a more consistent application of fetch policies. It might be removed in a future release. - */ - partialRefetch: unknown; - /** * Whether to canonize cache results before returning them. Canonization * takes some extra time, but it speeds up future deep equality comparisons.
diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -2724,7 +2724,6 @@ describe("ApolloClient", () => { pollInterval: 100, notifyOnNetworkStatusChange: true, returnPartialData: true, - partialRefetch: true, }, }, }); diff --git a/src/core/__tests__/QueryManager/index.ts b/src/core/__tests__/QueryManager/index.ts --- a/src/core/__tests__/QueryManager/index.ts +++ b/src/core/__tests__/QueryManager/index.ts @@ -635,7 +635,6 @@ describe("QueryManager", () => { watchQuery: { fetchPolicy: "cache-and-network", returnPartialData: false, - partialRefetch: true, notifyOnNetworkStatusChange: true, }, query: { diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -6747,215 +6747,6 @@ describe("useQuery Hook", () => { }); }); - describe("Partial refetch", () => { - it("should attempt a refetch when data is missing and partialRefetch is true", async () => { - using consoleSpy = spyOnConsole("error"); - const query = gql` - { - hello - } - `; - - const link = mockSingleLink( - { - request: { query }, - result: { data: {} }, - delay: 20, - }, - { - request: { query }, - result: { data: { hello: "world" } }, - delay: 20, - } - ); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - }); - - const { result } = renderHook( - () => - useQuery(query, { - partialRefetch: true, - notifyOnNetworkStatusChange: true, - }), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}>{children}</ApolloProvider> - ), - } - ); - - expect(result.current.loading).toBe(true); - expect(result.current.data).toBe(undefined); - expect(result.current.error).toBe(undefined); - expect(result.current.networkStatus).toBe(NetworkStatus.loading); - - await waitFor( - () => { - expect(result.current.networkStatus).toBe(NetworkStatus.refetch); - }, - { interval: 1 } - ); - expect(result.current.loading).toBe(true); - expect(result.current.data).toBe(undefined); - expect(result.current.error).toBe(undefined); - - expect(consoleSpy.error).toHaveBeenCalledTimes(1); - expect(consoleSpy.error.mock.calls[0][0]).toMatch("Missing field"); - - await waitFor( - () => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, - { interval: 1 } - ); - - expect(result.current.loading).toBe(false); - expect(result.current.data).toEqual({ hello: "world" }); - expect(result.current.error).toBe(undefined); - }); - - it("should attempt a refetch when data is missing and partialRefetch is true 2", async () => { - const query = gql` - query people { - allPeople(first: 1) { - people { - name - } - } - } - `; - - const data = { - allPeople: { people: [{ name: "Luke Skywalker" }] }, - }; - - using consoleSpy = spyOnConsole("error"); - const link = mockSingleLink( - { request: { query }, result: { data: {} }, delay: 20 }, - { request: { query }, result: { data }, delay: 20 } - ); - - const client = new ApolloClient({ - link, - cache: new InMemoryCache(), - }); - - const { result } = renderHook( - () => - useQuery(query, { - partialRefetch: true, - notifyOnNetworkStatusChange: true, - }), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}>{children}</ApolloProvider> - ), - } - ); - - expect(result.current.loading).toBe(true); - expect(result.current.data).toBe(undefined); - expect(result.current.error).toBe(undefined); - 
expect(result.current.networkStatus).toBe(NetworkStatus.loading); - - await waitFor( - () => { - expect(result.current.networkStatus).toBe(NetworkStatus.refetch); - }, - { interval: 1 } - ); - expect(result.current.loading).toBe(true); - expect(result.current.data).toBe(undefined); - expect(result.current.error).toBe(undefined); - - expect(consoleSpy.error).toHaveBeenCalledTimes(1); - expect(consoleSpy.error.mock.calls[0][0]).toMatch("Missing field"); - - await waitFor( - () => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, - { interval: 1 } - ); - expect(result.current.loading).toBe(false); - expect(result.current.data).toEqual(data); - expect(result.current.error).toBe(undefined); - }); - - it("should attempt a refetch when data is missing, partialRefetch is true and addTypename is false for the cache", async () => { - using consoleSpy = spyOnConsole("error"); - const query = gql` - { - hello - } - `; - - const link = mockSingleLink( - { - request: { query }, - result: { data: {} }, - delay: 20, - }, - { - request: { query }, - result: { data: { hello: "world" } }, - delay: 20, - } - ); - - const client = new ApolloClient({ - link, - // THIS LINE IS THE ONLY DIFFERENCE FOR THIS TEST - cache: new InMemoryCache({ addTypename: false }), - }); - - const wrapper = ({ children }: any) => ( - <ApolloProvider client={client}>{children}</ApolloProvider> - ); - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => - useQuery(query, { - partialRefetch: true, - notifyOnNetworkStatusChange: true, - }), - { wrapper } - ); - - { - const result = await takeSnapshot(); - expect(result.loading).toBe(true); - expect(result.data).toBe(undefined); - expect(result.error).toBe(undefined); - expect(result.networkStatus).toBe(NetworkStatus.loading); - } - - { - const result = await takeSnapshot(); - expect(result.networkStatus).toBe(NetworkStatus.refetch); - expect(result.loading).toBe(true); - expect(result.error).toBe(undefined); - expect(result.data).toBe(undefined); - } - - const calls = consoleSpy.error.mock.calls; - expect(calls.length).toBe(1); - expect(calls[0][0]).toMatch("Missing field"); - - { - const result = await takeSnapshot(); - expect(result.networkStatus).toBe(NetworkStatus.ready); - expect(result.loading).toBe(false); - expect(result.data).toEqual({ hello: "world" }); - expect(result.error).toBe(undefined); - } - }); - }); - describe("Client Resolvers", () => { it("should receive up to date @client(always: true) fields on entity update", async () => { const query = gql`
[4.0] Remove deprecated `partialRefetch` option https://www.apollographql.com/docs/react/data/queries#queryhookoptions-interface-partialrefetch
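A short sketch of the before/after for this option, assuming the Apollo Client 3 `useQuery` API; the `GET_USER` query and `Profile` component are hypothetical:

```tsx
import * as React from "react";
import { gql, useQuery } from "@apollo/client";

const GET_USER = gql`
  query GetUser {
    currentUser {
      id
      name
    }
  }
`;

function Profile() {
  // Apollo Client 3 (deprecated): force a refetch when the cache returns a
  // partial result.
  //   useQuery(GET_USER, { partialRefetch: true });
  //
  // After removal no replacement is needed; per the deprecation note, the
  // more consistent application of fetch policies already covers this case.
  const { data, loading } = useQuery(GET_USER);

  if (loading) return null;
  return <div>{data?.currentUser.name}</div>;
}
```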
2024-12-13T20:28:02Z
3.12
apollographql/apollo-client
12,254
apollographql__apollo-client-12254
[ "12206" ]
51e6c0f8657d20cedc570c6e9a244f877047dd61
diff --git a/src/link/http/selectHttpOptionsAndBody.ts b/src/link/http/selectHttpOptionsAndBody.ts --- a/src/link/http/selectHttpOptionsAndBody.ts +++ b/src/link/http/selectHttpOptionsAndBody.ts @@ -105,7 +105,7 @@ const defaultHttpOptions: HttpQueryOptions = { const defaultHeaders = { // headers are case insensitive (https://stackoverflow.com/a/5259004) - accept: "*/*", + accept: "application/graphql-response+json", // The content-type header describes the type of the body of the request, and // so it typically only is sent with requests that actually have bodies. One // could imagine that Apollo Client would remove this header when constructing
diff --git a/src/link/batch-http/__tests__/batchHttpLink.ts b/src/link/batch-http/__tests__/batchHttpLink.ts --- a/src/link/batch-http/__tests__/batchHttpLink.ts +++ b/src/link/batch-http/__tests__/batchHttpLink.ts @@ -610,7 +610,7 @@ describe("SharedHttpTest", () => { .headers as Record<string, string>; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); }); @@ -628,7 +628,7 @@ describe("SharedHttpTest", () => { .headers as Record<string, string>; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); }); @@ -708,7 +708,7 @@ describe("SharedHttpTest", () => { .headers as Record<string, string>; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -733,7 +733,7 @@ describe("SharedHttpTest", () => { .headers as Record<string, string>; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -758,7 +758,7 @@ describe("SharedHttpTest", () => { const headers: any = fetchMock.lastCall()![1]!.headers; expect(headers.AUTHORIZATION).toBe("1234"); expect(headers["CONTENT-TYPE"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -788,7 +788,7 @@ describe("SharedHttpTest", () => { const headers: any = fetchMock.lastCall()![1]!.headers; expect(headers.AUTHORIZATION).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -813,7 +813,7 @@ describe("SharedHttpTest", () => { const headers: any = fetchMock.lastCall()![1]!.headers; expect(headers.AUTHORIZATION).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -710,7 +710,7 @@ describe("HttpLink", () => { const headers = fetchMock.lastCall()![1]!.headers as any; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -728,7 +728,7 @@ describe("HttpLink", () => { const headers = fetchMock.lastCall()![1]!.headers as any; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); }); @@ -752,7 +752,7 @@ describe("HttpLink", () => { const headers = fetchMock.lastCall()![1]!.headers as any; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + 
expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -776,7 +776,7 @@ describe("HttpLink", () => { const headers = fetchMock.lastCall()![1]!.headers as any; expect(headers.authorization).toBe("1234"); expect(headers["content-type"]).toBe("application/json"); - expect(headers.accept).toBe("*/*"); + expect(headers.accept).toBe("application/graphql-response+json"); }) ); } @@ -1911,7 +1911,7 @@ describe("HttpLink", () => { "/graphql", expect.objectContaining({ headers: { - accept: "*/*", + accept: "application/graphql-response+json", "content-type": "application/json", }, }) diff --git a/src/link/http/__tests__/selectHttpOptionsAndBody.ts b/src/link/http/__tests__/selectHttpOptionsAndBody.ts --- a/src/link/http/__tests__/selectHttpOptionsAndBody.ts +++ b/src/link/http/__tests__/selectHttpOptionsAndBody.ts @@ -36,7 +36,7 @@ describe("selectHttpOptionsAndBody", () => { it("the fallbackConfig is used if no other configs are specified", () => { const defaultHeaders = { - accept: "*/*", + accept: "application/graphql-response+json", "content-type": "application/json", };
[4.0] Switch to `application/graphql-response+json` as the default `Accept` header
See https://graphql.org/learn/serving-over-http/ and the spec: https://graphql.github.io/graphql-over-http/draft/#sec-Media-Types
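A hedged sketch of what the change looks like from an application's point of view, assuming the Apollo Client 3 `HttpLink` API and that the link-level `headers` option keeps taking precedence over the built-in defaults; the endpoint URLs are placeholders:

```ts
import { ApolloClient, InMemoryCache, HttpLink } from "@apollo/client/core";

// With the new default, every request advertises the GraphQL-over-HTTP
// media type:
//   accept: application/graphql-response+json
const client = new ApolloClient({
  link: new HttpLink({ uri: "https://example.com/graphql" }), // placeholder
  cache: new InMemoryCache(),
});

// A server that only understands the old behavior can be pinned back by
// overriding the header explicitly on the link.
const legacyClient = new ApolloClient({
  link: new HttpLink({
    uri: "https://legacy.example.com/graphql", // placeholder
    headers: { accept: "*/*" },
  }),
  cache: new InMemoryCache(),
});
```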
2025-01-07T23:37:01Z
3.12
apollographql/apollo-client
12,121
apollographql__apollo-client-12121
[ "11678" ]
3b6b6cc09c070996da045d1b50593ec290a99419
diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -510,6 +510,8 @@ export class ApolloClient<TCacheShape> implements DataProxy { >( options: SubscriptionOptions<TVariables, T> ): Observable<FetchResult<MaybeMasked<T>>> { + const id = this.queryManager.generateQueryId(); + return this.queryManager .startGraphQLSubscription<T>(options) .map((result) => ({ @@ -517,6 +519,8 @@ export class ApolloClient<TCacheShape> implements DataProxy { data: this.queryManager.maskOperation({ document: options.query, data: result.data, + fetchPolicy: options.fetchPolicy, + id, }), })); } diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -1140,6 +1140,8 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, data: this.queryManager.maskOperation({ document: this.query, data: result.data, + fetchPolicy: this.options.fetchPolicy, + id: this.queryId, }), } : result; diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -13,6 +13,7 @@ import { hasDirectives, isExecutionPatchIncrementalResult, isExecutionPatchResult, + isFullyUnmaskedOperation, removeDirectivesFromDocument, } from "../utilities/index.js"; import type { Cache, ApolloCache } from "../cache/index.js"; @@ -51,6 +52,7 @@ import type { MutationOptions, ErrorPolicy, MutationFetchPolicy, + WatchQueryFetchPolicy, } from "./watchQueryOptions.js"; import { ObservableQuery, logMissingFieldErrors } from "./ObservableQuery.js"; import { NetworkStatus, isNetworkRequestInFlight } from "./networkStatus.js"; @@ -118,6 +120,8 @@ interface MaskFragmentOptions<TData> { interface MaskOperationOptions<TData> { document: DocumentNode; data: TData; + id: string; + fetchPolicy?: WatchQueryFetchPolicy; } export interface QueryManagerOptions<TStore> { @@ -362,6 +366,8 @@ export class QueryManager<TStore> { data: self.maskOperation({ document: mutation, data: storeResult.data, + fetchPolicy, + id: mutationId, }) as any, }); } @@ -819,7 +825,12 @@ export class QueryManager<TStore> { (result) => result && { ...result, - data: this.maskOperation({ document: query, data: result.data }), + data: this.maskOperation({ + document: query, + data: result.data, + fetchPolicy: options.fetchPolicy, + id: queryId, + }), } ) .finally(() => this.stopQuery(queryId)); @@ -1554,11 +1565,34 @@ export class QueryManager<TStore> { return results; } + private noCacheWarningsByQueryId = new Set<string>(); + public maskOperation<TData = unknown>( options: MaskOperationOptions<TData> ): MaybeMasked<TData> { const { document, data } = options; + if (__DEV__) { + const { fetchPolicy, id } = options; + const operationType = getOperationDefinition(document)?.operation; + const operationId = (operationType?.[0] ?? "o") + id; + + if ( + this.dataMasking && + fetchPolicy === "no-cache" && + !isFullyUnmaskedOperation(document) && + !this.noCacheWarningsByQueryId.has(operationId) + ) { + this.noCacheWarningsByQueryId.add(operationId); + + invariant.warn( + '[%s]: Fragments masked by data masking are inaccessible when using fetch policy "no-cache". Please add `@unmask` to each fragment spread to access the data.', + getOperationName(document) ?? + `Unnamed ${operationType ?? "operation"}` + ); + } + } + return ( this.dataMasking ? 
maskOperation(data, document, this.cache) diff --git a/src/utilities/graphql/fragments.ts b/src/utilities/graphql/fragments.ts --- a/src/utilities/graphql/fragments.ts +++ b/src/utilities/graphql/fragments.ts @@ -1,5 +1,6 @@ import { invariant, newInvariantError } from "../globals/index.js"; +import { BREAK, visit } from "graphql"; import type { DocumentNode, FragmentDefinitionNode, @@ -143,3 +144,21 @@ export function getFragmentFromSelection( return null; } } + +export function isFullyUnmaskedOperation(document: DocumentNode) { + let isUnmasked = true; + + visit(document, { + FragmentSpread: (node) => { + isUnmasked = + !!node.directives && + node.directives.some((directive) => directive.name.value === "unmask"); + + if (!isUnmasked) { + return BREAK; + } + }, + }); + + return isUnmasked; +} diff --git a/src/utilities/index.ts b/src/utilities/index.ts --- a/src/utilities/index.ts +++ b/src/utilities/index.ts @@ -23,6 +23,7 @@ export { createFragmentMap, getFragmentQueryDocument, getFragmentFromSelection, + isFullyUnmaskedOperation, } from "./graphql/fragments.js"; export {
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -465,6 +465,7 @@ Array [ "isExecutionPatchInitialResult", "isExecutionPatchResult", "isField", + "isFullyUnmaskedOperation", "isInlineFragment", "isMutationOperation", "isNonEmptyArray", diff --git a/src/__tests__/dataMasking.ts b/src/__tests__/dataMasking.ts --- a/src/__tests__/dataMasking.ts +++ b/src/__tests__/dataMasking.ts @@ -26,6 +26,9 @@ import { createFragmentRegistry } from "../cache/inmemory/fragmentRegistry"; import { isSubscriptionOperation } from "../utilities"; import { MaskedDocumentNode } from "../masking"; +const NO_CACHE_WARNING = + '[%s]: Fragments masked by data masking are inaccessible when using fetch policy "no-cache". Please add `@unmask` to each fragment spread to access the data.'; + describe("client.watchQuery", () => { test("masks queries when dataMasking is `true`", async () => { type UserFieldsFragment = { @@ -2317,221 +2320,504 @@ describe("client.watchQuery", () => { }, }); }); -}); -describe("client.watchFragment", () => { - test("masks watched fragments when dataMasking is `true`", async () => { + test("warns and returns masked result when used with no-cache fetch policy", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { - __typename: "User"; - id: number; age: number; - } & { " $fragmentName"?: "UserFieldsFragment" } & { - " $fragmentRefs"?: { NameFieldsFragment: NameFieldsFragment }; - }; + } & { " $fragmentName"?: "UserFieldsFragment" }; - type NameFieldsFragment = { - __typename: "User"; - firstName: string; - lastName: string; - } & { " $fragmentName"?: "NameFieldsFragment" }; + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } - const nameFieldsFragment: MaskedDocumentNode<NameFieldsFragment> = gql` - fragment NameFields on User { - firstName - lastName + const query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields + } } - `; - const userFieldsFragment: MaskedDocumentNode<UserFieldsFragment> = gql` fragment UserFields on User { - id age - ...NameFields } - - ${nameFieldsFragment} `; + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + const client = new ApolloClient({ dataMasking: true, cache: new InMemoryCache(), + link: new MockLink(mocks), }); - client.writeFragment({ - fragment: userFieldsFragment, - fragmentName: "UserFields", - data: { - __typename: "User", - id: 1, - age: 30, - firstName: "Test", - lastName: "User", - }, - }); - - const fragmentStream = new ObservableStream( - client.watchFragment({ - fragment: userFieldsFragment, - fragmentName: "UserFields", - from: { __typename: "User", id: 1 }, - }) - ); - - const { data, complete } = await fragmentStream.takeNext(); - - expect(data).toEqual({ __typename: "User", id: 1, age: 30 }); - expect(complete).toBe(true); - invariant(complete, "Should never be incomplete"); - - const nestedFragmentStream = new ObservableStream( - client.watchFragment({ fragment: nameFieldsFragment, from: data }) - ); + const observable = client.watchQuery({ query, fetchPolicy: "no-cache" }); + const stream = new ObservableStream(observable); { - const { data, complete } = await 
nestedFragmentStream.takeNext(); + const { data } = await stream.takeNext(); - expect(complete).toBe(true); expect(data).toEqual({ - __typename: "User", - firstName: "Test", - lastName: "User", + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + }, }); } + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith(NO_CACHE_WARNING, "MaskedQuery"); }); - test("does not mask watched fragments when dataMasking is disabled", async () => { + test("does not warn on no-cache queries when data masking is disabled", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { - __typename: "User"; - id: number; age: number; - } & { " $fragmentName"?: "UserFieldsFragment" } & { - " $fragmentRefs"?: { NameFieldsFragment: NameFieldsFragment }; - }; + } & { " $fragmentName"?: "UserFieldsFragment" }; - type NameFieldsFragment = { - __typename: "User"; - firstName: string; - lastName: string; - } & { " $fragmentName"?: "NameFieldsFragment" }; + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } - const nameFieldsFragment: TypedDocumentNode<NameFieldsFragment> = gql` - fragment NameFields on User { - __typename - firstName - lastName + const query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields + } } - `; - const userFieldsFragment: TypedDocumentNode<UserFieldsFragment> = gql` fragment UserFields on User { - __typename - id age - ...NameFields } - - ${nameFieldsFragment} `; + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + const client = new ApolloClient({ dataMasking: false, cache: new InMemoryCache(), + link: new MockLink(mocks), }); - client.writeFragment({ - fragment: userFieldsFragment, - fragmentName: "UserFields", - data: { - __typename: "User", - id: 1, - age: 30, - firstName: "Test", - lastName: "User", - }, - }); - - const fragmentStream = new ObservableStream( - client.watchFragment({ - fragment: userFieldsFragment, - fragmentName: "UserFields", - from: { __typename: "User", id: 1 }, - }) - ); - - const { data, complete } = await fragmentStream.takeNext(); - - expect(data).toEqual({ - __typename: "User", - id: 1, - age: 30, - firstName: "Test", - lastName: "User", - }); - expect(complete).toBe(true); - invariant(complete, "Should never be incomplete"); - - const nestedFragmentStream = new ObservableStream( - client.watchFragment({ fragment: nameFieldsFragment, from: data }) - ); + const observable = client.watchQuery({ query, fetchPolicy: "no-cache" }); + const stream = new ObservableStream(observable); { - const { data, complete } = await nestedFragmentStream.takeNext(); + const { data } = await stream.takeNext(); - expect(complete).toBe(true); expect(data).toEqual({ - __typename: "User", - firstName: "Test", - lastName: "User", + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, }); } + + expect(console.warn).not.toHaveBeenCalled(); }); - test("does not mask watched fragments by default", async () => { + test("does not warn on no-cache queries when all fragments use `@unmask`", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { - __typename: "User"; - id: number; age: number; - } & { " $fragmentName"?: "UserFieldsFragment" } & { - " $fragmentRefs"?: { NameFieldsFragment: 
NameFieldsFragment }; - }; + } & { " $fragmentName"?: "UserFieldsFragment" }; - type NameFieldsFragment = { - __typename: "User"; - firstName: string; - lastName: string; - } & { " $fragmentName"?: "NameFieldsFragment" }; + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } - const nameFieldsFragment: TypedDocumentNode<NameFieldsFragment> = gql` - fragment NameFields on User { - __typename - firstName - lastName + const query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields @unmask + } } - `; - const userFieldsFragment: TypedDocumentNode<UserFieldsFragment> = gql` fragment UserFields on User { - __typename - id age - ...NameFields } - - ${nameFieldsFragment} `; - const client = new ApolloClient({ - cache: new InMemoryCache(), - }); - - client.writeFragment({ - fragment: userFieldsFragment, - fragmentName: "UserFields", - data: { - __typename: "User", - id: 1, - age: 30, - firstName: "Test", + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const observable = client.watchQuery({ query, fetchPolicy: "no-cache" }); + const stream = new ObservableStream(observable); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual({ + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }); + } + + expect(console.warn).not.toHaveBeenCalled(); + }); + + test("warns on no-cache queries when at least one fragment does not use `@unmask`", async () => { + using _ = spyOnConsole("warn"); + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } + + const query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields @unmask + } + } + + fragment UserFields on User { + age + ...ProfileFields + } + + fragment ProfileFields on User { + username + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + username: "testuser", + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const observable = client.watchQuery({ query, fetchPolicy: "no-cache" }); + const stream = new ObservableStream(observable); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual({ + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }); + } + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith(NO_CACHE_WARNING, "MaskedQuery"); + }); +}); + +describe("client.watchFragment", () => { + test("masks watched fragments when dataMasking is `true`", async () => { + type UserFieldsFragment = { + __typename: "User"; + id: number; + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" } & { + " $fragmentRefs"?: { NameFieldsFragment: NameFieldsFragment }; + }; + + type NameFieldsFragment = { + __typename: "User"; + firstName: string; + 
lastName: string; + } & { " $fragmentName"?: "NameFieldsFragment" }; + + const nameFieldsFragment: MaskedDocumentNode<NameFieldsFragment> = gql` + fragment NameFields on User { + firstName + lastName + } + `; + + const userFieldsFragment: MaskedDocumentNode<UserFieldsFragment> = gql` + fragment UserFields on User { + id + age + ...NameFields + } + + ${nameFieldsFragment} + `; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + }); + + client.writeFragment({ + fragment: userFieldsFragment, + fragmentName: "UserFields", + data: { + __typename: "User", + id: 1, + age: 30, + firstName: "Test", + lastName: "User", + }, + }); + + const fragmentStream = new ObservableStream( + client.watchFragment({ + fragment: userFieldsFragment, + fragmentName: "UserFields", + from: { __typename: "User", id: 1 }, + }) + ); + + const { data, complete } = await fragmentStream.takeNext(); + + expect(data).toEqual({ __typename: "User", id: 1, age: 30 }); + expect(complete).toBe(true); + invariant(complete, "Should never be incomplete"); + + const nestedFragmentStream = new ObservableStream( + client.watchFragment({ fragment: nameFieldsFragment, from: data }) + ); + + { + const { data, complete } = await nestedFragmentStream.takeNext(); + + expect(complete).toBe(true); + expect(data).toEqual({ + __typename: "User", + firstName: "Test", + lastName: "User", + }); + } + }); + + test("does not mask watched fragments when dataMasking is disabled", async () => { + type UserFieldsFragment = { + __typename: "User"; + id: number; + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" } & { + " $fragmentRefs"?: { NameFieldsFragment: NameFieldsFragment }; + }; + + type NameFieldsFragment = { + __typename: "User"; + firstName: string; + lastName: string; + } & { " $fragmentName"?: "NameFieldsFragment" }; + + const nameFieldsFragment: TypedDocumentNode<NameFieldsFragment> = gql` + fragment NameFields on User { + __typename + firstName + lastName + } + `; + + const userFieldsFragment: TypedDocumentNode<UserFieldsFragment> = gql` + fragment UserFields on User { + __typename + id + age + ...NameFields + } + + ${nameFieldsFragment} + `; + + const client = new ApolloClient({ + dataMasking: false, + cache: new InMemoryCache(), + }); + + client.writeFragment({ + fragment: userFieldsFragment, + fragmentName: "UserFields", + data: { + __typename: "User", + id: 1, + age: 30, + firstName: "Test", + lastName: "User", + }, + }); + + const fragmentStream = new ObservableStream( + client.watchFragment({ + fragment: userFieldsFragment, + fragmentName: "UserFields", + from: { __typename: "User", id: 1 }, + }) + ); + + const { data, complete } = await fragmentStream.takeNext(); + + expect(data).toEqual({ + __typename: "User", + id: 1, + age: 30, + firstName: "Test", + lastName: "User", + }); + expect(complete).toBe(true); + invariant(complete, "Should never be incomplete"); + + const nestedFragmentStream = new ObservableStream( + client.watchFragment({ fragment: nameFieldsFragment, from: data }) + ); + + { + const { data, complete } = await nestedFragmentStream.takeNext(); + + expect(complete).toBe(true); + expect(data).toEqual({ + __typename: "User", + firstName: "Test", + lastName: "User", + }); + } + }); + + test("does not mask watched fragments by default", async () => { + type UserFieldsFragment = { + __typename: "User"; + id: number; + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" } & { + " $fragmentRefs"?: { NameFieldsFragment: NameFieldsFragment }; + }; + + type 
NameFieldsFragment = { + __typename: "User"; + firstName: string; + lastName: string; + } & { " $fragmentName"?: "NameFieldsFragment" }; + + const nameFieldsFragment: TypedDocumentNode<NameFieldsFragment> = gql` + fragment NameFields on User { + __typename + firstName + lastName + } + `; + + const userFieldsFragment: TypedDocumentNode<UserFieldsFragment> = gql` + fragment UserFields on User { + __typename + id + age + ...NameFields + } + + ${nameFieldsFragment} + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + }); + + client.writeFragment({ + fragment: userFieldsFragment, + fragmentName: "UserFields", + data: { + __typename: "User", + id: 1, + age: 30, + firstName: "Test", lastName: "User", }, }); @@ -3564,27 +3850,288 @@ describe("client.query", () => { link: new MockLink(mocks), }); - const { data, errors } = await client.query({ query, errorPolicy: "all" }); + const { data, errors } = await client.query({ query, errorPolicy: "all" }); + + expect(data).toEqual({ + currentUser: null, + }); + + expect(errors).toEqual([{ message: "User not logged in" }]); + }); + + test("masks fragment data in fields nulled by errors when using errorPolicy `all`", async () => { + const query = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: null, + }, + }, + errors: [{ message: "Could not determine age" }], + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data, errors } = await client.query({ query, errorPolicy: "all" }); + + expect(data).toEqual({ + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + }, + }); + + expect(errors).toEqual([{ message: "Could not determine age" }]); + }); + + test("warns and returns masked result when used with no-cache fetch policy", async () => { + using _ = spyOnConsole("warn"); + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } + + const query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data } = await client.query({ query, fetchPolicy: "no-cache" }); + + expect(data).toEqual({ + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + }, + }); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith(NO_CACHE_WARNING, "MaskedQuery"); + }); + + test("does not warn on no-cache queries when data masking is disabled", async () => { + using _ = spyOnConsole("warn"); + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } + + const 
query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: false, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data } = await client.query({ query, fetchPolicy: "no-cache" }); + + expect(data).toEqual({ + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }); + + expect(console.warn).not.toHaveBeenCalled(); + }); + + test("does not warn on no-cache queries when all fragments use `@unmask`", async () => { + using _ = spyOnConsole("warn"); + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } + + const query: MaskedDocumentNode<Query, never> = gql` + query MaskedQuery { + currentUser { + id + name + ...UserFields @unmask + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data } = await client.query({ query, fetchPolicy: "no-cache" }); expect(data).toEqual({ - currentUser: null, + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, }); - expect(errors).toEqual([{ message: "User not logged in" }]); + expect(console.warn).not.toHaveBeenCalled(); }); - test("masks fragment data in fields nulled by errors when using errorPolicy `all`", async () => { - const query = gql` + test("warns on no-cache queries when at least one fragment does not use `@unmask`", async () => { + using _ = spyOnConsole("warn"); + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } + + const query: MaskedDocumentNode<Query, never> = gql` query MaskedQuery { currentUser { id name - ...UserFields + ...UserFields @unmask } } fragment UserFields on User { age + ...ProfileFields + } + + fragment ProfileFields on User { + username } `; @@ -3597,10 +4144,10 @@ describe("client.query", () => { __typename: "User", id: 1, name: "Test User", - age: null, + age: 30, + username: "testuser", }, }, - errors: [{ message: "Could not determine age" }], }, }, ]; @@ -3611,17 +4158,19 @@ describe("client.query", () => { link: new MockLink(mocks), }); - const { data, errors } = await client.query({ query, errorPolicy: "all" }); + const { data } = await client.query({ query, fetchPolicy: "no-cache" }); expect(data).toEqual({ currentUser: { __typename: "User", id: 1, name: "Test User", + age: 30, }, }); - expect(errors).toEqual([{ message: "Could not determine age" }]); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith(NO_CACHE_WARNING, "MaskedQuery"); }); }); @@ -3810,23 +4359,259 @@ describe("client.subscribe", () => { data: { addedComment: null, }, - 
errors: [{ message: "Something went wrong" }], + errors: [{ message: "Something went wrong" }], + }, + }); + + const error = await stream.takeError(); + + expect(error).toEqual( + new ApolloError({ graphQLErrors: [{ message: "Something went wrong" }] }) + ); + }); + + test("handles errors returned from the subscription when errorPolicy is `all`", async () => { + const subscription = gql` + subscription NewCommentSubscription { + addedComment { + id + ...CommentFields + } + } + + fragment CommentFields on Comment { + comment + author + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link, + }); + + const observable = client.subscribe({ + query: subscription, + errorPolicy: "all", + }); + const stream = new ObservableStream(observable); + + link.simulateResult({ + result: { + data: { + addedComment: null, + }, + errors: [{ message: "Something went wrong" }], + }, + }); + + const { data, errors } = await stream.takeNext(); + + expect(data).toEqual({ addedComment: null }); + expect(errors).toEqual([{ message: "Something went wrong" }]); + }); + + test("masks partial data for errors returned from the subscription when errorPolicy is `all`", async () => { + const subscription = gql` + subscription NewCommentSubscription { + addedComment { + id + ...CommentFields + } + } + + fragment CommentFields on Comment { + comment + author + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link, + }); + + const observable = client.subscribe({ + query: subscription, + errorPolicy: "all", + }); + const stream = new ObservableStream(observable); + + link.simulateResult({ + result: { + data: { + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: null, + }, + }, + errors: [{ message: "Could not get author" }], + }, + }); + + const { data, errors } = await stream.takeNext(); + + expect(data).toEqual({ addedComment: { __typename: "Comment", id: 1 } }); + expect(errors).toEqual([{ message: "Could not get author" }]); + }); + + test("warns and returns masked result when used with no-cache fetch policy", async () => { + using _ = spyOnConsole("warn"); + const subscription = gql` + subscription NewCommentSubscription { + addedComment { + id + ...CommentFields + } + } + + fragment CommentFields on Comment { + comment + author + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link, + }); + + const observable = client.subscribe({ + query: subscription, + fetchPolicy: "no-cache", + }); + const stream = new ObservableStream(observable); + + link.simulateResult({ + result: { + data: { + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: "Test User", + }, + }, + }, + }); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual({ + addedComment: { + __typename: "Comment", + id: 1, + }, + }); + } + + link.simulateResult({ + result: { + data: { + addedComment: { + __typename: "Comment", + id: 2, + comment: "Test comment 2", + author: "Test User", + }, + }, + }, + }); + + { + const { data } = await stream.takeNext(); + + expect(data).toEqual({ + addedComment: { + __typename: "Comment", + id: 2, + }, + }); + } + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + NO_CACHE_WARNING, + "NewCommentSubscription" + ); + }); + 
+ test("does not warn on no-cache queries when data masking is disabled", async () => { + using _ = spyOnConsole("warn"); + const subscription = gql` + subscription NewCommentSubscription { + addedComment { + id + ...CommentFields + } + } + + fragment CommentFields on Comment { + comment + author + } + `; + + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + dataMasking: false, + cache: new InMemoryCache(), + link, + }); + + const observable = client.subscribe({ + query: subscription, + fetchPolicy: "no-cache", + }); + const stream = new ObservableStream(observable); + + link.simulateResult({ + result: { + data: { + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: "Test User", + }, + }, }, }); - const error = await stream.takeError(); + const { data } = await stream.takeNext(); - expect(error).toEqual( - new ApolloError({ graphQLErrors: [{ message: "Something went wrong" }] }) - ); + expect(data).toEqual({ + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: "Test User", + }, + }); + + expect(console.warn).not.toHaveBeenCalled(); }); - test("handles errors returned from the subscription when errorPolicy is `all`", async () => { + test("does not warn on no-cache queries when all fragments use `@unmask`", async () => { + using _ = spyOnConsole("warn"); const subscription = gql` subscription NewCommentSubscription { addedComment { id - ...CommentFields + ...CommentFields @unmask } } @@ -3846,37 +4631,56 @@ describe("client.subscribe", () => { const observable = client.subscribe({ query: subscription, - errorPolicy: "all", + fetchPolicy: "no-cache", }); const stream = new ObservableStream(observable); link.simulateResult({ result: { data: { - addedComment: null, + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: "Test User", + }, }, - errors: [{ message: "Something went wrong" }], }, }); - const { data, errors } = await stream.takeNext(); + const { data } = await stream.takeNext(); - expect(data).toEqual({ addedComment: null }); - expect(errors).toEqual([{ message: "Something went wrong" }]); + expect(data).toEqual({ + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: "Test User", + }, + }); + + expect(console.warn).not.toHaveBeenCalled(); }); - test("masks partial data for errors returned from the subscription when errorPolicy is `all`", async () => { + test("warns on no-cache queries when at least one fragment does not use `@unmask`", async () => { + using _ = spyOnConsole("warn"); const subscription = gql` subscription NewCommentSubscription { addedComment { id - ...CommentFields + ...CommentFields @unmask } } fragment CommentFields on Comment { comment - author + author { + ...AuthorFields + } + } + + fragment AuthorFields on User { + name } `; @@ -3890,7 +4694,7 @@ describe("client.subscribe", () => { const observable = client.subscribe({ query: subscription, - errorPolicy: "all", + fetchPolicy: "no-cache", }); const stream = new ObservableStream(observable); @@ -3901,17 +4705,28 @@ describe("client.subscribe", () => { __typename: "Comment", id: 1, comment: "Test comment", - author: null, + author: { __typename: "User", name: "Test User" }, }, }, - errors: [{ message: "Could not get author" }], }, }); - const { data, errors } = await stream.takeNext(); + const { data } = await stream.takeNext(); - expect(data).toEqual({ addedComment: { __typename: "Comment", id: 1 } }); - expect(errors).toEqual([{ message: 
"Could not get author" }]); + expect(data).toEqual({ + addedComment: { + __typename: "Comment", + id: 1, + comment: "Test comment", + author: { __typename: "User" }, + }, + }); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + NO_CACHE_WARNING, + "NewCommentSubscription" + ); }); }); @@ -4298,10 +5113,197 @@ describe("observableQuery.subscribeToMore", () => { }); } }); -}); +}); + +describe("client.mutate", () => { + test("masks data returned from client.mutate when dataMasking is `true`", async () => { + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Mutation { + updateUser: { + __typename: "User"; + id: number; + name: string; + } & { + " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment }; + }; + } + + const mutation: MaskedDocumentNode<Mutation, never> = gql` + mutation MaskedMutation { + updateUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query: mutation }, + result: { + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data } = await client.mutate({ mutation }); + + expect(data).toEqual({ + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + }, + }); + }); + + test("does not mask data returned from client.mutate when dataMasking is `false`", async () => { + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Mutation { + updateUser: { + __typename: "User"; + id: number; + name: string; + } & { + " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment }; + }; + } + + const mutation: TypedDocumentNode<Mutation, never> = gql` + mutation MaskedMutation { + updateUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query: mutation }, + result: { + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + dataMasking: false, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data } = await client.mutate({ mutation }); + + expect(data).toEqual({ + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }); + }); + + test("does not mask data returned from client.mutate by default", async () => { + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Mutation { + updateUser: { + __typename: "User"; + id: number; + name: string; + } & { + " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment }; + }; + } + + const mutation: TypedDocumentNode<Mutation, never> = gql` + mutation MaskedMutation { + updateUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query: mutation }, + result: { + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + }, + ]; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data } = await client.mutate({ mutation }); + + expect(data).toEqual({ + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }); + }); 
-describe("client.mutate", () => { - test("masks data returned from client.mutate when dataMasking is `true`", async () => { + test("does not mask data passed to update function", async () => { type UserFieldsFragment = { age: number; } & { " $fragmentName"?: "UserFieldsFragment" }; @@ -4346,24 +5348,34 @@ describe("client.mutate", () => { }, ]; + const cache = new InMemoryCache(); const client = new ApolloClient({ dataMasking: true, - cache: new InMemoryCache(), + cache, link: new MockLink(mocks), }); - const { data } = await client.mutate({ mutation }); + const update = jest.fn(); + await client.mutate({ mutation, update }); - expect(data).toEqual({ - updateUser: { - __typename: "User", - id: 1, - name: "Test User", + expect(update).toHaveBeenCalledTimes(1); + expect(update).toHaveBeenCalledWith( + cache, + { + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, }, - }); + { context: undefined, variables: {} } + ); }); - test("does not mask data returned from client.mutate when dataMasking is `false`", async () => { + test("handles errors returned when using errorPolicy `none`", async () => { type UserFieldsFragment = { age: number; } & { " $fragmentName"?: "UserFieldsFragment" }; @@ -4378,7 +5390,7 @@ describe("client.mutate", () => { }; } - const mutation: TypedDocumentNode<Mutation, never> = gql` + const mutation: MaskedDocumentNode<Mutation, never> = gql` mutation MaskedMutation { updateUser { id @@ -4396,37 +5408,83 @@ describe("client.mutate", () => { { request: { query: mutation }, result: { - data: { - updateUser: { - __typename: "User", - id: 1, - name: "Test User", - age: 30, - }, - }, + errors: [{ message: "User not logged in" }], }, }, ]; const client = new ApolloClient({ - dataMasking: false, + dataMasking: true, cache: new InMemoryCache(), link: new MockLink(mocks), }); - const { data } = await client.mutate({ mutation }); + await expect( + client.mutate({ mutation, errorPolicy: "none" }) + ).rejects.toEqual( + new ApolloError({ + graphQLErrors: [{ message: "User not logged in" }], + }) + ); + }); - expect(data).toEqual({ - updateUser: { - __typename: "User", - id: 1, - name: "Test User", - age: 30, + test("handles errors returned when using errorPolicy `all`", async () => { + type UserFieldsFragment = { + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Mutation { + updateUser: + | ({ + __typename: "User"; + id: number; + name: string; + } & { + " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment }; + }) + | null; + } + + const mutation: MaskedDocumentNode<Mutation, never> = gql` + mutation MaskedMutation { + updateUser { + id + name + ...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query: mutation }, + result: { + data: { updateUser: null }, + errors: [{ message: "User not logged in" }], + }, }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { data, errors } = await client.mutate({ + mutation, + errorPolicy: "all", }); + + expect(data).toEqual({ updateUser: null }); + expect(errors).toEqual([{ message: "User not logged in" }]); }); - test("does not mask data returned from client.mutate by default", async () => { + test("masks fragment data in fields nulled by errors when using errorPolicy `all`", async () => { type UserFieldsFragment = { age: number; } & { " $fragmentName"?: "UserFieldsFragment" }; @@ -4441,7 +5499,7 @@ 
describe("client.mutate", () => { }; } - const mutation: TypedDocumentNode<Mutation, never> = gql` + const mutation: MaskedDocumentNode<Mutation, never> = gql` mutation MaskedMutation { updateUser { id @@ -4464,31 +5522,38 @@ describe("client.mutate", () => { __typename: "User", id: 1, name: "Test User", - age: 30, + age: null, }, }, + errors: [{ message: "Could not determine age" }], }, }, ]; const client = new ApolloClient({ + dataMasking: true, cache: new InMemoryCache(), link: new MockLink(mocks), }); - const { data } = await client.mutate({ mutation }); + const { data, errors } = await client.mutate({ + mutation, + errorPolicy: "all", + }); expect(data).toEqual({ updateUser: { __typename: "User", id: 1, name: "Test User", - age: 30, }, }); + + expect(errors).toEqual([{ message: "Could not determine age" }]); }); - test("does not mask data passed to update function", async () => { + test("warns and returns masked result when used with no-cache fetch policy", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { age: number; } & { " $fragmentName"?: "UserFieldsFragment" }; @@ -4533,34 +5598,31 @@ describe("client.mutate", () => { }, ]; - const cache = new InMemoryCache(); const client = new ApolloClient({ dataMasking: true, - cache, + cache: new InMemoryCache(), link: new MockLink(mocks), }); - const update = jest.fn(); - await client.mutate({ mutation, update }); + const { data } = await client.mutate({ mutation, fetchPolicy: "no-cache" }); - expect(update).toHaveBeenCalledTimes(1); - expect(update).toHaveBeenCalledWith( - cache, - { - data: { - updateUser: { - __typename: "User", - id: 1, - name: "Test User", - age: 30, - }, - }, + expect(data).toEqual({ + updateUser: { + __typename: "User", + id: 1, + name: "Test User", }, - { context: undefined, variables: {} } + }); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + NO_CACHE_WARNING, + "MaskedMutation" ); }); - test("handles errors returned when using errorPolicy `none`", async () => { + test("does not warn on no-cache queries when data masking is disabled", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { age: number; } & { " $fragmentName"?: "UserFieldsFragment" }; @@ -4593,41 +5655,52 @@ describe("client.mutate", () => { { request: { query: mutation }, result: { - errors: [{ message: "User not logged in" }], + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, }, }, ]; const client = new ApolloClient({ - dataMasking: true, + dataMasking: false, cache: new InMemoryCache(), link: new MockLink(mocks), }); - await expect( - client.mutate({ mutation, errorPolicy: "none" }) - ).rejects.toEqual( - new ApolloError({ - graphQLErrors: [{ message: "User not logged in" }], - }) - ); + const { data } = await client.mutate({ mutation, fetchPolicy: "no-cache" }); + + expect(data).toEqual({ + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }); + + expect(console.warn).not.toHaveBeenCalled(); }); - test("handles errors returned when using errorPolicy `all`", async () => { + test("does not warn on no-cache queries when all fragments use `@unmask`", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { age: number; } & { " $fragmentName"?: "UserFieldsFragment" }; interface Mutation { - updateUser: - | ({ - __typename: "User"; - id: number; - name: string; - } & { - " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment }; - }) - | null; + 
updateUser: { + __typename: "User"; + id: number; + name: string; + } & { + " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment }; + }; } const mutation: MaskedDocumentNode<Mutation, never> = gql` @@ -4635,7 +5708,7 @@ describe("client.mutate", () => { updateUser { id name - ...UserFields + ...UserFields @unmask } } @@ -4648,8 +5721,14 @@ describe("client.mutate", () => { { request: { query: mutation }, result: { - data: { updateUser: null }, - errors: [{ message: "User not logged in" }], + data: { + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, }, }, ]; @@ -4660,19 +5739,30 @@ describe("client.mutate", () => { link: new MockLink(mocks), }); - const { data, errors } = await client.mutate({ - mutation, - errorPolicy: "all", + const { data } = await client.mutate({ mutation, fetchPolicy: "no-cache" }); + + expect(data).toEqual({ + updateUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, }); - expect(data).toEqual({ updateUser: null }); - expect(errors).toEqual([{ message: "User not logged in" }]); + expect(console.warn).not.toHaveBeenCalled(); }); - test("masks fragment data in fields nulled by errors when using errorPolicy `all`", async () => { + test("warns on no-cache queries when at least one fragment does not use `@unmask`", async () => { + using _ = spyOnConsole("warn"); type UserFieldsFragment = { age: number; - } & { " $fragmentName"?: "UserFieldsFragment" }; + } & { " $fragmentName"?: "UserFieldsFragment" } & { + " $fragmentRefs"?: { ProfileFieldsFragment: ProfileFieldsFragment }; + }; + type ProfileFieldsFragment = { + username: number; + } & { " $fragmentName"?: "ProfileFieldsFragment" }; interface Mutation { updateUser: { @@ -4689,12 +5779,17 @@ describe("client.mutate", () => { updateUser { id name - ...UserFields + ...UserFields @unmask } } fragment UserFields on User { age + ...ProfileFieldsFragment + } + + fragment ProfileFieldsFragment on User { + username } `; @@ -4707,10 +5802,10 @@ describe("client.mutate", () => { __typename: "User", id: 1, name: "Test User", - age: null, + age: 30, + username: "testuser", }, }, - errors: [{ message: "Could not determine age" }], }, }, ]; @@ -4721,20 +5816,22 @@ describe("client.mutate", () => { link: new MockLink(mocks), }); - const { data, errors } = await client.mutate({ - mutation, - errorPolicy: "all", - }); + const { data } = await client.mutate({ mutation, fetchPolicy: "no-cache" }); expect(data).toEqual({ updateUser: { __typename: "User", id: 1, name: "Test User", + age: 30, }, }); - expect(errors).toEqual([{ message: "Could not determine age" }]); + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + NO_CACHE_WARNING, + "MaskedMutation" + ); }); });
[Data masking] Allow `useFragment` to work with `no-cache` queries

Subject to change based on technical exploration.
As a starting point, we are going to leave `useFragment` alone, because introducing support for `no-cache` would turn it into a "polymorphic" hook, which might be confusing. Instead, we plan to warn when `no-cache` is used with data masking and require `@unmask` on the fragment spread in order to get the full data result. This option gives us the most flexibility to change the behavior later in a non-breaking way if it turns out to be problematic.
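A minimal sketch of the planned behavior, based on the tests added in this change (the query, fragment, and endpoint names here are illustrative, not part of the actual patch): with `dataMasking: true`, a `no-cache` operation logs a warning and returns masked data unless every fragment spread carries `@unmask`, in which case the full result is returned and no warning is logged.

```ts
import { ApolloClient, InMemoryCache, gql } from "@apollo/client";

// Without `@unmask` on the spread, running this query with
// `fetchPolicy: "no-cache"` and `dataMasking: true` warns and strips the
// fragment's fields (here `age`) from the returned data.
const query = gql`
  query CurrentUser {
    currentUser {
      id
      name
      ...UserFields @unmask
    }
  }

  fragment UserFields on User {
    age
  }
`;

const client = new ApolloClient({
  dataMasking: true,
  cache: new InMemoryCache(),
  uri: "https://example.com/graphql", // placeholder endpoint
});

async function run() {
  // Because every fragment spread uses `@unmask`, the full (unmasked)
  // result comes back even though nothing is written to the cache,
  // and no warning is emitted.
  const { data } = await client.query({ query, fetchPolicy: "no-cache" });
  console.log(data.currentUser);
}
```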
2024-11-13T00:03:41Z
3.11
apollographql/apollo-client
12367
apollographql__apollo-client-12367
[ "12100" ]
8b1390bf0050c6bd4d5a32b67c3b96369d0552d4
diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -1,37 +1,165 @@ import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; +import { equal } from "@wry/equality"; import type { DocumentNode } from "graphql"; import * as React from "rehackt"; import type { ApolloClient, + ApolloError, + ApolloQueryResult, + DefaultContext, + ErrorPolicy, + FetchMoreQueryOptions, + MaybeMasked, ObservableQuery, OperationVariables, + RefetchWritePolicy, + SubscribeToMoreFunction, + Unmasked, + UpdateQueryMapFn, + WatchQueryFetchPolicy, WatchQueryOptions, } from "@apollo/client/core"; +import { NetworkStatus } from "@apollo/client/core"; +import type { NoInfer } from "@apollo/client/react"; import type { - LazyQueryHookExecOptions, - LazyQueryHookOptions, - LazyQueryResultTuple, - NoInfer, - QueryHookOptions, + ObservableQueryFields, + VariablesOption, } from "@apollo/client/react"; -import { mergeOptions } from "@apollo/client/utilities"; +import type { OnlyRequiredProperties } from "@apollo/client/utilities"; +import { maybeDeepFreeze } from "@apollo/client/utilities"; +import { invariant } from "@apollo/client/utilities/invariant"; +import type { NextFetchPolicyContext } from "../../core/watchQueryOptions.js"; + +import { useRenderGuard } from "./internal/index.js"; +import { useDeepMemo } from "./internal/useDeepMemo.js"; import { useIsomorphicLayoutEffect } from "./internal/useIsomorphicLayoutEffect.js"; -import type { InternalResult } from "./useQuery.js"; -import { - createMakeWatchQueryOptions, - getDefaultFetchPolicy, - getObsQueryOptions, - toQueryResult, - useQueryInternals, -} from "./useQuery.js"; +import { useApolloClient } from "./useApolloClient.js"; +import { useSyncExternalStore } from "./useSyncExternalStore.js"; + +export interface LazyQueryHookOptions< + TData = any, + TVariables extends OperationVariables = OperationVariables, +> { + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#fetchPolicy:member} */ + fetchPolicy?: WatchQueryFetchPolicy; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#nextFetchPolicy:member} */ + nextFetchPolicy?: + | WatchQueryFetchPolicy + | (( + this: WatchQueryOptions<TVariables, TData>, + currentFetchPolicy: WatchQueryFetchPolicy, + context: NextFetchPolicyContext<TData, TVariables> + ) => WatchQueryFetchPolicy); + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#refetchWritePolicy:member} */ + refetchWritePolicy?: RefetchWritePolicy; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#errorPolicy:member} */ + errorPolicy?: ErrorPolicy; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#pollInterval:member} */ + pollInterval?: number; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#notifyOnNetworkStatusChange:member} */ + notifyOnNetworkStatusChange?: boolean; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#returnPartialData:member} */ + returnPartialData?: boolean; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#canonizeResults:member} */ + canonizeResults?: boolean; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#skipPollAttempt:member} */ + skipPollAttempt?: () => boolean; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#client:member} */ + client?: ApolloClient<any>; + + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ + context?: DefaultContext; +} + +export type 
LazyQueryHookExecOptions< + TVariables extends OperationVariables = OperationVariables, +> = { + /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ + context?: DefaultContext; +} & VariablesOption<TVariables>; + +export interface LazyQueryResult<TData, TVariables extends OperationVariables> { + /** {@inheritDoc @apollo/client!QueryResultDocumentation#startPolling:member} */ + startPolling: (pollInterval: number) => void; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#stopPolling:member} */ + stopPolling: () => void; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#subscribeToMore:member} */ + subscribeToMore: SubscribeToMoreFunction<TData, TVariables>; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#updateQuery:member} */ + updateQuery: (mapFn: UpdateQueryMapFn<TData, TVariables>) => void; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#refetch:member} */ + refetch: ( + variables?: Partial<TVariables> + ) => Promise<ApolloQueryResult<MaybeMasked<TData>>>; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#variables:member} */ + variables: TVariables | undefined; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#fetchMore:member} */ + fetchMore: < + TFetchData = TData, + TFetchVars extends OperationVariables = TVariables, + >( + fetchMoreOptions: FetchMoreQueryOptions<TFetchVars, TFetchData> & { + updateQuery?: ( + previousQueryResult: Unmasked<TData>, + options: { + fetchMoreResult: Unmasked<TFetchData>; + variables: TFetchVars; + } + ) => Unmasked<TData>; + } + ) => Promise<ApolloQueryResult<MaybeMasked<TFetchData>>>; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#client:member} */ + client: ApolloClient<any>; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#observable:member} */ + observable: ObservableQuery<TData, TVariables>; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#data:member} */ + data: MaybeMasked<TData> | undefined; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#previousData:member} */ + previousData?: MaybeMasked<TData>; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#error:member} */ + error?: ApolloError; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#loading:member} */ + loading: boolean; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#networkStatus:member} */ + networkStatus: NetworkStatus; + /** {@inheritDoc @apollo/client!QueryResultDocumentation#called:member} */ + called: boolean; +} + +export type LazyQueryResultTuple< + TData, + TVariables extends OperationVariables, +> = [ + execute: LazyQueryExecFunction<TData, TVariables>, + result: LazyQueryResult<TData, TVariables>, +]; + +export type LazyQueryExecFunction< + TData, + TVariables extends OperationVariables, +> = ( + ...args: [TVariables] extends [never] ? + [options?: LazyQueryHookExecOptions<TVariables>] + : Record<string, never> extends OnlyRequiredProperties<TVariables> ? + [options?: LazyQueryHookExecOptions<TVariables>] + : [options: LazyQueryHookExecOptions<TVariables>] +) => Promise<ApolloQueryResult<TData>>; // The following methods, when called will execute the query, regardless of // whether the useLazyQuery execute function was called before. 
const EAGER_METHODS = [ "refetch", - "reobserve", "fetchMore", "updateQuery", "startPolling", @@ -81,112 +209,153 @@ export function useLazyQuery< query: DocumentNode | TypedDocumentNode<TData, TVariables>, options?: LazyQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> ): LazyQueryResultTuple<TData, TVariables> { - const execOptionsRef = - React.useRef<Partial<LazyQueryHookExecOptions<TData, TVariables>>>(void 0); - const optionsRef = - React.useRef<LazyQueryHookOptions<TData, TVariables>>(void 0); - const queryRef = React.useRef< - DocumentNode | TypedDocumentNode<TData, TVariables> - >(void 0); - const merged = mergeOptions(options, execOptionsRef.current || {}); - const document = merged?.query ?? query; - - // Use refs to track options and the used query to ensure the `execute` - // function remains referentially stable between renders. - optionsRef.current = options; - queryRef.current = document; - - const queryHookOptions = { - ...merged, - skip: !execOptionsRef.current, - }; - const { - obsQueryFields, - result: useQueryResult, - client, - resultData, - observable, - onQueryExecuted, - } = useQueryInternals(document, queryHookOptions); + const client = useApolloClient(options?.client); + const previousDataRef = React.useRef<TData>(undefined); + const resultRef = React.useRef<ApolloQueryResult<TData>>(undefined); + const stableOptions = useDeepMemo(() => options, [options]); + const calledDuringRender = useRenderGuard(); + + function createObservable() { + return client.watchQuery({ + ...options, + query, + initialFetchPolicy: options?.fetchPolicy, + fetchPolicy: "standby", + }); + } + + const [currentClient, setCurrentClient] = React.useState(client); + const [observable, setObservable] = React.useState(createObservable); + + if (currentClient !== client) { + setCurrentClient(client); + setObservable(createObservable()); + } + + // TODO: Revisit after we have RxJS in place. We should be able to use + // observable.getCurrentResult() (or equivalent) to get these values which + // will hopefully alleviate the need for us to use refs to track these values. + const updateResult = React.useCallback( + (result: ApolloQueryResult<TData>, forceUpdate: () => void) => { + const previousData = resultRef.current?.data; + + if (previousData && !equal(previousData, result.data)) { + // eslint-disable-next-line react-compiler/react-compiler + previousDataRef.current = previousData; + } + + resultRef.current = result; + + forceUpdate(); + }, + [] + ); + + const observableResult = useSyncExternalStore( + React.useCallback( + (forceUpdate) => { + const subscription = observable.subscribe((result) => { + if (!equal(resultRef.current, result)) { + updateResult(result, forceUpdate); + } + }); - const initialFetchPolicy = - observable.options.initialFetchPolicy || - getDefaultFetchPolicy( - queryHookOptions.defaultOptions, - client.defaultOptions - ); + return () => { + subscription.unsubscribe(); + }; + }, + [observable, updateResult] + ), + () => resultRef.current || initialResult, + () => initialResult + ); - const forceUpdateState = React.useReducer((tick) => tick + 1, 0)[1]; + const [, forceUpdateState] = React.useReducer((tick) => tick + 1, 0); // We use useMemo here to make sure the eager methods have a stable identity. 
const eagerMethods = React.useMemo(() => { const eagerMethods: Record<string, any> = {}; for (const key of EAGER_METHODS) { - const method = obsQueryFields[key]; eagerMethods[key] = function () { - if (!execOptionsRef.current) { - execOptionsRef.current = {}; - // Only the first time populating execOptionsRef.current matters here. - forceUpdateState(); - } - // @ts-expect-error this is just too generic to type - return method.apply(this, arguments); + invariant( + resultRef.current, + "useLazyQuery: '%s' cannot be called before executing the query.", + key + ); + + // @ts-expect-error this is just to generic to type + return observable[key].apply(observable, arguments); }; } - return eagerMethods as typeof obsQueryFields; - }, [forceUpdateState, obsQueryFields]); + return eagerMethods as Pick< + ObservableQueryFields<TData, TVariables>, + (typeof EAGER_METHODS)[number] + >; + }, [observable]); - const called = !!execOptionsRef.current; - const result = React.useMemo( - () => ({ - ...useQueryResult, - ...eagerMethods, - called, - }), - [useQueryResult, eagerMethods, called] - ); + React.useEffect(() => { + const updatedOptions: Partial<WatchQueryOptions<TVariables, TData>> = { + query, + errorPolicy: stableOptions?.errorPolicy, + context: stableOptions?.context, + refetchWritePolicy: stableOptions?.refetchWritePolicy, + returnPartialData: stableOptions?.returnPartialData, + notifyOnNetworkStatusChange: stableOptions?.notifyOnNetworkStatusChange, + nextFetchPolicy: options?.nextFetchPolicy, + skipPollAttempt: options?.skipPollAttempt, + }; - const execute = React.useCallback<LazyQueryResultTuple<TData, TVariables>[0]>( - (executeOptions) => { - execOptionsRef.current = - executeOptions ? - { - ...executeOptions, - fetchPolicy: executeOptions.fetchPolicy || initialFetchPolicy, - } - : { - fetchPolicy: initialFetchPolicy, - }; - - const options = mergeOptions(optionsRef.current, { - query: queryRef.current, - ...execOptionsRef.current, - }); - - const promise = executeQuery( - resultData, - observable, - client, - document, - { ...options, skip: false }, - onQueryExecuted - ).then((queryResult) => Object.assign(queryResult, eagerMethods)); - - // Because the return value of `useLazyQuery` is usually floated, we need - // to catch the promise to prevent unhandled rejections. - promise.catch(() => {}); + // Wait to apply the changed fetch policy until after the execute + // function has been called. The execute function will handle setting the + // the fetch policy away from standby for us when called for the first time. + if ( + observable.options.fetchPolicy !== "standby" && + stableOptions?.fetchPolicy + ) { + updatedOptions.fetchPolicy = stableOptions?.fetchPolicy; + } + + observable.silentSetOptions(updatedOptions); + }, [ + query, + observable, + stableOptions, + // Ensure inline functions don't suffer from stale closures by checking for + // these deps separately. @wry/equality doesn't compare function identity + // so `stableOptions` isn't updated when using inline functions. + options?.nextFetchPolicy, + options?.skipPollAttempt, + ]); + + const execute: LazyQueryExecFunction<TData, TVariables> = React.useCallback( + (...args) => { + invariant( + !calledDuringRender(), + "useLazyQuery: 'execute' should not be called during render. To start a query during render, use the 'useQuery' hook." 
+ ); + + const [executeOptions] = args; + + const options: Partial<WatchQueryOptions<TVariables, TData>> = { + ...executeOptions, + // TODO: Figure out a better way to reset variables back to empty + variables: (executeOptions?.variables ?? {}) as TVariables, + }; + + if (observable.options.fetchPolicy === "standby") { + options.fetchPolicy = observable.options.initialFetchPolicy; + } + + const promise = observable.setOptions(options); + + // TODO: This should be fixed in core + if (!resultRef.current && stableOptions?.notifyOnNetworkStatusChange) { + updateResult(observable.getCurrentResult(), forceUpdateState); + } return promise; }, - [ - client, - document, - eagerMethods, - initialFetchPolicy, - observable, - resultData, - onQueryExecuted, - ] + [observable, stableOptions, updateResult, calledDuringRender] ); const executeRef = React.useRef(execute); @@ -198,41 +367,26 @@ export function useLazyQuery< (...args) => executeRef.current(...args), [] ); - return [stableExecute, result]; -} - -function executeQuery<TData, TVariables extends OperationVariables>( - resultData: InternalResult<TData, TVariables>, - observable: ObservableQuery<TData, TVariables>, - client: ApolloClient<object>, - currentQuery: DocumentNode, - options: QueryHookOptions<TData, TVariables> & { - query?: DocumentNode; - }, - onQueryExecuted: (options: WatchQueryOptions<TVariables, TData>) => void -) { - const query = options.query || currentQuery; - const watchQueryOptions = createMakeWatchQueryOptions( - client, - query, - options, - false - )(observable); - const promise = observable.reobserve( - getObsQueryOptions(observable, client, options, watchQueryOptions) - ); - onQueryExecuted(watchQueryOptions); - - return promise.then( - (result) => - toQueryResult(result, resultData.previousData, observable, client), - () => - toQueryResult( - observable.getCurrentResult(), - resultData.previousData, - observable, - client - ) + const result = React.useMemo( + () => ({ + ...eagerMethods, + ...observableResult, + client, + previousData: previousDataRef.current, + variables: observable.variables, + observable, + called: !!resultRef.current, + }), + [client, observableResult, eagerMethods, observable] ); + + return [stableExecute, result]; } + +const initialResult: ApolloQueryResult<any> = maybeDeepFreeze({ + data: undefined, + loading: false, + networkStatus: NetworkStatus.ready, + partial: true, +}); diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -62,7 +62,7 @@ interface ObsQueryWithMeta<TData, TVariables extends OperationVariables> [lastWatchOptions]?: WatchQueryOptions<TVariables, TData>; } -export interface InternalResult<TData, TVariables extends OperationVariables> { +interface InternalResult<TData, TVariables extends OperationVariables> { // These members are populated by getCurrentResult and setResult, and it's // okay/normal for them to be initially undefined. 
current?: undefined | InternalQueryResult<TData, TVariables>; @@ -132,7 +132,12 @@ function useQuery_< query: DocumentNode | TypedDocumentNode<TData, TVariables>, options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> ) { - const { result, obsQueryFields } = useQueryInternals(query, options); + const result = useQueryInternals(query, options); + const obsQueryFields = React.useMemo( + () => bindObservableMethods(result.observable), + [result.observable] + ); + return React.useMemo( () => ({ ...result, ...obsQueryFields }), [result, obsQueryFields] @@ -177,33 +182,6 @@ function useInternalState< let [internalState, updateInternalState] = React.useState(createInternalState); - /** - * Used by `useLazyQuery` when a new query is executed. - * We keep this logic here since it needs to update things in unsafe - * ways and here we at least can keep track of that in a single place. - */ - function onQueryExecuted( - watchQueryOptions: WatchQueryOptions<TVariables, TData> - ) { - // this needs to be set to prevent an immediate `resubscribe` in the - // next rerender of the `useQuery` internals - Object.assign(internalState.observable, { - [lastWatchOptions]: watchQueryOptions, - }); - const resultData = internalState.resultData; - updateInternalState({ - ...internalState, - // might be a different query - query: watchQueryOptions.query, - resultData: Object.assign(resultData, { - // We need to modify the previous `resultData` object as we rely on the - // object reference in other places - previousData: resultData.current?.data || resultData.previousData, - current: undefined, - }), - }); - } - if (client !== internalState.client || query !== internalState.query) { // If the client or query have changed, we need to create a new InternalState. // This will trigger a re-render with the new state, but it will also continue @@ -213,13 +191,13 @@ function useInternalState< // triggered with the new state. const newInternalState = createInternalState(internalState); updateInternalState(newInternalState); - return [newInternalState, onQueryExecuted] as const; + return newInternalState; } - return [internalState, onQueryExecuted] as const; + return internalState; } -export function useQueryInternals< +function useQueryInternals< TData = any, TVariables extends OperationVariables = OperationVariables, >( @@ -240,7 +218,7 @@ export function useQueryInternals< isSyncSSR ); - const [{ observable, resultData }, onQueryExecuted] = useInternalState( + const { observable, resultData } = useInternalState( client, query, options, @@ -259,11 +237,6 @@ export function useQueryInternals< watchQueryOptions ); - const obsQueryFields = React.useMemo( - () => bindObservableMethods(observable), - [observable] - ); - useRegisterSSRObservable(observable, renderPromises, ssrAllowed); const result = useObservableSubscriptionResult<TData, TVariables>( @@ -276,14 +249,7 @@ export function useQueryInternals< isSyncSSR ); - return { - result, - obsQueryFields, - observable, - resultData, - client, - onQueryExecuted, - }; + return result; } function useObservableSubscriptionResult< @@ -450,7 +416,7 @@ function useResubscribeIfNecessary< * This is two-step curried because we want to reuse the `make` function, * but the `observable` might differ between calls to `make`. 
*/ -export function createMakeWatchQueryOptions< +function createMakeWatchQueryOptions< TData = any, TVariables extends OperationVariables = OperationVariables, >( @@ -508,10 +474,7 @@ export function createMakeWatchQueryOptions< }; } -export function getObsQueryOptions< - TData, - TVariables extends OperationVariables, ->( +function getObsQueryOptions<TData, TVariables extends OperationVariables>( observable: ObservableQuery<TData, TVariables> | undefined, client: ApolloClient<object>, queryHookOptions: QueryHookOptions<TData, TVariables>, @@ -584,10 +547,7 @@ function getCurrentResult<TData, TVariables extends OperationVariables>( return resultData.current!; } -export function getDefaultFetchPolicy< - TData, - TVariables extends OperationVariables, ->( +function getDefaultFetchPolicy<TData, TVariables extends OperationVariables>( queryHookDefaultOptions?: Partial<WatchQueryOptions<TVariables, TData>>, clientDefaultOptions?: DefaultOptions ): WatchQueryFetchPolicy { @@ -598,7 +558,7 @@ export function getDefaultFetchPolicy< ); } -export function toQueryResult<TData, TVariables extends OperationVariables>( +function toQueryResult<TData, TVariables extends OperationVariables>( result: ApolloQueryResult<MaybeMasked<TData>>, previousData: MaybeMasked<TData> | undefined, observable: ObservableQuery<TData, TVariables>, diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -151,20 +151,6 @@ export interface QueryHookOptions< TVariables extends OperationVariables = OperationVariables, > extends QueryFunctionOptions<TData, TVariables> {} -export interface LazyQueryHookOptions< - TData = any, - TVariables extends OperationVariables = OperationVariables, -> extends BaseQueryOptions<TVariables, TData> { - /** @internal */ - defaultOptions?: Partial<WatchQueryOptions<TVariables, TData>>; -} -export interface LazyQueryHookExecOptions< - TData = any, - TVariables extends OperationVariables = OperationVariables, -> extends LazyQueryHookOptions<TData, TVariables> { - query?: DocumentNode | TypedDocumentNode<TData, TVariables>; -} - export type SuspenseQueryHookFetchPolicy = Extract< WatchQueryFetchPolicy, "cache-first" | "network-only" | "no-cache" | "cache-and-network" @@ -264,47 +250,6 @@ export interface LoadableQueryHookOptions { returnPartialData?: boolean; } -/** - * @deprecated This type will be removed in the next major version of Apollo Client - */ -export interface QueryLazyOptions<TVariables> { - /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#variables:member} */ - variables?: TVariables; - /** {@inheritDoc @apollo/client!QueryOptionsDocumentation#context:member} */ - context?: DefaultContext; -} - -/** - * @deprecated This type will be removed in the next major version of Apollo Client - */ -export type LazyQueryResult< - TData, - TVariables extends OperationVariables, -> = QueryResult<TData, TVariables>; - -/** - * @deprecated This type will be removed in the next major version of Apollo Client - */ -export type QueryTuple< - TData, - TVariables extends OperationVariables, -> = LazyQueryResultTuple<TData, TVariables>; - -export type LazyQueryExecFunction< - TData, - TVariables extends OperationVariables, -> = ( - options?: Partial<LazyQueryHookExecOptions<TData, TVariables>> -) => Promise<QueryResult<TData, TVariables>>; - -export type LazyQueryResultTuple< - TData, - TVariables extends OperationVariables, -> = [ - execute: LazyQueryExecFunction<TData, TVariables>, - result: QueryResult<TData, TVariables>, 
-]; - /* Mutation types */ export type RefetchQueriesFunction = (
diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -1,22 +1,27 @@ -import { act, renderHook, waitFor } from "@testing-library/react"; +import { act, renderHook, screen, waitFor } from "@testing-library/react"; import { disableActEnvironment, renderHookToSnapshotStream, } from "@testing-library/react-render-stream"; +import { userEvent } from "@testing-library/user-event"; import { expectTypeOf } from "expect-type"; import { GraphQLError } from "graphql"; import { gql } from "graphql-tag"; import React from "react"; +import { ErrorBoundary } from "react-error-boundary"; import { Observable } from "rxjs"; import { ApolloClient, ApolloError, ApolloLink, + ApolloQueryResult, ErrorPolicy, InMemoryCache, NetworkStatus, + RefetchWritePolicy, TypedDocumentNode, + WatchQueryFetchPolicy, } from "@apollo/client/core"; import { Masked, MaskedDocumentNode, Unmasked } from "@apollo/client/masking"; import { ApolloProvider } from "@apollo/client/react"; @@ -24,19 +29,21 @@ import { MockLink, mockSingleLink, MockSubscriptionLink, - tick, wait, } from "@apollo/client/testing"; import { MockedProvider } from "@apollo/client/testing/react"; import { DeepPartial } from "@apollo/client/utilities"; import { InvariantError } from "@apollo/client/utilities/invariant"; -import { QueryResult } from "../../types/types.js"; +import { + renderAsync, + setupSimpleCase, + setupVariablesCase, + spyOnConsole, + VariablesCaseVariables, +} from "../../../testing/internal/index.js"; import { useLazyQuery } from "../useLazyQuery.js"; -const IS_REACT_17 = React.version.startsWith("17"); -const IS_REACT_18 = React.version.startsWith("18"); - describe("useLazyQuery Hook", () => { const helloQuery: TypedDocumentNode<{ hello: string; @@ -66,9 +73,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -78,26 +84,19 @@ describe("useLazyQuery Hook", () => { } const [execute] = getCurrentSnapshot(); + const result = await execute(); - setTimeout(() => execute()); - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - } + expect(result).toEqualApolloQueryResult({ + data: { hello: "world" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world" }, called: true, loading: false, @@ -106,64 +105,11 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } - }); - - it("should set `called` to false by default", async () => { - using _disabledAct = disableActEnvironment(); - const { takeSnapshot } = await renderHookToSnapshotStream( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={[]}>{children}</MockedProvider> - ), - } - ); - - const [, { called }] = await takeSnapshot(); - - expect(called).toBe(false); - }); - - it("should set `called` to true after calling the lazy execute function", async () => { - const mocks = [ - { - request: { query: helloQuery }, - result: { 
data: { hello: "world" } }, - delay: 20, - }, - ]; - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream(() => useLazyQuery(helloQuery), { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}>{children}</MockedProvider> - ), - }); - - { - const [, { loading, called }] = await takeSnapshot(); - expect(loading).toBe(false); - expect(called).toBe(false); - } - - const execute = getCurrentSnapshot()[0]; - setTimeout(() => execute()); - - { - const [, { loading, called }] = await takeSnapshot(); - expect(loading).toBe(true); - expect(called).toBe(true); - } - { - const [, { loading, called }] = await takeSnapshot(); - expect(loading).toBe(false); - expect(called).toBe(true); - } + await expect(takeSnapshot).not.toRerender(); }); - it("should use variables defined in hook options (if any), when running the lazy execution function", async () => { + it("should use variables passed to execute function when running the lazy execution function", async () => { const query = gql` query ($id: number) { hello(id: $id) @@ -180,51 +126,39 @@ describe("useLazyQuery Hook", () => { using _disabledAct = disableActEnvironment(); const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream( - () => - useLazyQuery(query, { - variables: { id: 1 }, - }), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}>{children}</MockedProvider> - ), - } - ); + await renderHookToSnapshotStream(() => useLazyQuery(query), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + variables: {}, }); } - const execute = getCurrentSnapshot()[0]; - setTimeout(() => execute()); + const [execute] = getCurrentSnapshot(); + const result = await execute({ variables: { id: 1 } }); - { - const [, result] = await takeSnapshot(); + expect(result).toEqualApolloQueryResult({ + data: { hello: "world 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: { id: 1 }, - }); - } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 1" }, called: true, loading: false, @@ -233,35 +167,23 @@ describe("useLazyQuery Hook", () => { variables: { id: 1 }, }); } - }); - it("should use variables passed into lazy execution function, overriding similar variables defined in Hook options", async () => { - const query = gql` - query ($id: number) { - hello(id: $id) - } - `; + await expect(takeSnapshot).not.toRerender(); + }); + test("sets initial loading state when notifyOnNetworkStatusChange is true", async () => { const mocks = [ { - request: { query, variables: { id: 1 } }, - result: { data: { hello: "world 1" } }, - delay: 20, - }, - { - request: { query, variables: { id: 2 } }, - result: { data: { hello: "world 2" } }, - delay: 20, + request: { query: helloQuery }, + result: { data: { hello: "world" } }, + delay: 50, }, ]; using _disabledAct = disableActEnvironment(); const { takeSnapshot, getCurrentSnapshot } = await 
renderHookToSnapshotStream( - () => - useLazyQuery(query, { - variables: { id: 1 }, - }), + () => useLazyQuery(helloQuery, { notifyOnNetworkStatusChange: true }), { wrapper: ({ children }) => ( <MockedProvider mocks={mocks}>{children}</MockedProvider> @@ -272,226 +194,143 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + variables: {}, }); } const [execute] = getCurrentSnapshot(); - setTimeout(() => execute({ variables: { id: 2 } })); + const result = await execute(); + + expect(result).toEqualApolloQueryResult({ + data: { hello: "world" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, networkStatus: NetworkStatus.loading, previousData: undefined, - variables: { id: 2 }, + variables: {}, }); } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { hello: "world 2" }, + expect(result).toEqualLazyQueryResult({ + data: { hello: "world" }, called: true, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 2 }, + variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); - it("should merge variables from original hook and execution function", async () => { - const counterQuery: TypedDocumentNode< - { - counter: number; - vars: Record<string, boolean>; - }, - { - hookVar?: boolean; - execVar?: boolean; - localDefaultVar?: boolean; - globalDefaultVar?: boolean; + it("changing queries", async () => { + const query1 = gql` + query { + hello } - > = gql` - query GetCounter( - $hookVar: Boolean - $execVar: Boolean - $localDefaultVar: Boolean - $globalDefaultVar: Boolean - ) { - counter - vars + `; + const query2 = gql` + query { + name } `; - - let count = 0; - const client = new ApolloClient({ - defaultOptions: { - watchQuery: { - variables: { - globalDefaultVar: true, - }, - }, + const mocks = [ + { + request: { query: query1 }, + result: { data: { hello: "world" } }, + delay: 20, }, - cache: new InMemoryCache(), - link: new ApolloLink( - (request) => - new Observable((observer) => { - if (request.operationName === "GetCounter") { - setTimeout(() => { - observer.next({ - data: { - counter: ++count, - vars: request.variables, - }, - }); - observer.complete(); - }, 50); - } else { - observer.error( - new Error( - `Unknown query: ${request.operationName || request.query}` - ) - ); - } - }) - ), - }); + { + request: { query: query2 }, + result: { data: { name: "changed" } }, + delay: 20, + }, + ]; - using __disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream( - () => { - return useLazyQuery(counterQuery, { - notifyOnNetworkStatusChange: true, - variables: { - hookVar: true, - }, - defaultOptions: { - variables: { - localDefaultVar: true, - }, - }, - }); - }, - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}>{children}</ApolloProvider> - ), - } - ); + const cache = new InMemoryCache(); + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream(({ query }) => useLazyQuery(query), { + 
initialProps: { query: query1 }, + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> + ), + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - }, + variables: {}, }); } - const expectedFinalData = { - counter: 1, - vars: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, - }, - }; - const [execute] = getCurrentSnapshot(); - const execResult = await execute({ - variables: { - execVar: true, - }, - }); - // TODO: Determine if the return value makes sense. Other fetching functions - // (`refetch`, `fetchMore`, etc.) resolve with an `ApolloQueryResult` type - // which contain a subset of this data. - expect(execResult).toEqualQueryResult({ - data: expectedFinalData, - called: true, + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world" }, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, - }, + partial: false, }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { hello: "world" }, called: true, - loading: true, - networkStatus: NetworkStatus.loading, + loading: false, + networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, - }, + variables: {}, }); } + await rerender({ query: query2 }); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: expectedFinalData, + expect(result).toEqualLazyQueryResult({ + data: { hello: "world" }, called: true, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, - }, + variables: {}, }); } - const refetchResult = await getCurrentSnapshot()[1].reobserve({ - fetchPolicy: "network-only", - nextFetchPolicy: "cache-first", - variables: { - execVar: false, - }, - }); - - expect(refetchResult).toEqualApolloQueryResult({ - data: { counter: 2, vars: { execVar: false } }, + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { name: "changed" }, loading: false, networkStatus: NetworkStatus.ready, partial: false, @@ -500,111 +339,131 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: expectedFinalData, - called: true, - loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: expectedFinalData, - variables: { execVar: false }, - }); - } - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: { counter: 2, vars: { execVar: false } }, + expect(result).toEqualLazyQueryResult({ + data: { name: "changed" }, called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: expectedFinalData, - variables: { execVar: false }, + previousData: { hello: "world" }, + variables: {}, }); } - const execResult2 = await getCurrentSnapshot()[0]({ - fetchPolicy: "cache-and-network", - nextFetchPolicy: "cache-first", - 
variables: { - execVar: true, - }, - }); + await expect(takeSnapshot).not.toRerender(); + }); - expect(execResult2).toEqualQueryResult({ - data: { counter: 3, vars: { ...expectedFinalData.vars, execVar: true } }, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { counter: 2, vars: { execVar: false } }, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, + it("applies changed query to next refetch after execute", async () => { + const query1 = gql` + query { + hello + } + `; + const query2 = gql` + query { + name + } + `; + const mocks = [ + { + request: { query: query1 }, + result: { data: { hello: "world" } }, + delay: 20, }, - }); + { + request: { query: query2 }, + result: { data: { name: "changed" } }, + delay: 20, + }, + ]; + + const cache = new InMemoryCache(); + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream(({ query }) => useLazyQuery(query), { + initialProps: { query: query1 }, + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> + ), + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { counter: 2, vars: { execVar: false } }, + expect(result).toEqualLazyQueryResult({ + data: { hello: "world" }, called: true, - loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: { counter: 2, vars: { execVar: false } }, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, - }, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, }); } - // For some reason we get an extra render in React 18 of the same thing - if (IS_REACT_18) { + await rerender({ query: query2 }); + + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { counter: 2, vars: { execVar: false } }, + expect(result).toEqualLazyQueryResult({ + data: { hello: "world" }, called: true, - loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: { counter: 2, vars: { execVar: false } }, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - execVar: true, - }, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, }); } + const [, { refetch }] = getCurrentSnapshot(); + + await expect(refetch()).resolves.toEqualApolloQueryResult({ + data: { name: "changed" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { - counter: 3, - vars: { ...expectedFinalData.vars, execVar: true }, - }, + expect(result).toEqualLazyQueryResult({ + data: { name: "changed" }, called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: { counter: 2, vars: { execVar: false } }, - variables: { - globalDefaultVar: true, - localDefaultVar: true, - hookVar: true, - 
execVar: true, - }, + previousData: { hello: "world" }, + variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); - it("changing queries", async () => { + test("renders loading states when changing queries with notifyOnNetworkStatusChange", async () => { const query1 = gql` query { hello @@ -630,21 +489,25 @@ describe("useLazyQuery Hook", () => { const cache = new InMemoryCache(); using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream(() => useLazyQuery(query1), { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks} cache={cache}> - {children} - </MockedProvider> - ), - }); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ query }) => + useLazyQuery(query, { notifyOnNetworkStatusChange: true }), + { + initialProps: { query: query1 }, + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> + ), + } + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -653,13 +516,19 @@ describe("useLazyQuery Hook", () => { }); } - const execute = getCurrentSnapshot()[0]; - setTimeout(() => execute()); + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -671,7 +540,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world" }, called: true, loading: false, @@ -681,14 +550,34 @@ describe("useLazyQuery Hook", () => { }); } - setTimeout(() => execute({ query: query2 })); + await rerender({ query: query2 }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world" }, called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { name: "changed" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: true, loading: true, networkStatus: NetworkStatus.loading, previousData: { hello: "world" }, @@ -699,7 +588,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { name: "changed" }, called: true, loading: false, @@ -708,6 +597,8 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); it('should fetch data each time the execution function is called, when using a "network-only" fetch policy', async () => { @@ -741,23 +632,116 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: false, - error: undefined, loading: 
false, networkStatus: NetworkStatus.ready, previousData: undefined, variables: {}, }); } - const execute = getCurrentSnapshot()[0]; - setTimeout(() => execute()); + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { hello: "world 1" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 2" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { hello: "world 2" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { hello: "world 1" }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); + }); + + it('renders loading states each time the execution function is called when using a "network-only" fetch policy with notifyOnNetworkStatusChange', async () => { + const mocks = [ + { + request: { query: helloQuery }, + result: { data: { hello: "world 1" } }, + delay: 20, + }, + { + request: { query: helloQuery }, + result: { data: { hello: "world 2" } }, + delay: 20, + }, + ]; + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = + await renderHookToSnapshotStream( + () => + useLazyQuery(helloQuery, { + notifyOnNetworkStatusChange: true, + fetchPolicy: "network-only", + }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -770,7 +754,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 1" }, called: true, loading: false, @@ -780,24 +764,30 @@ describe("useLazyQuery Hook", () => { }); } - setTimeout(() => execute()); + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 2" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 1" }, called: true, loading: true, networkStatus: NetworkStatus.loading, - previousData: { hello: "world 1" }, + previousData: undefined, variables: {}, }); } + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 2" }, called: true, loading: false, @@ -806,9 +796,11 @@ describe("useLazyQuery Hook", () => { variables: {}, }); 
} + + await expect(takeSnapshot).not.toRerender(); }); - it("should persist previous data when a query is re-run", async () => { + it("should persist previous data when a query is refetched", async () => { const mocks = [ { request: { query: helloQuery }, @@ -839,9 +831,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -849,13 +840,20 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } - const execute = getCurrentSnapshot()[0]; - setTimeout(() => execute()); + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -868,7 +866,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 1" }, called: true, loading: false, @@ -878,25 +876,31 @@ describe("useLazyQuery Hook", () => { }); } - const refetch = getCurrentSnapshot()[1].refetch; - setTimeout(() => refetch!()); + const [, { refetch }] = getCurrentSnapshot(); + + await expect(refetch()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 2" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 1" }, called: true, loading: true, networkStatus: NetworkStatus.refetch, - previousData: { hello: "world 1" }, + previousData: undefined, variables: {}, }); } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 2" }, called: true, loading: false, @@ -905,8 +909,13 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); + // TODO: Determine if this hook makes sense for polling or if that should be + // reserved for useQuery. 
At the very least, we need to figure out if you can + // start polling a query before it has been executed it("should allow for the query to start with polling", async () => { const mocks = [ { @@ -937,9 +946,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -948,26 +956,19 @@ describe("useLazyQuery Hook", () => { }); } - await tick(); - getCurrentSnapshot()[1].startPolling(10); - - { - const [, result] = await takeSnapshot(); + const [execute] = getCurrentSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - } + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "world 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 1" }, called: true, loading: false, @@ -977,10 +978,12 @@ describe("useLazyQuery Hook", () => { }); } + getCurrentSnapshot()[1].startPolling(10); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 2" }, called: true, loading: false, @@ -993,7 +996,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "world 3" }, called: true, loading: false, @@ -1005,7 +1008,7 @@ describe("useLazyQuery Hook", () => { getCurrentSnapshot()[1].stopPolling(); - expect(takeSnapshot).not.toRerender(); + await expect(takeSnapshot).not.toRerender(); }); it("should persist previous data when a query is re-run and variable changes", async () => { @@ -1058,9 +1061,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1068,25 +1070,22 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } - const execute = getCurrentSnapshot()[0]; - setTimeout(() => execute({ variables: { id: 1 } })); - { - const [, result] = await takeSnapshot(); + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: 1 } }) + ).resolves.toEqualApolloQueryResult({ + data: data1, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: { id: 1 }, - }); - } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: data1, called: true, loading: false, @@ -1096,12 +1095,145 @@ describe("useLazyQuery Hook", () => { }); } - setTimeout(() => execute({ variables: { id: 2 } })); - + await expect( + execute({ variables: { id: 2 } }) + ).resolves.toEqualApolloQueryResult({ + data: data2, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + 
expect(result).toEqualLazyQueryResult({ + data: data2, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: data1, + variables: { id: 2 }, + }); + } + + await expect(takeSnapshot).not.toRerender(); + }); + + test("renders loading states when a query is re-run and variables changes with notifyOnNetworkStatusChange", async () => { + const CAR_QUERY_BY_ID = gql` + query ($id: Int) { + car(id: $id) { + make + model + } + } + `; + + const data1 = { + car: { + make: "Audi", + model: "A4", + __typename: "Car", + }, + }; + + const data2 = { + car: { + make: "Audi", + model: "RS8", + __typename: "Car", + }, + }; + + const mocks = [ + { + request: { query: CAR_QUERY_BY_ID, variables: { id: 1 } }, + result: { data: data1 }, + delay: 20, + }, + { + request: { query: CAR_QUERY_BY_ID, variables: { id: 2 } }, + result: { data: data2 }, + delay: 20, + }, + ]; + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = + await renderHookToSnapshotStream( + () => + useLazyQuery(CAR_QUERY_BY_ID, { notifyOnNetworkStatusChange: true }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: 1 } }) + ).resolves.toEqualApolloQueryResult({ + data: data1, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: true, + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: { id: 1 }, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: data1, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: 1 }, + }); + } + + await expect( + execute({ variables: { id: 2 } }) + ).resolves.toEqualApolloQueryResult({ + data: data2, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -1113,7 +1245,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: data2, called: true, loading: false, @@ -1122,6 +1254,8 @@ describe("useLazyQuery Hook", () => { variables: { id: 2 }, }); } + + await expect(takeSnapshot).not.toRerender(); }); it("should work with cache-and-network fetch policy", async () => { @@ -1153,9 +1287,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1166,13 +1299,17 @@ describe("useLazyQuery Hook", () => { const [execute] = getCurrentSnapshot(); - setTimeout(() => execute()); + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "from link" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, 
+ }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - // TODO: FIXME + expect(result).toEqualLazyQueryResult({ data: { hello: "from cache" }, called: true, loading: true, @@ -1185,7 +1322,7 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "from link" }, called: true, loading: false, @@ -1194,31 +1331,48 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); - it("should return a promise from the execution function which resolves with the result", async () => { - const mocks = [ + test("executes on the network multiple times with a cache-and-network fetch policy", async () => { + const cache = new InMemoryCache(); + const link = new MockLink([ { request: { query: helloQuery }, - result: { data: { hello: "world" } }, + result: { data: { hello: "from link" } }, delay: 20, }, - ]; + { + request: { query: helloQuery }, + result: { data: { hello: "from link 2" } }, + delay: 20, + }, + ]); + + const client = new ApolloClient({ + link, + cache, + }); + + cache.writeQuery({ query: helloQuery, data: { hello: "from cache" } }); using _disabledAct = disableActEnvironment(); const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream(() => useLazyQuery(helloQuery), { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}>{children}</MockedProvider> - ), - }); + await renderHookToSnapshotStream( + () => useLazyQuery(helloQuery, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1229,14 +1383,18 @@ describe("useLazyQuery Hook", () => { const [execute] = getCurrentSnapshot(); - await tick(); - const executeResult = execute(); + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "from link" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { hello: "from cache" }, called: true, loading: true, networkStatus: NetworkStatus.loading, @@ -1248,180 +1406,341 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { hello: "world" }, + expect(result).toEqualLazyQueryResult({ + data: { hello: "from link" }, called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, + previousData: { hello: "from cache" }, variables: {}, }); } - await expect(executeResult).resolves.toEqualQueryResult({ - data: { hello: "world" }, - called: true, + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "from link 2" }, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, + partial: false, }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { hello: "from link" }, + called: true, + loading: true, + networkStatus: NetworkStatus.loading, + previousData: { hello: "from cache" }, + variables: {}, + }); + } + + { + const [, result] = await 
takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { hello: "from link 2" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { hello: "from link" }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); }); - it("should have matching results from execution function and hook", async () => { - const query = gql` - query GetCountries($filter: String) { - countries(filter: $filter) { - code - name - } - } - `; + test("executes on the network multiple times with a cache-and-network fetch policy when changing variables", async () => { + const { query, mocks } = setupVariablesCase(); - const mocks = [ - { - request: { - query, - variables: { - filter: "PA", - }, - }, - result: { - data: { - countries: { - code: "PA", - name: "Panama", - }, - }, - }, - delay: 20, + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query, + data: { + character: { __typename: "Character", id: "1", name: "Cache 1" }, }, - { - request: { - query, - variables: { - filter: "BA", - }, - }, - result: { - data: { - countries: { - code: "BA", - name: "Bahamas", - }, - }, - }, - delay: 20, + variables: { id: "1" }, + }); + + client.writeQuery({ + query, + data: { + character: { __typename: "Character", id: "2", name: "Cache 2" }, }, - ]; + variables: { id: "2" }, + }); using _disabledAct = disableActEnvironment(); const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream(() => useLazyQuery(query), { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}>{children}</MockedProvider> - ), - }); + await renderHookToSnapshotStream( + () => useLazyQuery(query, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, + // @ts-expect-error should be undefined variables: {}, }); } const [execute] = getCurrentSnapshot(); - await tick(); - let executeResult = execute({ variables: { filter: "PA" } }); + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Cache 1" }, + }, called: true, loading: true, networkStatus: NetworkStatus.loading, previousData: undefined, - variables: { filter: "PA" }, + variables: { id: "1" }, }); } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { countries: { code: "PA", name: "Panama" } }, + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { filter: "PA" }, + previousData: { + character: { __typename: "Character", id: "1", name: "Cache 1" }, + }, + variables: { id: "1" }, }); } - await expect(executeResult).resolves.toEqualQueryResult({ + await expect( + execute({ 
variables: { id: "2" } }) + ).resolves.toEqualApolloQueryResult({ data: { - countries: { - code: "PA", - name: "Panama", - }, + character: { __typename: "Character", id: "2", name: "Black Widow" }, }, - called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { filter: "PA" }, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Cache 2" }, + }, + called: true, + loading: true, + networkStatus: NetworkStatus.setVariables, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + variables: { id: "2" }, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "2", name: "Cache 2" }, + }, + variables: { id: "2" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); + }); + + test("renders loading states with a cache-and-network fetch policy when changing variables with notifyOnNetworkStatusChange", async () => { + const { query, mocks } = setupVariablesCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query, + data: { + character: { __typename: "Character", id: "1", name: "Cache 1" }, + }, + variables: { id: "1" }, + }); + + client.writeQuery({ + query, + data: { + character: { __typename: "Character", id: "2", name: "Cache 2" }, + }, + variables: { id: "2" }, }); - await tick(); - executeResult = execute({ variables: { filter: "BA" } }); + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = + await renderHookToSnapshotStream( + () => + useLazyQuery(query, { + fetchPolicy: "cache-and-network", + notifyOnNetworkStatusChange: true, + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error should be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Cache 1" }, + }, called: true, loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: { countries: { code: "PA", name: "Panama" } }, - variables: { filter: "BA" }, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: { id: "1" }, }); } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { countries: { code: "BA", name: "Bahamas" } }, + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, called: true, loading: false, 
networkStatus: NetworkStatus.ready, - previousData: { countries: { code: "PA", name: "Panama" } }, - variables: { filter: "BA" }, + previousData: { + character: { __typename: "Character", id: "1", name: "Cache 1" }, + }, + variables: { id: "1" }, }); } - await expect(executeResult).resolves.toEqualQueryResult({ - data: { countries: { code: "BA", name: "Bahamas" } }, - called: true, + await expect( + execute({ variables: { id: "2" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, loading: false, networkStatus: NetworkStatus.ready, - previousData: { countries: { code: "PA", name: "Panama" } }, - variables: { filter: "BA" }, + partial: false, }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Cache 2" }, + }, + called: true, + loading: true, + networkStatus: NetworkStatus.setVariables, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + variables: { id: "2" }, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "2", name: "Cache 2" }, + }, + variables: { id: "2" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); }); - it("the promise should reject with errors the “way useMutation does”", async () => { + it("the promise returned from execute rejects when GraphQL errors are returned and errorPolicy is `none`", async () => { const mocks = [ { request: { query: helloQuery }, @@ -1454,9 +1773,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1465,71 +1783,150 @@ describe("useLazyQuery Hook", () => { }); } - const executePromise = Promise.resolve().then(() => execute()); + // TODO: Determine if this is the correct behavior. This is different than + // 3.x where this resolves with an `ApolloQueryResult`. 
+ // https://github.com/apollographql/apollo-client/issues/10787 wants this + // behavior + // https://github.com/apollographql/apollo-client/issues/9142#issuecomment-1118972947 + // justifies the old behavior + await expect(execute()).rejects.toEqual( + new ApolloError({ graphQLErrors: [{ message: "error 1" }] }) + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, - loading: true, - networkStatus: NetworkStatus.loading, + loading: false, + networkStatus: NetworkStatus.error, previousData: undefined, + error: new ApolloError({ graphQLErrors: [{ message: "error 1" }] }), variables: {}, }); } + await expect(execute()).rejects.toEqual( + new ApolloError({ graphQLErrors: [{ message: "error 2" }] }) + ); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, called: true, loading: false, networkStatus: NetworkStatus.error, previousData: undefined, - error: new ApolloError({ graphQLErrors: [{ message: "error 1" }] }), + error: new ApolloError({ graphQLErrors: [{ message: "error 2" }] }), variables: {}, }); } - await expect(executePromise).resolves.toEqualQueryResult({ - data: undefined, - called: true, + await expect(takeSnapshot).not.toRerender(); + }); + + it("the promise returned from execute resolves when GraphQL errors are returned and errorPolicy is `all`", async () => { + const query: TypedDocumentNode<{ + currentUser: { __typename: "User"; id: string } | null; + }> = gql` + query currentUser { + id + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { currentUser: null }, + errors: [{ message: "Not logged in" }], + }, + delay: 20, + }, + { + request: { query }, + result: { + data: { currentUser: null }, + errors: [{ message: "Not logged in 2" }], + }, + delay: 20, + }, + ]; + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, peekSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } + ); + + const [execute] = await peekSnapshot(); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { currentUser: null }, + error: new ApolloError({ graphQLErrors: [{ message: "Not logged in" }] }), loading: false, networkStatus: NetworkStatus.error, - previousData: undefined, - error: new ApolloError({ graphQLErrors: [{ message: "error 1" }] }), - variables: {}, + partial: false, }); - void execute(); - { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { currentUser: null }, called: true, - loading: true, - networkStatus: NetworkStatus.loading, + loading: false, + networkStatus: NetworkStatus.error, previousData: undefined, - error: new ApolloError({ graphQLErrors: [{ message: "error 1" }] }), + error: new ApolloError({ + graphQLErrors: [{ message: "Not logged in" }], + }), variables: {}, }); } + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { currentUser: null }, + error: new ApolloError({ + graphQLErrors: [{ message: "Not logged in 2" }], + }), + 
loading: false, + networkStatus: NetworkStatus.error, + partial: false, + }); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { currentUser: null }, called: true, loading: false, networkStatus: NetworkStatus.error, previousData: undefined, - error: new ApolloError({ graphQLErrors: [{ message: "error 2" }] }), + error: new ApolloError({ + graphQLErrors: [{ message: "Not logged in 2" }], + }), variables: {}, }); } @@ -1537,12 +1934,29 @@ describe("useLazyQuery Hook", () => { await expect(takeSnapshot).not.toRerender(); }); - it("the promise should not cause an unhandled rejection", async () => { + it("the promise returned from execute resolves when GraphQL errors are returned and errorPolicy is `ignore`", async () => { + const query: TypedDocumentNode<{ + currentUser: { __typename: "User"; id: string } | null; + }> = gql` + query currentUser { + id + } + `; + const mocks = [ { - request: { query: helloQuery }, + request: { query }, + result: { + data: { currentUser: null }, + errors: [{ message: "Not logged in" }], + }, + delay: 20, + }, + { + request: { query }, result: { - errors: [new GraphQLError("error 1")], + data: { currentUser: null }, + errors: [{ message: "Not logged in 2" }], }, delay: 20, }, @@ -1550,7 +1964,7 @@ describe("useLazyQuery Hook", () => { using _disabledAct = disableActEnvironment(); const { takeSnapshot, peekSnapshot } = await renderHookToSnapshotStream( - () => useLazyQuery(helloQuery), + () => useLazyQuery(query, { errorPolicy: "ignore" }), { wrapper: ({ children }) => ( <MockedProvider mocks={mocks}>{children}</MockedProvider> @@ -1563,9 +1977,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1574,10 +1987,36 @@ describe("useLazyQuery Hook", () => { }); } - void execute(); + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { currentUser: null }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); - // Making sure the rejection triggers a test failure. - await wait(50); + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { currentUser: null }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await expect(execute()).resolves.toEqual({ + data: { currentUser: null }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + // We don't see an extra render here since the result is deeply equal to the + // previous result. 
+ await expect(takeSnapshot).not.toRerender(); }); it("allows in-flight requests to resolve when component unmounts", async () => { @@ -1592,7 +2031,7 @@ describe("useLazyQuery Hook", () => { const [execute] = result.current; - let promise: Promise<QueryResult<{ hello: string }>>; + let promise: Promise<ApolloQueryResult<{ hello: string }>>; act(() => { promise = execute(); }); @@ -1601,13 +2040,11 @@ describe("useLazyQuery Hook", () => { link.simulateResult({ result: { data: { hello: "Greetings" } } }, true); - await expect(promise!).resolves.toEqualQueryResult({ + await expect(promise!).resolves.toEqualApolloQueryResult({ data: { hello: "Greetings" }, - called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, + partial: false, }); }); @@ -1623,8 +2060,8 @@ describe("useLazyQuery Hook", () => { const [execute] = result.current; - let promise1: Promise<QueryResult<{ hello: string }>>; - let promise2: Promise<QueryResult<{ hello: string }>>; + let promise1: Promise<ApolloQueryResult<{ hello: string }>>; + let promise2: Promise<ApolloQueryResult<{ hello: string }>>; act(() => { promise1 = execute(); promise2 = execute(); @@ -1636,15 +2073,13 @@ describe("useLazyQuery Hook", () => { const expectedResult = { data: { hello: "Greetings" }, - called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, + partial: false, }; - await expect(promise1!).resolves.toEqualQueryResult(expectedResult); - await expect(promise2!).resolves.toEqualQueryResult(expectedResult); + await expect(promise1!).resolves.toEqualApolloQueryResult(expectedResult); + await expect(promise2!).resolves.toEqualApolloQueryResult(expectedResult); }); // https://github.com/apollographql/apollo-client/issues/9755 @@ -1694,9 +2129,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1708,58 +2142,28 @@ describe("useLazyQuery Hook", () => { const promise1 = execute({ variables: { id: "1" } }); const promise2 = execute({ variables: { id: "2" } }); - await expect(promise1).resolves.toEqualQueryResult({ + await expect(promise1).resolves.toEqualApolloQueryResult({ data: mocks[0].result.data, loading: false, - called: true, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { id: "2" }, + partial: false, }); - await expect(promise2).resolves.toEqualQueryResult({ + await expect(promise2).resolves.toEqualApolloQueryResult({ data: mocks[1].result.data, loading: false, - called: true, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { id: "2" }, + partial: false, }); - if (IS_REACT_17) { + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: mocks[1].result.data, called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: { id: "1" }, - }); - } - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: undefined, - variables: { id: "2" }, - }); - } - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: mocks[1].result.data, - called: true, - 
loading: false, - networkStatus: NetworkStatus.ready, + loading: false, + networkStatus: NetworkStatus.ready, previousData: undefined, variables: { id: "2" }, }); @@ -1768,118 +2172,6 @@ describe("useLazyQuery Hook", () => { await expect(takeSnapshot).not.toRerender(); }); - it("uses the most recent options when the hook rerenders before execution", async () => { - interface Data { - user: { id: string; name: string }; - } - - interface Variables { - id: string; - } - - const query: TypedDocumentNode<Data, Variables> = gql` - query UserQuery($id: ID!) { - user(id: $id) { - id - name - } - } - `; - - const mocks = [ - { - request: { query, variables: { id: "1" } }, - result: { data: { user: { id: "1", name: "John Doe" } } }, - delay: 30, - }, - { - request: { query, variables: { id: "2" } }, - result: { data: { user: { id: "2", name: "Jane Doe" } } }, - delay: 20, - }, - ]; - - using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot, rerender } = - await renderHookToSnapshotStream( - ({ id }) => useLazyQuery(query, { variables: { id } }), - { - initialProps: { id: "1" }, - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}>{children}</MockedProvider> - ), - } - ); - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - error: undefined, - called: false, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { id: "1" }, - }); - } - - await rerender({ id: "2" }); - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - error: undefined, - called: false, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { id: "1" }, - }); - } - - const [execute] = getCurrentSnapshot(); - const promise = execute(); - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: { id: "2" }, - }); - } - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: mocks[1].result.data, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { id: "2" }, - }); - } - - await expect(promise).resolves.toEqualQueryResult({ - data: mocks[1].result.data, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: { id: "2" }, - }); - }); - // https://github.com/apollographql/apollo-client/issues/10198 it("uses the most recent query document when the hook rerenders before execution", async () => { const query = gql` @@ -1908,9 +2200,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1924,9 +2215,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -1937,25 +2227,17 @@ describe("useLazyQuery Hook", () => { const [execute] = getCurrentSnapshot(); - const promise = execute(); - - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ 
- data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - } + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { hello: "Greetings" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { hello: "Greetings" }, called: true, loading: false, @@ -1965,14 +2247,7 @@ describe("useLazyQuery Hook", () => { }); } - await expect(promise).resolves.toEqualQueryResult({ - data: { hello: "Greetings" }, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, - }); + await expect(takeSnapshot).not.toRerender(); }); it("does not refetch when rerendering after executing query", async () => { @@ -2009,15 +2284,11 @@ describe("useLazyQuery Hook", () => { const client = new ApolloClient({ link, cache: new InMemoryCache() }); - const { result, rerender } = renderHook( - () => useLazyQuery(query, { variables: { id: "1" } }), - { - initialProps: { id: "1" }, - wrapper: ({ children }) => ( - <ApolloProvider client={client}>{children}</ApolloProvider> - ), - } - ); + const { result, rerender } = renderHook(() => useLazyQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); const [execute] = result.current; @@ -2093,7 +2364,7 @@ describe("useLazyQuery Hook", () => { expect(fetchCount).toBe(1); await waitFor(() => { - expect(result.current[1]).toEqualQueryResult({ + expect(result.current[1]).toEqualLazyQueryResult({ data: { user: { id: "2", name: "John Doe" } }, called: true, loading: false, @@ -2108,7 +2379,7 @@ describe("useLazyQuery Hook", () => { await act(() => result.current[0]()); await waitFor(() => { - expect(result.current[1]).toEqualQueryResult({ + expect(result.current[1]).toEqualLazyQueryResult({ data: { user: { id: null, name: "John Default" } }, called: true, loading: false, @@ -2148,7 +2419,7 @@ describe("useLazyQuery Hook", () => { }, { request: { query, variables: { id: "2" } }, - result: { errors: [new GraphQLError("Oops")] }, + result: { errors: [{ message: "Oops" }] }, delay: 20, }, { @@ -2173,7 +2444,6 @@ describe("useLazyQuery Hook", () => { return useLazyQuery(query, { fetchPolicy: "cache-first", - variables: { id: "1" }, skipPollAttempt: () => { trackClosureValue("skipPollAttempt", count); return false; @@ -2194,14 +2464,14 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: "1" }, + // @ts-expect-error Need to fix the return value of this property + variables: {}, }); } @@ -2214,39 +2484,26 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: "1" }, + // @ts-expect-error Need to fix the return value of this property + variables: {}, }); } let [execute] = getCurrentSnapshot(); expect(execute).toBe(originalExecute); - await execute(); - - { - const [, result] = await takeSnapshot(); - - 
expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: { id: "1" }, - }); - } + await execute({ variables: { id: "1" } }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { user: { id: "1", name: "John Doe" } }, called: true, loading: false, @@ -2265,15 +2522,10 @@ describe("useLazyQuery Hook", () => { // TODO: Update when https://github.com/testing-library/react-render-stream-testing-library/issues/13 is fixed await rerender(undefined); - [execute] = getCurrentSnapshot(); - expect(execute).toBe(originalExecute); - - await execute({ variables: { id: "2" } }); - { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { user: { id: "1", name: "John Doe" } }, called: true, loading: false, @@ -2283,29 +2535,25 @@ describe("useLazyQuery Hook", () => { }); } - { - const [, result] = await takeSnapshot(); + [execute] = getCurrentSnapshot(); + expect(execute).toBe(originalExecute); - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: { user: { id: "1", name: "John Doe" } }, - variables: { id: "2" }, - }); - } + await expect(execute({ variables: { id: "2" } })).rejects.toEqual( + new ApolloError({ + graphQLErrors: [{ message: "Oops" }], + }) + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { user: { id: "1", name: "John Doe" } }, error: new ApolloError({ graphQLErrors: [{ message: "Oops" }] }), called: true, loading: false, networkStatus: NetworkStatus.error, - previousData: { user: { id: "1", name: "John Doe" } }, + previousData: undefined, variables: { id: "2" }, }); } @@ -2323,39 +2571,26 @@ describe("useLazyQuery Hook", () => { [execute] = getCurrentSnapshot(); expect(execute).toBe(originalExecute); - await execute({ variables: { id: "3" } }); - { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: undefined, + expect(result).toEqualLazyQueryResult({ + data: { user: { id: "1", name: "John Doe" } }, error: new ApolloError({ graphQLErrors: [{ message: "Oops" }] }), called: true, loading: false, networkStatus: NetworkStatus.error, - previousData: { user: { id: "1", name: "John Doe" } }, + previousData: undefined, variables: { id: "2" }, }); } - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.setVariables, - previousData: { user: { id: "1", name: "John Doe" } }, - variables: { id: "3" }, - }); - } + await execute({ variables: { id: "3" } }); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { user: { id: "3", name: "Johnny Three" } }, called: true, loading: false, @@ -2411,9 +2646,10 @@ describe("useLazyQuery Hook", () => { const client = new ApolloClient({ link, cache: new InMemoryCache() }); const { result, rerender } = renderHook( - ({ id }) => useLazyQuery(query, { variables: { id } }), + ({ notifyOnNetworkStatusChange }) => + useLazyQuery(query, { notifyOnNetworkStatusChange }), { - initialProps: { id: "1" }, + initialProps: { notifyOnNetworkStatusChange: false }, wrapper: ({ children }) => ( 
<ApolloProvider client={client}>{children}</ApolloProvider> ), @@ -2422,13 +2658,15 @@ describe("useLazyQuery Hook", () => { const [execute] = result.current; - rerender({ id: "2" }); + rerender({ notifyOnNetworkStatusChange: true }); expect(result.current[0]).toBe(execute); }); describe("network errors", () => { - async function check(errorPolicy: ErrorPolicy) { + // For errorPolicy:"none", we expect result.error to be defined and + // result.data to be undefined + it('handles errorPolicy:"none" appropriately', async () => { const networkError = new Error("from the network"); const client = new ApolloClient({ @@ -2448,7 +2686,7 @@ describe("useLazyQuery Hook", () => { await renderHookToSnapshotStream( () => useLazyQuery(helloQuery, { - errorPolicy, + errorPolicy: "none", }), { wrapper: ({ children }) => ( @@ -2460,9 +2698,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -2471,25 +2708,16 @@ describe("useLazyQuery Hook", () => { }); } - const execute = getCurrentSnapshot()[0]; - setTimeout(execute); + const [execute] = getCurrentSnapshot(); - { - const [, result] = await takeSnapshot(); + await expect(execute()).rejects.toEqual( + new ApolloError({ networkError }) + ); - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - } { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, error: new ApolloError({ networkError }), called: true, @@ -2499,72 +2727,35 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } - } - // For errorPolicy:"none", we expect result.error to be defined and - // result.data to be undefined, which is what we test above. - it('handles errorPolicy:"none" appropriately', () => check("none")); + await expect(takeSnapshot).not.toRerender(); + }); // If there was any data to report, errorPolicy:"all" would report both // result.data and result.error, but there is no GraphQL data when we // encounter a network error, so the test again captures desired behavior. - it('handles errorPolicy:"all" appropriately', () => check("all")); - - // Technically errorPolicy:"ignore" is supposed to throw away result.error, - // but in the case of network errors, since there's no actual data to - // report, it's useful/important that we report result.error anyway. 
- it('handles errorPolicy:"ignore" appropriately', () => check("ignore")); - }); - - describe("options.defaultOptions", () => { - it("defaultOptions do not confuse useLazyQuery", async () => { - const counterQuery: TypedDocumentNode<{ - counter: number; - }> = gql` - query GetCounter { - counter - } - `; + it('handles errorPolicy:"all" appropriately', async () => { + const networkError = new Error("from the network"); - let count = 0; const client = new ApolloClient({ cache: new InMemoryCache(), link: new ApolloLink( (request) => new Observable((observer) => { - if (request.operationName === "GetCounter") { - setTimeout(() => { - observer.next({ - data: { - counter: ++count, - }, - }); - observer.complete(); - }, 20); - } else { - observer.error( - new Error( - `Unknown query: ${request.operationName || request.query}` - ) - ); - } + setTimeout(() => { + observer.error(networkError); + }, 20); }) ), }); - const defaultFetchPolicy = "network-only"; - using _disabledAct = disableActEnvironment(); const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( - () => { - return useLazyQuery(counterQuery, { - defaultOptions: { - fetchPolicy: defaultFetchPolicy, - notifyOnNetworkStatusChange: true, - }, - }); - }, + () => + useLazyQuery(helloQuery, { + errorPolicy: "all", + }), { wrapper: ({ children }) => ( <ApolloProvider client={client}>{children}</ApolloProvider> @@ -2575,9 +2766,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -2587,45 +2777,94 @@ describe("useLazyQuery Hook", () => { } const [execute] = getCurrentSnapshot(); - const execResult = await execute(); - expect(execResult).toEqualQueryResult({ - data: { counter: 1 }, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, - }); + await expect(execute()).rejects.toEqual( + new ApolloError({ networkError }) + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, + error: new ApolloError({ networkError }), called: true, - loading: true, - networkStatus: NetworkStatus.loading, + loading: false, + networkStatus: NetworkStatus.error, + previousData: undefined, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); + }); + + // Technically errorPolicy:"ignore" is supposed to throw away result.error, + // but in the case of network errors, since there's no actual data to + // report, it's useful/important that we report result.error anyway. 
+ it('handles errorPolicy:"ignore" appropriately', async () => { + const networkError = new Error("from the network"); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink( + (request) => + new Observable((observer) => { + setTimeout(() => { + observer.error(networkError); + }, 20); + }) + ), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = + await renderHookToSnapshotStream( + () => + useLazyQuery(helloQuery, { + errorPolicy: "ignore", + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, previousData: undefined, variables: {}, }); } + const [execute] = getCurrentSnapshot(); + + await expect(execute()).rejects.toEqual( + new ApolloError({ networkError }) + ); + { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { counter: 1 }, + expect(result).toEqualLazyQueryResult({ + data: undefined, + error: new ApolloError({ networkError }), called: true, loading: false, - networkStatus: NetworkStatus.ready, + networkStatus: NetworkStatus.error, previousData: undefined, variables: {}, }); } - const { options } = getCurrentSnapshot()[1].observable; - expect(options.fetchPolicy).toBe(defaultFetchPolicy); + await expect(takeSnapshot).not.toRerender(); }); }); @@ -2649,9 +2888,8 @@ describe("useLazyQuery Hook", () => { { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, @@ -2665,39 +2903,18 @@ describe("useLazyQuery Hook", () => { const promise = execute(); expect(requests).toBe(1); - { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: undefined, - called: true, - loading: true, - networkStatus: NetworkStatus.loading, - previousData: undefined, - variables: {}, - }); - } - await client.clearStore(); - await expect(promise).resolves.toEqualQueryResult({ - data: undefined, - error: new ApolloError({ - networkError: new InvariantError( - "Store reset while query was in flight (not completed in link chain)" - ), - }), - loading: true, - networkStatus: NetworkStatus.loading, - called: true, - previousData: undefined, - variables: {}, - }); + await expect(promise).rejects.toEqual( + new InvariantError( + "Store reset while query was in flight (not completed in link chain)" + ) + ); { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: undefined, error: new ApolloError({ networkError: new InvariantError( @@ -2783,7 +3000,7 @@ describe("useLazyQuery Hook", () => { const [execute] = getCurrentSnapshot(); const result = await execute(); - expect(result).toEqualQueryResult({ + expect(result).toEqualApolloQueryResult({ data: { currentUser: { __typename: "User", @@ -2791,20 +3008,15 @@ describe("useLazyQuery Hook", () => { name: "Test User", }, }, - called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, + partial: false, }); - // Loading - await takeSnapshot(); - { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { currentUser: { 
__typename: "User", @@ -2819,6 +3031,8 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); it("does not mask queries when dataMasking is `false`", async () => { @@ -2889,7 +3103,7 @@ describe("useLazyQuery Hook", () => { const [execute] = getCurrentSnapshot(); const result = await execute(); - expect(result).toEqualQueryResult({ + expect(result).toEqualApolloQueryResult({ data: { currentUser: { __typename: "User", @@ -2898,20 +3112,15 @@ describe("useLazyQuery Hook", () => { age: 30, }, }, - called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, + partial: false, }); - // Loading - await takeSnapshot(); - { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { currentUser: { __typename: "User", @@ -2927,6 +3136,8 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); it("does not mask queries by default", async () => { @@ -2996,7 +3207,7 @@ describe("useLazyQuery Hook", () => { const [execute] = getCurrentSnapshot(); const result = await execute(); - expect(result).toEqualQueryResult({ + expect(result).toEqualApolloQueryResult({ data: { currentUser: { __typename: "User", @@ -3005,20 +3216,15 @@ describe("useLazyQuery Hook", () => { age: 30, }, }, - called: true, loading: false, networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, + partial: false, }); - // Loading - await takeSnapshot(); - { const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ + expect(result).toEqualLazyQueryResult({ data: { currentUser: { __typename: "User", @@ -3034,6 +3240,8 @@ describe("useLazyQuery Hook", () => { variables: {}, }); } + + await expect(takeSnapshot).not.toRerender(); }); it("masks queries updated by the cache", async () => { @@ -3098,188 +3306,1983 @@ describe("useLazyQuery Hook", () => { // initial render await takeSnapshot(); - const [execute] = getCurrentSnapshot(); - await execute(); + const [execute] = getCurrentSnapshot(); + await execute(); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + client.writeQuery({ + query, + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User (updated)", + age: 35, + }, + }, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User (updated)", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + currentUser: { __typename: "User", id: 1, name: "Test User" }, + }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); + }); + + it("does not rerender when updating field in named fragment", async () => { + type UserFieldsFragment = { + __typename: "User"; + age: number; + } & { " $fragmentName"?: "UserFieldsFragment" }; + + interface Query { + currentUser: { + __typename: "User"; + id: number; + name: string; + } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; + } + + const query: TypedDocumentNode<Query, Record<string, never>> = gql` + query MaskedQuery { + currentUser { + id + name + 
...UserFields + } + } + + fragment UserFields on User { + age + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 30, + }, + }, + }, + delay: 20, + }, + ]; + + const client = new ApolloClient({ + dataMasking: true, + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = + await renderHookToSnapshotStream(() => useLazyQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + + // initial render + await takeSnapshot(); + + const [execute] = getCurrentSnapshot(); + await execute(); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + client.writeQuery({ + query, + data: { + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 35, + }, + }, + }); + + await expect(takeSnapshot).not.toRerender(); + + expect(client.readQuery({ query })).toEqual({ + currentUser: { + __typename: "User", + id: 1, + name: "Test User", + age: 35, + }, + }); + }); + }); +}); + +test("throws when calling `refetch` before execute function is called", async () => { + const { query, mocks } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [, { refetch }] = await takeSnapshot(); + + expect(() => { + void refetch(); + }).toThrow( + new InvariantError( + "useLazyQuery: 'refetch' cannot be called before executing the query." + ) + ); +}); + +test("throws when calling `fetchMore` before execute function is called", async () => { + const { query, mocks } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [, { fetchMore }] = await takeSnapshot(); + + expect(() => { + void fetchMore({}); + }).toThrow( + new InvariantError( + "useLazyQuery: 'fetchMore' cannot be called before executing the query." 
+ ) + ); +}); + +test("throws when calling `subscribeToMore` before execute function is called", async () => { + const { query, mocks } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [, { subscribeToMore }] = await takeSnapshot(); + + expect(() => { + subscribeToMore({ + document: gql` + subscription { + foo + } + `, + }); + }).toThrow( + new InvariantError( + "useLazyQuery: 'subscribeToMore' cannot be called before executing the query." + ) + ); +}); + +test("throws when calling `updateQuery` before execute function is called", async () => { + const { query, mocks } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [, { updateQuery }] = await takeSnapshot(); + + expect(() => { + updateQuery(() => ({ greeting: "foo" })); + }).toThrow( + new InvariantError( + "useLazyQuery: 'updateQuery' cannot be called before executing the query." + ) + ); +}); + +test("throws when calling `startPolling` before execute function is called", async () => { + const { query, mocks } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [, { startPolling }] = await takeSnapshot(); + + expect(() => { + startPolling(10); + }).toThrow( + new InvariantError( + "useLazyQuery: 'startPolling' cannot be called before executing the query." + ) + ); +}); + +test("throws when calling `stopPolling` before execute function is called", async () => { + const { query, mocks } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [, { stopPolling }] = await takeSnapshot(); + + expect(() => { + stopPolling(); + }).toThrow( + new InvariantError( + "useLazyQuery: 'stopPolling' cannot be called before executing the query." 
+ ) + ); +}); + +test("throws when calling execute function during first render", async () => { + using _consoleSpy = spyOnConsole("error"); + const { query, mocks } = setupSimpleCase(); + + function App() { + const [execute] = useLazyQuery(query); + + void execute(); + + return null; + } + + // We need to use the `async` function here to prevent console errors from + // showing up + await expect(async () => + renderAsync(<App />, { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }) + ).rejects.toThrow( + new InvariantError( + "useLazyQuery: 'execute' should not be called during render. To start a query during render, use the 'useQuery' hook." + ) + ); +}); + +test("throws when calling execute function during subsequent render", async () => { + using _consoleSpy = spyOnConsole("error"); + const { query, mocks } = setupSimpleCase(); + const user = userEvent.setup(); + + function App() { + const [count, setCount] = React.useState(0); + const [execute] = useLazyQuery(query); + + if (count === 1) { + void execute(); + } + + return <button onClick={() => setCount(1)}>Load</button>; + } + + let error!: Error; + + await renderAsync(<App />, { + wrapper: ({ children }) => ( + <ErrorBoundary onError={(e) => (error = e)} fallback={<div>Oops</div>}> + <MockedProvider mocks={mocks}>{children}</MockedProvider> + </ErrorBoundary> + ), + }); + + await act(() => user.click(screen.getByText("Load"))); + + expect(error).toEqual( + new InvariantError( + "useLazyQuery: 'execute' should not be called during render. To start a query during render, use the 'useQuery' hook." + ) + ); +}); + +test("uses the updated client when executing the function after changing clients", async () => { + const { query } = setupSimpleCase(); + + const client1 = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([ + { + request: { query }, + result: { data: { greeting: "Hello client 1" } }, + delay: 20, + }, + ]), + }); + + const client2 = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([ + { + request: { query }, + result: { data: { greeting: "Hello client 2" } }, + delay: 20, + }, + ]), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ client }) => useLazyQuery(query, { client }), + { initialProps: { client: client1 } } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { greeting: "Hello client 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello client 1" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await rerender({ client: client2 }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello client 1" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { greeting: "Hello client 2" }, + 
loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello client 2" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { greeting: "Hello client 1" }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("responds to cache updates after executing query", async () => { + const { query } = setupSimpleCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + delay: 20, + }, + ]), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { greeting: "Hello" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + client.writeQuery({ + query, + data: { + greeting: "Hello (updated)", + }, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello (updated)" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + greeting: "Hello", + }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("responds to cache updates after changing variables", async () => { + const { query, mocks } = setupVariablesCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error this should be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await expect( + execute({ variables: 
{ id: "2" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + variables: { id: "2" }, + }); + } + + client.writeQuery({ + query, + variables: { id: "2" }, + data: { + character: { + __typename: "Character", + id: "2", + name: "Black Widow (updated)", + }, + }, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { + __typename: "Character", + id: "2", + name: "Black Widow (updated)", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + variables: { id: "2" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); + + // Ensure that writing data to a different set of variables does not rerender + // the hook + client.writeQuery({ + query, + variables: { id: "1" }, + data: { + character: { + __typename: "Character", + id: "1", + name: "Spider-Man (updated)", + }, + }, + }); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("uses cached result when switching to variables already written to the cache", async () => { + const { query, mocks } = setupVariablesCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query, + variables: { id: "2" }, + data: { + character: { __typename: "Character", id: "2", name: "Cached Character" }, + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error this should be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await expect( + execute({ variables: { id: "2" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { + __typename: "Character", + id: "2", + name: "Cached Character", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { + __typename: "Character", + 
id: "2", + name: "Cached Character", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + variables: { id: "2" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("renders loading states where necessary when switching to variables maybe written to the cache with notifyOnNetworkStatusChange", async () => { + const { query, mocks } = setupVariablesCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query, + variables: { id: "2" }, + data: { + character: { __typename: "Character", id: "2", name: "Cached Character" }, + }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot } = await renderHookToSnapshotStream( + () => useLazyQuery(query, { notifyOnNetworkStatusChange: true }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error this should be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: true, + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: { id: "1" }, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await expect( + execute({ variables: { id: "2" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { + __typename: "Character", + id: "2", + name: "Cached Character", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { + __typename: "Character", + id: "2", + name: "Cached Character", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + variables: { id: "2" }, + }); + } + + await expect( + execute({ variables: { id: "3" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { + __typename: "Character", + id: "3", + name: "Iron Man", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: true, + loading: true, + networkStatus: NetworkStatus.setVariables, + previousData: { + character: { + __typename: "Character", + id: "2", + name: "Cached Character", + }, + }, + variables: { id: "3" }, + }); + } + + { + const [, result] = await takeSnapshot(); 
+ + expect(result).toEqualLazyQueryResult({ + data: { + character: { + __typename: "Character", + id: "3", + name: "Iron Man", + }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { + __typename: "Character", + id: "2", + name: "Cached Character", + }, + }, + variables: { id: "3" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("applies `errorPolicy` on next fetch when it changes between renders", async () => { + const query: TypedDocumentNode< + { + character: { __typename: "Character"; id: string; name: string } | null; + }, + VariablesCaseVariables + > = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + }, + delay: 20, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: null, + }, + errors: [new GraphQLError("Could not find character 1")], + }, + delay: 20, + }, + ]; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ errorPolicy }: { errorPolicy: ErrorPolicy }) => + useLazyQuery(query, { errorPolicy }), + { + initialProps: { errorPolicy: "none" }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error this should be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await rerender({ errorPolicy: "all" }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + const [, { refetch }] = getCurrentSnapshot(); + void refetch(); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: null, + }, + error: new ApolloError({ + graphQLErrors: [{ message: "Could not find character 1" }], + }), + called: true, + loading: false, + networkStatus: NetworkStatus.error, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + variables: { id: "1" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("applies `context` on next fetch when it changes between renders", async () => { + const query = gql` + query { + context + } + `; + 
+ const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + const context = operation.getContext(); + + return new Observable((observer) => { + setTimeout(() => { + observer.next({ data: { context: { source: context.source } } }); + observer.complete(); + }, 20); + }); + }), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ context }) => + useLazyQuery(query, { context, fetchPolicy: "network-only" }), + { + initialProps: { context: { source: "initialHookValue" } }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { context: { source: "initialHookValue" } }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { context: { source: "initialHookValue" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await rerender({ context: { source: "rerender" } }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { context: { source: "initialHookValue" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { context: { source: "rerender" } }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { context: { source: "rerender" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { context: { source: "initialHookValue" } }, + variables: {}, + }); + } + + await rerender({ context: { source: "rerenderForRefetch" } }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { context: { source: "rerender" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { context: { source: "initialHookValue" } }, + variables: {}, + }); + } + + // Ensure context isn't just applied to execute function + void getCurrentSnapshot()[1].refetch(); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { context: { source: "rerenderForRefetch" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { context: { source: "rerender" } }, + variables: {}, + }); + } + + await expect( + execute({ context: { source: "execute" } }) + ).resolves.toEqualApolloQueryResult({ + data: { context: { source: "execute" } }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { context: { source: "execute" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { context: { source: 
"rerenderForRefetch" } }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("applies `refetchWritePolicy` on next fetch when it changes between renders", async () => { + const query: TypedDocumentNode< + { primes: number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + delay: 20, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + { + request: { query, variables: { min: 30, max: 50 } }, + result: { data: { primes: [31, 37, 41, 43, 47] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + cache, + link: new MockLink(mocks), + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ refetchWritePolicy }) => useLazyQuery(query, { refetchWritePolicy }), + { + initialProps: { refetchWritePolicy: "merge" as RefetchWritePolicy }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error needs to be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { min: 0, max: 12 } }) + ).resolves.toEqualApolloQueryResult({ + data: mocks[0].result.data, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: mocks[0].result.data, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { min: 0, max: 12 }, + }); + } + + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + const [, { refetch }] = getCurrentSnapshot(); + + void refetch({ min: 12, max: 30 }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { primes: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: mocks[0].result.data, + variables: { min: 12, max: 30 }, + }); + } + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + ]); + + await rerender({ refetchWritePolicy: "overwrite" }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { primes: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: mocks[0].result.data, + variables: { min: 12, max: 30 }, + }); + } + + void refetch({ min: 30, max: 50 }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: 
mocks[2].result.data, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { primes: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] }, + variables: { min: 30, max: 50 }, + }); + } + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + [undefined, [31, 37, 41, 43, 47]], + ]); + + await expect(takeSnapshot).not.toRerender(); +}); + +test("applies `returnPartialData` on next fetch when it changes between renders", async () => { + const fullQuery = gql` + query ($id: ID!) { + character(id: $id) { + id + name + } + } + `; + + const partialQuery = gql` + query ($id: ID!) { + character(id: $id) { + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery, variables: { id: "1" } }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange", + }, + }, + }, + delay: 20, + }, + { + request: { query: fullQuery, variables: { id: "2" } }, + result: { + data: { + character: { + __typename: "Character", + id: "2", + name: "Hulk", + }, + }, + }, + delay: 20, + }, + ]; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query: partialQuery, + data: { character: { __typename: "Character", id: "1" } }, + variables: { id: "1" }, + }); + + client.writeQuery({ + query: partialQuery, + data: { character: { __typename: "Character", id: "2" } }, + variables: { id: "2" }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ returnPartialData }) => useLazyQuery(fullQuery, { returnPartialData }), + { + initialProps: { returnPartialData: false }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Doctor Strange" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Doctor Strange" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await rerender({ returnPartialData: true }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Doctor Strange" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await expect( + execute({ variables: { id: "2" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Hulk" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { character: { __typename: "Character", id: "2" } }, + called: true, + loading: true, + 
networkStatus: NetworkStatus.setVariables, + previousData: { + character: { __typename: "Character", id: "1", name: "Doctor Strange" }, + }, + variables: { id: "2" }, + }); + } + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { character: { __typename: "Character", id: "2", name: "Hulk" } }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "2" }, + }, + variables: { id: "2" }, + }); + } + + await expect(takeSnapshot).not.toRerender(); +}); + +test("applies updated `fetchPolicy` on next fetch when it changes between renders", async () => { + const { query, mocks } = setupVariablesCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query, + data: { + character: { __typename: "Character", id: "1", name: "Spider-Cache" }, + }, + variables: { id: "1" }, + }); + + client.writeQuery({ + query, + data: { + character: { __typename: "Character", id: "2", name: "Cached Widow" }, + }, + variables: { id: "2" }, + }); + + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ fetchPolicy }) => useLazyQuery(query, { fetchPolicy }), + { + initialProps: { fetchPolicy: "cache-first" as WatchQueryFetchPolicy }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + // @ts-expect-error should be undefined + variables: {}, + }); + } + + const [execute] = getCurrentSnapshot(); + + await expect( + execute({ variables: { id: "1" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Cache" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Cache" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await rerender({ fetchPolicy: "cache-and-network" }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "1", name: "Spider-Cache" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: { id: "1" }, + }); + } + + await expect( + execute({ variables: { id: "2" } }) + ).resolves.toEqualApolloQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Cached Widow" }, + }, + called: true, + loading: true, + networkStatus: NetworkStatus.setVariables, + previousData: { + character: { __typename: "Character", id: "1", name: "Spider-Cache" }, + }, + variables: { id: "2" }, + }); + } + + { + const [, result] = await takeSnapshot(); - // Loading - await takeSnapshot(); + 
expect(result).toEqualLazyQueryResult({ + data: { + character: { __typename: "Character", id: "2", name: "Black Widow" }, + }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { + character: { __typename: "Character", id: "2", name: "Cached Widow" }, + }, + variables: { id: "2" }, + }); + } - { - const [, result] = await takeSnapshot(); + await expect(takeSnapshot).not.toRerender(); +}); - expect(result).toEqualQueryResult({ - data: { - currentUser: { - __typename: "User", - id: 1, - name: "Test User", - }, - }, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, - }); - } +test("renders loading states at appropriate times on next fetch after updating `notifyOnNetworkStatusChange`", async () => { + const { query } = setupSimpleCase(); - client.writeQuery({ - query, - data: { - currentUser: { - __typename: "User", - id: 1, - name: "Test User (updated)", - age: 35, - }, - }, - }); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([ + { request: { query }, result: { data: { greeting: "Hello 1" } } }, + { request: { query }, result: { data: { greeting: "Hello 2" } } }, + { request: { query }, result: { data: { greeting: "Hello 3" } } }, + ]), + }); + using _disabledAct = disableActEnvironment(); + const { takeSnapshot, getCurrentSnapshot, rerender } = + await renderHookToSnapshotStream( + ({ notifyOnNetworkStatusChange }) => + useLazyQuery(query, { + notifyOnNetworkStatusChange, + fetchPolicy: "network-only", + }), { - const [, result] = await takeSnapshot(); - - expect(result).toEqualQueryResult({ - data: { - currentUser: { - __typename: "User", - id: 1, - name: "Test User (updated)", - }, - }, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: { - currentUser: { __typename: "User", id: 1, name: "Test User" }, - }, - variables: {}, - }); + initialProps: { notifyOnNetworkStatusChange: false }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), } + ); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: undefined, + called: false, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, }); + } - it("does not rerender when updating field in named fragment", async () => { - type UserFieldsFragment = { - __typename: "User"; - age: number; - } & { " $fragmentName"?: "UserFieldsFragment" }; + const [execute] = getCurrentSnapshot(); - interface Query { - currentUser: { - __typename: "User"; - id: number; - name: string; - } & { " $fragmentRefs"?: { UserFieldsFragment: UserFieldsFragment } }; - } + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { greeting: "Hello 1" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); - const query: TypedDocumentNode<Query, Record<string, never>> = gql` - query MaskedQuery { - currentUser { - id - name - ...UserFields - } - } + { + const [, result] = await takeSnapshot(); - fragment UserFields on User { - age - } - `; + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello 1" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } - const mocks = [ - { - request: { query }, - result: { - data: { - currentUser: { - __typename: "User", - id: 1, - name: "Test User", - age: 30, - }, - }, - }, - delay: 20, - }, - ]; + await 
rerender({ notifyOnNetworkStatusChange: true }); - const client = new ApolloClient({ - dataMasking: true, - cache: new InMemoryCache(), - link: new MockLink(mocks), - }); + { + const [, result] = await takeSnapshot(); - using _disabledAct = disableActEnvironment(); - const { takeSnapshot, getCurrentSnapshot } = - await renderHookToSnapshotStream(() => useLazyQuery(query), { - wrapper: ({ children }) => ( - <ApolloProvider client={client}>{children}</ApolloProvider> - ), - }); + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello 1" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: undefined, + variables: {}, + }); + } - // initial render - await takeSnapshot(); + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { greeting: "Hello 2" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, + }); - const [execute] = getCurrentSnapshot(); - await execute(); + { + const [, result] = await takeSnapshot(); - // Loading - await takeSnapshot(); + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello 1" }, + called: true, + loading: true, + networkStatus: NetworkStatus.loading, + previousData: undefined, + variables: {}, + }); + } - { - const [, result] = await takeSnapshot(); + { + const [, result] = await takeSnapshot(); - expect(result).toEqualQueryResult({ - data: { - currentUser: { - __typename: "User", - id: 1, - name: "Test User", - }, - }, - called: true, - loading: false, - networkStatus: NetworkStatus.ready, - previousData: undefined, - variables: {}, - }); - } + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello 2" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { greeting: "Hello 1" }, + variables: {}, + }); + } - client.writeQuery({ - query, - data: { - currentUser: { - __typename: "User", - id: 1, - name: "Test User", - age: 35, - }, - }, - }); + await rerender({ notifyOnNetworkStatusChange: false }); - await expect(takeSnapshot).not.toRerender(); + { + const [, result] = await takeSnapshot(); - expect(client.readQuery({ query })).toEqual({ - currentUser: { - __typename: "User", - id: 1, - name: "Test User", - age: 35, - }, - }); + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello 2" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { greeting: "Hello 1" }, + variables: {}, }); + } + + await expect(execute()).resolves.toEqualApolloQueryResult({ + data: { greeting: "Hello 3" }, + loading: false, + networkStatus: NetworkStatus.ready, + partial: false, }); + + { + const [, result] = await takeSnapshot(); + + expect(result).toEqualLazyQueryResult({ + data: { greeting: "Hello 3" }, + called: true, + loading: false, + networkStatus: NetworkStatus.ready, + previousData: { greeting: "Hello 2" }, + variables: {}, + }); + } + + await expect(takeSnapshot).not.toRerender(); }); describe.skip("Type Tests", () => { test("NoInfer prevents adding arbitrary additional variables", () => { const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>; - const [_, { variables }] = useLazyQuery(typedNode, { + const [execute, { variables }] = useLazyQuery(typedNode); + + void execute({ variables: { bar: 4, // @ts-expect-error nonExistingVariable: "string", }, }); + variables?.bar; // @ts-expect-error variables?.nonExistingVariable; @@ -3511,4 +5514,145 @@ describe.skip("Type Tests", () => { expectTypeOf(data).toEqualTypeOf<Query | undefined>(); } }); + + test("variables 
are optional and can be anything with an DocumentNode", () => { + const query = gql``; + + const [execute] = useLazyQuery(query); + + void execute(); + void execute({}); + void execute({ variables: {} }); + void execute({ variables: { foo: "bar" } }); + void execute({ variables: { bar: "baz" } }); + }); + + test("variables are optional and can be anything with unspecified TVariables on a TypedDocumentNode", () => { + const query: TypedDocumentNode<{ greeting: string }> = gql``; + + const [execute] = useLazyQuery(query); + + void execute(); + void execute({}); + void execute({ variables: {} }); + void execute({ variables: { foo: "bar" } }); + void execute({ variables: { bar: "baz" } }); + }); + + test("variables are optional when TVariables are empty", () => { + const query: TypedDocumentNode< + { greeting: string }, + Record<string, never> + > = gql``; + + const [execute] = useLazyQuery(query); + + void execute(); + void execute({}); + void execute({ variables: {} }); + // @ts-expect-error unknown variables + void execute({ variables: { foo: "bar" } }); + }); + + test("does not allow variables when TVariables is `never`", () => { + const query: TypedDocumentNode<{ greeting: string }, never> = gql``; + + const [execute] = useLazyQuery(query); + + void execute(); + void execute({}); + void execute({ variables: {} }); + // @ts-expect-error unknown variables + void execute({ variables: { foo: "bar" } }); + }); + + test("optional variables are optional", () => { + const query: TypedDocumentNode<{ posts: string[] }, { limit?: number }> = + gql``; + + const [execute] = useLazyQuery(query); + + void execute(); + void execute({}); + void execute({ variables: {} }); + void execute({ variables: { limit: 10 } }); + void execute({ + variables: { + // @ts-expect-error unknown variables + foo: "bar", + }, + }); + void execute({ + variables: { + limit: 10, + // @ts-expect-error unknown variables + foo: "bar", + }, + }); + }); + + test("enforces required variables when TVariables includes required variables", () => { + const query: TypedDocumentNode<{ character: string }, { id: string }> = + gql``; + + const [execute] = useLazyQuery(query); + + // @ts-expect-error empty variables + void execute(); + // @ts-expect-error empty variables + void execute({}); + // @ts-expect-error empty variables + void execute({ variables: {} }); + void execute({ variables: { id: "1" } }); + void execute({ + variables: { + // @ts-expect-error unknown variables + foo: "bar", + }, + }); + void execute({ + variables: { + id: "1", + // @ts-expect-error unknown variables + foo: "bar", + }, + }); + }); + + test("requires variables with mixed TVariables", () => { + const query: TypedDocumentNode< + { character: string }, + { id: string; language?: string } + > = gql``; + + const [execute] = useLazyQuery(query); + + // @ts-expect-error empty variables + void execute(); + // @ts-expect-error empty variables + void execute({}); + // @ts-expect-error empty variables + void execute({ variables: {} }); + void execute({ variables: { id: "1" } }); + void execute({ + // @ts-expect-error missing required variables + variables: { language: "en" }, + }); + void execute({ variables: { id: "1", language: "en" } }); + void execute({ + variables: { + id: "1", + // @ts-expect-error unknown variables + foo: "bar", + }, + }); + void execute({ + variables: { + id: "1", + language: "en", + // @ts-expect-error unknown variables + foo: "bar", + }, + }); + }); }); diff --git a/src/react/hooks/__tests__/useQuery.test.tsx 
b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -43,7 +43,7 @@ import { spyOnConsole, } from "../../../testing/internal/index.js"; import { QueryResult } from "../../types/types.js"; -import { useLazyQuery } from "../useLazyQuery.js"; +import { LazyQueryResult, useLazyQuery } from "../useLazyQuery.js"; import { useMutation } from "../useMutation.js"; import { useQuery } from "../useQuery.js"; @@ -5400,7 +5400,7 @@ describe("useQuery Hook", () => { const renderStream = createRenderStream({ initialSnapshot: { useQueryResult: null as QueryResult<Query1, Variables> | null, - useLazyQueryResult: null as QueryResult<Query2, Variables> | null, + useLazyQueryResult: null as LazyQueryResult<Query2, Variables> | null, }, }); @@ -5434,14 +5434,16 @@ describe("useQuery Hook", () => { }); const [execute, useLazyQueryResult] = useLazyQuery(query2, { - variables: { id: 1 }, + notifyOnNetworkStatusChange: true, }); renderStream.replaceSnapshot({ useQueryResult, useLazyQueryResult }); return ( <> - <button onClick={() => execute()}>Run 2nd query</button> + <button onClick={() => execute({ variables: { id: 1 } })}> + Run 2nd query + </button> <button onClick={() => { // Intentionally use reobserve here as opposed to refetch to @@ -5474,14 +5476,14 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + // @ts-expect-error should be undefined + variables: {}, }); } @@ -5500,14 +5502,14 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + // @ts-expect-error should be undefined + variables: {}, }); } @@ -5528,7 +5530,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -5567,7 +5569,7 @@ describe("useQuery Hook", () => { partial: true, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: { person: { __typename: "Person", id: 1, lastName: "Doe" } }, called: true, loading: false, @@ -5591,7 +5593,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: { person: { __typename: "Person", id: 1, lastName: "Doe" } }, called: true, loading: false, @@ -5630,7 +5632,7 @@ describe("useQuery Hook", () => { partial: true, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: { person: { __typename: "Person", id: 1, lastName: "Doe" } }, called: true, loading: false, @@ -5690,7 +5692,7 @@ describe("useQuery Hook", () => { const renderStream = createRenderStream({ initialSnapshot: { useQueryResult: null as QueryResult<Query1, Variables> | null, - useLazyQueryResult: null as 
QueryResult<Query2, Variables> | null, + useLazyQueryResult: null as LazyQueryResult<Query2, Variables> | null, }, }); @@ -5730,12 +5732,16 @@ describe("useQuery Hook", () => { }); const [execute, useLazyQueryResult] = useLazyQuery(query2, { - variables: { id: 1 }, + notifyOnNetworkStatusChange: true, }); renderStream.replaceSnapshot({ useQueryResult, useLazyQueryResult }); - return <button onClick={() => execute()}>Run 2nd query</button>; + return ( + <button onClick={() => execute({ variables: { id: 1 } })}> + Run 2nd query + </button> + ); } await renderStream.render(<App />, { @@ -5756,14 +5762,14 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + // @ts-expect-error should be undefined + variables: {}, }); } @@ -5782,14 +5788,14 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + // @ts-expect-error should be undefined + variables: {}, }); } @@ -5810,7 +5816,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -5838,7 +5844,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: { person: { __typename: "Person", @@ -5873,7 +5879,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: { person: { __typename: "Person", @@ -5940,7 +5946,7 @@ describe("useQuery Hook", () => { const renderStream = createRenderStream({ initialSnapshot: { useQueryResult: null as QueryResult<Query1, Variables> | null, - useLazyQueryResult: null as QueryResult<Query2, Variables> | null, + useLazyQueryResult: null as LazyQueryResult<Query2, Variables> | null, }, }); @@ -5980,12 +5986,16 @@ describe("useQuery Hook", () => { }); const [execute, useLazyQueryResult] = useLazyQuery(query2, { - variables: { id: 1 }, + notifyOnNetworkStatusChange: true, }); renderStream.replaceSnapshot({ useQueryResult, useLazyQueryResult }); - return <button onClick={() => execute()}>Run 2nd query</button>; + return ( + <button onClick={() => execute({ variables: { id: 1 } })}> + Run 2nd query + </button> + ); } await renderStream.render(<App />, { @@ -6006,14 +6016,14 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + // @ts-expect-error should be undefined + variables: {}, }); } @@ -6032,14 +6042,14 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + 
expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, - error: undefined, called: false, loading: false, networkStatus: NetworkStatus.ready, previousData: undefined, - variables: { id: 1 }, + // @ts-expect-error should be undefined + variables: {}, }); } @@ -6060,7 +6070,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: undefined, called: true, loading: true, @@ -6085,7 +6095,7 @@ describe("useQuery Hook", () => { variables: { id: 1 }, }); - expect(snapshot.useLazyQueryResult!).toEqualQueryResult({ + expect(snapshot.useLazyQueryResult!).toEqualLazyQueryResult({ data: { person: { __typename: "Person", diff --git a/src/react/ssr/__tests__/useLazyQuery.test.tsx b/src/react/ssr/__tests__/useLazyQuery.test.tsx deleted file mode 100644 --- a/src/react/ssr/__tests__/useLazyQuery.test.tsx +++ /dev/null @@ -1,74 +0,0 @@ -/** @jest-environment node */ -import { DocumentNode } from "graphql"; -import { gql } from "graphql-tag"; -import React from "react"; - -import { InMemoryCache } from "@apollo/client/cache"; -import { ApolloClient } from "@apollo/client/core"; -import { ApolloProvider } from "@apollo/client/react/context"; -import { useLazyQuery } from "@apollo/client/react/hooks"; -import { renderToStringWithData } from "@apollo/client/react/ssr"; -import { mockSingleLink } from "@apollo/client/testing"; - -describe("useLazyQuery Hook SSR", () => { - const CAR_QUERY: DocumentNode = gql` - query { - cars { - make - model - vin - } - } - `; - - const CAR_RESULT_DATA = { - cars: [ - { - make: "Audi", - model: "RS8", - vin: "DOLLADOLLABILL", - __typename: "Car", - }, - ], - }; - - it("should run query only after calling the lazy mode execute function", () => { - const link = mockSingleLink({ - request: { query: CAR_QUERY }, - result: { data: CAR_RESULT_DATA }, - }); - - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - ssrMode: true, - }); - - const Component = () => { - let html = null; - const [execute, { loading, called, data }] = useLazyQuery(CAR_QUERY); - - if (!loading && !called) { - execute(); - } - - if (!loading && called) { - expect(loading).toEqual(false); - expect(data).toEqual(CAR_RESULT_DATA); - html = <p>{data.cars[0].make}</p>; - } - - return html; - }; - - const app = ( - <ApolloProvider client={client}> - <Component /> - </ApolloProvider> - ); - - return renderToStringWithData(app).then((markup) => { - expect(markup).toMatch(/Audi/); - }); - }); -}); diff --git a/src/testing/matchers/index.d.ts b/src/testing/matchers/index.d.ts --- a/src/testing/matchers/index.d.ts +++ b/src/testing/matchers/index.d.ts @@ -5,11 +5,16 @@ import type { FetchResult, OperationVariables, } from "../../core/index.js"; -import type { QueryRef, QueryResult } from "../../react/index.js"; +import type { + LazyQueryResult, + QueryRef, + QueryResult, +} from "../../react/index.js"; import { NextRenderOptions, ObservableStream } from "../internal/index.js"; import { RenderStreamMatchers } from "@testing-library/react-render-stream/expect"; import { TakeOptions } from "../internal/ObservableStream.js"; import { CheckedKeys } from "./toEqualQueryResult.js"; +import { CheckedLazyQueryResult } from "./toEqualLazyQueryResult.js"; interface ApolloCustomMatchers<R = void, T = {}> { /** @@ -89,6 +94,14 @@ interface ApolloCustomMatchers<R = void, T = {}> { (expected: ApolloQueryResult<TData>) => R : { error: "matchers needs to be 
called on an ApolloQueryResult" }; + toEqualLazyQueryResult: T extends ( + LazyQueryResult<infer TData, infer TVariables> + ) ? + (expected: CheckedLazyQueryResult<TData, TVariables>) => R + : T extends Promise<LazyQueryResult<infer TData, infer TVariables>> ? + (expected: CheckedLazyQueryResult<TData, TVariables>) => R + : { error: "matchers needs to be called on a LazyQueryResult" }; + toEqualQueryResult: T extends QueryResult<infer TData, infer TVariables> ? (expected: Pick<QueryResult<TData, TVariables>, CheckedKeys>) => R : T extends Promise<QueryResult<infer TData, infer TVariables>> ? diff --git a/src/testing/matchers/index.ts b/src/testing/matchers/index.ts --- a/src/testing/matchers/index.ts +++ b/src/testing/matchers/index.ts @@ -13,6 +13,7 @@ import { toEmitValue } from "./toEmitValue.js"; import { toEmitValueStrict } from "./toEmitValueStrict.js"; import { toEqualApolloQueryResult } from "./toEqualApolloQueryResult.js"; import { toEqualFetchResult } from "./toEqualFetchResult.js"; +import { toEqualLazyQueryResult } from "./toEqualLazyQueryResult.js"; import { toEqualQueryResult } from "./toEqualQueryResult.js"; import { toHaveSuspenseCacheEntryUsing } from "./toHaveSuspenseCacheEntryUsing.js"; import { toMatchDocument } from "./toMatchDocument.js"; @@ -29,6 +30,7 @@ expect.extend({ toEmitValueStrict, toEqualApolloQueryResult, toEqualFetchResult, + toEqualLazyQueryResult, toEqualQueryResult, toBeDisposed, toHaveSuspenseCacheEntryUsing, diff --git a/src/testing/matchers/toEqualLazyQueryResult.ts b/src/testing/matchers/toEqualLazyQueryResult.ts new file mode 100644 --- /dev/null +++ b/src/testing/matchers/toEqualLazyQueryResult.ts @@ -0,0 +1,75 @@ +import { iterableEquality } from "@jest/expect-utils"; +import type { MatcherFunction } from "expect"; + +import type { OperationVariables } from "@apollo/client/core"; +import type { LazyQueryResult } from "@apollo/client/react"; + +const CHECKED_KEYS = [ + "loading", + "error", + "data", + "variables", + "networkStatus", + "called", + "previousData", +] as const; + +export type CheckedLazyQueryResult< + TData, + TVariables extends OperationVariables, +> = Pick<LazyQueryResult<TData, TVariables>, (typeof CHECKED_KEYS)[number]>; + +const hasOwnProperty = (obj: Record<string, any>, key: string) => + Object.prototype.hasOwnProperty.call(obj, key); + +export const toEqualLazyQueryResult: MatcherFunction< + [lazyQueryResult: CheckedLazyQueryResult<any, any>] +> = function (actual, expected) { + const lazyQueryResult = actual as LazyQueryResult<any, any>; + const hint = this.utils.matcherHint( + this.isNot ? ".not.toEqualLazyQueryResult" : "toEqualLazyQueryResult", + "lazyQueryResult", + "expected", + { isNot: this.isNot, promise: this.promise } + ); + + const checkedLazyQueryResult = CHECKED_KEYS.reduce( + (memo, key) => { + if (hasOwnProperty(lazyQueryResult, key)) { + memo[key] = lazyQueryResult[key]; + } + + return memo; + }, + {} as Partial<LazyQueryResult<any, any>> + ); + + const pass = this.equals( + checkedLazyQueryResult, + expected, + // https://github.com/jestjs/jest/blob/22029ba06b69716699254bb9397f2b3bc7b3cf3b/packages/expect/src/matchers.ts#L62-L67 + [...this.customTesters, iterableEquality], + true + ); + + return { + pass, + message: () => { + if (pass) { + return hint + `\n\nExpected: not ${this.utils.printExpected(expected)}`; + } + + return ( + hint + + "\n\n" + + this.utils.printDiffOrStringify( + expected, + checkedLazyQueryResult, + "Expected", + "Received", + true + ) + ); + }, + }; +};
`useMutation`/`useLazyQuery` execution functions always require full variables when using TypedDocumentNode

### Issue Description

The documentation for hooks like [`useMutation`](https://www.apollographql.com/docs/react/data/mutations#option-precedence) and [`useLazyQuery`](https://www.apollographql.com/docs/react/data/queries#manual-execution-with-uselazyquery) mentions that there are two distinct ways to provide variables to the actual request: (1) as an option to the hook itself and (2) as an option to the query function returned by the hook. These allow variable merging such that the query function used at the call site can override any values provided in the hook, and the docs linked above advertise this as a good way to provide defaults.

The issue I'm running into is using this with TypeScript, and specifically `TypedDocumentNode`s that strongly type the variables of the operation, because the hooks always require the fully formed variables rather than something that might support the merging / default structure you'd expect. Some possible solutions might be something straightforward like allowing a `Partial` at both sites, or something more involved that makes any fields provided in the hook optional in the query function.

Check the quick sketch below as well as the provided Codesandbox to see this issue in more detail.

Imagine we'd like to use the following mutation:

```gql
mutation AddPerson($name: String, $title: String) {
  addPerson(name: $name, title: $title) {
    node {
      id
      name
      title
    }
  }
}
```

Usage of `useMutation` might look something like this, which creates errors in both the hook declaration and the execution usage, despite functionally working properly if you erase those errors with `as any`:

```ts
const [addEngineer] = useMutation(TYPED_DOCUMENT_NODE, {
  // ❌ Property 'name' is missing in type '{ title: string; }' but required in type '{ name: string; title: string; }'.
  variables: { title: 'Engineer' }
});

// ❌ Property 'title' is missing in type '{ name: string; }' but required in type '{ name: string; title: string; }'.
useEffect(() => addEngineer({ variables: { name: 'Alvin' } }), []);
```

The same sort of issue occurs for `useLazyQuery` instances where the provided operation takes variables.

### Link to Reproduction

https://codesandbox.io/p/devbox/determined-elbakyan-vrsjcs?workspaceId=59bd876a-c3ad-4ffd-a783-ffc36a11e7d9

### Reproduction Steps

#### Try it out

1. Run the [Codesandbox](https://codesandbox.io/p/devbox/determined-elbakyan-vrsjcs?workspaceId=59bd876a-c3ad-4ffd-a783-ffc36a11e7d9)
2. Type in any name and click "Add engineer"
3. Observe that the name provided is correctly added to the list and notice that they're annotated as an Engineer

#### To the code

1. Jump into the code and find the two `as any` typecasts around `useMutation` and the usage of the execution function in `src/index.tsx`
2. Remove both typecasts

Observe type errors despite the working functionality described in the first section above.

### `@apollo/client` version

3.11.8
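As an editorial illustration of the "more involved" solution the reporter sketches, here is a minimal, hypothetical typing sketch. `useMutationWithDefaults` and `RemainingVariables` are not part of Apollo Client's API; they only show how keys supplied to the hook could become optional at the execute call site while the remaining keys stay required:

```ts
import { useCallback } from "react";
import { useMutation } from "@apollo/client";
import type { TypedDocumentNode, OperationVariables } from "@apollo/client";

// Hypothetical helper: keys already provided as hook-level defaults become
// optional for the execute function; everything else stays required.
type RemainingVariables<
  TVariables extends OperationVariables,
  TDefaults extends Partial<TVariables>,
> = Omit<TVariables, keyof TDefaults> &
  Partial<Pick<TVariables, keyof TDefaults & keyof TVariables>>;

// Sketch of a userland wrapper built on the existing hook; types only, not a
// proposal for the actual library signature.
function useMutationWithDefaults<
  TData,
  TVariables extends OperationVariables,
  TDefaults extends Partial<TVariables>,
>(mutation: TypedDocumentNode<TData, TVariables>, defaults: TDefaults) {
  const [mutate] = useMutation(mutation);

  return useCallback(
    (variables: RemainingVariables<TVariables, TDefaults>) =>
      // Merge hook-level defaults with call-site variables before executing.
      mutate({ variables: { ...defaults, ...variables } as TVariables }),
    [mutate, defaults]
  );
}

// With the AddPerson mutation from the issue (ADD_PERSON standing in for the
// TypedDocumentNode shown above), both of these would type-check:
//   const addEngineer = useMutationWithDefaults(ADD_PERSON, { title: "Engineer" });
//   addEngineer({ name: "Alvin" });
```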
Hey @maciesielka 👋 Good catch! FYI the team is away at a conference this week so we won't be able to look at this until at least next week. Thanks for raising the issue!

Hi @jerelmiller, can I have a try at this if possible?

@naman1608 absolutely!

@jerelmiller, I have a question, if you could take a look: should the `MutationFunctionOptions` type have a mutation key? The mutate function shouldn't be able to take a mutation, right? It's also not mentioned in the docs - https://www.apollographql.com/docs/react/data/mutations#options

Hi @maciesielka and @naman1608, I'm sorry that I do have to go back on this a bit: As these "double options", especially the merging behaviour between them, are very confusing, we're actually considering deprecating or even removing the `variables` option on the `useMutation`/`useLazyQuery` hooks and only allowing variables to be passed into the respective `execute` function. In that context, I'm honestly very reluctant towards any type changes here now - we want to start work on 4.0 very soon, and it would be very confusing for users if we went back-and-forth on this so quickly.

Okay, that makes sense if improving the typing experience here might encourage usage of functionality that's on its way out. I'll try and soft-deprecate usage of the variable merging in my projects until y'all get around to officially deprecating it.

Yup, makes sense, thanks @phryneas!!
2025-02-12T04:37:29Z
3.13
apollographql/apollo-client
12,052
apollographql__apollo-client-12052
[ "12051" ]
40d21f17e300c4498b22642803f3907bbd55064f
diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts
--- a/src/cache/core/cache.ts
+++ b/src/cache/core/cache.ts
@@ -229,7 +229,16 @@ export abstract class ApolloCache<TSerialized> implements DataProxy {
     const diffOptions: Cache.DiffOptions<TData, TVars> = {
       ...otherOptions,
       returnPartialData: true,
-      id: typeof from === "string" ? from : this.identify(from),
+      id:
+        // While our TypeScript types do not allow for `undefined` as a valid
+        // `from`, its possible `useFragment` gives us an `undefined` since it
+        // calls` cache.identify` and provides that value to `from`. We are
+        // adding this fix here however to ensure those using plain JavaScript
+        // and using `cache.identify` themselves will avoid seeing the obscure
+        // warning.
+        typeof from === "undefined" || typeof from === "string" ?
+          from
+        : this.identify(from),
       query,
       optimistic,
     };
diff --git a/src/react/hooks/__tests__/useFragment.test.tsx b/src/react/hooks/__tests__/useFragment.test.tsx
--- a/src/react/hooks/__tests__/useFragment.test.tsx
+++ b/src/react/hooks/__tests__/useFragment.test.tsx
@@ -1725,6 +1725,34 @@ describe("useFragment", () => {
       });
     });
   });
+
+  // https://github.com/apollographql/apollo-client/issues/12051
+  it("does not warn when the cache identifier is invalid", async () => {
+    using _ = spyOnConsole("warn");
+    const cache = new InMemoryCache();
+
+    const ProfiledHook = profileHook(() =>
+      useFragment({
+        fragment: ItemFragment,
+        // Force a value that results in cache.identify === undefined
+        from: { __typename: "Item" },
+      })
+    );
+
+    render(<ProfiledHook />, {
+      wrapper: ({ children }) => (
+        <MockedProvider cache={cache}>{children}</MockedProvider>
+      ),
+    });
+
+    expect(console.warn).not.toHaveBeenCalled();
+
+    const { data, complete } = await ProfiledHook.takeSnapshot();
+
+    // TODO: Update when https://github.com/apollographql/apollo-client/issues/12003 is fixed
+    expect(complete).toBe(true);
+    expect(data).toEqual({});
+  });
 });
 
 describe("has the same timing as `useQuery`", () => {
UseFragment producing a warning during store misses (TypeError: Cannot read properties of undefined)

### Issue Description

Since @apollo/client 3.11.5 I've started receiving warnings in the console during store misses of useFragment in development:

`TypeError: Cannot read properties of undefined (reading '__typename')`

A bit of testing suggests that this happens in all my uses of useFragment during store misses. Note that although I get the warning, everything continues to work fine. @apollo/client 3.11.4 did not have this issue.

In case it's relevant I'm using the following arguments on useFragment: `fragment, fragmentName, from: {__typename: X, id: X}`

The full stack trace is:

```
TypeError: Cannot read properties of undefined (reading '__typename')
    at Policies2.identify (policies.ts:360:14)
    at InMemoryCache2.identify (inMemoryCache.ts:350:28)
    at ApolloCache2.watchFragment (cache.ts:232:50)
    at useFragment.ts:105:36
    at subscribeToStore (react-dom.development.js:16139:10)
    at commitHookEffectListMount (react-dom.development.js:23189:26)
    at invokePassiveEffectMountInDEV (react-dom.development.js:25193:13)
    at invokeEffectsInDev (react-dom.development.js:27390:11)
    at commitDoubleInvokeEffectsInDEV (react-dom.development.js:27369:7)
    at flushPassiveEffectsImpl (react-dom.development.js:27095:5)
```

### Link to Reproduction

None

### Reproduction Steps

Call useFragment on any fragment which hasn't been loaded into the FE cache/store by typename + id.

### `@apollo/client` version

3.11.5
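For context, a minimal sketch of the reported situation; the fragment and component names are illustrative, but the `useFragment` call shape matches the one the reporter describes (`fragment`, `fragmentName`, `from: { __typename, id }`):

```tsx
import { gql, useFragment } from "@apollo/client";

// Illustrative fragment; any fragment read by __typename + id behaves the same.
const ITEM_FRAGMENT = gql`
  fragment ItemFragment on Item {
    id
    text
  }
`;

function ItemRow({ id }: { id: number }) {
  // When Item:<id> has never been written to the cache (a "store miss"),
  // `complete` is false and `data` is empty; per the report above, 3.11.5
  // additionally logged the "Cannot read properties of undefined" warning
  // in development, even though everything kept working.
  const { data, complete } = useFragment({
    fragment: ITEM_FRAGMENT,
    fragmentName: "ItemFragment",
    from: { __typename: "Item", id },
  });

  return <li>{complete ? data.text : "loading…"}</li>;
}
```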
Hey @alex-e-leon! You beat me to opening this 😆. I discovered this just recently working with the data masking feature. We did some rework with `useFragment` to ensure it follows the Rules of React (specifically not writing to `ref` in render), but I just found out that causes this warning. I know exactly where the issue is coming from so I'll get a PR out for this very soon (hopefully today). So sorry about the additional noise!

This is the punishment I get for staying up to date with all the latest deps : ) And thanks @jerelmiller - it's great to get such fast feedback and see that you're already on top of it!
2024-09-03T20:00:03Z
3.11
apollographql/apollo-client
11,944
apollographql__apollo-client-11944
[ "11945" ]
c4cf5774bedb4da7b5b21ecb4de263a7217285a7
diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts
--- a/src/core/watchQueryOptions.ts
+++ b/src/core/watchQueryOptions.ts
@@ -220,7 +220,10 @@ export interface MutationBaseOptions<
   /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#optimisticResponse:member} */
   optimisticResponse?:
     | TData
-    | ((vars: TVariables, { IGNORE }: { IGNORE: IgnoreModifier }) => TData);
+    | ((
+        vars: TVariables,
+        { IGNORE }: { IGNORE: IgnoreModifier }
+      ) => TData | IgnoreModifier);
 
   /** {@inheritDoc @apollo/client!MutationOptionsDocumentation#updateQueries:member} */
   updateQueries?: MutationQueryReducersMap<TData>;
diff --git a/src/__tests__/optimistic.ts b/src/__tests__/optimistic.ts
--- a/src/__tests__/optimistic.ts
+++ b/src/__tests__/optimistic.ts
@@ -9,6 +9,7 @@ import {
   ApolloLink,
   ApolloCache,
   MutationQueryReducersMap,
+  TypedDocumentNode,
 } from "../core";
 
 import { QueryManager } from "../core/QueryManager";
@@ -1089,6 +1090,25 @@ describe("optimistic mutation results", () => {
        resolve();
      }
    );
+
+  it("allows IgnoreModifier as return value when inferring from a TypedDocumentNode mutation", () => {
+    const mutation: TypedDocumentNode<{ bar: string }> = gql`
+      mutation foo {
+        foo {
+          bar
+        }
+      }
+    `;
+
+    const client = new ApolloClient({
+      cache: new InMemoryCache(),
+    });
+
+    client.mutate({
+      mutation,
+      optimisticResponse: (vars, { IGNORE }) => IGNORE,
+    });
+  });
 });
 
 describe("optimistic updates using `updateQueries`", () => {
Type error returning `IGNORE` object when using a generic `TypedDocumentNode` mutation

### Issue Description

There's a type error when returning the `IGNORE` sentinel object from an `optimisticResponse` callback whose type is inferred from a `TypedDocumentNode<>`.

Given the following document:

![image](https://github.com/apollographql/apollo-client/assets/48073625/a74f9993-a2a8-4356-bfcc-e48fafe3f259)

We get the following type error:

![image](https://github.com/apollographql/apollo-client/assets/48073625/04ab406e-5c01-4dce-86bc-5bbf026a0845)

I've checked the type definitions and it's fixed if we explicitly return a union type instead of only returning the inferred type. I created a PR #11944 changing this.

### Link to Reproduction

https://codesandbox.io/p/sandbox/optimisticresponse-apollo-ts-error-mv34ws

### Reproduction Steps

_No response_

### `@apollo/client` version

3.10.8
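Since the screenshots above do not survive as text, here is the same scenario in code form, mirroring the regression test added in this PR (the mutation document is a stand-in). With the patch above, the callback may return `TData | IgnoreModifier`, so returning `IGNORE` type-checks:

```ts
import { ApolloClient, InMemoryCache, gql } from "@apollo/client";
import type { TypedDocumentNode } from "@apollo/client";

const MUTATION: TypedDocumentNode<{ bar: string }> = gql`
  mutation Foo {
    foo {
      bar
    }
  }
`;

const client = new ApolloClient({ cache: new InMemoryCache() });

void client.mutate({
  mutation: MUTATION,
  // Returning the IGNORE sentinel skips the optimistic update entirely.
  // Before this change, TypeScript rejected it because the callback was
  // declared as returning TData only.
  optimisticResponse: (vars, { IGNORE }) => IGNORE,
});
```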
2024-07-10T01:41:29Z
3.11
apollographql/apollo-client
11,921
apollographql__apollo-client-11921
[ "10216" ]
2941824dd66cdd20eee5f2293373ad7a9cf991a4
diff --git a/src/react/hooks/useSubscription.ts b/src/react/hooks/useSubscription.ts --- a/src/react/hooks/useSubscription.ts +++ b/src/react/hooks/useSubscription.ts @@ -138,7 +138,8 @@ export function useSubscription< } } - const { skip, fetchPolicy, shouldResubscribe, context } = options; + const { skip, fetchPolicy, shouldResubscribe, context, ignoreResults } = + options; const variables = useDeepMemo(() => options.variables, [options.variables]); let [observable, setObservable] = React.useState(() => @@ -177,16 +178,30 @@ export function useSubscription< optionsRef.current = options; }); + const fallbackLoading = !skip && !ignoreResults; const fallbackResult = React.useMemo<SubscriptionResult<TData, TVariables>>( () => ({ - loading: !skip, + loading: fallbackLoading, error: void 0, data: void 0, variables, }), - [skip, variables] + [fallbackLoading, variables] ); + const ignoreResultsRef = React.useRef(ignoreResults); + useIsomorphicLayoutEffect(() => { + // We cannot reference `ignoreResults` directly in the effect below + // it would add a dependency to the `useEffect` deps array, which means the + // subscription would be recreated if `ignoreResults` changes + // As a result, on resubscription, the last result would be re-delivered, + // rendering the component one additional time, and re-triggering `onData`. + // The same applies to `fetchPolicy`, which results in a new `observable` + // being created. We cannot really avoid it in that case, but we can at least + // avoid it for `ignoreResults`. + ignoreResultsRef.current = ignoreResults; + }); + const ret = useSyncExternalStore<SubscriptionResult<TData, TVariables>>( React.useCallback( (update) => { @@ -212,7 +227,7 @@ export function useSubscription< variables, }; observable.__.setResult(result); - update(); + if (!ignoreResultsRef.current) update(); if (optionsRef.current.onData) { optionsRef.current.onData({ @@ -234,7 +249,7 @@ export function useSubscription< error, variables, }); - update(); + if (!ignoreResultsRef.current) update(); optionsRef.current.onError?.(error); } }, @@ -261,7 +276,10 @@ export function useSubscription< }, [observable] ), - () => (observable && !skip ? observable.__.result : fallbackResult) + () => + observable && !skip && !ignoreResults ? + observable.__.result + : fallbackResult ); return React.useMemo( () => ({ diff --git a/src/react/types/types.documentation.ts b/src/react/types/types.documentation.ts --- a/src/react/types/types.documentation.ts +++ b/src/react/types/types.documentation.ts @@ -531,6 +531,12 @@ export interface SubscriptionOptionsDocumentation { */ shouldResubscribe: unknown; + /** + * If `true`, the hook will not cause the component to rerender. This is useful when you want to control the rendering of your component yourself with logic in the `onData` and `onError` callbacks. + * + * Changing this to `true` when the hook already has `data` will reset the `data` to `undefined`. + */ + ignoreResults: unknown; /** * An `ApolloClient` instance. By default `useSubscription` / `Subscription` uses the client passed down via context, but a different client can be passed in. 
*/ diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -457,6 +457,11 @@ export interface BaseSubscriptionOptions< onError?: (error: ApolloError) => void; /** {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#onSubscriptionComplete:member} */ onSubscriptionComplete?: () => void; + /** + * {@inheritDoc @apollo/client!SubscriptionOptionsDocumentation#ignoreResults:member} + * @defaultValue `false` + */ + ignoreResults?: boolean; } export interface SubscriptionResult<TData = any, TVariables = any> { @@ -479,6 +484,9 @@ export interface SubscriptionHookOptions< TVariables extends OperationVariables = OperationVariables, > extends BaseSubscriptionOptions<TData, TVariables> {} +/** + * @deprecated This type is not used anymore. It will be removed in the next major version of Apollo Client + */ export interface SubscriptionDataOptions< TData = any, TVariables extends OperationVariables = OperationVariables,
diff --git a/src/react/hooks/__tests__/useSubscription.test.tsx b/src/react/hooks/__tests__/useSubscription.test.tsx --- a/src/react/hooks/__tests__/useSubscription.test.tsx +++ b/src/react/hooks/__tests__/useSubscription.test.tsx @@ -1455,6 +1455,297 @@ describe("`restart` callback", () => { }); }); +describe("ignoreResults", () => { + const subscription = gql` + subscription { + car { + make + } + } + `; + + const results = ["Audi", "BMW"].map((make) => ({ + result: { data: { car: { make } } }, + })); + + it("should not rerender when ignoreResults is true, but will call `onData` and `onComplete`", async () => { + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); + const onError = jest.fn((() => {}) as SubscriptionHookOptions["onError"]); + const onComplete = jest.fn( + (() => {}) as SubscriptionHookOptions["onComplete"] + ); + const ProfiledHook = profileHook(() => + useSubscription(subscription, { + ignoreResults: true, + onData, + onError, + onComplete, + }) + ); + render(<ProfiledHook />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + data: undefined, + variables: undefined, + restart: expect.any(Function), + }); + link.simulateResult(results[0]); + + await waitFor(() => { + expect(onData).toHaveBeenCalledTimes(1); + expect(onData).toHaveBeenLastCalledWith( + expect.objectContaining({ + data: { + data: results[0].result.data, + error: undefined, + loading: false, + variables: undefined, + }, + }) + ); + expect(onError).toHaveBeenCalledTimes(0); + expect(onComplete).toHaveBeenCalledTimes(0); + }); + + link.simulateResult(results[1], true); + await waitFor(() => { + expect(onData).toHaveBeenCalledTimes(2); + expect(onData).toHaveBeenLastCalledWith( + expect.objectContaining({ + data: { + data: results[1].result.data, + error: undefined, + loading: false, + variables: undefined, + }, + }) + ); + expect(onError).toHaveBeenCalledTimes(0); + expect(onComplete).toHaveBeenCalledTimes(1); + }); + + await expect(ProfiledHook).not.toRerender(); + }); + + it("should not rerender when ignoreResults is true and an error occurs", async () => { + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); + const onError = jest.fn((() => {}) as SubscriptionHookOptions["onError"]); + const onComplete = jest.fn( + (() => {}) as SubscriptionHookOptions["onComplete"] + ); + const ProfiledHook = profileHook(() => + useSubscription(subscription, { + ignoreResults: true, + onData, + onError, + onComplete, + }) + ); + render(<ProfiledHook />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + data: undefined, + variables: undefined, + restart: expect.any(Function), + }); + link.simulateResult(results[0]); + + await waitFor(() => { + expect(onData).toHaveBeenCalledTimes(1); + expect(onData).toHaveBeenLastCalledWith( + expect.objectContaining({ + data: { + data: results[0].result.data, + error: undefined, + loading: false, + 
variables: undefined, + }, + }) + ); + expect(onError).toHaveBeenCalledTimes(0); + expect(onComplete).toHaveBeenCalledTimes(0); + }); + + const error = new Error("test"); + link.simulateResult({ error }); + await waitFor(() => { + expect(onData).toHaveBeenCalledTimes(1); + expect(onError).toHaveBeenCalledTimes(1); + expect(onError).toHaveBeenLastCalledWith(error); + expect(onComplete).toHaveBeenCalledTimes(0); + }); + + await expect(ProfiledHook).not.toRerender(); + }); + + it("can switch from `ignoreResults: true` to `ignoreResults: false` and will start rerendering, without creating a new subscription", async () => { + const subscriptionCreated = jest.fn(); + const link = new MockSubscriptionLink(); + link.onSetup(subscriptionCreated); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); + const ProfiledHook = profileHook( + ({ ignoreResults }: { ignoreResults: boolean }) => + useSubscription(subscription, { + ignoreResults, + onData, + }) + ); + const { rerender } = render(<ProfiledHook ignoreResults={true} />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + expect(subscriptionCreated).toHaveBeenCalledTimes(1); + + { + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + data: undefined, + variables: undefined, + restart: expect.any(Function), + }); + expect(onData).toHaveBeenCalledTimes(0); + } + link.simulateResult(results[0]); + await expect(ProfiledHook).not.toRerender({ timeout: 20 }); + expect(onData).toHaveBeenCalledTimes(1); + + rerender(<ProfiledHook ignoreResults={false} />); + { + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + // `data` appears immediately after changing to `ignoreResults: false` + data: results[0].result.data, + variables: undefined, + restart: expect.any(Function), + }); + // `onData` should not be called again for the same result + expect(onData).toHaveBeenCalledTimes(1); + } + + link.simulateResult(results[1]); + { + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + data: results[1].result.data, + variables: undefined, + restart: expect.any(Function), + }); + expect(onData).toHaveBeenCalledTimes(2); + } + // a second subscription should not have been started + expect(subscriptionCreated).toHaveBeenCalledTimes(1); + }); + it("can switch from `ignoreResults: false` to `ignoreResults: true` and will stop rerendering, without creating a new subscription", async () => { + const subscriptionCreated = jest.fn(); + const link = new MockSubscriptionLink(); + link.onSetup(subscriptionCreated); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onData = jest.fn((() => {}) as SubscriptionHookOptions["onData"]); + const ProfiledHook = profileHook( + ({ ignoreResults }: { ignoreResults: boolean }) => + useSubscription(subscription, { + ignoreResults, + onData, + }) + ); + const { rerender } = render(<ProfiledHook ignoreResults={false} />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + expect(subscriptionCreated).toHaveBeenCalledTimes(1); + + { + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: true, + error: 
undefined, + data: undefined, + variables: undefined, + restart: expect.any(Function), + }); + expect(onData).toHaveBeenCalledTimes(0); + } + link.simulateResult(results[0]); + { + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + data: results[0].result.data, + variables: undefined, + restart: expect.any(Function), + }); + expect(onData).toHaveBeenCalledTimes(1); + } + await expect(ProfiledHook).not.toRerender({ timeout: 20 }); + + rerender(<ProfiledHook ignoreResults={true} />); + { + const snapshot = await ProfiledHook.takeSnapshot(); + expect(snapshot).toStrictEqual({ + loading: false, + error: undefined, + // switching back to the default `ignoreResults: true` return value + data: undefined, + variables: undefined, + restart: expect.any(Function), + }); + // `onData` should not be called again + expect(onData).toHaveBeenCalledTimes(1); + } + + link.simulateResult(results[1]); + await expect(ProfiledHook).not.toRerender({ timeout: 20 }); + expect(onData).toHaveBeenCalledTimes(2); + + // a second subscription should not have been started + expect(subscriptionCreated).toHaveBeenCalledTimes(1); + }); +}); + describe.skip("Type Tests", () => { test("NoInfer prevents adding arbitrary additional variables", () => { const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>;
Add an `ignoreResults` option to the `useSubscription` API

### Overview

Some issues have been brought to our attention that the `useSubscription` [API](https://www.apollographql.com/docs/react/data/subscriptions#options) doesn't have an [ignoreResults](https://www.apollographql.com/docs/react/data/mutations#ignoreresults) option, similar to the `useMutation` [API](https://www.apollographql.com/docs/react/data/mutations#options).

### Issue

Unfortunately, unlike `useMutation`, there is no "magic parameter" like `ignoreResults`, and a component will be re-rendered each time you get new data on the subscription. `useSubscription` returns a value, and because it does, we need to update that value, which requires components to re-render. `useMutation`, by contrast, allows someone to avoid the re-renders by passing an `ignoreResults` option.

### Purpose

We could offer an `ignoreResults` option in `useSubscription` which would not return a value from `useSubscription`, thereby avoiding re-renders. This would be useful to allow a developer to not re-render a component on certain occasions if they wish.

### Details

- **Name**: ignoreResults
- **Type**: boolean
- **Functionality**: If true, the subscription's data property is not updated with the subscription's result.
- **Default**: false

### References

- https://stackoverflow.com/questions/61876931/how-to-prevent-re-rendering-with-usesubscription
- https://dagster.io/blog/web-workers-performance-issue
- https://medium.com/@seniv/improve-performance-of-your-react-apollo-application-440692e37026
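A short sketch of what the requested option could look like in use; the subscription document and component are illustrative, while the option itself is what the patch above adds. With `ignoreResults: true` the hook stops updating its returned result, so the component does not re-render on new data, and results are consumed through the `onData` callback instead:

```tsx
import { gql, useSubscription } from "@apollo/client";

const CAR_SUBSCRIPTION = gql`
  subscription OnCarAdded {
    carAdded {
      make
    }
  }
`;

function CarTicker() {
  // No re-render per message: results are handled imperatively in onData
  // (for example, pushed into an external store or a chart buffer).
  useSubscription(CAR_SUBSCRIPTION, {
    ignoreResults: true,
    onData: ({ data }) => {
      console.log("car added:", data.data?.carAdded.make);
    },
  });

  return <p>Listening for new cars…</p>;
}
```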
+10000
2024-07-03T13:56:00Z
3.1
apollographql/apollo-client
11,923
apollographql__apollo-client-11923
[ "11818" ]
5ae4876457391d4fbb8da56878425610c4d37fb6
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -163,6 +163,8 @@ export class ObservableQuery< this.waitForOwnResult = skipCacheDataFor(options.fetchPolicy); this.isTornDown = false; + this.subscribeToMore = this.subscribeToMore.bind(this); + const { watchQuery: { fetchPolicy: defaultFetchPolicy = "cache-first" } = {}, } = queryManager.defaultOptions; diff --git a/src/react/hooks/useBackgroundQuery.ts b/src/react/hooks/useBackgroundQuery.ts --- a/src/react/hooks/useBackgroundQuery.ts +++ b/src/react/hooks/useBackgroundQuery.ts @@ -17,7 +17,11 @@ import type { CacheKey, QueryRef } from "../internal/index.js"; import type { BackgroundQueryHookOptions, NoInfer } from "../types/types.js"; import { wrapHook } from "./internal/index.js"; import { useWatchQueryOptions } from "./useSuspenseQuery.js"; -import type { FetchMoreFunction, RefetchFunction } from "./useSuspenseQuery.js"; +import type { + FetchMoreFunction, + RefetchFunction, + SubscribeToMoreFunction, +} from "./useSuspenseQuery.js"; import { canonicalStringify } from "../../cache/index.js"; import type { DeepPartial } from "../../utilities/index.js"; import type { SkipToken } from "./constants.js"; @@ -26,7 +30,11 @@ export type UseBackgroundQueryResult< TData = unknown, TVariables extends OperationVariables = OperationVariables, > = { + /** {@inheritDoc @apollo/client!ObservableQuery#subscribeToMore:member(1)} */ + subscribeToMore: SubscribeToMoreFunction<TData, TVariables>; + /** {@inheritDoc @apollo/client!ObservableQuery#fetchMore:member(1)} */ fetchMore: FetchMoreFunction<TData, TVariables>; + /** {@inheritDoc @apollo/client!ObservableQuery#refetch:member(1)} */ refetch: RefetchFunction<TData, TVariables>; }; @@ -281,6 +289,10 @@ function _useBackgroundQuery< return [ didFetchResult.current ? wrappedQueryRef : void 0, - { fetchMore, refetch }, + { + fetchMore, + refetch, + subscribeToMore: queryRef.observable.subscribeToMore, + }, ]; } diff --git a/src/react/hooks/useLoadableQuery.ts b/src/react/hooks/useLoadableQuery.ts --- a/src/react/hooks/useLoadableQuery.ts +++ b/src/react/hooks/useLoadableQuery.ts @@ -18,7 +18,11 @@ import type { CacheKey, QueryRef } from "../internal/index.js"; import type { LoadableQueryHookOptions } from "../types/types.js"; import { __use, useRenderGuard } from "./internal/index.js"; import { useWatchQueryOptions } from "./useSuspenseQuery.js"; -import type { FetchMoreFunction, RefetchFunction } from "./useSuspenseQuery.js"; +import type { + FetchMoreFunction, + RefetchFunction, + SubscribeToMoreFunction, +} from "./useSuspenseQuery.js"; import { canonicalStringify } from "../../cache/index.js"; import type { DeepPartial, @@ -49,6 +53,8 @@ export type UseLoadableQueryResult< fetchMore: FetchMoreFunction<TData, TVariables>; /** {@inheritDoc @apollo/client!QueryResultDocumentation#refetch:member} */ refetch: RefetchFunction<TData, TVariables>; + /** {@inheritDoc @apollo/client!ObservableQuery#subscribeToMore:member(1)} */ + subscribeToMore: SubscribeToMoreFunction<TData, TVariables>; /** * A function that resets the `queryRef` back to `null`. */ @@ -255,9 +261,22 @@ export function useLoadableQuery< ] ); + const subscribeToMore: SubscribeToMoreFunction<TData, TVariables> = + React.useCallback( + (options) => { + invariant( + internalQueryRef, + "The query has not been loaded. Please load the query." 
+ ); + + return internalQueryRef.observable.subscribeToMore(options); + }, + [internalQueryRef] + ); + const reset: ResetFunction = React.useCallback(() => { setQueryRef(null); }, []); - return [loadQuery, queryRef, { fetchMore, refetch, reset }]; + return [loadQuery, queryRef, { fetchMore, refetch, reset, subscribeToMore }]; } diff --git a/src/react/hooks/useQueryRefHandlers.ts b/src/react/hooks/useQueryRefHandlers.ts --- a/src/react/hooks/useQueryRefHandlers.ts +++ b/src/react/hooks/useQueryRefHandlers.ts @@ -8,7 +8,11 @@ import { } from "../internal/index.js"; import type { QueryRef } from "../internal/index.js"; import type { OperationVariables } from "../../core/types.js"; -import type { RefetchFunction, FetchMoreFunction } from "./useSuspenseQuery.js"; +import type { + RefetchFunction, + FetchMoreFunction, + SubscribeToMoreFunction, +} from "./useSuspenseQuery.js"; import type { FetchMoreQueryOptions } from "../../core/watchQueryOptions.js"; import { useApolloClient } from "./useApolloClient.js"; import { wrapHook } from "./internal/index.js"; @@ -21,6 +25,8 @@ export interface UseQueryRefHandlersResult< refetch: RefetchFunction<TData, TVariables>; /** {@inheritDoc @apollo/client!ObservableQuery#fetchMore:member(1)} */ fetchMore: FetchMoreFunction<TData, TVariables>; + /** {@inheritDoc @apollo/client!ObservableQuery#subscribeToMore:member(1)} */ + subscribeToMore: SubscribeToMoreFunction<TData, TVariables>; } /** @@ -112,5 +118,9 @@ function _useQueryRefHandlers< [internalQueryRef] ); - return { refetch, fetchMore }; + return { + refetch, + fetchMore, + subscribeToMore: internalQueryRef.observable.subscribeToMore, + }; } diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts --- a/src/react/hooks/useSuspenseQuery.ts +++ b/src/react/hooks/useSuspenseQuery.ts @@ -274,13 +274,7 @@ function _useSuspenseQuery< [queryRef] ); - const subscribeToMore: SubscribeToMoreFunction< - TData | undefined, - TVariables - > = React.useCallback( - (options) => queryRef.observable.subscribeToMore(options), - [queryRef] - ); + const subscribeToMore = queryRef.observable.subscribeToMore; return React.useMemo< UseSuspenseQueryResult<TData | undefined, TVariables>
diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -16,6 +16,7 @@ import { TypedDocumentNode, ApolloLink, Observable, + split, } from "../../../core"; import { MockedResponse, @@ -29,6 +30,7 @@ import { concatPagination, offsetLimitPagination, DeepPartial, + getMainDefinition, } from "../../../utilities"; import { useBackgroundQuery } from "../useBackgroundQuery"; import { UseReadQueryResult, useReadQuery } from "../useReadQuery"; @@ -37,7 +39,10 @@ import { QueryRef, QueryReference } from "../../internal"; import { InMemoryCache } from "../../../cache"; import { SuspenseQueryHookFetchPolicy } from "../../types/types"; import equal from "@wry/equality"; -import { RefetchWritePolicy } from "../../../core/watchQueryOptions"; +import { + RefetchWritePolicy, + SubscribeToMoreOptions, +} from "../../../core/watchQueryOptions"; import { skipToken } from "../constants"; import { PaginatedCaseData, @@ -54,6 +59,7 @@ import { spyOnConsole, useTrackRenders, } from "../../../testing/internal"; +import { SubscribeToMoreFunction } from "../useSuspenseQuery"; afterEach(() => { jest.useRealTimers(); @@ -6052,6 +6058,135 @@ describe("fetchMore", () => { await expect(Profiler).not.toRerender(); }); + + it("can subscribe to subscriptions and react to cache updates via `subscribeToMore`", async () => { + interface SubscriptionData { + greetingUpdated: string; + } + + type UpdateQueryFn = NonNullable< + SubscribeToMoreOptions< + SimpleCaseData, + Record<string, never>, + SubscriptionData + >["updateQuery"] + >; + + const subscription: TypedDocumentNode< + SubscriptionData, + Record<string, never> + > = gql` + subscription { + greetingUpdated + } + `; + + const { mocks, query } = setupSimpleCase(); + + const wsLink = new MockSubscriptionLink(); + const mockLink = new MockLink(mocks); + + const link = split( + ({ query }) => { + const definition = getMainDefinition(query); + + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ); + }, + wsLink, + mockLink + ); + + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + + const Profiler = createProfiler({ + initialSnapshot: { + subscribeToMore: null as SubscribeToMoreFunction< + SimpleCaseData, + Record<string, never> + > | null, + result: null as UseReadQueryResult<SimpleCaseData> | null, + }, + }); + + const { SuspenseFallback, ReadQueryHook } = + createDefaultTrackedComponents(Profiler); + + function App() { + useTrackRenders(); + const [queryRef, { subscribeToMore }] = useBackgroundQuery(query); + + Profiler.mergeSnapshot({ subscribeToMore }); + + return ( + <Suspense fallback={<SuspenseFallback />}> + <ReadQueryHook queryRef={queryRef} /> + </Suspense> + ); + } + + renderWithClient(<App />, { client, wrapper: Profiler }); + + { + const { renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([App, SuspenseFallback]); + } + + { + const { renderedComponents, snapshot } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toEqual({ + data: { greeting: "Hello" }, + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + const updateQuery = jest.fn< + ReturnType<UpdateQueryFn>, + Parameters<UpdateQueryFn> + >((_, { subscriptionData: { data } }) => { + return { greeting: 
data.greetingUpdated }; + }); + + const { snapshot } = Profiler.getCurrentRender(); + + snapshot.subscribeToMore!({ document: subscription, updateQuery }); + + wsLink.simulateResult({ + result: { + data: { + greetingUpdated: "Subscription hello", + }, + }, + }); + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toEqual({ + data: { greeting: "Subscription hello" }, + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + expect(updateQuery).toHaveBeenCalledTimes(1); + expect(updateQuery).toHaveBeenCalledWith( + { greeting: "Hello" }, + { + subscriptionData: { + data: { greetingUpdated: "Subscription hello" }, + }, + variables: {}, + } + ); + }); }); describe.skip("type tests", () => { diff --git a/src/react/hooks/__tests__/useLoadableQuery.test.tsx b/src/react/hooks/__tests__/useLoadableQuery.test.tsx --- a/src/react/hooks/__tests__/useLoadableQuery.test.tsx +++ b/src/react/hooks/__tests__/useLoadableQuery.test.tsx @@ -22,6 +22,8 @@ import { Observable, OperationVariables, RefetchWritePolicy, + SubscribeToMoreOptions, + split, } from "../../../core"; import { MockedProvider, @@ -35,6 +37,7 @@ import { concatPagination, offsetLimitPagination, DeepPartial, + getMainDefinition, } from "../../../utilities"; import { useLoadableQuery } from "../useLoadableQuery"; import type { UseReadQueryResult } from "../useReadQuery"; @@ -43,7 +46,11 @@ import { ApolloProvider } from "../../context"; import { InMemoryCache } from "../../../cache"; import { LoadableQueryHookFetchPolicy } from "../../types/types"; import { QueryRef } from "../../../react"; -import { FetchMoreFunction, RefetchFunction } from "../useSuspenseQuery"; +import { + FetchMoreFunction, + RefetchFunction, + SubscribeToMoreFunction, +} from "../useSuspenseQuery"; import invariant, { InvariantError } from "ts-invariant"; import { Profiler, @@ -4667,6 +4674,218 @@ it("allows loadQuery to be called in useEffect on first render", async () => { expect(() => renderWithMocks(<App />, { mocks })).not.toThrow(); }); +it("can subscribe to subscriptions and react to cache updates via `subscribeToMore`", async () => { + interface SubscriptionData { + greetingUpdated: string; + } + + type UpdateQueryFn = NonNullable< + SubscribeToMoreOptions< + SimpleCaseData, + Record<string, never>, + SubscriptionData + >["updateQuery"] + >; + + const subscription: TypedDocumentNode< + SubscriptionData, + Record<string, never> + > = gql` + subscription { + greetingUpdated + } + `; + + const { mocks, query } = setupSimpleCase(); + + const wsLink = new MockSubscriptionLink(); + const mockLink = new MockLink(mocks); + + const link = split( + ({ query }) => { + const definition = getMainDefinition(query); + + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ); + }, + wsLink, + mockLink + ); + + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + + const Profiler = createProfiler({ + initialSnapshot: { + subscribeToMore: null as SubscribeToMoreFunction< + SimpleCaseData, + Record<string, never> + > | null, + result: null as UseReadQueryResult<SimpleCaseData> | null, + }, + }); + + const { SuspenseFallback, ReadQueryHook } = + createDefaultProfiledComponents(Profiler); + + function App() { + useTrackRenders(); + const [loadQuery, queryRef, { subscribeToMore }] = useLoadableQuery(query); + + Profiler.mergeSnapshot({ subscribeToMore }); + + return ( + <div> + <button 
onClick={() => loadQuery()}>Load query</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <ReadQueryHook queryRef={queryRef} />} + </Suspense> + </div> + ); + } + + const { user } = renderWithClient(<App />, { client, wrapper: Profiler }); + // initial render + await Profiler.takeRender(); + + await act(() => user.click(screen.getByText("Load query"))); + + { + const { renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([App, SuspenseFallback]); + } + + { + const { renderedComponents, snapshot } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toEqual({ + data: { greeting: "Hello" }, + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + const updateQuery = jest.fn< + ReturnType<UpdateQueryFn>, + Parameters<UpdateQueryFn> + >((_, { subscriptionData: { data } }) => { + return { greeting: data.greetingUpdated }; + }); + + const { snapshot } = Profiler.getCurrentRender(); + + snapshot.subscribeToMore!({ document: subscription, updateQuery }); + + wsLink.simulateResult({ + result: { + data: { + greetingUpdated: "Subscription hello", + }, + }, + }); + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toEqual({ + data: { greeting: "Subscription hello" }, + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + expect(updateQuery).toHaveBeenCalledTimes(1); + expect(updateQuery).toHaveBeenCalledWith( + { greeting: "Hello" }, + { + subscriptionData: { + data: { greetingUpdated: "Subscription hello" }, + }, + variables: {}, + } + ); +}); + +it("throws when calling `subscribeToMore` before loading the query", async () => { + interface SubscriptionData { + greetingUpdated: string; + } + + const subscription: TypedDocumentNode< + SubscriptionData, + Record<string, never> + > = gql` + subscription { + greetingUpdated + } + `; + + const { mocks, query } = setupSimpleCase(); + + const wsLink = new MockSubscriptionLink(); + const mockLink = new MockLink(mocks); + + const link = split( + ({ query }) => { + const definition = getMainDefinition(query); + + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ); + }, + wsLink, + mockLink + ); + + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + + const Profiler = createProfiler({ + initialSnapshot: { + subscribeToMore: null as SubscribeToMoreFunction< + SimpleCaseData, + Record<string, never> + > | null, + result: null as UseReadQueryResult<SimpleCaseData> | null, + }, + }); + + const { SuspenseFallback, ReadQueryHook } = + createDefaultProfiledComponents(Profiler); + + function App() { + useTrackRenders(); + const [loadQuery, queryRef, { subscribeToMore }] = useLoadableQuery(query); + + Profiler.mergeSnapshot({ subscribeToMore }); + + return ( + <div> + <button onClick={() => loadQuery()}>Load query</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <ReadQueryHook queryRef={queryRef} />} + </Suspense> + </div> + ); + } + + renderWithClient(<App />, { client, wrapper: Profiler }); + // initial render + await Profiler.takeRender(); + + const { snapshot } = Profiler.getCurrentRender(); + + expect(() => { + snapshot.subscribeToMore!({ document: subscription }); + }).toThrow( + new InvariantError("The query has not been loaded. 
Please load the query.") + ); +}); + describe.skip("type tests", () => { it("returns unknown when TData cannot be inferred", () => { const query = gql``; diff --git a/src/react/hooks/__tests__/useQueryRefHandlers.test.tsx b/src/react/hooks/__tests__/useQueryRefHandlers.test.tsx --- a/src/react/hooks/__tests__/useQueryRefHandlers.test.tsx +++ b/src/react/hooks/__tests__/useQueryRefHandlers.test.tsx @@ -4,10 +4,16 @@ import { ApolloClient, InMemoryCache, NetworkStatus, + SubscribeToMoreOptions, TypedDocumentNode, gql, + split, } from "../../../core"; -import { MockLink, MockedResponse } from "../../../testing"; +import { + MockLink, + MockSubscriptionLink, + MockedResponse, +} from "../../../testing"; import { PaginatedCaseData, SimpleCaseData, @@ -19,13 +25,14 @@ import { } from "../../../testing/internal"; import { useQueryRefHandlers } from "../useQueryRefHandlers"; import { UseReadQueryResult, useReadQuery } from "../useReadQuery"; +import type { SubscribeToMoreFunction } from "../useSuspenseQuery"; import { Suspense } from "react"; import { createQueryPreloader } from "../../query-preloader/createQueryPreloader"; import userEvent from "@testing-library/user-event"; import { QueryRef } from "../../internal"; import { useBackgroundQuery } from "../useBackgroundQuery"; import { useLoadableQuery } from "../useLoadableQuery"; -import { concatPagination } from "../../../utilities"; +import { concatPagination, getMainDefinition } from "../../../utilities"; test("does not interfere with updates from useReadQuery", async () => { const { query, mocks } = setupSimpleCase(); @@ -1927,3 +1934,147 @@ test("`fetchMore` works with startTransition from useBackgroundQuery and useQuer await expect(Profiler).not.toRerender(); }); + +test("can subscribe to subscriptions and react to cache updates via `subscribeToMore`", async () => { + interface SubscriptionData { + greetingUpdated: string; + } + + type UpdateQueryFn = NonNullable< + SubscribeToMoreOptions< + SimpleCaseData, + Record<string, never>, + SubscriptionData + >["updateQuery"] + >; + + const subscription: TypedDocumentNode< + SubscriptionData, + Record<string, never> + > = gql` + subscription { + greetingUpdated + } + `; + + const { mocks, query } = setupSimpleCase(); + + const wsLink = new MockSubscriptionLink(); + const mockLink = new MockLink(mocks); + + const link = split( + ({ query }) => { + const definition = getMainDefinition(query); + + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ); + }, + wsLink, + mockLink + ); + + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + + const preloadQuery = createQueryPreloader(client); + const queryRef = preloadQuery(query); + + const Profiler = createProfiler({ + initialSnapshot: { + subscribeToMore: null as SubscribeToMoreFunction< + SimpleCaseData, + Record<string, never> + > | null, + result: null as UseReadQueryResult<SimpleCaseData> | null, + }, + }); + + function SuspenseFallback() { + useTrackRenders(); + return <p>Loading</p>; + } + + function ReadQueryHook() { + useTrackRenders(); + Profiler.mergeSnapshot({ result: useReadQuery(queryRef) }); + + return null; + } + + function App() { + useTrackRenders(); + // We can ignore the return result here since we are testing the mechanics + // of this hook to ensure it doesn't interfere with the updates from + // useReadQuery + const { subscribeToMore } = useQueryRefHandlers(queryRef); + + Profiler.mergeSnapshot({ subscribeToMore }); + + return ( + <Suspense 
fallback={<SuspenseFallback />}> + <ReadQueryHook /> + </Suspense> + ); + } + + renderWithClient(<App />, { client, wrapper: Profiler }); + + { + const { renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([App, SuspenseFallback]); + } + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toEqual({ + data: { greeting: "Hello" }, + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + const updateQuery = jest.fn< + ReturnType<UpdateQueryFn>, + Parameters<UpdateQueryFn> + >((_, { subscriptionData: { data } }) => { + return { greeting: data.greetingUpdated }; + }); + + const { snapshot } = Profiler.getCurrentRender(); + + snapshot.subscribeToMore!({ document: subscription, updateQuery }); + + wsLink.simulateResult({ + result: { + data: { + greetingUpdated: "Subscription hello", + }, + }, + }); + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([ReadQueryHook]); + expect(snapshot.result).toEqual({ + data: { greeting: "Subscription hello" }, + error: undefined, + networkStatus: NetworkStatus.ready, + }); + } + + expect(updateQuery).toHaveBeenCalledTimes(1); + expect(updateQuery).toHaveBeenCalledWith( + { greeting: "Hello" }, + { + subscriptionData: { + data: { greetingUpdated: "Subscription hello" }, + }, + variables: {}, + } + ); +});
Add `subscribeToMore` functionality to `useQueryRefHandlers`, `useBackgroundQuery` Context: https://community.apollographql.com/t/how-to-use-subscriptions-together-with-preloadquery/7434/3 Some of our hooks already expose `subscribeToMore` in their result object. We want to ensure that all relevant hooks expose it as well, in particular `useQueryRefHandlers` and `useBackgroundQuery`.
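For illustration, a minimal sketch of how the new API is meant to be consumed, mirroring the tests added in this PR. The `GREETING_QUERY`/`GREETING_SUBSCRIPTION` documents are placeholders and the single `@apollo/client` import path is an assumption; the fixed point is that `useBackgroundQuery` (and likewise `useQueryRefHandlers`) now returns `subscribeToMore` next to `refetch` and `fetchMore`:

```tsx
// Hypothetical usage sketch; query/subscription documents are placeholders.
import { Suspense, useEffect } from "react";
import {
  gql,
  useBackgroundQuery,
  useReadQuery,
  type QueryRef,
  type TypedDocumentNode,
} from "@apollo/client";

const GREETING_QUERY: TypedDocumentNode<{ greeting: string }> = gql`
  query {
    greeting
  }
`;

const GREETING_SUBSCRIPTION: TypedDocumentNode<{ greetingUpdated: string }> = gql`
  subscription {
    greetingUpdated
  }
`;

function Greeting({ queryRef }: { queryRef: QueryRef<{ greeting: string }> }) {
  const { data } = useReadQuery(queryRef);
  return <p>{data.greeting}</p>;
}

export function App() {
  // `subscribeToMore` is now returned alongside `refetch` and `fetchMore`.
  const [queryRef, { subscribeToMore }] = useBackgroundQuery(GREETING_QUERY);

  useEffect(() => {
    // subscribeToMore returns an unsubscribe function, so it doubles as the
    // effect cleanup.
    return subscribeToMore({
      document: GREETING_SUBSCRIPTION,
      updateQuery: (_, { subscriptionData }) => ({
        greeting: subscriptionData.data.greetingUpdated,
      }),
    });
  }, [subscribeToMore]);

  return (
    <Suspense fallback={<p>Loading…</p>}>
      <Greeting queryRef={queryRef} />
    </Suspense>
  );
}
```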
2024-07-03T21:12:02Z
3.1
apollographql/apollo-client
11,799
apollographql__apollo-client-11799
[ "11798" ]
60592e95399c3695d1d49a4c39ad29f00d4059fd
diff --git a/src/react/ssr/RenderPromises.ts b/src/react/ssr/RenderPromises.ts --- a/src/react/ssr/RenderPromises.ts +++ b/src/react/ssr/RenderPromises.ts @@ -1,8 +1,9 @@ -import type { DocumentNode } from "graphql"; import type * as ReactTypes from "react"; import type { ObservableQuery, OperationVariables } from "../../core/index.js"; import type { QueryDataOptions } from "../types/types.js"; +import { Trie } from "@wry/trie"; +import { canonicalStringify } from "../../cache/index.js"; // TODO: A vestigial interface from when hooks were implemented with utility // classes, which should be deleted in the future. @@ -16,11 +17,13 @@ type QueryInfo = { observable: ObservableQuery<any, any> | null; }; -function makeDefaultQueryInfo(): QueryInfo { - return { +function makeQueryInfoTrie() { + // these Tries are very short-lived, so we don't need to worry about making it + // "weak" - it's easier to test and debug as a strong Trie. + return new Trie<QueryInfo>(false, () => ({ seen: false, observable: null, - }; + })); } export class RenderPromises { @@ -31,13 +34,13 @@ export class RenderPromises { // objects. These QueryInfo objects are intended to survive through the whole // getMarkupFromTree process, whereas specific Query instances do not survive // beyond a single call to renderToStaticMarkup. - private queryInfoTrie = new Map<DocumentNode, Map<string, QueryInfo>>(); + private queryInfoTrie = makeQueryInfoTrie(); private stopped = false; public stop() { if (!this.stopped) { this.queryPromises.clear(); - this.queryInfoTrie.clear(); + this.queryInfoTrie = makeQueryInfoTrie(); this.stopped = true; } } @@ -133,13 +136,9 @@ export class RenderPromises { private lookupQueryInfo<TData, TVariables extends OperationVariables>( props: QueryDataOptions<TData, TVariables> ): QueryInfo { - const { queryInfoTrie } = this; - const { query, variables } = props; - const varMap = queryInfoTrie.get(query) || new Map<string, QueryInfo>(); - if (!queryInfoTrie.has(query)) queryInfoTrie.set(query, varMap); - const variablesString = JSON.stringify(variables); - const info = varMap.get(variablesString) || makeDefaultQueryInfo(); - if (!varMap.has(variablesString)) varMap.set(variablesString, info); - return info; + return this.queryInfoTrie.lookup( + props.query, + canonicalStringify(props.variables) + ); } }
diff --git a/src/react/ssr/__tests__/useQuery.test.tsx b/src/react/ssr/__tests__/useQuery.test.tsx --- a/src/react/ssr/__tests__/useQuery.test.tsx +++ b/src/react/ssr/__tests__/useQuery.test.tsx @@ -2,12 +2,17 @@ import React from "react"; import { DocumentNode } from "graphql"; import gql from "graphql-tag"; -import { MockedProvider, mockSingleLink } from "../../../testing"; +import { + MockedProvider, + MockedResponse, + mockSingleLink, +} from "../../../testing"; import { ApolloClient } from "../../../core"; import { InMemoryCache } from "../../../cache"; -import { ApolloProvider } from "../../context"; +import { ApolloProvider, getApolloContext } from "../../context"; import { useApolloClient, useQuery } from "../../hooks"; import { renderToStringWithData } from ".."; +import type { Trie } from "@wry/trie"; describe("useQuery Hook SSR", () => { const CAR_QUERY: DocumentNode = gql` @@ -333,4 +338,50 @@ describe("useQuery Hook SSR", () => { expect(cache.extract()).toMatchSnapshot(); }); }); + + it("should deduplicate `variables` with identical content, but different order", async () => { + const mocks: MockedResponse[] = [ + { + request: { + query: CAR_QUERY, + variables: { foo: "a", bar: 1 }, + }, + result: { data: CAR_RESULT_DATA }, + maxUsageCount: 1, + }, + ]; + + let trie: Trie<any> | undefined; + const Component = ({ + variables, + }: { + variables: { foo: string; bar: number }; + }) => { + const { loading, data } = useQuery(CAR_QUERY, { variables, ssr: true }); + trie ||= + React.useContext(getApolloContext()).renderPromises!["queryInfoTrie"]; + if (!loading) { + expect(data).toEqual(CAR_RESULT_DATA); + const { make, model, vin } = data.cars[0]; + return ( + <div> + {make}, {model}, {vin} + </div> + ); + } + return null; + }; + + await renderToStringWithData( + <MockedProvider mocks={mocks}> + <> + <Component variables={{ foo: "a", bar: 1 }} /> + <Component variables={{ bar: 1, foo: "a" }} /> + </> + </MockedProvider> + ); + expect( + Array.from(trie!["getChildTrie"](CAR_QUERY)["strong"].keys()) + ).toEqual(['{"bar":1,"foo":"a"}']); + }); });
useQuery fails to resolve ssr renderPromise due to JSON.stringify reordering ### Issue Description We found a bug in production where the [`lookupQueryInfo()`](https://github.com/apollographql/apollo-client/blob/main/src/react/ssr/RenderPromises.ts#L140) method would consistently fail to render an SSR component after calling `renderToStringWithData`. Upon investigation, we found that the query was being executed and the result should have been available. However, a second `QueryInfo` would be registered instead of recovering the resolved one. This is due to `JSON.stringify` serializing the attributes in a different order when looking up the Map (see the screenshots below). --- <img width="1032" alt="Screenshot 2024-04-19 at 4 27 10 PM" src="https://github.com/apollographql/apollo-client/assets/11381625/ecbd4f7c-c2ec-46e5-85ef-8243105a3e1d"> <img width="1189" alt="Screenshot 2024-04-19 at 4 28 15 PM" src="https://github.com/apollographql/apollo-client/assets/11381625/d4813464-8c28-4d83-bda4-9be0faa58145"> --- I noticed the utilities package already provides a [`canonicalStringify`](https://github.com/apollographql/apollo-client/blob/main/src/utilities/common/canonicalStringify.ts) helper which would fix this issue. ### Link to Reproduction https://github.com/apollographql/apollo-client/blob/main/src/react/ssr/RenderPromises.ts#L140 ### Reproduction Steps _No response_ ### `@apollo/client` version 3.8.3
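To make the failure mode concrete, here is a small sketch (not code from the issue itself) contrasting the two serializers; the `@apollo/client/utilities` import path is where `canonicalStringify` is publicly exported:

```ts
// Two structurally equal variables objects whose keys were inserted in a
// different order, e.g. because the object is rebuilt on every render.
import { canonicalStringify } from "@apollo/client/utilities";

const first = { foo: "a", bar: 1 };
const second = { bar: 1, foo: "a" };

JSON.stringify(first); //=> '{"foo":"a","bar":1}'
JSON.stringify(second); //=> '{"bar":1,"foo":"a"}'  -- different Map key, so a second QueryInfo is registered

canonicalStringify(first); //=> '{"bar":1,"foo":"a"}'
canonicalStringify(second); //=> '{"bar":1,"foo":"a"}' -- stable key, so the resolved QueryInfo is reused
```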
That's interesting - are you passing different variables in different situations here? I don't think that Apollo Client ever touches variables in a writing manner, and JSON.stringify would always stringify things in the object order. That said, using `canonicalStringify` absolutely makes sense here. I'll prepare a fix. Amazing, @phryneas. Thank you for the fast response. That's exactly what I found weird about it. We don't pass different variables to it (although I guess the react re-rendering recreates the objects/variables that are passed when the functional component gets re-executed). I tried getting `JSON.stringify` to reproduce the issue in isolation but couldn't. I am also willing to make the contribution if you are short on time. Let me know.
2024-04-22T14:31:51Z
3.1
apollographql/apollo-client
11,638
apollographql__apollo-client-11638
[ "11315" ]
f1d8bc40c3d8e39340f721f4f1c3fd0ed77b8a6b
diff --git a/src/react/internal/cache/QueryReference.ts b/src/react/internal/cache/QueryReference.ts --- a/src/react/internal/cache/QueryReference.ts +++ b/src/react/internal/cache/QueryReference.ts @@ -379,10 +379,22 @@ export class InternalQueryReference<TData = unknown> { // promise is resolved correctly. returnedPromise .then((result) => { - if (this.promise.status === "pending") { - this.result = result; - this.resolve?.(result); - } + // In the case of `fetchMore`, this promise is resolved before a cache + // result is emitted due to the fact that `fetchMore` sets a `no-cache` + // fetch policy and runs `cache.batch` in its `.then` handler. Because + // the timing is different, we accidentally run this update twice + // causing an additional re-render with the `fetchMore` result by + // itself. By wrapping in `setTimeout`, this should provide a short + // delay to allow the `QueryInfo.notify` handler to run before this + // promise is checked. + // See https://github.com/apollographql/apollo-client/issues/11315 for + // more information + setTimeout(() => { + if (this.promise.status === "pending") { + this.result = result; + this.resolve?.(result); + } + }); }) .catch(() => {});
diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -4927,25 +4927,6 @@ describe("fetchMore", () => { expect(renderedComponents).toStrictEqual([App, SuspenseFallback]); } - // TODO: Determine why we have this extra render here. - // Possibly related: https://github.com/apollographql/apollo-client/issues/11315 - { - const { snapshot } = await Profiler.takeRender(); - - expect(snapshot.result).toEqual({ - data: { - letters: [ - { __typename: "Letter", position: 1, letter: "A" }, - { __typename: "Letter", position: 2, letter: "B" }, - { __typename: "Letter", position: 3, letter: "C" }, - { __typename: "Letter", position: 4, letter: "D" }, - ], - }, - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } - { const { snapshot } = await Profiler.takeRender(); @@ -5034,25 +5015,6 @@ describe("fetchMore", () => { expect(renderedComponents).toStrictEqual([App, SuspenseFallback]); } - // TODO: Determine why we have this extra render here. - // Possibly related: https://github.com/apollographql/apollo-client/issues/11315 - { - const { snapshot } = await Profiler.takeRender(); - - expect(snapshot.result).toEqual({ - data: { - letters: [ - { __typename: "Letter", position: 1, letter: "A" }, - { __typename: "Letter", position: 2, letter: "B" }, - { __typename: "Letter", position: 3, letter: "C" }, - { __typename: "Letter", position: 4, letter: "D" }, - ], - }, - error: undefined, - networkStatus: NetworkStatus.ready, - }); - } - { const { snapshot } = await Profiler.takeRender(); @@ -5245,39 +5207,6 @@ describe("fetchMore", () => { }); } - // TODO: Determine why we have this extra render here. This should mimic - // the update in the next render where we see <App /> included in the - // rerendered components. - // Possibly related: https://github.com/apollographql/apollo-client/issues/11315 - { - const { snapshot, renderedComponents } = await Profiler.takeRender(); - - expect(renderedComponents).toStrictEqual([ReadQueryHook]); - expect(snapshot).toEqual({ - isPending: false, - result: { - data: { - todos: [ - { - __typename: "Todo", - id: "1", - name: "Clean room", - completed: false, - }, - { - __typename: "Todo", - id: "2", - name: "Take out trash", - completed: true, - }, - ], - }, - error: undefined, - networkStatus: NetworkStatus.ready, - }, - }); - } - { // Eventually we should see the updated todos content once its done // suspending. 
diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -1,4 +1,4 @@ -import React, { Fragment, StrictMode, Suspense } from "react"; +import React, { Fragment, StrictMode, Suspense, useTransition } from "react"; import { act, screen, @@ -51,7 +51,15 @@ import { RefetchWritePolicy, WatchQueryFetchPolicy, } from "../../../core/watchQueryOptions"; -import { profile, spyOnConsole } from "../../../testing/internal"; +import { + PaginatedCaseData, + PaginatedCaseVariables, + createProfiler, + profile, + setupPaginatedCase, + spyOnConsole, + useTrackRenders, +} from "../../../testing/internal"; type RenderSuspenseHookOptions<Props, TSerializedCache = {}> = Omit< RenderHookOptions<Props>, @@ -9978,6 +9986,129 @@ describe("useSuspenseQuery", () => { }); }); + // https://github.com/apollographql/apollo-client/issues/11315 + it("fetchMore does not cause extra render", async () => { + const { query, link } = setupPaginatedCase(); + + const user = userEvent.setup(); + const client = new ApolloClient({ + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + letters: offsetLimitPagination(), + }, + }, + }, + }), + link, + }); + + const Profiler = createProfiler({ + initialSnapshot: { + result: null as UseSuspenseQueryResult< + PaginatedCaseData, + PaginatedCaseVariables + > | null, + }, + }); + + function SuspenseFallback() { + useTrackRenders(); + + return <div>Loading...</div>; + } + + function App() { + useTrackRenders(); + const [isPending, startTransition] = useTransition(); + const result = useSuspenseQuery(query, { + variables: { offset: 0, limit: 2 }, + }); + const { data, fetchMore } = result; + + Profiler.mergeSnapshot({ result }); + + return ( + <button + disabled={isPending} + onClick={() => + startTransition(() => { + fetchMore({ + variables: { + offset: data.letters.length, + limit: data.letters.length + 1, + }, + }); + }) + } + > + Fetch next + </button> + ); + } + + render(<App />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + <Profiler> + <Suspense fallback={<SuspenseFallback />}>{children}</Suspense> + </Profiler> + </ApolloProvider> + ), + }); + + { + const { renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([SuspenseFallback]); + } + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([App]); + expect(snapshot.result?.data).toEqual({ + letters: [ + { __typename: "Letter", letter: "A", position: 1 }, + { __typename: "Letter", letter: "B", position: 2 }, + ], + }); + } + + await act(() => user.click(screen.getByText("Fetch next"))); + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([App]); + expect(screen.getByText("Fetch next")).toBeDisabled(); + expect(snapshot.result?.data).toEqual({ + letters: [ + { __typename: "Letter", letter: "A", position: 1 }, + { __typename: "Letter", letter: "B", position: 2 }, + ], + }); + } + + { + const { snapshot, renderedComponents } = await Profiler.takeRender(); + + expect(renderedComponents).toStrictEqual([App]); + expect(snapshot.result?.data).toEqual({ + letters: [ + { __typename: "Letter", letter: "A", position: 1 }, + { __typename: "Letter", letter: "B", position: 2 }, + { __typename: "Letter", letter: "C", position: 3 }, + { __typename: 
"Letter", letter: "D", position: 4 }, + { __typename: "Letter", letter: "E", position: 5 }, + ], + }); + } + + await expect(Profiler).not.toRerender(); + }); + describe.skip("type tests", () => { it("returns unknown when TData cannot be inferred", () => { const query = gql` diff --git a/src/testing/internal/scenarios/index.ts b/src/testing/internal/scenarios/index.ts --- a/src/testing/internal/scenarios/index.ts +++ b/src/testing/internal/scenarios/index.ts @@ -80,8 +80,8 @@ export interface PaginatedCaseVariables { export function setupPaginatedCase() { const query: TypedDocumentNode<PaginatedCaseData, PaginatedCaseVariables> = gql` - query letters($limit: Int, $offset: Int) { - letters(limit: $limit) { + query LettersQuery($limit: Int, $offset: Int) { + letters(limit: $limit, offset: $offset) { letter position }
Pagination using useSuspenseQuery and React transition is broken I have a paginated query that I use with `useSuspenseQuery`. When I call the `fetchMore` function inside a `startTransition`, an unexpected re-render with **only** the new data happens before the final re-render with all the data merged together. I've made a reproduction link so you can try it out: https://stackblitz.com/edit/stackblitz-starters-fqmtck?file=src%2Fcomponents%2FPaginatedQuery.tsx ### Reproduction steps 1 - Go to the StackBlitz link 2 - Open your console 3 - Click "Fetch More" 4 - Look at the logs
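For context, a condensed sketch of the reported pattern (it mirrors the regression test added in this PR rather than the StackBlitz code): `fetchMore` is wrapped in a React transition, so only the final render with the merged list should be observed. The query document and the assumption that the cache merges pages via an `offsetLimitPagination`-style field policy are illustrative.

```tsx
// Illustrative reproduction sketch; assumes a field policy that merges pages
// (e.g. offsetLimitPagination) is configured for Query.letters.
import { useTransition } from "react";
import { gql, useSuspenseQuery, type TypedDocumentNode } from "@apollo/client";

interface LettersData {
  letters: { letter: string; position: number }[];
}

const LETTERS_QUERY: TypedDocumentNode<
  LettersData,
  { offset: number; limit: number }
> = gql`
  query LettersQuery($limit: Int, $offset: Int) {
    letters(limit: $limit, offset: $offset) {
      letter
      position
    }
  }
`;

export function Letters() {
  const [isPending, startTransition] = useTransition();
  const { data, fetchMore } = useSuspenseQuery(LETTERS_QUERY, {
    variables: { offset: 0, limit: 2 },
  });

  return (
    <>
      <ul>
        {data.letters.map(({ letter, position }) => (
          <li key={position}>{letter}</li>
        ))}
      </ul>
      <button
        disabled={isPending}
        onClick={() =>
          startTransition(() => {
            // The reported bug: an intermediate re-render showed only this new
            // page instead of the merged list before the final render arrived.
            void fetchMore({
              variables: { offset: data.letters.length, limit: 2 },
            });
          })
        }
      >
        Fetch more
      </button>
    </>
  );
}
```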
Thanks for reporting this, @pvandamme! Really appreciate the reproduction - looks like this issue should be transferred over to the Apollo Client repo which I will do shortly. Thanks @alessbell :) Hi @alessbell, any issue that I can follow on the apollo client repo for updates ? Thanks ! Faced a similar issue. Seems to be caused by [this line](https://github.com/apollographql/apollo-client/blob/0bcbb2c258c5771e6bb170c59a9ce360645351b4/src/react/cache/QueryReference.ts#L303) Hi there, I'm sorry - it seems that something went wrong when transferring this earlier. I transferred it now. Is there any workaround available currently? @finkef Well, with @apollo/client v3.9.5, deleting line 243 in `node_modules/@apollo/client/react/internal/cache/QueryReference.js` seems to fix the issue for me. But I wouldn't do it in an app I care about) @none23 Thanks! I just came up with this workaround, but wouldn't expect the best performance. It appears that `readQuery` always gets the correct data. ```ts import { useApolloClient } from "@apollo/client" import { useSuspenseQuery as useApolloSuspenseQuery } from "@apollo/experimental-nextjs-app-support/ssr" import { startTransition, useCallback, useMemo } from "react" export const useSuspenseQuery: typeof useApolloSuspenseQuery = ( ...args: Parameters<typeof useApolloSuspenseQuery> ) => { const client = useApolloClient() const { data, fetchMore, ...rest } = useApolloSuspenseQuery(...args) const newFetchMore = useCallback( (...fetchMoreArgs: Parameters<typeof fetchMore>) => { return new Promise((resolve, reject) => { startTransition(() => { fetchMore(...fetchMoreArgs) .then(resolve) .catch(reject) }) }) }, [fetchMore], ) const actualData = useMemo(() => { return client.readQuery({ query: args[0], variables: (args as any)[1]?.variables, }) }, [data]) return { data: actualData, fetchMore: newFetchMore, ...rest, } as any } ```
2024-03-04T22:12:13Z
3.9
apollographql/apollo-client
11,403
apollographql__apollo-client-11403
[ "11201" ]
e855d00447e4d9ae478d98f6796d842ef6cc76d1
diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -78,7 +78,7 @@ export function useLazyQuery< // Use refs to track options and the used query to ensure the `execute` // function remains referentially stable between renders. - optionsRef.current = merged; + optionsRef.current = options; queryRef.current = document; const internalState = useInternalState<TData, TVariables>(
diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -5,6 +5,7 @@ import { act, render, renderHook, waitFor } from "@testing-library/react"; import { ApolloClient, + ApolloError, ApolloLink, ErrorPolicy, InMemoryCache, @@ -19,6 +20,7 @@ import { wait, tick, MockSubscriptionLink, + MockLink, } from "../../../testing"; import { useLazyQuery } from "../useLazyQuery"; import { QueryResult } from "../../types/types"; @@ -1483,6 +1485,274 @@ describe("useLazyQuery Hook", () => { expect(fetchCount).toBe(1); }); + // https://github.com/apollographql/apollo-client/issues/9448 + it.each(["network-only", "no-cache", "cache-and-network"] as const)( + "does not issue multiple network calls when calling execute again without variables with a %s fetch policy", + async (fetchPolicy) => { + interface Data { + user: { id: string; name: string }; + } + + interface Variables { + id?: string; + } + + const query: TypedDocumentNode<Data, Variables> = gql` + query UserQuery($id: ID) { + user(id: $id) { + id + name + } + } + `; + + let fetchCount = 0; + + const link = new ApolloLink((operation) => { + fetchCount++; + return new Observable((observer) => { + const { id } = operation.variables; + + setTimeout(() => { + observer.next({ + data: { + user: + id ? + { id, name: "John Doe" } + : { id: null, name: "John Default" }, + }, + }); + observer.complete(); + }, 20); + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + const { result } = renderHook( + () => useLazyQuery(query, { fetchPolicy }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + await act(() => result.current[0]({ variables: { id: "2" } })); + + expect(fetchCount).toBe(1); + + await waitFor(() => { + expect(result.current[1].data).toEqual({ + user: { id: "2", name: "John Doe" }, + }); + }); + + expect(fetchCount).toBe(1); + + await act(() => result.current[0]()); + + await waitFor(() => { + expect(result.current[1].data).toEqual({ + user: { id: null, name: "John Default" }, + }); + }); + + expect(fetchCount).toBe(2); + } + ); + + it("maintains stable execute function when passing in dynamic function options", async () => { + interface Data { + user: { id: string; name: string }; + } + + interface Variables { + id: string; + } + + const query: TypedDocumentNode<Data, Variables> = gql` + query UserQuery($id: ID!) 
{ + user(id: $id) { + id + name + } + } + `; + + const link = new MockLink([ + { + request: { query, variables: { id: "1" } }, + result: { data: { user: { id: "1", name: "John Doe" } } }, + delay: 20, + }, + { + request: { query, variables: { id: "2" } }, + result: { errors: [new GraphQLError("Oops")] }, + delay: 20, + }, + { + request: { query, variables: { id: "3" } }, + result: { data: { user: { id: "3", name: "Johnny Three" } } }, + delay: 20, + maxUsageCount: Number.POSITIVE_INFINITY, + }, + ]); + + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + + let countRef = { current: 0 }; + + const trackClosureValue = jest.fn(); + + const { result, rerender } = renderHook( + () => { + let count = countRef.current; + + return useLazyQuery(query, { + fetchPolicy: "cache-first", + variables: { id: "1" }, + onCompleted: () => { + trackClosureValue("onCompleted", count); + }, + onError: () => { + trackClosureValue("onError", count); + }, + skipPollAttempt: () => { + trackClosureValue("skipPollAttempt", count); + return false; + }, + nextFetchPolicy: (currentFetchPolicy) => { + trackClosureValue("nextFetchPolicy", count); + return currentFetchPolicy; + }, + }); + }, + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [originalExecute] = result.current; + + countRef.current++; + rerender(); + + expect(result.current[0]).toBe(originalExecute); + + // Check for stale closures with onCompleted + await act(() => result.current[0]()); + await waitFor(() => { + expect(result.current[1].data).toEqual({ + user: { id: "1", name: "John Doe" }, + }); + }); + + // after fetch + expect(trackClosureValue).toHaveBeenNthCalledWith(1, "nextFetchPolicy", 1); + expect(trackClosureValue).toHaveBeenNthCalledWith(2, "onCompleted", 1); + trackClosureValue.mockClear(); + + countRef.current++; + rerender(); + + expect(result.current[0]).toBe(originalExecute); + + // Check for stale closures with onError + await act(() => result.current[0]({ variables: { id: "2" } })); + await waitFor(() => { + expect(result.current[1].error).toEqual( + new ApolloError({ graphQLErrors: [new GraphQLError("Oops")] }) + ); + }); + + // variables changed + expect(trackClosureValue).toHaveBeenNthCalledWith(1, "nextFetchPolicy", 2); + // after fetch + expect(trackClosureValue).toHaveBeenNthCalledWith(2, "nextFetchPolicy", 2); + expect(trackClosureValue).toHaveBeenNthCalledWith(3, "onError", 2); + trackClosureValue.mockClear(); + + countRef.current++; + rerender(); + + expect(result.current[0]).toBe(originalExecute); + + await act(() => result.current[0]({ variables: { id: "3" } })); + await waitFor(() => { + expect(result.current[1].data).toEqual({ + user: { id: "3", name: "Johnny Three" }, + }); + }); + + // variables changed + expect(trackClosureValue).toHaveBeenNthCalledWith(1, "nextFetchPolicy", 3); + // after fetch + expect(trackClosureValue).toHaveBeenNthCalledWith(2, "nextFetchPolicy", 3); + expect(trackClosureValue).toHaveBeenNthCalledWith(3, "onCompleted", 3); + trackClosureValue.mockClear(); + + // Test for stale closures for skipPollAttempt + result.current[1].startPolling(20); + await wait(50); + result.current[1].stopPolling(); + + expect(trackClosureValue).toHaveBeenCalledWith("skipPollAttempt", 3); + }); + + it("maintains stable execute function identity when changing non-callback options", async () => { + interface Data { + user: { id: string; name: string }; + } + + interface Variables { + id: string; + } + + const query: 
TypedDocumentNode<Data, Variables> = gql` + query UserQuery($id: ID!) { + user(id: $id) { + id + name + } + } + `; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + setTimeout(() => { + observer.next({ + data: { user: { id: operation.variables.id, name: "John Doe" } }, + }); + observer.complete(); + }, 20); + }); + }); + + const client = new ApolloClient({ link, cache: new InMemoryCache() }); + + const { result, rerender } = renderHook( + ({ id }) => useLazyQuery(query, { variables: { id } }), + { + initialProps: { id: "1" }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [execute] = result.current; + + rerender({ id: "2" }); + + expect(result.current[0]).toBe(execute); + }); + describe("network errors", () => { async function check(errorPolicy: ErrorPolicy) { const networkError = new Error("from the network");
v3.8.3 useLazyQuery fires 2 times - unsure why ### Issue Description We are getting 2 network calls in Apollo Client when using useLazyQuery. It is exactly the same call both times, and it passes the Authentication: Bearer header. I confirmed we are only calling the execute function (the 1st element of the returned tuple) 1 time, and even put in a console.log('FIRE'); it only fires 1 time. No idea why. Is there anything we can look at internally in Apollo Client to debug this? Using latest React 18 and NextJS. These calls return pretty large responses, and issuing them 2 times is really slowing down the site. Thanks. ### Link to Reproduction None ### Reproduction Steps See above. I think there's maybe a better way to find out why it is being called 2 times.
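A minimal sketch of the scenario this boils down to (it matches the regression test added in this PR, not the reporter's app): the execute function is called once with variables and later once without, and each click should translate into exactly one network request. The query shape and fetch policy are placeholders.

```tsx
// Hypothetical component; USER_QUERY stands in for the real (large) query.
import { gql, useLazyQuery, type TypedDocumentNode } from "@apollo/client";

interface UserData {
  user: { id: string | null; name: string };
}

const USER_QUERY: TypedDocumentNode<UserData, { id?: string }> = gql`
  query UserQuery($id: ID) {
    user(id: $id) {
      id
      name
    }
  }
`;

export function UserLoader() {
  const [loadUser, { data }] = useLazyQuery(USER_QUERY, {
    fetchPolicy: "network-only",
  });

  return (
    <>
      {/* should trigger exactly one request with { id: "2" } */}
      <button onClick={() => void loadUser({ variables: { id: "2" } })}>
        Load user 2
      </button>
      {/* should trigger exactly one request with no variables; before the fix
          this call could reuse stale merged options and fire twice */}
      <button onClick={() => void loadUser()}>Load default user</button>
      <pre>{JSON.stringify(data, null, 2)}</pre>
    </>
  );
}
```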
Hey @billnbell 👋 Would you be able to provide a minimal reproduction of the issue? Without code samples or a reproduction to play with, its very difficult to determine what might be happening here. Any more information you can provide would be super helpful. Thanks! OK - I will send more. But any debug mode I can see internal into this call ? useLazyQuery You could provide us a [https://www.replay.io/](Replay) of the issue occuring, but please keep in mind that that replay will contain your authentication headers, so don't record it in a production environment. Otherwise, maybe you can also share a code snippet with us that shows us how you are using it? I'm having this problem with an extra request after removing one of the variables properties. <img width="600" alt="image" src="https://github.com/apollographql/apollo-client/assets/38090158/8e1d0340-b6f4-4148-bf40-823976a21ca6"> if the fetchPolicy property is set to "no-cache" 1) Use getDataWithVariable 2) Use function getDataWithoutVariable after function getDataWithVariable 3) Look at tab network and see it: ![image](https://github.com/apollographql/apollo-client/assets/38090158/8ec36c97-ea98-4971-a3dc-372367a0cccc) getData makes three requests instead of two. After removing one of the properties useLazyQuery made a query with the previous value of this field. https://github.com/apollographql/apollo-client/blob/53b7370b76465f92e439e02e3431d1c13dc58d7a/src/react/hooks/useLazyQuery.ts#L98 I think, we can remove "variables" from the default state of options, since we overwrite them by passing them to the function, is that right? @billnbell @KataevAS if you downgrade to the latest version in the 3.7.x series, are you still able to reproduce this? > @billnbell @KataevAS if you downgrade to the latest version in the 3.7.x series, are you still able to reproduce this? @alessbell 3.7.15 there is a bug too @KataevAS could you try 3.7.10 as well? There was a few fixes to `useLazyQuery` in 3.7.11 and I'm curious if any of those changes introduced this regression or if this has existed for longer. Thanks! > @KataevAS could you try 3.7.10 as well? There was a few fixes to `useLazyQuery` in 3.7.11 and I'm curious if any of those changes introduced this regression or if this has existed for longer. Thanks! @jerelmiller 3.7.10 same thing here <img width="1129" alt="image" src="https://github.com/apollographql/apollo-client/assets/38090158/c0ceddd8-032f-455f-a943-e85d12a7638b"> I did this for a test and it solved the problem, since by passing variables to the useLazyQuery function I expect them to be completely replaced. I think, they are replaced after calling forceUpdate now. Is this a fix? @billnbell this is a hot fix that solved the problem for me. @alessbell what do you think about this? This is a very interesting angle (thanks for digging so deep!), but it's still hard for us to reproduce & verify this. Could you maybe create a small reproduction (maybe starting with [this CodeSandbox](https://codesandbox.io/s/github/apollographql/react-apollo-error-template))? @phryneas yes I can. Look at the console please https://codesandbox.io/s/gracious-water-w6259q?file=/src/index.jsx If I use a cache in a project, I won’t see an extra request. But with the cache disabled, an extra promise will be launched. @KataevAS that's certainly important information - thank you! @phryneas can I submit my edits for review? 
@KataevAS A PR would be welcome, but please be aware that we haven't looked into this deep enough yet and might in the end prefer another solution, so I can't 100% guarantee that we'd go with the PR at this point. I can trigger you a package build from that PR, though, so you can continue running an Apollo Client version without local patches :) This solution does not work if we need to store, in each request, the variables passed during hook initialization. What can we do to get this fixed? @phryneas I think this might be interesting for you. Look at the console. https://codesandbox.io/s/elastic-khorana-lgvyf5?file=/src/index.jsx It is important that the variables match. What is the issue? @KataevAS in your case, one of the two queries (both to the same field with the same argument) is missing an `id`, so the normalized cache cannot correctly save that data; the second query overrides data from the first query and the first query has to run again. If you add an `id` field (as you should always, to every entity!), your double request goes away. @phryneas could this be related? https://codesandbox.io/s/silly-mayer-6pr822?file=/src/index.jsx > what can we do to get this fixed? A solution might be to pass null values of all used variables to useLazyQuery. https://codesandbox.io/s/pensive-stitch-gp3gcg
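To illustrate the normalization point raised above (an illustrative sketch, not code from the linked sandboxes): when a selection set omits the entity's `id`, the cache cannot normalize the object into its own entity, so two queries hitting the same field can keep overwriting each other's data and trigger the extra request.

```ts
// Illustrative query shapes only; field and argument names are placeholders.
import { gql } from "@apollo/client";

// Without `id`, the user object stays embedded under the parent field, and a
// later write for the same field can replace it, invalidating the first query.
const USER_WITHOUT_ID = gql`
  query {
    user(login: "a") {
      name
    }
  }
`;

// With `id`, the result is normalized to a `User:<id>` entity that both
// queries share, so neither write clobbers the other.
const USER_WITH_ID = gql`
  query {
    user(login: "a") {
      id
      name
    }
  }
`;
```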
2023-12-01T20:09:00Z
3.9
apollographql/apollo-client
11,200
apollographql__apollo-client-11200
[ "11199" ]
e5acf910e39752b453540b6751046d1c19b66350
diff --git a/.size-limit.cjs b/.size-limit.cjs --- a/.size-limit.cjs +++ b/.size-limit.cjs @@ -1,7 +1,7 @@ const checks = [ { path: "dist/apollo-client.min.cjs", - limit: "37972", + limit: "37975", }, { path: "dist/main.cjs", @@ -10,7 +10,7 @@ const checks = [ { path: "dist/index.js", import: "{ ApolloClient, InMemoryCache, HttpLink }", - limit: "32017", + limit: "32019", }, ...[ "ApolloProvider", diff --git a/src/cache/core/types/Cache.ts b/src/cache/core/types/Cache.ts --- a/src/cache/core/types/Cache.ts +++ b/src/cache/core/types/Cache.ts @@ -93,7 +93,7 @@ export namespace Cache { this: TCache, watch: Cache.WatchOptions, diff: Cache.DiffResult<any>, - lastDiff: Cache.DiffResult<any> | undefined + lastDiff?: Cache.DiffResult<any> | undefined ) => any; } diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -593,7 +593,7 @@ class CacheGroup { // Used by the EntityStore#makeCacheKey method to compute cache keys // specific to this CacheGroup. - public keyMaker: Trie<object>; + public keyMaker!: Trie<object>; constructor( public readonly caching: boolean, @@ -800,7 +800,11 @@ class Layer extends EntityStore { public getStorage(): StorageType { let p: EntityStore = this.parent; while ((p as Layer).parent) p = (p as Layer).parent; - return p.getStorage.apply(p, arguments); + return p.getStorage.apply( + p, + // @ts-expect-error + arguments + ); } } @@ -823,20 +827,20 @@ class Stump extends Layer { return this; } - public merge() { + public merge(older: string | StoreObject, newer: string | StoreObject) { // We never want to write any data into the Stump, so we forward any merge // calls to the Root instead. Another option here would be to throw an // exception, but the toReference(object, true) function can sometimes // trigger Stump writes (which used to be Root writes, before the Stump // concept was introduced). - return this.parent.merge.apply(this.parent, arguments); + return this.parent.merge(older, newer); } } function storeObjectReconciler( existingObject: StoreObject, incomingObject: StoreObject, - property: string + property: string | number ): StoreValue { const existingValue = existingObject[property]; const incomingValue = incomingObject[property]; diff --git a/src/cache/inmemory/fixPolyfills.native.ts b/src/cache/inmemory/fixPolyfills.native.ts --- a/src/cache/inmemory/fixPolyfills.native.ts +++ b/src/cache/inmemory/fixPolyfills.native.ts @@ -33,10 +33,10 @@ try { // https://github.com/apollographql/react-apollo/issues/2442#issuecomment-426489517 testMap.set(frozen, frozen).delete(frozen); } catch { - const wrap = (method: <T>(obj: T) => T): typeof method => { + const wrap = <M extends <T>(obj: T) => T>(method: M): M => { return ( method && - ((obj) => { + (((obj) => { try { // If .set succeeds, also call .delete to avoid leaking memory. 
testMap.set(obj, obj).delete(obj); @@ -45,7 +45,7 @@ try { // by this return-from-finally statement: return method.call(Object, obj); } - }) + }) as M) ); }; Object.freeze = wrap(Object.freeze); diff --git a/src/cache/inmemory/fragmentRegistry.ts b/src/cache/inmemory/fragmentRegistry.ts --- a/src/cache/inmemory/fragmentRegistry.ts +++ b/src/cache/inmemory/fragmentRegistry.ts @@ -6,6 +6,7 @@ import type { } from "graphql"; import { visit } from "graphql"; +import type { OptimisticWrapperFunction } from "optimism"; import { wrap } from "optimism"; import type { FragmentMap } from "../../utilities/index.js"; @@ -29,24 +30,22 @@ export function createFragmentRegistry( return new FragmentRegistry(...fragments); } -const { forEach: arrayLikeForEach } = Array.prototype; - class FragmentRegistry implements FragmentRegistryAPI { private registry: FragmentMap = Object.create(null); - // Call static method FragmentRegistry.from(...) instead of invoking the + // Call `createFragmentRegistry` instead of invoking the // FragmentRegistry constructor directly. This reserves the constructor for // future configuration of the FragmentRegistry. constructor(...fragments: DocumentNode[]) { this.resetCaches(); if (fragments.length) { - this.register.apply(this, fragments); + this.register(...fragments); } } - public register(): this { + public register(...fragments: DocumentNode[]): this { const definitions = new Map<string, FragmentDefinitionNode>(); - arrayLikeForEach.call(arguments, (doc: DocumentNode) => { + fragments.forEach((doc: DocumentNode) => { getFragmentDefinitions(doc).forEach((node) => { definitions.set(node.name.value, node); }); @@ -66,27 +65,15 @@ class FragmentRegistry implements FragmentRegistryAPI { private invalidate(name: string) {} public resetCaches() { - this.invalidate = (this.lookup = this.cacheUnaryMethod("lookup")).dirty; // This dirty function is bound to the wrapped lookup method. - this.transform = this.cacheUnaryMethod("transform"); - this.findFragmentSpreads = this.cacheUnaryMethod("findFragmentSpreads"); + this.invalidate = (this.lookup = this.cacheUnaryMethod(this.lookup)).dirty; // This dirty function is bound to the wrapped lookup method. 
+ this.transform = this.cacheUnaryMethod(this.transform); + this.findFragmentSpreads = this.cacheUnaryMethod(this.findFragmentSpreads); } - private cacheUnaryMethod< - TName extends keyof Pick< - FragmentRegistry, - "lookup" | "transform" | "findFragmentSpreads" - >, - >(name: TName) { - const registry = this; - const originalMethod = FragmentRegistry.prototype[name]; - return wrap( - function () { - return originalMethod.apply(registry, arguments); - }, - { - makeCacheKey: (arg) => arg, - } - ); + private cacheUnaryMethod<F extends (arg: any) => any>(originalMethod: F) { + return wrap<Parameters<F>, ReturnType<F>>(originalMethod.bind(this), { + makeCacheKey: (arg) => arg, + }) as OptimisticWrapperFunction<Parameters<F>, ReturnType<F>> & F; } public lookup(fragmentName: string): FragmentDefinitionNode | null { diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -33,18 +33,18 @@ type BroadcastOptions = Pick< >; export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { - private data: EntityStore; - private optimisticData: EntityStore; + private data!: EntityStore; + private optimisticData!: EntityStore; protected config: InMemoryCacheConfig; private watches = new Set<Cache.WatchOptions>(); private addTypename: boolean; - private storeReader: StoreReader; - private storeWriter: StoreWriter; + private storeReader!: StoreReader; + private storeWriter!: StoreWriter; private addTypenameTransform = new DocumentTransform(addTypenameToDocument); - private maybeBroadcastWatch: OptimisticWrapperFunction< + private maybeBroadcastWatch!: OptimisticWrapperFunction< [Cache.WatchOptions, BroadcastOptions?], any, [Cache.WatchOptions] diff --git a/src/cache/inmemory/readFromStore.ts b/src/cache/inmemory/readFromStore.ts --- a/src/cache/inmemory/readFromStore.ts +++ b/src/cache/inmemory/readFromStore.ts @@ -535,7 +535,7 @@ function firstMissing(tree: MissingTree): string | undefined { return value; }); } catch (result) { - return result; + return result as string; } } diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -86,7 +86,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { public readonly typeDefs: ApolloClientOptions<TCacheShape>["typeDefs"]; private queryManager: QueryManager<TCacheShape>; - private devToolsHookCb: Function; + private devToolsHookCb?: Function; private resetStoreCallbacks: Array<() => Promise<any>> = []; private clearStoreCallbacks: Array<() => Promise<any>> = []; private localState: LocalState<TCacheShape>; @@ -592,7 +592,9 @@ export class ApolloClient<TCacheShape> implements DataProxy { >( options: RefetchQueriesOptions<TCache, TResult> ): RefetchQueriesResult<TResult> { - const map = this.queryManager.refetchQueries(options); + const map = this.queryManager.refetchQueries( + options as RefetchQueriesOptions<ApolloCache<TCacheShape>, TResult> + ); const queries: ObservableQuery<any>[] = []; const results: InternalRefetchQueriesResult<TResult>[] = []; diff --git a/src/core/LocalState.ts b/src/core/LocalState.ts --- a/src/core/LocalState.ts +++ b/src/core/LocalState.ts @@ -75,9 +75,9 @@ export type LocalStateOptions<TCacheShape> = { export class LocalState<TCacheShape> { private cache: ApolloCache<TCacheShape>; - private client: ApolloClient<TCacheShape>; + private client?: ApolloClient<TCacheShape>; private resolvers?: Resolvers; - private fragmentMatcher: 
FragmentMatcher; + private fragmentMatcher?: FragmentMatcher; private selectionsToResolveCache = new WeakMap< ExecutableDefinitionNode, Set<SelectionNode> @@ -162,7 +162,7 @@ export class LocalState<TCacheShape> { this.fragmentMatcher = fragmentMatcher; } - public getFragmentMatcher(): FragmentMatcher { + public getFragmentMatcher(): FragmentMatcher | undefined { return this.fragmentMatcher; } @@ -197,11 +197,11 @@ export class LocalState<TCacheShape> { // To support `@client @export(as: "someVar")` syntax, we'll first resolve // @client @export fields locally, then pass the resolved values back to be // used alongside the original operation variables. - public async addExportedVariables( + public async addExportedVariables<TVars extends OperationVariables>( document: DocumentNode, - variables: OperationVariables = {}, + variables: TVars = {} as TVars, context = {} - ) { + ): /* returns at least the variables that were passed in */ Promise<TVars> { if (document) { return this.resolveDocument( document, diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -35,6 +35,7 @@ import type { QueryInfo } from "./QueryInfo.js"; import type { MissingFieldError } from "../cache/index.js"; import type { MissingTree } from "../cache/core/types/common.js"; import { equalByQuery } from "./equalByQuery.js"; +import type { TODO } from "../utilities/types/TODO.js"; const { assign, hasOwnProperty } = Object; @@ -924,8 +925,9 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, public reobserve( newOptions?: Partial<WatchQueryOptions<TVariables, TData>>, newNetworkStatus?: NetworkStatus - ) { - return this.reobserveAsConcast(newOptions, newNetworkStatus).promise; + ): Promise<ApolloQueryResult<TData>> { + return this.reobserveAsConcast(newOptions, newNetworkStatus) + .promise as TODO; } public resubscribeAfterError( @@ -1048,14 +1050,18 @@ export function reobserveCacheFirst<TData, TVars extends OperationVariables>( fetchPolicy: "cache-first", // Use a temporary nextFetchPolicy function that replaces itself with the // previous nextFetchPolicy value and returns the original fetchPolicy. - nextFetchPolicy(this: WatchQueryOptions<TVars, TData>) { + nextFetchPolicy( + this: WatchQueryOptions<TVars, TData>, + currentFetchPolicy: WatchQueryFetchPolicy, + context: NextFetchPolicyContext<TData, TVars> + ) { // Replace this nextFetchPolicy function in the options object with the // original this.options.nextFetchPolicy value. this.nextFetchPolicy = nextFetchPolicy; // If the original nextFetchPolicy value was a function, give it a // chance to decide what happens here. - if (typeof nextFetchPolicy === "function") { - return nextFetchPolicy.apply(this, arguments); + if (typeof this.nextFetchPolicy === "function") { + return this.nextFetchPolicy(currentFetchPolicy, context); } // Otherwise go back to the original this.options.fetchPolicy. return fetchPolicy!; diff --git a/src/core/QueryInfo.ts b/src/core/QueryInfo.ts --- a/src/core/QueryInfo.ts +++ b/src/core/QueryInfo.ts @@ -49,6 +49,7 @@ function wrapDestructiveCacheMethod( // that matters in any conceivable practical scenario. (destructiveMethodCounts.get(cache)! + 1) % 1e15 ); + // @ts-expect-error this is just too generic to be typed correctly return original.apply(this, arguments); }; } @@ -111,7 +112,7 @@ export class QueryInfo { // NetworkStatus.loading, but also possibly fetchMore, poll, refetch, // or setVariables. 
networkStatus?: NetworkStatus; - observableQuery?: ObservableQuery<any>; + observableQuery?: ObservableQuery<any, any>; lastRequestId?: number; }): this { let networkStatus = query.networkStatus || NetworkStatus.loading; @@ -212,10 +213,10 @@ export class QueryInfo { } } - public readonly observableQuery: ObservableQuery<any> | null = null; + public readonly observableQuery: ObservableQuery<any, any> | null = null; private oqListener?: QueryListener; - setObservableQuery(oq: ObservableQuery<any> | null) { + setObservableQuery(oq: ObservableQuery<any, any> | null) { if (oq === this.observableQuery) return; if (this.oqListener) { diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -74,6 +74,7 @@ import { import type { ApolloErrorOptions } from "../errors/index.js"; import { PROTOCOL_ERRORS_SYMBOL } from "../errors/index.js"; import { print } from "../utilities/index.js"; +import type { TODO } from "../utilities/types/TODO.js"; const { hasOwnProperty } = Object.prototype; @@ -479,7 +480,7 @@ export class QueryManager<TStore> { const results: any[] = []; this.refetchQueries({ - updateCache: (cache: TCache) => { + updateCache: (cache) => { if (!skipCache) { cacheWrites.forEach((write) => cache.write(write)); } @@ -526,7 +527,7 @@ export class QueryManager<TStore> { // either a SingleExecutionResult or the final ExecutionPatchResult, // call the update function. if (isFinalResult) { - update(cache, result, { + update(cache as TCache, result, { context: mutation.context, variables: mutation.variables, }); @@ -617,7 +618,7 @@ export class QueryManager<TStore> { networkStatus?: NetworkStatus ): Promise<ApolloQueryResult<TData>> { return this.fetchConcastWithInfo(queryId, options, networkStatus).concast - .promise; + .promise as TODO; } public getQueryStore() { diff --git a/src/link/error/index.ts b/src/link/error/index.ts --- a/src/link/error/index.ts +++ b/src/link/error/index.ts @@ -84,7 +84,7 @@ export function onError(errorHandler: ErrorHandler): ApolloLink { }, }); } catch (e) { - errorHandler({ networkError: e, operation, forward }); + errorHandler({ networkError: e as Error, operation, forward }); observer.error(e); } diff --git a/src/link/http/HttpLink.ts b/src/link/http/HttpLink.ts --- a/src/link/http/HttpLink.ts +++ b/src/link/http/HttpLink.ts @@ -1,10 +1,8 @@ -import type { RequestHandler } from "../core/index.js"; import { ApolloLink } from "../core/index.js"; import type { HttpOptions } from "./selectHttpOptionsAndBody.js"; import { createHttpLink } from "./createHttpLink.js"; export class HttpLink extends ApolloLink { - public requester: RequestHandler; constructor(public options: HttpOptions = {}) { super(createHttpLink(options).request); } diff --git a/src/link/http/iterators/reader.ts b/src/link/http/iterators/reader.ts --- a/src/link/http/iterators/reader.ts +++ b/src/link/http/iterators/reader.ts @@ -6,7 +6,7 @@ import { canUseAsyncIteratorSymbol } from "../../../utilities/index.js"; interface ReaderIterator<T> { - next(): Promise<ReadableStreamReadResult<T>>; + next(): Promise<IteratorResult<T, T | undefined>>; [Symbol.asyncIterator]?(): AsyncIterator<T>; } @@ -15,12 +15,20 @@ export default function readerIterator<T>( ): AsyncIterableIterator<T> { const iterator: ReaderIterator<T> = { next() { - return reader.read(); + return reader.read() as Promise< + | ReadableStreamReadValueResult<T> + // DoneResult has `value` optional, which doesn't comply with an + // `IteratorResult`, so we assert it to `T | 
undefined` instead + | Required<ReadableStreamReadDoneResult<T | undefined>> + >; }, }; if (canUseAsyncIteratorSymbol) { - iterator[Symbol.asyncIterator] = function (): AsyncIterator<T> { + iterator[Symbol.asyncIterator] = function (): AsyncIterator< + T, + T | undefined + > { return this; }; } diff --git a/src/link/http/serializeFetchParameter.ts b/src/link/http/serializeFetchParameter.ts --- a/src/link/http/serializeFetchParameter.ts +++ b/src/link/http/serializeFetchParameter.ts @@ -9,7 +9,7 @@ export const serializeFetchParameter = (p: any, label: string) => { let serialized; try { serialized = JSON.stringify(p); - } catch (e) { + } catch (e: any) { const parseError = newInvariantError( `Network request failed. %s is not serializable: %s`, label, diff --git a/src/react/cache/QueryReference.ts b/src/react/cache/QueryReference.ts --- a/src/react/cache/QueryReference.ts +++ b/src/react/cache/QueryReference.ts @@ -73,7 +73,7 @@ export class InternalQueryReference<TData = unknown> { private subscription: ObservableSubscription; private listeners = new Set<Listener<TData>>(); - private autoDisposeTimeoutId: NodeJS.Timeout; + private autoDisposeTimeoutId?: NodeJS.Timeout; private status: "idle" | "loading" = "loading"; private resolve: ((result: ApolloQueryResult<TData>) => void) | undefined; diff --git a/src/react/cache/SuspenseCache.ts b/src/react/cache/SuspenseCache.ts --- a/src/react/cache/SuspenseCache.ts +++ b/src/react/cache/SuspenseCache.ts @@ -32,7 +32,9 @@ export class SuspenseCache { cacheKey: CacheKey, createObservable: () => ObservableQuery<TData> ) { - const ref = this.queryRefs.lookupArray(cacheKey); + const ref = this.queryRefs.lookupArray(cacheKey) as { + current?: InternalQueryReference<TData>; + }; if (!ref.current) { ref.current = new InternalQueryReference(createObservable(), { @@ -44,6 +46,6 @@ export class SuspenseCache { }); } - return ref.current as InternalQueryReference<TData>; + return ref.current; } } diff --git a/src/react/hooks/useBackgroundQuery.ts b/src/react/hooks/useBackgroundQuery.ts --- a/src/react/hooks/useBackgroundQuery.ts +++ b/src/react/hooks/useBackgroundQuery.ts @@ -1,8 +1,10 @@ import * as React from "react"; import type { DocumentNode, + FetchMoreQueryOptions, OperationVariables, TypedDocumentNode, + WatchQueryOptions, } from "../../core/index.js"; import { useApolloClient } from "./useApolloClient.js"; import { wrapQueryRef } from "../cache/QueryReference.js"; @@ -197,7 +199,7 @@ export function useBackgroundQuery< ]; const queryRef = suspenseCache.getQueryRef(cacheKey, () => - client.watchQuery(watchQueryOptions) + client.watchQuery(watchQueryOptions as WatchQueryOptions<any, any>) ); const [promiseCache, setPromiseCache] = React.useState( @@ -213,7 +215,7 @@ export function useBackgroundQuery< const fetchMore: FetchMoreFunction<TData, TVariables> = React.useCallback( (options) => { - const promise = queryRef.fetchMore(options); + const promise = queryRef.fetchMore(options as FetchMoreQueryOptions<any>); setPromiseCache((promiseCache) => new Map(promiseCache).set(queryRef.key, queryRef.promise) diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -75,6 +75,7 @@ export function useLazyQuery< // Only the first time populating execOptionsRef.current matters here. 
internalState.forceUpdateState(); } + // @ts-expect-error this is just too generic to type return method.apply(this, arguments); }; } diff --git a/src/react/hooks/useMutation.ts b/src/react/hooks/useMutation.ts --- a/src/react/hooks/useMutation.ts +++ b/src/react/hooks/useMutation.ts @@ -12,6 +12,7 @@ import type { import type { ApolloCache, DefaultContext, + MutationOptions, OperationVariables, } from "../../core/index.js"; import { mergeOptions } from "../../utilities/index.js"; @@ -87,10 +88,10 @@ export function useMutation< } const mutationId = ++ref.current.mutationId; - const clientOptions = mergeOptions(baseOptions, executeOptions as any); + const clientOptions = mergeOptions(baseOptions, executeOptions); return client - .mutate(clientOptions) + .mutate(clientOptions as MutationOptions<TData, OperationVariables>) .then((response) => { const { data, errors } = response; const error = @@ -102,7 +103,10 @@ export function useMutation< executeOptions.onError || ref.current.options?.onError; if (error && onError) { - onError(error, clientOptions); + onError( + error, + clientOptions as MutationOptions<TData, OperationVariables> + ); } if ( @@ -126,7 +130,10 @@ export function useMutation< executeOptions.onCompleted || ref.current.options?.onCompleted; if (!error) { - onCompleted?.(response.data!, clientOptions); + onCompleted?.( + response.data!, + clientOptions as MutationOptions<TData, OperationVariables> + ); } return response; @@ -150,7 +157,10 @@ export function useMutation< executeOptions.onError || ref.current.options?.onError; if (onError) { - onError(error, clientOptions); + onError( + error, + clientOptions as MutationOptions<TData, OperationVariables> + ); // TODO(brian): why are we returning this here??? return { data: void 0, errors: error }; diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -271,8 +271,8 @@ class InternalState<TData, TVariables extends OperationVariables> { // useQuery method, so we can safely use these members in other/later methods // without worrying they might be uninitialized. 
private renderPromises: ApolloContextValue["renderPromises"]; - private queryHookOptions: QueryHookOptions<TData, TVariables>; - private watchQueryOptions: WatchQueryOptions<TVariables, TData>; + private queryHookOptions!: QueryHookOptions<TData, TVariables>; + private watchQueryOptions!: WatchQueryOptions<TVariables, TData>; private useOptions(options: QueryHookOptions<TData, TVariables>) { const watchQueryOptions = this.createWatchQueryOptions( @@ -461,8 +461,8 @@ class InternalState<TData, TVariables extends OperationVariables> { private onCompleted(data: TData) {} private onError(error: ApolloError) {} - private observable: ObservableQuery<TData, TVariables>; - private obsQueryFields: Omit< + private observable!: ObservableQuery<TData, TVariables>; + private obsQueryFields!: Omit< ObservableQueryFields<TData, TVariables>, "variables" >; diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts --- a/src/react/hooks/useSuspenseQuery.ts +++ b/src/react/hooks/useSuspenseQuery.ts @@ -178,7 +178,11 @@ export function useSuspenseQuery< ): UseSuspenseQueryResult<TData | undefined, TVariables> { const client = useApolloClient(options.client); const suspenseCache = getSuspenseCache(client); - const watchQueryOptions = useWatchQueryOptions({ client, query, options }); + const watchQueryOptions = useWatchQueryOptions<any, any>({ + client, + query, + options, + }); const { fetchPolicy, variables } = watchQueryOptions; const { queryKey = [] } = options; @@ -236,8 +240,8 @@ export function useSuspenseQuery< const result = fetchPolicy === "standby" ? skipResult : __use(promise); - const fetchMore: FetchMoreFunction<TData, TVariables> = React.useCallback( - (options) => { + const fetchMore = React.useCallback( + ((options) => { const promise = queryRef.fetchMore(options); setPromiseCache((previousPromiseCache) => @@ -245,7 +249,10 @@ export function useSuspenseQuery< ); return promise; - }, + }) satisfies FetchMoreFunction< + unknown, + OperationVariables + > as FetchMoreFunction<TData | undefined, TVariables>, [queryRef] ); @@ -262,13 +269,17 @@ export function useSuspenseQuery< [queryRef] ); - const subscribeToMore: SubscribeToMoreFunction<TData, TVariables> = - React.useCallback( - (options) => queryRef.observable.subscribeToMore(options), - [queryRef] - ); + const subscribeToMore: SubscribeToMoreFunction< + TData | undefined, + TVariables + > = React.useCallback( + (options) => queryRef.observable.subscribeToMore(options), + [queryRef] + ); - return React.useMemo(() => { + return React.useMemo< + UseSuspenseQueryResult<TData | undefined, TVariables> + >(() => { return { client, data: result.data, diff --git a/src/utilities/common/filterInPlace.ts b/src/utilities/common/filterInPlace.ts deleted file mode 100644 --- a/src/utilities/common/filterInPlace.ts +++ /dev/null @@ -1,14 +0,0 @@ -export function filterInPlace<T>( - array: T[], - test: (elem: T) => boolean, - context?: any -): T[] { - let target = 0; - array.forEach(function (elem, i) { - if (test.call(this, elem, i, array)) { - array[target++] = elem; - } - }, context); - array.length = target; - return array; -} diff --git a/src/utilities/common/mergeDeep.ts b/src/utilities/common/mergeDeep.ts --- a/src/utilities/common/mergeDeep.ts +++ b/src/utilities/common/mergeDeep.ts @@ -72,7 +72,7 @@ const defaultReconciler: ReconcilerFunction<any[]> = function ( export class DeepMerger<TContextArgs extends any[]> { constructor( - private reconciler: ReconcilerFunction<TContextArgs> = defaultReconciler + private reconciler: 
ReconcilerFunction<TContextArgs> = defaultReconciler as any as ReconcilerFunction<TContextArgs> ) {} public merge(target: any, source: any, ...context: TContextArgs): any { diff --git a/src/utilities/common/mergeOptions.ts b/src/utilities/common/mergeOptions.ts --- a/src/utilities/common/mergeOptions.ts +++ b/src/utilities/common/mergeOptions.ts @@ -10,7 +10,7 @@ import { compact } from "./compact.js"; type OptionsUnion<TData, TVariables extends OperationVariables, TContext> = | WatchQueryOptions<TVariables, TData> | QueryOptions<TVariables, TData> - | MutationOptions<TData, TVariables, TContext>; + | MutationOptions<TData, TVariables, TContext, any>; export function mergeOptions< TDefaultOptions extends Partial<OptionsUnion<any, any, any>>, diff --git a/src/utilities/graphql/transform.ts b/src/utilities/graphql/transform.ts --- a/src/utilities/graphql/transform.ts +++ b/src/utilities/graphql/transform.ts @@ -12,7 +12,7 @@ import type { FragmentSpreadNode, VariableDefinitionNode, ASTNode, - ASTVisitor, + ASTVisitFn, InlineFragmentNode, } from "graphql"; import { visit, Kind } from "graphql"; @@ -29,6 +29,12 @@ import type { FragmentMap } from "./fragments.js"; import { createFragmentMap } from "./fragments.js"; import { isArray, isNonEmptyArray } from "../common/arrays.js"; +// https://github.com/graphql/graphql-js/blob/8d7c8fccf5a9846a50785de04abda58a7eb13fc0/src/language/visitor.ts#L20-L23 +interface EnterLeaveVisitor<TVisitedNode extends ASTNode> { + readonly enter?: ASTVisitFn<TVisitedNode>; + readonly leave?: ASTVisitFn<TVisitedNode>; +} + export type RemoveNodeConfig<N> = { name?: string; test?: (node: N) => boolean; @@ -208,8 +214,10 @@ export function removeDirectivesFromDocument( // original doc immediately without any modifications. let firstVisitMadeChanges = false; - const fieldOrInlineFragmentVisitor: ASTVisitor = { - enter(node: FieldNode | InlineFragmentNode) { + const fieldOrInlineFragmentVisitor: EnterLeaveVisitor< + FieldNode | InlineFragmentNode + > = { + enter(node) { if (shouldRemoveField(node.directives)) { firstVisitMadeChanges = true; return null; @@ -385,8 +393,10 @@ export function removeDirectivesFromDocument( ) ); - const enterVisitor: ASTVisitor = { - enter(node: FragmentSpreadNode | FragmentDefinitionNode) { + const enterVisitor: EnterLeaveVisitor< + FragmentSpreadNode | FragmentDefinitionNode + > = { + enter(node) { if (fragmentWillBeRemoved(node.name.value)) { return null; } diff --git a/src/utilities/observables/Concast.ts b/src/utilities/observables/Concast.ts --- a/src/utilities/observables/Concast.ts +++ b/src/utilities/observables/Concast.ts @@ -147,9 +147,9 @@ export class Concast<T> extends Observable<T> { // Any Concast object can be trivially converted to a Promise, without // having to create a new wrapper Observable. This promise provides an // easy way to observe the final state of the Concast. 
- private resolve: (result?: T | PromiseLike<T>) => void; - private reject: (reason: any) => void; - public readonly promise = new Promise<T>((resolve, reject) => { + private resolve!: (result?: T | PromiseLike<T>) => void; + private reject!: (reason: any) => void; + public readonly promise = new Promise<T | undefined>((resolve, reject) => { this.resolve = resolve; this.reject = reject; }); diff --git a/src/utilities/observables/Observable.ts b/src/utilities/observables/Observable.ts --- a/src/utilities/observables/Observable.ts +++ b/src/utilities/observables/Observable.ts @@ -17,6 +17,7 @@ export type { Observer, ObservableSubscription, Subscriber }; const { prototype } = Observable; const fakeObsSymbol = "@@observable" as keyof typeof prototype; if (!prototype[fakeObsSymbol]) { + // @ts-expect-error prototype[fakeObsSymbol] = function () { return this; }; diff --git a/src/utilities/policies/pagination.ts b/src/utilities/policies/pagination.ts --- a/src/utilities/policies/pagination.ts +++ b/src/utilities/policies/pagination.ts @@ -42,7 +42,7 @@ export function offsetLimitPagination<T = Reference>( // to receive any arguments, so you might prefer to throw an // exception here, instead of recovering by appending incoming // onto the existing array. - merged.push.apply(merged, incoming); + merged.push(...incoming); } } diff --git a/src/utilities/types/TODO.ts b/src/utilities/types/TODO.ts new file mode 100644 --- /dev/null +++ b/src/utilities/types/TODO.ts @@ -0,0 +1,2 @@ +/** @internal */ +export type TODO = any;
diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -4,7 +4,6 @@ import { ApolloClient, ApolloError, DefaultOptions, - FetchPolicy, QueryOptions, makeReference, } from "../core"; @@ -2126,8 +2125,8 @@ describe("ApolloClient", () => { } `; - ["network-only", "cache-and-network"].forEach( - (fetchPolicy: FetchPolicy) => { + (["network-only", "cache-and-network"] as const).forEach( + (fetchPolicy) => { const observable = client.watchQuery({ query, fetchPolicy, @@ -2156,13 +2155,15 @@ describe("ApolloClient", () => { } `; - [ - "cache-first", - "cache-and-network", - "network-only", - "cache-only", - "no-cache", - ].forEach((fetchPolicy: FetchPolicy) => { + ( + [ + "cache-first", + "cache-and-network", + "network-only", + "cache-only", + "no-cache", + ] as const + ).forEach((fetchPolicy) => { const observable = client.watchQuery({ query, fetchPolicy, diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -1821,7 +1821,7 @@ describe("client", () => { link, cache: new InMemoryCache({ - dataIdFromObject: (obj: { id: any }) => obj.id, + dataIdFromObject: (obj: any) => obj.id, addTypename: false, }), }); @@ -1870,7 +1870,7 @@ describe("client", () => { callback(); throw new Error("not reached"); } catch (thrown) { - expect(thrown.message).toBe(cacheAndNetworkError); + expect((thrown as Error).message).toBe(cacheAndNetworkError); } } diff --git a/src/__tests__/graphqlSubscriptions.ts b/src/__tests__/graphqlSubscriptions.ts --- a/src/__tests__/graphqlSubscriptions.ts +++ b/src/__tests__/graphqlSubscriptions.ts @@ -488,7 +488,7 @@ describe("GraphQL Subscriptions", () => { client.subscribe(options).subscribe({ next() { - expect(link.operation.getContext().someVar).toEqual( + expect(link.operation?.getContext().someVar).toEqual( options.context.someVar ); resolve(); diff --git a/src/__tests__/local-state/general.ts b/src/__tests__/local-state/general.ts --- a/src/__tests__/local-state/general.ts +++ b/src/__tests__/local-state/general.ts @@ -1207,7 +1207,7 @@ describe("Combining client and server state/operations", () => { watchCount += 1; client.mutate({ mutation, - update(proxy, { data: { updateUser } }: { data: any }) { + update(proxy, { data: { updateUser } }) { proxy.writeQuery({ query: userQuery, data: { diff --git a/src/__tests__/local-state/resolvers.ts b/src/__tests__/local-state/resolvers.ts --- a/src/__tests__/local-state/resolvers.ts +++ b/src/__tests__/local-state/resolvers.ts @@ -11,7 +11,7 @@ import { WatchQueryOptions, } from "../../core"; -import { InMemoryCache } from "../../cache"; +import { InMemoryCache, isReference } from "../../cache"; import { Observable, Observer } from "../../utilities"; import { ApolloLink } from "../../link/core"; import { itAsync } from "../../testing"; @@ -747,10 +747,13 @@ describe("Writing cache data from resolvers", () => { }, }, }); - cache.modify({ + cache.modify<{ field: { field2: number } }>({ id: "Object:uniqueId", fields: { - field(value: { field2: number }) { + field(value) { + if (isReference(value)) { + fail("Should not be a reference"); + } expect(value.field2).toBe(1); return { ...value, field2: 2 }; }, diff --git a/src/__tests__/optimistic.ts b/src/__tests__/optimistic.ts --- a/src/__tests__/optimistic.ts +++ b/src/__tests__/optimistic.ts @@ -235,7 +235,7 @@ describe("optimistic mutation results", () => { await promise; } catch (err) { expect(err).toBeInstanceOf(Error); - 
expect(err.message).toBe("forbidden (test error)"); + expect((err as Error).message).toBe("forbidden (test error)"); const dataInStore = (client.cache as InMemoryCache).extract(true); expect((dataInStore["TodoList5"] as any).todos.length).toBe(3); @@ -489,7 +489,7 @@ describe("optimistic mutation results", () => { await promise; } catch (err) { expect(err).toBeInstanceOf(Error); - expect(err.message).toBe("forbidden (test error)"); + expect((err as Error).message).toBe("forbidden (test error)"); const dataInStore = (client.cache as InMemoryCache).extract(true); expect((dataInStore["TodoList5"] as any).todos.length).toBe(3); @@ -2019,11 +2019,12 @@ describe("optimistic mutation results", () => { const wrapReject = <TArgs extends any[], TResult>( fn: (...args: TArgs) => TResult ): typeof fn => { - return function () { + return function (this: unknown, ...args: TArgs) { try { - return fn.apply(this, arguments); + return fn.apply(this, args); } catch (e) { reject(e); + throw e; } }; }; diff --git a/src/__tests__/subscribeToMore.ts b/src/__tests__/subscribeToMore.ts --- a/src/__tests__/subscribeToMore.ts +++ b/src/__tests__/subscribeToMore.ts @@ -8,8 +8,10 @@ import { itAsync, mockSingleLink, mockObservableLink } from "../testing"; const isSub = (operation: Operation) => (operation.query as DocumentNode).definitions - .filter((x) => x.kind === "OperationDefinition") - .some((x: OperationDefinitionNode) => x.operation === "subscription"); + .filter( + (x): x is OperationDefinitionNode => x.kind === "OperationDefinition" + ) + .some((x) => x.operation === "subscription"); describe("subscribeToMore", () => { const query = gql` diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -4035,6 +4035,7 @@ describe("ReactiveVar and makeVar", () => { let broadcastCount = 0; cache["broadcastWatches"] = function () { ++broadcastCount; + // @ts-expect-error return broadcast.apply(this, arguments); }; diff --git a/src/cache/inmemory/__tests__/diffAgainstStore.ts b/src/cache/inmemory/__tests__/diffAgainstStore.ts --- a/src/cache/inmemory/__tests__/diffAgainstStore.ts +++ b/src/cache/inmemory/__tests__/diffAgainstStore.ts @@ -493,8 +493,8 @@ describe("diffing queries against the store", () => { }; const cache = new InMemoryCache({ - dataIdFromObject({ id }: { id: string }) { - return id; + dataIdFromObject(obj: any) { + return obj.id; }, }); @@ -841,7 +841,7 @@ describe("diffing queries against the store", () => { const writer = new StoreWriter( new InMemoryCache({ - dataIdFromObject: ({ id }: { id: string }) => id, + dataIdFromObject: (obj: any) => obj.id, }) ); @@ -1067,7 +1067,7 @@ describe("diffing queries against the store", () => { }); throw new Error("should have thrown"); } catch (e) { - expect(e.message).toEqual( + expect((e as Error).message).toEqual( "Missing selection set for object of type Message returned for query field messageList" ); } diff --git a/src/cache/inmemory/__tests__/readFromStore.ts b/src/cache/inmemory/__tests__/readFromStore.ts --- a/src/cache/inmemory/__tests__/readFromStore.ts +++ b/src/cache/inmemory/__tests__/readFromStore.ts @@ -1904,7 +1904,7 @@ describe("reading from the store", () => { ); expect(value.__ref).toBe('Deity:{"name":"Zeus"}'); // Interim ruler Apollo takes over for real. 
- return toReference(apolloRulerResult.ruler); + return toReference(apolloRulerResult.ruler)!; }, }, }); diff --git a/src/cache/inmemory/__tests__/roundtrip.ts b/src/cache/inmemory/__tests__/roundtrip.ts --- a/src/cache/inmemory/__tests__/roundtrip.ts +++ b/src/cache/inmemory/__tests__/roundtrip.ts @@ -61,7 +61,7 @@ function storeRoundtrip(query: DocumentNode, result: any, variables = {}) { (immutableResult as any).illegal = "this should not work"; throw new Error("unreached"); } catch (e) { - expect(e.message).not.toMatch(/unreached/); + expect((e as Error).message).not.toMatch(/unreached/); expect(e).toBeInstanceOf(TypeError); } assertDeeplyFrozen(immutableResult); diff --git a/src/cache/inmemory/__tests__/writeToStore.ts b/src/cache/inmemory/__tests__/writeToStore.ts --- a/src/cache/inmemory/__tests__/writeToStore.ts +++ b/src/cache/inmemory/__tests__/writeToStore.ts @@ -25,9 +25,14 @@ import { defaultNormalizedCacheFactory, writeQueryToStore } from "./helpers"; import { InMemoryCache } from "../inMemoryCache"; import { TypedDocumentNode } from "../../../core"; import { extractFragmentContext } from "../helpers"; +import { KeyFieldsFunction } from "../policies"; +import { invariant } from "../../../utilities/globals"; import { spyOnConsole } from "../../../testing/internal"; -const getIdField = ({ id }: { id: string }) => id; +const getIdField: KeyFieldsFunction = ({ id }) => { + invariant(typeof id === "string", "id is not a string"); + return id; +}; describe("writing to the store", () => { const cache = new InMemoryCache({ @@ -1293,19 +1298,17 @@ describe("writing to the store", () => { } testData.forEach((data) => { - data.mutation.definitions.forEach( - (definition: OperationDefinitionNode) => { - if (isOperationDefinition(definition)) { - definition.selectionSet.selections.forEach((selection) => { - if (isField(selection)) { - expect( - storeKeyNameFromField(selection, data.variables) - ).toEqual(data.expected); - } - }); - } + data.mutation.definitions.forEach((definition) => { + if (isOperationDefinition(definition)) { + definition.selectionSet.selections.forEach((selection) => { + if (isField(selection)) { + expect(storeKeyNameFromField(selection, data.variables)).toEqual( + data.expected + ); + } + }); } - ); + }); }); }); @@ -1357,7 +1360,7 @@ describe("writing to the store", () => { return value.kind === "OperationDefinition"; } - mutation.definitions.map((def: OperationDefinitionNode) => { + mutation.definitions.map((def) => { if (isOperationDefinition(def)) { const writer = new StoreWriter( new InMemoryCache({ diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -46,7 +46,8 @@ export const mockFetchQuery = (queryManager: QueryManager<any>) => { >( original: T ) => - jest.fn<ReturnType<T>, Parameters<T>>(function () { + jest.fn<ReturnType<T>, Parameters<T>>(function (): ReturnType<T> { + // @ts-expect-error return original.apply(queryManager, arguments); }); @@ -2740,7 +2741,7 @@ describe("ObservableQuery", () => { throw new Error("not reached"); } catch (error) { expect(error).toBeInstanceOf(TypeError); - expect(error.message).toMatch( + expect((error as Error).message).toMatch( /Cannot assign to read only property 'value'/ ); } diff --git a/src/core/__tests__/QueryManager/index.ts b/src/core/__tests__/QueryManager/index.ts --- a/src/core/__tests__/QueryManager/index.ts +++ b/src/core/__tests__/QueryManager/index.ts @@ -1610,7 +1610,7 @@ 
describe("QueryManager", () => { }); }); - const getIdField = ({ id }: { id: string }) => id; + const getIdField = (obj: any) => obj.id; itAsync( "runs a mutation with object parameters and puts the result in the store", diff --git a/src/link/batch-http/__tests__/batchHttpLink.ts b/src/link/batch-http/__tests__/batchHttpLink.ts --- a/src/link/batch-http/__tests__/batchHttpLink.ts +++ b/src/link/batch-http/__tests__/batchHttpLink.ts @@ -35,10 +35,11 @@ function makeCallback<TArgs extends any[]>( ) { return function () { try { + // @ts-expect-error callback.apply(this, arguments); resolve(); } catch (error) { - reject(error); + reject(error as Error); } } as typeof callback; } @@ -472,7 +473,7 @@ describe("SharedHttpTest", () => { after(); } catch (e) { - reject(e); + reject(e as Error); } }, }); diff --git a/src/link/batch/__tests__/batchLink.ts b/src/link/batch/__tests__/batchLink.ts --- a/src/link/batch/__tests__/batchLink.ts +++ b/src/link/batch/__tests__/batchLink.ts @@ -64,6 +64,7 @@ function terminatingCheck<TArgs extends any[]>( ) { return function () { try { + // @ts-expect-error callback.apply(this, arguments); resolve(); } catch (error) { diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -92,10 +92,11 @@ function makeCallback<TArgs extends any[]>( ) { return function () { try { + // @ts-expect-error callback.apply(this, arguments); resolve(); } catch (error) { - reject(error); + reject(error as Error); } } as typeof callback; } @@ -1202,7 +1203,7 @@ describe("HttpLink", () => { reject("warning wasn't called"); } catch (e) { makeCallback(resolve, reject, () => - expect(e.message).toMatch(/has not been found globally/) + expect((e as Error).message).toMatch(/has not been found globally/) )(); } }); @@ -1214,7 +1215,7 @@ describe("HttpLink", () => { reject("warning wasn't called"); } catch (e) { makeCallback(resolve, reject, () => - expect(e.message).toMatch(/has not been found globally/) + expect((e as Error).message).toMatch(/has not been found globally/) )(); } }); diff --git a/src/link/http/__tests__/responseIterator.ts b/src/link/http/__tests__/responseIterator.ts --- a/src/link/http/__tests__/responseIterator.ts +++ b/src/link/http/__tests__/responseIterator.ts @@ -17,10 +17,11 @@ function makeCallback<TArgs extends any[]>( ) { return function () { try { + // @ts-expect-error callback.apply(this, arguments); resolve(); } catch (error) { - reject(error); + reject(error as Error); } } as typeof callback; } diff --git a/src/link/persisted-queries/__tests__/persisted-queries.test.ts b/src/link/persisted-queries/__tests__/persisted-queries.test.ts --- a/src/link/persisted-queries/__tests__/persisted-queries.test.ts +++ b/src/link/persisted-queries/__tests__/persisted-queries.test.ts @@ -219,7 +219,7 @@ describe("happy path", () => { reject("should have thrown an error"); } catch (error) { expect( - error.message.indexOf( + (error as Error).message.indexOf( 'Missing/invalid "sha256" or "generateHash" function' ) ).toBe(0); @@ -238,7 +238,7 @@ describe("happy path", () => { reject("should have thrown an error"); } catch (error) { expect( - error.message.indexOf( + (error as Error).message.indexOf( 'Missing/invalid "sha256" or "generateHash" function' ) ).toBe(0); @@ -569,6 +569,7 @@ describe("failure path", () => { status, }); } + // @ts-expect-error return global.fetch.apply(null, args); }; const link = createPersistedQuery({ sha256 }).concat( @@ -623,6 +624,7 
@@ describe("failure path", () => { status, }); } + // @ts-expect-error return global.fetch.apply(null, args); }; const link = createPersistedQuery({ sha256 }).concat( @@ -662,6 +664,7 @@ describe("failure path", () => { status, }); } + // @ts-expect-error return global.fetch.apply(null, args); }; diff --git a/src/react/hoc/__tests__/fragments.test.tsx b/src/react/hoc/__tests__/fragments.test.tsx --- a/src/react/hoc/__tests__/fragments.test.tsx +++ b/src/react/hoc/__tests__/fragments.test.tsx @@ -55,7 +55,7 @@ describe("fragments", () => { ); throw new Error(); } catch (e) { - expect(e.name).toMatch(/Invariant Violation/); + expect((e as Error).name).toMatch(/Invariant Violation/); } }); diff --git a/src/react/hoc/__tests__/queries/errors.test.tsx b/src/react/hoc/__tests__/queries/errors.test.tsx --- a/src/react/hoc/__tests__/queries/errors.test.tsx +++ b/src/react/hoc/__tests__/queries/errors.test.tsx @@ -105,7 +105,7 @@ describe("[queries] errors", () => { try { unmount(); - } catch (e) { + } catch (e: any) { throw new Error(e); } }); @@ -220,6 +220,7 @@ describe("[queries] errors", () => { "setVar", 1 )( + // @ts-expect-error graphql<Props, Data, Vars>(query)( class extends React.Component<ChildProps<Props, Data, Vars>> { componentDidUpdate() { diff --git a/src/react/hoc/__tests__/queries/updateQuery.test.tsx b/src/react/hoc/__tests__/queries/updateQuery.test.tsx --- a/src/react/hoc/__tests__/queries/updateQuery.test.tsx +++ b/src/react/hoc/__tests__/queries/updateQuery.test.tsx @@ -143,7 +143,7 @@ describe("[queries] updateQuery", () => { ).toBeTruthy(); try { this.props.data!.updateQuery((p) => p); - } catch (e) { + } catch (e: any) { // TODO: branch never hit in test expect(e.toString()).toMatch( /ObservableQuery with this id doesn't exist:/ diff --git a/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx b/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx --- a/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx +++ b/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx @@ -945,6 +945,7 @@ describe("SSR", () => { </div> ); + // @ts-expect-error const WrappedElement = withQuery(withMutation(Element)); const app = ( diff --git a/src/testing/core/itAsync.ts b/src/testing/core/itAsync.ts --- a/src/testing/core/itAsync.ts +++ b/src/testing/core/itAsync.ts @@ -9,7 +9,7 @@ function wrap(key?: "only" | "skip" | "todo") { ) => (key ? 
it[key] : it)( message, - function () { + function (this: unknown) { return new Promise((resolve, reject) => callback.call(this, resolve, reject) ); @@ -21,7 +21,7 @@ function wrap(key?: "only" | "skip" | "todo") { const wrappedIt = wrap(); export const itAsync = Object.assign( - function (...args: Parameters<typeof wrappedIt>) { + function (this: unknown, ...args: Parameters<typeof wrappedIt>) { return wrappedIt.apply(this, args); }, { diff --git a/src/testing/core/mocking/mockLink.ts b/src/testing/core/mocking/mockLink.ts --- a/src/testing/core/mocking/mockLink.ts +++ b/src/testing/core/mocking/mockLink.ts @@ -45,7 +45,7 @@ function requestToKey(request: GraphQLRequest, addTypename: Boolean): string { } export class MockLink extends ApolloLink { - public operation: Operation; + public operation!: Operation; public addTypename: Boolean = true; public showWarnings: boolean = true; private mockedResponsesByKey: { [key: string]: MockedResponse[] } = {}; diff --git a/src/testing/core/mocking/mockSubscriptionLink.ts b/src/testing/core/mocking/mockSubscriptionLink.ts --- a/src/testing/core/mocking/mockSubscriptionLink.ts +++ b/src/testing/core/mocking/mockSubscriptionLink.ts @@ -15,7 +15,7 @@ export interface MockedSubscriptionResult { export class MockSubscriptionLink extends ApolloLink { public unsubscribers: any[] = []; public setups: any[] = []; - public operation: Operation; + public operation?: Operation; private observers: any[] = []; diff --git a/src/testing/core/observableToPromise.ts b/src/testing/core/observableToPromise.ts --- a/src/testing/core/observableToPromise.ts +++ b/src/testing/core/observableToPromise.ts @@ -11,7 +11,7 @@ import type { ObservableSubscription } from "../../utilities/index.js"; * @param errorCallbacks an expected set of errors */ export type Options = { - observable: ObservableQuery<any>; + observable: ObservableQuery<any, any>; shouldResolve?: boolean; wait?: number; errorCallbacks?: ((error: Error) => any)[]; diff --git a/src/testing/core/withConsoleSpy.ts b/src/testing/core/withConsoleSpy.ts --- a/src/testing/core/withConsoleSpy.ts +++ b/src/testing/core/withConsoleSpy.ts @@ -2,8 +2,7 @@ function wrapTestFunction( fn: (...args: any[]) => any, consoleMethodName: "log" | "warn" | "error" ) { - return function () { - const args = arguments; + return function (this: any, ...args: any[]) { const spy = jest.spyOn(console, consoleMethodName); spy.mockImplementation(() => {}); return new Promise((resolve) => { diff --git a/src/testing/internal/profile/profile.tsx b/src/testing/internal/profile/profile.tsx --- a/src/testing/internal/profile/profile.tsx +++ b/src/testing/internal/profile/profile.tsx @@ -203,7 +203,7 @@ export function profile< return render; }, async takeRender(options: NextRenderOptions = {}) { - let error: { message?: string } | undefined = undefined; + let error: unknown = undefined; try { return await Profiled.peekRender({ [_stackTrace]: captureStackTrace(Profiled.takeRender), diff --git a/src/testing/matchers/ProfiledComponent.ts b/src/testing/matchers/ProfiledComponent.ts --- a/src/testing/matchers/ProfiledComponent.ts +++ b/src/testing/matchers/ProfiledComponent.ts @@ -6,10 +6,10 @@ import type { ProfiledHook, } from "../internal/index.js"; export const toRerender: MatcherFunction<[options?: NextRenderOptions]> = - async function ( - _profiled: ProfiledComponent<any, any> | ProfiledHook<any, any>, - options?: NextRenderOptions - ) { + async function (actual, options) { + const _profiled = actual as + | ProfiledComponent<any, any> + | 
ProfiledHook<any, any>; const profiled = "ProfiledComponent" in _profiled ? _profiled.ProfiledComponent @@ -42,11 +42,10 @@ const failed = {}; export const toRenderExactlyTimes: MatcherFunction< [times: number, options?: NextRenderOptions] -> = async function ( - _profiled: ProfiledComponent<any, any> | ProfiledHook<any, any>, - times: number, - optionsPerRender?: NextRenderOptions -) { +> = async function (actual, times, optionsPerRender) { + const _profiled = actual as + | ProfiledComponent<any, any> + | ProfiledHook<any, any>; const profiled = "ProfiledComponent" in _profiled ? _profiled.ProfiledComponent : _profiled; const options = { timeout: 100, ...optionsPerRender }; diff --git a/src/utilities/common/__tests__/mergeDeep.ts b/src/utilities/common/__tests__/mergeDeep.ts --- a/src/utilities/common/__tests__/mergeDeep.ts +++ b/src/utilities/common/__tests__/mergeDeep.ts @@ -146,7 +146,7 @@ describe("mergeDeep", function () { }); it("supports custom reconciler functions", function () { - const merger = new DeepMerger((target, source, key) => { + const merger = new DeepMerger(function (target, source, key) { const targetValue = target[key]; const sourceValue = source[key]; if (Array.isArray(sourceValue)) { diff --git a/src/utilities/observables/__tests__/Observable.ts b/src/utilities/observables/__tests__/Observable.ts --- a/src/utilities/observables/__tests__/Observable.ts +++ b/src/utilities/observables/__tests__/Observable.ts @@ -39,8 +39,13 @@ describe("Observable", () => { return constructor as any; } + type ObservableWithSub<T> = Observable<T> & { sub?: Subscriber<T> }; + it("simulating super(sub) with Observable.call(this, sub)", () => { - function SubclassWithSuperCall<T>(sub: Subscriber<T>) { + function SubclassWithSuperCall<T>( + this: ObservableWithSub<T>, + sub: Subscriber<T> + ) { const self = Observable.call(this, sub) || this; self.sub = sub; return self; @@ -49,7 +54,10 @@ describe("Observable", () => { }); it("simulating super(sub) with Observable.apply(this, arguments)", () => { - function SubclassWithSuperApplyArgs<T>(_sub: Subscriber<T>) { + function SubclassWithSuperApplyArgs<T>( + this: ObservableWithSub<T>, + _sub: Subscriber<T> + ) { const self = Observable.apply(this, arguments) || this; self.sub = _sub; return self; @@ -58,7 +66,10 @@ describe("Observable", () => { }); it("simulating super(sub) with Observable.apply(this, [sub])", () => { - function SubclassWithSuperApplyArray<T>(...args: [Subscriber<T>]) { + function SubclassWithSuperApplyArray<T>( + this: ObservableWithSub<T>, + ...args: [Subscriber<T>] + ) { const self = Observable.apply(this, args) || this; self.sub = args[0]; return self; diff --git a/src/utilities/observables/__tests__/asyncMap.ts b/src/utilities/observables/__tests__/asyncMap.ts --- a/src/utilities/observables/__tests__/asyncMap.ts +++ b/src/utilities/observables/__tests__/asyncMap.ts @@ -25,6 +25,7 @@ function rejectExceptions<Args extends any[], Ret>( ) { return function () { try { + // @ts-expect-error return fn.apply(this, arguments); } catch (error) { reject(error); diff --git a/tsconfig.tests.json b/tsconfig.tests.json --- a/tsconfig.tests.json +++ b/tsconfig.tests.json @@ -1,5 +1,3 @@ { - "extends": "./tsconfig.json", - "include": ["src/**/__tests__/**/*.ts", "src/**/__tests__/**/*.tsx"], - "exclude": [] + "extends": "./src/tsconfig.json" }
Enable `strict` setting in tsconfig.json ### Issue Description Our codebase does not currently enable the `strict` setting. Turning it on causes compilation to fail, primarily because of the [`strictFunctionTypes` rule](https://www.typescriptlang.org/tsconfig#strictFunctionTypes). As a best practice, we should be using `strict` to avoid subtle bugs that may arise without it enabled. ### Link to Reproduction The codebase ### Reproduction Steps Add `strict: true` to `tsconfig.json` and watch all the tests fail due to TS errors (primarily in `src/utilities/graphql/transform.ts`).
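To make the failure concrete, here is a minimal sketch of the kind of assignment `strictFunctionTypes` starts rejecting; the `Visitor` and node types below are simplified stand-ins invented for illustration, not the real `graphql` AST types, and the narrowing shown mirrors the shape of the `EnterLeaveVisitor` change in the patch above.

```ts
// Plain TypeScript, no external dependencies. With "strict" (and therefore
// "strictFunctionTypes") enabled, function-typed properties are checked
// contravariantly in their parameter types.
interface FieldNode { kind: "Field"; name: string }
interface NameNode { kind: "Name"; value: string }
type AnyNode = FieldNode | NameNode;

interface Visitor {
  // Declared as a function-typed property, so it is strictly checked.
  enter?: (node: AnyNode) => void;
}

// Rejected once strict mode is on: a handler accepting only FieldNode is not
// a valid (node: AnyNode) => void, because the visitor may call it with a
// NameNode it cannot handle.
// const broken: Visitor = { enter: (node: FieldNode) => console.log(node.name) };

// Accepted: take the wider parameter type and narrow inside the handler.
const fixed: Visitor = {
  enter(node) {
    if (node.kind === "Field") console.log(node.name);
  },
};

fixed.enter?.({ kind: "Name", value: "ok" }); // runs, logs nothing
```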
2023-09-07T17:19:47Z
3.8
apollographql/apollo-client
11,180
apollographql__apollo-client-11180
[ "11179" ]
6b8198109bd9fe5eedf352421a0a773ac0acfb18
diff --git a/.size-limit.cjs b/.size-limit.cjs --- a/.size-limit.cjs +++ b/.size-limit.cjs @@ -1,7 +1,7 @@ const checks = [ { path: "dist/apollo-client.min.cjs", - limit: "38107", + limit: "38190", }, { path: "dist/main.cjs", @@ -10,7 +10,7 @@ const checks = [ { path: "dist/index.js", import: "{ ApolloClient, InMemoryCache, HttpLink }", - limit: "31980", + limit: "32044", }, ...[ "ApolloProvider", diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -931,6 +931,32 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, return this.reobserveAsConcast(newOptions, newNetworkStatus).promise; } + public resubscribeAfterError( + onNext: (value: ApolloQueryResult<TData>) => void, + onError?: (error: any) => void, + onComplete?: () => void + ): ObservableSubscription; + + public resubscribeAfterError( + observer: Observer<ApolloQueryResult<TData>> + ): ObservableSubscription; + + public resubscribeAfterError(...args: [any, any?, any?]) { + // If `lastError` is set in the current when the subscription is re-created, + // the subscription will immediately receive the error, which will + // cause it to terminate again. To avoid this, we first clear + // the last error/result from the `observableQuery` before re-starting + // the subscription, and restore the last value afterwards so that the + // subscription has a chance to stay open. + const last = this.last; + this.resetLastResults(); + + const subscription = this.subscribe(...args); + this.last = last; + + return subscription; + } + // (Re)deliver the current result to this.observers without applying fetch // policies or making network requests. private observe() { diff --git a/src/react/cache/QueryReference.ts b/src/react/cache/QueryReference.ts --- a/src/react/cache/QueryReference.ts +++ b/src/react/cache/QueryReference.ts @@ -248,6 +248,12 @@ export class InternalQueryReference<TData = unknown> { } private handleError(error: ApolloError) { + this.subscription.unsubscribe(); + this.subscription = this.observable.resubscribeAfterError( + this.handleNext, + this.handleError + ); + switch (this.status) { case "loading": { this.status = "idle"; diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -209,21 +209,8 @@ class InternalState<TData, TVariables extends OperationVariables> { }; const onError = (error: Error) => { - const last = obsQuery["last"]; subscription.unsubscribe(); - // Unfortunately, if `lastError` is set in the current - // `observableQuery` when the subscription is re-created, - // the subscription will immediately receive the error, which will - // cause it to terminate again. To avoid this, we first clear - // the last error/result from the `observableQuery` before re-starting - // the subscription, and restore it afterwards (so the subscription - // has a chance to stay open). - try { - obsQuery.resetLastResults(); - subscription = obsQuery.subscribe(onNext, onError); - } finally { - obsQuery["last"] = last; - } + subscription = obsQuery.resubscribeAfterError(onNext, onError); if (!hasOwnProperty.call(error, "graphQLErrors")) { // The error is not a GraphQL error
diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx --- a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -3503,6 +3503,258 @@ describe("useBackgroundQuery", () => { }, ]); }); + + it("can refetch after error is encountered", async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) { + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: null, + errors: [new GraphQLError("Oops couldn't fetch")], + }, + delay: 10, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: true } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Parent() { + const [queryRef, { refetch }] = useBackgroundQuery(query, { + variables: { id: "1" }, + }); + + return ( + <Suspense fallback={<SuspenseFallback />}> + <ErrorBoundary + onReset={() => refetch()} + fallbackRender={({ error, resetErrorBoundary }) => ( + <> + <button onClick={resetErrorBoundary}>Retry</button> + <div>{error.message}</div> + </> + )} + > + <Todo queryRef={queryRef} /> + </ErrorBoundary> + </Suspense> + ); + } + + function Todo({ queryRef }: { queryRef: QueryReference<Data> }) { + const { + data: { todo }, + } = useReadQuery(queryRef); + + return ( + <div data-testid="todo"> + {todo.name} + {todo.completed && " (completed)"} + </div> + ); + } + + render(<App />); + + // Disable error message shown in the console due to an uncaught error. + // TODO: need to determine why the error message is logged to the console + // as an uncaught error since other tests do not require this. + const consoleSpy = jest + .spyOn(console, "error") + .mockImplementation(() => {}); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect( + await screen.findByText("Oops couldn't fetch") + ).toBeInTheDocument(); + + consoleSpy.mockRestore(); + + const button = screen.getByText("Retry"); + + await act(() => user.click(button)); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("todo")).toHaveTextContent( + "Clean room (completed)" + ); + }); + }); + + it("throws errors on refetch after error is encountered after first fetch with error", async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: null, + errors: [new GraphQLError("Oops couldn't fetch")], + }, + delay: 10, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: null, + errors: [new GraphQLError("Oops couldn't fetch again")], + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Parent() { + const [queryRef, { refetch }] = useBackgroundQuery(query, { + variables: { id: "1" }, + }); + + return ( + <Suspense fallback={<SuspenseFallback />}> + <ErrorBoundary + onReset={() => refetch()} + fallbackRender={({ error, resetErrorBoundary }) => ( + <> + <button onClick={resetErrorBoundary}>Retry</button> + <div>{error.message}</div> + </> + )} + > + <Todo queryRef={queryRef} /> + </ErrorBoundary> + </Suspense> + ); + } + + function Todo({ queryRef }: { queryRef: QueryReference<Data> }) { + const { + data: { todo }, + } = useReadQuery(queryRef); + + return ( + <div data-testid="todo"> + {todo.name} + {todo.completed && " (completed)"} + </div> + ); + } + + render(<App />); + + // Disable error message shown in the console due to an uncaught error. + // TODO: need to determine why the error message is logged to the console + // as an uncaught error since other tests do not require this. + const consoleSpy = jest + .spyOn(console, "error") + .mockImplementation(() => {}); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect( + await screen.findByText("Oops couldn't fetch") + ).toBeInTheDocument(); + + const button = screen.getByText("Retry"); + + await act(() => user.click(button)); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + await waitFor(() => { + expect( + screen.getByText("Oops couldn't fetch again") + ).toBeInTheDocument(); + }); + + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + consoleSpy.mockRestore(); + }); + it("`refetch` works with startTransition to allow React to show stale UI until finished suspending", async () => { type Variables = { id: string; diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -3506,6 +3506,7 @@ describe("useSuspenseQuery", () => { }); it("tears down subscription when throwing an error", async () => { + jest.useFakeTimers(); const consoleSpy = jest.spyOn(console, "error").mockImplementation(); const { query, mocks } = useErrorCase({ @@ -3523,8 +3524,14 @@ describe("useSuspenseQuery", () => { await waitFor(() => expect(renders.errorCount).toBe(1)); + // The query was never retained since the error was thrown before the + // useEffect coud run. We need to wait for the auto dispose timeout to kick + // in before we check whether the observable was cleaned up + jest.advanceTimersByTime(30_000); + expect(client.getObservableQueries().size).toBe(0); + jest.useRealTimers(); consoleSpy.mockRestore(); });
`useBackgroundQuery` hangs on loading when refetching after error ### Issue Description When an error is encountered in `useBackgroundQuery`/`useReadQuery`, the error is thrown to the nearest error boundary. In some cases, users may want to execute a `refetch` from the error boundary to try the query again. When executing `refetch`, the component rendering `useReadQuery` is re-suspended and the Suspense boundary is displayed. In some cases, the query hangs on the loading state after the refetch. From my digging into this issue, this appears to happen when an error is returned again from the server after the refetch. The query _is_ executed through the network, but the result is never reported to `useReadQuery`. This does _not_ appear to happen when a refetch occurs and a successful result is returned after the refetch. I've linked a repo with a reproduction of this issue. ### Link to Reproduction https://github.com/jerelmiller/apollo-client-issue-11179 ### Reproduction Steps 1. Run the demo app 2. Reload the page until an error message is displayed (the synthetic error is returned 50% of the time) 3. Hit the "retry" button 4. If the response is successful, note that it works as intended. Now bump the synthetic error rate to `1` to always trigger this: ```diff query ArtistsQuery { - me @synthetics(timeout: 1000, errorRate: 0.5) { + me @synthetics(timeout: 1000, errorRate: 1) { albums { edges { node { id name } } } } } ``` 1. Click the "retry" button and notice that the "Loading" message is stuck there
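To make the retry flow easier to follow alongside the fix above, here is a minimal hedged sketch of the pattern the issue (and the new regression test) exercises. The query, data shape, and component names are illustrative assumptions, the imports assume Apollo Client 3.8+ and `react-error-boundary`, and this is a sketch of the usage pattern rather than code from the reproduction repository.

```tsx
// Sketch of refetch-after-error with useBackgroundQuery/useReadQuery.
// All identifiers except the Apollo Client and react-error-boundary APIs
// are invented for this example.
import { Suspense } from "react";
import { ErrorBoundary } from "react-error-boundary";
import { gql, useBackgroundQuery, useReadQuery } from "@apollo/client";
import type { TypedDocumentNode, QueryReference } from "@apollo/client";

interface Data {
  me: { albums: { edges: { node: { id: string; name: string } }[] } };
}

const ARTISTS_QUERY: TypedDocumentNode<Data> = gql`
  query ArtistsQuery {
    me {
      albums {
        edges {
          node {
            id
            name
          }
        }
      }
    }
  }
`;

function Albums({ queryRef }: { queryRef: QueryReference<Data> }) {
  // Throws to the nearest error boundary when the link returns an error.
  const { data } = useReadQuery(queryRef);
  return (
    <ul>
      {data.me.albums.edges.map(({ node }) => (
        <li key={node.id}>{node.name}</li>
      ))}
    </ul>
  );
}

export function Artists() {
  const [queryRef, { refetch }] = useBackgroundQuery(ARTISTS_QUERY);

  return (
    <Suspense fallback={<p>Loading</p>}>
      <ErrorBoundary
        // The retry path described in the issue: refetch from the boundary.
        onReset={() => refetch()}
        fallbackRender={({ error, resetErrorBoundary }) => (
          <>
            <div>{error.message}</div>
            <button onClick={resetErrorBoundary}>Retry</button>
          </>
        )}
      >
        <Albums queryRef={queryRef} />
      </ErrorBoundary>
    </Suspense>
  );
}
```

With the `resubscribeAfterError` change in the patch above, a second failed response is delivered to the error boundary again instead of leaving the Suspense fallback rendered indefinitely.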
2023-08-30T03:29:55Z
3.8
apollographql/apollo-client
11,078
apollographql__apollo-client-11078
[ "10912" ]
62f3b6d0e89611e27d9f29812ee60e5db5963fd6
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -906,12 +906,16 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`, const { concast, fromLink } = this.fetch(options, newNetworkStatus, query); const observer: Observer<ApolloQueryResult<TData>> = { next: (result) => { - finishWaitingForOwnResult(); - this.reportResult(result, variables); + if (equal(this.variables, variables)) { + finishWaitingForOwnResult(); + this.reportResult(result, variables); + } }, error: (error) => { - finishWaitingForOwnResult(); - this.reportError(error, variables); + if (equal(this.variables, variables)) { + finishWaitingForOwnResult(); + this.reportError(error, variables); + } }, };
diff --git a/src/__tests__/mutationResults.ts b/src/__tests__/mutationResults.ts --- a/src/__tests__/mutationResults.ts +++ b/src/__tests__/mutationResults.ts @@ -1186,8 +1186,6 @@ describe("mutation results", () => { subscribeAndCount(reject, watchedQuery, (count, result) => { if (count === 1) { - expect(result.data).toEqual({ echo: "a" }); - } else if (count === 2) { expect(result.data).toEqual({ echo: "b" }); client.mutate({ mutation: resetMutation, @@ -1197,7 +1195,7 @@ describe("mutation results", () => { }, }, }); - } else if (count === 3) { + } else if (count === 2) { expect(result.data).toEqual({ echo: "0" }); resolve(); } diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -34,6 +34,7 @@ import wrap from "../../testing/core/wrap"; import { resetStore } from "./QueryManager"; import { SubscriptionObserver } from "zen-observable-ts"; import { waitFor } from "@testing-library/react"; +import { ObservableStream } from "../../testing/internal"; export const mockFetchQuery = (queryManager: QueryManager<any>) => { const fetchConcastWithInfo = queryManager["fetchConcastWithInfo"]; @@ -1086,6 +1087,98 @@ describe("ObservableQuery", () => { } ); + it("calling refetch with different variables before the query itself resolved will only yield the result for the new variables", async () => { + const observers: SubscriptionObserver<FetchResult<typeof dataOne>>[] = []; + const queryManager = new QueryManager({ + cache: new InMemoryCache(), + link: new ApolloLink((operation, forward) => { + return new Observable((observer) => { + observers.push(observer); + }); + }), + }); + const observableQuery = queryManager.watchQuery({ + query, + variables: { id: 1 }, + }); + const stream = new ObservableStream(observableQuery); + + observableQuery.refetch({ id: 2 }); + + observers[0].next({ data: dataOne }); + observers[0].complete(); + + observers[1].next({ data: dataTwo }); + observers[1].complete(); + + { + const result = await stream.takeNext(); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataTwo, + }); + } + expect(stream.take()).rejects.toThrow(/Timeout/i); + }); + + it("calling refetch multiple times with different variables will return only results for the most recent variables", async () => { + const observers: SubscriptionObserver<FetchResult<typeof dataOne>>[] = []; + const queryManager = new QueryManager({ + cache: new InMemoryCache(), + link: new ApolloLink((operation, forward) => { + return new Observable((observer) => { + observers.push(observer); + }); + }), + }); + const observableQuery = queryManager.watchQuery({ + query, + variables: { id: 1 }, + }); + const stream = new ObservableStream(observableQuery); + + observers[0].next({ data: dataOne }); + observers[0].complete(); + + { + const result = await stream.takeNext(); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: dataOne, + }); + } + + observableQuery.refetch({ id: 2 }); + observableQuery.refetch({ id: 3 }); + + observers[1].next({ data: dataTwo }); + observers[1].complete(); + + observers[2].next({ + data: { + people_one: { + name: "SomeOneElse", + }, + }, + }); + observers[2].complete(); + + { + const result = await stream.takeNext(); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data: { + people_one: { + name: "SomeOneElse", + }, + }, + }); + } + }); + itAsync( "calls 
fetchRequest with fetchPolicy `no-cache` when using `no-cache` fetch policy", (resolve, reject) => { diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -1,4 +1,4 @@ -import React, { Fragment, ReactNode, useEffect, useState } from "react"; +import React, { Fragment, ReactNode, useEffect, useRef, useState } from "react"; import { DocumentNode, GraphQLError } from "graphql"; import gql from "graphql-tag"; import { act } from "react-dom/test-utils"; @@ -27,6 +27,7 @@ import { QueryResult } from "../../types/types"; import { useQuery } from "../useQuery"; import { useMutation } from "../useMutation"; import { profileHook, spyOnConsole } from "../../../testing/internal"; +import { useApolloClient } from "../useApolloClient"; describe("useQuery Hook", () => { describe("General use", () => { @@ -4494,6 +4495,138 @@ describe("useQuery Hook", () => { }); }); }); + + it("keeps cache consistency when a call to refetchQueries is interrupted with another query caused by changing variables and the second query returns before the first one", async () => { + const CAR_QUERY_BY_ID = gql` + query Car($id: Int) { + car(id: $id) { + make + model + } + } + `; + + const mocks = { + 1: [ + { + car: { + make: "Audi", + model: "A4", + __typename: "Car", + }, + }, + { + car: { + make: "Audi", + model: "A3", // Changed + __typename: "Car", + }, + }, + ], + 2: [ + { + car: { + make: "Audi", + model: "RS8", + __typename: "Car", + }, + }, + ], + }; + + const link = new ApolloLink( + (operation) => + new Observable((observer) => { + if (operation.variables.id === 1) { + // Queries for this ID return after a delay + setTimeout(() => { + const data = mocks[1].splice(0, 1).pop(); + observer.next({ data }); + observer.complete(); + }, 100); + } else if (operation.variables.id === 2) { + // Queries for this ID return immediately + const data = mocks[2].splice(0, 1).pop(); + observer.next({ data }); + observer.complete(); + } else { + observer.error(new Error("Unexpected query")); + } + }) + ); + + const hookResponse = jest.fn().mockReturnValue(null); + + function Component({ children, id }: any) { + const result = useQuery(CAR_QUERY_BY_ID, { + variables: { id }, + notifyOnNetworkStatusChange: true, + fetchPolicy: "network-only", + }); + const client = useApolloClient(); + const hasRefetchedRef = useRef(false); + + useEffect(() => { + if ( + result.networkStatus === NetworkStatus.ready && + !hasRefetchedRef.current + ) { + client.reFetchObservableQueries(); + hasRefetchedRef.current = true; + } + }, [result.networkStatus]); + + return children(result); + } + + const { rerender } = render( + <Component id={1}>{hookResponse}</Component>, + { + wrapper: ({ children }) => ( + <MockedProvider link={link}>{children}</MockedProvider> + ), + } + ); + + await waitFor(() => { + // Resolves as soon as reFetchObservableQueries is + // called, but before the result is returned + expect(hookResponse).toHaveBeenCalledTimes(3); + }); + + rerender(<Component id={2}>{hookResponse}</Component>); + + await waitFor(() => { + // All results are returned + expect(hookResponse).toHaveBeenCalledTimes(5); + }); + + expect(hookResponse.mock.calls.map((call) => call[0].data)).toEqual([ + undefined, + { + car: { + __typename: "Car", + make: "Audi", + model: "A4", + }, + }, + { + car: { + __typename: "Car", + make: "Audi", + model: "A4", + }, + }, + undefined, + { + car: { + __typename: "Car", + make: 
"Audi", + model: "RS8", + }, + }, + ]); + }); }); describe("Callbacks", () => { diff --git a/src/testing/internal/ObservableStream.ts b/src/testing/internal/ObservableStream.ts --- a/src/testing/internal/ObservableStream.ts +++ b/src/testing/internal/ObservableStream.ts @@ -29,6 +29,7 @@ async function* observableToAsyncEventIterator<T>(observable: Observable<T>) { (error) => resolveNext({ type: "error", error }), () => resolveNext({ type: "complete" }) ); + yield "initialization value" as unknown as Promise<ObservableEvent<T>>; while (true) { yield promises.shift()!; @@ -54,7 +55,11 @@ class IteratorStream<T> { export class ObservableStream<T> extends IteratorStream<ObservableEvent<T>> { constructor(observable: Observable<T>) { - super(observableToAsyncEventIterator(observable)); + const iterator = observableToAsyncEventIterator(observable); + // we need to call next() once to start the generator so we immediately subscribe. + // the first value is always "initialization value" which we don't care about + iterator.next(); + super(iterator); } async takeNext(options?: TakeOptions): Promise<T> {
Add failing test for cache consistency bug Since the bug is really specific, I've submitted a failing test case instead of a bug report—hope this helps! ### Checklist: - [ ] If this PR contains changes to the library itself (not necessary for e.g. docs updates), please include a changeset (see [CONTRIBUTING.md](https://github.com/apollographql/apollo-client/blob/main/CONTRIBUTING.md#changesets)) - [ ] If this PR is a new feature, please reference an issue where a consensus about the design was reached (not necessary for small changes) - [ ] Make sure all of the significant new logic is covered by tests
2023-07-18T11:38:10Z
3.9
apollographql/apollo-client
10,853
apollographql__apollo-client-10853
[ "10767" ]
e3c676deb59d006f33d24a7211e58725a67641b8
diff --git a/config/entryPoints.js b/config/entryPoints.js --- a/config/entryPoints.js +++ b/config/entryPoints.js @@ -11,6 +11,7 @@ const entryPoints = [ { dirs: ['link', 'http'] }, { dirs: ['link', 'persisted-queries'] }, { dirs: ['link', 'retry'] }, + { dirs: ['link', 'remove-typename'] }, { dirs: ['link', 'schema'] }, { dirs: ['link', 'subscriptions'] }, { dirs: ['link', 'utils'] }, diff --git a/src/link/http/selectHttpOptionsAndBody.ts b/src/link/http/selectHttpOptionsAndBody.ts --- a/src/link/http/selectHttpOptionsAndBody.ts +++ b/src/link/http/selectHttpOptionsAndBody.ts @@ -1,6 +1,5 @@ import type { ASTNode} from 'graphql'; import { print } from 'graphql'; -import { stripTypename } from '../../utilities'; import type { Operation } from '../core'; @@ -181,7 +180,7 @@ export function selectHttpOptionsAndBodyInternal( //The body depends on the http options const { operationName, extensions, variables, query } = operation; - const body: Body = { operationName, variables: stripTypename(variables) }; + const body: Body = { operationName, variables }; if (http.includeExtensions) (body as any).extensions = extensions; diff --git a/src/link/remove-typename/index.ts b/src/link/remove-typename/index.ts new file mode 100644 --- /dev/null +++ b/src/link/remove-typename/index.ts @@ -0,0 +1,5 @@ +export { + removeTypenameFromVariables, + KEEP, + RemoveTypenameFromVariablesOptions, +} from './removeTypenameFromVariables'; diff --git a/src/link/remove-typename/removeTypenameFromVariables.ts b/src/link/remove-typename/removeTypenameFromVariables.ts new file mode 100644 --- /dev/null +++ b/src/link/remove-typename/removeTypenameFromVariables.ts @@ -0,0 +1,105 @@ +import { Trie } from '@wry/trie'; +import { wrap } from 'optimism'; +import type { DocumentNode, TypeNode } from 'graphql'; +import { Kind, visit } from 'graphql'; +import { ApolloLink } from '../core'; +import { canUseWeakMap, stripTypename } from '../../utilities'; + +export const KEEP = '__KEEP'; + +interface KeepTypenameConfig { + [key: string]: typeof KEEP | KeepTypenameConfig; +} + +export interface RemoveTypenameFromVariablesOptions { + except?: KeepTypenameConfig; +} + +export function removeTypenameFromVariables( + options: RemoveTypenameFromVariablesOptions = Object.create(null) +) { + const { except } = options; + + const trie = new Trie<typeof stripTypename.BREAK>( + canUseWeakMap, + () => stripTypename.BREAK + ); + + if (except) { + // Use `lookupArray` to store the path in the `trie` ahead of time. We use + // `peekArray` when actually checking if a path is configured in the trie + // to avoid creating additional lookup paths in the trie. 
+ collectPaths(except, (path) => trie.lookupArray(path)); + } + + return new ApolloLink((operation, forward) => { + const { query, variables } = operation; + + if (!variables) { + return forward(operation); + } + + if (!except) { + return forward({ ...operation, variables: stripTypename(variables) }); + } + + const variableDefinitions = getVariableDefinitions(query); + + return forward({ + ...operation, + variables: stripTypename(variables, { + keep: (variablePath) => { + const typename = variableDefinitions[variablePath[0]]; + + // The path configurations do not include array indexes, so we + // omit them when checking the `trie` for a configured path + const withoutArrayIndexes = variablePath.filter( + (segment) => typeof segment === 'string' + ); + + // Our path configurations use the typename as the root so we need to + // replace the first segment in the variable path with the typename + // instead of the top-level variable name. + return trie.peekArray([typename, ...withoutArrayIndexes.slice(1)]); + }, + }), + }); + }); +} + +const getVariableDefinitions = wrap((document: DocumentNode) => { + const definitions: Record<string, string> = {}; + + visit(document, { + VariableDefinition(node) { + definitions[node.variable.name.value] = unwrapType(node.type); + }, + }); + + return definitions; +}); + +function unwrapType(node: TypeNode): string { + switch (node.kind) { + case Kind.NON_NULL_TYPE: + return unwrapType(node.type); + case Kind.LIST_TYPE: + return unwrapType(node.type); + case Kind.NAMED_TYPE: + return node.name.value; + } +} + +function collectPaths( + config: KeepTypenameConfig, + register: (path: string[]) => void, + path: string[] = [] +) { + Object.entries(config).forEach(([key, value]) => { + if (value === KEEP) { + return register([...path, key]); + } + + collectPaths(value, register, path.concat(key)); + }); +} diff --git a/src/link/subscriptions/index.ts b/src/link/subscriptions/index.ts --- a/src/link/subscriptions/index.ts +++ b/src/link/subscriptions/index.ts @@ -33,7 +33,7 @@ import type { Client } from "graphql-ws"; import type { Operation, FetchResult } from "../core"; import { ApolloLink } from "../core"; -import { isNonNullObject, stripTypename, Observable } from "../../utilities"; +import { isNonNullObject, Observable } from "../../utilities"; import { ApolloError } from "../../errors"; // https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/close_event @@ -54,11 +54,7 @@ export class GraphQLWsLink extends ApolloLink { public request(operation: Operation): Observable<FetchResult> { return new Observable((observer) => { return this.client.subscribe<FetchResult>( - { - ...operation, - query: print(operation.query), - variables: stripTypename(operation.variables) - }, + { ...operation, query: print(operation.query) }, { next: observer.next.bind(observer), complete: observer.complete.bind(observer), diff --git a/src/utilities/common/omitDeep.ts b/src/utilities/common/omitDeep.ts --- a/src/utilities/common/omitDeep.ts +++ b/src/utilities/common/omitDeep.ts @@ -1,14 +1,28 @@ import type { DeepOmit } from '../types/DeepOmit'; import { isPlainObject } from './objects'; -export function omitDeep<T, K extends string>(value: T, key: K) { - return __omitDeep(value, key); +const BREAK: unique symbol = Symbol('BREAK'); + +export interface OmitDeepOptions { + keep?: (path: (string | number)[]) => boolean | typeof BREAK | undefined; +} + +export function omitDeep<T, K extends string>( + value: T, + key: K, + options: OmitDeepOptions = Object.create(null) +) { + return 
__omitDeep(value, key, options); } +omitDeep.BREAK = BREAK; + function __omitDeep<T, K extends string>( value: T, key: K, - known = new Map<any, any>() + options: OmitDeepOptions, + known = new Map<any, any>(), + path = [] as (string | number)[] ): DeepOmit<T, K> { if (known.has(value)) { return known.get(value); @@ -21,7 +35,11 @@ function __omitDeep<T, K extends string>( known.set(value, array); value.forEach((value, index) => { - const result = __omitDeep(value, key, known); + const objectPath = path.concat(index); + const result = + options.keep?.(objectPath) === BREAK + ? value + : __omitDeep(value, key, options, known, objectPath); modified ||= result !== value; array[index] = result; @@ -35,14 +53,22 @@ function __omitDeep<T, K extends string>( known.set(value, obj); Object.keys(value).forEach((k) => { - if (k === key) { - modified = true; - } else { - const result = __omitDeep(value[k], key, known); - modified ||= result !== value[k]; + const objectPath = path.concat(k); + const keep = options.keep?.(objectPath); - obj[k] = result; + if (k === key && keep !== true) { + modified = true; + return; } + + const result = + keep === BREAK + ? value[k] + : __omitDeep(value[k], key, options, known, objectPath); + + modified ||= result !== value[k]; + + obj[k] = result; }); if (modified) { diff --git a/src/utilities/common/stripTypename.ts b/src/utilities/common/stripTypename.ts --- a/src/utilities/common/stripTypename.ts +++ b/src/utilities/common/stripTypename.ts @@ -1,5 +1,12 @@ +import type { OmitDeepOptions } from './omitDeep'; import { omitDeep } from './omitDeep'; -export function stripTypename<T>(value: T) { - return omitDeep(value, '__typename'); +interface StripTypenameOptions { + keep?: OmitDeepOptions['keep']; } + +export function stripTypename<T>(value: T, options?: StripTypenameOptions) { + return omitDeep(value, '__typename', options); +} + +stripTypename.BREAK = omitDeep.BREAK; diff --git a/src/utilities/index.ts b/src/utilities/index.ts --- a/src/utilities/index.ts +++ b/src/utilities/index.ts @@ -106,7 +106,7 @@ export * from './common/mergeOptions'; export * from './common/responseIterator'; export * from './common/incrementalResult'; -export { omitDeep } from './common/omitDeep'; +export { omitDeep, OmitDeepOptions } from './common/omitDeep'; export { stripTypename } from './common/stripTypename'; export * from './types/IsStrictlyAny';
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -206,6 +206,13 @@ Array [ ] `; +exports[`exports of public entry points @apollo/client/link/remove-typename 1`] = ` +Array [ + "KEEP", + "removeTypenameFromVariables", +] +`; + exports[`exports of public entry points @apollo/client/link/retry 1`] = ` Array [ "RetryLink", diff --git a/src/__tests__/exports.ts b/src/__tests__/exports.ts --- a/src/__tests__/exports.ts +++ b/src/__tests__/exports.ts @@ -15,6 +15,7 @@ import * as linkError from "../link/error"; import * as linkHTTP from "../link/http"; import * as linkPersistedQueries from "../link/persisted-queries"; import * as linkRetry from "../link/retry"; +import * as linkRemoveTypename from "../link/remove-typename"; import * as linkSchema from "../link/schema"; import * as linkSubscriptions from "../link/subscriptions"; import * as linkUtils from "../link/utils"; @@ -56,6 +57,7 @@ describe('exports of public entry points', () => { check("@apollo/client/link/error", linkError); check("@apollo/client/link/http", linkHTTP); check("@apollo/client/link/persisted-queries", linkPersistedQueries); + check("@apollo/client/link/remove-typename", linkRemoveTypename); check("@apollo/client/link/retry", linkRetry); check("@apollo/client/link/schema", linkSchema); check("@apollo/client/link/subscriptions", linkSubscriptions); diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -1111,262 +1111,6 @@ describe('HttpLink', () => { new Error('HttpLink: Trying to send a client-only query to the server. To send to the server, ensure a non-client field is added to the query or set the `transformOptions.removeClientFields` option to `true`.') ); }); - - it('strips __typename from object argument when sending a mutation', async () => { - fetchMock.mock('https://example.com/graphql', { - status: 200, - body: JSON.stringify({ - data: { - __typename: 'Mutation', - updateTodo: { - __typename: 'Todo', - id: 1, - name: 'Take out trash', - completed: true - } - } - }), - headers: { 'content-type': 'application/json' } - }); - - const query = gql` - mutation UpdateTodo($todo: TodoInput!) { - updateTodo(todo: $todo) { - id - name - completed - } - } - `; - - const link = createHttpLink({ uri: 'https://example.com/graphql' }); - - const todo = { - __typename: 'Todo', - id: 1, - name: 'Take out trash', - completed: true, - } - - await new Promise((resolve, reject) => { - execute(link, { query, variables: { todo } }).subscribe({ - next: resolve, - error: reject - }); - }); - - const [, options] = fetchMock.lastCall()!; - const { body } = options! 
- - expect(JSON.parse(body!.toString())).toEqual({ - operationName: 'UpdateTodo', - query: print(query), - variables: { - todo: { - id: 1, - name: 'Take out trash', - completed: true, - } - } - }); - }); - - it('strips __typename from array argument when sending a mutation', async () => { - fetchMock.mock('https://example.com/graphql', { - status: 200, - body: JSON.stringify({ - data: { - __typename: 'Mutation', - updateTodos: [ - { - __typename: 'Todo', - id: 1, - name: 'Take out trash', - completed: true - }, - { - __typename: 'Todo', - id: 2, - name: 'Clean room', - completed: true - }, - ] - } - }), - headers: { 'content-type': 'application/json' } - }); - - const query = gql` - mutation UpdateTodos($todos: [TodoInput!]!) { - updateTodos(todos: $todos) { - id - name - completed - } - } - `; - - const link = createHttpLink({ uri: 'https://example.com/graphql' }); - - const todos = [ - { - __typename: 'Todo', - id: 1, - name: 'Take out trash', - completed: true, - }, - { - __typename: 'Todo', - id: 2, - name: 'Clean room', - completed: true, - }, - ]; - - await new Promise((resolve, reject) => { - execute(link, { query, variables: { todos } }).subscribe({ - next: resolve, - error: reject - }); - }); - - const [, options] = fetchMock.lastCall()!; - const { body } = options! - - expect(JSON.parse(body!.toString())).toEqual({ - operationName: 'UpdateTodos', - query: print(query), - variables: { - todos: [ - { - id: 1, - name: 'Take out trash', - completed: true, - }, - { - id: 2, - name: 'Clean room', - completed: true, - }, - ] - } - }); - }); - - it('strips __typename from mixed argument when sending a mutation', async () => { - fetchMock.mock('https://example.com/graphql', { - status: 200, - body: JSON.stringify({ - data: { - __typename: 'Mutation', - updateProfile: { - __typename: 'Profile', - id: 1, - }, - } - }), - headers: { 'content-type': 'application/json' } - }); - - const query = gql` - mutation UpdateProfile($profile: ProfileInput!) { - updateProfile(profile: $profile) { - id - } - } - `; - - const link = createHttpLink({ uri: 'https://example.com/graphql' }); - - const profile = { - __typename: 'Profile', - id: 1, - interests: [ - { __typename: 'Interest', name: 'Hiking' }, - { __typename: 'Interest', name: 'Nature' } - ], - avatar: { - __typename: 'Avatar', - url: 'https://example.com/avatar.jpg', - } - }; - - await new Promise((resolve, reject) => { - execute(link, { query, variables: { profile } }).subscribe({ - next: resolve, - error: reject - }); - }); - - const [, options] = fetchMock.lastCall()!; - const { body } = options! - - expect(JSON.parse(body!.toString())).toEqual({ - operationName: 'UpdateProfile', - query: print(query), - variables: { - profile: { - id: 1, - interests: [ - { name: 'Hiking' }, - { name: 'Nature' } - ], - avatar: { - url: 'https://example.com/avatar.jpg', - }, - }, - } - }); - }); - }); - - it('strips __typename when sending a query', async () => { - fetchMock.mock('https://example.com/graphql', { - status: 200, - body: JSON.stringify({ - data: { - __typename: 'Query', - searchTodos: [] - } - }), - headers: { 'content-type': 'application/json' } - }); - - const query = gql` - query SearchTodos($filter: TodoFilter!) 
{ - searchTodos(filter: $filter) { - id - name - } - } - `; - - const link = createHttpLink({ uri: 'https://example.com/graphql' }); - - const filter = { - __typename: 'Filter', - completed: true, - }; - - await new Promise((resolve, reject) => { - execute(link, { query, variables: { filter } }).subscribe({ - next: resolve, - error: reject - }); - }); - - const [, options] = fetchMock.lastCall()!; - const { body } = options! - - expect(JSON.parse(body!.toString())).toEqual({ - operationName: 'SearchTodos', - query: print(query), - variables: { - filter: { - completed: true, - }, - }, - }); }); describe('Dev warnings', () => { diff --git a/src/link/http/__tests__/selectHttpOptionsAndBody.ts b/src/link/http/__tests__/selectHttpOptionsAndBody.ts --- a/src/link/http/__tests__/selectHttpOptionsAndBody.ts +++ b/src/link/http/__tests__/selectHttpOptionsAndBody.ts @@ -104,25 +104,4 @@ describe('selectHttpOptionsAndBody', () => { expect(body.query).toBe('query SampleQuery{stub{id}}'); }); - - it('strips __typename from variables', () => { - const operation = createOperation( - {}, - { - query, - variables: { - __typename: 'Test', - nested: { __typename: 'Nested', foo: 'bar' }, - array: [{ __typename: 'Item', baz: 'foo' }] - }, - } - ); - - const { body } = selectHttpOptionsAndBody(operation, {}); - - expect(body.variables).toEqual({ - nested: { foo: 'bar' }, - array: [{ baz: 'foo' }], - }); - }) }); diff --git a/src/link/remove-typename/__tests__/removeTypenameFromVariables.ts b/src/link/remove-typename/__tests__/removeTypenameFromVariables.ts new file mode 100644 --- /dev/null +++ b/src/link/remove-typename/__tests__/removeTypenameFromVariables.ts @@ -0,0 +1,417 @@ +import { + KEEP, + removeTypenameFromVariables, +} from '../removeTypenameFromVariables'; +import { ApolloLink, Operation } from '../../core'; +import { Observable, gql } from '../../../core'; +import { createOperation, toPromise } from '../../utils'; + +type PartialOperation = Partial<Pick<Operation, 'variables'>> & + Pick<Operation, 'query'>; + +// Since this link modifies the `operation` and we only care to test against +// the changed operation, we use a custom `execute` helper here instead of the +// version exported by the `core` module, which expects a well-formed response. +async function execute(link: ApolloLink, operation: PartialOperation) { + function forward(operation: Operation) { + // use the `data` key to satisfy the TypeScript types required by + // `forward`'s' return value + return Observable.of({ data: operation }); + } + + const { data } = await toPromise( + link.request(createOperation({}, operation), forward)! + ); + + return data as Operation; +} + +test('strips all __typename keys by default', async () => { + const query = gql` + query Test($foo: FooInput!, $bar: BarInput!) 
{ + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables(); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: 'Foo', + foo: true, + bar: 'Bar', + baz: { __typename: 'Baz', baz: true }, + qux: [{ __typename: 'Qux', qux: 0 }], + }, + bar: [{ __typename: 'Bar', bar: true }], + }, + }); + + expect(variables).toStrictEqual({ + foo: { + foo: true, + bar: 'Bar', + baz: { baz: true }, + qux: [{ qux: 0 }], + }, + bar: [{ bar: true }], + }); +}); + +test('does nothing when no variables are passed', async () => { + const query = gql` + query Test { + foo { + bar + } + } + `; + + const link = removeTypenameFromVariables(); + + const operation = { query }; + const resultOperation = await execute(link, operation); + + expect(resultOperation).toBe(operation); +}); + +test('does nothing when no variables are passed even if variables are declared in the document', async () => { + const query = gql` + query Test($unused: Boolean) { + foo { + bar + } + } + `; + + const link = removeTypenameFromVariables(); + + const operation = { query }; + const resultOperation = await execute(link, operation); + + expect(resultOperation).toBe(operation); +}); + +test('keeps __typename for variables with types defined by `except`', async () => { + const query = gql` + query Test($foo: JSON, $bar: BarInput) { + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: 'Foo', + foo: true, + baz: { __typename: 'Baz', baz: true }, + }, + bar: { __typename: 'Bar', bar: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + __typename: 'Foo', + foo: true, + baz: { __typename: 'Baz', baz: true }, + }, + bar: { bar: true }, + }); +}); + +test('keeps __typename in all variables with types configured with `except`', async () => { + const query = gql` + query Test($foo: JSON, $bar: Config, $baz: BazInput) { + someField(foo: $foo, bar: $bar, baz: $baz) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + Config: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { __typename: 'Foo', foo: true }, + bar: { __typename: 'Bar', bar: true }, + baz: { __typename: 'Baz', baz: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { __typename: 'Foo', foo: true }, + bar: { __typename: 'Bar', bar: true }, + baz: { baz: true }, + }); +}); + +test('handles variable declarations declared as non null and list types', async () => { + const query = gql` + query Test($foo: JSON!, $bar: [JSON], $baz: [JSON!]!, $qux: QuxInput!) 
{ + someField(foo: $foo, bar: $bar, baz: $baz) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { __typename: 'Foo', foo: true }, + bar: [ + { __typename: 'Bar', bar: true, baz: { __typename: 'Baz', baz: true } }, + ], + baz: [ + { __typename: 'Baz', baz: true }, + { __typename: 'Baz', baz: true }, + ], + qux: { __typename: 'Qux', qux: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { __typename: 'Foo', foo: true }, + bar: [ + { __typename: 'Bar', bar: true, baz: { __typename: 'Baz', baz: true } }, + ], + baz: [ + { __typename: 'Baz', baz: true }, + { __typename: 'Baz', baz: true }, + ], + qux: { qux: true }, + }); +}); + +test('keeps __typename at configured fields under input object types', async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: KEEP, + baz: KEEP, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: 'Foo', + aa: true, + bar: { + __typename: 'Bar', + bb: true, + }, + baz: { + __typename: 'Baz', + cc: true, + }, + qux: { + __typename: 'Qux', + dd: true, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + aa: true, + bar: { + __typename: 'Bar', + bb: true, + }, + baz: { + __typename: 'Baz', + cc: true, + }, + qux: { + dd: true, + }, + }, + }); +}); + +test('keeps __typename at a deeply nested field', async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: { + baz: { + qux: KEEP, + }, + }, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: 'Foo', + bar: { + __typename: 'Bar', + baz: { + __typename: 'Baz', + qux: { + __typename: 'Qux', + quux: true, + }, + }, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + bar: { + baz: { + qux: { + __typename: 'Qux', + quux: true, + }, + }, + }, + }, + }); +}); + +test('handles configured fields varying nesting levels', async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: KEEP, + baz: { + qux: KEEP, + }, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: 'Foo', + bar: { + __typename: 'Bar', + aa: true, + }, + baz: { + __typename: 'Baz', + qux: { + __typename: 'Qux', + quux: true, + }, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + bar: { + __typename: 'Bar', + aa: true, + }, + baz: { + qux: { + __typename: 'Qux', + quux: true, + }, + }, + }, + }); +}); + +test('handles multiple configured types with fields', async () => { + const query = gql` + query Test($foo: FooInput, $baz: BazInput) { + someField(foo: $foo, baz: $baz) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: KEEP, + }, + BazInput: { + qux: KEEP, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: 'Foo', + bar: { + __typename: 'Bar', + aa: true, + }, + }, + baz: { + __typename: 'Bar', + qux: { + __typename: 'Qux', + bb: true, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + bar: { + __typename: 'Bar', + aa: true, + }, + }, + 
baz: { + qux: { + __typename: 'Qux', + bb: true, + }, + }, + }); +}); diff --git a/src/utilities/common/__tests__/omitDeep.ts b/src/utilities/common/__tests__/omitDeep.ts --- a/src/utilities/common/__tests__/omitDeep.ts +++ b/src/utilities/common/__tests__/omitDeep.ts @@ -1,3 +1,4 @@ +import equal from '@wry/equality'; import { omitDeep } from '../omitDeep'; test('omits the key from a shallow object', () => { @@ -135,3 +136,114 @@ test('only considers plain objects and ignores class instances when omitting pro expect(modifiedThing).toBe(thing); expect(modifiedThing).toHaveProperty('omit', false); }); + +test('allows paths to be conditionally kept with the `keep` option by returning `true`', () => { + const original = { + omit: true, + foo: { omit: false, bar: 'bar' }, + omitFirst: [ + { omit: true, foo: 'bar' }, + { omit: false, foo: 'bar' }, + ], + }; + + const result = omitDeep(original, 'omit', { + keep: (path) => { + return ( + equal(path, ['foo', 'omit']) || equal(path, ['omitFirst', 1, 'omit']) + ); + }, + }); + + expect(result).toEqual({ + foo: { omit: false, bar: 'bar' }, + omitFirst: [{ foo: 'bar' }, { omit: false, foo: 'bar' }], + }); +}); + +test('allows path traversal to be skipped with the `keep` option by returning `BREAK`', () => { + const original = { + omit: true, + foo: { + omit: false, + bar: 'bar', + baz: { + foo: 'bar', + omit: false, + }, + }, + keepAll: [ + { omit: false, foo: 'bar' }, + { omit: false, foo: 'bar' }, + ], + keepOne: [ + { omit: false, nested: { omit: false, foo: 'bar' } }, + { omit: true, nested: { omit: true, foo: 'bar' } }, + ], + }; + + const result = omitDeep(original, 'omit', { + keep: (path) => { + if ( + equal(path, ['foo']) || + equal(path, ['keepAll']) || + equal(path, ['keepOne', 0]) + ) { + return omitDeep.BREAK; + } + }, + }); + + expect(result).toEqual({ + foo: { omit: false, bar: 'bar', baz: { foo: 'bar', omit: false } }, + keepAll: [ + { omit: false, foo: 'bar' }, + { omit: false, foo: 'bar' }, + ], + keepOne: [ + { omit: false, nested: { omit: false, foo: 'bar' } }, + { nested: { foo: 'bar' } }, + ], + }); + + expect(result.foo).toBe(original.foo); + expect(result.keepAll).toBe(original.keepAll); + expect(result.keepOne[0]).toBe(original.keepOne[0]); +}); + +test('can mix and match `keep` with `true `and `BREAK`', () => { + const original = { + omit: true, + foo: { + omit: false, + bar: 'bar', + baz: { + foo: 'bar', + omit: false, + }, + }, + omitFirst: [ + { omit: false, foo: 'bar' }, + { omit: true, foo: 'bar' }, + ], + }; + + const result = omitDeep(original, 'omit', { + keep: (path) => { + if (equal(path, ['foo'])) { + return omitDeep.BREAK; + } + + if (equal(path, ['omitFirst', 0, 'omit'])) { + return true; + } + }, + }); + + expect(result).toEqual({ + foo: { omit: false, bar: 'bar', baz: { foo: 'bar', omit: false } }, + omitFirst: [{ omit: false, foo: 'bar' }, { foo: 'bar' }], + }); + + expect(result.foo).toBe(original.foo); +}); diff --git a/src/utilities/common/__tests__/stripTypename.ts b/src/utilities/common/__tests__/stripTypename.ts --- a/src/utilities/common/__tests__/stripTypename.ts +++ b/src/utilities/common/__tests__/stripTypename.ts @@ -1,3 +1,4 @@ +import equal from '@wry/equality'; import { stripTypename } from '../stripTypename'; test('omits __typename from a shallow object', () => { @@ -58,3 +59,53 @@ test('returns primitives unchanged', () => { expect(stripTypename(null)).toBe(null); expect(stripTypename(undefined)).toBe(undefined); }); + +test('keeps __typename for paths allowed by the `keep` option', () => { + 
const variables = { + __typename: 'Foo', + bar: { + __typename: 'Bar', + aa: true, + }, + baz: { + __typename: 'Baz', + bb: true, + }, + deeply: { + __typename: 'Deeply', + nested: { + __typename: 'Nested', + value: 'value', + }, + }, + }; + + const result = stripTypename(variables, { + keep: (path) => { + if (equal(path, ['bar', '__typename'])) { + return true; + } + + if (equal(path, ['deeply'])) { + return stripTypename.BREAK; + } + }, + }); + + expect(result).toStrictEqual({ + bar: { + __typename: 'Bar', + aa: true, + }, + baz: { + bb: true, + }, + deeply: { + __typename: 'Deeply', + nested: { + __typename: 'Nested', + value: 'value', + }, + }, + }); +});
Auto strip `__typename` does not correctly handle JSON scalars ### Issue Description #10724 introduced the ability to automatically strip `__typename` fields from `variables` before a request is made to a server. While this works well for the majority of cases, the change is a bit over-eager as it will also strip `__typename` from JSON-related scalars. Since a JSON scalar allows you to pass any arbitrary data, we should be a good citizen and avoid stripping `__typename` in these cases. NOTE: JSON scalars do not have to be named `JSON`. A schema can define any arbitrarily named scalar type that represents a JSON blob (though `JSON` is the most common name for this type of scalar). We'll need a mechanism that allows users to opt-in/opt-out of this behavior for these scalars. There are two approaches we can consider: 1. Opt-out behavior In this approach, stripping the `__typename` would be considered the default behavior. Users would need to opt-out of this behavior by listing the types where `__typename` should be left alone. 2. Opt-in behavior On the flipside, we would consider this feature as _opt-in_ which means stripping the `__typename` does not happen automatically. Users would need to opt-in to this behavior by listing the types where the `__typename` should be removed. There is a good case to be made for either approach and I think both are valid. --- There is a scenario that adds a bit of a wrinkle. If you declare an `input` type where one of its fields is a JSON scalar, detecting the boundary at which `__typename` could be left alone cannot solely be done by inspecting the query document. For example, let's say I have this query: ```graphql mutation UpdateUser($user: UserInput!) { updateUser(user: $user) { id } } ``` And `UserInput` is defined as: ```graphql input UserInput { id: ID! config: JSON } ``` Here we'd want to strip any `__typename` on the top-level `user` variable, but leave anything inside `config` alone. We'll need to provide a mechanism for users to specify field paths as well as types to ensure this is handled correctly for all scenarios. ### Link to Reproduction n/a ### Reproduction Steps _No response_
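To make the wrinkle above concrete, here is a minimal, self-contained sketch of what a blanket strip does to a `JSON`-typed field. `stripAllTypenames` is a simplified stand-in for the stripping logic, not the library implementation, and the `config` payload is illustrative:

```ts
// Simplified stand-in for a blanket __typename strip (not the library code).
function stripAllTypenames(value: unknown): unknown {
  if (Array.isArray(value)) {
    return value.map((item) => stripAllTypenames(item));
  }
  if (value !== null && typeof value === "object") {
    const result: Record<string, unknown> = {};
    for (const [key, nested] of Object.entries(value)) {
      if (key === "__typename") continue; // dropped everywhere; no notion of a scalar boundary
      result[key] = stripAllTypenames(nested);
    }
    return result;
  }
  return value;
}

// Variables for the UpdateUser mutation above. `config` is the JSON scalar,
// so everything inside it (including __typename) is arbitrary client data.
const variables = {
  user: {
    __typename: "User", // fine to strip: not a field of UserInput
    id: "1",
    config: { __typename: "DashboardConfig", layout: "grid" }, // should be left alone
  },
};

console.log(stripAllTypenames(variables));
// => { user: { id: "1", config: { layout: "grid" } } }
// The __typename inside the JSON scalar is silently removed as well.
```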
After some internal discussion, I'm convinced we will not be able to ship this as an opt-out solution because we break backwards compatibility for anyone that relies upon `__typename` as part of a JSON scalar. `__typename` _is_ allowed when used as part of a JSON scalar and people may be using this solution as a workaround for the fact that GraphQL doesn't yet support [input unions](https://github.com/graphql/graphql-wg/blob/main/rfcs/InputUnion.md). Additionally, there are a few downsides to the approach in https://github.com/apollographql/apollo-client/pull/10724. 1. The change is limited to `HttpLink`, `BatchHttpLink` and `GraphQLWsLink`. If anyone is using a custom terminating link, or the older `WebSocketLink`, this functionality is missed. 2. This functionality needs to be duplicated in every terminating link that should include this functionality. This can lead to increased bundle size. 3. Users that don't need this functionality, either because they strip typenames themselves, or because they don't use data that includes a `__typename` in their variables, suffer from a larger bundle size since the underlying code cannot be tree-shaken away. --- Instead, the plan is to create a newly exported `ApolloLink` that will provide this functionality. For the sake of this discussion, I'll call it a `StripTypenameLink` (final name TBD). Shipping a custom link has several advantages that counter the downsides above: 1. We are no longer limited to our built-in terminating links to provide this functionality. You can simply drop a `StripTypenameLink` in your link chain and this will propagate to any terminating link in your app 2. We reduce duplication in our built-in terminating links since this functionality is included in a single place 3. Smaller bundle size for those that don't use the link, since this link can be tree-shaken away. 4. Configuration for this behavior is co-located with the functionality. ### Implementation The link would work by inspecting the incoming query document and comparing its variable declarations with the configuration passed to the link. If we match a variable type with a type defined in the config, this tells us we should leave the `__typename` alone (see below for more details on the configuration) As an example, if I have the following query: ```graphql mutation MyMutation($json: JSON!, $config: Config!, $input: UserInput!) { myMutation(json: $json, config: $config, input: $input) } # `JSON` and `Config` are both JSON-based scalars scalar JSON scalar Config # `UserInput` is an input type that could have a nested JSON scalar field input UserInput { id: ID! config: Config! anotherField: JSON! } ``` We would expect the following inputs/outputs ```ts // input const variables: MyMutationVariables = { // `json` is of type `JSON` based on the variable definition json: { __typename: 'Foo', foo: 'bar', baz: 'baz', nested: { __typename: 'Nested', foo: 'bar' } }, // `config` is of type `Config` based on the variable definition config: { __typename: 'Foo', id: '12345' }, // `input` is of type `UserInput` based on the variable definition input: { __typename: 'User', id: '1', config: { __typename: 'UserConfig', field: 'value' }, anotherField: { __typename: 'Widget', widget: 'foo' } } } // NOTE: API not final. 
See below on more detailed understanding of the API const link = new StripTypenameLink({ // purposely omit `Config` to strip __typename from its type exclude: [ 'JSON', ['UserInput', ['config']], ['UserInput', ['anotherField']], ] }); // variables sent to the server const finalVariables = { // __typename remains on all nested fields since its type is excluded from getting stripped json: { __typename: 'Foo', foo: 'bar', baz: 'baz', nested: { __typename: 'Nested', foo: 'bar' } }, // __typename is stripped since its type is not excluded from the stripped types config: { id: '12345' }, // __typename is stripped from top-level `input`, but remains on `config` and `anotherField` // since these property paths are excluded from getting stripped input: { id: '1', config: { __typename: 'UserConfig', field: 'value' }, anotherField: { __typename: 'Widget', widget: 'foo' } } } ``` ### API Design We want to allow for simplicity as much as possible. This link should be easy to drop-in with minimal configuration as possible to reduce friction. #### Goals/Requirements * Make it easy to opt-in to stripping `__typename` from everything. Gauging by the number of user-provided solutions in https://github.com/apollographql/apollo-feature-requests/issues/6 that don't handle JSON scalars in any meaningful way, this feels like the most common use case. * Allow a user to define types that should leave `__typename` in-tact. These can be detected from the variable definitions in the query document NOTE: We can't assume all JSON scalars are named `JSON`, since a schema could define a more semantically meaningful name (e.g. `Config`), hence why we need this to be configurable. * Allow a user to define a property path that should leave `__typename` in-tact. If a JSON scalar is a key in a deeply nested input object, simply trying to detect the input type is not enough (see the description in this issue for an example of this problem). I propose the following behaviors: 1. Optimize for the common case, which is to strip `__typename` from everything. JSON scalars, while useful, are typically seen as an anti-pattern since they discourage strong types. ```ts // final name/import TBD import { StripTypenameLink } from '@apollo/client/link/stripTypename'; // pass nothing to strip everything by default const stripTypenameLink = new StripTypenameLink(); ``` 2. Allow users to _exclude_ types that should not adhere to this behavior, instead of forcing the user to include types that should adhere to this behavior. Explicit types should be the norm. JSON scalars should be the exception. By optimizing for the majority case, this should allow for a more minimal configuration. ```ts import { StripTypenameLink } from '@apollo/link/stripTypename'; const link = new StripTypenameLink({ // simple types exclude: ['JSON', 'Config', 'SomeOtherJSONScalarType'], // types with paths exclude: [ // Keep `__typename` for the `config` property of a `UserInput` type, but strip all others ['UserInput', ['config']], // Keep `__typename` on both `anotherConfig` and `config` of `UserInput` since `UserInput` is defined more than once ['UserInput', ['anotherConfig']], // Keep __typename for a deeply nested field ['DashboardInput', ['deeply', 'nested', 'field']] ], // can combine simple types and types with property paths exclude: ['JSON', ['UserInput', ['config']], ['UserInput', ['anotherConfig']]] }); ``` > NOTE: I'm not in love with this API as the array nesting can get hard to read quickly. 
This should offer a good starting point for discussion at minimum. #### Alternate API ideas for excluding types/property paths - Split types/paths config between 2 properties ```ts new StripTypenameLink({ excludeTypes: ['JSON', 'Config'], excludePaths: { UserInput: [['config'], ['anotherConfig']], DashboardInput: ['widgets', 'config'] } }); ``` - Use named `path` and `paths` options for the second array item. These are mutually exclusive ```ts new StripTypenameLink({ exclude: [ // single path on a type ['UserInput', { path: ['config'] } // multiple paths on a type ['UserInput', { paths: [['config', ['widgets', 'config']] }] ] }); ``` - Use `keyArgs`-style config. Use a nested array to denote a nested field ```ts new StripTypenameLink({ exclude: [ // Keep __typename on all fields inside JSON types 'JSON', // `config` and `widgets` are separate properties on the same object. `widgetConfig` is nested inside `widgets` ['UserInput', ['config', 'widgets', ['widgetConfig']]] ] }); ``` - String-delimited paths ```ts new StripTypenameLink({ exclude: [ // single path ['UserInput', ['config']], // multiple path ['DashboardInput', ['deeply.nested.field', 'another.field']] ] }); ``` Any combination of these APIs are valid. Suggestions welcome for alternatives! What about ```js new StripTypenameLink({ exclude: { UserInput: { config: true }, DashboardInput: { deeply: { nested: { field: true }}, another: { field: true } } } }); ``` ? Or, if we generally think about a Link that can be used to transform outgoing data: ```js new TransformDataLink([ keepTypename('UserInput', 'config'), keepTypename('DashboardInput', 'deeply', 'nested', 'field'), keepTypename('DashboardInput', 'another', 'field'), stripTypename(), ]) ``` If users had a lot of fields, they could abstract over `strip` to make that work best for them. (Note: When using libraries, people like to forget that abstracting is a thing, so we would need to show at least one docs example, that yes, JavaScript is a programming language and you can write your own abstraction) @phryneas those are interesting ideas! > ```ts > new StripTypenameLink({ > exclude: { > UserInput: { config: true }, > DashboardInput: { > deeply: { nested: { field: true }}, > another: { field: true } > } > } > }); > ``` I love the readability of this example. It's much easier to parse than something like this: `['JSON', ['UserInput', ['config']], ['UserInput', ['anotherConfig']]]` The only thing I don't love about this is the fact that `true` is the only value that makes sense here, so at a glance, people might assume there is an equivalent `false`. Perhaps we could just make the leaf nodes an array in that case? ```ts new StripTypenameLink({ exclude: { UserInput: ['config'], DashboardInput: { deeply: { nested: ['field'] }, another: ['field'] } } }); ``` This gets rid of the `true`, but could get weird in other cases, where yours would handle this nicely: ```ts new StripTypenameLink({ exclude: { DashboardInput: { // How I envision a leaf field + another nested field at the same level deeply: ['another', { nested: ['field'] }], } } }); ``` --- > ```ts > new TransformDataLink([ > keepTypename('UserInput', 'config'), > keepTypename('DashboardInput', 'deeply', 'nested', 'field'), > keepTypename('DashboardInput', 'another', 'field'), > stripTypename(), > ]) > ``` This is an interesting idea, though in some ways it feels similar to `ApolloLink` already: `ApolloLink.from([link1, link2])`. In this case, what "data" would you see this operating on? 
Would this be strictly outgoing data (query, variables, context, etc?)? Would you limit this to just a subset of the `operation`? Huh, that's an interesting edge case. My original idea would also not be able to handle this I believe 🤔 ```js new StripTypenameLink({ exclude: { UserInput: { config: true }, DashboardInput: { // how to keep `__typename` here? deeply: { nested: { field: true } }, another: { field: true }, }, }, }); ``` Another version that could do this, and is a bit clearer, although it is more code, could look like this: ```js import { StripTypenameLink, keepTypename } from "..."; new StripTypenameLink({ exclude: { UserInput: { config: { [keepTypename]: true, }, }, DashboardInput: { // this works now [keepTypename]: true, deeply: { nested: { field: { [keepTypename]: true, }, }, }, another: { field: { [keepTypename]: true, }, }, }, }, }); ``` And then we could get it shorter again, by mixing both approaches and allowing a short notation in the form of `fieldName: keepTypename`: ```js import { StripTypenameLink, keepTypename } from "..."; new StripTypenameLink({ exclude: { // keepTypename makes more sense than `true` here UserInput: { config: keepTypename }, DashboardInput: { // now this works [keepTypename]: true, deeply: { nested: { field: keepTypename } }, another: { field: keepTypename }, }, }, }); ``` > Would you limit this to just a subset of the operation? Honestly, I have no idea - I just had the thought that we could explore in that direction and didn't want to immediately dismiss it, so I jotted it down. Your interpretations here are as good as mine ^^ I have to admit that generally, I'm not a big fan of the "nested array" variants - while they are shorter, they are very hard to explain, and they might be very easy to get wrong. > I have to admit that generally, I'm not a big fan of the "nested array" variants - while they are shorter, they are very hard to explain, and they might be very easy to get wrong. I would agree. It's fine for small things, but can get very difficult to read the more nested arrays you have. I do like the idea of an object-like syntax for readability, we just have to settle on the way to denote the leaves of the field paths. Another potential option: ```ts new StripTypenameLink({ excludeTypes: ['JSON'], exclude: { UserInput: ` config { widgets { config } } ` } }); ``` Another option: ```ts new StripTypenameLink({ exclude: [ 'JSON', 'UserInput.config.widgets.config', 'UserInput.config', ] }) ``` We could think about supporting regex: ```ts new StripTypenameLink({ exclude: [ 'JSON', /.*Input/ ] }) ```
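For comparison with the options sketched above, the accompanying patch settles on a nested `except` map with a `KEEP` sentinel, exported from the new `@apollo/client/link/remove-typename` entry point shown in the exports test. A hedged sketch of wiring it into a client; the endpoint URL and the `UserInput.config` path are illustrative, not part of the patch:

```ts
import { ApolloClient, ApolloLink, HttpLink, InMemoryCache } from "@apollo/client";
import { removeTypenameFromVariables, KEEP } from "@apollo/client/link/remove-typename";

// Strip __typename from all variables, except inside values typed as the
// JSON scalar and inside the `config` field of UserInput variables.
const removeTypenameLink = removeTypenameFromVariables({
  except: {
    JSON: KEEP,
    UserInput: { config: KEEP },
  },
});

const client = new ApolloClient({
  cache: new InMemoryCache(),
  // Placing the link before the terminating link means any transport
  // (HTTP, batch, subscriptions) receives the already-cleaned variables.
  link: ApolloLink.from([
    removeTypenameLink,
    new HttpLink({ uri: "https://example.com/graphql" }),
  ]),
});
```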
2023-05-10T03:06:20Z
3.8
apollographql/apollo-client
10,937
apollographql__apollo-client-10937
[ "10935" ]
f796ce1ac72f31a951a1d0f0b78d19dd039a6398
diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -17,6 +17,7 @@ import type { import { addTypenameToDocument, isReference, + DocumentTransform, } from '../../utilities'; import type { InMemoryCacheConfig, @@ -29,7 +30,6 @@ import { makeVar, forgetCache, recallCache } from './reactiveVars'; import { Policies } from './policies'; import { hasOwn, normalizeConfig, shouldCanonizeResults } from './helpers'; import { canonicalStringify } from './object-canon'; -import { DocumentTransform } from '../../core'; import type { OperationVariables } from '../../core'; type BroadcastOptions = Pick< diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -5,12 +5,11 @@ import type { ExecutionResult, DocumentNode } from 'graphql'; import type { FetchResult, GraphQLRequest} from '../link/core'; import { ApolloLink, execute } from '../link/core'; import type { ApolloCache, DataProxy, Reference } from '../cache'; -import type { Observable } from '../utilities'; +import type { DocumentTransform, Observable } from '../utilities'; import { version } from '../version'; import type { UriFunction } from '../link/http'; import { HttpLink } from '../link/http'; -import type { DocumentTransform } from './DocumentTransform'; import { QueryManager } from './QueryManager'; import type { ObservableQuery } from './ObservableQuery'; diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -34,6 +34,7 @@ import { makeUniqueId, isDocumentNode, isNonNullObject, + DocumentTransform, } from '../utilities'; import { mergeIncrementalData } from '../utilities/common/incrementalResult'; import { ApolloError, isApolloError, graphQLResultHasProtocolErrors } from '../errors'; @@ -57,7 +58,6 @@ import type { InternalRefetchQueriesResult, InternalRefetchQueriesMap, } from './types'; -import { DocumentTransform } from './DocumentTransform'; import { LocalState } from './LocalState'; import type { diff --git a/src/core/index.ts b/src/core/index.ts --- a/src/core/index.ts +++ b/src/core/index.ts @@ -31,11 +31,6 @@ export { FragmentMatcher, } from './LocalState'; export { isApolloError, ApolloError } from '../errors'; -export { - DocumentTransform, - DocumentTransformCacheKey -} from './DocumentTransform'; - /* Cache */ export { @@ -77,6 +72,8 @@ export { /* Utilities */ export { + DocumentTransform, + DocumentTransformCacheKey, Observable, Observer, ObservableSubscription, diff --git a/src/core/DocumentTransform.ts b/src/utilities/graphql/DocumentTransform.ts similarity index 96% rename from src/core/DocumentTransform.ts rename to src/utilities/graphql/DocumentTransform.ts --- a/src/core/DocumentTransform.ts +++ b/src/utilities/graphql/DocumentTransform.ts @@ -1,6 +1,7 @@ import { Trie } from '@wry/trie'; -import { canUseWeakMap, canUseWeakSet, checkDocument } from '../utilities'; -import { invariant } from '../utilities/globals'; +import { canUseWeakMap, canUseWeakSet } from '../common/canUse'; +import { checkDocument } from './getFromAST'; +import { invariant } from '../globals'; import type { DocumentNode } from 'graphql'; export type DocumentTransformCacheKey = ReadonlyArray<unknown>; diff --git a/src/utilities/index.ts b/src/utilities/index.ts --- a/src/utilities/index.ts +++ b/src/utilities/index.ts @@ -12,6 +12,11 @@ export { getInclusionDirectives, } from 
'./graphql/directives'; +export { + DocumentTransform, + DocumentTransformCacheKey +} from './graphql/DocumentTransform'; + export { FragmentMap, FragmentMapFunction,
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -383,6 +383,7 @@ Array [ "Concast", "DEV", "DeepMerger", + "DocumentTransform", "Observable", "addTypenameToDocument", "argumentsObjectFromField", diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -4,7 +4,6 @@ import gql from 'graphql-tag'; import { ApolloClient, - DocumentTransform, FetchPolicy, WatchQueryFetchPolicy, QueryOptions, @@ -14,7 +13,7 @@ import { NetworkStatus, } from '../core'; -import { Observable, ObservableSubscription, offsetLimitPagination, removeDirectivesFromDocument } from '../utilities'; +import { DocumentTransform, Observable, ObservableSubscription, offsetLimitPagination, removeDirectivesFromDocument } from '../utilities'; import { ApolloLink } from '../link/core'; import { createFragmentRegistry, InMemoryCache, makeVar, PossibleTypesMap } from '../cache'; import { ApolloError } from '../errors'; diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -4,14 +4,13 @@ import { TypedDocumentNode } from "@graphql-typed-document-node/core"; import { ApolloClient, - DocumentTransform, NetworkStatus, WatchQueryFetchPolicy } from "../../core"; import { ObservableQuery } from "../ObservableQuery"; import { QueryManager } from "../QueryManager"; -import { Observable, removeDirectivesFromDocument } from "../../utilities"; +import { DocumentTransform, Observable, removeDirectivesFromDocument } from "../../utilities"; import { ApolloLink, FetchResult } from "../../link/core"; import { InMemoryCache, NormalizedCacheObject } from "../../cache"; import { ApolloError } from "../../errors"; diff --git a/src/core/__tests__/DocumentTransform.ts b/src/utilities/graphql/__tests__/DocumentTransform.ts similarity index 99% rename from src/core/__tests__/DocumentTransform.ts rename to src/utilities/graphql/__tests__/DocumentTransform.ts --- a/src/core/__tests__/DocumentTransform.ts +++ b/src/utilities/graphql/__tests__/DocumentTransform.ts @@ -1,9 +1,6 @@ import { DocumentTransform } from '../DocumentTransform'; -import { - isMutationOperation, - isQueryOperation, - removeDirectivesFromDocument, -} from '../../utilities'; +import { isMutationOperation, isQueryOperation } from '../operations'; +import { removeDirectivesFromDocument } from '../transform'; import { gql } from 'graphql-tag'; import { DocumentNode, visit, Kind } from 'graphql';
Fix circular dependency with `DocumentTransform` import in `InMemoryCache` ### Issue Description When testing out the first beta release, I tried to run `@apollo/client` in a CLI environment and it blows up trying to instantiate `InMemoryCache`. When logging the import, it appears everything from the `cache` module is set as `undefined`. Through trial and error, I have narrowed it down to #10509 which introduced the new custom document transform feature. Re-reviewing that PR, I noted that `InMemoryCache` [imports](https://github.com/apollographql/apollo-client/pull/10509/files#diff-b62f0293ff7b2f7bd5c12e8641f0eab4b630696d6eba70f6d55f2ec444bdccafR32) the `DocumentTransform` from `core`, yet core exports modules from [`cache`](https://github.com/apollographql/apollo-client/blob/f796ce1ac72f31a951a1d0f0b78d19dd039a6398/src/core/index.ts#L41-L61) resulting in a circular dependency. While some bundlers seem to be able to resolve circular dependencies, running this in the CLI environment fails entirely. Since `DocumentTransform` is used both by [core](https://github.com/apollographql/apollo-client/pull/10509/files#diff-5fb8ad16cfbcb51bc035d32b5963734561c440b48417a1c2dbf80a16098be67bR145-R149) and by the [cache](https://github.com/apollographql/apollo-client/pull/10509/files#diff-b62f0293ff7b2f7bd5c12e8641f0eab4b630696d6eba70f6d55f2ec444bdccafR51), we should move that module to a neutral location to avoid the circular dependency. ### Link to Reproduction https://github.com/apollographql/apollo-utils/pull/287 ### Reproduction Steps Use the new [`generate-persisted-query-manifest`](https://github.com/apollographql/apollo-utils/pull/287/files#diff-2e820a95431b3426c91e69100ea3eaac46e88669f0a0fa3aa53d8e0c78fbd229) tool and notice that it blows up with the error: ``` TypeError: core_1.InMemoryCache is not a constructor at generatePersistedQueryManifest (/node_modules/@apollo/generate-persisted-query-manifest/dist/index.js:66:16) ```
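A minimal CommonJS sketch of this failure mode, using illustrative file names rather than the real Apollo Client sources: a module in a require cycle observes the other module's exports before they are populated, so values destructured at import time stay `undefined`:

```js
// Illustrative layout (not the real Apollo Client files). Save as three files
// and run `node index.js` to see how a require cycle leaves an import undefined.

// --- cache.js ---
// Requiring './core' here runs while core.js is still initializing, so the
// destructured DocumentTransform is captured as undefined.
const { DocumentTransform } = require("./core");

class InMemoryCache {
  constructor() {
    // TypeError: DocumentTransform is not a constructor
    this.transform = new DocumentTransform();
  }
}
module.exports = { InMemoryCache };

// --- core.js ---
// Defines DocumentTransform but also re-exports the cache module, closing the cycle.
const cache = require("./cache");
class DocumentTransform {}
module.exports = { DocumentTransform, ...cache };

// --- index.js ---
// A CLI entry point: requiring core first triggers the cycle above.
const { InMemoryCache } = require("./core");
new InMemoryCache();
```

Moving the shared `DocumentTransform` module into `utilities`, as the patch does, breaks the cycle: both `core` and `cache` then depend on a module that depends on neither of them.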
2023-05-31T16:05:19Z
3.8
apollographql/apollo-client
10,809
apollographql__apollo-client-10809
[ "10676" ]
a8e555a9a9549ec8deea02901810c97c9ce19bcb
diff --git a/config/bundlesize.ts b/config/bundlesize.ts --- a/config/bundlesize.ts +++ b/config/bundlesize.ts @@ -3,7 +3,7 @@ import { join } from "path"; import { gzipSync } from "zlib"; import bytes from "bytes"; -const gzipBundleByteLengthLimit = bytes("35.00KB"); +const gzipBundleByteLengthLimit = bytes("35.04KB"); const minFile = join("dist", "apollo-client.min.cjs"); const minPath = join(__dirname, "..", minFile); const gzipByteLen = gzipSync(readFileSync(minPath)).byteLength; diff --git a/src/react/cache/QuerySubscription.ts b/src/react/cache/QuerySubscription.ts --- a/src/react/cache/QuerySubscription.ts +++ b/src/react/cache/QuerySubscription.ts @@ -1,61 +1,45 @@ import { ApolloError, ApolloQueryResult, - DocumentNode, NetworkStatus, ObservableQuery, OperationVariables, } from '../../core'; import { isNetworkRequestSettled } from '../../core'; import { - Concast, ObservableSubscription, - hasAnyDirectives, + createFulfilledPromise, + createRejectedPromise, } from '../../utilities'; -import { invariant } from '../../utilities/globals'; -import { wrap } from 'optimism'; -type Listener<TData> = (result: ApolloQueryResult<TData>) => void; +type Listener = () => void; type FetchMoreOptions<TData> = Parameters< ObservableQuery<TData>['fetchMore'] >[0]; -function wrapWithCustomPromise<TData>( - concast: Concast<ApolloQueryResult<TData>> -) { - return new Promise<ApolloQueryResult<TData>>((resolve, reject) => { - // Unlike `concast.promise`, we want to resolve the promise on the initial - // chunk of the deferred query. This allows the component to unsuspend - // when we get the initial set of data, rather than waiting until all - // chunks have been loaded. - const subscription = concast.subscribe({ - next: (value) => { - resolve(value); - subscription.unsubscribe(); - }, - error: reject, - }); - }); -} - -const isMultipartQuery = wrap((query: DocumentNode) => { - return hasAnyDirectives(['defer', 'stream'], query); -}); - interface QuerySubscriptionOptions { onDispose?: () => void; autoDisposeTimeoutMs?: number; } -export class QuerySubscription<TData = any> { +export class QuerySubscription<TData = unknown> { public result: ApolloQueryResult<TData>; - public promise: Promise<ApolloQueryResult<TData>>; public readonly observable: ObservableQuery<TData>; + public promises: { + main: Promise<ApolloQueryResult<TData>>; + network?: Promise<ApolloQueryResult<TData>>; + }; + private subscription: ObservableSubscription; - private listeners = new Set<Listener<TData>>(); + private listeners = new Set<Listener>(); private autoDisposeTimeoutId: NodeJS.Timeout; + private initialized = false; + private refetching = false; + + private resolve: (result: ApolloQueryResult<TData>) => void; + private reject: (error: unknown) => void; constructor( observable: ObservableQuery<TData>, @@ -66,32 +50,35 @@ export class QuerySubscription<TData = any> { this.handleError = this.handleError.bind(this); this.dispose = this.dispose.bind(this); this.observable = observable; - this.result = observable.getCurrentResult(); + this.result = observable.getCurrentResult(false); if (options.onDispose) { this.onDispose = options.onDispose; } + if ( + isNetworkRequestSettled(this.result.networkStatus) || + (this.result.data && + (!this.result.partial || this.observable.options.returnPartialData)) + ) { + this.promises = { main: createFulfilledPromise(this.result) }; + this.initialized = true; + this.refetching = false; + } + this.subscription = observable.subscribe({ next: this.handleNext, error: this.handleError, }); - // 
This error should never happen since the `.subscribe` call above - // will ensure a concast is set on the observable via the `reobserve` - // call. Unless something is going horribly wrong and completely messing - // around with the internals of the observable, there should always be a - // concast after subscribing. - invariant( - observable['concast'], - 'Unexpected error: A concast was not found on the observable.' - ); - - const concast = observable['concast']; - - this.promise = isMultipartQuery(observable.query) - ? wrapWithCustomPromise(concast) - : concast.promise; + if (!this.promises) { + this.promises = { + main: new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }), + }; + } // Start a timer that will automatically dispose of the query if the // suspended resource does not use this subscription in the given time. This @@ -103,7 +90,7 @@ export class QuerySubscription<TData = any> { ); } - listen(listener: Listener<TData>) { + listen(listener: Listener) { // As soon as the component listens for updates, we know it has finished // suspending and is ready to receive updates, so we can remove the auto // dispose timer. @@ -117,15 +104,21 @@ export class QuerySubscription<TData = any> { } refetch(variables: OperationVariables | undefined) { - this.promise = this.observable.refetch(variables); + this.refetching = true; - return this.promise; + const promise = this.observable.refetch(variables); + + this.promises.network = promise; + + return promise; } fetchMore(options: FetchMoreOptions<TData>) { - this.promise = this.observable.fetchMore<TData>(options); + const promise = this.observable.fetchMore<TData>(options); - return this.promise; + this.promises.network = promise; + + return promise; } dispose() { @@ -138,19 +131,27 @@ export class QuerySubscription<TData = any> { } private handleNext(result: ApolloQueryResult<TData>) { + if (!this.initialized) { + this.initialized = true; + this.result = result; + this.resolve(result); + return; + } + + if (result.data === this.result.data) { + return; + } + // If we encounter an error with the new result after we have successfully - // fetched a previous result, we should set the new result data to the last - // successful result. - if ( - isNetworkRequestSettled(result.networkStatus) && - this.result.data && - result.data === void 0 - ) { + // fetched a previous result, set the new result data to the last successful + // result. + if (this.result.data && result.data === void 0) { result.data = this.result.data; } this.result = result; - this.deliver(result); + this.promises.main = createFulfilledPromise(result); + this.deliver(); } private handleError(error: ApolloError) { @@ -161,10 +162,22 @@ export class QuerySubscription<TData = any> { }; this.result = result; - this.deliver(result); + + if (!this.initialized || this.refetching) { + this.initialized = true; + this.refetching = false; + this.reject(error); + return; + } + + this.result = result; + this.promises.main = result.data + ? 
createFulfilledPromise(result) + : createRejectedPromise(result); + this.deliver(); } - private deliver(result: ApolloQueryResult<TData>) { - this.listeners.forEach((listener) => listener(result)); + private deliver() { + this.listeners.forEach((listener) => listener()); } } diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts --- a/src/react/hooks/useSuspenseQuery.ts +++ b/src/react/hooks/useSuspenseQuery.ts @@ -1,6 +1,5 @@ import { invariant, __DEV__ } from '../../utilities/globals'; -import { equal } from '@wry/equality'; -import { useRef, useCallback, useMemo } from 'react'; +import { useRef, useCallback, useMemo, useEffect, useState } from 'react'; import { ApolloClient, ApolloError, @@ -17,14 +16,12 @@ import { DeepPartial, isNonEmptyArray } from '../../utilities'; import { useApolloClient } from './useApolloClient'; import { DocumentType, verifyDocumentType } from '../parser'; import { - SuspenseQueryHookFetchPolicy, SuspenseQueryHookOptions, ObservableQueryFields, NoInfer, } from '../types/types'; import { useDeepMemo, useStrictModeSafeCleanupEffect, __use } from './internal'; import { useSuspenseCache } from './useSuspenseCache'; -import { useSyncExternalStore } from './useSyncExternalStore'; import { QuerySubscription } from '../cache/QuerySubscription'; import { canonicalStringify } from '../../cache'; @@ -63,6 +60,8 @@ type SubscribeToMoreFunction< TVariables extends OperationVariables > = ObservableQueryFields<TData, TVariables>['subscribeToMore']; +type Version = 'main' | 'network'; + export function useSuspenseQuery_experimental< TData, TVariables extends OperationVariables, @@ -131,14 +130,13 @@ export function useSuspenseQuery_experimental< NoInfer<TVariables> > = Object.create(null) ): UseSuspenseQueryResult<TData, TVariables> { - const didPreviouslySuspend = useRef(false); const client = useApolloClient(options.client); const suspenseCache = useSuspenseCache(options.suspenseCache); const watchQueryOptions = useWatchQueryOptions({ query, options }); - const { returnPartialData = false, variables } = watchQueryOptions; - const { suspensePolicy = 'always', queryKey = [] } = options; - const shouldSuspend = - suspensePolicy === 'always' || !didPreviouslySuspend.current; + const { variables } = watchQueryOptions; + const { queryKey = [] } = options; + + const [version, setVersion] = usePromiseVersion(); const cacheKey = ( [client, query, canonicalStringify(variables)] as any[] @@ -148,59 +146,32 @@ export function useSuspenseQuery_experimental< client.watchQuery(watchQueryOptions) ); - const dispose = useTrackedSubscriptions(subscription); - - useStrictModeSafeCleanupEffect(dispose); - - let result = useSyncExternalStore( - subscription.listen, - () => subscription.result, - () => subscription.result - ); - - const previousVariables = useRef(variables); - const previousData = useRef(result.data); - - if (!equal(variables, previousVariables.current)) { - if (result.networkStatus !== NetworkStatus.ready) { - // Since we now create separate ObservableQuery instances per unique - // query + variables combination, we need to manually insert the previous - // data into the returned result to mimic the behavior when changing - // variables from a single ObservableQuery, where the previous result was - // held onto until the request was finished. 
- result = { - ...result, - data: previousData.current, - networkStatus: NetworkStatus.setVariables, - }; - } - - previousVariables.current = variables; - previousData.current = result.data; - } + useTrackedSubscriptions(subscription); - if ( - result.networkStatus === NetworkStatus.error || - (shouldSuspend && - !shouldUseCachedResult(subscription.result, { - returnPartialData, - fetchPolicy: options.fetchPolicy, - })) - ) { - // Intentionally ignore the result returned from __use since we want to - // observe results from the observable instead of the the promise. - __use(subscription.promise); - } + useEffect(() => { + return subscription.listen(() => { + setVersion('main'); + }); + }, [subscription]); - didPreviouslySuspend.current = true; + const promise = subscription.promises[version] || subscription.promises.main; + const result = __use(promise); const fetchMore: FetchMoreFunction<TData, TVariables> = useCallback( - (options) => subscription.fetchMore(options), + (options) => { + const promise = subscription.fetchMore(options); + setVersion('network'); + return promise; + }, [subscription] ); const refetch: RefetchFunction<TData, TVariables> = useCallback( - (variables) => subscription.refetch(variables), + (variables) => { + const promise = subscription.refetch(variables); + setVersion('network'); + return promise; + }, [subscription] ); @@ -269,9 +240,24 @@ function useTrackedSubscriptions(subscription: QuerySubscription) { trackedSubscriptions.current.add(subscription); - return function dispose() { + useStrictModeSafeCleanupEffect(() => { trackedSubscriptions.current.forEach((sub) => sub.dispose()); - }; + }); +} + +function usePromiseVersion() { + // Use an object as state to force React to re-render when we publish an + // update to the same version (such as sequential cache updates). + const [{ version }, setState] = useState<{ version: Version }>({ + version: 'main', + }); + + const setVersion = useCallback( + (version: Version) => setState({ version }), + [] + ); + + return [version, setVersion] as const; } interface UseWatchQueryOptionsHookOptions< @@ -293,7 +279,7 @@ function useWatchQueryOptions<TData, TVariables extends OperationVariables>({ () => ({ ...options, query, - notifyOnNetworkStatusChange: true, + notifyOnNetworkStatusChange: false, nextFetchPolicy: void 0, }), [options, query] @@ -305,34 +291,3 @@ function useWatchQueryOptions<TData, TVariables extends OperationVariables>({ return watchQueryOptions; } - -function shouldUseCachedResult( - result: ApolloQueryResult<unknown>, - { - returnPartialData, - fetchPolicy, - }: { - returnPartialData: boolean | undefined; - fetchPolicy: SuspenseQueryHookFetchPolicy | undefined; - } -) { - if ( - result.networkStatus === NetworkStatus.refetch || - result.networkStatus === NetworkStatus.fetchMore || - result.networkStatus === NetworkStatus.error - ) { - return false; - } - - switch (fetchPolicy) { - // The default fetch policy is cache-first, so we can treat undefined as - // such. - case void 0: - case 'cache-first': - case 'cache-and-network': { - return Boolean(result.data && (!result.partial || returnPartialData)); - } - default: - return false; - } -} diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -100,15 +100,6 @@ export interface LazyQueryHookExecOptions< query?: DocumentNode | TypedDocumentNode<TData, TVariables>; } -/** - * suspensePolicy determines how suspense behaves for a refetch. 
The options are: - * - always (default): Re-suspend a component when a refetch occurs - * - initial: Only suspend on the first fetch - */ -export type SuspensePolicy = - | 'always' - | 'initial' - export type SuspenseQueryHookFetchPolicy = Extract< WatchQueryFetchPolicy, | 'cache-first' @@ -131,7 +122,6 @@ export interface SuspenseQueryHookOptions< | 'refetchWritePolicy' > { fetchPolicy?: SuspenseQueryHookFetchPolicy; - suspensePolicy?: SuspensePolicy; suspenseCache?: SuspenseCache; queryKey?: string | number | any[]; } diff --git a/src/utilities/index.ts b/src/utilities/index.ts --- a/src/utilities/index.ts +++ b/src/utilities/index.ts @@ -83,6 +83,8 @@ export { export { isStatefulPromise, + createFulfilledPromise, + createRejectedPromise, wrapPromiseWithState, } from './promises/decoration'; diff --git a/src/utilities/promises/decoration.ts b/src/utilities/promises/decoration.ts --- a/src/utilities/promises/decoration.ts +++ b/src/utilities/promises/decoration.ts @@ -17,6 +17,24 @@ export type PromiseWithState<TValue> = | FulfilledPromise<TValue> | RejectedPromise<TValue>; +export function createFulfilledPromise<TValue>(value: TValue) { + const promise = Promise.resolve(value) as FulfilledPromise<TValue>; + + promise.status = 'fulfilled'; + promise.value = value; + + return promise; +} + +export function createRejectedPromise<TValue = unknown>(reason: unknown) { + const promise = Promise.reject(reason) as RejectedPromise<TValue>; + + promise.status = 'rejected'; + promise.reason = reason; + + return promise; +} + export function isStatefulPromise<TValue>( promise: Promise<TValue> ): promise is PromiseWithState<TValue> {
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -376,6 +376,8 @@ Array [ "compact", "concatPagination", "createFragmentMap", + "createFulfilledPromise", + "createRejectedPromise", "fixObservableSubclass", "getDefaultValues", "getDirectiveNames", diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -35,6 +35,7 @@ import { compact, concatPagination, getMainDefinition, + offsetLimitPagination, } from '../../../utilities'; import { MockedProvider, @@ -2435,72 +2436,6 @@ describe('useSuspenseQuery', () => { ]); }); - it.each<SuspenseQueryHookFetchPolicy>([ - 'cache-first', - 'network-only', - 'no-cache', - 'cache-and-network', - ])( - 'returns previous data when changing variables and using a "%s" with an "initial" suspense policy', - async (fetchPolicy) => { - const { query, mocks } = useVariablesQueryCase(); - - const { result, rerender, renders } = renderSuspenseHook( - ({ id }) => - useSuspenseQuery(query, { - fetchPolicy, - suspensePolicy: 'initial', - variables: { id }, - }), - { mocks, initialProps: { id: '1' } } - ); - - expect(renders.suspenseCount).toBe(1); - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - rerender({ id: '2' }); - - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[1].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - // Renders: - // 1. Initiate fetch and suspend - // 2. Unsuspend and return results from initial fetch - // 3. Change variables - // 4. 
Unsuspend and return results from refetch - expect(renders.count).toBe(4); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toMatchObject([ - { - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - ...mocks[0].result, - networkStatus: NetworkStatus.setVariables, - error: undefined, - }, - { - ...mocks[1].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - } - ); - it.each<SuspenseQueryHookFetchPolicy>([ 'cache-first', 'network-only', @@ -3276,33 +3211,6 @@ describe('useSuspenseQuery', () => { consoleSpy.mockRestore(); }); - it('throws errors when suspensePolicy is set to initial', async () => { - const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); - - const { query, mocks } = useErrorCase({ - networkError: new Error('Could not fetch'), - }); - - const { renders } = renderSuspenseHook( - () => useSuspenseQuery(query, { suspensePolicy: 'initial' }), - { mocks } - ); - - await waitFor(() => expect(renders.errorCount).toBe(1)); - - expect(renders.errors.length).toBe(1); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toEqual([]); - - const [error] = renders.errors as ApolloError[]; - - expect(error).toBeInstanceOf(ApolloError); - expect(error.networkError).toEqual(new Error('Could not fetch')); - expect(error.graphQLErrors).toEqual([]); - - consoleSpy.mockRestore(); - }); - it('tears down subscription when throwing an error', async () => { const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); @@ -3382,66 +3290,6 @@ describe('useSuspenseQuery', () => { consoleSpy.mockRestore(); }); - it('tears down subscription when throwing an error on refetch when suspensePolicy is "initial"', async () => { - const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); - - const query = gql` - query UserQuery($id: String!) { - user(id: $id) { - id - name - } - } - `; - - const mocks = [ - { - request: { query, variables: { id: '1' } }, - result: { - data: { user: { id: '1', name: 'Captain Marvel' } }, - }, - }, - { - request: { query, variables: { id: '1' } }, - result: { - errors: [new GraphQLError('Something went wrong')], - }, - }, - ]; - - const client = new ApolloClient({ - cache: new InMemoryCache(), - link: new MockLink(mocks), - }); - - const { result, renders } = renderSuspenseHook( - () => - useSuspenseQuery(query, { - suspensePolicy: 'initial', - variables: { id: '1' }, - }), - { client } - ); - - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - act(() => { - result.current.refetch(); - }); - - await waitFor(() => expect(renders.errorCount).toBe(1)); - - expect(client.getObservableQueries().size).toBe(0); - - consoleSpy.mockRestore(); - }); - it('throws network errors when errorPolicy is set to "none"', async () => { const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); @@ -3991,86 +3839,6 @@ describe('useSuspenseQuery', () => { ]); }); - it('clears errors when changing variables and errorPolicy is set to "all" with an "initial" suspensePolicy', async () => { - const query = gql` - query UserQuery($id: String!) 
{ - user(id: $id) { - id - name - } - } - `; - - const graphQLErrors = [new GraphQLError('Could not fetch user 1')]; - - const mocks = [ - { - request: { query, variables: { id: '1' } }, - result: { - errors: graphQLErrors, - }, - }, - { - request: { query, variables: { id: '2' } }, - result: { - data: { user: { id: '2', name: 'Captain Marvel' } }, - }, - }, - ]; - - const { result, renders, rerender } = renderSuspenseHook( - ({ id }) => - useSuspenseQuery(query, { - errorPolicy: 'all', - suspensePolicy: 'initial', - variables: { id }, - }), - { mocks, initialProps: { id: '1' } } - ); - - const expectedError = new ApolloError({ graphQLErrors }); - - await waitFor(() => { - expect(result.current).toMatchObject({ - data: undefined, - networkStatus: NetworkStatus.error, - error: expectedError, - }); - }); - - rerender({ id: '2' }); - - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[1].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(4); - expect(renders.errorCount).toBe(0); - expect(renders.errors).toEqual([]); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toMatchObject([ - { - data: undefined, - networkStatus: NetworkStatus.error, - error: expectedError, - }, - { - data: undefined, - networkStatus: NetworkStatus.setVariables, - error: undefined, - }, - { - ...mocks[1].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - it('re-suspends when calling `refetch`', async () => { const query = gql` query UserQuery($id: String!) { @@ -4290,81 +4058,6 @@ describe('useSuspenseQuery', () => { ]); }); - it('does not suspend and returns previous data when calling `refetch` and using an "initial" suspensePolicy', async () => { - const query = gql` - query UserQuery($id: String!) 
{ - user(id: $id) { - id - name - } - } - `; - - const mocks = [ - { - request: { query, variables: { id: '1' } }, - result: { - data: { user: { id: '1', name: 'Captain Marvel' } }, - }, - }, - { - request: { query, variables: { id: '1' } }, - result: { - data: { user: { id: '1', name: 'Captain Marvel (updated)' } }, - }, - }, - ]; - - const { result, renders } = renderSuspenseHook( - () => - useSuspenseQuery(query, { - suspensePolicy: 'initial', - variables: { id: '1' }, - }), - { mocks } - ); - - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - act(() => { - result.current.refetch(); - }); - - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[1].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - expect(renders.count).toBe(4); - expect(renders.suspenseCount).toBe(1); - expect(renders.frames).toMatchObject([ - { - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - ...mocks[0].result, - networkStatus: NetworkStatus.refetch, - error: undefined, - }, - { - ...mocks[1].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - ]); - }); - it('throws errors when errors are returned after calling `refetch`', async () => { const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); @@ -4429,80 +4122,6 @@ describe('useSuspenseQuery', () => { consoleSpy.mockRestore(); }); - it('throws errors when errors are returned after calling `refetch` with suspensePolicy set to "initial"', async () => { - const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); - - const query = gql` - query UserQuery($id: String!) { - user(id: $id) { - id - name - } - } - `; - - const mocks = [ - { - request: { query, variables: { id: '1' } }, - result: { - data: { user: { id: '1', name: 'Captain Marvel' } }, - }, - }, - { - request: { query, variables: { id: '1' } }, - result: { - errors: [new GraphQLError('Something went wrong')], - }, - }, - ]; - - const { result, renders } = renderSuspenseHook( - () => - useSuspenseQuery(query, { - suspensePolicy: 'initial', - variables: { id: '1' }, - }), - { mocks } - ); - - await waitFor(() => { - expect(result.current).toMatchObject({ - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - act(() => { - result.current.refetch(); - }); - - await waitFor(() => { - expect(renders.errorCount).toBe(1); - }); - - expect(renders.errors).toEqual([ - new ApolloError({ - graphQLErrors: [new GraphQLError('Something went wrong')], - }), - ]); - - expect(renders.frames).toMatchObject([ - { - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - ...mocks[0].result, - networkStatus: NetworkStatus.refetch, - error: undefined, - }, - ]); - - consoleSpy.mockRestore(); - }); - it('ignores errors returned after calling `refetch` when errorPolicy is set to "ignore"', async () => { const query = gql` query UserQuery($id: String!) 
{ @@ -4686,86 +4305,38 @@ describe('useSuspenseQuery', () => { }); act(() => { - result.current.refetch(); - }); - - await waitFor(() => { - expect(result.current).toMatchObject({ - data: mocks[1].result.data, - networkStatus: NetworkStatus.error, - error: expectedError, - }); - }); - - expect(renders.errorCount).toBe(0); - expect(renders.errors).toEqual([]); - expect(renders.frames).toMatchObject([ - { - ...mocks[0].result, - networkStatus: NetworkStatus.ready, - error: undefined, - }, - { - data: mocks[1].result.data, - networkStatus: NetworkStatus.error, - error: expectedError, - }, - ]); - }); - - it('re-suspends when calling `fetchMore` with different variables', async () => { - const { data, query, link } = usePaginatedCase(); - - const { result, renders } = renderSuspenseHook( - () => useSuspenseQuery(query, { variables: { limit: 2 } }), - { link } - ); - - await waitFor(() => { - expect(result.current).toMatchObject({ - data: { letters: data.slice(0, 2) }, - networkStatus: NetworkStatus.ready, - error: undefined, - }); - }); - - act(() => { - result.current.fetchMore({ variables: { offset: 2 } }); + result.current.refetch(); }); await waitFor(() => { expect(result.current).toMatchObject({ - data: { letters: data.slice(2, 4) }, - networkStatus: NetworkStatus.ready, - error: undefined, + data: mocks[1].result.data, + networkStatus: NetworkStatus.error, + error: expectedError, }); }); - expect(renders.count).toBe(4); - expect(renders.suspenseCount).toBe(2); + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); expect(renders.frames).toMatchObject([ { - data: { letters: data.slice(0, 2) }, + ...mocks[0].result, networkStatus: NetworkStatus.ready, error: undefined, }, { - data: { letters: data.slice(2, 4) }, - networkStatus: NetworkStatus.ready, - error: undefined, + data: mocks[1].result.data, + networkStatus: NetworkStatus.error, + error: expectedError, }, ]); }); - it('does not re-suspend when calling `fetchMore` with different variables while using an "initial" suspense policy', async () => { + it('re-suspends when calling `fetchMore` with different variables', async () => { const { data, query, link } = usePaginatedCase(); const { result, renders } = renderSuspenseHook( - () => - useSuspenseQuery(query, { - suspensePolicy: 'initial', - variables: { limit: 2 }, - }), + () => useSuspenseQuery(query, { variables: { limit: 2 } }), { link } ); @@ -4790,18 +4361,13 @@ describe('useSuspenseQuery', () => { }); expect(renders.count).toBe(4); - expect(renders.suspenseCount).toBe(1); + expect(renders.suspenseCount).toBe(2); expect(renders.frames).toMatchObject([ { data: { letters: data.slice(0, 2) }, networkStatus: NetworkStatus.ready, error: undefined, }, - { - data: { letters: data.slice(0, 2) }, - networkStatus: NetworkStatus.fetchMore, - error: undefined, - }, { data: { letters: data.slice(2, 4) }, networkStatus: NetworkStatus.ready, @@ -7014,6 +6580,287 @@ describe('useSuspenseQuery', () => { }); }); + it('`refetch` works with startTransition to allow React to show stale UI until finished suspending', async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: '1' } }, + result: { + data: { todo: { id: '1', name: 'Clean room', completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: '1' } }, + result: { + data: { todo: { id: '1', name: 'Clean room', completed: true } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + const suspenseCache = new SuspenseCache(); + + function App() { + return ( + <ApolloProvider client={client} suspenseCache={suspenseCache}> + <Suspense fallback={<SuspenseFallback />}> + <Todo id="1" /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ id }: { id: string }) { + const { data, refetch } = useSuspenseQuery(query, { variables: { id } }); + const [isPending, startTransition] = React.useTransition(); + const { todo } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + refetch(); + }); + }} + > + Refresh + </button> + <div data-testid="todo" aria-busy={isPending}> + {todo.name} + {todo.completed && ' (completed)'} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText('Loading')).toBeInTheDocument(); + + expect(await screen.findByTestId('todo')).toBeInTheDocument(); + + const todo = screen.getByTestId('todo'); + const button = screen.getByText('Refresh'); + + expect(todo).toHaveTextContent('Clean room'); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText('Loading')).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todo).toHaveAttribute('aria-busy', 'true'); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo).toHaveTextContent('Clean room'); + + // Eventually we should see the updated todo content once its done + // suspending. + await waitFor(() => { + expect(todo).toHaveTextContent('Clean room (completed)'); + }); + }); + + it('`fetchMore` works with startTransition to allow React to show stale UI until finished suspending', async () => { + type Variables = { + offset: number; + }; + + interface Todo { + __typename: 'Todo'; + id: string; + name: string; + completed: boolean; + } + + interface Data { + todos: Todo[]; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodosQuery($offset: Int!) 
{ + todos(offset: $offset) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { offset: 0 } }, + result: { + data: { + todos: [ + { + __typename: 'Todo', + id: '1', + name: 'Clean room', + completed: false, + }, + ], + }, + }, + delay: 10, + }, + { + request: { query, variables: { offset: 1 } }, + result: { + data: { + todos: [ + { + __typename: 'Todo', + id: '2', + name: 'Take out trash', + completed: true, + }, + ], + }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + todos: offsetLimitPagination(), + }, + }, + }, + }), + }); + + const suspenseCache = new SuspenseCache(); + + function App() { + return ( + <ApolloProvider client={client} suspenseCache={suspenseCache}> + <Suspense fallback={<SuspenseFallback />}> + <Todos /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todos() { + const { data, fetchMore } = useSuspenseQuery(query, { + variables: { offset: 0 }, + }); + const [isPending, startTransition] = React.useTransition(); + const { todos } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + fetchMore({ variables: { offset: 1 } }); + }); + }} + > + Load more + </button> + <div data-testid="todos" aria-busy={isPending}> + {todos.map((todo) => ( + <div data-testid={`todo:${todo.id}`} key={todo.id}> + {todo.name} + {todo.completed && ' (completed)'} + </div> + ))} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText('Loading')).toBeInTheDocument(); + + expect(await screen.findByTestId('todos')).toBeInTheDocument(); + + const todos = screen.getByTestId('todos'); + const todo1 = screen.getByTestId('todo:1'); + const button = screen.getByText('Load more'); + + expect(todo1).toBeInTheDocument(); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText('Loading')).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todos).toHaveAttribute('aria-busy', 'true'); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo1).toHaveTextContent('Clean room'); + + // Eventually we should see the updated todos content once its done + // suspending. + await waitFor(() => { + expect(screen.getByTestId('todo:2')).toHaveTextContent( + 'Take out trash (completed)' + ); + expect(todo1).toHaveTextContent('Clean room'); + }); + }); + describe.skip('type tests', () => { it('returns unknown when TData cannot be inferred', () => { const query = gql`
`refetch` and `fetchMore` do not work correctly with `startTransition` ### Issue Description React allows users to show stale UI for a transition via its `startTransition` by detecting whether the state change suspends a component or not. If the component suspends because of an update made within `startTransition`, the loading state should not be shown; instead, React should keep showing the stale UI until the background render finishes suspending. When using the `refetch` or `fetchMore` functions returned from `useSuspenseQuery` in conjunction with `startTransition`, the component suspends and the suspense fallback is shown instead of the stale UI. Somewhere inside `useSuspenseQuery` we are missing an implementation detail that is preventing this from working as it should. If we are able to get these functions working correctly with `startTransition`, this should allow us to remove the `suspensePolicy` option, which was created to provide the same UX pattern that `startTransition` aims for. I'd rather let React handle this than add our own flavor. ### Link to Reproduction https://github.com/apollographql/apollo-client/blob/794d1824e125e971d95836250da6f43ef12026b5/src/react/hooks/__tests__/useSuspenseQuery.test.tsx#L6438-L6557 ### Reproduction Steps See the failing test in the reproduction link. The test verifies that the suspense fallback is not shown after calling `refetch` inside of `startTransition`, but it is currently failing.
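For reference, a minimal sketch of the usage pattern under test, modeled on the reproduction test. The query, component, and the experimental export name used in the import are assumptions for illustration; calling `refetch` inside `startTransition` should keep the already-revealed content on screen (with `isPending` set) instead of re-triggering the surrounding `<Suspense>` fallback:

```tsx
import { useTransition } from 'react';
import {
  gql,
  TypedDocumentNode,
  // Experimental alpha export name assumed here.
  useSuspenseQuery_experimental as useSuspenseQuery,
} from '@apollo/client';

interface Data {
  todo: { id: string; name: string; completed: boolean };
}

// Hypothetical query, modeled on the reproduction test.
const TODO_QUERY: TypedDocumentNode<Data, { id: string }> = gql`
  query TodoItemQuery($id: ID!) {
    todo(id: $id) {
      id
      name
      completed
    }
  }
`;

export function Todo({ id }: { id: string }) {
  const { data, refetch } = useSuspenseQuery(TODO_QUERY, { variables: { id } });
  const [isPending, startTransition] = useTransition();

  return (
    <div aria-busy={isPending}>
      {data.todo.name}
      <button
        onClick={() =>
          // Expected: the already-revealed todo stays on screen (stale UI,
          // with isPending === true) while the refetch is in flight, rather
          // than the surrounding <Suspense> fallback re-appearing.
          startTransition(() => {
            refetch();
          })
        }
      >
        Refresh
      </button>
    </div>
  );
}
```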
2023-04-26T18:52:39Z
3.8
apollographql/apollo-client
10,766
apollographql__apollo-client-10766
[ "10713" ]
174ab97c92f2e681548cf6e509dae97ac423293b
diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts --- a/src/react/hooks/useSuspenseQuery.ts +++ b/src/react/hooks/useSuspenseQuery.ts @@ -13,13 +13,14 @@ import { NetworkStatus, FetchMoreQueryOptions, } from '../../core'; -import { isNonEmptyArray } from '../../utilities'; +import { DeepPartial, isNonEmptyArray } from '../../utilities'; import { useApolloClient } from './useApolloClient'; import { DocumentType, verifyDocumentType } from '../parser'; import { SuspenseQueryHookFetchPolicy, SuspenseQueryHookOptions, ObservableQueryFields, + NoInfer, } from '../types/types'; import { useDeepMemo, useStrictModeSafeCleanupEffect, __use } from './internal'; import { useSuspenseCache } from './useSuspenseCache'; @@ -28,7 +29,7 @@ import { QuerySubscription } from '../cache/QuerySubscription'; import { canonicalStringify } from '../../cache'; export interface UseSuspenseQueryResult< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables > { client: ApolloClient<any>; @@ -63,11 +64,72 @@ type SubscribeToMoreFunction< > = ObservableQueryFields<TData, TVariables>['subscribeToMore']; export function useSuspenseQuery_experimental< - TData = any, + TData, + TVariables extends OperationVariables, + TOptions extends Omit<SuspenseQueryHookOptions<TData>, 'variables'> +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & + TOptions +): UseSuspenseQueryResult< + TOptions['errorPolicy'] extends 'ignore' | 'all' + ? TOptions['returnPartialData'] extends true + ? DeepPartial<TData> | undefined + : TData | undefined + : TOptions['returnPartialData'] extends true + ? DeepPartial<TData> + : TData, + TVariables +>; + +export function useSuspenseQuery_experimental< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + returnPartialData: true; + errorPolicy: 'ignore' | 'all'; + } +): UseSuspenseQueryResult<DeepPartial<TData> | undefined, TVariables>; + +export function useSuspenseQuery_experimental< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + errorPolicy: 'ignore' | 'all'; + } +): UseSuspenseQueryResult<TData | undefined, TVariables>; + +export function useSuspenseQuery_experimental< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + returnPartialData: true; + } +): UseSuspenseQueryResult<DeepPartial<TData>, TVariables>; + +export function useSuspenseQuery_experimental< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> +): UseSuspenseQueryResult<TData, TVariables>; + +export function useSuspenseQuery_experimental< + TData = unknown, TVariables extends OperationVariables = OperationVariables >( query: DocumentNode | TypedDocumentNode<TData, TVariables>, - options: SuspenseQueryHookOptions<TData, TVariables> = Object.create(null) + options: SuspenseQueryHookOptions< + 
NoInfer<TData>, + NoInfer<TVariables> + > = Object.create(null) ): UseSuspenseQueryResult<TData, TVariables> { const didPreviouslySuspend = useRef(false); const client = useApolloClient(options.client); diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -118,7 +118,7 @@ export type SuspenseQueryHookFetchPolicy = Extract< >; export interface SuspenseQueryHookOptions< - TData = any, + TData = unknown, TVariables extends OperationVariables = OperationVariables > extends Pick< QueryHookOptions<TData, TVariables>, diff --git a/src/utilities/index.ts b/src/utilities/index.ts --- a/src/utilities/index.ts +++ b/src/utilities/index.ts @@ -109,3 +109,4 @@ export { stripTypename } from './common/stripTypename'; export * from './types/IsStrictlyAny'; export { DeepOmit } from './types/DeepOmit'; +export { DeepPartial } from './types/DeepPartial'; diff --git a/src/utilities/types/DeepOmit.ts b/src/utilities/types/DeepOmit.ts --- a/src/utilities/types/DeepOmit.ts +++ b/src/utilities/types/DeepOmit.ts @@ -1,12 +1,7 @@ -// DeepOmit primitives include functions and symbols since these are unmodified. -type Primitive = - | string - | Function - | number - | boolean - | Symbol - | undefined - | null; +import { Primitive } from './Primitive'; + +// DeepOmit primitives include functions since these are unmodified. +type DeepOmitPrimitive = Primitive | Function; export type DeepOmitArray<T extends any[], K> = { [P in keyof T]: DeepOmit<T[P], K>; @@ -24,11 +19,11 @@ export type DeepOmitArray<T extends any[], K> = { // This should be fine as of the time of this writing until omitDeep gets // broader use since this utility is only used to strip __typename from // `variables`; a case in which class instances are invalid anyways. -export type DeepOmit<T, K> = T extends Primitive +export type DeepOmit<T, K> = T extends DeepOmitPrimitive ? T : { [P in Exclude<keyof T, K>]: T[P] extends infer TP - ? TP extends Primitive + ? TP extends DeepOmitPrimitive ? TP : TP extends any[] ? DeepOmitArray<TP, K> diff --git a/src/utilities/types/DeepPartial.ts b/src/utilities/types/DeepPartial.ts new file mode 100644 --- /dev/null +++ b/src/utilities/types/DeepPartial.ts @@ -0,0 +1,58 @@ +// Inspired by type-fest PartialDeep: https://github.com/sindresorhus/type-fest/blob/9feb8c89be9a0f2f688bf2f497230298a8e2472e/source/partial-deep.d.ts +// +// We're including the license to give credit to the original implementation. +// https://github.com/sindresorhus/type-fest/blob/main/license-mit + +/* + * MIT License + * + * Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ +import { Primitive } from './Primitive'; + +type DeepPartialPrimitive = Primitive | Date | RegExp; + +export type DeepPartial<T> = T extends DeepPartialPrimitive + ? T + : T extends Map<infer TKey, infer TValue> + ? DeepPartialMap<TKey, TValue> + : T extends ReadonlyMap<infer TKey, infer TValue> + ? DeepPartialReadonlyMap<TKey, TValue> + : T extends Set<infer TItem> + ? DeepPartialSet<TItem> + : T extends ReadonlySet<infer TItem> + ? DeepPartialReadonlySet<TItem> + : T extends (...args: any[]) => unknown + ? T | undefined + : T extends object + ? T extends ReadonlyArray<infer TItem> // Test for arrays/tuples + ? TItem[] extends T // Test for non-tuples + ? readonly TItem[] extends T + ? ReadonlyArray<DeepPartial<TItem | undefined>> + : Array<DeepPartial<TItem | undefined>> + : DeepPartialObject<T> + : DeepPartialObject<T> + : unknown; + +type DeepPartialMap<TKey, TValue> = {} & Map< + DeepPartial<TKey>, + DeepPartial<TValue> +>; + +type DeepPartialReadonlyMap<TKey, TValue> = {} & ReadonlyMap< + DeepPartial<TKey>, + DeepPartial<TValue> +>; + +type DeepPartialSet<T> = {} & Set<DeepPartial<T>>; +type DeepPartialReadonlySet<T> = {} & ReadonlySet<DeepPartial<T>>; + +type DeepPartialObject<T extends object> = { + [K in keyof T]?: DeepPartial<T[K]>; +}; diff --git a/src/utilities/types/Primitive.ts b/src/utilities/types/Primitive.ts new file mode 100644 --- /dev/null +++ b/src/utilities/types/Primitive.ts @@ -0,0 +1,9 @@ +// Matches any primitive value: https://developer.mozilla.org/en-US/docs/Glossary/Primitive. +export type Primitive = + | null + | undefined + | string + | number + | boolean + | symbol + | bigint;
diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -13,6 +13,7 @@ import { ErrorBoundary } from 'react-error-boundary'; import { GraphQLError } from 'graphql'; import { InvariantError } from 'ts-invariant'; import { equal } from '@wry/equality'; +import { expectTypeOf } from 'expect-type'; import { gql, @@ -30,6 +31,7 @@ import { NetworkStatus, } from '../../../core'; import { + DeepPartial, compact, concatPagination, getMainDefinition, @@ -234,21 +236,24 @@ function useErrorCase<TData extends ErrorCaseData>( return { query, mocks: [mock] }; } -function useVariablesQueryCase() { - const CHARACTERS = ['Spider-Man', 'Black Widow', 'Iron Man', 'Hulk']; +interface VariablesCaseData { + character: { + id: string; + name: string; + }; +} - interface QueryData { - character: { - id: string; - name: string; - }; - } +interface VariablesCaseVariables { + id: string; +} - interface QueryVariables { - id: string; - } +function useVariablesQueryCase() { + const CHARACTERS = ['Spider-Man', 'Black Widow', 'Iron Man', 'Hulk']; - const query: TypedDocumentNode<QueryData, QueryVariables> = gql` + const query: TypedDocumentNode< + VariablesCaseData, + VariablesCaseVariables + > = gql` query CharacterQuery($id: ID!) { character(id: $id) { id @@ -3129,7 +3134,7 @@ describe('useSuspenseQuery', () => { }); it('can unset a globally defined variable', async () => { - const query = gql` + const query: TypedDocumentNode<{ vars: Record<string, any> }> = gql` query MergedVariablesQuery { vars } @@ -7008,4 +7013,251 @@ describe('useSuspenseQuery', () => { expect(todo).toHaveTextContent('Take out trash (completed)'); }); }); + + describe.skip('type tests', () => { + it('returns unknown when TData cannot be inferred', () => { + const query = gql` + query { + hello + } + `; + + const { data } = useSuspenseQuery(query); + + expectTypeOf(data).toEqualTypeOf<unknown>(); + }); + + it('disallows wider variables type than specified', () => { + const { query } = useVariablesQueryCase(); + + // @ts-expect-error should not allow wider TVariables type + useSuspenseQuery(query, { variables: { id: '1', foo: 'bar' } }); + }); + + it('returns TData in default case', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData | undefined>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData | undefined>(); + }); + + it('returns TData | undefined with errorPolicy: "ignore"', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + errorPolicy: 'ignore', + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { errorPolicy: 'ignore' }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData | undefined with errorPolicy: "all"', () => { + const { 
query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + errorPolicy: 'all', + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + errorPolicy: 'all', + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData with errorPolicy: "none"', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + errorPolicy: 'none', + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData | undefined>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { errorPolicy: 'none' }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData | undefined>(); + }); + + it('returns DeepPartial<TData> with returnPartialData: true', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + returnPartialData: true, + }); + + expectTypeOf(inferred).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { returnPartialData: true }); + + expectTypeOf(explicit).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData with returnPartialData: false', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + returnPartialData: false, + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: false, + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + }); + + it('returns TData when passing an option that does not affect TData', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + fetchPolicy: 'no-cache', + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { fetchPolicy: 'no-cache' }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + }); + + it('handles combinations of options', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferredPartialDataIgnore } = useSuspenseQuery(query, { + returnPartialData: true, + errorPolicy: 'ignore', + }); + + expectTypeOf(inferredPartialDataIgnore).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + inferredPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitPartialDataIgnore } = 
useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + errorPolicy: 'ignore', + }); + + expectTypeOf(explicitPartialDataIgnore).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + explicitPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: inferredPartialDataNone } = useSuspenseQuery(query, { + returnPartialData: true, + errorPolicy: 'none', + }); + + expectTypeOf(inferredPartialDataNone).toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + expectTypeOf( + inferredPartialDataNone + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitPartialDataNone } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + errorPolicy: 'none', + }); + + expectTypeOf(explicitPartialDataNone).toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + expectTypeOf( + explicitPartialDataNone + ).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns correct TData type when combined options that do not affect TData', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + fetchPolicy: 'no-cache', + returnPartialData: true, + errorPolicy: 'none', + }); + + expectTypeOf(inferred).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + fetchPolicy: 'no-cache', + returnPartialData: true, + errorPolicy: 'none', + }); + + expectTypeOf(explicit).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + }); });
Ensure more accurate `TData` TypeScript types for `useSuspenseQuery` Currently `useSuspenseQuery` sets the returned `data` property to type `TData` (a generic). This works well with the default behavior since the hook will throw errors when they are encountered. There are, however, a couple of cases that will cause `data` to remain `undefined`. 1. Setting `errorPolicy` to anything other than `none` (the default). If an error is encountered with `all` or `ignore`, the error is not thrown and is expected to be handled locally, but `data` might not be set. 2. Setting `skip` to `true` (#10532). Once this is supported, `skip` will not load any data, so `data` is expected to be `undefined` until `skip` is `false`. Additionally, setting `returnPartialData` may result in some missing data. In this case, we should set `data` to `DeepPartial<TData>` (note that `DeepPartial` is not part of TypeScript; we'll need a helper) to better communicate that the `data` type might not contain all the required values. In short, our types should reflect the following: * `errorPolicy: 'none'` -> `data: TData` * `errorPolicy: 'ignore'` -> `data: TData | undefined` * `errorPolicy: 'all'` -> `data: TData | undefined` * `skip: false` -> `data: TData` * `skip: true` -> `data: TData | undefined` * `returnPartialData: true` -> `data: DeepPartial<TData>` * `returnPartialData: false` -> `data: TData`
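A type-level sketch of the intended inference, using a hypothetical `UserQuery`. The hook calls only illustrate the expected `data` types (they would normally live inside a component), the experimental export name in the import is an assumption, and `DeepPartial` refers to the helper described above rather than a built-in TypeScript type:

```ts
import {
  gql,
  TypedDocumentNode,
  // Experimental alpha export name assumed here.
  useSuspenseQuery_experimental as useSuspenseQuery,
} from '@apollo/client';

interface UserData {
  user: { id: string; name: string };
}

const USER_QUERY: TypedDocumentNode<UserData, { id: string }> = gql`
  query UserQuery($id: ID!) {
    user(id: $id) {
      id
      name
    }
  }
`;

// Not a real component; only meant to show the expected inferred types.
function expectedTypes(id: string) {
  // errorPolicy: 'none' (the default): errors are thrown, so data is UserData.
  const { data: a } = useSuspenseQuery(USER_QUERY, { variables: { id } });

  // errorPolicy: 'all' | 'ignore': errors are handled locally, so data
  // should be UserData | undefined.
  const { data: b } = useSuspenseQuery(USER_QUERY, {
    variables: { id },
    errorPolicy: 'all',
  });

  // returnPartialData: true: the cache may only hold part of the result,
  // so data should be DeepPartial<UserData> (the helper described above).
  const { data: c } = useSuspenseQuery(USER_QUERY, {
    variables: { id },
    returnPartialData: true,
  });

  return { a, b, c };
}
```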
2023-04-14T19:20:46Z
3.8
apollographql/apollo-client
10,450
apollographql__apollo-client-10450
[ "10429" ]
ba29d4fc138da9f0c08545cf56acabde0e340470
diff --git a/config/bundlesize.ts b/config/bundlesize.ts --- a/config/bundlesize.ts +++ b/config/bundlesize.ts @@ -3,7 +3,7 @@ import { join } from "path"; import { gzipSync } from "zlib"; import bytes from "bytes"; -const gzipBundleByteLengthLimit = bytes("33.28KB"); +const gzipBundleByteLengthLimit = bytes("33.30KB"); const minFile = join("dist", "apollo-client.min.cjs"); const minPath = join(__dirname, "..", minFile); const gzipByteLen = gzipSync(readFileSync(minPath)).byteLength; diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts --- a/src/react/hooks/useSuspenseQuery.ts +++ b/src/react/hooks/useSuspenseQuery.ts @@ -1,10 +1,4 @@ -import { - useRef, - useEffect, - useCallback, - useMemo, - useState, -} from 'react'; +import { useRef, useEffect, useCallback, useMemo, useState } from 'react'; import { equal } from '@wry/equality'; import { ApolloClient, @@ -38,10 +32,12 @@ export interface UseSuspenseQueryResult< TData = any, TVariables = OperationVariables > { + client: ApolloClient<any>; data: TData; error: ApolloError | undefined; fetchMore: ObservableQueryFields<TData, TVariables>['fetchMore']; refetch: ObservableQueryFields<TData, TVariables>['refetch']; + subscribeToMore: ObservableQueryFields<TData, TVariables>['subscribeToMore']; } const SUPPORTED_FETCH_POLICIES: WatchQueryFetchPolicy[] = [ @@ -151,6 +147,7 @@ export function useSuspenseQuery_experimental< return useMemo(() => { return { + client, data: result.data, error: errorPolicy === 'ignore' ? void 0 : toApolloError(result), fetchMore: (options) => { @@ -173,8 +170,9 @@ export function useSuspenseQuery_experimental< return promise; }, + subscribeToMore: (options) => observable.subscribeToMore(options), }; - }, [result, observable, errorPolicy]); + }, [client, result, observable, errorPolicy]); } function validateOptions(options: WatchQueryOptions) {
diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx --- a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -20,9 +20,16 @@ import { DocumentNode, InMemoryCache, Observable, + OperationVariables, + SubscribeToMoreOptions, TypedDocumentNode, + split, } from '../../../core'; -import { compact, concatPagination } from '../../../utilities'; +import { + compact, + concatPagination, + getMainDefinition, +} from '../../../utilities'; import { MockedProvider, MockedResponse, @@ -629,6 +636,28 @@ describe('useSuspenseQuery', () => { ]); }); + it('returns the client used in the result', async () => { + const { query } = useSimpleQueryCase(); + + const client = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: 'hello' } }) + ), + cache: new InMemoryCache(), + }); + + const { result } = renderSuspenseHook(() => useSuspenseQuery(query), { + client, + }); + + // wait for query to finish suspending to avoid warnings + await waitFor(() => { + expect(result.current.data).toEqual({ greeting: 'hello' }); + }); + + expect(result.current.client).toBe(client); + }); + it('does not suspend when data is in the cache and using a "cache-first" fetch policy', async () => { const { query, mocks } = useSimpleQueryCase(); @@ -4906,4 +4935,96 @@ describe('useSuspenseQuery', () => { }, ]); }); + + it('can subscribe to subscriptions and react to cache updates via `subscribeToMore`', async () => { + interface SubscriptionData { + greetingUpdated: string; + } + + interface QueryData { + greeting: string; + } + + type UpdateQueryFn = NonNullable< + SubscribeToMoreOptions< + QueryData, + OperationVariables, + SubscriptionData + >['updateQuery'] + >; + + const { mocks, query } = useSimpleQueryCase(); + + const wsLink = new MockSubscriptionLink(); + const mockLink = new MockLink(mocks); + + const link = split( + ({ query }) => { + const definition = getMainDefinition(query); + + return ( + definition.kind === 'OperationDefinition' && + definition.operation === 'subscription' + ); + }, + wsLink, + mockLink + ); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: 'ignore' }), + { link } + ); + + await waitFor(() => { + expect(result.current.data).toEqual({ greeting: 'Hello' }); + }); + + const updateQuery = jest.fn< + ReturnType<UpdateQueryFn>, + Parameters<UpdateQueryFn> + >((_, { subscriptionData: { data } }) => { + return { greeting: data.greetingUpdated }; + }); + + result.current.subscribeToMore<SubscriptionData>({ + document: gql` + subscription { + greetingUpdated + } + `, + updateQuery, + }); + + wsLink.simulateResult({ + result: { + data: { + greetingUpdated: 'Subscription hello', + }, + }, + }); + + await waitFor(() => { + expect(result.current.data).toEqual({ + greeting: 'Subscription hello', + }); + }); + + expect(updateQuery).toHaveBeenCalledTimes(1); + expect(updateQuery).toHaveBeenCalledWith( + { greeting: 'Hello' }, + { + subscriptionData: { + data: { greetingUpdated: 'Subscription hello' }, + }, + variables: {}, + } + ); + + expect(renders.count).toBe(3); + expect(renders.frames).toMatchObject([ + { data: { greeting: 'Hello' } }, + { data: { greeting: 'Subscription hello' } }, + ]); + }); });
Export `client` and `subscribeToMore` from `useSuspenseQuery` As explained in the [PR](https://github.com/apollographql/apollo-client/pull/10323), my initial goal with the API surface area of `useSuspenseQuery` was to keep it small and add to it as we get feedback throughout the alpha release process. While working with `useSuspenseQuery` in the Spotify demo, I added a subscription to listen for playback state changes. Because I'm fetching the initial data set with `useSuspenseQuery`, I want to be able to use `subscribeToMore` to start the subscription and easily update the cache with the results from the subscription. As a workaround, I tried to write to the cache directly in combination with `useSubscription`, but the `client` property isn't exported from `useSuspenseQuery`, making this a touch more difficult. This is a property exported by `useQuery`, so it would make sense for `useSuspenseQuery` to do the same. This makes it easier to get access to the client to perform manual cache updates when needed.
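A sketch of the usage this would enable, with hypothetical playback documents loosely modeled on the Spotify demo use case and the hook imported under its assumed experimental alpha name; `subscribeToMore` here mirrors the shape already returned by `useQuery`:

```tsx
import { useEffect } from 'react';
import {
  gql,
  TypedDocumentNode,
  // Experimental alpha export name assumed here.
  useSuspenseQuery_experimental as useSuspenseQuery,
} from '@apollo/client';

interface QueryData {
  playbackState: { isPlaying: boolean; trackId: string };
}

interface SubscriptionData {
  playbackStateChanged: QueryData['playbackState'];
}

// Hypothetical documents, loosely based on the Spotify demo use case.
const PLAYBACK_QUERY: TypedDocumentNode<QueryData> = gql`
  query PlaybackState {
    playbackState {
      isPlaying
      trackId
    }
  }
`;

const PLAYBACK_SUBSCRIPTION = gql`
  subscription PlaybackStateChanged {
    playbackStateChanged {
      isPlaying
      trackId
    }
  }
`;

function PlaybackState() {
  // With this change, subscribeToMore (and client) are returned alongside
  // data, matching what useQuery already exposes.
  const { data, subscribeToMore } = useSuspenseQuery(PLAYBACK_QUERY);

  useEffect(() => {
    // subscribeToMore returns an unsubscribe function, which doubles as the
    // effect cleanup.
    return subscribeToMore<SubscriptionData>({
      document: PLAYBACK_SUBSCRIPTION,
      // Merge each subscription payload into the cached query result.
      updateQuery: (_prev, { subscriptionData }) => ({
        playbackState: subscriptionData.data.playbackStateChanged,
      }),
    });
  }, [subscribeToMore]);

  return <div>{data.playbackState.isPlaying ? 'Playing' : 'Paused'}</div>;
}
```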
2023-01-18T18:28:09Z
3.8
apollographql/apollo-client
10,368
apollographql__apollo-client-10368
[ "10272" ]
52d5af26129bcec6bd51b45511b8349695219c47
diff --git a/config/bundlesize.ts b/config/bundlesize.ts --- a/config/bundlesize.ts +++ b/config/bundlesize.ts @@ -3,7 +3,7 @@ import { join } from "path"; import { gzipSync } from "zlib"; import bytes from "bytes"; -const gzipBundleByteLengthLimit = bytes("31.87KB"); +const gzipBundleByteLengthLimit = bytes("32KB"); const minFile = join("dist", "apollo-client.min.cjs"); const minPath = join(__dirname, "..", minFile); const gzipByteLen = gzipSync(readFileSync(minPath)).byteLength; diff --git a/src/core/QueryInfo.ts b/src/core/QueryInfo.ts --- a/src/core/QueryInfo.ts +++ b/src/core/QueryInfo.ts @@ -3,6 +3,7 @@ import { equal } from "@wry/equality"; import { Cache, ApolloCache } from '../cache'; import { DeepMerger } from "../utilities" +import { mergeIncrementalData } from '../utilities/common/incrementalResult'; import { WatchQueryOptions, ErrorPolicy } from './watchQueryOptions'; import { ObservableQuery, reobserveCacheFirst } from './ObservableQuery'; import { QueryListener } from './types'; @@ -373,21 +374,7 @@ export class QueryInfo { this.reset(); if ('incremental' in result && isNonEmptyArray(result.incremental)) { - let mergedData = this.getDiff().result; - - result.incremental.forEach(({ data, path, errors }) => { - for (let i = path.length - 1; i >= 0; --i) { - const key = path[i]; - const isNumericKey = !isNaN(+key); - const parent: Record<string | number, any> = isNumericKey ? [] : {}; - parent[key] = data; - data = parent as typeof data; - } - if (errors) { - graphQLErrors.push(...errors); - } - mergedData = merger.merge(mergedData, data); - }); + const mergedData = mergeIncrementalData(this.getDiff().result, result); result.data = mergedData; // Detect the first chunk of a deferred query and merge it with existing diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -6,7 +6,10 @@ type OperationTypeNode = any; import { equal } from '@wry/equality'; import { ApolloLink, execute, FetchResult } from '../link/core'; -import { isExecutionPatchIncrementalResult } from '../utilities/common/incrementalResult'; +import { + isExecutionPatchIncrementalResult, + isExecutionPatchResult, +} from '../utilities/common/incrementalResult'; import { Cache, ApolloCache, canonicalStringify } from '../cache'; import { @@ -15,6 +18,7 @@ import { getOperationName, hasClientExports, graphQLResultHasError, + getGraphQLErrorsFromResult, removeConnectionDirectiveFromDocument, canUseWeakMap, ObservableSubscription, @@ -27,6 +31,7 @@ import { isDocumentNode, isNonNullObject, } from '../utilities'; +import { mergeIncrementalData } from '../utilities/common/incrementalResult'; import { ApolloError, isApolloError } from '../errors'; import { QueryOptions, @@ -248,7 +253,7 @@ export class QueryManager<TStore> { (result: FetchResult<TData>) => { if (graphQLResultHasError(result) && errorPolicy === 'none') { throw new ApolloError({ - graphQLErrors: result.errors, + graphQLErrors: getGraphQLErrorsFromResult(result), }); } @@ -295,13 +300,14 @@ export class QueryManager<TStore> { next(storeResult) { self.broadcastQueries(); - // At the moment, a mutation can have only one result, so we can - // immediately resolve upon receiving the first result. In the future, - // mutations containing @defer or @stream directives might receive - // multiple FetchResult payloads from the ApolloLink chain, so we will - // probably need to collect those results in this next method and call - // resolve only later, in an observer.complete function. 
- resolve(storeResult); + // Since mutations might receive multiple payloads from the + // ApolloLink chain (e.g. when used with @defer), + // we resolve with a SingleExecutionResult or after the final + // ExecutionPatchResult has arrived and we have assembled the + // multipart response into a single result. + if (!('hasNext' in storeResult) || storeResult.hasNext === false) { + resolve(storeResult); + } }, error(err: Error) { @@ -355,12 +361,38 @@ export class QueryManager<TStore> { const skipCache = mutation.fetchPolicy === "no-cache"; if (!skipCache && shouldWriteResult(result, mutation.errorPolicy)) { - cacheWrites.push({ - result: result.data, - dataId: 'ROOT_MUTATION', - query: mutation.document, - variables: mutation.variables, - }); + if (!isExecutionPatchIncrementalResult(result)) { + cacheWrites.push({ + result: result.data, + dataId: 'ROOT_MUTATION', + query: mutation.document, + variables: mutation.variables, + }); + } + if (isExecutionPatchIncrementalResult(result) && isNonEmptyArray(result.incremental)) { + const diff = cache.diff<TData>({ + id: "ROOT_MUTATION", + // The cache complains if passed a mutation where it expects a + // query, so we transform mutations and subscriptions to queries + // (only once, thanks to this.transformCache). + query: this.transform(mutation.document).asQuery, + variables: mutation.variables, + optimistic: false, + returnPartialData: true, + }); + const mergedData = mergeIncrementalData(diff.result, result); + if (typeof mergedData !== 'undefined') { + // cast the ExecutionPatchResult to FetchResult here since + // ExecutionPatchResult never has `data` when returned from the server + (result as FetchResult).data = mergedData; + cacheWrites.push({ + result: mergedData, + dataId: 'ROOT_MUTATION', + query: mutation.document, + variables: mutation.variables, + }) + } + } const { updateQueries } = mutation; if (updateQueries) { @@ -421,6 +453,12 @@ export class QueryManager<TStore> { // apply those writes to the store by running this reducer again with // a write action. const { update } = mutation; + // Determine whether result is a SingleExecutionResult, + // or the final ExecutionPatchResult. + const isFinalResult = + !isExecutionPatchResult(result) || + (isExecutionPatchIncrementalResult(result) && !result.hasNext); + if (update) { if (!skipCache) { // Re-read the ROOT_MUTATION data we just wrote into the cache @@ -438,20 +476,31 @@ export class QueryManager<TStore> { returnPartialData: true, }); - if (diff.complete && !(isExecutionPatchIncrementalResult(result))) { - result = { ...result, data: diff.result }; + if (diff.complete) { + result = { ...result as FetchResult, data: diff.result }; + if ('incremental' in result) { + delete result.incremental; + } + if ('hasNext' in result) { + delete result.hasNext; + } } } - update(cache, result, { - context: mutation.context, - variables: mutation.variables, - }); + // If we've received the whole response, + // either a SingleExecutionResult or the final ExecutionPatchResult, + // call the update function. + if (isFinalResult) { + update(cache, result, { + context: mutation.context, + variables: mutation.variables, + }); + } } // TODO Do this with cache.evict({ id: 'ROOT_MUTATION' }) but make it // shallow to allow rolling back optimistic evictions. 
- if (!skipCache && !mutation.keepRootFields) { + if (!skipCache && !mutation.keepRootFields && isFinalResult) { cache.modify({ id: 'ROOT_MUTATION', fields(value, { fieldName, DELETE }) { @@ -1053,19 +1102,8 @@ export class QueryManager<TStore> { ), result => { - const graphQLErrors = isNonEmptyArray(result.errors) - ? result.errors.slice(0) - : []; - - if ('incremental' in result && isNonEmptyArray(result.incremental)) { - result.incremental.forEach(incrementalResult => { - if (incrementalResult.errors) { - graphQLErrors.push(...incrementalResult.errors); - } - }); - } - - const hasErrors = isNonEmptyArray(graphQLErrors); + const graphQLErrors = getGraphQLErrorsFromResult(result); + const hasErrors = graphQLErrors.length > 0; // If we interrupted this request by calling getResultsFromLink again // with the same QueryInfo object, we ignore the old results. diff --git a/src/utilities/common/errorHandling.ts b/src/utilities/common/errorHandling.ts --- a/src/utilities/common/errorHandling.ts +++ b/src/utilities/common/errorHandling.ts @@ -1,5 +1,26 @@ -import { ExecutionResult } from 'graphql'; +import { FetchResult } from "../../link/core"; +import { isNonEmptyArray } from "../../utilities/common/arrays"; +import { isExecutionPatchIncrementalResult } from "../../utilities/common/incrementalResult"; -export function graphQLResultHasError(result: ExecutionResult<unknown>): boolean { - return (result.errors && result.errors.length > 0) || false; +export function graphQLResultHasError(result: FetchResult): boolean { + const errors = getGraphQLErrorsFromResult(result); + return isNonEmptyArray(errors); +} + +export function getGraphQLErrorsFromResult(result: FetchResult) { + const graphQLErrors = isNonEmptyArray(result.errors) + ? result.errors.slice(0) + : []; + + if ( + isExecutionPatchIncrementalResult(result) && + isNonEmptyArray(result.incremental) + ) { + result.incremental.forEach((incrementalResult) => { + if (incrementalResult.errors) { + graphQLErrors.push(...incrementalResult.errors); + } + }); + } + return graphQLErrors; } diff --git a/src/utilities/common/incrementalResult.ts b/src/utilities/common/incrementalResult.ts --- a/src/utilities/common/incrementalResult.ts +++ b/src/utilities/common/incrementalResult.ts @@ -1,5 +1,53 @@ -import { ExecutionPatchIncrementalResult } from '../../link/core'; +import { + ExecutionPatchIncrementalResult, + ExecutionPatchInitialResult, + ExecutionPatchResult, + FetchResult, +} from "../../link/core"; +import { isNonEmptyArray } from "./arrays"; +import { DeepMerger } from "./mergeDeep"; -export function isExecutionPatchIncrementalResult(value: any): value is ExecutionPatchIncrementalResult { - return !!(value as ExecutionPatchIncrementalResult).incremental; +export function isExecutionPatchIncrementalResult( + value: FetchResult +): value is ExecutionPatchIncrementalResult { + return "incremental" in value; +} + +export function isExecutionPatchInitialResult( + value: FetchResult +): value is ExecutionPatchInitialResult { + return "hasNext" in value && "data" in value; +} + +export function isExecutionPatchResult( + value: FetchResult +): value is ExecutionPatchResult { + return ( + isExecutionPatchIncrementalResult(value) || + isExecutionPatchInitialResult(value) + ); +} + +export function mergeIncrementalData<TData>( + prevResult: TData, + result: ExecutionPatchResult<TData> +) { + let mergedData = prevResult; + const merger = new DeepMerger(); + if ( + isExecutionPatchIncrementalResult(result) && + isNonEmptyArray(result.incremental) + ) { + 
result.incremental.forEach(({ data, path }) => { + for (let i = path.length - 1; i >= 0; --i) { + const key = path[i]; + const isNumericKey = !isNaN(+key); + const parent: Record<string | number, any> = isNumericKey ? [] : {}; + parent[key] = data; + data = parent as typeof data; + } + mergedData = merger.merge(mergedData, data); + }); + } + return mergedData as TData; }
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -369,6 +369,7 @@ Array [ "getFragmentDefinitions", "getFragmentFromSelection", "getFragmentQueryDocument", + "getGraphQLErrorsFromResult", "getInclusionDirectives", "getMainDefinition", "getOperationDefinition", diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -9,11 +9,12 @@ import fetchMock from "fetch-mock"; import { ApolloClient, ApolloLink, ApolloQueryResult, Cache, NetworkStatus, Observable, ObservableQuery, TypedDocumentNode } from '../../../core'; import { InMemoryCache } from '../../../cache'; -import { itAsync, MockedProvider, mockSingleLink, subscribeAndCount } from '../../../testing'; +import { itAsync, MockedProvider, MockSubscriptionLink, mockSingleLink, subscribeAndCount } from '../../../testing'; import { ApolloProvider } from '../../context'; import { useQuery } from '../useQuery'; import { useMutation } from '../useMutation'; import { BatchHttpLink } from '../../../link/batch-http'; +import { FetchResult } from '../../../link/core'; describe('useMutation Hook', () => { interface Todo { @@ -2206,4 +2207,248 @@ describe('useMutation Hook', () => { await screen.findByText('item 3'); }); }); + describe('defer', () => { + const CREATE_TODO_MUTATION_DEFER = gql` + mutation createTodo($description: String!, $priority: String) { + createTodo(description: $description, priority: $priority) { + id + ... @defer { + description + priority + } + } + } + `; + const variables = { + description: 'Get milk!' 
+ }; + it('resolves a deferred mutation with the full result', async () => { + const errorSpy = jest.spyOn(console, "error"); + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + const useCreateTodo = () => { + const [createTodo, { loading, data }] = useMutation( + CREATE_TODO_MUTATION_DEFER + ); + + useEffect(() => { + createTodo({ variables }); + }, [variables]); + + return { loading, data }; + }; + + const { result, waitForNextUpdate } = renderHook( + () => useCreateTodo(), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + expect(result.current.loading).toBe(true); + expect(result.current.data).toBe(undefined); + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + createTodo: { + id: 1, + __typename: 'Todo', + }, + }, + hasNext: true + }, + }); + }); + + setTimeout(() => { + link.simulateResult({ + result: { + incremental: [{ + data: { + description: 'Get milk!', + priority: 'High', + __typename: 'Todo', + }, + path: ['createTodo'], + }], + hasNext: false + }, + }, true); + }); + + + // When defer is used in a mutation, the final value resolves + // in a single result + await waitForNextUpdate(); + + expect(result.current.loading).toBe(false); + expect(result.current.data).toEqual({ + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: 'Todo', + }, + }); + expect(errorSpy).not.toHaveBeenCalled(); + errorSpy.mockRestore(); + }); + it('resolves with resulting errors and calls onError callback', async () => { + const errorSpy = jest.spyOn(console, "error"); + const link = new MockSubscriptionLink(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + const onError = jest.fn(); + const { result } = renderHook( + () => useMutation(CREATE_TODO_MUTATION_DEFER, { onError }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + const createTodo = result.current[0]; + + let fetchResult: any; + + setTimeout(() => { + link.simulateResult({ + result: { + data: { + createTodo: { + id: 1, + __typename: 'Todo', + }, + }, + hasNext: true + }, + }); + }); + + setTimeout(() => { + link.simulateResult({ + result: { + incremental: [{ + data: null, + errors: [ + new GraphQLError(CREATE_TODO_ERROR) + ], + path: ['createTodo'], + }], + hasNext: false + }, + }, true); + }); + await act(async () => { + fetchResult = await createTodo({ variables }); + }); + + expect(fetchResult.errors.message).toBe(CREATE_TODO_ERROR); + expect(fetchResult.data).toBe(undefined); + expect(onError).toHaveBeenCalledTimes(1); + expect(onError.mock.calls[0][0].message).toBe(CREATE_TODO_ERROR); + expect(errorSpy).not.toHaveBeenCalled(); + errorSpy.mockRestore(); + }); + it('calls the update function with the final merged result data', async () => { + const errorSpy = jest.spyOn(console, "error"); + const link = new MockSubscriptionLink(); + const update = jest.fn(); + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + const { result } = renderHook( + () => useMutation(CREATE_TODO_MUTATION_DEFER, + { update }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + const [createTodo] = result.current; + + let promiseReturnedByMutate: Promise<FetchResult>; + + await act(async () => { + promiseReturnedByMutate = createTodo({ variables }); + }); + + 
link.simulateResult({ + result: { + data: { + createTodo: { + id: 1, + __typename: 'Todo', + }, + }, + hasNext: true + }, + }); + + link.simulateResult({ + result: { + incremental: [{ + data: { + description: 'Get milk!', + priority: 'High', + __typename: 'Todo', + }, + path: ['createTodo'], + }], + hasNext: false + }, + }, true); + + await act(async () => { + await promiseReturnedByMutate; + }); + + expect(update).toHaveBeenCalledTimes(1); + expect(update).toHaveBeenCalledWith( + // the first item is the cache, which we don't need to make any + // assertions against in this test + expect.anything(), + // second argument is the result + expect.objectContaining({ + data: { + createTodo: { + id: 1, + description: "Get milk!", + priority: "High", + __typename: 'Todo', + }, + } + }), + // third argument is an object containing context and variables + // but we only care about variables here + expect.objectContaining({ variables }) + ); + + expect(errorSpy).not.toHaveBeenCalled(); + errorSpy.mockRestore(); + }); + }); });
🐛: Mutations with @defer only respond with the first part > To clarify, this is happening from a non-federated AS4 instance, not router. **Intended outcome:** Mutations update with the correct data, same as with `useQuery`, which works as expected. **Actual outcome:** Using the `useMutation` hook with a mutation containing `@defer` results in only the first chunk loading. For example, given a server responding with: ``` --- content-type: application/json; charset=utf-8 {"hasNext":true,"data":{"makePayment":{"id":"1afa84cc-26a5-4820-a496-33c8366e03cd","__typename":"MakePaymentResult"}}} --- content-type: application/json; charset=utf-8 {"hasNext":false,"incremental":[{"path":["makePayment"],"data":{"paymentStatus":{"__typename":"PaymentSuccess","id":"2a3a1ddd-81e1-4535-8a9c-e8dc1ce38026","billedAmount":39.854330085558395},"__typename":"MakePaymentResult"}}]} ----- ``` Only ``` {"makePayment":{"id":"1afa84cc-26a5-4820-a496-33c8366e03cd","__typename":"MakePaymentResult"}} ``` is reported in the `data` object. **How to reproduce the issue:** Example Repo: https://github.com/lleadbet/graphql-galaxy-demo **Versions** ``` System: OS: macOS 12.6 Binaries: Node: 16.16.0 - ~/.volta/tools/image/node/16.16.0/bin/node Yarn: 1.22.19 - ~/.volta/tools/image/yarn/1.22.19/bin/yarn npm: 8.11.0 - ~/.volta/tools/image/node/16.16.0/bin/npm Browsers: Chrome: 107.0.5304.110 Firefox: 106.0.5 Safari: 16.0 npmPackages: @apollo/client: ^3.7.1 => 3.7.1 ```
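For illustration, a minimal TypeScript sketch of the chunk-merging idea the patch above implements as `mergeIncrementalData`. The `IncrementalChunk` type and `deepMerge` helper are simplified stand-ins for Apollo Client's internal types and `DeepMerger`, not the library's actual API:

```ts
// Minimal sketch of folding @defer incremental chunks back into one result.
// `IncrementalChunk` and `deepMerge` are simplified stand-ins for Apollo
// Client's internal types and DeepMerger; they exist only for illustration.
type IncrementalChunk = {
  data: Record<string, unknown> | null;
  path: ReadonlyArray<string | number>;
};

function deepMerge(target: any, source: any): any {
  if (
    target !== null && typeof target === "object" &&
    source !== null && typeof source === "object"
  ) {
    const result: Record<string | number, any> = Array.isArray(target)
      ? [...target]
      : { ...target };
    for (const key of Object.keys(source)) {
      result[key] = deepMerge(result[key], source[key]);
    }
    return result;
  }
  return source === undefined ? target : source;
}

function mergeIncremental<TData>(prev: TData, chunks: IncrementalChunk[]): TData {
  let merged: any = prev;
  for (const { data, path } of chunks) {
    // Rebuild the nesting described by `path`, innermost key first, so the
    // chunk lands at the right position inside the previous result.
    let wrapped: any = data;
    for (let i = path.length - 1; i >= 0; --i) {
      const key = path[i];
      const parent: Record<string | number, any> =
        typeof key === "number" ? [] : {};
      parent[key] = wrapped;
      wrapped = parent;
    }
    merged = deepMerge(merged, wrapped);
  }
  return merged as TData;
}

// Mirrors the multipart response quoted in the issue: the second chunk is
// merged under the "makePayment" path of the first payload.
const first = { makePayment: { id: "1", __typename: "MakePaymentResult" } };
const full = mergeIncremental(first, [
  { data: { paymentStatus: { __typename: "PaymentSuccess" } }, path: ["makePayment"] },
]);
console.log(full.makePayment); // now carries both `id` and the deferred fields
```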
This will most likely require some changes to `QueryManager`, inside the `markMutationResult` method. @alessbell just to confirm, is there any change Apollo Router has to consider here, or is this strictly on the client side? cc @abernix @jpvajda I don't have a test case that works against the router yet, but I'm not sure if the issue is in my local subgraph. @abernix, is there a mutation example you can share? If not, I will go ahead and open a bug. To clarify, this is happening from a non-federated AS4 instance, not the router. Yep - I was referring to my attempt to reproduce the issue using the router. When I use a subgraph with the same schema used in https://github.com/lleadbet/graphql-galaxy-demo, I get a "cannot query field 'makePayment' on type 'Query'" error from the router. @lleadbet That's good info, thanks for clarifying that. Opened the router issue here: https://github.com/apollographql/router/issues/2099
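A reduced sketch of the gating idea the patch applies around `markMutationResult` and the mutation's `next` handler: only resolve once no further chunks are expected. The `MutationChunk` shape below is a hypothetical simplification, not Apollo Client's `FetchResult` type:

```ts
// Hedged sketch: resolve a mutation only once the multipart response is
// complete. `MutationChunk` is a hypothetical simplified shape, not Apollo
// Client's FetchResult type.
type MutationChunk<TData> = {
  data?: TData;
  hasNext?: boolean;
};

function isFinalChunk<TData>(result: MutationChunk<TData>): boolean {
  // A plain single-payload result carries no `hasNext` field at all; a
  // deferred stream is finished once the link reports `hasNext: false`.
  return !("hasNext" in result) || result.hasNext === false;
}

// Usage inside a `next` observer: only resolve the mutate() promise for the
// final chunk, after the partial payloads have been assembled.
function onNext<TData>(
  result: MutationChunk<TData>,
  resolve: (value: MutationChunk<TData>) => void
): void {
  if (isFinalChunk(result)) {
    resolve(result);
  }
}
```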
2022-12-14T18:40:10Z
3.7
apollographql/apollo-client
10,499
apollographql__apollo-client-10499
[ "10496" ]
2408fd3fe7259ebf8bd5c90b67ec71ce4124180d
diff --git a/config/bundlesize.ts b/config/bundlesize.ts --- a/config/bundlesize.ts +++ b/config/bundlesize.ts @@ -3,7 +3,7 @@ import { join } from "path"; import { gzipSync } from "zlib"; import bytes from "bytes"; -const gzipBundleByteLengthLimit = bytes("32.37KB"); +const gzipBundleByteLengthLimit = bytes("32.42KB"); const minFile = join("dist", "apollo-client.min.cjs"); const minPath = join(__dirname, "..", minFile); const gzipByteLen = gzipSync(readFileSync(minPath)).byteLength; diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -28,15 +28,14 @@ export function useLazyQuery<TData = any, TVariables extends OperationVariables options?: LazyQueryHookOptions<TData, TVariables> ): LazyQueryResultTuple<TData, TVariables> { const abortControllersRef = useRef(new Set<AbortController>()); - const internalState = useInternalState( - useApolloClient(options && options.client), - query, - ); const execOptionsRef = useRef<Partial<LazyQueryHookOptions<TData, TVariables>>>(); - const merged = execOptionsRef.current - ? mergeOptions(options, execOptionsRef.current) - : options; + const merged = execOptionsRef.current ? mergeOptions(options, execOptionsRef.current) : options; + + const internalState = useInternalState<TData, TVariables>( + useApolloClient(options && options.client), + merged?.query ?? query + ); const useQueryResult = internalState.useQuery({ ...merged, diff --git a/src/utilities/common/mergeOptions.ts b/src/utilities/common/mergeOptions.ts --- a/src/utilities/common/mergeOptions.ts +++ b/src/utilities/common/mergeOptions.ts @@ -13,7 +13,7 @@ type OptionsUnion<TData, TVariables extends OperationVariables, TContext> = | MutationOptions<TData, TVariables, TContext>; export function mergeOptions< - TOptions extends OptionsUnion<any, any, any> + TOptions extends Partial<OptionsUnion<any, any, any>> >( defaults: TOptions | Partial<TOptions> | undefined, options: TOptions | Partial<TOptions>,
diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -439,6 +439,77 @@ describe('useLazyQuery Hook', () => { }); }); + + it("changing queries", async () => { + const query1 = gql` + query { + hello + } + `; + const query2 = gql` + query { + name + } + `; + const mocks = [ + { + request: { query: query1 }, + result: { data: { hello: "world" } }, + }, + { + request: { query: query2 }, + result: { data: { name: "changed" } }, + }, + ]; + + const cache = new InMemoryCache(); + const { result } = renderHook(() => useLazyQuery(query1), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> + ), + }); + + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toBe(undefined); + const execute = result.current[0]; + + setTimeout(() => execute()); + + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); + + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world" }); + + setTimeout(() => execute({ query: query2 })); + + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); + + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ name: "changed" }); + }); + it('should fetch data each time the execution function is called, when using a "network-only" fetch policy', async () => { const mocks = [ {
useLazyQuery ignores changes to the query document ### Issue Description Despite what the documentation for useLazyQuery appears to suggest, updating the `query` option inside the trigger of the lazy query does nothing. I.e. `const [fetchMyQuery] = useLazyQuery(QUERY_1)` and later calling `fetchMyQuery({ query: SOME_OTHER_QUERY })` still calls QUERY_1. ### Link to Reproduction https://codesandbox.io/embed/great-mestorf-f9nmze?fontsize=14&hidenavigation=1&theme=dark ### Reproduction Steps Call useLazyQuery with a default gql query, then override the query in the execute function. The original query is executed and the override is ignored.
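For reference, the intended usage once the override is honored, mirroring the `execute({ query: query2 })` call added in the test patch above. The query names and fields below are made up for the example:

```ts
// Illustrative sketch of the intended useLazyQuery behavior after the fix:
// a query document passed to the execute function overrides the hook's
// default. The query names and fields are illustrative only.
import { useEffect } from "react";
import { gql, useLazyQuery } from "@apollo/client";

const QUERY_1 = gql`query Hello { hello }`;
const SOME_OTHER_QUERY = gql`query Name { name }`;

export function useExample() {
  const [fetchMyQuery, { data }] = useLazyQuery(QUERY_1);

  useEffect(() => {
    // Before the fix this still sent QUERY_1; with the fix the override is
    // respected and SOME_OTHER_QUERY is executed instead.
    fetchMyQuery({ query: SOME_OTHER_QUERY });
  }, [fetchMyQuery]);

  return data;
}
```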
Hey @laytong 👋 This looks related to #10198, just used in a slightly different way. Seeing as `useQuery` supports the ability to [swap queries](https://github.com/apollographql/apollo-client/blob/b4a4bd16a911a233de6ada47780c84512f885401/src/react/hooks/__tests__/useQuery.test.tsx#L775-L810), I see no reason this shouldn't either. I can't guarantee a time frame on a fix, but we will try to get to this when we can! Feel free to submit a PR if you're interested in fixing it yourself and we'd be happy to review it 🙂
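A reduced sketch of the ordering change the patch above makes: merge the per-call execute options over the hook options first, and only then pick the query document the internal state should use. `pickQuery` is a hypothetical helper for illustration, not the hook's actual implementation:

```ts
// Reduced sketch of the ordering introduced in useLazyQuery: merge the
// per-call execute options over the hook options, then pick the document.
// `pickQuery` and `LazyOptions` are simplified stand-ins for illustration.
import type { DocumentNode } from "graphql";

type LazyOptions = { query?: DocumentNode; variables?: Record<string, unknown> };

function pickQuery(
  defaultQuery: DocumentNode,
  hookOptions: LazyOptions | undefined,
  execOptions: LazyOptions | undefined
): DocumentNode {
  const merged = execOptions ? { ...hookOptions, ...execOptions } : hookOptions;
  // The merged query (if any) wins over the document passed to the hook,
  // which is what lets `execute({ query: SOME_OTHER_QUERY })` take effect.
  return merged?.query ?? defaultQuery;
}
```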
2023-02-01T15:35:13Z
3.7
apollographql/apollo-client
10,340
apollographql__apollo-client-10340
[ "7626", "11041" ]
98359ef3c717ee15be8c167422bed124a6a64c34
diff --git a/.size-limit.cjs b/.size-limit.cjs --- a/.size-limit.cjs +++ b/.size-limit.cjs @@ -1,16 +1,16 @@ const checks = [ { path: "dist/apollo-client.min.cjs", - limit: "36.64kb" + limit: "38056", }, { path: "dist/main.cjs", - import: "{ ApolloClient, InMemoryCache, HttpLink }" + import: "{ ApolloClient, InMemoryCache, HttpLink }", }, { path: "dist/index.js", import: "{ ApolloClient, InMemoryCache, HttpLink }", - limit: "34.99kb" + limit: "31971", }, ...[ "ApolloProvider", @@ -18,40 +18,53 @@ const checks = [ "useLazyQuery", "useMutation", "useSubscription", - //"useSuspenseQuery_experimental", - "useFragment_experimental" + "useSuspenseQuery", + "useBackgroundQuery", + "useReadQuery", + "useFragment", ].map((name) => ({ path: "dist/react/index.js", import: `{ ${name} }` })), -].map((config) => ({ - ...config, - name: config.name || config.import ? `import ${config.import} from "${config.path}"` : config.path, - ignore: [ - ...(config.ignore || []), - "react", - "react-dom", - "@graphql-typed-document-node/core", - "@wry/context", - "@wry/equality", - "@wry/trie", - "graphql-tag", - "hoist-non-react-statics", - "optimism", - "prop-types", - "response-iterator", - "symbol-observable", - "ts-invariant", - "tslib", - "zen-observable-ts" - ], -})).flatMap((value) => value.path == "dist/apollo-client.min.cjs" ? value : [{...value, limit: undefined}, { - ...value, - name: `${value.name} (production)`, - modifyEsbuildConfig(config){ - config.define = { - "__DEV__": `false`, - "globalThis.__DEV__": `false`, - } - return config - } -}]); +] + .map((config) => ({ + ...config, + name: + config.name || config.import + ? `import ${config.import} from "${config.path}"` + : config.path, + ignore: [ + ...(config.ignore || []), + "react", + "react-dom", + "@graphql-typed-document-node/core", + "@wry/context", + "@wry/equality", + "@wry/trie", + "graphql-tag", + "hoist-non-react-statics", + "optimism", + "prop-types", + "response-iterator", + "symbol-observable", + "ts-invariant", + "tslib", + "zen-observable-ts", + ], + })) + .flatMap((value) => + value.path == "dist/apollo-client.min.cjs" + ? value + : [ + { ...value, limit: undefined }, + { + ...value, + name: `${value.name} (production)`, + modifyEsbuildConfig(config) { + config.define = { + "globalThis.__DEV__": `false`, + }; + return config; + }, + }, + ] + ); module.exports = checks; diff --git a/config/bundlesize.ts b/config/bundlesize.ts --- a/config/bundlesize.ts +++ b/config/bundlesize.ts @@ -3,21 +3,19 @@ import { join } from "path"; import { gzipSync } from "zlib"; import bytes from "bytes"; -const gzipBundleByteLengthLimit = bytes("33.07KB"); +const gzipBundleByteLengthLimit = bytes("35.25KB"); const minFile = join("dist", "apollo-client.min.cjs"); const minPath = join(__dirname, "..", minFile); const gzipByteLen = gzipSync(readFileSync(minPath)).byteLength; const overLimit = gzipByteLen > gzipBundleByteLengthLimit; -const message = `Minified + GZIP-encoded bundle size for ${ - minFile -} = ${ - bytes(gzipByteLen, { unit: "KB" }) -}, ${ - overLimit ? "exceeding" : "under" -} limit ${ - bytes(gzipBundleByteLengthLimit, { unit: "KB" }) -}`; +const message = `Minified + GZIP-encoded bundle size for ${minFile} = ${bytes( + gzipByteLen, + { unit: "KB" } +)}, ${overLimit ? 
"exceeding" : "under"} limit ${bytes( + gzipBundleByteLengthLimit, + { unit: "KB" } +)}`; if (overLimit) { throw new Error(message); diff --git a/config/distSpotFixes.ts b/config/distSpotFixes.ts deleted file mode 100644 --- a/config/distSpotFixes.ts +++ /dev/null @@ -1,25 +0,0 @@ -import * as fs from "fs"; -import { EOL } from "os"; -import { distDir } from './helpers'; - -export function applyDistSpotFixes() { - sanitizeDEV(); -} - -function sanitizeDEV() { - const globalDTsPath = `${distDir}/utilities/globals/global.d.ts`; - const globalDTs = fs.readFileSync(globalDTsPath, "utf8"); - // The global.d.ts types are useful within the @apollo/client codebase to - // provide a type for the global __DEV__ constant, but actually shipping that - // declaration as a globally-declared type runs too much risk of conflict with - // other __DEV__ declarations attempting to achieve the same thing, most - // notably the one in @types/react-native/index.d.ts. We preserve the default - // export so that index.d.ts can remain unchanged, but otherwise we remove all - // traces of __DEV__ from global.d.ts. - if (/__DEV__/.test(globalDTs)) { - fs.writeFileSync(globalDTsPath, [ - "declare const _default: typeof globalThis;", - "export default _default;", - ].join(EOL)); - } -} diff --git a/config/entryPoints.js b/config/entryPoints.js --- a/config/entryPoints.js +++ b/config/entryPoints.js @@ -1,44 +1,46 @@ const entryPoints = [ { dirs: [], bundleName: "main" }, - { dirs: ['cache'] }, - { dirs: ['core'] }, - { dirs: ['errors'] }, - { dirs: ['link', 'batch'] }, - { dirs: ['link', 'batch-http'] }, - { dirs: ['link', 'context'] }, - { dirs: ['link', 'core'] }, - { dirs: ['link', 'error'] }, - { dirs: ['link', 'http'] }, - { dirs: ['link', 'persisted-queries'] }, - { dirs: ['link', 'retry'] }, - { dirs: ['link', 'schema'] }, - { dirs: ['link', 'subscriptions'] }, - { dirs: ['link', 'utils'] }, - { dirs: ['link', 'ws'] }, - { dirs: ['react'] }, - { dirs: ['react', 'components'] }, - { dirs: ['react', 'context'] }, - { dirs: ['react', 'hoc'] }, - { dirs: ['react', 'hooks'] }, - { dirs: ['react', 'parser'] }, - { dirs: ['react', 'ssr'] }, - { dirs: ['testing'], extensions: [".js", ".jsx"] }, - { dirs: ['testing', 'core'] }, - { dirs: ['utilities'] }, - { dirs: ['utilities', 'globals'], sideEffects: true }, + { dirs: ["cache"] }, + { dirs: ["core"] }, + { dirs: ["dev"] }, + { dirs: ["errors"] }, + { dirs: ["link", "batch"] }, + { dirs: ["link", "batch-http"] }, + { dirs: ["link", "context"] }, + { dirs: ["link", "core"] }, + { dirs: ["link", "error"] }, + { dirs: ["link", "http"] }, + { dirs: ["link", "persisted-queries"] }, + { dirs: ["link", "retry"] }, + { dirs: ["link", "remove-typename"] }, + { dirs: ["link", "schema"] }, + { dirs: ["link", "subscriptions"] }, + { dirs: ["link", "utils"] }, + { dirs: ["link", "ws"] }, + { dirs: ["react"] }, + { dirs: ["react", "components"] }, + { dirs: ["react", "context"] }, + { dirs: ["react", "hoc"] }, + { dirs: ["react", "hooks"] }, + { dirs: ["react", "parser"] }, + { dirs: ["react", "ssr"] }, + { dirs: ["testing"], extensions: [".js", ".jsx"] }, + { dirs: ["testing", "core"] }, + { dirs: ["utilities"] }, + { dirs: ["utilities", "globals"], sideEffects: true }, ]; const lookupTrie = Object.create(null); -entryPoints.forEach(info => { +entryPoints.forEach((info) => { let node = lookupTrie; - info.dirs.forEach(dir => { + info.dirs.forEach((dir) => { const dirs = node.dirs || (node.dirs = Object.create(null)); node = dirs[dir] || (dirs[dir] = { isEntry: false }); }); 
node.isEntry = true; }); -exports.forEach = function(callback, context) { +exports.forEach = function (callback, context) { entryPoints.forEach(callback, context); }; @@ -74,9 +76,11 @@ exports.check = function (id, parentId) { return false; } - console.warn(`Risky cross-entry-point nested import of ${id} in ${ - partsAfterDist(parentId).join("/") - }`); + console.warn( + `Risky cross-entry-point nested import of ${id} in ${partsAfterDist( + parentId + ).join("/")}` + ); } } @@ -86,6 +90,9 @@ exports.check = function (id, parentId) { function partsAfterDist(id) { const parts = id.split(path.sep); const distIndex = parts.lastIndexOf("dist"); + if (/^index.jsx?$/.test(parts[parts.length - 1])) { + parts.pop(); + } if (distIndex >= 0) { return parts.slice(distIndex + 1); } diff --git a/config/helpers.ts b/config/helpers.ts --- a/config/helpers.ts +++ b/config/helpers.ts @@ -5,17 +5,17 @@ import glob = require("glob"); export const distDir = path.resolve(__dirname, "..", "dist"); -export function eachFile(dir: string, callback: ( - absPath: string, - relPath: string, -) => any) { +export function eachFile( + dir: string, + callback: (absPath: string, relPath: string) => any +) { const promises: Promise<any>[] = []; return new Promise<void>((resolve, reject) => { - glob(`${dir.replace(/\\/g, '/')}/**/*.js`, (error, files) => { + glob(`${dir.replace(/\\/g, "/")}/**/*.js`, (error, files) => { if (error) return reject(error); - files.sort().forEach(file => { + files.sort().forEach((file) => { const relPath = path.relative(dir, file); // Outside the distDir, somehow. @@ -32,9 +32,11 @@ export function eachFile(dir: string, callback: ( // This file is not meant to be imported or processed. if (relPath.endsWith("invariantErrorCodes.js")) return; - promises.push(new Promise(resolve => { - resolve(callback(file, relPath)); - })); + promises.push( + new Promise((resolve) => { + resolve(callback(file, relPath)); + }) + ); }); resolve(); diff --git a/config/jest.config.js b/config/jest.config.js --- a/config/jest.config.js +++ b/config/jest.config.js @@ -2,34 +2,46 @@ const defaults = { rootDir: "src", preset: "ts-jest", testEnvironment: "jsdom", - setupFiles: ["<rootDir>/config/jest/setup.ts"], + setupFilesAfterEnv: ["<rootDir>/config/jest/setup.ts"], + globals: { + __DEV__: true, + }, testEnvironmentOptions: { url: "http://localhost", }, snapshotFormat: { escapeString: true, - printBasicPrototype: true + printBasicPrototype: true, }, transform: { - '^.+\\.tsx?$': [ - 'ts-jest', + "^.+\\.tsx?$": [ + "ts-jest", { diagnostics: { - warnOnly: process.env.TEST_ENV !== 'ci' + warnOnly: process.env.TEST_ENV !== "ci", }, }, ], }, + resolver: "ts-jest-resolver", }; -const ignoreTSFiles = '.ts$'; -const ignoreTSXFiles = '.tsx$'; +const ignoreTSFiles = ".ts$"; +const ignoreTSXFiles = ".tsx$"; + +const react17TestFileIgnoreList = [ + ignoreTSFiles, + // For now, we only support useSuspenseQuery with React 18, so no need to test + // it with React 17 + "src/react/hooks/__tests__/useSuspenseQuery.test.tsx", + "src/react/hooks/__tests__/useBackgroundQuery.test.tsx", +]; const tsStandardConfig = { ...defaults, - displayName: 'Core Tests', + displayName: "Core Tests", testPathIgnorePatterns: [ignoreTSXFiles], -} +}; // For both React (Jest) "projects", ignore core tests (.ts files) as they // do not import React, to avoid running them twice. 
@@ -42,7 +54,7 @@ const standardReact18Config = { const standardReact17Config = { ...defaults, displayName: "ReactDOM 17", - testPathIgnorePatterns: [ignoreTSFiles], + testPathIgnorePatterns: react17TestFileIgnoreList, moduleNameMapper: { "^react$": "react-17", "^react-dom$": "react-dom-17", @@ -53,9 +65,5 @@ const standardReact17Config = { }; module.exports = { - projects: [ - tsStandardConfig, - standardReact17Config, - standardReact18Config, - ], + projects: [tsStandardConfig, standardReact17Config, standardReact18Config], }; diff --git a/config/postprocessDist.ts b/config/postprocessDist.ts --- a/config/postprocessDist.ts +++ b/config/postprocessDist.ts @@ -1,141 +1,14 @@ -import * as fs from "fs"; -import * as path from "path"; -import resolve from "resolve"; -import { distDir, eachFile, reparse, reprint } from './helpers'; - -import { applyDistSpotFixes } from "./distSpotFixes"; -applyDistSpotFixes(); - -// The primary goal of the 'npm run resolve' script is to make ECMAScript -// modules exposed by Apollo Client easier to consume natively in web browsers, -// without bundling, and without help from package.json files. It accomplishes -// this goal by rewriting internal ./ and ../ (relative) imports to refer to a -// specific ESM module (not a directory), including its file extension. Because -// of this limited goal, this script only touches ESM modules that have .js file -// extensions, not .cjs CommonJS bundles. - -// A secondary goal of this script is to enforce that any module using the -// __DEV__ global constant imports the @apollo/client/utilities/globals polyfill -// module first. - -eachFile(distDir, (file, relPath) => new Promise((resolve, reject) => { - fs.readFile(file, "utf8", (error, source) => { - if (error) return reject(error); - - const tr = new Transformer; - const output = tr.transform(source, file); - - if ( - /\b__DEV__\b/.test(source) && - // Ignore modules that reside within @apollo/client/utilities/globals. - relPath.split(path.sep, 2).join("/") !== "utilities/globals" - ) { - let importsUtilitiesGlobals = false; - - tr.absolutePaths.forEach(absPath => { - const distRelativePath = - path.relative(distDir, absPath).split(path.sep).join("/"); - if (distRelativePath === "utilities/globals/index.js") { - importsUtilitiesGlobals = true; - } - }); - - if (!importsUtilitiesGlobals) { - reject(new Error(`Module ${ - relPath - } uses __DEV__ but does not import @apollo/client/utilities/globals`)); - } - } - - if (source === output) { - resolve(file); - } else { - fs.writeFile(file, output, "utf8", error => { - error ? 
reject(error) : resolve(file); - }); - } - }); -})); - -import * as recast from "recast"; -const n = recast.types.namedTypes; -type Node = recast.types.namedTypes.Node; - -class Transformer { - absolutePaths = new Set<string>(); - - transform(code: string, file: string) { - const ast = reparse(code); - const transformer = this; - - recast.visit(ast, { - visitImportDeclaration(path) { - this.traverse(path); - transformer.normalizeSourceString(file, path.node.source); - }, - - visitImportExpression(path) { - this.traverse(path); - transformer.normalizeSourceString(file, path.node.source); - }, - - visitExportAllDeclaration(path) { - this.traverse(path); - transformer.normalizeSourceString(file, path.node.source); - }, - - visitExportNamedDeclaration(path) { - this.traverse(path); - transformer.normalizeSourceString(file, path.node.source); - }, - }); - - return reprint(ast); - } - - isRelative(id: string) { - return id.startsWith("./") || id.startsWith("../"); - } - - normalizeSourceString(file: string, source?: Node | null) { - if (source && n.StringLiteral.check(source)) { - // We mostly only worry about normalizing _relative_ module identifiers, - // which start with a ./ or ../ and refer to other modules within the - // @apollo/client package, but we also manually normalize one non-relative - // identifier, ts-invariant/process, to prevent webpack 5 errors - // containing the phrase "failed to resolve only because it was resolved - // as fully specified," referring to webpack's resolve.fullySpecified - // option, which is apparently now true by default when the enclosing - // package's package.json file has "type": "module" (which became true for - // Apollo Client in v3.5). - if (source.value.split("/", 2).join("/") === "ts-invariant/process") { - source.value = "ts-invariant/process/index.js"; - } else if (this.isRelative(source.value)) { - try { - source.value = this.normalizeId(source.value, file); - } catch (error) { - console.error(`Failed to resolve ${source.value} in ${file} with error ${error}`); - process.exit(1); - } - } - } - } - - normalizeId(id: string, file: string) { - const basedir = path.dirname(file); - const absPath = resolve.sync(id, { - basedir, - extensions: [".mjs", ".js"], - packageFilter(pkg) { - return pkg.module ? { - ...pkg, - main: pkg.module, - } : pkg; - }, - }); - this.absolutePaths.add(absPath); - const relPath = path.relative(basedir, absPath); - const relId = relPath.split(path.sep).join('/'); - return this.isRelative(relId) ? 
relId : "./" + relId; - } -} +import { distDir } from "./helpers.ts"; +import fs from "node:fs"; +import path from "node:path"; + +const globalTypesFile = path.resolve(distDir, "utilities/globals/global.d.ts"); +fs.writeFileSync( + globalTypesFile, + fs + .readFileSync(globalTypesFile, "utf8") + .split("\n") + .filter((line) => line.trim() !== "const __DEV__: boolean;") + .join("\n"), + "utf8" +); diff --git a/config/precheck.js b/config/precheck.js --- a/config/precheck.js +++ b/config/precheck.js @@ -1,17 +1,10 @@ -const { - lockfileVersion, -} = require("../package-lock.json"); +const { lockfileVersion } = require("../package-lock.json"); const expectedVersion = 2; -if (typeof lockfileVersion !== "number" || - lockfileVersion < expectedVersion) { +if (typeof lockfileVersion !== "number" || lockfileVersion < expectedVersion) { throw new Error( - `Old lockfileVersion (${ - lockfileVersion - }) found in package-lock.json (expected ${ - expectedVersion - } or later)` + `Old lockfileVersion (${lockfileVersion}) found in package-lock.json (expected ${expectedVersion} or later)` ); } diff --git a/config/prepareDist.js b/config/prepareDist.js --- a/config/prepareDist.js +++ b/config/prepareDist.js @@ -11,23 +11,22 @@ // - Create a new `package.json` for each sub-set bundle we support, and // store it in the appropriate dist sub-directory. -const fs = require('fs'); -const path = require('path'); -const recast = require('recast'); +const fs = require("fs"); +const path = require("path"); +const recast = require("recast"); const distRoot = `${__dirname}/../dist`; - /* @apollo/client */ -const packageJson = require('../package.json'); -const entryPoints = require('./entryPoints.js'); +const packageJson = require("../package.json"); +const entryPoints = require("./entryPoints.js"); // Enable default interpretation of .js files as ECMAScript modules. We don't // put this in the source ../package.json file because it interferes with tools // like ts-node, which we use to run various ../config/*.ts scripts. // TODO(benjamn) Fully diagnose that interference. -packageJson.type = 'module'; +packageJson.type = "module"; // The root package.json is marked as private to prevent publishing // from happening in the root of the project. This sets the package back to @@ -43,14 +42,19 @@ delete packageJson.engines; // on-going package development (e.g. running tests, supporting npm link, etc.). // When publishing from "dist" however, we need to update the package.json // to point to the files within the same directory. 
-const distPackageJson = JSON.stringify(packageJson, (_key, value) => { - if (typeof value === 'string' && value.startsWith('./dist/')) { - const parts = value.split('/'); - parts.splice(1, 1); // remove dist - return parts.join('/'); - } - return value; -}, 2) + "\n"; +const distPackageJson = + JSON.stringify( + packageJson, + (_key, value) => { + if (typeof value === "string" && value.startsWith("./dist/")) { + const parts = value.split("/"); + parts.splice(1, 1); // remove dist + return parts.join("/"); + } + return value; + }, + 2 + ) + "\n"; // Save the modified package.json to "dist" fs.writeFileSync(`${distRoot}/package.json`, distPackageJson); @@ -58,8 +62,8 @@ fs.writeFileSync(`${distRoot}/package.json`, distPackageJson); // Copy supporting files into "dist" const srcDir = `${__dirname}/..`; const destDir = `${srcDir}/dist`; -fs.copyFileSync(`${srcDir}/README.md`, `${destDir}/README.md`); -fs.copyFileSync(`${srcDir}/LICENSE`, `${destDir}/LICENSE`); +fs.copyFileSync(`${srcDir}/README.md`, `${destDir}/README.md`); +fs.copyFileSync(`${srcDir}/LICENSE`, `${destDir}/LICENSE`); // Create individual bundle package.json files, storing them in their // associated dist directory. This helps provide a way for the Apollo Client @@ -73,14 +77,18 @@ entryPoints.forEach(function buildPackageJson({ }) { if (!dirs.length) return; fs.writeFileSync( - path.join(distRoot, ...dirs, 'package.json'), - JSON.stringify({ - name: path.posix.join('@apollo', 'client', ...dirs), - type: "module", - main: `${bundleName}.cjs`, - module: 'index.js', - types: 'index.d.ts', - sideEffects, - }, null, 2) + "\n", + path.join(distRoot, ...dirs, "package.json"), + JSON.stringify( + { + name: path.posix.join("@apollo", "client", ...dirs), + type: "module", + main: `${bundleName}.cjs`, + module: "index.js", + types: "index.d.ts", + sideEffects, + }, + null, + 2 + ) + "\n" ); }); diff --git a/config/processInvariants.ts b/config/processInvariants.ts --- a/config/processInvariants.ts +++ b/config/processInvariants.ts @@ -1,6 +1,7 @@ import * as fs from "fs"; -import * as path from "path"; -import { distDir, eachFile, reparse, reprint } from './helpers'; +import { posix, join as osPathJoin } from "path"; +import { distDir, eachFile, reparse, reprint } from "./helpers.ts"; +import type { ExpressionKind } from "ast-types/lib/gen/kinds"; eachFile(distDir, (file, relPath) => { const source = fs.readFileSync(file, "utf8"); @@ -10,10 +11,10 @@ eachFile(distDir, (file, relPath) => { } }).then(() => { fs.writeFileSync( - path.join(distDir, "invariantErrorCodes.js"), - recast.print(errorCodeManifest, { + osPathJoin(distDir, "invariantErrorCodes.js"), + recast.print(program, { tabWidth: 2, - }).code + "\n", + }).code + "\n" ); }); @@ -21,48 +22,110 @@ import * as recast from "recast"; const b = recast.types.builders; const n = recast.types.namedTypes; type Node = recast.types.namedTypes.Node; -type NumericLiteral = recast.types.namedTypes.NumericLiteral; type CallExpression = recast.types.namedTypes.CallExpression; type NewExpression = recast.types.namedTypes.NewExpression; let nextErrorCode = 1; -const errorCodeManifest = b.objectExpression([ - b.property("init", - b.stringLiteral("@apollo/client version"), - b.stringLiteral(require("../package.json").version), +const program = b.program([]); +const allExports = { + errorCodes: getExportObject("errorCodes"), + devDebug: getExportObject("devDebug"), + devLog: getExportObject("devLog"), + devWarn: getExportObject("devWarn"), + devError: getExportObject("devError"), +}; +type 
ExportName = keyof typeof allExports; + +allExports.errorCodes.comments = [ + b.commentLine( + " This file is used by the error message display website and the", + true ), -]); - -errorCodeManifest.comments = [ - b.commentLine(' This file is meant to help with looking up the source of errors like', true), - b.commentLine(' "Invariant Violation: 35" and is automatically generated by the file', true), - b.commentLine(' @apollo/client/config/processInvariants.ts for each @apollo/client', true), - b.commentLine(' release. The numbers may change from release to release, so please', true), - b.commentLine(' consult the @apollo/client/invariantErrorCodes.js file specific to', true), - b.commentLine(' your @apollo/client version. This file is not meant to be imported.', true), + b.commentLine(" @apollo/client/includeErrors entry point.", true), + b.commentLine(" This file is not meant to be imported manually.", true), ]; +function getExportObject(exportName: string) { + const object = b.objectExpression([]); + program.body.push( + b.exportNamedDeclaration( + b.variableDeclaration("const", [ + b.variableDeclarator(b.identifier(exportName), object), + ]) + ) + ); + return object; +} + function getErrorCode( file: string, expr: CallExpression | NewExpression, -): NumericLiteral { - const numLit = b.numericLiteral(nextErrorCode++); - errorCodeManifest.properties.push( - b.property("init", numLit, b.objectExpression([ - b.property("init", b.identifier("file"), b.stringLiteral("@apollo/client/" + file)), - b.property("init", b.identifier("node"), expr), - ])), - ); - return numLit; -} + type: keyof typeof allExports +): ExpressionKind { + if (isIdWithName(expr.callee, "invariant")) { + return extractString( + file, + allExports[type].properties, + expr.arguments[1], + expr.arguments[0] + ); + } else { + return extractString(file, allExports[type].properties, expr.arguments[0]); + } + + function extractString( + file: string, + target: (typeof allExports)[ExportName]["properties"], + message: recast.types.namedTypes.SpreadElement | ExpressionKind, + condition?: recast.types.namedTypes.SpreadElement | ExpressionKind + ): ExpressionKind { + if (message.type === "ConditionalExpression") { + return b.conditionalExpression( + message.test, + extractString(file, target, message.consequent, condition), + extractString(file, target, message.alternate, condition) + ); + } else if (isStringOnly(message)) { + const messageText = reprint(message); + if (messageText.includes("Apollo DevTools")) { + return message; + } + + const obj = b.objectExpression([]); + const numLit = b.numericLiteral(nextErrorCode++); + target.push(b.property("init", numLit, obj)); + + obj.properties.push( + b.property( + "init", + b.identifier("file"), + b.stringLiteral("@apollo/client/" + file) + ) + ); + if (condition) { + obj.properties.push( + b.property( + "init", + b.identifier("condition"), + b.stringLiteral(reprint(expr.arguments[0])) + ) + ); + } + obj.properties.push(b.property("init", b.identifier("message"), message)); + + return numLit; + } else { + throw new Error(`invariant minification error: node cannot have dynamical error argument! + file: ${posix.join(distDir, file)}:${expr.loc?.start.line} + code: -function transform(code: string, file: string) { - // If the code doesn't seem to contain anything invariant-related, we - // can skip parsing and transforming it. 
- if (!/invariant/i.test(code)) { - return code; + ${reprint(message)} + `); + } } +} +function transform(code: string, relativeFilePath: string) { const ast = reparse(code); recast.visit(ast, { @@ -71,81 +134,113 @@ function transform(code: string, file: string) { const node = path.node; if (isCallWithLength(node, "invariant", 1)) { - if (isDEVConditional(path.parent.node)) { - return; - } + const newArgs = [...node.arguments]; + newArgs.splice( + 1, + 1, + getErrorCode(relativeFilePath, node, "errorCodes") + ); - const newArgs = node.arguments.slice(0, 1); - newArgs.push(getErrorCode(file, node)); + return b.callExpression.from({ + ...node, + arguments: newArgs, + }); + } - return b.conditionalExpression( - makeDEVExpr(), - node, - b.callExpression.from({ - ...node, - arguments: newArgs, - }), + if (isCallWithLength(node, "newInvariantError", 0)) { + const newArgs = [...node.arguments]; + newArgs.splice( + 0, + 1, + getErrorCode(relativeFilePath, node, "errorCodes") ); - } - if (node.callee.type === "MemberExpression" && - isIdWithName(node.callee.object, "invariant") && - isIdWithName(node.callee.property, "debug", "log", "warn", "error")) { - if (isDEVLogicalAnd(path.parent.node)) { - return; - } - return b.logicalExpression("&&", makeDEVExpr(), node); + return b.callExpression.from({ + ...node, + arguments: newArgs, + }); } - }, - - visitNewExpression(path) { - this.traverse(path); - const node = path.node; - if (isCallWithLength(node, "InvariantError", 0)) { - if (isDEVConditional(path.parent.node)) { - return; - } - const newArgs = [getErrorCode(file, node)]; + if ( + node.callee.type === "MemberExpression" && + isIdWithName(node.callee.object, "invariant") && + isIdWithName(node.callee.property, "debug", "log", "warn", "error") + ) { + let newNode = node; + if (node.arguments[0].type !== "Identifier") { + const prop = node.callee.property; + if (!n.Identifier.check(prop)) throw new Error("unexpected type"); - return b.conditionalExpression( - makeDEVExpr(), - node, - b.newExpression.from({ + const newArgs = [...node.arguments]; + newArgs.splice( + 0, + 1, + getErrorCode( + relativeFilePath, + node, + ("dev" + capitalize(prop.name)) as ExportName + ) + ); + newNode = b.callExpression.from({ ...node, arguments: newArgs, - }), - ); + }); + } + + if (isDEVLogicalAnd(path.parent.node)) { + return newNode; + } + return b.logicalExpression("&&", makeDEVExpr(), newNode); } - } + }, }); + if ( + !["utilities/globals/index.js", "config/jest/setup.js"].includes( + relativeFilePath + ) + ) + recast.visit(ast, { + visitIdentifier(path) { + this.traverse(path); + const node = path.node; + if (isDEVExpr(node)) { + return b.binaryExpression( + "!==", + b.memberExpression( + b.identifier("globalThis"), + b.identifier("__DEV__") + ), + b.literal(false) + ); + } + return node; + }, + }); + return reprint(ast); } -function isIdWithName(node: Node, ...names: string[]) { - return n.Identifier.check(node) && - names.some(name => name === node.name); +function isIdWithName(node: Node | null | undefined, ...names: string[]) { + return ( + node && n.Identifier.check(node) && names.some((name) => name === node.name) + ); } function isCallWithLength( node: CallExpression | NewExpression, name: string, - length: number, + length: number ) { - return isIdWithName(node.callee, name) && - node.arguments.length > length; -} - -function isDEVConditional(node: Node) { - return n.ConditionalExpression.check(node) && - isDEVExpr(node.test); + return isIdWithName(node.callee, name) && node.arguments.length > length; } 
function isDEVLogicalAnd(node: Node) { - return n.LogicalExpression.check(node) && + return ( + n.LogicalExpression.check(node) && node.operator === "&&" && - isDEVExpr(node.left); + isDEVExpr(node.left) + ); } function makeDEVExpr() { @@ -155,3 +250,28 @@ function makeDEVExpr() { function isDEVExpr(node: Node) { return isIdWithName(node, "__DEV__"); } + +function isStringOnly( + node: recast.types.namedTypes.ASTNode +): node is ExpressionKind { + switch (node.type) { + case "StringLiteral": + case "Literal": + return true; + case "TemplateLiteral": + return (node.expressions as recast.types.namedTypes.ASTNode[]).every( + isStringOnly + ); + case "BinaryExpression": + return ( + node.operator == "+" && + isStringOnly(node.left) && + isStringOnly(node.right) + ); + } + return false; +} + +function capitalize(str: string) { + return str[0].toUpperCase() + str.slice(1); +} diff --git a/config/rewriteSourceMaps.ts b/config/rewriteSourceMaps.ts --- a/config/rewriteSourceMaps.ts +++ b/config/rewriteSourceMaps.ts @@ -1,9 +1,9 @@ import * as fs from "fs"; import * as path from "path"; -import { distDir } from './helpers'; +import { distDir } from "./helpers.ts"; import glob = require("glob"); -glob(`${distDir.replace(/\\/g, '/')}/**/*.js.map`, (error, files) => { +glob(`${distDir.replace(/\\/g, "/")}/**/*.js.map`, (error, files) => { if (error) throw error; const rootDir = path.dirname(distDir); @@ -11,34 +11,38 @@ glob(`${distDir.replace(/\\/g, '/')}/**/*.js.map`, (error, files) => { const startTime = Date.now(); let rewriteCount = 0; - Promise.all(files.map(async file => { - const content = await fs.promises.readFile(file, "utf8"); - const map = JSON.parse(content); - if (map.sourcesContent) return; - if (map.sources) { - map.sourcesContent = await Promise.all( - map.sources.map((relSourcePath: string) => { - const sourcePath = path.normalize( - path.join(path.dirname(file), relSourcePath)); - const relPath = path.relative(rootDir, sourcePath); - // Disallow reading paths outside rootDir. - if (relPath.startsWith("../")) { - throw new Error(`Bad path: ${sourcePath}`); - } - return fs.promises.readFile(sourcePath, "utf8"); - }) + Promise.all( + files.map(async (file) => { + const content = await fs.promises.readFile(file, "utf8"); + const map = JSON.parse(content); + if (map.sourcesContent) return; + if (map.sources) { + map.sourcesContent = await Promise.all( + map.sources.map((relSourcePath: string) => { + const sourcePath = path.normalize( + path.join(path.dirname(file), relSourcePath) + ); + const relPath = path.relative(rootDir, sourcePath); + // Disallow reading paths outside rootDir. 
+ if (relPath.startsWith("../")) { + throw new Error(`Bad path: ${sourcePath}`); + } + return fs.promises.readFile(sourcePath, "utf8"); + }) + ); + ++rewriteCount; + return fs.promises.writeFile(file, JSON.stringify(map)); + } + }) + ).then( + () => { + console.log( + `Rewrote ${rewriteCount} source maps in ${Date.now() - startTime}ms` ); - ++rewriteCount; - return fs.promises.writeFile(file, JSON.stringify(map)); + }, + (error) => { + console.error(error); + process.exit(-1); } - })).then(() => { - console.log(`Rewrote ${ - rewriteCount - } source maps in ${ - Date.now() - startTime - }ms`); - }, error => { - console.error(error); - process.exit(-1); - }); + ); }); diff --git a/config/rollup.config.js b/config/rollup.config.js --- a/config/rollup.config.js +++ b/config/rollup.config.js @@ -1,38 +1,32 @@ -import path from 'path'; +import path, { resolve, dirname } from "path"; import { promises as fs } from "fs"; -import nodeResolve from '@rollup/plugin-node-resolve'; -import { terser as minify } from 'rollup-plugin-terser'; +import nodeResolve from "@rollup/plugin-node-resolve"; +import { terser as minify } from "rollup-plugin-terser"; -const entryPoints = require('./entryPoints'); -const distDir = './dist'; +const entryPoints = require("./entryPoints"); +const distDir = "./dist"; function isExternal(id, parentId, entryPointsAreExternal = true) { - let posixId = toPosixPath(id) + let posixId = toPosixPath(id); const posixParentId = toPosixPath(parentId); // Rollup v2.26.8 started passing absolute id strings to this function, thanks // apparently to https://github.com/rollup/rollup/pull/3753, so we relativize // the id again in those cases. if (path.isAbsolute(id)) { - posixId = path.posix.relative( - path.posix.dirname(posixParentId), - posixId, - ); + posixId = path.posix.relative(path.posix.dirname(posixParentId), posixId); if (!posixId.startsWith(".")) { posixId = "./" + posixId; } } - const isRelative = - posixId.startsWith("./") || - posixId.startsWith("../"); + const isRelative = posixId.startsWith("./") || posixId.startsWith("../"); if (!isRelative) { return true; } - if (entryPointsAreExternal && - entryPoints.check(posixId, posixParentId)) { + if (entryPointsAreExternal && entryPoints.check(posixId, posixParentId)) { return true; } @@ -43,14 +37,14 @@ function isExternal(id, parentId, entryPointsAreExternal = true) { function toPosixPath(p) { // Sometimes, you can have a path like \Users\IEUser on windows, and this // actually means you want C:\Users\IEUser - if (p[0] === '\\') { + if (p[0] === "\\") { p = process.env.SystemDrive + p; } - p = p.replace(/\\/g, '/'); - if (p[1] === ':') { + p = p.replace(/\\/g, "/"); + if (p[1] === ":") { // Transform "C:/bla/bla" to "/c/bla/bla" - p = '/' + p[0] + p.slice(2); + p = "/" + p[0] + p.slice(2); } return p; @@ -64,14 +58,12 @@ function prepareCJS(input, output) { }, output: { file: output, - format: 'cjs', + format: "cjs", sourcemap: true, - exports: 'named', + exports: "named", externalLiveBindings: false, }, - plugins: [ - nodeResolve(), - ], + plugins: [nodeResolve()], }; } @@ -79,8 +71,8 @@ function prepareCJSMinified(input) { return { input, output: { - file: input.replace('.cjs', '.min.cjs'), - format: 'cjs', + file: input.replace(".cjs", ".min.cjs"), + format: "cjs", }, plugins: [ minify({ @@ -90,7 +82,7 @@ function prepareCJSMinified(input) { compress: { toplevel: true, global_defs: { - '@__DEV__': 'false', + "@globalThis.__DEV__": "false", }, }, }), @@ -109,26 +101,48 @@ function prepareBundle({ return { input: inputFile, - 
external(id, parentId) { - return isExternal(id, parentId, true); - }, + // the external check is done by the `'externalize-dependency'` plugin + // external(id, parentId) {} output: { file: outputFile, - format: 'cjs', + format: "cjs", sourcemap: true, - exports: 'named', + exports: "named", externalLiveBindings: false, }, plugins: [ + { + name: "externalize-dependency", + resolveId(id, parentId) { + if (!parentId) { + return null; + } + function removeIndex(filename) { + if (filename.endsWith(`${path.sep}index.js`)) { + return filename.slice(0, -`${path.sep}index.js`.length); + } + return filename; + } + + const external = isExternal(id, parentId, true); + if (external) { + if (id.startsWith(".")) { + return { + id: removeIndex(resolve(dirname(parentId), id)), + external: true, + }; + } + return { id: removeIndex(id), external: true }; + } + return null; + }, + }, extensions ? nodeResolve({ extensions }) : nodeResolve(), { name: "copy *.cjs to *.cjs.native.js", async writeBundle({ file }) { const buffer = await fs.readFile(file); - await fs.writeFile( - file + ".native.js", - buffer, - ); + await fs.writeFile(file + ".native.js", buffer); }, }, ], @@ -138,11 +152,6 @@ function prepareBundle({ export default [ ...entryPoints.map(prepareBundle), // Convert the ESM entry point to a single CJS bundle. - prepareCJS( - './dist/index.js', - './dist/apollo-client.cjs', - ), - prepareCJSMinified( - './dist/apollo-client.cjs', - ), + prepareCJS("./dist/index.js", "./dist/apollo-client.cjs"), + prepareCJSMinified("./dist/apollo-client.cjs"), ]; diff --git a/config/version.js b/config/version.js --- a/config/version.js +++ b/config/version.js @@ -7,8 +7,9 @@ const pkgJsonPath = path.join(__dirname, "..", "package.json"); const { version } = JSON.parse(fs.readFileSync(pkgJsonPath)); assert.strictEqual( - typeof version, "string", - '"version" field missing from package.json', + typeof version, + "string", + '"version" field missing from package.json' ); switch (process.argv[2]) { @@ -18,8 +19,9 @@ switch (process.argv[2]) { .replace(/\blocal\b/, version); assert.notEqual( - updated.indexOf(version), -1, - "Failed to update dist/version.js with @apollo/client version", + updated.indexOf(version), + -1, + "Failed to update dist/version.js with @apollo/client version" ); fs.writeFileSync(versionPath, updated); @@ -28,15 +30,16 @@ switch (process.argv[2]) { } case "verify": { - const { - ApolloClient, - InMemoryCache, - } = require(path.join(distRoot, "core", "core.cjs")); + const { ApolloClient, InMemoryCache } = require(path.join( + distRoot, + "core", + "core.cjs" + )); // Though this may seem like overkill, verifying that ApolloClient is // constructible in Node.js is actually pretty useful, too! const client = new ApolloClient({ - cache: new InMemoryCache, + cache: new InMemoryCache(), }); // Probably not necessary, but it seems wise to clean up any resources @@ -50,17 +53,16 @@ switch (process.argv[2]) { // convenient because dist/version.js uses ECMAScript module syntax, and is // thus not importable in all versions of Node.js. 
assert.strictEqual( - client.version, version, - "Failed to update dist/version.js and dist/core/core.cjs", + client.version, + version, + "Failed to update dist/version.js and dist/core/core.cjs" ); break; } default: - throw new Error( - "Pass either 'update' or 'verify' to config/version.js" - ); + throw new Error("Pass either 'update' or 'verify' to config/version.js"); } console.log("ok"); diff --git a/integration-tests/browser-esm/playwright.config.ts b/integration-tests/browser-esm/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/browser-esm/playwright.config.ts @@ -0,0 +1,4 @@ +import { baseConfig } from "shared/playwright.config"; +import { defineConfig } from "@playwright/test"; + +export default defineConfig(baseConfig); diff --git a/integration-tests/cra4/playwright.config.ts b/integration-tests/cra4/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/cra4/playwright.config.ts @@ -0,0 +1,4 @@ +import { baseConfig } from "shared/playwright.config"; +import { defineConfig } from "@playwright/test"; + +export default defineConfig(baseConfig); diff --git a/integration-tests/cra5/playwright.config.ts b/integration-tests/cra5/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/cra5/playwright.config.ts @@ -0,0 +1,4 @@ +import { baseConfig } from "shared/playwright.config"; +import { defineConfig } from "@playwright/test"; + +export default defineConfig(baseConfig); diff --git a/integration-tests/next/next-env.d.ts b/integration-tests/next/next-env.d.ts new file mode 100644 --- /dev/null +++ b/integration-tests/next/next-env.d.ts @@ -0,0 +1,6 @@ +/// <reference types="next" /> +/// <reference types="next/image-types/global" /> +/// <reference types="next/navigation-types/compat/navigation" /> + +// NOTE: This file should not be edited +// see https://nextjs.org/docs/basic-features/typescript for more information. 
diff --git a/integration-tests/next/next.config.js b/integration-tests/next/next.config.js new file mode 100644 --- /dev/null +++ b/integration-tests/next/next.config.js @@ -0,0 +1,4 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = {}; + +module.exports = nextConfig; diff --git a/integration-tests/next/playwright.config.ts b/integration-tests/next/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/next/playwright.config.ts @@ -0,0 +1,4 @@ +import { baseConfig } from "shared/playwright.config"; +import { defineConfig } from "@playwright/test"; + +export default defineConfig(baseConfig); diff --git a/integration-tests/next/src/app/client.ts b/integration-tests/next/src/app/client.ts new file mode 100644 --- /dev/null +++ b/integration-tests/next/src/app/client.ts @@ -0,0 +1,10 @@ +import { schemaLink } from "@/libs/schemaLink"; +import { ApolloClient, InMemoryCache } from "@apollo/client"; +import { registerApolloClient } from "@apollo/experimental-nextjs-app-support/rsc"; + +export const { getClient } = registerApolloClient(() => { + return new ApolloClient({ + cache: new InMemoryCache(), + link: schemaLink, + }); +}); diff --git a/integration-tests/next/src/libs/apolloClient.ts b/integration-tests/next/src/libs/apolloClient.ts new file mode 100644 --- /dev/null +++ b/integration-tests/next/src/libs/apolloClient.ts @@ -0,0 +1,111 @@ +import { useRef } from "react"; +import type { NormalizedCacheObject } from "@apollo/client"; +import { + ApolloClient, + HttpLink, + InMemoryCache, + from, + ApolloLink, + Observable, +} from "@apollo/client"; +import { onError } from "@apollo/client/link/error"; +import merge from "deepmerge"; +import isEqual from "lodash/isEqual"; +import type { GetServerSidePropsResult } from "next"; +import { schemaLink } from "./schemaLink"; + +export const APOLLO_STATE_PROP_NAME = "__APOLLO_STATE__"; + +let apolloClient: ApolloClient<NormalizedCacheObject>; + +const errorLink = onError(({ graphQLErrors, networkError }) => { + if (graphQLErrors) + graphQLErrors.forEach(({ message, locations, path }) => + console.log( + `[GraphQL error]: Message: ${message}, Location: ${locations}, Path: ${path}` + ) + ); + if (networkError) console.log(`[Network error]: ${networkError}`); +}); + +const delayLink = new ApolloLink((operation, forward) => { + return new Observable((observer) => { + const handle = setTimeout(() => { + forward(operation).subscribe(observer); + }, 1000); + + return () => clearTimeout(handle); + }); +}); + +const httpLink = new HttpLink({ + uri: "https://main--hack-the-e-commerce.apollographos.net/graphql", +}); + +function createApolloClient() { + return new ApolloClient({ + ssrMode: typeof window === "undefined", + link: from([ + errorLink, + delayLink, + typeof window === "undefined" ? schemaLink : httpLink, + ]), + cache: new InMemoryCache(), + }); +} + +export function initializeApollo( + initialState: NormalizedCacheObject | null = null +) { + const _apolloClient = apolloClient ?? 
createApolloClient(); + // If your page has Next.js data fetching methods that use Apollo Client, + // the initial state gets hydrated here + if (initialState) { + // Get existing cache, loaded during client side data fetching + const existingCache = _apolloClient.extract(); + + // Merge the initialState from getStaticProps/getServerSideProps + // in the existing cache + const data = merge(existingCache, initialState, { + // combine arrays using object equality (like in sets) + arrayMerge: (destinationArray, sourceArray) => [ + ...sourceArray, + ...destinationArray.filter((d) => + sourceArray.every((s) => !isEqual(d, s)) + ), + ], + }); + // Restore the cache with the merged data + _apolloClient.cache.restore(data); + } + // For SSG and SSR always create a new Apollo Client + if (typeof window === "undefined") return _apolloClient; + // Create the Apollo Client once in the client + if (!apolloClient) apolloClient = _apolloClient; + return _apolloClient; +} + +interface ApolloProps { + [APOLLO_STATE_PROP_NAME]: NormalizedCacheObject; +} + +export function addApolloState( + client: ApolloClient<NormalizedCacheObject>, + pageProps: GetServerSidePropsResult<Partial<ApolloProps>> & { + props: Partial<ApolloProps>; + } +) { + if (pageProps?.props) { + pageProps.props[APOLLO_STATE_PROP_NAME] = client.cache.extract(); + } + return pageProps; +} + +export function useApollo(pageProps?: ApolloProps) { + const state = pageProps?.[APOLLO_STATE_PROP_NAME]; + const storeRef = useRef<ApolloClient<NormalizedCacheObject>>(); + if (!storeRef.current) { + storeRef.current = initializeApollo(state); + } + return storeRef.current; +} diff --git a/integration-tests/next/src/libs/schemaLink.ts b/integration-tests/next/src/libs/schemaLink.ts new file mode 100644 --- /dev/null +++ b/integration-tests/next/src/libs/schemaLink.ts @@ -0,0 +1,51 @@ +import { makeExecutableSchema } from "@graphql-tools/schema"; +import { gql } from "graphql-tag"; +import { SchemaLink } from "@apollo/client/link/schema"; + +const typeDefs = gql` + type Product { + id: String! + title: String! + } + type Query { + products: [Product!]! 
+ } +`; + +const resolvers = { + Query: { + products: async () => [ + { + id: "product:5", + title: "Soft Warm Apollo Beanie", + }, + { + id: "product:2", + title: "Stainless Steel Water Bottle", + }, + { + id: "product:3", + title: "Athletic Baseball Cap", + }, + { + id: "product:4", + title: "Baby Onesies", + }, + { + id: "product:1", + title: "The Apollo T-Shirt", + }, + { + id: "product:6", + title: "The Apollo Socks", + }, + ], + }, +}; + +export const schema = makeExecutableSchema({ + typeDefs, + resolvers, +}); + +export const schemaLink = new SchemaLink({ schema }); diff --git a/integration-tests/node-esm/test-cjs.cjs b/integration-tests/node-esm/test-cjs.cjs new file mode 100644 --- /dev/null +++ b/integration-tests/node-esm/test-cjs.cjs @@ -0,0 +1,45 @@ +const assert = require("node:assert"); +const path = require("path"); + +const { ApolloClient } = require("@apollo/client"); +const { useQuery } = require("@apollo/client/react"); +const { HttpLink } = require("@apollo/client/link/http"); + +console.log("Testing Node with CJS imports..."); + +function checkFunctionName(fn, name, category) { + console.log(`Checking ${category} '${name}' === '${fn.name}'`); + assert( + fn.name === name, + `${category} \`${name}\` did not import correctly (name: '${fn.name}')` + ); +} + +const entries = [ + [ApolloClient, "ApolloClient", "Barrel Import"], + [useQuery, "useQuery", "Apollo React"], + [HttpLink, "HttpLink", "Link"], +]; + +for (let [fn, name, category] of entries) { + try { + checkFunctionName(fn, name, category); + } catch (error) { + console.error(error); + } +} + +const moduleNames = [ + ["@apollo/client", "/main.cjs"], + ["@apollo/client/react", "/react/react.cjs"], + ["@apollo/client/link/http", "/link/http/http.cjs"], +]; + +for (let [moduleName, expectedFilename] of moduleNames) { + const modulePath = require.resolve(moduleName); + const posixPath = modulePath.split(path.sep).join(path.posix.sep); + console.log(`Module: ${moduleName}, path: ${posixPath}`); + assert(posixPath.endsWith(expectedFilename)); +} + +console.log("CJS test succeeded"); diff --git a/integration-tests/node-esm/test-esm.mjs b/integration-tests/node-esm/test-esm.mjs new file mode 100644 --- /dev/null +++ b/integration-tests/node-esm/test-esm.mjs @@ -0,0 +1,50 @@ +// TODO This entire file doesn't work yet without appending `/index.js` to all imports manually! + +import assert from "node:assert"; +import path from "path"; +import { importMetaResolve } from "resolve-esm"; + +import { ApolloClient } from "@apollo/client/index.js"; +import { useQuery } from "@apollo/client/react/index.js"; +import { HttpLink } from "@apollo/client/link/http/index.js"; + +console.log( + "Testing Node with ESM imports... 
(user-side workaround with `/index.js)" +); + +function checkFunctionName(fn, name, category) { + console.log(`Checking ${category} '${name}' === '${fn.name}'`); + assert( + fn.name === name, + `${category} \`${name}\` did not import correctly (name: '${fn.name}')` + ); +} + +const entries = [ + [ApolloClient, "ApolloClient", "Barrel Import"], + [useQuery, "useQuery", "Apollo React"], + [HttpLink, "HttpLink", "Link"], +]; + +for (let [fn, name, category] of entries) { + try { + checkFunctionName(fn, name, category); + } catch (error) { + console.error(error); + } +} + +const moduleNames = [ + ["@apollo/client/index.js", "/index.js"], + ["@apollo/client/react/index.js", "/react/index.js"], + ["@apollo/client/link/http/index.js", "/link/http/index.js"], +]; + +(async () => { + for (let [moduleName, expectedFilename] of moduleNames) { + const modulePath = await importMetaResolve(moduleName); + const posixPath = modulePath.split(path.sep).join(path.posix.sep); + console.log(`Module: ${moduleName}, path: ${posixPath}`); + assert(posixPath.endsWith(expectedFilename)); + } +})(); diff --git a/integration-tests/node-standard/test-cjs.js b/integration-tests/node-standard/test-cjs.js new file mode 100644 --- /dev/null +++ b/integration-tests/node-standard/test-cjs.js @@ -0,0 +1,45 @@ +const assert = require("node:assert"); +const path = require("path"); + +const { ApolloClient } = require("@apollo/client"); +const { useQuery } = require("@apollo/client/react"); +const { HttpLink } = require("@apollo/client/link/http"); + +console.log("Testing Node with CJS imports..."); + +function checkFunctionName(fn, name, category) { + console.log(`Checking ${category} '${name}' === '${fn.name}'`); + assert( + fn.name === name, + `${category} \`${name}\` did not import correctly (name: '${fn.name}')` + ); +} + +const entries = [ + [ApolloClient, "ApolloClient", "Barrel Import"], + [useQuery, "useQuery", "Apollo React"], + [HttpLink, "HttpLink", "Link"], +]; + +for (let [fn, name, category] of entries) { + try { + checkFunctionName(fn, name, category); + } catch (error) { + console.error(error); + } +} + +const moduleNames = [ + ["@apollo/client", "/main.cjs"], + ["@apollo/client/react", "/react/react.cjs"], + ["@apollo/client/link/http", "/link/http/http.cjs"], +]; + +for (let [moduleName, expectedFilename] of moduleNames) { + const modulePath = require.resolve(moduleName); + const posixPath = modulePath.split(path.sep).join(path.posix.sep); + console.log(`Module: ${moduleName}, path: ${posixPath}`); + assert(posixPath.endsWith(expectedFilename)); +} + +console.log("CJS test succeeded"); diff --git a/integration-tests/node-standard/test-esm.mjs b/integration-tests/node-standard/test-esm.mjs new file mode 100644 --- /dev/null +++ b/integration-tests/node-standard/test-esm.mjs @@ -0,0 +1,50 @@ +// TODO This entire file doesn't work yet without appending `/index.js` to all imports manually! + +import assert from "node:assert"; +import path from "path"; +import { importMetaResolve } from "resolve-esm"; + +import { ApolloClient } from "@apollo/client/index.js"; +import { useQuery } from "@apollo/client/react/index.js"; +import { HttpLink } from "@apollo/client/link/http/index.js"; + +console.log( + "Testing Node with ESM imports... 
(user-side workaround with `/index.js)" +); + +function checkFunctionName(fn, name, category) { + console.log(`Checking ${category} '${name}' === '${fn.name}'`); + assert( + fn.name === name, + `${category} \`${name}\` did not import correctly (name: '${fn.name}')` + ); +} + +const entries = [ + [ApolloClient, "ApolloClient", "Barrel Import"], + [useQuery, "useQuery", "Apollo React"], + [HttpLink, "HttpLink", "Link"], +]; + +for (let [fn, name, category] of entries) { + try { + checkFunctionName(fn, name, category); + } catch (error) { + console.error(error); + } +} + +const moduleNames = [ + ["@apollo/client/index.js", "/index.js"], + ["@apollo/client/react/index.js", "/react/index.js"], + ["@apollo/client/link/http/index.js", "/link/http/index.js"], +]; + +(async () => { + for (let [moduleName, expectedFilename] of moduleNames) { + const modulePath = await importMetaResolve(moduleName); + const posixPath = modulePath.split(path.sep).join(path.posix.sep); + console.log(`Module: ${moduleName}, path: ${posixPath}`); + assert(posixPath.endsWith(expectedFilename)); + } +})(); diff --git a/integration-tests/shared/fixture.ts b/integration-tests/shared/fixture.ts new file mode 100644 --- /dev/null +++ b/integration-tests/shared/fixture.ts @@ -0,0 +1,27 @@ +import { test as base, expect } from "@playwright/test"; + +export const test = base.extend<{ + withHar: import("@playwright/test").Page; + blockRequest: import("@playwright/test").Page; +}>({ + page: async ({ page }, use) => { + page.on("pageerror", (error) => { + expect(error.stack || error).toBe("no error"); + }); + await use(page); + }, + withHar: async ({ page }, use) => { + await page.routeFromHAR("../api.har", { + url: "**/graphql", + notFound: "abort", + }); + await use(page); + }, + blockRequest: async ({ page }, use) => { + await page.routeFromHAR("../empty.har", { + url: "**/graphql", + notFound: "abort", + }); + await use(page); + }, +}); diff --git a/integration-tests/shared/playwright.config.ts b/integration-tests/shared/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/shared/playwright.config.ts @@ -0,0 +1,15 @@ +export const baseConfig = { + webServer: { + command: "yarn serve-app", + url: "http://localhost:3000", + timeout: 120 * 1000, + reuseExistingServer: !process.env.CI, + }, + timeout: 120 * 1000, + use: { + headless: true, + viewport: { width: 1280, height: 720 }, + ignoreHTTPSErrors: true, + }, + testDir: "tests/playwright/", +} as const; diff --git a/integration-tests/vite-swc/playwright.config.ts b/integration-tests/vite-swc/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/vite-swc/playwright.config.ts @@ -0,0 +1,4 @@ +import { baseConfig } from "shared/playwright.config.ts"; +import { defineConfig } from "@playwright/test"; + +export default defineConfig(baseConfig); diff --git a/integration-tests/vite-swc/vite.config.ts b/integration-tests/vite-swc/vite.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/vite-swc/vite.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react-swc"; + +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [react()], +}); diff --git a/integration-tests/vite/playwright.config.ts b/integration-tests/vite/playwright.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/vite/playwright.config.ts @@ -0,0 +1,4 @@ +import { baseConfig } from "shared/playwright.config.ts"; +import { defineConfig } from "@playwright/test"; + +export 
default defineConfig(baseConfig); diff --git a/integration-tests/vite/vite.config.ts b/integration-tests/vite/vite.config.ts new file mode 100644 --- /dev/null +++ b/integration-tests/vite/vite.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vite"; +import react from "@vitejs/plugin-react"; + +// https://vitejs.dev/config/ +export default defineConfig({ + plugins: [react()], +}); diff --git a/src/cache/core/cache.ts b/src/cache/core/cache.ts --- a/src/cache/core/cache.ts +++ b/src/cache/core/cache.ts @@ -1,17 +1,20 @@ -import { DocumentNode } from 'graphql'; +import type { DocumentNode } from 'graphql'; import { wrap } from 'optimism'; -import { +import type { StoreObject, - Reference, + Reference} from '../../utilities/index.js'; +import { getFragmentQueryDocument, -} from '../../utilities'; -import { DataProxy } from './types/DataProxy'; -import { Cache } from './types/Cache'; +} from '../../utilities/index.js'; +import type { DataProxy } from './types/DataProxy.js'; +import type { Cache } from './types/Cache.js'; export type Transaction<T> = (c: ApolloCache<T>) => void; export abstract class ApolloCache<TSerialized> implements DataProxy { + public readonly assumeImmutableResults: boolean = false; + // required to implement // core API public abstract read<TData = any, TVariables = any>( @@ -117,7 +120,7 @@ export abstract class ApolloCache<TSerialized> implements DataProxy { return []; } - public modify(options: Cache.ModifyOptions): boolean { + public modify<Entity extends Record<string, any> = Record<string, any>>(options: Cache.ModifyOptions<Entity>): boolean { return false; } diff --git a/src/cache/core/types/Cache.ts b/src/cache/core/types/Cache.ts --- a/src/cache/core/types/Cache.ts +++ b/src/cache/core/types/Cache.ts @@ -1,6 +1,6 @@ -import { DataProxy } from './DataProxy'; -import { Modifier, Modifiers } from './common'; -import { ApolloCache } from '../cache'; +import { DataProxy } from './DataProxy.js'; +import type { AllFieldsModifier, Modifiers } from './common.js';; +import type { ApolloCache } from '../cache.js'; export namespace Cache { export type WatchCallback<TData = any> = ( @@ -57,9 +57,9 @@ export namespace Cache { discardWatches?: boolean; } - export interface ModifyOptions { + export interface ModifyOptions<Entity extends Record<string, any> = Record<string, any>> { id?: string; - fields: Modifiers | Modifier<any>; + fields: Modifiers<Entity> | AllFieldsModifier<Entity>; optimistic?: boolean; broadcast?: boolean; } diff --git a/src/cache/core/types/DataProxy.ts b/src/cache/core/types/DataProxy.ts --- a/src/cache/core/types/DataProxy.ts +++ b/src/cache/core/types/DataProxy.ts @@ -1,8 +1,8 @@ -import { DocumentNode } from 'graphql'; // eslint-disable-line import/no-extraneous-dependencies, import/no-unresolved -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import type { DocumentNode } from 'graphql'; // ignore-comment eslint-disable-line import/no-extraneous-dependencies, import/no-unresolved +import type { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { MissingFieldError } from './common'; -import { Reference } from '../../../utilities'; +import type { MissingFieldError } from './common.js'; +import type { Reference } from '../../../utilities/index.js'; export namespace DataProxy { export interface Query<TVariables, TData> { diff --git a/src/cache/core/types/common.ts b/src/cache/core/types/common.ts --- a/src/cache/core/types/common.ts +++ b/src/cache/core/types/common.ts @@ -1,13 +1,13 @@ -import { 
DocumentNode, FieldNode } from 'graphql'; +import type { DocumentNode, FieldNode } from 'graphql'; -import { +import type { Reference, StoreObject, StoreValue, isReference, -} from '../../../utilities'; +} from '../../../utilities/index.js'; -import { StorageType } from '../../inmemory/policies'; +import type { StorageType } from '../../inmemory/policies.js'; // The Readonly<T> type only really works for object types, since it marks // all of the object's properties as readonly, but there are many cases when @@ -76,9 +76,14 @@ export type ToReferenceFunction = ( export type CanReadFunction = (value: StoreValue) => boolean; +declare const _deleteModifier: unique symbol; +export interface DeleteModifier { [_deleteModifier]: true } +declare const _invalidateModifier: unique symbol; +export interface InvalidateModifier { [_invalidateModifier]: true} + export type ModifierDetails = { - DELETE: any; - INVALIDATE: any; + DELETE: DeleteModifier; + INVALIDATE: InvalidateModifier; fieldName: string; storeFieldName: string; readField: ReadFieldFunction; @@ -88,8 +93,28 @@ export type ModifierDetails = { storage: StorageType; } -export type Modifier<T> = (value: T, details: ModifierDetails) => T; - -export type Modifiers = { - [fieldName: string]: Modifier<any>; -}; +export type Modifier<T> = ( + value: T, + details: ModifierDetails +) => T | DeleteModifier | InvalidateModifier; + +type StoreObjectValueMaybeReference<StoreVal> = + StoreVal extends Record<string, any>[] + ? Readonly<StoreVal> | readonly Reference[] + : StoreVal extends Record<string, any> + ? StoreVal | Reference + : StoreVal; + +export type AllFieldsModifier< + Entity extends Record<string, any> +> = Modifier<Entity[keyof Entity] extends infer Value ? + StoreObjectValueMaybeReference<Exclude<Value, undefined>> + : never>; + +export type Modifiers< + T extends Record<string, any> = Record<string, unknown> +> = Partial<{ + [FieldName in keyof T]: Modifier< + StoreObjectValueMaybeReference<Exclude<T[FieldName], undefined>> + >; +}>; diff --git a/src/cache/index.ts b/src/cache/index.ts --- a/src/cache/index.ts +++ b/src/cache/index.ts @@ -1,40 +1,35 @@ -import '../utilities/globals'; +import '../utilities/globals/index.js'; -export { Transaction, ApolloCache } from './core/cache'; -export { Cache } from './core/types/Cache'; -export { DataProxy } from './core/types/DataProxy'; -export { +export type { Transaction } from './core/cache.js'; +export { ApolloCache } from './core/cache.js'; +export { Cache } from './core/types/Cache.js'; +export type { DataProxy } from './core/types/DataProxy.js'; +export type { MissingTree, Modifier, Modifiers, ModifierDetails, - MissingFieldError, ReadFieldOptions -} from './core/types/common'; +} from './core/types/common.js'; +export { MissingFieldError } from './core/types/common.js'; -export { - Reference, - isReference, - makeReference, -} from '../utilities'; +export type { Reference } from '../utilities/index.js'; +export { isReference, makeReference } from '../utilities/index.js'; -export { EntityStore } from './inmemory/entityStore'; +export { EntityStore } from './inmemory/entityStore.js'; export { fieldNameFromStoreName, defaultDataIdFromObject, -} from './inmemory/helpers' +} from './inmemory/helpers.js' export { InMemoryCache, -} from './inmemory/inMemoryCache'; +} from './inmemory/inMemoryCache.js'; -export { - ReactiveVar, - makeVar, - cacheSlot, -} from './inmemory/reactiveVars'; +export type { ReactiveVar } from './inmemory/reactiveVars.js'; +export { makeVar, cacheSlot } from 
'./inmemory/reactiveVars.js'; -export { +export type { TypePolicies, TypePolicy, FieldPolicy, @@ -42,16 +37,14 @@ export { FieldMergeFunction, FieldFunctionOptions, PossibleTypesMap, - Policies, -} from './inmemory/policies'; +} from './inmemory/policies.js'; +export { Policies } from './inmemory/policies.js'; export { canonicalStringify, -} from './inmemory/object-canon'; +} from './inmemory/object-canon.js'; -export { - FragmentRegistryAPI, - createFragmentRegistry, -} from './inmemory/fragmentRegistry'; +export type { FragmentRegistryAPI } from './inmemory/fragmentRegistry.js'; +export { createFragmentRegistry } from './inmemory/fragmentRegistry.js'; -export * from './inmemory/types'; +export * from './inmemory/types.js'; diff --git a/src/cache/inmemory/__mocks__/optimism.ts b/src/cache/inmemory/__mocks__/optimism.ts deleted file mode 100644 --- a/src/cache/inmemory/__mocks__/optimism.ts +++ /dev/null @@ -1,5 +0,0 @@ -const optimism = jest.requireActual('optimism'); -module.exports = { - ...optimism, - wrap: jest.fn(optimism.wrap), -}; diff --git a/src/cache/inmemory/entityStore.ts b/src/cache/inmemory/entityStore.ts --- a/src/cache/inmemory/entityStore.ts +++ b/src/cache/inmemory/entityStore.ts @@ -1,35 +1,40 @@ -import { invariant } from '../../utilities/globals'; -import { dep, OptimisticDependencyFunction } from 'optimism'; +import { invariant } from '../../utilities/globals/index.js'; +import type { OptimisticDependencyFunction } from 'optimism'; +import { dep } from 'optimism'; import { equal } from '@wry/equality'; import { Trie } from '@wry/trie'; -import { - isReference, +import type { StoreValue, StoreObject, - Reference, + Reference} from '../../utilities/index.js'; +import { + isReference, makeReference, DeepMerger, maybeDeepFreeze, canUseWeakMap, isNonNullObject, -} from '../../utilities'; -import { NormalizedCache, NormalizedCacheObject } from './types'; -import { hasOwn, fieldNameFromStoreName } from './helpers'; -import { Policies, StorageType } from './policies'; -import { Cache } from '../core/types/Cache'; -import { +} from '../../utilities/index.js'; +import type { NormalizedCache, NormalizedCacheObject } from './types.js'; +import { hasOwn, fieldNameFromStoreName } from './helpers.js'; +import type { Policies, StorageType } from './policies.js'; +import type { Cache } from '../core/types/Cache.js'; +import type { SafeReadonly, Modifier, Modifiers, ReadFieldOptions, ToReferenceFunction, CanReadFunction, -} from '../core/types/common'; + InvalidateModifier, + DeleteModifier, + ModifierDetails, +} from '../core/types/common.js'; -const DELETE: any = Object.create(null); +const DELETE: DeleteModifier = Object.create(null); const delModifier: Modifier<any> = () => DELETE; -const INVALIDATE: any = Object.create(null); +const INVALIDATE: InvalidateModifier = Object.create(null); export abstract class EntityStore implements NormalizedCache { protected data: NormalizedCacheObject = Object.create(null); @@ -190,7 +195,7 @@ export abstract class EntityStore implements NormalizedCache { public modify( dataId: string, - fields: Modifier<any> | Modifiers, + fields: Modifier<any> | Modifiers<Record<string, any>>, ): boolean { const storeObject = this.lookup(dataId); @@ -215,13 +220,13 @@ export abstract class EntityStore implements NormalizedCache { } : fieldNameOrOptions, { store: this }, ), - }; + } satisfies Partial<ModifierDetails>; Object.keys(storeObject).forEach(storeFieldName => { const fieldName = fieldNameFromStoreName(storeFieldName); let fieldValue = 
storeObject[storeFieldName]; if (fieldValue === void 0) return; - const modify: Modifier<StoreValue> = typeof fields === "function" + const modify: Modifier<StoreValue> | undefined = typeof fields === "function" ? fields : fields[storeFieldName] || fields[fieldName]; if (modify) { diff --git a/src/cache/inmemory/fragmentRegistry.ts b/src/cache/inmemory/fragmentRegistry.ts --- a/src/cache/inmemory/fragmentRegistry.ts +++ b/src/cache/inmemory/fragmentRegistry.ts @@ -1,14 +1,16 @@ -import { +import type { DocumentNode, ASTNode, FragmentDefinitionNode, - FragmentSpreadNode, + FragmentSpreadNode} from "graphql"; +import { visit, } from "graphql"; import { wrap } from "optimism"; -import { FragmentMap, getFragmentDefinitions } from "../../utilities"; +import type { FragmentMap} from "../../utilities/index.js"; +import { getFragmentDefinitions } from "../../utilities/index.js"; export interface FragmentRegistryAPI { register(...fragments: DocumentNode[]): this; diff --git a/src/cache/inmemory/helpers.ts b/src/cache/inmemory/helpers.ts --- a/src/cache/inmemory/helpers.ts +++ b/src/cache/inmemory/helpers.ts @@ -1,30 +1,31 @@ -import { DocumentNode, FragmentDefinitionNode, SelectionSetNode } from 'graphql'; +import type { DocumentNode, FragmentDefinitionNode, SelectionSetNode } from 'graphql'; -import { +import type { NormalizedCache, InMemoryCacheConfig, -} from './types'; +} from './types.js'; -import { KeyFieldsContext } from './policies'; -import { FragmentRegistryAPI } from './fragmentRegistry'; +import type { KeyFieldsContext } from './policies.js'; +import type { FragmentRegistryAPI } from './fragmentRegistry.js'; -import { +import type { Reference, - isReference, StoreValue, StoreObject, + FragmentMap, + FragmentMapFunction} from '../../utilities/index.js'; +import { + isReference, isField, DeepMerger, resultKeyNameFromField, shouldInclude, isNonNullObject, compact, - FragmentMap, - FragmentMapFunction, createFragmentMap, getFragmentDefinitions, isArray, -} from '../../utilities'; +} from '../../utilities/index.js'; export const { hasOwnProperty: hasOwn, diff --git a/src/cache/inmemory/inMemoryCache.ts b/src/cache/inmemory/inMemoryCache.ts --- a/src/cache/inmemory/inMemoryCache.ts +++ b/src/cache/inmemory/inMemoryCache.ts @@ -1,30 +1,36 @@ -import { invariant } from '../../utilities/globals'; +import { invariant } from '../../utilities/globals/index.js'; // Make builtins like Map and Set safe to use with non-extensible objects. 
-import './fixPolyfills'; +import './fixPolyfills.js'; -import { DocumentNode } from 'graphql'; -import { OptimisticWrapperFunction, wrap } from 'optimism'; +import type { DocumentNode } from 'graphql'; +import type { OptimisticWrapperFunction} from 'optimism'; +import { wrap } from 'optimism'; import { equal } from '@wry/equality'; -import { ApolloCache } from '../core/cache'; -import { Cache } from '../core/types/Cache'; -import { MissingFieldError } from '../core/types/common'; +import { ApolloCache } from '../core/cache.js'; +import type { Cache } from '../core/types/Cache.js'; +import { MissingFieldError } from '../core/types/common.js'; +import type { + StoreObject, + Reference} from '../../utilities/index.js'; import { addTypenameToDocument, - StoreObject, - Reference, isReference, -} from '../../utilities'; -import { InMemoryCacheConfig, NormalizedCacheObject } from './types'; -import { StoreReader } from './readFromStore'; -import { StoreWriter } from './writeToStore'; -import { EntityStore, supportsResultCaching } from './entityStore'; -import { makeVar, forgetCache, recallCache } from './reactiveVars'; -import { Policies } from './policies'; -import { hasOwn, normalizeConfig, shouldCanonizeResults } from './helpers'; -import { canonicalStringify } from './object-canon'; -import { OperationVariables } from '../../core'; + DocumentTransform, +} from '../../utilities/index.js'; +import type { + InMemoryCacheConfig, + NormalizedCacheObject, +} from './types.js'; +import { StoreReader } from './readFromStore.js'; +import { StoreWriter } from './writeToStore.js'; +import { EntityStore, supportsResultCaching } from './entityStore.js'; +import { makeVar, forgetCache, recallCache } from './reactiveVars.js'; +import { Policies } from './policies.js'; +import { hasOwn, normalizeConfig, shouldCanonizeResults } from './helpers.js'; +import { canonicalStringify } from './object-canon.js'; +import type { OperationVariables } from '../../core/index.js'; type BroadcastOptions = Pick< Cache.BatchOptions<InMemoryCache>, @@ -40,15 +46,19 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { private watches = new Set<Cache.WatchOptions>(); private addTypename: boolean; - private typenameDocumentCache = new Map<DocumentNode, DocumentNode>(); private storeReader: StoreReader; private storeWriter: StoreWriter; + private addTypenameTransform = new DocumentTransform(addTypenameToDocument); private maybeBroadcastWatch: OptimisticWrapperFunction< [Cache.WatchOptions, BroadcastOptions?], any, [Cache.WatchOptions]>; + // Override the default value, since InMemoryCache result objects are frozen + // in development and expected to remain logically immutable in production. + public readonly assumeImmutableResults = true; + // Dynamically imported code can augment existing typePolicies or // possibleTypes by calling cache.policies.addTypePolicies or // cache.policies.addPossibletypes. 
@@ -204,7 +214,7 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { } } - public modify(options: Cache.ModifyOptions): boolean { + public modify<Entity extends Record<string, any> = Record<string, any>>(options: Cache.ModifyOptions<Entity>): boolean { if (hasOwn.call(options, "id") && !options.id) { // To my knowledge, TypeScript does not currently provide a way to // enforce that an optional property?:type must *not* be undefined @@ -503,32 +513,27 @@ export class InMemoryCache extends ApolloCache<NormalizedCacheObject> { } public transformDocument(document: DocumentNode): DocumentNode { - if (this.addTypename) { - let result = this.typenameDocumentCache.get(document); - if (!result) { - result = addTypenameToDocument(document); - this.typenameDocumentCache.set(document, result); - // If someone calls transformDocument and then mistakenly passes the - // result back into an API that also calls transformDocument, make sure - // we don't keep creating new query documents. - this.typenameDocumentCache.set(result, result); - } - return result; + return this.addTypenameToDocument(this.addFragmentsToDocument(document)); + } + + protected broadcastWatches(options?: BroadcastOptions) { + if (!this.txCount) { + this.watches.forEach(c => this.maybeBroadcastWatch(c, options)); } - return document; } - public transformForLink(document: DocumentNode): DocumentNode { + private addFragmentsToDocument(document: DocumentNode) { const { fragments } = this.config; return fragments ? fragments.transform(document) : document; } - protected broadcastWatches(options?: BroadcastOptions) { - if (!this.txCount) { - this.watches.forEach(c => this.maybeBroadcastWatch(c, options)); + private addTypenameToDocument(document: DocumentNode) { + if (this.addTypename) { + return this.addTypenameTransform.transformDocument(document); } + return document; } // This method is wrapped by maybeBroadcastWatch, which is called by diff --git a/src/cache/inmemory/key-extractor.ts b/src/cache/inmemory/key-extractor.ts --- a/src/cache/inmemory/key-extractor.ts +++ b/src/cache/inmemory/key-extractor.ts @@ -1,18 +1,18 @@ -import { invariant } from "../../utilities/globals"; +import { invariant } from "../../utilities/globals/index.js"; import { argumentsObjectFromField, DeepMerger, isNonEmptyArray, isNonNullObject, -} from "../../utilities"; +} from "../../utilities/index.js"; -import { hasOwn, isArray } from "./helpers"; -import { +import { hasOwn, isArray } from "./helpers.js"; +import type { KeySpecifier, KeyFieldsFunction, KeyArgsFunction, -} from "./policies"; +} from "./policies.js"; // Mapping from JSON-encoded KeySpecifier strings to associated information. 
const specifierInfoCache: Record<string, { @@ -73,9 +73,9 @@ export function keyFieldsFnFromSpecifier( invariant( extracted !== void 0, - `Missing field '${schemaKeyPath.join('.')}' while extracting keyFields from ${ - JSON.stringify(object) - }`, + `Missing field '%s' while extracting keyFields from %s`, + schemaKeyPath.join('.'), + object, ); return extracted; diff --git a/src/cache/inmemory/object-canon.ts b/src/cache/inmemory/object-canon.ts --- a/src/cache/inmemory/object-canon.ts +++ b/src/cache/inmemory/object-canon.ts @@ -1,12 +1,10 @@ -import "../../utilities/globals"; - import { Trie } from "@wry/trie"; import { canUseWeakMap, canUseWeakSet, isNonNullObject as isObjectOrArray, -} from "../../utilities"; -import { isArray } from "./helpers"; +} from "../../utilities/index.js"; +import { isArray } from "./helpers.js"; function shallowCopy<T>(value: T): T { if (isObjectOrArray(value)) { diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -1,30 +1,31 @@ -import { invariant, InvariantError } from '../../utilities/globals'; +import { invariant, newInvariantError } from '../../utilities/globals/index.js'; -import { +import type { InlineFragmentNode, FragmentDefinitionNode, SelectionSetNode, FieldNode, } from 'graphql'; -import { +import type { FragmentMap, - storeKeyNameFromField, StoreValue, StoreObject, + Reference} from '../../utilities/index.js'; +import { + storeKeyNameFromField, argumentsObjectFromField, - Reference, isReference, getStoreKeyName, isNonNullObject, stringifyForDisplay, -} from '../../utilities'; -import { +} from '../../utilities/index.js'; +import type { IdGetter, MergeInfo, NormalizedCache, ReadMergeModifyContext, -} from "./types"; +} from "./types.js"; import { hasOwn, fieldNameFromStoreName, @@ -33,24 +34,24 @@ import { TypeOrFieldNameRegExp, defaultDataIdFromObject, isArray, -} from './helpers'; -import { cacheSlot } from './reactiveVars'; -import { InMemoryCache } from './inMemoryCache'; -import { +} from './helpers.js'; +import { cacheSlot } from './reactiveVars.js'; +import type { InMemoryCache } from './inMemoryCache.js'; +import type { SafeReadonly, FieldSpecifier, ToReferenceFunction, ReadFieldFunction, ReadFieldOptions, CanReadFunction, -} from '../core/types/common'; -import { WriteContext } from './writeToStore'; +} from '../core/types/common.js'; +import type { WriteContext } from './writeToStore.js'; // Upgrade to a faster version of the default stable JSON.stringify function // used by getStoreKeyName. This function is used when computing storeFieldName // strings (when no keyArgs has been configured for a field). -import { canonicalStringify } from './object-canon'; -import { keyArgsFnFromSpecifier, keyFieldsFnFromSpecifier } from './key-extractor'; +import { canonicalStringify } from './object-canon.js'; +import { keyArgsFnFromSpecifier, keyFieldsFnFromSpecifier } from './key-extractor.js'; getStoreKeyName.setStringify(canonicalStringify); @@ -512,7 +513,7 @@ export class Policies { const rootId = "ROOT_" + which.toUpperCase(); const old = this.rootTypenamesById[rootId]; if (typename !== old) { - invariant(!old || old === which, `Cannot change root ${which} __typename more than once`); + invariant(!old || old === which, `Cannot change root %s __typename more than once`, which); // First, delete any old __typename associated with this rootId from // rootIdsByTypename. 
if (old) delete this.rootIdsByTypename[old]; @@ -565,11 +566,33 @@ export class Policies { // and merge functions often need to cooperate, so changing only one // of them would be a recipe for inconsistency. // - // Once the TypePolicy for typename has been accessed, its - // properties can still be updated directly using addTypePolicies, - // but future changes to supertype policies will not be reflected in - // this policy, because this code runs at most once per typename. - const supertypes = this.supertypeMap.get(typename); + // Once the TypePolicy for typename has been accessed, its properties can + // still be updated directly using addTypePolicies, but future changes to + // inherited supertype policies will not be reflected in this subtype + // policy, because this code runs at most once per typename. + let supertypes = this.supertypeMap.get(typename); + if (!supertypes && this.fuzzySubtypes.size) { + // To make the inheritance logic work for unknown typename strings that + // may have fuzzy supertypes, we give this typename an empty supertype + // set and then populate it with any fuzzy supertypes that match. + supertypes = this.getSupertypeSet(typename, true)!; + // This only works for typenames that are directly matched by a fuzzy + // supertype. What if there is an intermediate chain of supertypes? + // While possible, that situation can only be solved effectively by + // specifying the intermediate relationships via possibleTypes, manually + // and in a non-fuzzy way. + this.fuzzySubtypes.forEach((regExp, fuzzy) => { + if (regExp.test(typename)) { + // The fuzzy parameter is just the original string version of regExp + // (not a valid __typename string), but we can look up the + // associated supertype(s) in this.supertypeMap. + const fuzzySupertypes = this.supertypeMap.get(fuzzy); + if (fuzzySupertypes) { + fuzzySupertypes.forEach(supertype => supertypes!.add(supertype)); + } + } + }); + } if (supertypes && supertypes.size) { supertypes.forEach(supertype => { const { fields, ...rest } = this.getTypePolicy(supertype); @@ -664,7 +687,7 @@ export class Policies { if (supertypeSet.has(supertype)) { if (!typenameSupertypeSet.has(supertype)) { if (checkingFuzzySubtypes) { - invariant.warn(`Inferring subtype ${typename} of supertype ${supertype}`); + invariant.warn(`Inferring subtype %s of supertype %s`, typename, supertype); } // Record positive results for faster future lookup. 
// Unfortunately, we cannot safely cache negative results, @@ -951,9 +974,7 @@ export function normalizeReadFieldOptions( } if (__DEV__ && options.from === void 0) { - invariant.warn(`Undefined 'from' passed to readField with arguments ${ - stringifyForDisplay(Array.from(readFieldArgs)) - }`); + invariant.warn(`Undefined 'from' passed to readField with arguments %s`, stringifyForDisplay(Array.from(readFieldArgs))); } if (void 0 === options.variables) { @@ -968,7 +989,7 @@ function makeMergeObjectsFunction( ): MergeObjectsFunction { return function mergeObjects(existing, incoming) { if (isArray(existing) || isArray(incoming)) { - throw new InvariantError("Cannot automatically merge arrays"); + throw newInvariantError("Cannot automatically merge arrays"); } // These dynamic checks are necessary because the parameters of a diff --git a/src/cache/inmemory/reactiveVars.ts b/src/cache/inmemory/reactiveVars.ts --- a/src/cache/inmemory/reactiveVars.ts +++ b/src/cache/inmemory/reactiveVars.ts @@ -1,7 +1,8 @@ -import { dep, OptimisticDependencyFunction } from "optimism"; +import type { OptimisticDependencyFunction } from "optimism"; +import { dep } from "optimism"; import { Slot } from "@wry/context"; -import { InMemoryCache } from "./inMemoryCache"; -import { ApolloCache } from '../../core'; +import type { InMemoryCache } from "./inMemoryCache.js"; +import type { ApolloCache } from '../../core/index.js'; export interface ReactiveVar<T> { (newValue?: T): T; diff --git a/src/cache/inmemory/readFromStore.ts b/src/cache/inmemory/readFromStore.ts --- a/src/cache/inmemory/readFromStore.ts +++ b/src/cache/inmemory/readFromStore.ts @@ -1,21 +1,25 @@ -import { invariant, InvariantError } from '../../utilities/globals'; +import { invariant, newInvariantError } from '../../utilities/globals/index.js'; -import { +import type { DocumentNode, FieldNode, - Kind, - SelectionSetNode, + SelectionSetNode} from 'graphql'; +import { + Kind } from 'graphql'; -import { wrap, OptimisticWrapperFunction } from 'optimism'; +import type { OptimisticWrapperFunction } from 'optimism'; +import { wrap } from 'optimism'; +import type { + Reference, + StoreObject, + FragmentMap, + FragmentMapFunction} from '../../utilities/index.js'; import { isField, resultKeyNameFromField, - Reference, isReference, makeReference, - StoreObject, - FragmentMap, shouldInclude, addTypenameToDocument, getDefaultValues, @@ -27,22 +31,22 @@ import { DeepMerger, isNonNullObject, canUseWeakMap, - compact, - FragmentMapFunction, -} from '../../utilities'; -import { Cache } from '../core/types/Cache'; -import { + compact +} from '../../utilities/index.js'; +import type { Cache } from '../core/types/Cache.js'; +import type { DiffQueryAgainstStoreOptions, InMemoryCacheConfig, NormalizedCache, ReadMergeModifyContext, -} from './types'; -import { maybeDependOnExistenceOfEntity, supportsResultCaching } from './entityStore'; -import { isArray, extractFragmentContext, getTypenameFromStoreObject, shouldCanonizeResults } from './helpers'; -import { Policies } from './policies'; -import { InMemoryCache } from './inMemoryCache'; -import { MissingFieldError, MissingTree } from '../core/types/common'; -import { canonicalStringify, ObjectCanon } from './object-canon'; +} from './types.js'; +import { maybeDependOnExistenceOfEntity, supportsResultCaching } from './entityStore.js'; +import { isArray, extractFragmentContext, getTypenameFromStoreObject, shouldCanonizeResults } from './helpers.js'; +import type { Policies } from './policies.js'; +import type { InMemoryCache } 
from './inMemoryCache.js'; +import type { MissingTree } from '../core/types/common.js'; +import { MissingFieldError } from '../core/types/common.js'; +import { canonicalStringify, ObjectCanon } from './object-canon.js'; export type VariableMap = { [name: string]: any }; @@ -408,7 +412,7 @@ export class StoreReader { ); if (!fragment && selection.kind === Kind.FRAGMENT_SPREAD) { - throw new InvariantError(`No fragment named ${selection.name.value}`); + throw newInvariantError(`No fragment named %s`, selection.name.value); } if (fragment && policies.fragmentMatches(fragment, typename)) { @@ -517,9 +521,9 @@ function assertSelectionSetForIdValue( if (isNonNullObject(value)) { invariant( !isReference(value), - `Missing selection set for object of type ${ - getTypenameFromStoreObject(store, value) - } returned for query field ${field.name.value}`, + `Missing selection set for object of type %s returned for query field %s`, + getTypenameFromStoreObject(store, value), + field.name.value ); Object.values(value).forEach(workSet.add, workSet); } diff --git a/src/cache/inmemory/types.ts b/src/cache/inmemory/types.ts --- a/src/cache/inmemory/types.ts +++ b/src/cache/inmemory/types.ts @@ -1,29 +1,29 @@ -import { DocumentNode, FieldNode } from 'graphql'; +import type { DocumentNode, FieldNode } from 'graphql'; -import { Transaction } from '../core/cache'; -import { +import type { Transaction } from '../core/cache.js'; +import type { StoreObject, StoreValue, Reference, -} from '../../utilities'; -import { FieldValueGetter } from './entityStore'; -import { +} from '../../utilities/index.js'; +import type { FieldValueGetter } from './entityStore.js'; +import type { TypePolicies, PossibleTypesMap, KeyFieldsFunction, StorageType, FieldMergeFunction, -} from './policies'; -import { - Modifier, +} from './policies.js'; +import type { Modifiers, ToReferenceFunction, CanReadFunction, -} from '../core/types/common'; + AllFieldsModifier, +} from '../core/types/common.js'; -import { FragmentRegistryAPI } from './fragmentRegistry'; +import type { FragmentRegistryAPI } from './fragmentRegistry.js'; -export { StoreObject, StoreValue, Reference } +export type { StoreObject, StoreValue, Reference } export interface IdGetterObj extends Object { __typename?: string; @@ -49,7 +49,7 @@ export interface NormalizedCache { merge(olderId: string, newerObject: StoreObject): void; merge(olderObject: StoreObject, newerId: string): void; - modify(dataId: string, fields: Modifiers | Modifier<any>): boolean; + modify<Entity extends Record<string, any>>(dataId: string, fields: Modifiers<Entity> | AllFieldsModifier<Entity>): boolean; delete(dataId: string, fieldName?: string): boolean; clear(): void; diff --git a/src/cache/inmemory/writeToStore.ts b/src/cache/inmemory/writeToStore.ts --- a/src/cache/inmemory/writeToStore.ts +++ b/src/cache/inmemory/writeToStore.ts @@ -1,15 +1,20 @@ -import { invariant, InvariantError } from '../../utilities/globals'; +import { invariant, newInvariantError } from '../../utilities/globals/index.js'; import { equal } from '@wry/equality'; import { Trie } from '@wry/trie'; -import { +import type { SelectionSetNode, - FieldNode, + FieldNode} from 'graphql'; +import { Kind, } from 'graphql'; -import { +import type { FragmentMap, FragmentMapFunction, + StoreValue, + StoreObject, + Reference} from '../../utilities/index.js'; +import { getFragmentFromSelection, getDefaultValues, getOperationDefinition, @@ -17,26 +22,23 @@ import { makeReference, isField, resultKeyNameFromField, - StoreValue, - StoreObject, - 
Reference, isReference, shouldInclude, cloneDeep, addTypenameToDocument, isNonEmptyArray, argumentsObjectFromField, -} from '../../utilities'; - -import { NormalizedCache, ReadMergeModifyContext, MergeTree, InMemoryCacheConfig } from './types'; -import { isArray, makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject, extractFragmentContext } from './helpers'; -import { StoreReader } from './readFromStore'; -import { InMemoryCache } from './inMemoryCache'; -import { EntityStore } from './entityStore'; -import { Cache } from '../../core'; -import { canonicalStringify } from './object-canon'; -import { normalizeReadFieldOptions } from './policies'; -import { ReadFieldFunction } from '../core/types/common'; +} from '../../utilities/index.js'; + +import type { NormalizedCache, ReadMergeModifyContext, MergeTree, InMemoryCacheConfig } from './types.js'; +import { isArray, makeProcessedFieldsMerger, fieldNameFromStoreName, storeValueIsStoreObject, extractFragmentContext } from './helpers.js'; +import type { StoreReader } from './readFromStore.js'; +import type { InMemoryCache } from './inMemoryCache.js'; +import type { EntityStore } from './entityStore.js'; +import type { Cache } from '../../core/index.js'; +import { canonicalStringify } from './object-canon.js'; +import { normalizeReadFieldOptions } from './policies.js'; +import type { ReadFieldFunction } from '../core/types/common.js'; export interface WriteContext extends ReadMergeModifyContext { readonly written: { @@ -148,7 +150,7 @@ export class StoreWriter { }); if (!isReference(ref)) { - throw new InvariantError(`Could not identify object ${JSON.stringify(result)}`); + throw newInvariantError(`Could not identify object %s`, result); } // So far, the store has not been modified, so now it's time to process @@ -357,11 +359,7 @@ export class StoreWriter { // not be cause for alarm. !policies.getReadFunction(typename, field.name.value) ) { - invariant.error(`Missing field '${ - resultKeyNameFromField(field) - }' while writing result ${ - JSON.stringify(result, null, 2) - }`.substring(0, 1000)); + invariant.error(`Missing field '%s' while writing result %o`, resultKeyNameFromField(field), result); } }); @@ -568,7 +566,7 @@ export class StoreWriter { ); if (!fragment && selection.kind === Kind.FRAGMENT_SPREAD) { - throw new InvariantError(`No fragment named ${selection.name.value}`); + throw newInvariantError(`No fragment named %s`, selection.name.value); } if (fragment && @@ -813,25 +811,27 @@ function warnAboutDataLoss( } invariant.warn( -`Cache data may be lost when replacing the ${fieldName} field of a ${parentType} object. +`Cache data may be lost when replacing the %s field of a %s object. This could cause additional (usually avoidable) network requests to fetch data that were otherwise cached. -To address this problem (which is not a bug in Apollo Client), ${ - childTypenames.length - ? 
"either ensure all objects of type " + - childTypenames.join(" and ") + " have an ID or a custom merge function, or " - : "" -}define a custom merge function for the ${ - typeDotName -} field, so InMemoryCache can safely merge these objects: +To address this problem (which is not a bug in Apollo Client), %sdefine a custom merge function for the %s field, so InMemoryCache can safely merge these objects: - existing: ${JSON.stringify(existing).slice(0, 1000)} - incoming: ${JSON.stringify(incoming).slice(0, 1000)} + existing: %s + incoming: %s For more information about these options, please refer to the documentation: * Ensuring entity objects have IDs: https://go.apollo.dev/c/generating-unique-identifiers * Defining custom merge functions: https://go.apollo.dev/c/merging-non-normalized-objects -`); +`, + fieldName, + parentType, + childTypenames.length + ? "either ensure all objects of type " + childTypenames.join(" and ") + " have an ID or a custom merge function, or " + : "", + typeDotName, + existing, + incoming +); } diff --git a/src/config/jest/setup.ts b/src/config/jest/setup.ts --- a/src/config/jest/setup.ts +++ b/src/config/jest/setup.ts @@ -1,6 +1,18 @@ import gql from 'graphql-tag'; +import '@testing-library/jest-dom'; +import { loadErrorMessageHandler } from '../../dev/loadErrorMessageHandler.js'; +import '../../testing/matchers/index.js'; // Turn off warnings for repeated fragment names gql.disableFragmentWarnings(); process.on('unhandledRejection', () => {}); + +loadErrorMessageHandler(); + +function fail(reason = "fail was called in a test.") { + expect(reason).toBe(undefined); +} + +// @ts-ignore +globalThis.fail = fail; \ No newline at end of file diff --git a/src/core/ApolloClient.ts b/src/core/ApolloClient.ts --- a/src/core/ApolloClient.ts +++ b/src/core/ApolloClient.ts @@ -1,17 +1,19 @@ -import { invariant, InvariantError } from '../utilities/globals'; +import { invariant, newInvariantError } from '../utilities/globals/index.js'; -import { ExecutionResult, DocumentNode } from 'graphql'; +import type { ExecutionResult, DocumentNode } from 'graphql'; -import { ApolloLink, FetchResult, GraphQLRequest, execute } from '../link/core'; -import { ApolloCache, DataProxy, Reference } from '../cache'; -import { Observable } from '../utilities'; -import { version } from '../version'; -import { HttpLink, UriFunction } from '../link/http'; +import type { FetchResult, GraphQLRequest} from '../link/core/index.js'; +import { ApolloLink, execute } from '../link/core/index.js'; +import type { ApolloCache, DataProxy, Reference } from '../cache/index.js'; +import type { DocumentTransform, Observable } from '../utilities/index.js'; +import { version } from '../version.js'; +import type { UriFunction } from '../link/http/index.js'; +import { HttpLink } from '../link/http/index.js'; -import { QueryManager } from './QueryManager'; -import { ObservableQuery } from './ObservableQuery'; +import { QueryManager } from './QueryManager.js'; +import type { ObservableQuery } from './ObservableQuery.js'; -import { +import type { ApolloQueryResult, DefaultContext, OperationVariables, @@ -20,20 +22,21 @@ import { RefetchQueriesResult, InternalRefetchQueriesResult, RefetchQueriesInclude, -} from './types'; +} from './types.js'; -import { +import type { QueryOptions, WatchQueryOptions, MutationOptions, SubscriptionOptions, WatchQueryFetchPolicy, -} from './watchQueryOptions'; +} from './watchQueryOptions.js'; +import type { + FragmentMatcher} from './LocalState.js'; import { - LocalState, - FragmentMatcher, -} 
from './LocalState'; + LocalState +} from './LocalState.js'; export interface DefaultOptions { watchQuery?: Partial<WatchQueryOptions<any, any>>; @@ -60,13 +63,14 @@ export type ApolloClientOptions<TCacheShape> = { fragmentMatcher?: FragmentMatcher; name?: string; version?: string; + documentTransform?: DocumentTransform }; // Though mergeOptions now resides in @apollo/client/utilities, it was // previously declared and exported from this module, and then reexported from // @apollo/client/core. Since we need to preserve that API anyway, the easiest // solution is to reexport mergeOptions where it was previously declared (here). -import { mergeOptions } from "../utilities"; +import { mergeOptions } from "../utilities/index.js"; export { mergeOptions } /** @@ -125,11 +129,20 @@ export class ApolloClient<TCacheShape> implements DataProxy { * you are using. */ constructor(options: ApolloClientOptions<TCacheShape>) { + if (!options.cache) { + throw newInvariantError( + "To initialize Apollo Client, you must specify a 'cache' property " + + "in the options object. \n" + + "For more information, please visit: https://go.apollo.dev/c/docs" + ); + } + const { uri, credentials, headers, cache, + documentTransform, ssrMode = false, ssrForceFetchDelay = 0, connectToDevTools = @@ -141,7 +154,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { __DEV__, queryDeduplication = true, defaultOptions, - assumeImmutableResults = false, + assumeImmutableResults = cache.assumeImmutableResults, resolvers, typeDefs, fragmentMatcher, @@ -157,14 +170,6 @@ export class ApolloClient<TCacheShape> implements DataProxy { : ApolloLink.empty(); } - if (!cache) { - throw new InvariantError( - "To initialize Apollo Client, you must specify a 'cache' property " + - "in the options object. \n" + - "For more information, please visit: https://go.apollo.dev/c/docs" - ); - } - this.link = link; this.cache = cache; this.disableNetworkFetches = ssrMode || ssrForceFetchDelay > 0; @@ -214,7 +219,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { if (url) { invariant.log( "Download the Apollo DevTools for a better development " + - "experience: " + url + "experience: %s", url ); } } @@ -233,6 +238,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { cache: this.cache, link: this.link, defaultOptions: this.defaultOptions, + documentTransform, queryDeduplication, ssrMode, clientAwareness: { @@ -256,6 +262,15 @@ export class ApolloClient<TCacheShape> implements DataProxy { }); } + /** + * The `DocumentTransform` used to modify GraphQL documents before a request + * is made. If a custom `DocumentTransform` is not provided, this will be the + * default document transform. + */ + get documentTransform() { + return this.queryManager.documentTransform; + } + /** * Call this method to terminate any active client processes, making it safe * to dispose of this `ApolloClient` instance. @@ -567,7 +582,7 @@ export class ApolloClient<TCacheShape> implements DataProxy { // result.queries and result.results instead, you shouldn't have to worry // about preventing uncaught rejections for the Promise.all result. 
result.catch(error => { - invariant.debug(`In client.refetchQueries, Promise.all promise rejected with error ${error}`); + invariant.debug(`In client.refetchQueries, Promise.all promise rejected with error %o`, error); }); return result; diff --git a/src/core/LocalState.ts b/src/core/LocalState.ts --- a/src/core/LocalState.ts +++ b/src/core/LocalState.ts @@ -1,6 +1,6 @@ -import { invariant } from '../utilities/globals'; +import { invariant } from '../utilities/globals/index.js'; -import { +import type { DocumentNode, OperationDefinitionNode, SelectionSetNode, @@ -9,18 +9,20 @@ import { FragmentDefinitionNode, FieldNode, ASTNode, - visit, - BREAK, - isSelectionNode, DirectiveNode, FragmentSpreadNode, - ExecutableDefinitionNode, + ExecutableDefinitionNode} from 'graphql'; +import { + visit, + BREAK, + isSelectionNode } from 'graphql'; -import { ApolloCache } from '../cache'; -import { +import type { ApolloCache } from '../cache/index.js'; +import type { FragmentMap, - StoreObject, + StoreObject} from '../utilities/index.js'; +import { argumentsObjectFromField, buildQueryFromSelectionSet, createFragmentMap, @@ -34,11 +36,11 @@ import { removeClientSetsFromDocument, resultKeyNameFromField, shouldInclude, -} from '../utilities'; -import { ApolloClient } from './ApolloClient'; -import { Resolvers, OperationVariables } from './types'; -import { FetchResult } from '../link/core'; -import { cacheSlot } from '../cache'; +} from '../utilities/index.js'; +import type { ApolloClient } from './ApolloClient.js'; +import type { Resolvers, OperationVariables } from './types.js'; +import type { FetchResult } from '../link/core/index.js'; +import { cacheSlot } from '../cache/index.js'; export type Resolver = ( rootValue?: any, @@ -341,7 +343,7 @@ export class LocalState<TCacheShape> { } else { // This is a named fragment. 
fragment = fragmentMap[selection.name.value]; - invariant(fragment, `No fragment named ${selection.name.value}`); + invariant(fragment, `No fragment named %s`, selection.name.value); } if (fragment && fragment.typeCondition) { @@ -506,7 +508,7 @@ export class LocalState<TCacheShape> { }, FragmentSpread(spread: FragmentSpreadNode, _, __, ___, ancestors) { const fragment = fragmentMap[spread.name.value]; - invariant(fragment, `No fragment named ${spread.name.value}`); + invariant(fragment, `No fragment named %s`, spread.name.value); const fragmentSelections = collectByDefinition(fragment); if (fragmentSelections.size > 0) { diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -1,38 +1,39 @@ -import { invariant } from '../utilities/globals'; -import { DocumentNode } from 'graphql'; +import { invariant } from '../utilities/globals/index.js'; +import type { DocumentNode } from 'graphql'; import { equal } from '@wry/equality'; -import { NetworkStatus, isNetworkRequestInFlight } from './networkStatus'; -import { +import { NetworkStatus, isNetworkRequestInFlight } from './networkStatus.js'; +import type { Concast, + Observer, + ObservableSubscription} from '../utilities/index.js'; +import { cloneDeep, compact, getOperationDefinition, Observable, - Observer, - ObservableSubscription, iterateObserversSafely, - isNonEmptyArray, fixObservableSubclass, getQueryDefinition, -} from '../utilities'; -import { ApolloError } from '../errors'; -import { QueryManager } from './QueryManager'; -import { +} from '../utilities/index.js'; +import type { ApolloError } from '../errors/index.js'; +import type { QueryManager } from './QueryManager.js'; +import type { ApolloQueryResult, OperationVariables, TypedDocumentNode, -} from './types'; -import { +} from './types.js'; +import type { WatchQueryOptions, FetchMoreQueryOptions, SubscribeToMoreOptions, NextFetchPolicyContext, WatchQueryFetchPolicy, -} from './watchQueryOptions'; -import { QueryInfo } from './QueryInfo'; -import { MissingFieldError } from '../cache'; -import { MissingTree } from '../cache/core/types/common'; +} from './watchQueryOptions.js'; +import type { QueryInfo } from './QueryInfo.js'; +import type { MissingFieldError } from '../cache/index.js'; +import type { MissingTree } from '../cache/core/types/common.js'; +import { equalByQuery } from './equalByQuery.js'; const { assign, @@ -70,10 +71,12 @@ export class ObservableQuery< public readonly queryId: string; public readonly queryName?: string; + // The `query` computed property will always reflect the document transformed + // by the last run query. `this.options.query` will always reflect the raw + // untransformed query to ensure document transforms with runtime conditionals + // are run on the original document. public get query(): TypedDocumentNode<TData, TVariables> { - // This transform is heavily cached, so it should not be expensive to - // transform the same this.options.query document repeatedly. 
- return this.queryManager.transform(this.options.query).document; + return this.lastQuery || this.options.query; } // Computed shorthand for this.options.variables, preserved for @@ -89,6 +92,7 @@ export class ObservableQuery< private waitForOwnResult: boolean; private last?: Last<TData, TVariables>; + private lastQuery?: DocumentNode; private queryInfo: QueryInfo; @@ -248,7 +252,7 @@ export class ObservableQuery< // trust diff.result, since it was read from the cache without running // local resolvers (and it's too late to run resolvers now, since we must // return a result synchronously). - this.queryManager.transform(this.options.query).hasForcedResolvers + this.queryManager.getDocumentInfo(this.query).hasForcedResolvers ) { // Fall through. } else if (this.waitForOwnResult) { @@ -313,9 +317,22 @@ export class ObservableQuery< newResult: ApolloQueryResult<TData>, variables?: TVariables ) { + if (!this.last) { + return true; + } + + const resultIsDifferent = + this.queryManager.getDocumentInfo(this.query).hasNonreactiveDirective + ? !equalByQuery( + this.query, + this.last.result, + newResult, + this.variables, + ) + : !equal(this.last.result, newResult); + return ( - !this.last || - !equal(this.last.result, newResult) || + resultIsDifferent || (variables && !equal(this.last.variables, variables)) ); } @@ -380,12 +397,11 @@ export class ObservableQuery< const queryDef = getQueryDefinition(this.query); const vars = queryDef.variableDefinitions; if (!vars || !vars.some(v => v.variable.name.value === "variables")) { - invariant.warn(`Called refetch(${ - JSON.stringify(variables) - }) for query ${ - queryDef.name?.value || JSON.stringify(queryDef) - }, which does not declare a $variables variable. -Did you mean to call refetch(variables) instead of refetch({ variables })?`); + invariant.warn(`Called refetch(%o) for query %o, which does not declare a $variables variable. +Did you mean to call refetch(variables) instead of refetch({ variables })?`, + variables, + queryDef.name?.value || queryDef + ); } } @@ -416,7 +432,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); const combinedOptions = { ...(fetchMoreOptions.query ? fetchMoreOptions : { ...this.options, - query: this.query, + query: this.options.query, ...fetchMoreOptions, variables: { ...this.options.variables, @@ -431,8 +447,19 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); fetchPolicy: "no-cache", } as WatchQueryOptions<TFetchVars, TFetchData>; + combinedOptions.query = this.transformDocument(combinedOptions.query); + const qid = this.queryManager.generateQueryId(); + // If a temporary query is passed to `fetchMore`, we don't want to store + // it as the last query result since it may be an optimized query for + // pagination. We will however run the transforms on the original document + // as well as the document passed in `fetchMoreOptions` to ensure the cache + // uses the most up-to-date document which may rely on runtime conditionals. + this.lastQuery = fetchMoreOptions.query + ? this.transformDocument(this.options.query) + : combinedOptions.query; + // Simulate a loading result for the original query with // result.networkStatus === NetworkStatus.fetchMore. 
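// Usage sketch for the @nonreactive comparison wired in above: the query, the
// Dog type and its fields are invented for illustration, and equalByQuery is
// the internal helper module added in this change (not a public export).
// Subtrees behind @nonreactive are ignored when deciding whether a new result
// is "different", so observers are not re-notified for changes confined to them.
import { gql } from "@apollo/client";
import { equalByQuery } from "./equalByQuery.js"; // internal module added in this change

const dogsQuery = gql`
  query Dogs {
    dogs {
      id
      name
      ...DogFields @nonreactive
    }
  }
  fragment DogFields on Dog {
    barkVolume
  }
`;

// The two results differ only inside the @nonreactive fragment, so they
// compare as equal and isDifferentFromLastResult returns false.
const previous = { data: { dogs: [{ id: 1, name: "Rex", barkVolume: 3 }] } };
const next = { data: { dogs: [{ id: 1, name: "Rex", barkVolume: 7 }] } };

equalByQuery(dogsQuery, previous, next); // => true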
const { queryInfo } = this; @@ -565,6 +592,13 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); return this.reobserve(newOptions); } + public silentSetOptions( + newOptions: Partial<WatchQueryOptions<TVariables, TData>>, + ) { + const mergedOptions = compact(this.options, newOptions || {}); + assign(this.options, mergedOptions); + } + /** * Update the variables of this observable query, and fetch the new results * if they've changed. Most users should prefer `refetch` instead of @@ -774,17 +808,18 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); newResult: ApolloQueryResult<TData>, variables = this.variables, ) { - this.last = { - ...this.last, + let error: ApolloError | undefined = this.getLastError(); + // Preserve this.last.error unless the variables have changed. + if (error && this.last && !equal(variables, this.last.variables)) { + error = void 0; + } + return this.last = { result: this.queryManager.assumeImmutableResults ? newResult : cloneDeep(newResult), variables, + ...(error ? { error } : null), }; - if (!isNonEmptyArray(newResult.errors)) { - delete this.last.error; - } - return this.last; } public reobserveAsConcast( @@ -816,6 +851,14 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); ? mergedOptions : assign(this.options, mergedOptions); + // Don't update options.query with the transformed query to avoid + // overwriting this.options.query when we aren't using a disposable concast. + // We want to ensure we can re-run the custom document transforms the next + // time a request is made against the original query. + const query = this.transformDocument(options.query); + + this.lastQuery = query; + if (!useDisposableConcast) { // We can skip calling updatePolling if we're not changing this.options. this.updatePolling(); @@ -839,15 +882,19 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); } } - this.waitForOwnResult &&= skipCacheDataFor(options.fetchPolicy); + // If the transform doesn't change the document, leave `options` alone and + // use the original object. + const fetchOptions = query === options.query ? options : { ...options, query }; + + this.waitForOwnResult &&= skipCacheDataFor(fetchOptions.fetchPolicy); const finishWaitingForOwnResult = () => { if (this.concast === concast) { this.waitForOwnResult = false; } }; - const variables = options.variables && { ...options.variables }; - const { concast, fromLink } = this.fetch(options, newNetworkStatus); + const variables = fetchOptions.variables && { ...fetchOptions.variables }; + const { concast, fromLink } = this.fetch(fetchOptions, newNetworkStatus); const observer: Observer<ApolloQueryResult<TData>> = { next: result => { finishWaitingForOwnResult(); @@ -859,7 +906,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); }, }; - if (!useDisposableConcast && fromLink) { + if (!useDisposableConcast && (fromLink || !this.concast)) { // We use the {add,remove}Observer methods directly to avoid wrapping // observer with an unnecessary SubscriptionObserver object. 
if (this.concast && this.observer) { @@ -900,12 +947,16 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); variables: TVariables | undefined, ) { const lastError = this.getLastError(); - if (lastError || this.isDifferentFromLastResult(result, variables)) { - if (lastError || !result.partial || this.options.returnPartialData) { - this.updateLastResult(result, variables); - } - - iterateObserversSafely(this.observers, 'next', result); + const isDifferent = this.isDifferentFromLastResult(result, variables); + // Update the last result even when isDifferentFromLastResult returns false, + // because the query may be using the @nonreactive directive, and we want to + // save the the latest version of any nonreactive subtrees (in case + // getCurrentResult is called), even though we skip broadcasting changes. + if (lastError || !result.partial || this.options.returnPartialData) { + this.updateLastResult(result, variables); + } + if (lastError || isDifferent) { + iterateObserversSafely(this.observers, "next", result); } } @@ -948,6 +999,10 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); this.observers.clear(); this.isTornDown = true; } + + private transformDocument(document: DocumentNode) { + return this.queryManager.transform(document); + } } // Necessary because the ObservableQuery constructor has a different @@ -1000,9 +1055,7 @@ export function logMissingFieldErrors( missing: MissingFieldError[] | MissingTree | undefined, ) { if (__DEV__ && missing) { - invariant.debug(`Missing cache result fields: ${ - JSON.stringify(missing) - }`, missing); + invariant.debug(`Missing cache result fields: %o`, missing); } } diff --git a/src/core/QueryInfo.ts b/src/core/QueryInfo.ts --- a/src/core/QueryInfo.ts +++ b/src/core/QueryInfo.ts @@ -1,25 +1,27 @@ -import { DocumentNode, GraphQLError } from 'graphql'; +import type { DocumentNode, GraphQLError } from 'graphql'; import { equal } from "@wry/equality"; -import { Cache, ApolloCache } from '../cache'; -import { DeepMerger } from "../utilities" -import { mergeIncrementalData } from '../utilities/common/incrementalResult'; -import { WatchQueryOptions, ErrorPolicy } from './watchQueryOptions'; -import { ObservableQuery, reobserveCacheFirst } from './ObservableQuery'; -import { QueryListener } from './types'; -import { FetchResult } from '../link/core'; +import type { Cache, ApolloCache } from '../cache/index.js'; +import { DeepMerger } from "../utilities/index.js" +import { mergeIncrementalData } from '../utilities/index.js'; +import type { WatchQueryOptions, ErrorPolicy } from './watchQueryOptions.js'; +import type { ObservableQuery} from './ObservableQuery.js'; +import { reobserveCacheFirst } from './ObservableQuery.js'; +import type { QueryListener, MethodKeys } from './types.js'; +import type { FetchResult } from '../link/core/index.js'; +import type { + ObservableSubscription} from '../utilities/index.js'; import { - ObservableSubscription, isNonEmptyArray, graphQLResultHasError, canUseWeakMap, -} from '../utilities'; +} from '../utilities/index.js'; import { NetworkStatus, isNetworkRequestInFlight, -} from './networkStatus'; -import { ApolloError } from '../errors'; -import { QueryManager } from './QueryManager'; +} from './networkStatus.js'; +import type { ApolloError } from '../errors/index.js'; +import type { QueryManager } from './QueryManager.js'; export type QueryStoreValue = Pick<QueryInfo, | "variables" @@ -40,7 +42,7 @@ const destructiveMethodCounts = new ( function 
wrapDestructiveCacheMethod( cache: ApolloCache<any>, - methodName: keyof ApolloCache<any>, + methodName: MethodKeys<ApolloCache<any>>, ) { const original = cache[methodName]; if (typeof original === "function") { diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -1,17 +1,24 @@ -import { invariant, InvariantError } from '../utilities/globals'; +import { invariant, newInvariantError } from '../utilities/globals/index.js'; -import { DocumentNode } from 'graphql'; +import type { DocumentNode } from 'graphql'; // TODO(brian): A hack until this issue is resolved (https://github.com/graphql/graphql-js/issues/3356) type OperationTypeNode = any; import { equal } from '@wry/equality'; -import { ApolloLink, execute, FetchResult } from '../link/core'; +import type { ApolloLink, FetchResult } from '../link/core/index.js'; +import { execute } from '../link/core/index.js'; import { + hasDirectives, isExecutionPatchIncrementalResult, isExecutionPatchResult, -} from '../utilities/common/incrementalResult'; -import { Cache, ApolloCache, canonicalStringify } from '../cache'; + removeDirectivesFromDocument, +} from '../utilities/index.js'; +import type { Cache, ApolloCache} from '../cache/index.js'; +import { canonicalStringify } from '../cache/index.js'; +import type { + ObservableSubscription, + ConcastSourcesArray} from '../utilities/index.js'; import { getDefaultValues, getOperationDefinition, @@ -19,31 +26,29 @@ import { hasClientExports, graphQLResultHasError, getGraphQLErrorsFromResult, - removeConnectionDirectiveFromDocument, canUseWeakMap, - ObservableSubscription, Observable, asyncMap, isNonEmptyArray, Concast, - ConcastSourcesArray, makeUniqueId, isDocumentNode, isNonNullObject, -} from '../utilities'; -import { mergeIncrementalData } from '../utilities/common/incrementalResult'; -import { ApolloError, isApolloError, graphQLResultHasProtocolErrors } from '../errors'; -import { + DocumentTransform, +} from '../utilities/index.js'; +import { mergeIncrementalData } from '../utilities/common/incrementalResult.js'; +import { ApolloError, isApolloError, graphQLResultHasProtocolErrors } from '../errors/index.js'; +import type { QueryOptions, WatchQueryOptions, SubscriptionOptions, MutationOptions, ErrorPolicy, MutationFetchPolicy, -} from './watchQueryOptions'; -import { ObservableQuery, logMissingFieldErrors } from './ObservableQuery'; -import { NetworkStatus, isNetworkRequestInFlight } from './networkStatus'; -import { +} from './watchQueryOptions.js'; +import { ObservableQuery, logMissingFieldErrors } from './ObservableQuery.js'; +import { NetworkStatus, isNetworkRequestInFlight } from './networkStatus.js'; +import type { ApolloQueryResult, OperationVariables, MutationUpdaterFunction, @@ -52,16 +57,19 @@ import { InternalRefetchQueriesOptions, InternalRefetchQueriesResult, InternalRefetchQueriesMap, -} from './types'; -import { LocalState } from './LocalState'; +} from './types.js'; +import { LocalState } from './LocalState.js'; +import type { + QueryStoreValue} from './QueryInfo.js'; import { QueryInfo, - QueryStoreValue, shouldWriteResult, CacheWriteBehavior, -} from './QueryInfo'; -import { PROTOCOL_ERRORS_SYMBOL, ApolloErrorOptions } from '../errors'; +} from './QueryInfo.js'; +import type { ApolloErrorOptions } from '../errors/index.js'; +import { PROTOCOL_ERRORS_SYMBOL } from '../errors/index.js'; +import { print } from '../utilities/index.js'; const { hasOwnProperty } = Object.prototype; @@ -75,16 +83,16 @@ interface 
MutationStoreValue { type UpdateQueries<TData> = MutationOptions<TData, any, any>["updateQueries"]; interface TransformCacheEntry { - document: DocumentNode; hasClientExports: boolean; hasForcedResolvers: boolean; + hasNonreactiveDirective: boolean; clientQuery: DocumentNode | null; serverQuery: DocumentNode | null; defaultVars: OperationVariables; asQuery: DocumentNode; } -type DefaultOptions = import("./ApolloClient").DefaultOptions; +type DefaultOptions = import("./ApolloClient.js").DefaultOptions; export class QueryManager<TStore> { public cache: ApolloCache<TStore>; @@ -92,6 +100,7 @@ export class QueryManager<TStore> { public defaultOptions: DefaultOptions; public readonly assumeImmutableResults: boolean; + public readonly documentTransform: DocumentTransform; public readonly ssrMode: boolean; private queryDeduplication: boolean; @@ -109,22 +118,26 @@ export class QueryManager<TStore> { // Maps from queryId strings to Promise rejection functions for // currently active queries and fetches. - private fetchCancelFns = new Map<string, (error: any) => any>(); + // Use protected instead of private field so + // @apollo/experimental-nextjs-app-support can access type info. + protected fetchCancelFns = new Map<string, (error: any) => any>(); constructor({ cache, link, defaultOptions, + documentTransform, queryDeduplication = false, onBroadcast, ssrMode = false, clientAwareness = {}, localState, - assumeImmutableResults, + assumeImmutableResults = !!cache.assumeImmutableResults, }: { cache: ApolloCache<TStore>; link: ApolloLink; defaultOptions?: DefaultOptions; + documentTransform?: DocumentTransform; queryDeduplication?: boolean; onBroadcast?: () => void; ssrMode?: boolean; @@ -132,6 +145,12 @@ export class QueryManager<TStore> { localState?: LocalState<TStore>; assumeImmutableResults?: boolean; }) { + const defaultDocumentTransform = new DocumentTransform( + (document) => this.cache.transformDocument(document), + // Allow the apollo cache to manage its own transform caches + { cache: false } + ); + this.cache = cache; this.link = link; this.defaultOptions = defaultOptions || Object.create(null); @@ -139,7 +158,17 @@ export class QueryManager<TStore> { this.clientAwareness = clientAwareness; this.localState = localState || new LocalState({ cache }); this.ssrMode = ssrMode; - this.assumeImmutableResults = !!assumeImmutableResults; + this.assumeImmutableResults = assumeImmutableResults; + this.documentTransform = documentTransform + ? defaultDocumentTransform + .concat(documentTransform) + // The custom document transform may add new fragment spreads or new + // field selections, so we want to give the cache a chance to run + // again. For example, the InMemoryCache adds __typename to field + // selections and fragments from the fragment registry. 
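// Usage sketch for the documentTransform option wired up in this constructor.
// The @stripped directive and the endpoint URL are invented for illustration;
// DocumentTransform, ApolloClient and InMemoryCache are the exports shown in
// this patch.
import { ApolloClient, InMemoryCache, DocumentTransform, gql } from "@apollo/client";
import { visit } from "graphql";

const documentTransform = new DocumentTransform((document) =>
  visit(document, {
    Directive(node) {
      // Returning null from a graphql-js visitor deletes the node, so this
      // drops every @stripped directive before the document reaches the link.
      if (node.name.value === "stripped") return null;
    },
  })
);

const client = new ApolloClient({
  uri: "https://example.com/graphql", // placeholder endpoint
  cache: new InMemoryCache(),
  documentTransform,
});

// Per the concat chain here, the effective transform is
// cacheTransform -> documentTransform -> cacheTransform, and it is exposed
// through the new client.documentTransform getter:
const transformed = client.documentTransform.transformDocument(gql`
  query Example {
    user {
      id
      name @stripped
    }
  }
`);
// "transformed" has @stripped removed and __typename added by the cache.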
+ .concat(defaultDocumentTransform) + : defaultDocumentTransform + if ((this.onBroadcast = onBroadcast)) { this.mutationStore = Object.create(null); } @@ -155,7 +184,7 @@ export class QueryManager<TStore> { }); this.cancelPendingFetches( - new InvariantError('QueryManager stopped while query was in flight'), + newInvariantError('QueryManager stopped while query was in flight'), ); } @@ -196,11 +225,8 @@ export class QueryManager<TStore> { const mutationId = this.generateMutationId(); - const { - document, - hasClientExports, - } = this.transform(mutation); - mutation = this.cache.transformForLink(document); + mutation = this.cache.transformForLink(this.transform(mutation)); + const { hasClientExports } = this.getDocumentInfo(mutation); variables = this.getVariables(mutation, variables) as TVariables; if (hasClientExports) { @@ -376,7 +402,7 @@ export class QueryManager<TStore> { // The cache complains if passed a mutation where it expects a // query, so we transform mutations and subscriptions to queries // (only once, thanks to this.transformCache). - query: this.transform(mutation.document).asQuery, + query: this.getDocumentInfo(mutation.document).asQuery, variables: mutation.variables, optimistic: false, returnPartialData: true, @@ -474,7 +500,7 @@ export class QueryManager<TStore> { // The cache complains if passed a mutation where it expects a // query, so we transform mutations and subscriptions to queries // (only once, thanks to this.transformCache). - query: this.transform(mutation.document).asQuery, + query: this.getDocumentInfo(mutation.document).asQuery, variables: mutation.variables, optimistic: false, returnPartialData: true, @@ -577,11 +603,11 @@ export class QueryManager<TStore> { options: WatchQueryOptions<TVars, TData>, networkStatus?: NetworkStatus, ): Promise<ApolloQueryResult<TData>> { - return this.fetchQueryObservable<TData, TVars>( + return this.fetchConcastWithInfo( queryId, options, networkStatus, - ).promise; + ).concast.promise; } public getQueryStore() { @@ -605,35 +631,41 @@ export class QueryManager<TStore> { } } + public transform(document: DocumentNode) { + return this.documentTransform.transformDocument(document); + } + private transformCache = new ( canUseWeakMap ? WeakMap : Map )<DocumentNode, TransformCacheEntry>(); - public transform(document: DocumentNode) { + public getDocumentInfo(document: DocumentNode) { const { transformCache } = this; if (!transformCache.has(document)) { - const transformed = this.cache.transformDocument(document); - const noConnection = removeConnectionDirectiveFromDocument(transformed); - const clientQuery = this.localState.clientQuery(transformed); - const serverQuery = noConnection && this.localState.serverQuery(noConnection); - const cacheEntry: TransformCacheEntry = { - document: transformed, - // TODO These two calls (hasClientExports and shouldForceResolvers) - // could probably be merged into a single traversal. - hasClientExports: hasClientExports(transformed), - hasForcedResolvers: this.localState.shouldForceResolvers(transformed), - clientQuery, - serverQuery, + // TODO These three calls (hasClientExports, shouldForceResolvers, and + // usesNonreactiveDirective) are performing independent full traversals + // of the transformed document. We should consider merging these + // traversals into a single pass in the future, though the work is + // cached after the first time. 
+      const cacheEntry: TransformCacheEntry = {
+ hasClientExports: hasClientExports(document), + hasForcedResolvers: this.localState.shouldForceResolvers(document), + hasNonreactiveDirective: hasDirectives(['nonreactive'], document), + clientQuery: this.localState.clientQuery(document), + serverQuery: removeDirectivesFromDocument([ + { name: 'client', remove: true }, + { name: 'connection' }, + { name: 'nonreactive' }, + ], document), defaultVars: getDefaultValues( - getOperationDefinition(transformed) + getOperationDefinition(document) ) as OperationVariables, // Transform any mutation or subscription operations to query operations // so we can read/write them from/to the cache. asQuery: { - ...transformed, - definitions: transformed.definitions.map(def => { + ...document, + definitions: document.definitions.map(def => { if (def.kind === "OperationDefinition" && def.operation !== "query") { return { ...def, operation: "query" as OperationTypeNode }; @@ -643,18 +675,7 @@ export class QueryManager<TStore> { } }; - const add = (doc: DocumentNode | null) => { - if (doc && !transformCache.has(doc)) { - transformCache.set(doc, cacheEntry); - } - } - // Add cacheEntry to the transformCache using several different keys, - // since any one of these documents could end up getting passed to the - // transform method again in the future. - add(document); - add(transformed); - add(clientQuery); - add(serverQuery); + transformCache.set(document, cacheEntry); } return transformCache.get(document)!; @@ -665,7 +686,7 @@ export class QueryManager<TStore> { variables?: TVariables, ): OperationVariables { return { - ...this.transform(document).defaultVars, + ...this.getDocumentInfo(document).defaultVars, ...variables, }; } @@ -673,11 +694,15 @@ export class QueryManager<TStore> { public watchQuery<T, TVariables extends OperationVariables = OperationVariables>( options: WatchQueryOptions<TVariables, T>, ): ObservableQuery<T, TVariables> { + const query = this.transform(options.query); + // assign variable default values if supplied + // NOTE: We don't modify options.query here with the transformed query to + // ensure observable.options.query is set to the raw untransformed query. options = { ...options, variables: this.getVariables( - options.query, + query, options.variables, ) as TVariables, }; @@ -692,11 +717,14 @@ export class QueryManager<TStore> { queryInfo, options, }); + observable['lastQuery'] = query; this.queries.set(observable.queryId, queryInfo); + // We give queryInfo the transformed query to ensure the first cache diff + // uses the transformed query instead of the raw query queryInfo.init({ - document: observable.query, + document: query, observableQuery: observable, variables: observable.variables, }); @@ -731,7 +759,7 @@ export class QueryManager<TStore> { return this.fetchQuery<TData, TVars>( queryId, - options, + { ...options, query: this.transform(options.query) }, ).finally(() => this.stopQuery(queryId)); } @@ -768,7 +796,7 @@ export class QueryManager<TStore> { // depend on values that previously existed in the data portion of the // store. So, we cancel the promises and observers that we have issued // so far and not yet resolved (in the case of queries). 
- this.cancelPendingFetches(new InvariantError( + this.cancelPendingFetches(newInvariantError( 'Store reset while query was in flight (not completed in link chain)', )); @@ -802,7 +830,7 @@ export class QueryManager<TStore> { if (typeof desc === "string") { queryNamesAndDocs.set(desc, false); } else if (isDocumentNode(desc)) { - queryNamesAndDocs.set(this.transform(desc).document, false); + queryNamesAndDocs.set(this.transform(desc), false); } else if (isNonNullObject(desc) && desc.query) { legacyQueryOptions.add(desc); } @@ -867,11 +895,7 @@ export class QueryManager<TStore> { if (__DEV__ && queryNamesAndDocs.size) { queryNamesAndDocs.forEach((included, nameOrDoc) => { if (!included) { - invariant.warn(`Unknown query ${ - typeof nameOrDoc === "string" ? "named " : "" - }${ - JSON.stringify(nameOrDoc, null, 2) - } requested in refetchQueries options.include array`); + invariant.warn(typeof nameOrDoc === "string" ? `Unknown query named "%s" requested in refetchQueries options.include array` : `Unknown query %s requested in refetchQueries options.include array`, nameOrDoc); } }); } @@ -913,7 +937,7 @@ export class QueryManager<TStore> { variables, context = {}, }: SubscriptionOptions): Observable<FetchResult<T>> { - query = this.transform(query).document; + query = this.transform(query); variables = this.getVariables(query, variables); const makeObservable = (variables: OperationVariables) => @@ -953,7 +977,7 @@ export class QueryManager<TStore> { return result; }); - if (this.transform(query).hasClientExports) { + if (this.getDocumentInfo(query).hasClientExports) { const observablePromise = this.localState.addExportedVariables( query, variables, @@ -1005,8 +1029,10 @@ export class QueryManager<TStore> { return this.localState; } - private inFlightLinkObservables = new Map< - DocumentNode, + // Use protected instead of private field so + // @apollo/experimental-nextjs-app-support can access type info. 
+ protected inFlightLinkObservables = new Map< + string, Map<string, Observable<FetchResult>> >(); @@ -1021,7 +1047,7 @@ export class QueryManager<TStore> { ): Observable<FetchResult<T>> { let observable: Observable<FetchResult<T>>; - const { serverQuery } = this.transform(query); + const { serverQuery, clientQuery } = this.getDocumentInfo(query); if (serverQuery) { const { inFlightLinkObservables, link } = this; @@ -1038,8 +1064,9 @@ export class QueryManager<TStore> { context = operation.context; if (deduplication) { - const byVariables = inFlightLinkObservables.get(serverQuery) || new Map(); - inFlightLinkObservables.set(serverQuery, byVariables); + const printedServerQuery = print(serverQuery); + const byVariables = inFlightLinkObservables.get(printedServerQuery) || new Map(); + inFlightLinkObservables.set(printedServerQuery, byVariables); const varJson = canonicalStringify(variables); observable = byVariables.get(varJson); @@ -1054,7 +1081,7 @@ export class QueryManager<TStore> { concast.beforeNext(() => { if (byVariables.delete(varJson) && byVariables.size < 1) { - inFlightLinkObservables.delete(serverQuery); + inFlightLinkObservables.delete(printedServerQuery); } }); } @@ -1071,7 +1098,6 @@ export class QueryManager<TStore> { context = this.prepareContext(context); } - const { clientQuery } = this.transform(query); if (clientQuery) { observable = asyncMap(observable, result => { return this.localState.runResolvers({ @@ -1090,6 +1116,7 @@ export class QueryManager<TStore> { queryInfo: QueryInfo, cacheWriteBehavior: CacheWriteBehavior, options: Pick<WatchQueryOptions<TVars, TData>, + | "query" | "variables" | "context" | "fetchPolicy" @@ -1100,10 +1127,7 @@ export class QueryManager<TStore> { // Performing transformForLink here gives this.cache a chance to fill in // missing fragment definitions (for example) before sending this document // through the link chain. - const linkDocument = this.cache.transformForLink( - // Use same document originally produced by this.cache.transformDocument. - this.transform(queryInfo.document!).document - ); + const linkDocument = this.cache.transformForLink(options.query); return asyncMap( this.getObservableFromLink( @@ -1161,21 +1185,6 @@ export class QueryManager<TStore> { ); } - public fetchQueryObservable<TData, TVars extends OperationVariables>( - queryId: string, - options: WatchQueryOptions<TVars, TData>, - // The initial networkStatus for this fetch, most often - // NetworkStatus.loading, but also possibly fetchMore, poll, refetch, - // or setVariables. - networkStatus?: NetworkStatus, - ): Concast<ApolloQueryResult<TData>> { - return this.fetchConcastWithInfo( - queryId, - options, - networkStatus, - ).concast; - } - private fetchConcastWithInfo<TData, TVars extends OperationVariables>( queryId: string, options: WatchQueryOptions<TVars, TData>, @@ -1184,7 +1193,7 @@ export class QueryManager<TStore> { // or setVariables. networkStatus = NetworkStatus.loading ): ConcastAndInfo<TData> { - const query = this.transform(options.query).document; + const { query } = options; const variables = this.getVariables(query, options.variables) as TVars; const queryInfo = this.getQuery(queryId); @@ -1252,7 +1261,7 @@ export class QueryManager<TStore> { // since the timing of result delivery is (unfortunately) important // for backwards compatibility. TODO This code could be simpler if // we deprecated and removed LocalState. 
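// Illustration of the deduplication key change above: in-flight requests are
// now grouped by the printed query text (plus canonically stringified
// variables) rather than by DocumentNode identity, so two structurally
// identical documents parsed separately share a single in-flight entry. The
// patch uses Apollo's cached print wrapper; graphql's print yields the same
// string for this sketch.
import { parse, print } from "graphql";

const a = parse(`query Hello { hello }`);
const b = parse(`query Hello { hello }`);

console.log(a === b);               // false: parse() returns a fresh DocumentNode each time
console.log(print(a) === print(b)); // true: identical text, so both map to the same entry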
- if (this.transform(normalized.query).hasClientExports) { + if (this.getDocumentInfo(normalized.query).hasClientExports) { concast = new Concast( this.localState .addExportedVariables(normalized.query, normalized.variables, normalized.context) @@ -1453,7 +1462,7 @@ export class QueryManager<TStore> { const oldNetworkStatus = queryInfo.networkStatus; queryInfo.init({ - document: this.transform(query).document, + document: query, variables, networkStatus, }); @@ -1479,7 +1488,7 @@ export class QueryManager<TStore> { ...(diff.complete ? null : { partial: true }), } as ApolloQueryResult<TData>); - if (data && this.transform(query).hasForcedResolvers) { + if (data && this.getDocumentInfo(query).hasForcedResolvers) { return this.localState.runResolvers({ document: query, remoteResult: { data }, @@ -1517,6 +1526,7 @@ export class QueryManager<TStore> { queryInfo, cacheWriteBehavior, { + query, variables, context, fetchPolicy, diff --git a/src/core/equalByQuery.ts b/src/core/equalByQuery.ts new file mode 100644 --- /dev/null +++ b/src/core/equalByQuery.ts @@ -0,0 +1,156 @@ +import equal from "@wry/equality"; + +import type { + DirectiveNode, + DocumentNode, + FieldNode, + FragmentDefinitionNode, + FragmentSpreadNode, + InlineFragmentNode, + SelectionNode, + SelectionSetNode, +} from "graphql"; + +import type { ApolloQueryResult, OperationVariables } from "./types.js"; + +import type { FragmentMap } from "../utilities/index.js"; +import { + createFragmentMap, + getFragmentDefinitions, + getFragmentFromSelection, + getMainDefinition, + isField, + resultKeyNameFromField, + shouldInclude, +} from "../utilities/index.js"; + +// Returns true if aResult and bResult are deeply equal according to the fields +// selected by the given query, ignoring any fields marked as @nonreactive. +export function equalByQuery( + query: DocumentNode, + { data: aData, ...aRest }: Partial<ApolloQueryResult<unknown>>, + { data: bData, ...bRest }: Partial<ApolloQueryResult<unknown>>, + variables?: OperationVariables +): boolean { + return ( + equal(aRest, bRest) && + equalBySelectionSet(getMainDefinition(query).selectionSet, aData, bData, { + fragmentMap: createFragmentMap(getFragmentDefinitions(query)), + variables, + }) + ); +} + +// Encapsulates the information used by equalBySelectionSet that does not change +// during the recursion. +interface CompareContext<TVariables> { + fragmentMap: FragmentMap; + variables: TVariables | undefined; +} + +function equalBySelectionSet( + selectionSet: SelectionSetNode, + aResult: any, + bResult: any, + context: CompareContext<OperationVariables> +): boolean { + if (aResult === bResult) { + return true; + } + + const seenSelections = new Set<SelectionNode>(); + + // Returning true from this Array.prototype.every callback function skips the + // current field/subtree. Returning false aborts the entire traversal + // immediately, causing equalBySelectionSet to return false. + return selectionSet.selections.every((selection) => { + // Avoid re-processing the same selection at the same level of recursion, in + // case the same field gets included via multiple indirect fragment spreads. + if (seenSelections.has(selection)) return true; + seenSelections.add(selection); + + // Ignore @skip(if: true) and @include(if: false) fields. + if (!shouldInclude(selection, context.variables)) return true; + + // If the field or (named) fragment spread has a @nonreactive directive on + // it, we don't care if it's different, so we pretend it's the same. 
+ if (selectionHasNonreactiveDirective(selection)) return true; + + if (isField(selection)) { + const resultKey = resultKeyNameFromField(selection); + const aResultChild = aResult && aResult[resultKey]; + const bResultChild = bResult && bResult[resultKey]; + const childSelectionSet = selection.selectionSet; + + if (!childSelectionSet) { + // These are scalar values, so we can compare them with deep equal + // without redoing the main recursive work. + return equal(aResultChild, bResultChild); + } + + const aChildIsArray = Array.isArray(aResultChild); + const bChildIsArray = Array.isArray(bResultChild); + if (aChildIsArray !== bChildIsArray) return false; + if (aChildIsArray && bChildIsArray) { + const length = aResultChild.length; + if (bResultChild.length !== length) { + return false; + } + for (let i = 0; i < length; ++i) { + if ( + !equalBySelectionSet( + childSelectionSet, + aResultChild[i], + bResultChild[i], + context + ) + ) { + return false; + } + } + return true; + } + + return equalBySelectionSet( + childSelectionSet, + aResultChild, + bResultChild, + context + ); + } else { + const fragment = getFragmentFromSelection(selection, context.fragmentMap); + if (fragment) { + // The fragment might === selection if it's an inline fragment, but + // could be !== if it's a named fragment ...spread. + if (selectionHasNonreactiveDirective(fragment)) return true; + + return equalBySelectionSet( + fragment.selectionSet, + // Notice that we reuse the same aResult and bResult values here, + // since the fragment ...spread does not specify a field name, but + // consists of multiple fields (within the fragment's selection set) + // that should be applied to the current result value(s). + aResult, + bResult, + context + ); + } + } + }); +} + +function selectionHasNonreactiveDirective( + selection: + | FieldNode + | InlineFragmentNode + | FragmentSpreadNode + | FragmentDefinitionNode +): boolean { + return ( + !!selection.directives && selection.directives.some(directiveIsNonreactive) + ); +} + +function directiveIsNonreactive(dir: DirectiveNode): boolean { + return dir.name.value === "nonreactive"; +} diff --git a/src/core/index.ts b/src/core/index.ts --- a/src/core/index.ts +++ b/src/core/index.ts @@ -1,19 +1,13 @@ /* Core */ -import { DEV } from '../utilities/globals'; - -export { - ApolloClient, - ApolloClientOptions, - DefaultOptions, - mergeOptions, -} from './ApolloClient'; -export { - ObservableQuery, +export type { ApolloClientOptions, DefaultOptions } from "./ApolloClient.js"; +export { ApolloClient, mergeOptions } from "./ApolloClient.js"; +export type { FetchMoreOptions, UpdateQueryOptions, -} from './ObservableQuery'; -export { +} from "./ObservableQuery.js"; +export { ObservableQuery } from "./ObservableQuery.js"; +export type { QueryOptions, WatchQueryOptions, MutationOptions, @@ -23,30 +17,20 @@ export { ErrorPolicy, FetchMoreQueryOptions, SubscribeToMoreOptions, -} from './watchQueryOptions'; -export { NetworkStatus } from './networkStatus'; -export * from './types'; -export { - Resolver, - FragmentMatcher, -} from './LocalState'; -export { isApolloError, ApolloError } from '../errors'; - +} from "./watchQueryOptions.js"; +export { NetworkStatus, isNetworkRequestSettled } from "./networkStatus.js"; +export * from "./types.js"; +export type { Resolver, FragmentMatcher } from "./LocalState.js"; +export { isApolloError, ApolloError } from "../errors/index.js"; /* Cache */ -export { - // All the exports (types and values) from ../cache, minus cacheSlot, +export type { + // All the 
exports (types) from ../cache, minus cacheSlot, // which we want to keep semi-private. - Cache, - ApolloCache, Transaction, DataProxy, - InMemoryCache, InMemoryCacheConfig, - MissingFieldError, - defaultDataIdFromObject, ReactiveVar, - makeVar, TypePolicies, TypePolicy, FieldPolicy, @@ -54,33 +38,45 @@ export { FieldMergeFunction, FieldFunctionOptions, PossibleTypesMap, -} from '../cache'; +} from "../cache/index.js"; +export { + Cache, + ApolloCache, + InMemoryCache, + MissingFieldError, + defaultDataIdFromObject, + makeVar, +} from "../cache/index.js"; -export * from '../cache/inmemory/types'; +export * from "../cache/inmemory/types.js"; /* Link */ -export * from '../link/core'; -export * from '../link/http'; +export * from "../link/core/index.js"; +export * from "../link/http/index.js"; +export type { ServerError } from "../link/utils/index.js"; export { fromError, toPromise, fromPromise, - ServerError, throwServerError, -} from '../link/utils'; +} from "../link/utils/index.js"; /* Utilities */ -export { - Observable, +export type { + DocumentTransformCacheKey, Observer, ObservableSubscription, Reference, + StoreObject, +} from "../utilities/index.js"; +export { + DocumentTransform, + Observable, isReference, makeReference, - StoreObject, -} from '../utilities'; +} from "../utilities/index.js"; /* Supporting */ @@ -89,8 +85,8 @@ export { // "warn", "error", or "silent" to setVerbosity ("log" is the default). // Note that all invariant.* logging is hidden in production. import { setVerbosity } from "ts-invariant"; -export { setVerbosity as setLogVerbosity } -setVerbosity(DEV ? "log" : "silent"); +export { setVerbosity as setLogVerbosity }; +setVerbosity(__DEV__ ? "log" : "silent"); // Note that importing `gql` by itself, then destructuring // additional properties separately before exporting, is intentional. @@ -110,4 +106,4 @@ export { disableFragmentWarnings, enableExperimentalFragmentVariables, disableExperimentalFragmentVariables, -} from 'graphql-tag'; +} from "graphql-tag"; diff --git a/src/core/networkStatus.ts b/src/core/networkStatus.ts --- a/src/core/networkStatus.ts +++ b/src/core/networkStatus.ts @@ -54,3 +54,13 @@ export function isNetworkRequestInFlight( ): boolean { return networkStatus ? networkStatus < 7 : false; } + +/** + * Returns true if the network request is in ready or error state according to a given network + * status. 
+ */ +export function isNetworkRequestSettled( + networkStatus?: NetworkStatus, +): boolean { + return networkStatus === 7 || networkStatus === 8; +} diff --git a/src/core/types.ts b/src/core/types.ts --- a/src/core/types.ts +++ b/src/core/types.ts @@ -1,17 +1,21 @@ -import { DocumentNode, GraphQLError } from 'graphql'; - -import { ApolloCache } from '../cache'; -import { FetchResult } from '../link/core'; -import { ApolloError } from '../errors'; -import { QueryInfo } from './QueryInfo'; -import { NetworkStatus } from './networkStatus'; -import { Resolver } from './LocalState'; -import { ObservableQuery } from './ObservableQuery'; -import { QueryOptions } from './watchQueryOptions'; -import { Cache } from '../cache'; -import { IsStrictlyAny } from '../utilities'; - -export { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import type { DocumentNode, GraphQLError } from 'graphql'; + +import type { ApolloCache } from '../cache/index.js'; +import type { FetchResult } from '../link/core/index.js'; +import type { ApolloError } from '../errors/index.js'; +import type { QueryInfo } from './QueryInfo.js'; +import type { NetworkStatus } from './networkStatus.js'; +import type { Resolver } from './LocalState.js'; +import type { ObservableQuery } from './ObservableQuery.js'; +import type { QueryOptions } from './watchQueryOptions.js'; +import type { Cache } from '../cache/index.js'; +import type { IsStrictlyAny } from '../utilities/index.js'; + +export type { TypedDocumentNode } from '@graphql-typed-document-node/core'; + +export type MethodKeys<T> = { + [P in keyof T]: T[P] extends Function ? P : never +}[keyof T]; export interface DefaultContext extends Record<string, any> {}; diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -1,17 +1,17 @@ -import { DocumentNode } from 'graphql'; -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import type { DocumentNode } from 'graphql'; +import type { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { FetchResult } from '../link/core'; -import { +import type { FetchResult } from '../link/core/index.js'; +import type { DefaultContext, MutationQueryReducersMap, OperationVariables, MutationUpdaterFunction, OnQueryUpdated, InternalRefetchQueriesInclude, -} from './types'; -import { ApolloCache } from '../cache'; -import { ObservableQuery } from './ObservableQuery'; +} from './types.js'; +import type { ApolloCache } from '../cache/index.js'; +import type { ObservableQuery } from './ObservableQuery.js'; /** * fetchPolicy determines where the client may return a result from. 
The options are: diff --git a/src/dev/index.ts b/src/dev/index.ts new file mode 100644 --- /dev/null +++ b/src/dev/index.ts @@ -0,0 +1,3 @@ +export { loadDevMessages } from "./loadDevMessages.js"; +export { loadErrorMessageHandler } from "./loadErrorMessageHandler.js"; +export { loadErrorMessages } from "./loadErrorMessages.js"; diff --git a/src/dev/loadDevMessages.ts b/src/dev/loadDevMessages.ts new file mode 100644 --- /dev/null +++ b/src/dev/loadDevMessages.ts @@ -0,0 +1,6 @@ +import { devDebug, devError, devLog, devWarn } from "../invariantErrorCodes.js"; +import { loadErrorMessageHandler } from "./loadErrorMessageHandler.js"; + +export function loadDevMessages() { + loadErrorMessageHandler(devDebug, devError, devLog, devWarn); +} diff --git a/src/dev/loadErrorMessageHandler.ts b/src/dev/loadErrorMessageHandler.ts new file mode 100644 --- /dev/null +++ b/src/dev/loadErrorMessageHandler.ts @@ -0,0 +1,27 @@ +import type { ErrorCodes } from "../invariantErrorCodes.js"; +import { global } from "../utilities/globals/index.js"; +import { ApolloErrorMessageHandler } from "../utilities/globals/invariantWrappers.js"; + +export function loadErrorMessageHandler(...errorCodes: ErrorCodes[]) { + if (!global[ApolloErrorMessageHandler]) { + global[ApolloErrorMessageHandler] = handler as typeof handler & ErrorCodes; + } + + for (const codes of errorCodes) { + Object.assign(global[ApolloErrorMessageHandler], codes); + } + + return global[ApolloErrorMessageHandler]; + + function handler(message: string | number, args: unknown[]) { + if (typeof message === "number") { + const definition = global[ApolloErrorMessageHandler]![message]; + if (!message || !definition.message) return; + message = definition.message; + } + return args.reduce<string>( + (msg, arg) => msg.replace("%s", String(arg)), + String(message) + ); + } +} diff --git a/src/dev/loadErrorMessages.ts b/src/dev/loadErrorMessages.ts new file mode 100644 --- /dev/null +++ b/src/dev/loadErrorMessages.ts @@ -0,0 +1,6 @@ +import { errorCodes } from "../invariantErrorCodes.js"; +import { loadErrorMessageHandler } from "./loadErrorMessageHandler.js"; + +export function loadErrorMessages() { + loadErrorMessageHandler(errorCodes); +} diff --git a/src/errors/index.ts b/src/errors/index.ts --- a/src/errors/index.ts +++ b/src/errors/index.ts @@ -1,11 +1,11 @@ -import '../utilities/globals'; +import '../utilities/globals/index.js'; -import { GraphQLError, GraphQLErrorExtensions } from 'graphql'; +import type { GraphQLError, GraphQLErrorExtensions } from 'graphql'; -import { isNonNullObject } from '../utilities'; -import { ServerParseError } from '../link/http'; -import { ServerError } from '../link/utils'; -import { FetchResult } from "../link/core"; +import { isNonNullObject } from '../utilities/index.js'; +import type { ServerParseError } from '../link/http/index.js'; +import type { ServerError } from '../link/utils/index.js'; +import type { FetchResult } from "../link/core/index.js"; // This Symbol allows us to pass transport-specific errors from the link chain // into QueryManager/client internals without risking a naming collision within diff --git a/src/index.ts b/src/index.ts --- a/src/index.ts +++ b/src/index.ts @@ -1,2 +1,2 @@ -export * from './core'; -export * from './react'; +export * from './core/index.js'; +export * from './react/index.js'; diff --git a/src/invariantErrorCodes.ts b/src/invariantErrorCodes.ts new file mode 100644 --- /dev/null +++ b/src/invariantErrorCodes.ts @@ -0,0 +1,9 @@ +export interface ErrorCodes { + [key: number]: { 
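// Usage sketch for the new src/dev helpers defined here. The
// "@apollo/client/dev" import specifier is an assumption about how this
// directory ends up being published; adjust it to wherever the dev entry
// point actually lives.
import { loadDevMessages, loadErrorMessages } from "@apollo/client/dev";

if (process.env.NODE_ENV !== "production") {
  loadDevMessages();   // registers the devDebug/devLog/devWarn/devError code tables
  loadErrorMessages(); // registers the invariant errorCodes table
}

// Once the handler is installed, templates logged with %s placeholders, such as
//   invariant(fragment, `No fragment named %s`, selection.name.value)
// are expanded by handler(), which replaces each %s with String(arg).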
file: string, condition?: string, message?: string } +} + +export const errorCodes: ErrorCodes = {}; +export const devDebug: ErrorCodes = {}; +export const devLog: ErrorCodes = {}; +export const devWarn: ErrorCodes = {}; +export const devError: ErrorCodes = {}; diff --git a/src/link/batch-http/batchHttpLink.ts b/src/link/batch-http/batchHttpLink.ts --- a/src/link/batch-http/batchHttpLink.ts +++ b/src/link/batch-http/batchHttpLink.ts @@ -1,6 +1,13 @@ -import { ApolloLink, Operation, FetchResult } from '../core'; -import { Observable } from '../../utilities'; -import { fromError } from '../utils'; +import type { Operation, FetchResult } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import { + Observable, + hasDirectives, + removeClientSetsFromDocument +} from '../../utilities/index.js'; +import { fromError } from '../utils/index.js'; +import type { + HttpOptions} from '../http/index.js'; import { serializeFetchParameter, selectURI, @@ -9,10 +16,9 @@ import { selectHttpOptionsAndBodyInternal, defaultPrinter, fallbackHttpConfig, - HttpOptions, - createSignalIfSupported, -} from '../http'; -import { BatchLink } from '../batch'; +} from '../http/index.js'; +import { BatchLink } from '../batch/index.js'; +import { filterOperationVariables } from "../utils/filterOperationVariables.js"; export namespace BatchHttpLink { export type Options = Pick< @@ -45,6 +51,7 @@ export class BatchHttpLink extends ApolloLink { batchDebounce, batchMax, batchKey, + includeUnusedVariables = false, ...requestOptions } = fetchParams || ({} as BatchHttpLink.Options); @@ -95,16 +102,43 @@ export class BatchHttpLink extends ApolloLink { headers: { ...clientAwarenessHeaders, ...context.headers }, }; + const queries = operations.map(({ query }) => { + if (hasDirectives(['client'], query)) { + return removeClientSetsFromDocument(query); + } + + return query; + }); + + // If we have a query that returned `null` after removing client-only + // fields, it indicates a query that is using all client-only fields. + if (queries.some(query => !query)) { + return fromError<FetchResult[]>( + new Error( + 'BatchHttpLink: Trying to send a client-only query to the server. To send to the server, ensure a non-client field is added to the query or enable the `transformOptions.removeClientFields` option.' + ) + ); + } + //uses fallback, link, and then context to build options - const optsAndBody = operations.map(operation => - selectHttpOptionsAndBodyInternal( - operation, + const optsAndBody = operations.map((operation, index) => { + const result = selectHttpOptionsAndBodyInternal( + { ...operation, query: queries[index]! 
}, print, fallbackHttpConfig, linkConfig, - contextConfig, - ), - ); + contextConfig + ); + + if (result.body.variables && !includeUnusedVariables) { + result.body.variables = filterOperationVariables( + result.body.variables, + operation.query + ); + } + + return result; + }); const loadedBody = optsAndBody.map(({ body }) => body); const options = optsAndBody[0].options; @@ -122,11 +156,10 @@ export class BatchHttpLink extends ApolloLink { return fromError<FetchResult[]>(parseError); } - let controller: any; - if (!(options as any).signal) { - const { controller: _controller, signal } = createSignalIfSupported(); - controller = _controller; - if (controller) (options as any).signal = signal; + let controller: AbortController | undefined; + if (!options.signal && typeof AbortController !== 'undefined') { + controller = new AbortController(); + options.signal = controller.signal; } return new Observable<FetchResult[]>(observer => { @@ -138,14 +171,14 @@ export class BatchHttpLink extends ApolloLink { }) .then(parseAndCheckHttpResponse(operations)) .then(result => { + controller = undefined; // we have data and can send it to back up the link chain observer.next(result); observer.complete(); return result; }) .catch(err => { - // fetch was cancelled so its already been cleaned up in the unsubscribe - if (err.name === 'AbortError') return; + controller = undefined; // if it is a network error, BUT there is graphql result info // fire the next observer before calling error // this gives apollo-client (and react-apollo) the `graphqlErrors` and `networkErrors` diff --git a/src/link/batch-http/index.ts b/src/link/batch-http/index.ts --- a/src/link/batch-http/index.ts +++ b/src/link/batch-http/index.ts @@ -1 +1 @@ -export * from './batchHttpLink'; +export * from './batchHttpLink.js'; diff --git a/src/link/batch/batchLink.ts b/src/link/batch/batchLink.ts --- a/src/link/batch/batchLink.ts +++ b/src/link/batch/batchLink.ts @@ -1,7 +1,10 @@ -import { ApolloLink, Operation, FetchResult, NextLink } from '../core'; -import { Observable } from '../../utilities'; -import { OperationBatcher, BatchHandler } from './batching'; -export { OperationBatcher, BatchableRequest, BatchHandler } from './batching'; +import type { Operation, FetchResult, NextLink } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import type { Observable } from '../../utilities/index.js'; +import type { BatchHandler } from './batching.js'; +import { OperationBatcher } from './batching.js'; +export type { BatchableRequest, BatchHandler } from './batching.js'; +export { OperationBatcher } from './batching.js'; export namespace BatchLink { diff --git a/src/link/batch/batching.ts b/src/link/batch/batching.ts --- a/src/link/batch/batching.ts +++ b/src/link/batch/batching.ts @@ -1,5 +1,6 @@ -import { FetchResult, NextLink, Operation } from '../core'; -import { Observable, ObservableSubscription } from '../../utilities'; +import type { FetchResult, NextLink, Operation } from '../core/index.js'; +import type { ObservableSubscription } from '../../utilities/index.js'; +import { Observable } from '../../utilities/index.js'; export type BatchHandler = ( operations: Operation[], diff --git a/src/link/batch/index.ts b/src/link/batch/index.ts --- a/src/link/batch/index.ts +++ b/src/link/batch/index.ts @@ -1 +1 @@ -export * from './batchLink'; +export * from './batchLink.js'; diff --git a/src/link/context/index.ts b/src/link/context/index.ts --- a/src/link/context/index.ts +++ b/src/link/context/index.ts @@ -1,6 +1,8 @@ 
-import { ApolloLink, Operation, GraphQLRequest, NextLink } from '../core'; -import { Observable, ObservableSubscription } from '../../utilities'; -import { DefaultContext } from '../../core'; +import type { Operation, GraphQLRequest, NextLink } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import type { ObservableSubscription } from '../../utilities/index.js'; +import { Observable } from '../../utilities/index.js'; +import type { DefaultContext } from '../../core/index.js'; export type ContextSetter = ( operation: GraphQLRequest, diff --git a/src/link/core/ApolloLink.ts b/src/link/core/ApolloLink.ts --- a/src/link/core/ApolloLink.ts +++ b/src/link/core/ApolloLink.ts @@ -1,18 +1,19 @@ -import { InvariantError, invariant } from '../../utilities/globals'; +import { newInvariantError, invariant } from '../../utilities/globals/index.js'; -import { Observable, Observer } from '../../utilities'; -import { +import type { Observer } from '../../utilities/index.js'; +import { Observable } from '../../utilities/index.js'; +import type { NextLink, Operation, RequestHandler, FetchResult, GraphQLRequest -} from './types'; +} from './types.js'; import { validateOperation, createOperation, transformOperation, -} from '../utils'; +} from '../utils/index.js'; function passthrough(op: Operation, forward: NextLink) { return (forward ? forward(op) : Observable.of()) as Observable<FetchResult>; @@ -26,14 +27,6 @@ function isTerminating(link: ApolloLink): boolean { return link.request.length <= 1; } -class LinkError extends Error { - public link?: ApolloLink; - constructor(message?: string, link?: ApolloLink) { - super(message); - this.link = link; - } -} - export class ApolloLink { public static empty(): ApolloLink { return new ApolloLink(() => Observable.of()); @@ -88,10 +81,8 @@ export class ApolloLink { const firstLink = toLink(first); if (isTerminating(firstLink)) { invariant.warn( - new LinkError( - `You are calling concat on a terminating link, which will have no effect`, + `You are calling concat on a terminating link, which will have no effect %o`, firstLink, - ), ); return firstLink; } @@ -138,7 +129,7 @@ export class ApolloLink { operation: Operation, forward?: NextLink, ): Observable<FetchResult> | null { - throw new InvariantError('request is not implemented'); + throw newInvariantError('request is not implemented'); } protected onError( diff --git a/src/link/core/concat.ts b/src/link/core/concat.ts --- a/src/link/core/concat.ts +++ b/src/link/core/concat.ts @@ -1,3 +1,3 @@ -import { ApolloLink } from './ApolloLink'; +import { ApolloLink } from './ApolloLink.js'; export const concat = ApolloLink.concat; diff --git a/src/link/core/empty.ts b/src/link/core/empty.ts --- a/src/link/core/empty.ts +++ b/src/link/core/empty.ts @@ -1,3 +1,3 @@ -import { ApolloLink } from './ApolloLink'; +import { ApolloLink } from './ApolloLink.js'; export const empty = ApolloLink.empty; diff --git a/src/link/core/execute.ts b/src/link/core/execute.ts --- a/src/link/core/execute.ts +++ b/src/link/core/execute.ts @@ -1,3 +1,3 @@ -import { ApolloLink } from './ApolloLink'; +import { ApolloLink } from './ApolloLink.js'; export const execute = ApolloLink.execute; diff --git a/src/link/core/from.ts b/src/link/core/from.ts --- a/src/link/core/from.ts +++ b/src/link/core/from.ts @@ -1,3 +1,3 @@ -import { ApolloLink } from './ApolloLink'; +import { ApolloLink } from './ApolloLink.js'; export const from = ApolloLink.from; diff --git a/src/link/core/index.ts b/src/link/core/index.ts --- 
a/src/link/core/index.ts +++ b/src/link/core/index.ts @@ -1,10 +1,10 @@ -import '../../utilities/globals'; +import '../../utilities/globals/index.js'; -export { empty } from './empty'; -export { from } from './from'; -export { split } from './split'; -export { concat } from './concat'; -export { execute } from './execute'; -export { ApolloLink } from './ApolloLink'; +export { empty } from './empty.js'; +export { from } from './from.js'; +export { split } from './split.js'; +export { concat } from './concat.js'; +export { execute } from './execute.js'; +export { ApolloLink } from './ApolloLink.js'; -export * from './types'; +export * from './types.js'; diff --git a/src/link/core/split.ts b/src/link/core/split.ts --- a/src/link/core/split.ts +++ b/src/link/core/split.ts @@ -1,3 +1,3 @@ -import { ApolloLink } from './ApolloLink'; +import { ApolloLink } from './ApolloLink.js'; export const split = ApolloLink.split; diff --git a/src/link/core/types.ts b/src/link/core/types.ts --- a/src/link/core/types.ts +++ b/src/link/core/types.ts @@ -1,8 +1,9 @@ -import { DocumentNode, ExecutionResult, GraphQLError } from "graphql"; -import { DefaultContext } from "../../core"; -export { DocumentNode }; +import type { ExecutionResult, GraphQLError } from "graphql"; +import type { DocumentNode } from "graphql"; +import type { DefaultContext } from "../../core/index.js"; +export type { DocumentNode }; -import { Observable } from "../../utilities"; +import type { Observable } from "../../utilities/index.js"; export type Path = ReadonlyArray<string | number>; diff --git a/src/link/error/index.ts b/src/link/error/index.ts --- a/src/link/error/index.ts +++ b/src/link/error/index.ts @@ -1,8 +1,9 @@ -import { ExecutionResult } from 'graphql'; +import type { ExecutionResult } from 'graphql'; -import { NetworkError, GraphQLErrors } from '../../errors'; -import { Observable } from '../../utilities'; -import { ApolloLink, Operation, FetchResult, NextLink } from '../core'; +import type { NetworkError, GraphQLErrors } from '../../errors/index.js'; +import { Observable } from '../../utilities/index.js'; +import type { Operation, FetchResult, NextLink } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; export interface ErrorResponse { graphQLErrors?: GraphQLErrors; diff --git a/src/link/http/HttpLink.ts b/src/link/http/HttpLink.ts --- a/src/link/http/HttpLink.ts +++ b/src/link/http/HttpLink.ts @@ -1,6 +1,7 @@ -import { ApolloLink, RequestHandler } from '../core'; -import { HttpOptions } from './selectHttpOptionsAndBody'; -import { createHttpLink } from './createHttpLink'; +import type { RequestHandler } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import type { HttpOptions } from './selectHttpOptionsAndBody.js'; +import { createHttpLink } from './createHttpLink.js'; export class HttpLink extends ApolloLink { public requester: RequestHandler; diff --git a/src/link/http/checkFetcher.ts b/src/link/http/checkFetcher.ts --- a/src/link/http/checkFetcher.ts +++ b/src/link/http/checkFetcher.ts @@ -1,8 +1,8 @@ -import { InvariantError } from '../../utilities/globals'; +import { newInvariantError } from '../../utilities/globals/index.js'; export const checkFetcher = (fetcher: WindowOrWorkerGlobalScope['fetch'] | undefined) => { if (!fetcher && typeof fetch === 'undefined') { - throw new InvariantError(` + throw newInvariantError(` "fetch" has not been found globally and no fetcher has been \ configured. 
To fix this, install a fetch package (like \ https://www.npmjs.com/package/cross-fetch), instantiate the \ diff --git a/src/link/http/createHttpLink.ts b/src/link/http/createHttpLink.ts --- a/src/link/http/createHttpLink.ts +++ b/src/link/http/createHttpLink.ts @@ -1,28 +1,32 @@ -import '../../utilities/globals'; -import { invariant } from '../../utilities/globals'; +import { invariant } from '../../utilities/globals/index.js'; -import { visit, DefinitionNode, VariableDefinitionNode } from 'graphql'; +import type { DefinitionNode } from 'graphql'; -import { ApolloLink } from '../core'; -import { Observable, hasDirectives } from '../../utilities'; -import { serializeFetchParameter } from './serializeFetchParameter'; -import { selectURI } from './selectURI'; +import { ApolloLink } from '../core/index.js'; +import { Observable, hasDirectives } from '../../utilities/index.js'; +import { serializeFetchParameter } from './serializeFetchParameter.js'; +import { selectURI } from './selectURI.js'; import { handleError, readMultipartBody, - readJsonBody -} from './parseAndCheckHttpResponse'; -import { checkFetcher } from './checkFetcher'; + parseAndCheckHttpResponse +} from './parseAndCheckHttpResponse.js'; +import { checkFetcher } from './checkFetcher.js'; +import type { + HttpOptions +} from './selectHttpOptionsAndBody.js'; import { selectHttpOptionsAndBodyInternal, defaultPrinter, - fallbackHttpConfig, - HttpOptions -} from './selectHttpOptionsAndBody'; -import { createSignalIfSupported } from './createSignalIfSupported'; -import { rewriteURIForGET } from './rewriteURIForGET'; -import { fromError } from '../utils'; -import { maybe, getMainDefinition } from '../../utilities'; + fallbackHttpConfig +} from './selectHttpOptionsAndBody.js'; +import { rewriteURIForGET } from './rewriteURIForGET.js'; +import { fromError, filterOperationVariables } from '../utils/index.js'; +import { + maybe, + getMainDefinition, + removeClientSetsFromDocument +} from '../../utilities/index.js'; const backupFetch = maybe(() => fetch); @@ -87,6 +91,20 @@ export const createHttpLink = (linkOptions: HttpOptions = {}) => { headers: contextHeaders, }; + if (hasDirectives(['client'], operation.query)) { + const transformedQuery = removeClientSetsFromDocument(operation.query); + + if (!transformedQuery) { + return fromError( + new Error( + 'HttpLink: Trying to send a client-only query to the server. To send to the server, ensure a non-client field is added to the query or set the `transformOptions.removeClientFields` option to `true`.' + ) + ); + } + + operation.query = transformedQuery; + } + //uses fallback, link, and then context to build options const { options, body } = selectHttpOptionsAndBodyInternal( operation, @@ -97,33 +115,13 @@ export const createHttpLink = (linkOptions: HttpOptions = {}) => { ); if (body.variables && !includeUnusedVariables) { - const unusedNames = new Set(Object.keys(body.variables)); - visit(operation.query, { - Variable(node, _key, parent) { - // A variable type definition at the top level of a query is not - // enough to silence server-side errors about the variable being - // unused, so variable definitions do not count as usage. - // https://spec.graphql.org/draft/#sec-All-Variables-Used - if (parent && (parent as VariableDefinitionNode).kind !== 'VariableDefinition') { - unusedNames.delete(node.name.value); - } - }, - }); - if (unusedNames.size) { - // Make a shallow copy of body.variables (with keys in the same - // order) and then delete unused variables from the copy. 
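A short sketch of the removeClientSetsFromDocument behavior the new @client guard above relies on; the queries are invented for illustration and are not part of the patch:

import { gql } from "@apollo/client";
import { removeClientSetsFromDocument } from "@apollo/client/utilities";

const mixedQuery = gql`
  query Product($id: ID!) {
    product(id: $id) {
      name
    }
    isInCart @client
  }
`;

const clientOnlyQuery = gql`
  query {
    isInCart @client
  }
`;

// The server-bound document keeps only the non-client selection.
const serverDocument = removeClientSetsFromDocument(mixedQuery);

// A document made up solely of @client fields collapses to null, which is the
// case the new guard in createHttpLink turns into a descriptive error.
const nothingToSend = removeClientSetsFromDocument(clientOnlyQuery); // null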
- body.variables = { ...body.variables }; - unusedNames.forEach(name => { - delete body.variables![name]; - }); - } + body.variables = filterOperationVariables(body.variables, operation.query); } - let controller: any; - if (!(options as any).signal) { - const { controller: _controller, signal } = createSignalIfSupported(); - controller = _controller; - if (controller) (options as any).signal = signal; + let controller: AbortController | undefined; + if (!options.signal && typeof AbortController !== 'undefined') { + controller = new AbortController(); + options.signal = controller.signal; } // If requested, set method to GET if there are no mutations. @@ -182,18 +180,26 @@ export const createHttpLink = (linkOptions: HttpOptions = {}) => { // removal of window.fetch, which is unlikely but not impossible. const currentFetch = preferredFetch || maybe(() => fetch) || backupFetch; + const observerNext = observer.next.bind(observer); currentFetch!(chosenURI, options) .then(response => { operation.setContext({ response }); const ctype = response.headers?.get('content-type'); if (ctype !== null && /^multipart\/mixed/i.test(ctype)) { - return readMultipartBody(response, observer); + return readMultipartBody(response, observerNext); } else { - return readJsonBody(response, operation, observer); + return parseAndCheckHttpResponse(operation)(response).then(observerNext); } }) - .catch(err => handleError(err, observer)); + .then(() => { + controller = undefined; + observer.complete(); + }) + .catch(err => { + controller = undefined; + handleError(err, observer) + }); return () => { // XXX support canceling this request diff --git a/src/link/http/createSignalIfSupported.ts b/src/link/http/createSignalIfSupported.ts --- a/src/link/http/createSignalIfSupported.ts +++ b/src/link/http/createSignalIfSupported.ts @@ -1,3 +1,8 @@ +/** + * @deprecated + * This is not used internally any more and will be removed in + * the next major version of Apollo Client. 
+ */ export const createSignalIfSupported = () => { if (typeof AbortController === 'undefined') return { controller: false, signal: false }; diff --git a/src/link/http/index.ts b/src/link/http/index.ts --- a/src/link/http/index.ts +++ b/src/link/http/index.ts @@ -1,24 +1,19 @@ -import '../../utilities/globals'; +import '../../utilities/globals/index.js'; +export type { ServerParseError } from './parseAndCheckHttpResponse.js'; +export { parseAndCheckHttpResponse } from './parseAndCheckHttpResponse.js'; +export type { ClientParseError } from './serializeFetchParameter.js'; +export { serializeFetchParameter } from './serializeFetchParameter.js'; +export type { HttpOptions, UriFunction } from './selectHttpOptionsAndBody.js'; export { - parseAndCheckHttpResponse, - ServerParseError -} from './parseAndCheckHttpResponse'; -export { - serializeFetchParameter, - ClientParseError -} from './serializeFetchParameter'; -export { - HttpOptions, fallbackHttpConfig, defaultPrinter, selectHttpOptionsAndBody, selectHttpOptionsAndBodyInternal, // needed by ../batch-http but not public - UriFunction -} from './selectHttpOptionsAndBody'; -export { checkFetcher } from './checkFetcher'; -export { createSignalIfSupported } from './createSignalIfSupported'; -export { selectURI } from './selectURI'; -export { createHttpLink } from './createHttpLink'; -export { HttpLink } from './HttpLink'; -export { rewriteURIForGET } from './rewriteURIForGET'; +} from './selectHttpOptionsAndBody.js'; +export { checkFetcher } from './checkFetcher.js'; +export { createSignalIfSupported } from './createSignalIfSupported.js'; +export { selectURI } from './selectURI.js'; +export { createHttpLink } from './createHttpLink.js'; +export { HttpLink } from './HttpLink.js'; +export { rewriteURIForGET } from './rewriteURIForGET.js'; diff --git a/src/link/http/iterators/nodeStream.ts b/src/link/http/iterators/nodeStream.ts --- a/src/link/http/iterators/nodeStream.ts +++ b/src/link/http/iterators/nodeStream.ts @@ -3,8 +3,8 @@ * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/nodeStream.ts */ -import { Readable as NodeReadableStream } from "stream"; -import { canUseAsyncIteratorSymbol } from "../../../utilities"; +import type { Readable as NodeReadableStream } from "stream"; +import { canUseAsyncIteratorSymbol } from "../../../utilities/index.js"; interface NodeStreamIterator<T> { next(): Promise<IteratorResult<T, boolean | undefined>>; diff --git a/src/link/http/iterators/promise.ts b/src/link/http/iterators/promise.ts --- a/src/link/http/iterators/promise.ts +++ b/src/link/http/iterators/promise.ts @@ -3,7 +3,7 @@ * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/promise.ts */ -import { canUseAsyncIteratorSymbol } from "../../../utilities"; +import { canUseAsyncIteratorSymbol } from "../../../utilities/index.js"; interface PromiseIterator<T> { next(): Promise<IteratorResult<T, ArrayBuffer | undefined>>; diff --git a/src/link/http/iterators/reader.ts b/src/link/http/iterators/reader.ts --- a/src/link/http/iterators/reader.ts +++ b/src/link/http/iterators/reader.ts @@ -3,7 +3,7 @@ * https://github.com/kmalakoff/response-iterator/blob/master/src/iterators/reader.ts */ -import { canUseAsyncIteratorSymbol } from "../../../utilities"; +import { canUseAsyncIteratorSymbol } from "../../../utilities/index.js"; interface ReaderIterator<T> { next(): Promise<ReadableStreamReadResult<T>>; diff --git a/src/link/http/parseAndCheckHttpResponse.ts b/src/link/http/parseAndCheckHttpResponse.ts --- 
a/src/link/http/parseAndCheckHttpResponse.ts +++ b/src/link/http/parseAndCheckHttpResponse.ts @@ -1,11 +1,11 @@ -import { responseIterator } from "./responseIterator"; -import { Operation } from "../core"; -import { throwServerError } from "../utils"; -import { PROTOCOL_ERRORS_SYMBOL } from '../../errors'; -import { Observer } from "../../utilities"; +import { responseIterator } from "./responseIterator.js"; +import type { Operation } from "../core/index.js"; +import { throwServerError } from "../utils/index.js"; +import { PROTOCOL_ERRORS_SYMBOL } from '../../errors/index.js'; import { isApolloPayloadResult -} from '../../utilities/common/incrementalResult'; +} from '../../utilities/common/incrementalResult.js'; +import type { SubscriptionObserver } from "zen-observable-ts"; const { hasOwnProperty } = Object.prototype; @@ -17,7 +17,7 @@ export type ServerParseError = Error & { export async function readMultipartBody< T extends object = Record<string, unknown> ->(response: Response, observer: Observer<T>) { +>(response: Response, nextValue: (value: T) => void) { if (TextDecoder === undefined) { throw new Error( "TextDecoder must be defined in the environment: please import a polyfill." @@ -74,52 +74,47 @@ export async function readMultipartBody< const body = message.slice(i); if (body) { - try { - const result = parseJsonBody<T>(response, body); - if ( - Object.keys(result).length > 1 || - "data" in result || - "incremental" in result || - "errors" in result || - "payload" in result - ) { - if (isApolloPayloadResult(result)) { - let next = {}; - if ("payload" in result) { - next = { ...result.payload }; - } - if ("errors" in result) { - next = { - ...next, - extensions: { - ...("extensions" in next ? next.extensions : null as any), - [PROTOCOL_ERRORS_SYMBOL]: result.errors - }, - }; - } - observer.next?.(next as T); - } else { - // for the last chunk with only `hasNext: false` - // we don't need to call observer.next as there is no data/errors - observer.next?.(result); + const result = parseJsonBody<T>(response, body); + if ( + Object.keys(result).length > 1 || + "data" in result || + "incremental" in result || + "errors" in result || + "payload" in result + ) { + if (isApolloPayloadResult(result)) { + let next = {}; + if ("payload" in result) { + next = { ...result.payload }; + } + if ("errors" in result) { + next = { + ...next, + extensions: { + ...("extensions" in next ? next.extensions : null as any), + [PROTOCOL_ERRORS_SYMBOL]: result.errors + }, + }; } - } else if ( - // If the chunk contains only a "hasNext: false", we can call - // observer.complete() immediately. - Object.keys(result).length === 1 && - "hasNext" in result && - !result.hasNext - ) { - observer.complete?.(); + nextValue(next as T); + } else { + // for the last chunk with only `hasNext: false` + // we don't need to call observer.next as there is no data/errors + nextValue(result); } - } catch (err) { - handleError(err, observer); + } else if ( + // If the chunk contains only a "hasNext: false", we can call + // observer.complete() immediately. 
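To make the branch above easier to follow, this is the same chunk test restated as a standalone function (illustrative only, not part of the patch): a parsed multipart chunk that carries data, errors, or a payload is forwarded through nextValue, while a chunk containing nothing but hasNext: false simply ends the read loop.

type Chunk = Record<string, unknown>;

function chunkCarriesResult(result: Chunk): boolean {
  return (
    Object.keys(result).length > 1 ||
    "data" in result ||
    "incremental" in result ||
    "errors" in result ||
    "payload" in result
  );
}

chunkCarriesResult({ data: { user: { id: 1 } }, hasNext: true }); // true, forwarded via nextValue
chunkCarriesResult({ hasNext: false }); // false, the multipart stream is done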
+ Object.keys(result).length === 1 && + "hasNext" in result && + !result.hasNext + ) { + return; } } bi = buffer.indexOf(boundary); } } - observer.complete?.(); } export function parseHeaders(headerText: string): Record<string, string> { @@ -165,8 +160,7 @@ export function parseJsonBody<T>(response: Response, bodyText: string): T { } } -export function handleError(err: any, observer: Observer<any>) { - if (err.name === "AbortError") return; +export function handleError(err: any, observer: SubscriptionObserver<any>) { // if it is a network error, BUT there is graphql result info fire // the next observer before calling error this gives apollo-client // (and react-apollo) the `graphqlErrors` and `networkErrors` to @@ -200,23 +194,10 @@ export function handleError(err: any, observer: Observer<any>) { // status code of above would be a 401 // in the UI you want to show data where you can, errors as data where you can // and use correct http status codes - observer.next?.(err.result); + observer.next(err.result); } - observer.error?.(err); -} - -export function readJsonBody<T = Record<string, unknown>>( - response: Response, - operation: Operation, - observer: Observer<T> -) { - parseAndCheckHttpResponse(operation)(response) - .then((result) => { - observer.next?.(result); - observer.complete?.(); - }) - .catch((err) => handleError(err, observer)); + observer.error(err); } export function parseAndCheckHttpResponse(operations: Operation | Operation[]) { diff --git a/src/link/http/responseIterator.ts b/src/link/http/responseIterator.ts --- a/src/link/http/responseIterator.ts +++ b/src/link/http/responseIterator.ts @@ -3,7 +3,7 @@ * https://github.com/kmalakoff/response-iterator/blob/master/src/index.ts */ -import { Response as NodeResponse } from "node-fetch"; +import type { Response as NodeResponse } from "node-fetch"; import { isAsyncIterableIterator, isBlob, @@ -11,12 +11,12 @@ import { isNodeReadableStream, isReadableStream, isStreamableBlob, -} from "../../utilities/common/responseIterator"; +} from "../../utilities/index.js"; -import asyncIterator from "./iterators/async"; -import nodeStreamIterator from "./iterators/nodeStream"; -import promiseIterator from "./iterators/promise"; -import readerIterator from "./iterators/reader"; +import asyncIterator from "./iterators/async.js"; +import nodeStreamIterator from "./iterators/nodeStream.js"; +import promiseIterator from "./iterators/promise.js"; +import readerIterator from "./iterators/reader.js"; export function responseIterator<T>( response: Response | NodeResponse diff --git a/src/link/http/rewriteURIForGET.ts b/src/link/http/rewriteURIForGET.ts --- a/src/link/http/rewriteURIForGET.ts +++ b/src/link/http/rewriteURIForGET.ts @@ -1,5 +1,5 @@ -import { serializeFetchParameter } from './serializeFetchParameter'; -import { Body } from './selectHttpOptionsAndBody'; +import { serializeFetchParameter } from './serializeFetchParameter.js'; +import type { Body } from './selectHttpOptionsAndBody.js'; // For GET operations, returns the given URI rewritten with parameters, or a // parse error. 
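Both createHttpLink (above) and BatchHttpLink now create an AbortController directly instead of going through the deprecated createSignalIfSupported helper, and they clear the controller reference once the request settles so the unsubscribe teardown only aborts requests that are still in flight. A minimal standalone sketch of that pattern; the function and parameter names are placeholders, not taken from the patch:

function fetchWithCancellation(uri: string, options: RequestInit) {
  let controller: AbortController | undefined;
  if (!options.signal && typeof AbortController !== "undefined") {
    controller = new AbortController();
    options.signal = controller.signal;
  }

  const promise = fetch(uri, options).finally(() => {
    // Once the request has settled there is nothing left to abort.
    controller = undefined;
  });

  // Calling cancel() after settlement is a no-op because the reference is gone.
  return { promise, cancel: () => controller?.abort() };
}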
diff --git a/src/link/http/selectHttpOptionsAndBody.ts b/src/link/http/selectHttpOptionsAndBody.ts --- a/src/link/http/selectHttpOptionsAndBody.ts +++ b/src/link/http/selectHttpOptionsAndBody.ts @@ -1,6 +1,7 @@ -import { ASTNode, print } from 'graphql'; +import type { ASTNode} from 'graphql'; +import { print } from '../../utilities/index.js'; -import { Operation } from '../core'; +import type { Operation } from '../core/index.js'; export interface Printer { (node: ASTNode, originalPrint: typeof print): string diff --git a/src/link/http/selectURI.ts b/src/link/http/selectURI.ts --- a/src/link/http/selectURI.ts +++ b/src/link/http/selectURI.ts @@ -1,4 +1,4 @@ -import { Operation } from '../core'; +import type { Operation } from '../core/index.js'; export const selectURI = ( operation: Operation, diff --git a/src/link/http/serializeFetchParameter.ts b/src/link/http/serializeFetchParameter.ts --- a/src/link/http/serializeFetchParameter.ts +++ b/src/link/http/serializeFetchParameter.ts @@ -1,4 +1,5 @@ -import { InvariantError } from '../../utilities/globals'; +import { newInvariantError } from '../../utilities/globals/index.js'; +import type { InvariantError } from '../../utilities/globals/index.js'; export type ClientParseError = InvariantError & { parseError: Error; @@ -9,8 +10,10 @@ export const serializeFetchParameter = (p: any, label: string) => { try { serialized = JSON.stringify(p); } catch (e) { - const parseError = new InvariantError( - `Network request failed. ${label} is not serializable: ${e.message}`, + const parseError = newInvariantError( + `Network request failed. %s is not serializable: %s`, + label, + e.message ) as ClientParseError; parseError.parseError = e; throw parseError; diff --git a/src/link/persisted-queries/index.ts b/src/link/persisted-queries/index.ts --- a/src/link/persisted-queries/index.ts +++ b/src/link/persisted-queries/index.ts @@ -1,22 +1,24 @@ -import { invariant } from '../../utilities/globals'; +import { invariant } from '../../utilities/globals/index.js'; -import { print } from 'graphql'; -import { +import { print } from '../../utilities/index.js'; +import type { DocumentNode, ExecutionResult, GraphQLError, } from 'graphql'; -import { ApolloLink, Operation } from '../core'; +import type { Operation } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import type { + Observer, + ObservableSubscription} from '../../utilities/index.js'; import { Observable, - Observer, - ObservableSubscription, compact, isNonEmptyArray, -} from '../../utilities'; -import { NetworkError } from '../../errors'; -import { ServerError } from '../utils'; +} from '../../utilities/index.js'; +import type { NetworkError } from '../../errors/index.js'; +import type { ServerError } from '../utils/index.js'; export const VERSION = 1; diff --git a/src/link/remove-typename/index.ts b/src/link/remove-typename/index.ts new file mode 100644 --- /dev/null +++ b/src/link/remove-typename/index.ts @@ -0,0 +1,5 @@ +export type { RemoveTypenameFromVariablesOptions } from "./removeTypenameFromVariables.js"; +export { + removeTypenameFromVariables, + KEEP, +} from "./removeTypenameFromVariables.js"; diff --git a/src/link/remove-typename/removeTypenameFromVariables.ts b/src/link/remove-typename/removeTypenameFromVariables.ts new file mode 100644 --- /dev/null +++ b/src/link/remove-typename/removeTypenameFromVariables.ts @@ -0,0 +1,116 @@ +import { wrap } from "optimism"; +import type { DocumentNode, TypeNode } from "graphql"; +import { Kind, visit } from "graphql"; +import { 
ApolloLink } from "../core/index.js"; +import { stripTypename, isPlainObject } from "../../utilities/index.js"; +import type { OperationVariables } from "../../core/index.js"; + +export const KEEP = "__KEEP"; + +interface KeepTypenameConfig { + [key: string]: typeof KEEP | KeepTypenameConfig; +} + +export interface RemoveTypenameFromVariablesOptions { + except?: KeepTypenameConfig; +} + +export function removeTypenameFromVariables( + options: RemoveTypenameFromVariablesOptions = Object.create(null) +) { + return new ApolloLink((operation, forward) => { + const { except } = options; + const { query, variables } = operation; + + if (variables) { + operation.variables = except + ? maybeStripTypenameUsingConfig(query, variables, except) + : stripTypename(variables); + } + + return forward(operation); + }); +} + +function maybeStripTypenameUsingConfig( + query: DocumentNode, + variables: OperationVariables, + config: KeepTypenameConfig +) { + const variableDefinitions = getVariableDefinitions(query); + + return Object.fromEntries( + Object.entries(variables).map((keyVal) => { + const [key, value] = keyVal; + const typename = variableDefinitions[key]; + const typenameConfig = config[typename]; + + keyVal[1] = typenameConfig + ? maybeStripTypename(value, typenameConfig) + : stripTypename(value); + + return keyVal; + }) + ); +} + +type JSONPrimitive = string | number | null | boolean; +type JSONValue = JSONPrimitive | JSONValue[] | { [key: string]: JSONValue }; + +function maybeStripTypename( + value: JSONValue, + config: KeepTypenameConfig[string] +): JSONValue { + if (config === KEEP) { + return value; + } + + if (Array.isArray(value)) { + return value.map((item) => maybeStripTypename(item, config)); + } + + if (isPlainObject(value)) { + const modified: Record<string, any> = {}; + + Object.keys(value).forEach((key) => { + const child = value[key]; + + if (key === "__typename") { + return; + } + + const fieldConfig = config[key]; + + modified[key] = fieldConfig + ? 
maybeStripTypename(child, fieldConfig) + : stripTypename(child); + }); + + return modified; + } + + return value; +} + +const getVariableDefinitions = wrap((document: DocumentNode) => { + const definitions: Record<string, string> = {}; + + visit(document, { + VariableDefinition(node) { + definitions[node.variable.name.value] = unwrapType(node.type); + }, + }); + + return definitions; +}); + +function unwrapType(node: TypeNode): string { + switch (node.kind) { + case Kind.NON_NULL_TYPE: + return unwrapType(node.type); + case Kind.LIST_TYPE: + return unwrapType(node.type); + case Kind.NAMED_TYPE: + return node.name.value; + } +} diff --git a/src/link/retry/delayFunction.ts b/src/link/retry/delayFunction.ts --- a/src/link/retry/delayFunction.ts +++ b/src/link/retry/delayFunction.ts @@ -1,4 +1,4 @@ -import { Operation } from '../core'; +import type { Operation } from '../core/index.js'; /** * Advanced mode: a function that implements the strategy for calculating delays diff --git a/src/link/retry/index.ts b/src/link/retry/index.ts --- a/src/link/retry/index.ts +++ b/src/link/retry/index.ts @@ -1 +1 @@ -export * from './retryLink'; +export * from './retryLink.js'; diff --git a/src/link/retry/retryFunction.ts b/src/link/retry/retryFunction.ts --- a/src/link/retry/retryFunction.ts +++ b/src/link/retry/retryFunction.ts @@ -1,4 +1,4 @@ -import { Operation } from '../core'; +import type { Operation } from '../core/index.js'; /** * Advanced mode: a function that determines both whether a particular diff --git a/src/link/retry/retryLink.ts b/src/link/retry/retryLink.ts --- a/src/link/retry/retryLink.ts +++ b/src/link/retry/retryLink.ts @@ -1,15 +1,19 @@ -import { ApolloLink, Operation, FetchResult, NextLink } from '../core'; -import { Observable, Observer, ObservableSubscription } from '../../utilities'; -import { +import type { Operation, FetchResult, NextLink } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import type { Observer, ObservableSubscription } from '../../utilities/index.js'; +import { Observable } from '../../utilities/index.js'; +import type { DelayFunction, - DelayFunctionOptions, - buildDelayFunction, -} from './delayFunction'; + DelayFunctionOptions} from './delayFunction.js'; import { + buildDelayFunction, +} from './delayFunction.js'; +import type { RetryFunction, - RetryFunctionOptions, + RetryFunctionOptions} from './retryFunction.js'; +import { buildRetryFunction, -} from './retryFunction'; +} from './retryFunction.js'; export namespace RetryLink { export interface Options { diff --git a/src/link/schema/index.ts b/src/link/schema/index.ts --- a/src/link/schema/index.ts +++ b/src/link/schema/index.ts @@ -1,7 +1,9 @@ -import { validate, execute, GraphQLSchema } from 'graphql'; +import type { GraphQLSchema } from 'graphql'; +import { validate, execute } from 'graphql'; -import { ApolloLink, Operation, FetchResult } from '../core'; -import { Observable } from '../../utilities'; +import type { Operation, FetchResult } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import { Observable } from '../../utilities/index.js'; export namespace SchemaLink { export type ResolverContext = Record<string, any>; diff --git a/src/link/subscriptions/index.ts b/src/link/subscriptions/index.ts --- a/src/link/subscriptions/index.ts +++ b/src/link/subscriptions/index.ts @@ -28,12 +28,13 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
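A hedged usage sketch for the removeTypenameFromVariables link introduced above, assuming the usual package entry point wiring (@apollo/client/link/remove-typename) and an invented schema where a JSON scalar input should keep its __typename fields:

import { ApolloClient, InMemoryCache, HttpLink, from } from "@apollo/client";
import {
  removeTypenameFromVariables,
  KEEP,
} from "@apollo/client/link/remove-typename";

const removeTypenameLink = removeTypenameFromVariables({
  // Keep __typename inside anything declared as a `JSON` scalar input,
  // strip it from every other variable.
  except: { JSON: KEEP },
});

const client = new ApolloClient({
  cache: new InMemoryCache(),
  // The link is not terminating, so it must run before the HttpLink that
  // serializes the variables.
  link: from([
    removeTypenameLink,
    new HttpLink({ uri: "https://example.com/graphql" }),
  ]),
});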
-import { print } from "graphql"; +import { print } from '../../utilities/index.js'; import type { Client } from "graphql-ws"; -import { ApolloLink, Operation, FetchResult } from "../core"; -import { isNonNullObject, Observable } from "../../utilities"; -import { ApolloError } from "../../errors"; +import type { Operation, FetchResult } from "../core/index.js"; +import { ApolloLink } from "../core/index.js"; +import { isNonNullObject, Observable } from "../../utilities/index.js"; +import { ApolloError } from "../../errors/index.js"; // https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/close_event function isLikeCloseEvent(val: unknown): val is CloseEvent { diff --git a/src/link/utils/createOperation.ts b/src/link/utils/createOperation.ts --- a/src/link/utils/createOperation.ts +++ b/src/link/utils/createOperation.ts @@ -1,4 +1,4 @@ -import { GraphQLRequest, Operation } from '../core'; +import type { GraphQLRequest, Operation } from '../core/index.js'; export function createOperation( starting: any, diff --git a/src/link/utils/filterOperationVariables.ts b/src/link/utils/filterOperationVariables.ts new file mode 100644 --- /dev/null +++ b/src/link/utils/filterOperationVariables.ts @@ -0,0 +1,28 @@ +import type { VariableDefinitionNode, DocumentNode } from 'graphql'; +import { visit } from 'graphql'; + +export function filterOperationVariables( + variables: Record<string, any>, + query: DocumentNode +) { + const result = { ...variables }; + const unusedNames = new Set(Object.keys(variables)); + visit(query, { + Variable(node, _key, parent) { + // A variable type definition at the top level of a query is not + // enough to silence server-side errors about the variable being + // unused, so variable definitions do not count as usage. + // https://spec.graphql.org/draft/#sec-All-Variables-Used + if ( + parent && + (parent as VariableDefinitionNode).kind !== 'VariableDefinition' + ) { + unusedNames.delete(node.name.value); + } + }, + }); + unusedNames.forEach((name) => { + delete result![name]; + }); + return result; +} diff --git a/src/link/utils/fromError.ts b/src/link/utils/fromError.ts --- a/src/link/utils/fromError.ts +++ b/src/link/utils/fromError.ts @@ -1,4 +1,4 @@ -import { Observable } from '../../utilities'; +import { Observable } from '../../utilities/index.js'; export function fromError<T>(errorValue: any): Observable<T> { return new Observable<T>(observer => { diff --git a/src/link/utils/fromPromise.ts b/src/link/utils/fromPromise.ts --- a/src/link/utils/fromPromise.ts +++ b/src/link/utils/fromPromise.ts @@ -1,4 +1,4 @@ -import { Observable } from '../../utilities'; +import { Observable } from '../../utilities/index.js'; export function fromPromise<T>(promise: Promise<T>): Observable<T> { return new Observable<T>(observer => { diff --git a/src/link/utils/index.ts b/src/link/utils/index.ts --- a/src/link/utils/index.ts +++ b/src/link/utils/index.ts @@ -1,9 +1,11 @@ -import '../../utilities/globals'; +import '../../utilities/globals/index.js'; -export { fromError } from './fromError'; -export { toPromise } from './toPromise'; -export { fromPromise } from './fromPromise'; -export { ServerError, throwServerError } from './throwServerError'; -export { validateOperation } from './validateOperation'; -export { createOperation } from './createOperation'; -export { transformOperation } from './transformOperation'; +export { fromError } from './fromError.js'; +export { toPromise } from './toPromise.js'; +export { fromPromise } from './fromPromise.js'; +export type { ServerError } 
from './throwServerError.js'; +export { throwServerError } from './throwServerError.js'; +export { validateOperation } from './validateOperation.js'; +export { createOperation } from './createOperation.js'; +export { transformOperation } from './transformOperation.js'; +export { filterOperationVariables } from './filterOperationVariables.js'; diff --git a/src/link/utils/toPromise.ts b/src/link/utils/toPromise.ts --- a/src/link/utils/toPromise.ts +++ b/src/link/utils/toPromise.ts @@ -1,5 +1,5 @@ -import { invariant } from '../../utilities/globals'; -import { Observable } from '../../utilities'; +import { invariant } from '../../utilities/globals/index.js'; +import type { Observable } from '../../utilities/index.js'; export function toPromise<R>(observable: Observable<R>): Promise<R> { let completed = false; diff --git a/src/link/utils/transformOperation.ts b/src/link/utils/transformOperation.ts --- a/src/link/utils/transformOperation.ts +++ b/src/link/utils/transformOperation.ts @@ -1,5 +1,5 @@ -import { GraphQLRequest, Operation } from '../core'; -import { getOperationName } from '../../utilities'; +import type { GraphQLRequest, Operation } from '../core/index.js'; +import { getOperationName } from '../../utilities/index.js'; export function transformOperation(operation: GraphQLRequest): GraphQLRequest { const transformedOperation: GraphQLRequest = { diff --git a/src/link/utils/validateOperation.ts b/src/link/utils/validateOperation.ts --- a/src/link/utils/validateOperation.ts +++ b/src/link/utils/validateOperation.ts @@ -1,5 +1,5 @@ -import { InvariantError } from '../../utilities/globals' -import { GraphQLRequest } from '../core'; +import { newInvariantError } from '../../utilities/globals/index.js' +import type { GraphQLRequest } from '../core/index.js'; export function validateOperation(operation: GraphQLRequest): GraphQLRequest { const OPERATION_FIELDS = [ @@ -11,7 +11,7 @@ export function validateOperation(operation: GraphQLRequest): GraphQLRequest { ]; for (let key of Object.keys(operation)) { if (OPERATION_FIELDS.indexOf(key) < 0) { - throw new InvariantError(`illegal argument: ${key}`); + throw newInvariantError(`illegal argument: %s`, key); } } diff --git a/src/link/ws/index.ts b/src/link/ws/index.ts --- a/src/link/ws/index.ts +++ b/src/link/ws/index.ts @@ -1,7 +1,9 @@ -import { SubscriptionClient, ClientOptions } from 'subscriptions-transport-ws'; +import type { ClientOptions } from 'subscriptions-transport-ws'; +import { SubscriptionClient } from 'subscriptions-transport-ws'; -import { ApolloLink, Operation, FetchResult } from '../core'; -import { Observable } from '../../utilities'; +import type { Operation, FetchResult } from '../core/index.js'; +import { ApolloLink } from '../core/index.js'; +import type { Observable } from '../../utilities/index.js'; export namespace WebSocketLink { /** diff --git a/src/react/cache/QueryReference.ts b/src/react/cache/QueryReference.ts new file mode 100644 --- /dev/null +++ b/src/react/cache/QueryReference.ts @@ -0,0 +1,295 @@ +import { equal } from "@wry/equality"; +import type { + ApolloError, + ApolloQueryResult, + ObservableQuery, + OperationVariables, + WatchQueryOptions, +} from "../../core/index.js"; +import { isNetworkRequestSettled } from "../../core/index.js"; +import type { ObservableSubscription } from "../../utilities/index.js"; +import { + createFulfilledPromise, + createRejectedPromise, +} from "../../utilities/index.js"; +import type { CacheKey } from "./types.js"; +import type { useBackgroundQuery, useReadQuery } from 
"../hooks/index.js"; + +type Listener<TData> = (promise: Promise<ApolloQueryResult<TData>>) => void; + +type FetchMoreOptions<TData> = Parameters< + ObservableQuery<TData>["fetchMore"] +>[0]; + +const QUERY_REFERENCE_SYMBOL: unique symbol = Symbol(); +/** + * A `QueryReference` is an opaque object returned by {@link useBackgroundQuery}. + * A child component reading the `QueryReference` via {@link useReadQuery} will + * suspend until the promise resolves. + */ +export interface QueryReference<TData = unknown> { + [QUERY_REFERENCE_SYMBOL]: InternalQueryReference<TData>; +} + +interface InternalQueryReferenceOptions { + key: CacheKey; + onDispose?: () => void; + autoDisposeTimeoutMs?: number; +} + +export function wrapQueryRef<TData>( + internalQueryRef: InternalQueryReference<TData> +): QueryReference<TData> { + return { [QUERY_REFERENCE_SYMBOL]: internalQueryRef }; +} + +export function unwrapQueryRef<TData>( + queryRef: QueryReference<TData> +): InternalQueryReference<TData> { + return queryRef[QUERY_REFERENCE_SYMBOL]; +} + +const OBSERVED_CHANGED_OPTIONS = [ + "canonizeResults", + "context", + "errorPolicy", + "fetchPolicy", + "refetchWritePolicy", + "returnPartialData", +] as const; + +type ObservedOptions = Pick< + WatchQueryOptions, + typeof OBSERVED_CHANGED_OPTIONS[number] +>; + +export class InternalQueryReference<TData = unknown> { + public result: ApolloQueryResult<TData>; + public readonly key: CacheKey; + public readonly observable: ObservableQuery<TData>; + + public promiseCache?: Map<CacheKey, Promise<ApolloQueryResult<TData>>>; + public promise: Promise<ApolloQueryResult<TData>>; + + private subscription: ObservableSubscription; + private listeners = new Set<Listener<TData>>(); + private autoDisposeTimeoutId: NodeJS.Timeout; + private status: "idle" | "loading" = "loading"; + + private resolve: ((result: ApolloQueryResult<TData>) => void) | undefined; + private reject: ((error: unknown) => void) | undefined; + + private references = 0; + + constructor( + observable: ObservableQuery<TData>, + options: InternalQueryReferenceOptions + ) { + this.handleNext = this.handleNext.bind(this); + this.handleError = this.handleError.bind(this); + this.dispose = this.dispose.bind(this); + this.observable = observable; + // Don't save this result as last result to prevent delivery of last result + // when first subscribing + this.result = observable.getCurrentResult(false); + this.key = options.key; + + if (options.onDispose) { + this.onDispose = options.onDispose; + } + + if ( + isNetworkRequestSettled(this.result.networkStatus) || + (this.result.data && + (!this.result.partial || this.watchQueryOptions.returnPartialData)) + ) { + this.promise = createFulfilledPromise(this.result); + this.status = "idle"; + } else { + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + } + + this.subscription = observable + .filter(({ data }) => !equal(data, {})) + .subscribe({ + next: this.handleNext, + error: this.handleError, + }); + + // Start a timer that will automatically dispose of the query if the + // suspended resource does not use this queryRef in the given time. This + // helps prevent memory leaks when a component has unmounted before the + // query has finished loading. + this.autoDisposeTimeoutId = setTimeout( + this.dispose, + options.autoDisposeTimeoutMs ?? 
30_000 + ); + } + + get watchQueryOptions() { + return this.observable.options; + } + + retain() { + this.references++; + clearTimeout(this.autoDisposeTimeoutId); + let disposed = false; + + return () => { + if (disposed) { + return; + } + + disposed = true; + this.references--; + + // Wait before fully disposing in case the app is running in strict mode. + setTimeout(() => { + if (!this.references) { + this.dispose(); + } + }); + }; + } + + didChangeOptions(watchQueryOptions: ObservedOptions) { + return OBSERVED_CHANGED_OPTIONS.some( + (option) => + !equal(this.watchQueryOptions[option], watchQueryOptions[option]) + ); + } + + applyOptions(watchQueryOptions: ObservedOptions) { + const { + fetchPolicy: currentFetchPolicy, + canonizeResults: currentCanonizeResults, + } = this.watchQueryOptions; + + // "standby" is used when `skip` is set to `true`. Detect when we've + // enabled the query (i.e. `skip` is `false`) to execute a network request. + if ( + currentFetchPolicy === "standby" && + currentFetchPolicy !== watchQueryOptions.fetchPolicy + ) { + this.initiateFetch(this.observable.reobserve(watchQueryOptions)); + } else { + this.observable.silentSetOptions(watchQueryOptions); + + if (currentCanonizeResults !== watchQueryOptions.canonizeResults) { + this.result = { ...this.result, ...this.observable.getCurrentResult() }; + this.promise = createFulfilledPromise(this.result); + } + } + + return this.promise; + } + + listen(listener: Listener<TData>) { + this.listeners.add(listener); + + return () => { + this.listeners.delete(listener); + }; + } + + refetch(variables: OperationVariables | undefined) { + return this.initiateFetch(this.observable.refetch(variables)); + } + + fetchMore(options: FetchMoreOptions<TData>) { + return this.initiateFetch(this.observable.fetchMore<TData>(options)); + } + + private dispose() { + this.subscription.unsubscribe(); + this.onDispose(); + } + + private onDispose() { + // noop. overridable by options + } + + private handleNext(result: ApolloQueryResult<TData>) { + switch (this.status) { + case "loading": { + // Maintain the last successful `data` value if the next result does not + // have one. + if (result.data === void 0) { + result.data = this.result.data; + } + this.status = "idle"; + this.result = result; + this.resolve?.(result); + break; + } + case "idle": { + // This occurs when switching to a result that is fully cached when this + // class is instantiated. ObservableQuery will run reobserve when + // subscribing, which delivers a result from the cache. + if (result.data === this.result.data) { + return; + } + + // Maintain the last successful `data` value if the next result does not + // have one. 
+ if (result.data === void 0) { + result.data = this.result.data; + } + + this.result = result; + this.promise = createFulfilledPromise(result); + this.deliver(this.promise); + break; + } + } + } + + private handleError(error: ApolloError) { + switch (this.status) { + case "loading": { + this.status = "idle"; + this.reject?.(error); + break; + } + case "idle": { + this.promise = createRejectedPromise(error); + this.deliver(this.promise); + } + } + } + + private deliver(promise: Promise<ApolloQueryResult<TData>>) { + this.listeners.forEach((listener) => listener(promise)); + } + + private initiateFetch(returnedPromise: Promise<ApolloQueryResult<TData>>) { + this.status = "loading"; + + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + + this.promise.catch(() => {}); + + // If the data returned from the fetch is deeply equal to the data already + // in the cache, `handleNext` will not be triggered leaving the promise we + // created in a pending state forever. To avoid this situation, we attempt + // to resolve the promise if `handleNext` hasn't been run to ensure the + // promise is resolved correctly. + returnedPromise + .then((result) => { + if (this.status === "loading") { + this.status = "idle"; + this.result = result; + this.resolve?.(result); + } + }) + .catch(() => {}); + + return returnedPromise; + } +} diff --git a/src/react/cache/SuspenseCache.ts b/src/react/cache/SuspenseCache.ts new file mode 100644 --- /dev/null +++ b/src/react/cache/SuspenseCache.ts @@ -0,0 +1,49 @@ +import { Trie } from "@wry/trie"; +import type { ObservableQuery } from "../../core/index.js"; +import { canUseWeakMap } from "../../utilities/index.js"; +import { InternalQueryReference } from "./QueryReference.js"; +import type { CacheKey } from "./types.js"; + +export interface SuspenseCacheOptions { + /** + * Specifies the amount of time, in milliseconds, the suspense cache will wait + * for a suspended component to read from the suspense cache before it + * automatically disposes of the query. This prevents memory leaks when a + * component unmounts before a suspended resource finishes loading. Increase + * the timeout if your queries take longer than the specified time to + * prevent your queries from suspending over and over. + * + * Defaults to 30 seconds.
+ */ + autoDisposeTimeoutMs?: number; +} + +export class SuspenseCache { + private queryRefs = new Trie<{ current?: InternalQueryReference }>( + canUseWeakMap + ); + private options: SuspenseCacheOptions; + + constructor(options: SuspenseCacheOptions = Object.create(null)) { + this.options = options; + } + + getQueryRef<TData = any>( + cacheKey: CacheKey, + createObservable: () => ObservableQuery<TData> + ) { + const ref = this.queryRefs.lookupArray(cacheKey); + + if (!ref.current) { + ref.current = new InternalQueryReference(createObservable(), { + key: cacheKey, + autoDisposeTimeoutMs: this.options.autoDisposeTimeoutMs, + onDispose: () => { + delete ref.current; + }, + }); + } + + return ref.current as InternalQueryReference<TData>; + } +} diff --git a/src/react/cache/getSuspenseCache.ts b/src/react/cache/getSuspenseCache.ts new file mode 100644 --- /dev/null +++ b/src/react/cache/getSuspenseCache.ts @@ -0,0 +1,27 @@ +import type { SuspenseCacheOptions } from "./index.js"; +import { SuspenseCache } from "./SuspenseCache.js"; +import type { ApolloClient } from "../../core/ApolloClient.js"; + +declare module "../../core/ApolloClient.js" { + interface DefaultOptions { + react?: { + suspense?: Readonly<SuspenseCacheOptions>; + }; + } +} + +const suspenseCacheSymbol = Symbol.for("apollo.suspenseCache"); + +export function getSuspenseCache( + client: ApolloClient<object> & { + [suspenseCacheSymbol]?: SuspenseCache; + } +) { + if (!client[suspenseCacheSymbol]) { + client[suspenseCacheSymbol] = new SuspenseCache( + client.defaultOptions.react?.suspense + ); + } + + return client[suspenseCacheSymbol]; +} diff --git a/src/react/cache/index.ts b/src/react/cache/index.ts new file mode 100644 --- /dev/null +++ b/src/react/cache/index.ts @@ -0,0 +1,27 @@ +export type { SuspenseCacheOptions } from "./SuspenseCache.js"; +export { getSuspenseCache } from "./getSuspenseCache.js"; + +import { SuspenseCache as RealSuspenseCache } from "./SuspenseCache.js"; + +// TODO: remove export with release 3.8 +// replace with +// export type { SuspenseCache } from './SuspenseCache.js'; +/** + * @deprecated + * It is no longer necessary to create a `SuspenseCache` instance and pass it into the `ApolloProvider`. + * Please remove this code from your application. + * + * This export will be removed with the final 3.8 release. + */ +export class SuspenseCache extends RealSuspenseCache { + constructor() { + super(); + // throwing an error here instead of using invariant - we do not want this error + // message to be link-ified, but to directly show up as bold as possible + throw new Error( + "It is no longer necessary to create a `SuspenseCache` instance and pass it into the `ApolloProvider`.\n" + + "Please remove this code from your application. \n\n" + + "This export will be removed with the final 3.8 release." 
+ ); + } +} diff --git a/src/react/cache/types.ts b/src/react/cache/types.ts new file mode 100644 --- /dev/null +++ b/src/react/cache/types.ts @@ -0,0 +1,7 @@ +import type { DocumentNode } from "graphql"; + +export type CacheKey = [ + query: DocumentNode, + stringifiedVariables: string, + ...queryKey: any[], +]; diff --git a/src/react/components/index.ts b/src/react/components/index.ts --- a/src/react/components/index.ts +++ b/src/react/components/index.ts @@ -1,5 +1,5 @@ -export { Query } from './Query'; -export { Mutation } from './Mutation'; -export { Subscription } from './Subscription'; +export { Query } from "./Query.js"; +export { Mutation } from "./Mutation.js"; +export { Subscription } from "./Subscription.js"; -export * from './types'; +export * from "./types.js"; diff --git a/src/react/components/types.ts b/src/react/components/types.ts --- a/src/react/components/types.ts +++ b/src/react/components/types.ts @@ -1,20 +1,24 @@ -import { DocumentNode } from 'graphql'; -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import type { DocumentNode } from "graphql"; +import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; -import { OperationVariables, DefaultContext, ApolloCache } from '../../core'; -import { +import type { + OperationVariables, + DefaultContext, + ApolloCache, +} from "../../core/index.js"; +import type { QueryFunctionOptions, QueryResult, BaseMutationOptions, MutationFunction, MutationResult, BaseSubscriptionOptions, - SubscriptionResult -} from '../types/types'; + SubscriptionResult, +} from "../types/types.js"; export interface QueryComponentOptions< TData = any, - TVariables extends OperationVariables = OperationVariables + TVariables extends OperationVariables = OperationVariables, > extends QueryFunctionOptions<TData, TVariables> { children: (result: QueryResult<TData, TVariables>) => JSX.Element | null; query: DocumentNode | TypedDocumentNode<TData, TVariables>; @@ -24,7 +28,7 @@ export interface MutationComponentOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any> + TCache extends ApolloCache<any> = ApolloCache<any>, > extends BaseMutationOptions<TData, TVariables, TContext, TCache> { mutation: DocumentNode | TypedDocumentNode<TData, TVariables>; children: ( @@ -35,7 +39,7 @@ export interface MutationComponentOptions< export interface SubscriptionComponentOptions< TData = any, - TVariables extends OperationVariables = OperationVariables + TVariables extends OperationVariables = OperationVariables, > extends BaseSubscriptionOptions<TData, TVariables> { subscription: DocumentNode | TypedDocumentNode<TData, TVariables>; children?: null | ((result: SubscriptionResult<TData>) => JSX.Element | null); diff --git a/src/react/context/ApolloContext.ts b/src/react/context/ApolloContext.ts --- a/src/react/context/ApolloContext.ts +++ b/src/react/context/ApolloContext.ts @@ -1,7 +1,8 @@ -import * as React from 'react'; -import { ApolloClient } from '../../core'; -import { canUseSymbol } from '../../utilities'; -import type { RenderPromises } from '../ssr'; +import * as React from "react"; +import type { ApolloClient } from "../../core/index.js"; +import { canUseSymbol } from "../../utilities/index.js"; +import type { RenderPromises } from "../ssr/index.js"; +import { invariant } from "../../utilities/globals/index.js"; export interface ApolloContextValue { client?: ApolloClient<object>; @@ -13,21 +14,37 @@ export interface ApolloContextValue { // in 
one context, then attempting to retrieve it from another different // context), a single Apollo context is created and tracked in global state. const contextKey = canUseSymbol - ? Symbol.for('__APOLLO_CONTEXT__') - : '__APOLLO_CONTEXT__'; + ? Symbol.for("__APOLLO_CONTEXT__") + : "__APOLLO_CONTEXT__"; export function getApolloContext(): React.Context<ApolloContextValue> { - let context = (React.createContext as any)[contextKey] as React.Context<ApolloContextValue>; + invariant( + "createContext" in React, + "Invoking `getApolloContext` in an environment where `React.createContext` is not available.\n" + + "The Apollo Client functionality you are trying to use is only available in React Client Components.\n" + + 'Please make sure to add "use client" at the top of your file.\n' + + // TODO: change to React documentation once React documentation contains information about Client Components + "For more information, see https://nextjs.org/docs/getting-started/react-essentials#client-components" + ); + + let context = (React.createContext as any)[ + contextKey + ] as React.Context<ApolloContextValue>; if (!context) { Object.defineProperty(React.createContext, contextKey, { - value: context = React.createContext<ApolloContextValue>({}), + value: (context = React.createContext<ApolloContextValue>({})), enumerable: false, writable: false, configurable: true, }); - context.displayName = 'ApolloContext'; + context.displayName = "ApolloContext"; } return context; } -export { getApolloContext as resetApolloContext } +/** + * @deprecated This function has no "resetting" effect since Apollo Client 3.4.12, + * and will be removed in the next major version of Apollo Client. + * If you want to get the Apollo Context, use `getApolloContext` instead. + */ +export const resetApolloContext = getApolloContext; diff --git a/src/react/context/index.ts b/src/react/context/index.ts --- a/src/react/context/index.ts +++ b/src/react/context/index.ts @@ -1,9 +1,8 @@ -import '../../utilities/globals'; +import "../../utilities/globals/index.js"; -export { ApolloConsumer, ApolloConsumerProps } from './ApolloConsumer'; -export { - ApolloContextValue, - getApolloContext, - getApolloContext as resetApolloContext -} from './ApolloContext'; -export { ApolloProvider, ApolloProviderProps } from './ApolloProvider'; +export type { ApolloConsumerProps } from "./ApolloConsumer.js"; +export { ApolloConsumer } from "./ApolloConsumer.js"; +export type { ApolloContextValue } from "./ApolloContext.js"; +export { getApolloContext, resetApolloContext } from "./ApolloContext.js"; +export type { ApolloProviderProps } from "./ApolloProvider.js"; +export { ApolloProvider } from "./ApolloProvider.js"; diff --git a/src/react/hoc/index.ts b/src/react/hoc/index.ts --- a/src/react/hoc/index.ts +++ b/src/react/hoc/index.ts @@ -1,10 +1,10 @@ -import '../../utilities/globals'; +import "../../utilities/globals/index.js"; -export { graphql } from './graphql'; +export { graphql } from "./graphql.js"; -export { withQuery } from './query-hoc'; -export { withMutation } from './mutation-hoc'; -export { withSubscription } from './subscription-hoc'; -export { withApollo } from './withApollo'; +export { withQuery } from "./query-hoc.js"; +export { withMutation } from "./mutation-hoc.js"; +export { withSubscription } from "./subscription-hoc.js"; +export { withApollo } from "./withApollo.js"; -export * from './types'; +export * from "./types.js"; diff --git a/src/react/hoc/types.ts b/src/react/hoc/types.ts --- a/src/react/hoc/types.ts +++ b/src/react/hoc/types.ts 
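The getApolloContext invariant added earlier in this patch surfaces when ApolloProvider or any Apollo hook is rendered from a React Server Component, where React.createContext does not exist. A hedged sketch of the fix the message points to; the file layout and endpoint are placeholders:

"use client";

import * as React from "react";
import { ApolloClient, ApolloProvider, InMemoryCache } from "@apollo/client";

const client = new ApolloClient({
  uri: "https://example.com/graphql",
  cache: new InMemoryCache(),
});

export default function Providers({ children }: { children: React.ReactNode }) {
  // The "use client" directive keeps this file in the client bundle, where
  // React.createContext is available and the invariant passes.
  return <ApolloProvider client={client}>{children}</ApolloProvider>;
}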
@@ -1,6 +1,6 @@ -import { ApolloCache, ApolloClient } from '../../core'; -import { ApolloError } from '../../errors'; -import { +import type { ApolloCache, ApolloClient } from "../../core/index.js"; +import type { ApolloError } from "../../errors/index.js"; +import type { ApolloQueryResult, OperationVariables, FetchMoreOptions, @@ -8,17 +8,17 @@ import { FetchMoreQueryOptions, SubscribeToMoreOptions, DefaultContext, -} from '../../core'; -import { +} from "../../core/index.js"; +import type { MutationFunction, BaseQueryOptions, BaseMutationOptions, - MutationResult -} from '../types/types'; + MutationResult, +} from "../types/types.js"; export interface QueryControls< TData = any, - TGraphQLVariables = OperationVariables + TGraphQLVariables = OperationVariables, > { error?: ApolloError; networkStatus: number; @@ -39,7 +39,7 @@ export interface QueryControls< export type DataValue< TData, - TGraphQLVariables = OperationVariables + TGraphQLVariables = OperationVariables, > = QueryControls<TData, TGraphQLVariables> & // data may not yet be loaded Partial<TData>; @@ -50,7 +50,7 @@ export interface DataProps<TData, TGraphQLVariables = OperationVariables> { export interface MutateProps< TData = any, - TGraphQLVariables = OperationVariables + TGraphQLVariables = OperationVariables, > { mutate: MutationFunction<TData, TGraphQLVariables>; result: MutationResult<TData>; @@ -59,7 +59,7 @@ export interface MutateProps< export type ChildProps< TProps = {}, TData = {}, - TGraphQLVariables = OperationVariables + TGraphQLVariables = OperationVariables, > = TProps & Partial<DataProps<TData, TGraphQLVariables>> & Partial<MutateProps<TData, TGraphQLVariables>>; @@ -67,21 +67,20 @@ export type ChildProps< export type ChildDataProps< TProps = {}, TData = {}, - TGraphQLVariables = OperationVariables + TGraphQLVariables = OperationVariables, > = TProps & DataProps<TData, TGraphQLVariables>; export type ChildMutateProps< TProps = {}, TData = {}, - TGraphQLVariables = OperationVariables + TGraphQLVariables = OperationVariables, > = TProps & MutateProps<TData, TGraphQLVariables>; export interface OptionProps< TProps = any, TData = any, - TGraphQLVariables = OperationVariables -> - extends Partial<DataProps<TData, TGraphQLVariables>>, + TGraphQLVariables = OperationVariables, +> extends Partial<DataProps<TData, TGraphQLVariables>>, Partial<MutateProps<TData, TGraphQLVariables>> { ownProps: TProps; } @@ -101,8 +100,7 @@ export interface OperationOption< props: TProps ) => | BaseQueryOptions<TGraphQLVariables> - | BaseMutationOptions<TData, TGraphQLVariables, TContext, TCache> - ); + | BaseMutationOptions<TData, TGraphQLVariables, TContext, TCache>); props?: ( props: OptionProps<TProps, TData, TGraphQLVariables>, lastProps?: TChildProps | void diff --git a/src/react/hooks/constants.ts b/src/react/hooks/constants.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/constants.ts @@ -0,0 +1,2 @@ +export const skipToken = Symbol.for('apollo.skipToken'); +export type SkipToken = typeof skipToken; diff --git a/src/react/hooks/index.ts b/src/react/hooks/index.ts --- a/src/react/hooks/index.ts +++ b/src/react/hooks/index.ts @@ -1,9 +1,16 @@ -import '../../utilities/globals'; +import "../../utilities/globals/index.js"; -export * from './useApolloClient'; -export * from './useLazyQuery'; -export * from './useMutation'; -export { useQuery } from './useQuery'; -export * from './useSubscription'; -export * from './useReactiveVar'; -export * from './useFragment'; +export * from "./useApolloClient.js"; +export * from 
"./useLazyQuery.js"; +export * from "./useMutation.js"; +export { useQuery } from "./useQuery.js"; +export * from "./useSubscription.js"; +export * from "./useReactiveVar.js"; +export * from "./useFragment.js"; +export type { UseSuspenseQueryResult } from "./useSuspenseQuery.js"; +export { useSuspenseQuery } from "./useSuspenseQuery.js"; +export type { UseBackgroundQueryResult } from "./useBackgroundQuery.js"; +export { useBackgroundQuery } from "./useBackgroundQuery.js"; +export { useReadQuery } from "./useReadQuery.js"; +export { skipToken } from "./constants.js"; +export type { SkipToken } from "./constants.js"; diff --git a/src/react/hooks/internal/__use.ts b/src/react/hooks/internal/__use.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/internal/__use.ts @@ -0,0 +1,26 @@ +import { wrapPromiseWithState } from "../../../utilities/index.js"; +import * as React from "react"; + +type Use = <T>(promise: Promise<T>) => T; +// Prevent webpack from complaining about our feature detection of the +// use property of the React namespace, which is expected not +// to exist when using current stable versions, and that's fine. +const useKey = "use" as keyof typeof React; +const realHook = React[useKey] as Use | undefined; + +// This is named with two underscores to allow this hook to evade typical rules of +// hooks (i.e. it can be used conditionally) +export const __use = + realHook || + function __use<TValue>(promise: Promise<TValue>) { + const statefulPromise = wrapPromiseWithState(promise); + + switch (statefulPromise.status) { + case "pending": + throw statefulPromise; + case "rejected": + throw statefulPromise.reason; + case "fulfilled": + return statefulPromise.value; + } + }; diff --git a/src/react/hooks/internal/index.ts b/src/react/hooks/internal/index.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/internal/index.ts @@ -0,0 +1,4 @@ +// These hooks are used internally and are not exported publicly by the library +export { useDeepMemo } from "./useDeepMemo.js"; +export { useIsomorphicLayoutEffect } from "./useIsomorphicLayoutEffect.js"; +export { __use } from "./__use.js"; diff --git a/src/react/hooks/internal/useDeepMemo.ts b/src/react/hooks/internal/useDeepMemo.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/internal/useDeepMemo.ts @@ -0,0 +1,16 @@ +import type { DependencyList } from "react"; +import * as React from "react"; +import { equal } from "@wry/equality"; + +export function useDeepMemo<TValue>( + memoFn: () => TValue, + deps: DependencyList +) { + const ref = React.useRef<{ deps: DependencyList; value: TValue }>(); + + if (!ref.current || !equal(ref.current.deps, deps)) { + ref.current = { value: memoFn(), deps }; + } + + return ref.current.value; +} diff --git a/src/react/hooks/internal/useIsomorphicLayoutEffect.ts b/src/react/hooks/internal/useIsomorphicLayoutEffect.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/internal/useIsomorphicLayoutEffect.ts @@ -0,0 +1,11 @@ +import * as React from "react"; +import { canUseDOM } from "../../../utilities/index.js"; + +// use canUseDOM here instead of canUseLayoutEffect because we want to be able +// to use useLayoutEffect in our jest tests. useLayoutEffect seems to work fine +// in useSuspenseQuery tests, but to honor the original comment about the +// warnings for useSyncExternalStore implementation, canUseLayoutEffect is left +// alone. +export const useIsomorphicLayoutEffect = canUseDOM + ? 
React.useLayoutEffect + : React.useEffect; diff --git a/src/react/hooks/useApolloClient.ts b/src/react/hooks/useApolloClient.ts --- a/src/react/hooks/useApolloClient.ts +++ b/src/react/hooks/useApolloClient.ts @@ -1,18 +1,18 @@ -import { invariant } from '../../utilities/globals'; -import { useContext } from 'react'; -import { ApolloClient } from '../../core'; -import { getApolloContext } from '../context'; +import { invariant } from "../../utilities/globals/index.js"; +import * as React from "react"; +import type { ApolloClient } from "../../core/index.js"; +import { getApolloContext } from "../context/index.js"; export function useApolloClient( - override?: ApolloClient<object>, + override?: ApolloClient<object> ): ApolloClient<object> { - const context = useContext(getApolloContext()); + const context = React.useContext(getApolloContext()); const client = override || context.client; invariant( !!client, 'Could not find "client" in the context or passed in as an option. ' + - 'Wrap the root component in an <ApolloProvider>, or pass an ApolloClient ' + - 'instance in via options.', + "Wrap the root component in an <ApolloProvider>, or pass an ApolloClient " + + "instance in via options." ); return client; diff --git a/src/react/hooks/useBackgroundQuery.ts b/src/react/hooks/useBackgroundQuery.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/useBackgroundQuery.ts @@ -0,0 +1,250 @@ +import * as React from "react"; +import type { + DocumentNode, + OperationVariables, + TypedDocumentNode, +} from "../../core/index.js"; +import { useApolloClient } from "./useApolloClient.js"; +import { wrapQueryRef, type QueryReference } from "../cache/QueryReference.js"; +import type { BackgroundQueryHookOptions, NoInfer } from "../types/types.js"; +import { __use } from "./internal/index.js"; +import { getSuspenseCache } from "../cache/index.js"; +import { useWatchQueryOptions } from "./useSuspenseQuery.js"; +import type { FetchMoreFunction, RefetchFunction } from "./useSuspenseQuery.js"; +import { canonicalStringify } from "../../cache/index.js"; +import type { DeepPartial } from "../../utilities/index.js"; +import type { CacheKey } from "../cache/types.js"; +import type { SkipToken } from "./constants.js"; + +export type UseBackgroundQueryResult< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +> = { + fetchMore: FetchMoreFunction<TData, TVariables>; + refetch: RefetchFunction<TData, TVariables>; +}; + +type BackgroundQueryHookOptionsNoInfer< + TData, + TVariables extends OperationVariables +> = BackgroundQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>>; + +export function useBackgroundQuery< + TData, + TVariables extends OperationVariables, + TOptions extends Omit<BackgroundQueryHookOptions<TData>, "variables"> +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: BackgroundQueryHookOptionsNoInfer<TData, TVariables> & TOptions +): [ + ( + | QueryReference< + TOptions["errorPolicy"] extends "ignore" | "all" + ? TOptions["returnPartialData"] extends true + ? DeepPartial<TData> | undefined + : TData | undefined + : TOptions["returnPartialData"] extends true + ? DeepPartial<TData> + : TData + > + | (TOptions["skip"] extends boolean ? 
undefined : never) + ), + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: BackgroundQueryHookOptionsNoInfer<TData, TVariables> & { + returnPartialData: true; + errorPolicy: "ignore" | "all"; + } +): [ + QueryReference<DeepPartial<TData> | undefined>, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: BackgroundQueryHookOptionsNoInfer<TData, TVariables> & { + errorPolicy: "ignore" | "all"; + } +): [ + QueryReference<TData | undefined>, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: BackgroundQueryHookOptionsNoInfer<TData, TVariables> & { + skip: boolean; + returnPartialData: true; + } +): [ + QueryReference<DeepPartial<TData>> | undefined, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: BackgroundQueryHookOptionsNoInfer<TData, TVariables> & { + returnPartialData: true; + } +): [ + QueryReference<DeepPartial<TData>>, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: BackgroundQueryHookOptionsNoInfer<TData, TVariables> & { + skip: boolean; + } +): [ + QueryReference<TData> | undefined, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: BackgroundQueryHookOptionsNoInfer<TData, TVariables> +): [QueryReference<TData>, UseBackgroundQueryResult<TData, TVariables>]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SkipToken +): [undefined, UseBackgroundQueryResult<TData, TVariables>]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: + | SkipToken + | (BackgroundQueryHookOptionsNoInfer<TData, TVariables> & { + returnPartialData: true; + }) +): [ + QueryReference<DeepPartial<TData>> | undefined, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: SkipToken | BackgroundQueryHookOptionsNoInfer<TData, TVariables> +): [ + QueryReference<TData> | undefined, + UseBackgroundQueryResult<TData, TVariables> +]; + +export function useBackgroundQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | 
TypedDocumentNode<TData, TVariables>, + options: + | (SkipToken & + Partial<BackgroundQueryHookOptionsNoInfer<TData, TVariables>>) + | BackgroundQueryHookOptionsNoInfer<TData, TVariables> = Object.create(null) +): [ + QueryReference<TData> | undefined, + UseBackgroundQueryResult<TData, TVariables> +] { + const client = useApolloClient(options.client); + const suspenseCache = getSuspenseCache(client); + const watchQueryOptions = useWatchQueryOptions({ client, query, options }); + const { fetchPolicy, variables } = watchQueryOptions; + const { queryKey = [] } = options; + + // This ref tracks the first time query execution is enabled to determine + // whether to return a query ref or `undefined`. When initialized + // in a skipped state (either via `skip: true` or `skipToken`) we return + // `undefined` for the `queryRef` until the query has been enabled. Once + // enabled, a query ref is always returned regardless of whether the query is + // skipped again later. + const didFetchResult = React.useRef(fetchPolicy !== "standby"); + didFetchResult.current ||= fetchPolicy !== "standby"; + + const cacheKey: CacheKey = [ + query, + canonicalStringify(variables), + ...([] as any[]).concat(queryKey), + ]; + + const queryRef = suspenseCache.getQueryRef(cacheKey, () => + client.watchQuery(watchQueryOptions) + ); + + const [promiseCache, setPromiseCache] = React.useState( + () => new Map([[queryRef.key, queryRef.promise]]) + ); + + if (queryRef.didChangeOptions(watchQueryOptions)) { + const promise = queryRef.applyOptions(watchQueryOptions); + promiseCache.set(queryRef.key, promise); + } + + React.useEffect(() => queryRef.retain(), [queryRef]); + + const fetchMore: FetchMoreFunction<TData, TVariables> = React.useCallback( + (options) => { + const promise = queryRef.fetchMore(options); + + setPromiseCache((promiseCache) => + new Map(promiseCache).set(queryRef.key, queryRef.promise) + ); + + return promise; + }, + [queryRef] + ); + + const refetch: RefetchFunction<TData, TVariables> = React.useCallback( + (variables) => { + const promise = queryRef.refetch(variables); + + setPromiseCache((promiseCache) => + new Map(promiseCache).set(queryRef.key, queryRef.promise) + ); + + return promise; + }, + [queryRef] + ); + + queryRef.promiseCache = promiseCache; + + const wrappedQueryRef = React.useMemo( + () => wrapQueryRef(queryRef), + [queryRef] + ); + + return [ + didFetchResult.current ? 
wrappedQueryRef : void 0, + { fetchMore, refetch }, + ]; +} diff --git a/src/react/hooks/useFragment.ts b/src/react/hooks/useFragment.ts --- a/src/react/hooks/useFragment.ts +++ b/src/react/hooks/useFragment.ts @@ -1,87 +1,62 @@ -import { useRef } from "react"; +import * as React from "react"; import { equal } from "@wry/equality"; -import { mergeDeepArray } from "../../utilities"; -import { +import type { DeepPartial } from "../../utilities/index.js"; +import { mergeDeepArray } from "../../utilities/index.js"; +import type { Cache, Reference, StoreObject, MissingTree, -} from "../../cache"; +} from "../../cache/index.js"; -import { useApolloClient } from "./useApolloClient"; -import { useSyncExternalStore } from "./useSyncExternalStore"; -import { OperationVariables } from "../../core"; +import { useApolloClient } from "./useApolloClient.js"; +import { useSyncExternalStore } from "./useSyncExternalStore.js"; +import type { OperationVariables } from "../../core/index.js"; +import type { NoInfer } from "../types/types.js"; export interface UseFragmentOptions<TData, TVars> -extends Omit< - Cache.DiffOptions<TData, TVars>, - | "id" - | "query" - | "optimistic" - | "previousResult" - | "returnPartialData" ->, Omit<Cache.ReadFragmentOptions<TData, TVars>, - | "id" - | "returnPartialData" -> { + extends Omit< + Cache.DiffOptions<NoInfer<TData>, NoInfer<TVars>>, + "id" | "query" | "optimistic" | "previousResult" | "returnPartialData" + >, + Omit< + Cache.ReadFragmentOptions<TData, TVars>, + "id" | "variables" | "returnPartialData" + > { from: StoreObject | Reference | string; // Override this field to make it optional (default: true). optimistic?: boolean; - - /** - * Whether to return incomplete data rather than null. - * Defaults to `true`. - * @deprecated This option will be removed in Apollo Client 3.8. - * Please check `result.missing` instead. - */ - returnPartialData?: boolean; } -// Since the above definition of UseFragmentOptions can be hard to parse without -// help from TypeScript/VSCode, here are the intended fields and their types. -// Uncomment this code to check that it's consistent with the definition above. -// -// export interface UseFragmentOptions<TData, TVars> { -// from: string | StoreObject | Reference; -// fragment: DocumentNode | TypedDocumentNode<TData, TVars>; -// fragmentName?: string; -// optimistic?: boolean; -// variables?: TVars; -// returnPartialData?: boolean; -// canonizeResults?: boolean; -// } - -export interface UseFragmentResult<TData> { - data: TData | undefined; - complete: boolean; - missing?: MissingTree; -} +export type UseFragmentResult<TData> = + | { + data: TData; + complete: true; + missing?: never; + } + | { + data: DeepPartial<TData>; + complete: false; + missing?: MissingTree; + }; -export function useFragment_experimental< - TData = any, - TVars = OperationVariables ->( - options: UseFragmentOptions<TData, TVars>, +export function useFragment<TData = any, TVars = OperationVariables>( + options: UseFragmentOptions<TData, TVars> ): UseFragmentResult<TData> { const { cache } = useApolloClient(); - const { - fragment, - fragmentName, - from, - optimistic = true, - ...rest - } = options; + const { fragment, fragmentName, from, optimistic = true, ...rest } = options; const diffOptions: Cache.DiffOptions<TData, TVars> = { ...rest, + returnPartialData: true, id: typeof from === "string" ? 
from : cache.identify(from), query: cache["getFragmentDoc"](fragment, fragmentName), optimistic, }; - const resultRef = useRef<UseFragmentResult<TData>>(); + const resultRef = React.useRef<UseFragmentResult<TData>>(); let latestDiff = cache.diff<TData>(diffOptions); // Used for both getSnapshot and getServerSnapshot @@ -95,16 +70,21 @@ export function useFragment_experimental< return useSyncExternalStore( (forceUpdate) => { - return cache.watch({ + let lastTimeout = 0; + const unsubcribe = cache.watch({ ...diffOptions, immediate: true, callback(diff) { if (!equal(diff, latestDiff)) { resultRef.current = diffToResult((latestDiff = diff)); - forceUpdate(); + lastTimeout = setTimeout(forceUpdate) as any; } }, }); + return () => { + unsubcribe(); + clearTimeout(lastTimeout); + }; }, getSnapshot, getSnapshot @@ -112,17 +92,15 @@ export function useFragment_experimental< } function diffToResult<TData>( - diff: Cache.DiffResult<TData>, + diff: Cache.DiffResult<TData> ): UseFragmentResult<TData> { - const result: UseFragmentResult<TData> = { - data: diff.result, + const result = { + data: diff.result!, complete: !!diff.complete, - }; + } as UseFragmentResult<TData>; if (diff.missing) { - result.missing = mergeDeepArray( - diff.missing.map(error => error.missing), - ); + result.missing = mergeDeepArray(diff.missing.map((error) => error.missing)); } return result; diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -1,39 +1,47 @@ -import { DocumentNode } from 'graphql'; -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { useCallback, useMemo, useRef } from 'react'; - -import { OperationVariables } from '../../core'; -import { mergeOptions } from '../../utilities'; -import { +import type { DocumentNode } from "graphql"; +import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; +import * as React from "react"; + +import type { OperationVariables } from "../../core/index.js"; +import { mergeOptions } from "../../utilities/index.js"; +import type { + LazyQueryHookExecOptions, LazyQueryHookOptions, LazyQueryResultTuple, + NoInfer, QueryResult, -} from '../types/types'; -import { useInternalState } from './useQuery'; -import { useApolloClient } from './useApolloClient'; +} from "../types/types.js"; +import { useInternalState } from "./useQuery.js"; +import { useApolloClient } from "./useApolloClient.js"; // The following methods, when called will execute the query, regardless of // whether the useLazyQuery execute function was called before. 
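
/*
 * Illustrative usage sketch (not part of this diff): it shows the behavior the
 * preceding comment describes. Any of the "eager" methods on the result
 * (refetch, fetchMore, etc.) executes the query even if the execute function
 * returned by useLazyQuery was never called. The query document and hook
 * below are hypothetical.
 */
import { gql, useLazyQuery } from "@apollo/client";

const GET_DOG = gql`
  query GetDog($id: ID!) {
    dog(id: $id) {
      id
      name
    }
  }
`;

function useDogLoader() {
  const [loadDog, { called, refetch }] = useLazyQuery(GET_DOG, {
    variables: { id: "1" },
  });

  // Calling refetch() here runs the query immediately and flips `called` to
  // true, even though loadDog() has not been invoked yet.
  return { loadDog, refetch, called };
}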
const EAGER_METHODS = [ - 'refetch', - 'reobserve', - 'fetchMore', - 'updateQuery', - 'startPolling', - 'subscribeToMore', + "refetch", + "reobserve", + "fetchMore", + "updateQuery", + "startPolling", + "subscribeToMore", ] as const; -export function useLazyQuery<TData = any, TVariables extends OperationVariables = OperationVariables>( +export function useLazyQuery< + TData = any, + TVariables extends OperationVariables = OperationVariables, +>( query: DocumentNode | TypedDocumentNode<TData, TVariables>, - options?: LazyQueryHookOptions<TData, TVariables> + options?: LazyQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> ): LazyQueryResultTuple<TData, TVariables> { - const execOptionsRef = useRef<Partial<LazyQueryHookOptions<TData, TVariables>>>(); - const optionsRef = useRef<LazyQueryHookOptions<TData, TVariables>>(); - const queryRef = useRef<DocumentNode | TypedDocumentNode<TData, TVariables>>(); - const merged = execOptionsRef.current ? mergeOptions(options, execOptionsRef.current) : options; + const execOptionsRef = + React.useRef<Partial<LazyQueryHookExecOptions<TData, TVariables>>>(); + const optionsRef = React.useRef<LazyQueryHookOptions<TData, TVariables>>(); + const queryRef = React.useRef< + DocumentNode | TypedDocumentNode<TData, TVariables> + >(); + const merged = mergeOptions(options, execOptionsRef.current || {}); const document = merged?.query ?? query; - // Use refs to track options and the used query to ensure the `execute` + // Use refs to track options and the used query to ensure the `execute` // function remains referentially stable between renders. optionsRef.current = merged; queryRef.current = document; @@ -52,13 +60,12 @@ export function useLazyQuery<TData = any, TVariables extends OperationVariables useQueryResult.observable.options.initialFetchPolicy || internalState.getDefaultFetchPolicy(); - const result: QueryResult<TData, TVariables> = - Object.assign(useQueryResult, { - called: !!execOptionsRef.current, - }); + const result: QueryResult<TData, TVariables> = Object.assign(useQueryResult, { + called: !!execOptionsRef.current, + }); // We use useMemo here to make sure the eager methods have a stable identity. - const eagerMethods = useMemo(() => { + const eagerMethods = React.useMemo(() => { const eagerMethods: Record<string, any> = {}; for (const key of EAGER_METHODS) { const method = result[key]; @@ -66,7 +73,7 @@ export function useLazyQuery<TData = any, TVariables extends OperationVariables if (!execOptionsRef.current) { execOptionsRef.current = Object.create(null); // Only the first time populating execOptionsRef.current matters here. - internalState.forceUpdate(); + internalState.forceUpdateState(); } return method.apply(this, arguments); }; @@ -77,31 +84,34 @@ export function useLazyQuery<TData = any, TVariables extends OperationVariables Object.assign(result, eagerMethods); - const execute = useCallback< - LazyQueryResultTuple<TData, TVariables>[0] - >(executeOptions => { - execOptionsRef.current = executeOptions ? { - ...executeOptions, - fetchPolicy: executeOptions.fetchPolicy || initialFetchPolicy, - } : { - fetchPolicy: initialFetchPolicy, - }; - - const options = mergeOptions(optionsRef.current, { - query: queryRef.current, - ...execOptionsRef.current, - }) - - const promise = internalState - .executeQuery({ ...options, skip: false }) - .then((queryResult) => Object.assign(queryResult, eagerMethods)); - - // Because the return value of `useLazyQuery` is usually floated, we need - // to catch the promise to prevent unhandled rejections. 
- promise.catch(() => {}); - - return promise; - }, []); + const execute = React.useCallback<LazyQueryResultTuple<TData, TVariables>[0]>( + (executeOptions) => { + execOptionsRef.current = executeOptions + ? { + ...executeOptions, + fetchPolicy: executeOptions.fetchPolicy || initialFetchPolicy, + } + : { + fetchPolicy: initialFetchPolicy, + }; + + const options = mergeOptions(optionsRef.current, { + query: queryRef.current, + ...execOptionsRef.current, + }); + + const promise = internalState + .executeQuery({ ...options, skip: false }) + .then((queryResult) => Object.assign(queryResult, eagerMethods)); + + // Because the return value of `useLazyQuery` is usually floated, we need + // to catch the promise to prevent unhandled rejections. + promise.catch(() => {}); + + return promise; + }, + [] + ); return [execute, result]; } diff --git a/src/react/hooks/useMutation.ts b/src/react/hooks/useMutation.ts --- a/src/react/hooks/useMutation.ts +++ b/src/react/hooks/useMutation.ts @@ -1,23 +1,24 @@ -import { useCallback, useEffect, useRef, useState } from 'react'; -import { DocumentNode } from 'graphql'; -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { +import * as React from "react"; +import type { DocumentNode } from "graphql"; +import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; +import type { MutationFunctionOptions, MutationHookOptions, MutationResult, MutationTuple, -} from '../types/types'; + NoInfer, +} from "../types/types.js"; -import { +import type { ApolloCache, DefaultContext, - mergeOptions, OperationVariables, -} from '../../core'; -import { equal } from '@wry/equality'; -import { DocumentType, verifyDocumentType } from '../parser'; -import { ApolloError } from '../../errors'; -import { useApolloClient } from './useApolloClient'; +} from "../../core/index.js"; +import { mergeOptions } from "../../utilities/index.js"; +import { equal } from "@wry/equality"; +import { DocumentType, verifyDocumentType } from "../parser/index.js"; +import { ApolloError } from "../../errors/index.js"; +import { useApolloClient } from "./useApolloClient.js"; export function useMutation< TData = any, @@ -26,17 +27,22 @@ export function useMutation< TCache extends ApolloCache<any> = ApolloCache<any>, >( mutation: DocumentNode | TypedDocumentNode<TData, TVariables>, - options?: MutationHookOptions<TData, TVariables, TContext, TCache>, + options?: MutationHookOptions< + NoInfer<TData>, + NoInfer<TVariables>, + TContext, + TCache + > ): MutationTuple<TData, TVariables, TContext, TCache> { const client = useApolloClient(options?.client); verifyDocumentType(mutation, DocumentType.Mutation); - const [result, setResult] = useState<Omit<MutationResult, 'reset'>>({ + const [result, setResult] = React.useState<Omit<MutationResult, "reset">>({ called: false, loading: false, client, }); - const ref = useRef({ + const ref = React.useRef({ result, mutationId: 0, isMounted: true, @@ -51,100 +57,118 @@ export function useMutation< Object.assign(ref.current, { client, options, mutation }); } - const execute = useCallback(( - executeOptions: MutationFunctionOptions< - TData, - TVariables, - TContext, - TCache - > = {} - ) => { - const {options, mutation} = ref.current; - const baseOptions = { ...options, mutation }; - const client = executeOptions.client || ref.current.client; - - if (!ref.current.result.loading && !baseOptions.ignoreResults && ref.current.isMounted) { - setResult(ref.current.result = { - loading: true, - error: void 0, - data: void 0, - called: true, 
- client, - }); - } - - const mutationId = ++ref.current.mutationId; - const clientOptions = mergeOptions( - baseOptions, - executeOptions as any, - ); - - return client.mutate(clientOptions).then((response) => { - const { data, errors } = response; - const error = - errors && errors.length > 0 - ? new ApolloError({ graphQLErrors: errors }) - : void 0; - - if ( - mutationId === ref.current.mutationId && - !clientOptions.ignoreResults - ) { - const result = { - called: true, - loading: false, - data, - error, - client, - }; - - if (ref.current.isMounted && !equal(ref.current.result, result)) { - setResult(ref.current.result = result); - } - } + const execute = React.useCallback( + ( + executeOptions: MutationFunctionOptions< + TData, + TVariables, + TContext, + TCache + > = {} + ) => { + const { options, mutation } = ref.current; + const baseOptions = { ...options, mutation }; + const client = executeOptions.client || ref.current.client; - const onCompleted = executeOptions.onCompleted || ref.current.options?.onCompleted - onCompleted?.(response.data!, clientOptions); - - return response; - }).catch((error) => { if ( - mutationId === ref.current.mutationId && + !ref.current.result.loading && + !baseOptions.ignoreResults && ref.current.isMounted ) { - const result = { - loading: false, - error, - data: void 0, - called: true, - client, - }; - - if (!equal(ref.current.result, result)) { - setResult(ref.current.result = result); - } + setResult( + (ref.current.result = { + loading: true, + error: void 0, + data: void 0, + called: true, + client, + }) + ); } - const onError = executeOptions.onError || ref.current.options?.onError - - if (onError) { - onError(error, clientOptions); - - // TODO(brian): why are we returning this here??? - return { data: void 0, errors: error }; - } - - throw error; - }); - }, []); - - const reset = useCallback(() => { + const mutationId = ++ref.current.mutationId; + const clientOptions = mergeOptions(baseOptions, executeOptions as any); + + return client + .mutate(clientOptions) + .then((response) => { + const { data, errors } = response; + const error = + errors && errors.length > 0 + ? new ApolloError({ graphQLErrors: errors }) + : void 0; + + const onError = + executeOptions.onError || ref.current.options?.onError; + + if (error && onError) { + onError(error, clientOptions); + } + + if ( + mutationId === ref.current.mutationId && + !clientOptions.ignoreResults + ) { + const result = { + called: true, + loading: false, + data, + error, + client, + }; + + if (ref.current.isMounted && !equal(ref.current.result, result)) { + setResult((ref.current.result = result)); + } + } + + const onCompleted = + executeOptions.onCompleted || ref.current.options?.onCompleted; + + if (!error) { + onCompleted?.(response.data!, clientOptions); + } + + return response; + }) + .catch((error) => { + if (mutationId === ref.current.mutationId && ref.current.isMounted) { + const result = { + loading: false, + error, + data: void 0, + called: true, + client, + }; + + if (!equal(ref.current.result, result)) { + setResult((ref.current.result = result)); + } + } + + const onError = + executeOptions.onError || ref.current.options?.onError; + + if (onError) { + onError(error, clientOptions); + + // TODO(brian): why are we returning this here??? 
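
/*
 * Illustrative usage sketch (not part of this diff): when an onError callback
 * is supplied, a failed mutate() call resolves with { data: undefined, errors }
 * instead of rejecting, as the surrounding catch handler shows. The mutation
 * document and hook below are hypothetical.
 */
import { gql, useMutation } from "@apollo/client";

const ADD_TODO = gql`
  mutation AddTodo($text: String!) {
    addTodo(text: $text) {
      id
    }
  }
`;

function useAddTodo() {
  const [addTodo] = useMutation(ADD_TODO, {
    onError: (error) => console.error(error.message),
  });

  // Safe to call without try/catch: with onError present, failures are
  // reported through the callback and the returned promise still resolves.
  return (text: string) => addTodo({ variables: { text } });
}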
+ return { data: void 0, errors: error }; + } + + throw error; + }); + }, + [] + ); + + const reset = React.useCallback(() => { if (ref.current.isMounted) { setResult({ called: false, loading: false, client }); } }, []); - useEffect(() => { + React.useEffect(() => { ref.current.isMounted = true; return () => { diff --git a/src/react/hooks/useQuery.ts b/src/react/hooks/useQuery.ts --- a/src/react/hooks/useQuery.ts +++ b/src/react/hooks/useQuery.ts @@ -1,41 +1,44 @@ -import { invariant } from '../../utilities/globals'; - -import { - useCallback, - useContext, - useMemo, - useRef, - useState, -} from 'react'; -import { useSyncExternalStore } from './useSyncExternalStore'; -import { equal } from '@wry/equality'; - -import { mergeOptions, OperationVariables, WatchQueryFetchPolicy } from '../../core'; -import { ApolloContextValue, getApolloContext } from '../context'; -import { ApolloError } from '../../errors'; -import { +import { invariant } from "../../utilities/globals/index.js"; + +import * as React from "react"; +import { useSyncExternalStore } from "./useSyncExternalStore.js"; +import { equal } from "@wry/equality"; + +import type { + OperationVariables, + WatchQueryFetchPolicy, +} from "../../core/index.js"; +import { mergeOptions } from "../../utilities/index.js"; +import type { ApolloContextValue } from "../context/index.js"; +import { getApolloContext } from "../context/index.js"; +import { ApolloError } from "../../errors/index.js"; +import type { ApolloClient, ApolloQueryResult, - NetworkStatus, ObservableQuery, DocumentNode, TypedDocumentNode, WatchQueryOptions, -} from '../../core'; -import { +} from "../../core/index.js"; +import { NetworkStatus } from "../../core/index.js"; +import type { QueryHookOptions, QueryResult, ObservableQueryFields, -} from '../types/types'; + NoInfer, +} from "../types/types.js"; -import { DocumentType, verifyDocumentType } from '../parser'; -import { useApolloClient } from './useApolloClient'; -import { canUseWeakMap, compact, isNonEmptyArray, maybeDeepFreeze } from '../../utilities'; +import { DocumentType, verifyDocumentType } from "../parser/index.js"; +import { useApolloClient } from "./useApolloClient.js"; +import { + canUseWeakMap, + compact, + isNonEmptyArray, + maybeDeepFreeze, +} from "../../utilities/index.js"; const { - prototype: { - hasOwnProperty, - }, + prototype: { hasOwnProperty }, } = Object; export function useQuery< @@ -43,19 +46,21 @@ export function useQuery< TVariables extends OperationVariables = OperationVariables, >( query: DocumentNode | TypedDocumentNode<TData, TVariables>, - options: QueryHookOptions<TData, TVariables> = Object.create(null), + options: QueryHookOptions< + NoInfer<TData>, + NoInfer<TVariables> + > = Object.create(null) ): QueryResult<TData, TVariables> { - return useInternalState( - useApolloClient(options.client), - query, - ).useQuery(options); + return useInternalState(useApolloClient(options.client), query).useQuery( + options + ); } export function useInternalState<TData, TVariables extends OperationVariables>( client: ApolloClient<any>, - query: DocumentNode | TypedDocumentNode<TData, TVariables>, + query: DocumentNode | TypedDocumentNode<TData, TVariables> ): InternalState<TData, TVariables> { - const stateRef = useRef<InternalState<TData, TVariables>>(); + const stateRef = React.useRef<InternalState<TData, TVariables>>(); if ( !stateRef.current || client !== stateRef.current.client || @@ -71,10 +76,7 @@ export function useInternalState<TData, TVariables extends OperationVariables>( // setTick 
function. Updating this state by calling state.forceUpdate is the // only way we trigger React component updates (no other useState calls within // the InternalState class). - const [_tick, setTick] = useState(0); - state.forceUpdate = () => { - setTick(tick => tick + 1); - }; + state.forceUpdateState = React.useReducer((tick) => tick + 1, 0)[1]; return state; } @@ -83,7 +85,7 @@ class InternalState<TData, TVariables extends OperationVariables> { constructor( public readonly client: ReturnType<typeof useApolloClient>, public readonly query: DocumentNode | TypedDocumentNode<TData, TVariables>, - previous?: InternalState<TData, TVariables>, + previous?: InternalState<TData, TVariables> ) { verifyDocumentType(query, DocumentType.Query); @@ -96,18 +98,37 @@ class InternalState<TData, TVariables extends OperationVariables> { } } - forceUpdate() { + /** + * Forces an update using local component state. + * As this is not batched with `useSyncExternalStore` updates, + * this is only used as a fallback if the `useSyncExternalStore` "force update" + * method is not registered at the moment. + * See https://github.com/facebook/react/issues/25191 + * */ + forceUpdateState() { // Replaced (in useInternalState) with a method that triggers an update. - invariant.warn("Calling default no-op implementation of InternalState#forceUpdate"); + invariant.warn( + "Calling default no-op implementation of InternalState#forceUpdate" + ); } - executeQuery(options: QueryHookOptions<TData, TVariables>) { + /** + * Will be overwritten by the `useSyncExternalStore` "force update" method + * whenever it is available and reset to `forceUpdateState` when it isn't. + */ + forceUpdate = () => this.forceUpdateState(); + + executeQuery( + options: QueryHookOptions<TData, TVariables> & { + query?: DocumentNode; + } + ) { if (options.query) { - Object.assign(this, { query: options.query }) + Object.assign(this, { query: options.query }); } this.watchQueryOptions = this.createWatchQueryOptions( - this.queryHookOptions = options, + (this.queryHookOptions = options) ); const concast = this.observable.reobserveAsConcast( @@ -124,7 +145,7 @@ class InternalState<TData, TVariables extends OperationVariables> { return new Promise<QueryResult<TData, TVariables>>((resolve) => { let result: ApolloQueryResult<TData>; - // Subscribe to the concast independently of the ObservableQuery in case + // Subscribe to the concast independently of the ObservableQuery in case // the component gets unmounted before the promise resolves. This prevents // the concast from terminating early and resolving with `undefined` when // there are no more subscribers for the concast. @@ -137,7 +158,7 @@ class InternalState<TData, TVariables extends OperationVariables> { }, complete: () => { resolve(this.toQueryResult(result)); - } + }, }); }); } @@ -153,95 +174,103 @@ class InternalState<TData, TVariables extends OperationVariables> { // initialization, this.renderPromises is usually undefined (unless SSR is // happening), but that's fine as long as it has been initialized that way, // rather than left uninitialized. 
- this.renderPromises = useContext(getApolloContext()).renderPromises; + this.renderPromises = React.useContext(getApolloContext()).renderPromises; this.useOptions(options); const obsQuery = this.useObservableQuery(); const result = useSyncExternalStore( - useCallback(() => { - if (this.renderPromises) { - return () => {}; - } - - const onNext = () => { - const previousResult = this.result; - // We use `getCurrentResult()` instead of the onNext argument because - // the values differ slightly. Specifically, loading results will have - // an empty object for data instead of `undefined` for some reason. - const result = obsQuery.getCurrentResult(); - // Make sure we're not attempting to re-render similar results - if ( - previousResult && - previousResult.loading === result.loading && - previousResult.networkStatus === result.networkStatus && - equal(previousResult.data, result.data) - ) { - return; - } - - this.setResult(result); - }; - - const onError = (error: Error) => { - const last = obsQuery["last"]; - subscription.unsubscribe(); - // Unfortunately, if `lastError` is set in the current - // `observableQuery` when the subscription is re-created, - // the subscription will immediately receive the error, which will - // cause it to terminate again. To avoid this, we first clear - // the last error/result from the `observableQuery` before re-starting - // the subscription, and restore it afterwards (so the subscription - // has a chance to stay open). - try { - obsQuery.resetLastResults(); - subscription = obsQuery.subscribe(onNext, onError); - } finally { - obsQuery["last"] = last; - } - - if (!hasOwnProperty.call(error, 'graphQLErrors')) { - // The error is not a GraphQL error - throw error; + React.useCallback( + (handleStoreChange) => { + if (this.renderPromises) { + return () => {}; } - const previousResult = this.result; - if ( - !previousResult || - (previousResult && previousResult.loading) || - !equal(error, previousResult.error) - ) { - this.setResult({ - data: (previousResult && previousResult.data) as TData, - error: error as ApolloError, - loading: false, - networkStatus: NetworkStatus.error, - }); - } - }; - - let subscription = obsQuery.subscribe(onNext, onError); - - // Do the "unsubscribe" with a short delay. - // This way, an existing subscription can be reused without an additional - // request if "unsubscribe" and "resubscribe" to the same ObservableQuery - // happen in very fast succession. - return () => setTimeout(() => subscription.unsubscribe()); - }, [ - // We memoize the subscribe function using useCallback and the following - // dependency keys, because the subscribe function reference is all that - // useSyncExternalStore uses internally as a dependency key for the - // useEffect ultimately responsible for the subscription, so we are - // effectively passing this dependency array to that useEffect buried - // inside useSyncExternalStore, as desired. - obsQuery, - this.renderPromises, - this.client.disableNetworkFetches, - ]), + this.forceUpdate = handleStoreChange; + + const onNext = () => { + const previousResult = this.result; + // We use `getCurrentResult()` instead of the onNext argument because + // the values differ slightly. Specifically, loading results will have + // an empty object for data instead of `undefined` for some reason. 
+ const result = obsQuery.getCurrentResult(); + // Make sure we're not attempting to re-render similar results + if ( + previousResult && + previousResult.loading === result.loading && + previousResult.networkStatus === result.networkStatus && + equal(previousResult.data, result.data) + ) { + return; + } + + this.setResult(result); + }; + + const onError = (error: Error) => { + const last = obsQuery["last"]; + subscription.unsubscribe(); + // Unfortunately, if `lastError` is set in the current + // `observableQuery` when the subscription is re-created, + // the subscription will immediately receive the error, which will + // cause it to terminate again. To avoid this, we first clear + // the last error/result from the `observableQuery` before re-starting + // the subscription, and restore it afterwards (so the subscription + // has a chance to stay open). + try { + obsQuery.resetLastResults(); + subscription = obsQuery.subscribe(onNext, onError); + } finally { + obsQuery["last"] = last; + } + + if (!hasOwnProperty.call(error, "graphQLErrors")) { + // The error is not a GraphQL error + throw error; + } + + const previousResult = this.result; + if ( + !previousResult || + (previousResult && previousResult.loading) || + !equal(error, previousResult.error) + ) { + this.setResult({ + data: (previousResult && previousResult.data) as TData, + error: error as ApolloError, + loading: false, + networkStatus: NetworkStatus.error, + }); + } + }; + + let subscription = obsQuery.subscribe(onNext, onError); + + // Do the "unsubscribe" with a short delay. + // This way, an existing subscription can be reused without an additional + // request if "unsubscribe" and "resubscribe" to the same ObservableQuery + // happen in very fast succession. + return () => { + setTimeout(() => subscription.unsubscribe()); + this.forceUpdate = () => this.forceUpdateState(); + }; + }, + [ + // We memoize the subscribe function using useCallback and the following + // dependency keys, because the subscribe function reference is all that + // useSyncExternalStore uses internally as a dependency key for the + // useEffect ultimately responsible for the subscription, so we are + // effectively passing this dependency array to that useEffect buried + // inside useSyncExternalStore, as desired. + obsQuery, + this.renderPromises, + this.client.disableNetworkFetches, + ] + ), () => this.getCurrentResult(), - () => this.getCurrentResult(), + () => this.getCurrentResult() ); // TODO Remove this method when we remove support for options.partialRefetch. @@ -258,11 +287,9 @@ class InternalState<TData, TVariables extends OperationVariables> { private queryHookOptions: QueryHookOptions<TData, TVariables>; private watchQueryOptions: WatchQueryOptions<TVariables, TData>; - private useOptions( - options: QueryHookOptions<TData, TVariables>, - ) { + private useOptions(options: QueryHookOptions<TData, TVariables>) { const watchQueryOptions = this.createWatchQueryOptions( - this.queryHookOptions = options, + (this.queryHookOptions = options) ); // Update this.watchQueryOptions, but only when they have changed, which @@ -298,7 +325,8 @@ class InternalState<TData, TVariables extends OperationVariables> { // Like the forceUpdate method, the versions of these methods inherited from // InternalState.prototype are empty no-ops, but we can override them on the // base state object (without modifying the prototype). 
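
/*
 * Illustrative usage sketch (not part of this diff): the onCompleted and
 * onError callbacks wired up just below (this.onCompleted / this.onError) are
 * the ones callers pass to useQuery. The query document and hook are
 * hypothetical.
 */
import { gql, useQuery } from "@apollo/client";

const GET_POSTS = gql`
  query GetPosts {
    posts {
      id
    }
  }
`;

function usePosts() {
  return useQuery(GET_POSTS, {
    // Invoked on a microtask (see handleErrorOrCompleted) once the query
    // settles with data and NetworkStatus.ready.
    onCompleted: (data) => console.log("loaded", data.posts.length),
    onError: (error) => console.error(error.message),
  });
}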
- this.onCompleted = options.onCompleted || InternalState.prototype.onCompleted; + this.onCompleted = + options.onCompleted || InternalState.prototype.onCompleted; this.onError = options.onError || InternalState.prototype.onError; if ( @@ -311,7 +339,7 @@ class InternalState<TData, TVariables extends OperationVariables> { this.result = this.ssrDisabledResult; } else if ( this.queryHookOptions.skip || - this.watchQueryOptions.fetchPolicy === 'standby' + this.watchQueryOptions.fetchPolicy === "standby" ) { // When skipping a query (ie. we're not querying for data but still want to // render children), make sure the `data` is cleared out and `loading` is @@ -333,9 +361,7 @@ class InternalState<TData, TVariables extends OperationVariables> { } private getObsQueryOptions(): WatchQueryOptions<TVariables, TData> { - const toMerge: Array< - Partial<WatchQueryOptions<TVariables, TData>> - > = []; + const toMerge: Array<Partial<WatchQueryOptions<TVariables, TData>>> = []; const globalDefaults = this.client.defaultOptions.watchQuery; if (globalDefaults) toMerge.push(globalDefaults); @@ -354,14 +380,14 @@ class InternalState<TData, TVariables extends OperationVariables> { // (if provided) should be merged, to ensure individual defaulted // variables always have values, if not otherwise defined in // observable.options or watchQueryOptions. - toMerge.push(compact( - this.observable && this.observable.options, - this.watchQueryOptions, - )); - - return toMerge.reduce( - mergeOptions - ) as WatchQueryOptions<TVariables, TData>; + toMerge.push( + compact( + this.observable && this.observable.options, + this.watchQueryOptions + ) + ); + + return toMerge.reduce(mergeOptions) as WatchQueryOptions<TVariables, TData>; } private ssrDisabledResult = maybeDeepFreeze({ @@ -389,7 +415,10 @@ class InternalState<TData, TVariables extends OperationVariables> { // makes otherOptions almost a WatchQueryOptions object, except for the // query property that we add below. ...otherOptions - }: QueryHookOptions<TData, TVariables> = {}): WatchQueryOptions<TVariables, TData> { + }: QueryHookOptions<TData, TVariables> = {}): WatchQueryOptions< + TVariables, + TData + > { // This Object.assign is safe because otherOptions is a fresh ...rest object // that did not exist until just now, so modifications are still allowed. const watchQueryOptions: WatchQueryOptions<TVariables, TData> = @@ -397,14 +426,12 @@ class InternalState<TData, TVariables extends OperationVariables> { if ( this.renderPromises && - ( - watchQueryOptions.fetchPolicy === 'network-only' || - watchQueryOptions.fetchPolicy === 'cache-and-network' - ) + (watchQueryOptions.fetchPolicy === "network-only" || + watchQueryOptions.fetchPolicy === "cache-and-network") ) { // this behavior was added to react-apollo without explanation in this PR // https://github.com/apollographql/react-apollo/pull/1579 - watchQueryOptions.fetchPolicy = 'cache-first'; + watchQueryOptions.fetchPolicy = "cache-first"; } if (!watchQueryOptions.variables) { @@ -422,7 +449,7 @@ class InternalState<TData, TVariables extends OperationVariables> { // fetchPolicy that would have been used if not skipping. 
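
/*
 * Illustrative sketch (not part of this diff) of the mapping performed just
 * below: when `skip` is true, useQuery watches the query with fetchPolicy
 * "standby" so no request is sent, while initialFetchPolicy remembers the
 * policy to restore once the query is unskipped. The query document is
 * hypothetical.
 */
import { gql, useQuery } from "@apollo/client";

const GET_USER = gql`
  query GetUser {
    user {
      id
    }
  }
`;

function useMaybeUser(skip: boolean) {
  // With skip: true this returns { loading: false, data: undefined, called: false }
  // immediately, because the underlying watchQuery runs in "standby".
  return useQuery(GET_USER, { skip });
}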
Object.assign(watchQueryOptions, { initialFetchPolicy, - fetchPolicy: 'standby', + fetchPolicy: "standby", }); } else if (!watchQueryOptions.fetchPolicy) { watchQueryOptions.fetchPolicy = @@ -457,25 +484,27 @@ class InternalState<TData, TVariables extends OperationVariables> { // See if there is an existing observable that was used to fetch the same // data and if so, use it instead since it will contain the proper queryId // to fetch the result set. This is used during SSR. - const obsQuery = this.observable = - this.renderPromises - && this.renderPromises.getSSRObservable(this.watchQueryOptions) - || this.observable // Reuse this.observable if possible (and not SSR) - || this.client.watchQuery(this.getObsQueryOptions()); - - this.obsQueryFields = useMemo(() => ({ - refetch: obsQuery.refetch.bind(obsQuery), - reobserve: obsQuery.reobserve.bind(obsQuery), - fetchMore: obsQuery.fetchMore.bind(obsQuery), - updateQuery: obsQuery.updateQuery.bind(obsQuery), - startPolling: obsQuery.startPolling.bind(obsQuery), - stopPolling: obsQuery.stopPolling.bind(obsQuery), - subscribeToMore: obsQuery.subscribeToMore.bind(obsQuery), - }), [obsQuery]); + const obsQuery = (this.observable = + (this.renderPromises && + this.renderPromises.getSSRObservable(this.watchQueryOptions)) || + this.observable || // Reuse this.observable if possible (and not SSR) + this.client.watchQuery(this.getObsQueryOptions())); + + this.obsQueryFields = React.useMemo( + () => ({ + refetch: obsQuery.refetch.bind(obsQuery), + reobserve: obsQuery.reobserve.bind(obsQuery), + fetchMore: obsQuery.fetchMore.bind(obsQuery), + updateQuery: obsQuery.updateQuery.bind(obsQuery), + startPolling: obsQuery.startPolling.bind(obsQuery), + stopPolling: obsQuery.stopPolling.bind(obsQuery), + subscribeToMore: obsQuery.subscribeToMore.bind(obsQuery), + }), + [obsQuery] + ); const ssrAllowed = !( - this.queryHookOptions.ssr === false || - this.queryHookOptions.skip + this.queryHookOptions.ssr === false || this.queryHookOptions.skip ); if (this.renderPromises && ssrAllowed) { @@ -504,30 +533,41 @@ class InternalState<TData, TVariables extends OperationVariables> { // Calling state.setResult always triggers an update, though some call sites // perform additional equality checks before committing to an update. this.forceUpdate(); - this.handleErrorOrCompleted(nextResult); + this.handleErrorOrCompleted(nextResult, previousResult); } - private handleErrorOrCompleted(result: ApolloQueryResult<TData>) { + private handleErrorOrCompleted( + result: ApolloQueryResult<TData>, + previousResult?: ApolloQueryResult<TData> + ) { if (!result.loading) { const error = this.toApolloError(result); // wait a tick in case we are in the middle of rendering a component - Promise.resolve().then(() => { - if (error) { - this.onError(error); - } else if (result.data) { - this.onCompleted(result.data); - } - }).catch(error => { - invariant.warn(error); - }); + Promise.resolve() + .then(() => { + if (error) { + this.onError(error); + } else if ( + result.data && + previousResult?.networkStatus !== result.networkStatus && + result.networkStatus === NetworkStatus.ready + ) { + this.onCompleted(result.data); + } + }) + .catch((error) => { + invariant.warn(error); + }); } } - private toApolloError(result: ApolloQueryResult<TData>): ApolloError | undefined { + private toApolloError( + result: ApolloQueryResult<TData> + ): ApolloError | undefined { return isNonEmptyArray(result.errors) ? 
new ApolloError({ graphQLErrors: result.errors }) - : result.error + : result.error; } private getCurrentResult(): ApolloQueryResult<TData> { @@ -536,7 +576,7 @@ class InternalState<TData, TVariables extends OperationVariables> { // we're doing server rendering and therefore override the result below. if (!this.result) { this.handleErrorOrCompleted( - this.result = this.observable.getCurrentResult() + (this.result = this.observable.getCurrentResult()) ); } return this.result; @@ -551,22 +591,25 @@ class InternalState<TData, TVariables extends OperationVariables> { >(); toQueryResult( - result: ApolloQueryResult<TData>, + result: ApolloQueryResult<TData> ): QueryResult<TData, TVariables> { let queryResult = this.toQueryResultCache.get(result); if (queryResult) return queryResult; const { data, partial, ...resultWithoutPartial } = result; - this.toQueryResultCache.set(result, queryResult = { - data, // Ensure always defined, even if result.data is missing. - ...resultWithoutPartial, - ...this.obsQueryFields, - client: this.client, - observable: this.observable, - variables: this.observable.variables, - called: !this.queryHookOptions.skip, - previousData: this.previousData, - }); + this.toQueryResultCache.set( + result, + (queryResult = { + data, // Ensure always defined, even if result.data is missing. + ...resultWithoutPartial, + ...this.obsQueryFields, + client: this.client, + observable: this.observable, + variables: this.observable.variables, + called: !this.queryHookOptions.skip, + previousData: this.previousData, + }) + ); if (!queryResult.error && isNonEmptyArray(result.errors)) { // Until a set naming convention for networkError and graphQLErrors is @@ -590,7 +633,7 @@ class InternalState<TData, TVariables extends OperationVariables> { this.queryHookOptions.partialRefetch && !result.loading && (!result.data || Object.keys(result.data).length === 0) && - this.observable.options.fetchPolicy !== 'cache-only' + this.observable.options.fetchPolicy !== "cache-only" ) { Object.assign(result, { loading: true, diff --git a/src/react/hooks/useReactiveVar.ts b/src/react/hooks/useReactiveVar.ts --- a/src/react/hooks/useReactiveVar.ts +++ b/src/react/hooks/useReactiveVar.ts @@ -1,17 +1,17 @@ -import { useEffect, useState } from 'react'; -import { ReactiveVar } from '../../core'; +import * as React from "react"; +import type { ReactiveVar } from "../../core/index.js"; export function useReactiveVar<T>(rv: ReactiveVar<T>): T { const value = rv(); // We don't actually care what useState thinks the value of the variable // is, so we take only the update function from the returned array. - const setValue = useState(value)[1]; + const setValue = React.useState(value)[1]; // We subscribe to variable updates on initial mount and when the value has // changed. This avoids a subtle bug in React.StrictMode where multiple // listeners are added, leading to inconsistent updates. 
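
/*
 * Illustrative usage sketch (not part of this diff): a reactive variable read
 * through useReactiveVar re-renders its caller whenever the variable is given
 * a new value. The variable and wrapper hook below are hypothetical.
 */
import { makeVar, useReactiveVar } from "@apollo/client";

const cartItemsVar = makeVar<string[]>([]);

function useCartCount() {
  // Subscribes on mount (and again if the value already changed before the
  // effect ran, per the comment above), then re-renders on every
  // cartItemsVar(...) update.
  const items = useReactiveVar(cartItemsVar);
  return items.length;
}

// Elsewhere: cartItemsVar([...cartItemsVar(), "product-1"]) triggers an update.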
- useEffect(() => { + React.useEffect(() => { const probablySameValue = rv(); if (value !== probablySameValue) { // If the value of rv has already changed, we don't need to listen for the diff --git a/src/react/hooks/useReadQuery.ts b/src/react/hooks/useReadQuery.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/useReadQuery.ts @@ -0,0 +1,49 @@ +import * as React from "react"; +import { + unwrapQueryRef, + type QueryReference, +} from "../cache/QueryReference.js"; +import { __use } from "./internal/index.js"; +import { toApolloError } from "./useSuspenseQuery.js"; +import { invariant } from "../../utilities/globals/index.js"; +import { useSyncExternalStore } from "./useSyncExternalStore.js"; + +export function useReadQuery<TData>(queryRef: QueryReference<TData>) { + const internalQueryRef = unwrapQueryRef(queryRef); + invariant( + internalQueryRef.promiseCache, + "It appears that `useReadQuery` was used outside of `useBackgroundQuery`. " + + "`useReadQuery` is only supported for use with `useBackgroundQuery`. " + + "Please ensure you are passing the `queryRef` returned from `useBackgroundQuery`." + ); + + const { promiseCache, key } = internalQueryRef; + + if (!promiseCache.has(key)) { + promiseCache.set(key, internalQueryRef.promise); + } + + const promise = useSyncExternalStore( + React.useCallback( + (forceUpdate) => { + return internalQueryRef.listen((promise) => { + internalQueryRef.promiseCache!.set(internalQueryRef.key, promise); + forceUpdate(); + }); + }, + [internalQueryRef] + ), + () => promiseCache.get(key)!, + () => promiseCache.get(key)! + ); + + const result = __use(promise); + + return React.useMemo(() => { + return { + data: result.data, + networkStatus: result.networkStatus, + error: toApolloError(result), + }; + }, [result]); +} diff --git a/src/react/hooks/useSubscription.ts b/src/react/hooks/useSubscription.ts --- a/src/react/hooks/useSubscription.ts +++ b/src/react/hooks/useSubscription.ts @@ -1,26 +1,31 @@ -import '../../utilities/globals'; -import { useState, useRef, useEffect } from 'react'; -import { DocumentNode } from 'graphql'; -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { invariant } from '../../utilities/globals' -import { equal } from '@wry/equality'; - -import { DocumentType, verifyDocumentType } from '../parser'; -import { +import { invariant } from "../../utilities/globals/index.js"; +import * as React from "react"; +import type { DocumentNode } from "graphql"; +import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; +import { equal } from "@wry/equality"; + +import { DocumentType, verifyDocumentType } from "../parser/index.js"; +import type { + NoInfer, SubscriptionHookOptions, - SubscriptionResult -} from '../types/types'; -import { OperationVariables } from '../../core'; -import { useApolloClient } from './useApolloClient'; - -export function useSubscription<TData = any, TVariables extends OperationVariables = OperationVariables>( + SubscriptionResult, +} from "../types/types.js"; +import type { OperationVariables } from "../../core/index.js"; +import { useApolloClient } from "./useApolloClient.js"; + +export function useSubscription< + TData = any, + TVariables extends OperationVariables = OperationVariables, +>( subscription: DocumentNode | TypedDocumentNode<TData, TVariables>, - options?: SubscriptionHookOptions<TData, TVariables>, + options?: SubscriptionHookOptions<NoInfer<TData>, NoInfer<TVariables>> ) { - const hasIssuedDeprecationWarningRef = useRef(false); + const 
hasIssuedDeprecationWarningRef = React.useRef(false); const client = useApolloClient(options?.client); verifyDocumentType(subscription, DocumentType.Subscription); - const [result, setResult] = useState<SubscriptionResult<TData>>({ + const [result, setResult] = React.useState< + SubscriptionResult<TData, TVariables> + >({ loading: !options?.skip, error: void 0, data: void 0, @@ -47,7 +52,7 @@ export function useSubscription<TData = any, TVariables extends OperationVariabl } } - const [observable, setObservable] = useState(() => { + const [observable, setObservable] = React.useState(() => { if (options?.skip) { return null; } @@ -60,22 +65,25 @@ export function useSubscription<TData = any, TVariables extends OperationVariabl }); }); - const canResetObservableRef = useRef(false); - useEffect(() => { + const canResetObservableRef = React.useRef(false); + React.useEffect(() => { return () => { canResetObservableRef.current = true; }; }, []); - const ref = useRef({ client, subscription, options }); - useEffect(() => { + const ref = React.useRef({ client, subscription, options }); + React.useEffect(() => { let shouldResubscribe = options?.shouldResubscribe; - if (typeof shouldResubscribe === 'function') { + if (typeof shouldResubscribe === "function") { shouldResubscribe = !!shouldResubscribe(options!); } if (options?.skip) { - if (!options?.skip !== !ref.current.options?.skip || canResetObservableRef.current) { + if ( + !options?.skip !== !ref.current.options?.skip || + canResetObservableRef.current + ) { setResult({ loading: false, data: void 0, @@ -100,19 +108,21 @@ export function useSubscription<TData = any, TVariables extends OperationVariabl error: void 0, variables: options?.variables, }); - setObservable(client.subscribe({ - query: subscription, - variables: options?.variables, - fetchPolicy: options?.fetchPolicy, - context: options?.context, - })); + setObservable( + client.subscribe({ + query: subscription, + variables: options?.variables, + fetchPolicy: options?.fetchPolicy, + context: options?.context, + }) + ); canResetObservableRef.current = false; } Object.assign(ref.current, { client, subscription, options }); }, [client, subscription, options, canResetObservableRef.current]); - useEffect(() => { + React.useEffect(() => { if (!observable) { return; } @@ -137,12 +147,12 @@ export function useSubscription<TData = any, TVariables extends OperationVariabl if (ref.current.options?.onData) { ref.current.options.onData({ client, - data: result + data: result, }); } else if (ref.current.options?.onSubscriptionData) { ref.current.options.onSubscriptionData({ client, - subscriptionData: result + subscriptionData: result, }); } }, @@ -155,7 +165,7 @@ export function useSubscription<TData = any, TVariables extends OperationVariabl variables: options?.variables, }); ref.current.options?.onError?.(error); - }; + } }, complete() { if (!subscriptionStopped) { diff --git a/src/react/hooks/useSuspenseQuery.ts b/src/react/hooks/useSuspenseQuery.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/useSuspenseQuery.ts @@ -0,0 +1,375 @@ +import * as React from "react"; +import { invariant } from "../../utilities/globals/index.js"; +import type { + ApolloClient, + ApolloQueryResult, + DocumentNode, + OperationVariables, + TypedDocumentNode, + WatchQueryFetchPolicy, + FetchMoreQueryOptions, + WatchQueryOptions, +} from "../../core/index.js"; +import { ApolloError, NetworkStatus } from "../../core/index.js"; +import type { DeepPartial } from "../../utilities/index.js"; +import { isNonEmptyArray } 
from "../../utilities/index.js"; +import { useApolloClient } from "./useApolloClient.js"; +import { DocumentType, verifyDocumentType } from "../parser/index.js"; +import type { + SuspenseQueryHookOptions, + ObservableQueryFields, + NoInfer, +} from "../types/types.js"; +import { __use, useDeepMemo } from "./internal/index.js"; +import { getSuspenseCache } from "../cache/index.js"; +import { canonicalStringify } from "../../cache/index.js"; +import { skipToken, type SkipToken } from "./constants.js"; +import type { CacheKey } from "../cache/types.js"; + +export interface UseSuspenseQueryResult< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +> { + client: ApolloClient<any>; + data: TData; + error: ApolloError | undefined; + fetchMore: FetchMoreFunction<TData, TVariables>; + networkStatus: NetworkStatus; + refetch: RefetchFunction<TData, TVariables>; + subscribeToMore: SubscribeToMoreFunction<TData, TVariables>; +} + +export type FetchMoreFunction<TData, TVariables extends OperationVariables> = ( + fetchMoreOptions: FetchMoreQueryOptions<TVariables, TData> & { + updateQuery?: ( + previousQueryResult: TData, + options: { + fetchMoreResult: TData; + variables: TVariables; + } + ) => TData; + } +) => Promise<ApolloQueryResult<TData>>; + +export type RefetchFunction< + TData, + TVariables extends OperationVariables +> = ObservableQueryFields<TData, TVariables>["refetch"]; + +export type SubscribeToMoreFunction< + TData, + TVariables extends OperationVariables +> = ObservableQueryFields<TData, TVariables>["subscribeToMore"]; + +export function useSuspenseQuery< + TData, + TVariables extends OperationVariables, + TOptions extends Omit<SuspenseQueryHookOptions<TData>, "variables"> +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & + TOptions +): UseSuspenseQueryResult< + TOptions["errorPolicy"] extends "ignore" | "all" + ? TOptions["returnPartialData"] extends true + ? DeepPartial<TData> | undefined + : TData | undefined + : TOptions["returnPartialData"] extends true + ? TOptions["skip"] extends boolean + ? DeepPartial<TData> | undefined + : DeepPartial<TData> + : TOptions["skip"] extends boolean + ? 
TData | undefined + : TData, + TVariables +>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + returnPartialData: true; + errorPolicy: "ignore" | "all"; + } +): UseSuspenseQueryResult<DeepPartial<TData> | undefined, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + errorPolicy: "ignore" | "all"; + } +): UseSuspenseQueryResult<TData | undefined, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + skip: boolean; + returnPartialData: true; + } +): UseSuspenseQueryResult<DeepPartial<TData> | undefined, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + returnPartialData: true; + } +): UseSuspenseQueryResult<DeepPartial<TData>, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + skip: boolean; + } +): UseSuspenseQueryResult<TData | undefined, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> +): UseSuspenseQueryResult<TData, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: + | SkipToken + | (SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> & { + returnPartialData: true; + }) +): UseSuspenseQueryResult<DeepPartial<TData> | undefined, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options?: + | SkipToken + | SuspenseQueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> +): UseSuspenseQueryResult<TData | undefined, TVariables>; + +export function useSuspenseQuery< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: + | (SkipToken & Partial<SuspenseQueryHookOptions<TData, TVariables>>) + | SuspenseQueryHookOptions<TData, TVariables> = Object.create(null) +): UseSuspenseQueryResult<TData | undefined, TVariables> { + const client = useApolloClient(options.client); + const suspenseCache = getSuspenseCache(client); + const watchQueryOptions = useWatchQueryOptions({ client, query, options }); + const { fetchPolicy, variables } = watchQueryOptions; + const { queryKey = [] } = 
options; + + const cacheKey: CacheKey = [ + query, + canonicalStringify(variables), + ...([] as any[]).concat(queryKey), + ]; + + const queryRef = suspenseCache.getQueryRef(cacheKey, () => + client.watchQuery(watchQueryOptions) + ); + + const [promiseCache, setPromiseCache] = React.useState( + () => new Map([[queryRef.key, queryRef.promise]]) + ); + + let promise = promiseCache.get(queryRef.key); + + if (queryRef.didChangeOptions(watchQueryOptions)) { + promise = queryRef.applyOptions(watchQueryOptions); + promiseCache.set(queryRef.key, promise); + } + + if (!promise) { + promise = queryRef.promise; + promiseCache.set(queryRef.key, promise); + } + + React.useEffect(() => { + const dispose = queryRef.retain(); + + const removeListener = queryRef.listen((promise) => { + setPromiseCache((promiseCache) => + new Map(promiseCache).set(queryRef.key, promise) + ); + }); + + return () => { + removeListener(); + dispose(); + }; + }, [queryRef]); + + const skipResult = React.useMemo(() => { + const error = toApolloError(queryRef.result); + + return { + loading: false, + data: queryRef.result.data, + networkStatus: error ? NetworkStatus.error : NetworkStatus.ready, + error, + }; + }, [queryRef.result]); + + const result = fetchPolicy === "standby" ? skipResult : __use(promise); + + const fetchMore: FetchMoreFunction<TData, TVariables> = React.useCallback( + (options) => { + const promise = queryRef.fetchMore(options); + + setPromiseCache((previousPromiseCache) => + new Map(previousPromiseCache).set(queryRef.key, queryRef.promise) + ); + + return promise; + }, + [queryRef] + ); + + const refetch: RefetchFunction<TData, TVariables> = React.useCallback( + (variables) => { + const promise = queryRef.refetch(variables); + + setPromiseCache((previousPromiseCache) => + new Map(previousPromiseCache).set(queryRef.key, queryRef.promise) + ); + + return promise; + }, + [queryRef] + ); + + const subscribeToMore: SubscribeToMoreFunction<TData, TVariables> = + React.useCallback( + (options) => queryRef.observable.subscribeToMore(options), + [queryRef] + ); + + return React.useMemo(() => { + return { + client, + data: result.data, + error: toApolloError(result), + networkStatus: result.networkStatus, + fetchMore, + refetch, + subscribeToMore, + }; + }, [client, fetchMore, refetch, result, subscribeToMore]); +} + +function validateOptions(options: WatchQueryOptions) { + const { query, fetchPolicy, returnPartialData } = options; + + verifyDocumentType(query, DocumentType.Query); + validateFetchPolicy(fetchPolicy); + validatePartialDataReturn(fetchPolicy, returnPartialData); +} + +function validateFetchPolicy( + fetchPolicy: WatchQueryFetchPolicy = "cache-first" +) { + const supportedFetchPolicies: WatchQueryFetchPolicy[] = [ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", + ]; + + invariant( + supportedFetchPolicies.includes(fetchPolicy), + `The fetch policy \`%s\` is not supported with suspense.`, + fetchPolicy + ); +} + +function validatePartialDataReturn( + fetchPolicy: WatchQueryFetchPolicy | undefined, + returnPartialData: boolean | undefined +) { + if (fetchPolicy === "no-cache" && returnPartialData) { + invariant.warn( + "Using `returnPartialData` with a `no-cache` fetch policy has no effect. To read partial data from the cache, consider using an alternate fetch policy." + ); + } +} + +export function toApolloError(result: ApolloQueryResult<any>) { + return isNonEmptyArray(result.errors) + ? 
new ApolloError({ graphQLErrors: result.errors }) + : result.error; +} + +interface UseWatchQueryOptionsHookOptions< + TData, + TVariables extends OperationVariables +> { + client: ApolloClient<unknown>; + query: DocumentNode | TypedDocumentNode<TData, TVariables>; + options: SkipToken | SuspenseQueryHookOptions<TData, TVariables>; +} + +export function useWatchQueryOptions< + TData, + TVariables extends OperationVariables +>({ + client, + query, + options, +}: UseWatchQueryOptionsHookOptions<TData, TVariables>): WatchQueryOptions< + TVariables, + TData +> { + return useDeepMemo<WatchQueryOptions<TVariables, TData>>(() => { + if (options === skipToken) { + return { query, fetchPolicy: "standby" }; + } + + const fetchPolicy = + options.fetchPolicy || + client.defaultOptions.watchQuery?.fetchPolicy || + "cache-first"; + + const watchQueryOptions = { + ...options, + fetchPolicy, + query, + notifyOnNetworkStatusChange: false, + nextFetchPolicy: void 0, + }; + + if (__DEV__) { + validateOptions(watchQueryOptions); + } + + // Assign the updated fetch policy after our validation since `standby` is + // not a supported fetch policy on its own without the use of `skip`. + if (options.skip) { + watchQueryOptions.fetchPolicy = "standby"; + } + + return watchQueryOptions; + }, [client, options, query]); +} diff --git a/src/react/hooks/useSyncExternalStore.ts b/src/react/hooks/useSyncExternalStore.ts --- a/src/react/hooks/useSyncExternalStore.ts +++ b/src/react/hooks/useSyncExternalStore.ts @@ -1,7 +1,7 @@ -import { invariant } from '../../utilities/globals'; -import * as React from 'react'; +import { invariant } from "../../utilities/globals/index.js"; +import * as React from "react"; -import { canUseLayoutEffect } from '../../utilities'; +import { canUseLayoutEffect } from "../../utilities/index.js"; let didWarnUncachedGetSnapshot = false; @@ -22,96 +22,96 @@ const realHook = React[uSESKey] as RealUseSESHookType | undefined; // Apollo Client deviations called out by "// DEVIATION ..." comments. // When/if React.useSyncExternalStore is defined, delegate fully to it. -export const useSyncExternalStore: RealUseSESHookType = realHook || (( - subscribe, - getSnapshot, - getServerSnapshot, -) => { - // Read the current snapshot from the store on every render. Again, this - // breaks the rules of React, and only works here because of specific - // implementation details, most importantly that updates are - // always synchronous. - const value = getSnapshot(); - if ( - // DEVIATION: Using our own __DEV__ polyfill (from ../../utilities/globals). - __DEV__ && - !didWarnUncachedGetSnapshot && - // DEVIATION: Not using Object.is because we know our snapshots will never - // be exotic primitive values like NaN, which is !== itself. - value !== getSnapshot() - ) { - didWarnUncachedGetSnapshot = true; - // DEVIATION: Using invariant.error instead of console.error directly. - invariant.error( - 'The result of getSnapshot should be cached to avoid an infinite loop', - ); - } +export const useSyncExternalStore: RealUseSESHookType = + realHook || + ((subscribe, getSnapshot, getServerSnapshot) => { + // Read the current snapshot from the store on every render. Again, this + // breaks the rules of React, and only works here because of specific + // implementation details, most importantly that updates are + // always synchronous. 
+ const value = getSnapshot(); + if ( + // DEVIATION: Using __DEV__ + __DEV__ && + !didWarnUncachedGetSnapshot && + // DEVIATION: Not using Object.is because we know our snapshots will never + // be exotic primitive values like NaN, which is !== itself. + value !== getSnapshot() + ) { + didWarnUncachedGetSnapshot = true; + // DEVIATION: Using invariant.error instead of console.error directly. + invariant.error( + "The result of getSnapshot should be cached to avoid an infinite loop" + ); + } - // Because updates are synchronous, we don't queue them. Instead we force a - // re-render whenever the subscribed state changes by updating an some - // arbitrary useState hook. Then, during render, we call getSnapshot to read - // the current value. - // - // Because we don't actually use the state returned by the useState hook, we - // can save a bit of memory by storing other stuff in that slot. - // - // To implement the early bailout, we need to track some things on a mutable - // object. Usually, we would put that in a useRef hook, but we can stash it in - // our useState hook instead. - // - // To force a re-render, we call forceUpdate({inst}). That works because the - // new object always fails an equality check. - const [{inst}, forceUpdate] = React.useState({inst: {value, getSnapshot}}); + // Because updates are synchronous, we don't queue them. Instead we force a + // re-render whenever the subscribed state changes by updating an some + // arbitrary useState hook. Then, during render, we call getSnapshot to read + // the current value. + // + // Because we don't actually use the state returned by the useState hook, we + // can save a bit of memory by storing other stuff in that slot. + // + // To implement the early bailout, we need to track some things on a mutable + // object. Usually, we would put that in a useRef hook, but we can stash it in + // our useState hook instead. + // + // To force a re-render, we call forceUpdate({inst}). That works because the + // new object always fails an equality check. + const [{ inst }, forceUpdate] = React.useState({ + inst: { value, getSnapshot }, + }); - // Track the latest getSnapshot function with a ref. This needs to be updated - // in the layout phase so we can access it during the tearing check that - // happens on subscribe. - if (canUseLayoutEffect) { - // DEVIATION: We avoid calling useLayoutEffect when !canUseLayoutEffect, - // which may seem like a conditional hook, but this code ends up behaving - // unconditionally (one way or the other) because canUseLayoutEffect is - // constant. - React.useLayoutEffect(() => { + // Track the latest getSnapshot function with a ref. This needs to be updated + // in the layout phase so we can access it during the tearing check that + // happens on subscribe. + if (canUseLayoutEffect) { + // DEVIATION: We avoid calling useLayoutEffect when !canUseLayoutEffect, + // which may seem like a conditional hook, but this code ends up behaving + // unconditionally (one way or the other) because canUseLayoutEffect is + // constant. + React.useLayoutEffect(() => { + Object.assign(inst, { value, getSnapshot }); + // Whenever getSnapshot or subscribe changes, we need to check in the + // commit phase if there was an interleaved mutation. In concurrent mode + // this can happen all the time, but even in synchronous mode, an earlier + // effect may have mutated the store. + if (checkIfSnapshotChanged(inst)) { + // Force a re-render. 
+ forceUpdate({ inst }); + } + }, [subscribe, value, getSnapshot]); + } else { Object.assign(inst, { value, getSnapshot }); - // Whenever getSnapshot or subscribe changes, we need to check in the - // commit phase if there was an interleaved mutation. In concurrent mode - // this can happen all the time, but even in synchronous mode, an earlier - // effect may have mutated the store. - if (checkIfSnapshotChanged(inst)) { - // Force a re-render. - forceUpdate({inst}); - } - }, [subscribe, value, getSnapshot]); - } else { - Object.assign(inst, { value, getSnapshot }); - } - - React.useEffect(() => { - // Check for changes right before subscribing. Subsequent changes will be - // detected in the subscription handler. - if (checkIfSnapshotChanged(inst)) { - // Force a re-render. - forceUpdate({inst}); } - // Subscribe to the store and return a clean-up function. - return subscribe(function handleStoreChange() { - // TODO: Because there is no cross-renderer API for batching updates, it's - // up to the consumer of this library to wrap their subscription event - // with unstable_batchedUpdates. Should we try to detect when this isn't - // the case and print a warning in development? - - // The store changed. Check if the snapshot changed since the last time we - // read from the store. + React.useEffect(() => { + // Check for changes right before subscribing. Subsequent changes will be + // detected in the subscription handler. if (checkIfSnapshotChanged(inst)) { // Force a re-render. - forceUpdate({inst}); + forceUpdate({ inst }); } - }); - }, [subscribe]); - return value; -}); + // Subscribe to the store and return a clean-up function. + return subscribe(function handleStoreChange() { + // TODO: Because there is no cross-renderer API for batching updates, it's + // up to the consumer of this library to wrap their subscription event + // with unstable_batchedUpdates. Should we try to detect when this isn't + // the case and print a warning in development? + + // The store changed. Check if the snapshot changed since the last time we + // read from the store. + if (checkIfSnapshotChanged(inst)) { + // Force a re-render. 
+ forceUpdate({ inst }); + } + }); + }, [subscribe]); + + return value; + }); function checkIfSnapshotChanged<Snapshot>({ value, diff --git a/src/react/index.ts b/src/react/index.ts --- a/src/react/index.ts +++ b/src/react/index.ts @@ -1,20 +1,18 @@ -import '../utilities/globals'; +import "../utilities/globals/index.js"; +export type { ApolloContextValue } from "./context/index.js"; export { ApolloProvider, ApolloConsumer, getApolloContext, resetApolloContext, - ApolloContextValue -} from './context'; +} from "./context/index.js"; -export * from './hooks'; +export * from "./hooks/index.js"; +// TODO: remove export with release 3.8 +export { SuspenseCache } from "./cache/index.js"; -export { - DocumentType, - IDocumentDefinition, - operationName, - parser -} from './parser'; +export type { IDocumentDefinition } from "./parser/index.js"; +export { DocumentType, operationName, parser } from "./parser/index.js"; -export * from './types/types'; +export * from "./types/types.js"; diff --git a/src/react/parser/index.ts b/src/react/parser/index.ts --- a/src/react/parser/index.ts +++ b/src/react/parser/index.ts @@ -1,16 +1,16 @@ -import { invariant } from '../../utilities/globals'; +import { invariant } from "../../utilities/globals/index.js"; -import { +import type { DocumentNode, DefinitionNode, VariableDefinitionNode, - OperationDefinitionNode -} from 'graphql'; + OperationDefinitionNode, +} from "graphql"; export enum DocumentType { Query, Mutation, - Subscription + Subscription, } export interface IDocumentDefinition { @@ -25,13 +25,13 @@ export function operationName(type: DocumentType) { let name; switch (type) { case DocumentType.Query: - name = 'Query'; + name = "Query"; break; case DocumentType.Mutation: - name = 'Mutation'; + name = "Mutation"; break; case DocumentType.Subscription: - name = 'Subscription'; + name = "Subscription"; break; } return name; @@ -46,31 +46,32 @@ export function parser(document: DocumentNode): IDocumentDefinition { invariant( !!document && !!document.kind, - `Argument of ${document} passed to parser was not a valid GraphQL ` + + `Argument of %s passed to parser was not a valid GraphQL ` + `DocumentNode. You may need to use 'graphql-tag' or another method ` + - `to convert your operation into a document` + `to convert your operation into a document`, + document ); - const fragments: DefinitionNode[] = [] - const queries: DefinitionNode[] = [] - const mutations: DefinitionNode[] = [] - const subscriptions: DefinitionNode[] = [] + const fragments: DefinitionNode[] = []; + const queries: DefinitionNode[] = []; + const mutations: DefinitionNode[] = []; + const subscriptions: DefinitionNode[] = []; for (const x of document.definitions) { - if (x.kind === 'FragmentDefinition') { + if (x.kind === "FragmentDefinition") { fragments.push(x); - continue + continue; } - if (x.kind === 'OperationDefinition') { + if (x.kind === "OperationDefinition") { switch (x.operation) { - case 'query': + case "query": queries.push(x); break; - case 'mutation': + case "mutation": mutations.push(x); break; - case 'subscription': + case "subscription": subscriptions.push(x); break; } @@ -79,7 +80,9 @@ export function parser(document: DocumentNode): IDocumentDefinition { invariant( !fragments.length || - (queries.length || mutations.length || subscriptions.length), + queries.length || + mutations.length || + subscriptions.length, `Passing only a fragment to 'graphql' is not yet supported. 
` + `You must include a query, subscription or mutation as well` ); @@ -87,9 +90,13 @@ export function parser(document: DocumentNode): IDocumentDefinition { invariant( queries.length + mutations.length + subscriptions.length <= 1, `react-apollo only supports a query, subscription, or a mutation per HOC. ` + - `${document} had ${queries.length} queries, ${subscriptions.length} ` + - `subscriptions and ${mutations.length} mutations. ` + - `You can use 'compose' to join multiple operation types to a component` + `%s had %s queries, %s ` + + `subscriptions and %s mutations. ` + + `You can use 'compose' to join multiple operation types to a component`, + document, + queries.length, + subscriptions.length, + mutations.length ); type = queries.length ? DocumentType.Query : DocumentType.Mutation; @@ -103,18 +110,20 @@ export function parser(document: DocumentNode): IDocumentDefinition { invariant( definitions.length === 1, - `react-apollo only supports one definition per HOC. ${document} had ` + - `${definitions.length} definitions. ` + - `You can use 'compose' to join multiple operation types to a component` + `react-apollo only supports one definition per HOC. %s had ` + + `%s definitions. ` + + `You can use 'compose' to join multiple operation types to a component`, + document, + definitions.length ); const definition = definitions[0] as OperationDefinitionNode; variables = definition.variableDefinitions || []; - if (definition.name && definition.name.kind === 'Name') { + if (definition.name && definition.name.kind === "Name") { name = definition.name.value; } else { - name = 'data'; // fallback to using data if no name + name = "data"; // fallback to using data if no name } const payload = { name, type, variables }; @@ -128,8 +137,9 @@ export function verifyDocumentType(document: DocumentNode, type: DocumentType) { const usedOperationName = operationName(operation.type); invariant( operation.type === type, - `Running a ${requiredOperationName} requires a graphql ` + - `${requiredOperationName}, but a ${usedOperationName} was used instead.` + `Running a %s requires a graphql ` + `%s, but a %s was used instead.`, + requiredOperationName, + requiredOperationName, + usedOperationName ); } - diff --git a/src/react/ssr/RenderPromises.ts b/src/react/ssr/RenderPromises.ts --- a/src/react/ssr/RenderPromises.ts +++ b/src/react/ssr/RenderPromises.ts @@ -1,7 +1,7 @@ -import { DocumentNode } from 'graphql'; +import type { DocumentNode } from "graphql"; -import { ObservableQuery, OperationVariables } from '../../core'; -import { QueryDataOptions } from '../types/types'; +import type { ObservableQuery, OperationVariables } from "../../core/index.js"; +import type { QueryDataOptions } from "../types/types.js"; // TODO: A vestigial interface from when hooks were implemented with utility // classes, which should be deleted in the future. @@ -18,7 +18,7 @@ type QueryInfo = { function makeDefaultQueryInfo(): QueryInfo { return { seen: false, - observable: null + observable: null, }; } @@ -43,7 +43,7 @@ export class RenderPromises { // Registers the server side rendered observable. 
public registerSSRObservable<TData, TVariables extends OperationVariables>( - observable: ObservableQuery<any, TVariables>, + observable: ObservableQuery<any, TVariables> ) { if (this.stopped) return; this.lookupQueryInfo(observable.options).observable = observable; @@ -58,14 +58,14 @@ export class RenderPromises { public addQueryPromise( queryInstance: QueryData, - finish?: () => React.ReactNode, + finish?: () => React.ReactNode ): React.ReactNode { if (!this.stopped) { const info = this.lookupQueryInfo(queryInstance.getOptions()); if (!info.seen) { this.queryPromises.set( queryInstance.getOptions(), - new Promise(resolve => { + new Promise((resolve) => { resolve(queryInstance.fetchData()); }) ); @@ -77,30 +77,32 @@ export class RenderPromises { return finish ? finish() : null; } - public addObservableQueryPromise<TData, TVariables extends OperationVariables>( - obsQuery: ObservableQuery<TData, TVariables>, - ) { + public addObservableQueryPromise< + TData, + TVariables extends OperationVariables, + >(obsQuery: ObservableQuery<TData, TVariables>) { return this.addQueryPromise({ // The only options which seem to actually be used by the // RenderPromises class are query and variables. getOptions: () => obsQuery.options, - fetchData: () => new Promise<void>((resolve) => { - const sub = obsQuery.subscribe({ - next(result) { - if (!result.loading) { - resolve() + fetchData: () => + new Promise<void>((resolve) => { + const sub = obsQuery.subscribe({ + next(result) { + if (!result.loading) { + resolve(); + sub.unsubscribe(); + } + }, + error() { + resolve(); sub.unsubscribe(); - } - }, - error() { - resolve(); - sub.unsubscribe(); - }, - complete() { - resolve(); - }, - }); - }), + }, + complete() { + resolve(); + }, + }); + }), }); } diff --git a/src/react/ssr/getDataFromTree.ts b/src/react/ssr/getDataFromTree.ts --- a/src/react/ssr/getDataFromTree.ts +++ b/src/react/ssr/getDataFromTree.ts @@ -1,7 +1,7 @@ -import * as React from 'react'; -import { getApolloContext } from '../context'; -import { RenderPromises } from './RenderPromises'; -import { renderToStaticMarkup } from 'react-dom/server'; +import * as React from "react"; +import { getApolloContext } from "../context/index.js"; +import { RenderPromises } from "./RenderPromises.js"; +import { renderToStaticMarkup } from "react-dom/server"; export function getDataFromTree( tree: React.ReactNode, @@ -12,7 +12,7 @@ export function getDataFromTree( context, // If you need to configure this renderFunction, call getMarkupFromTree // directly instead of getDataFromTree. - renderFunction: renderToStaticMarkup + renderFunction: renderToStaticMarkup, }); } @@ -20,7 +20,7 @@ export type GetMarkupFromTreeOptions = { tree: React.ReactNode; context?: { [key: string]: any }; renderFunction?: ( - tree: React.ReactElement<any>, + tree: React.ReactElement<any> ) => string | PromiseLike<string>; }; @@ -30,7 +30,7 @@ export function getMarkupFromTree({ // The rendering function is configurable! We use renderToStaticMarkup as // the default, because it's a little less expensive than renderToString, // and legacy usage of getDataFromTree ignores the return value anyway. - renderFunction = renderToStaticMarkup + renderFunction = renderToStaticMarkup, }: GetMarkupFromTreeOptions): Promise<string> { const renderPromises = new RenderPromises(); @@ -42,20 +42,22 @@ export function getMarkupFromTree({ // elements) for a subtree of the original component tree. 
const ApolloContext = getApolloContext(); - return new Promise<string>(resolve => { + return new Promise<string>((resolve) => { const element = React.createElement( ApolloContext.Provider, - { value: { ...context, renderPromises }}, - tree, + { value: { ...context, renderPromises } }, + tree ); resolve(renderFunction(element)); - }).then(html => { - return renderPromises.hasPromises() - ? renderPromises.consumeAndAwaitPromises().then(process) - : html; - }).finally(() => { - renderPromises.stop(); - }); + }) + .then((html) => { + return renderPromises.hasPromises() + ? renderPromises.consumeAndAwaitPromises().then(process) + : html; + }) + .finally(() => { + renderPromises.stop(); + }); } return Promise.resolve().then(process); diff --git a/src/react/ssr/index.ts b/src/react/ssr/index.ts --- a/src/react/ssr/index.ts +++ b/src/react/ssr/index.ts @@ -1,3 +1,3 @@ -export { getMarkupFromTree, getDataFromTree } from './getDataFromTree'; -export { renderToStringWithData } from './renderToStringWithData'; -export { RenderPromises } from './RenderPromises'; +export { getMarkupFromTree, getDataFromTree } from "./getDataFromTree.js"; +export { renderToStringWithData } from "./renderToStringWithData.js"; +export { RenderPromises } from "./RenderPromises.js"; diff --git a/src/react/ssr/renderToStringWithData.ts b/src/react/ssr/renderToStringWithData.ts --- a/src/react/ssr/renderToStringWithData.ts +++ b/src/react/ssr/renderToStringWithData.ts @@ -1,12 +1,12 @@ -import { ReactElement } from 'react'; -import { getMarkupFromTree } from './getDataFromTree'; -import { renderToString } from 'react-dom/server'; +import type { ReactElement } from "react"; +import { getMarkupFromTree } from "./getDataFromTree.js"; +import { renderToString } from "react-dom/server"; export function renderToStringWithData( component: ReactElement<any> ): Promise<string> { return getMarkupFromTree({ tree: component, - renderFunction: renderToString + renderFunction: renderToString, }); } diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -1,11 +1,14 @@ -import { ReactNode } from 'react'; -import { DocumentNode } from 'graphql'; -import { TypedDocumentNode } from '@graphql-typed-document-node/core'; - -import { Observable, ObservableSubscription } from '../../utilities'; -import { FetchResult } from '../../link/core'; -import { ApolloError } from '../../errors'; -import { +import type { ReactNode } from "react"; +import type { DocumentNode } from "graphql"; +import type { TypedDocumentNode } from "@graphql-typed-document-node/core"; + +import type { + Observable, + ObservableSubscription, +} from "../../utilities/index.js"; +import type { FetchResult } from "../../link/core/index.js"; +import type { ApolloError } from "../../errors/index.js"; +import type { ApolloCache, ApolloClient, DefaultContext, @@ -16,11 +19,17 @@ import { OperationVariables, InternalRefetchQueriesInclude, WatchQueryOptions, -} from '../../core'; + WatchQueryFetchPolicy, +} from "../../core/index.js"; +import type { SuspenseCache } from "../cache/index.js"; + +/* QueryReference type */ + +export type { QueryReference } from "../cache/QueryReference.js"; /* Common types */ -export type { DefaultContext as Context } from "../../core"; +export type { DefaultContext as Context } from "../../core/index.js"; export type CommonOptions<TOptions> = TOptions & { client?: ApolloClient<object>; @@ -28,8 +37,9 @@ export type CommonOptions<TOptions> = TOptions & { /* Query types */ -export 
interface BaseQueryOptions<TVariables extends OperationVariables = OperationVariables> -extends Omit<WatchQueryOptions<TVariables>, "query"> { +export interface BaseQueryOptions< + TVariables extends OperationVariables = OperationVariables +> extends Omit<WatchQueryOptions<TVariables>, "query"> { ssr?: boolean; client?: ApolloClient<any>; context?: DefaultContext; @@ -51,20 +61,25 @@ export interface QueryFunctionOptions< defaultOptions?: Partial<WatchQueryOptions<TVariables, TData>>; } -export type ObservableQueryFields<TData, TVariables extends OperationVariables> = Pick< +export type ObservableQueryFields< + TData, + TVariables extends OperationVariables +> = Pick< ObservableQuery<TData, TVariables>, - | 'startPolling' - | 'stopPolling' - | 'subscribeToMore' - | 'updateQuery' - | 'refetch' - | 'reobserve' - | 'variables' - | 'fetchMore' + | "startPolling" + | "stopPolling" + | "subscribeToMore" + | "updateQuery" + | "refetch" + | "reobserve" + | "variables" + | "fetchMore" >; -export interface QueryResult<TData = any, TVariables extends OperationVariables = OperationVariables> - extends ObservableQueryFields<TData, TVariables> { +export interface QueryResult< + TData = any, + TVariables extends OperationVariables = OperationVariables +> extends ObservableQueryFields<TData, TVariables> { client: ApolloClient<any>; observable: ObservableQuery<TData, TVariables>; data: TData | undefined; @@ -75,21 +90,106 @@ export interface QueryResult<TData = any, TVariables extends OperationVariables called: boolean; } -export interface QueryDataOptions<TData = any, TVariables extends OperationVariables = OperationVariables> - extends QueryFunctionOptions<TData, TVariables> { +export interface QueryDataOptions< + TData = any, + TVariables extends OperationVariables = OperationVariables +> extends QueryFunctionOptions<TData, TVariables> { children?: (result: QueryResult<TData, TVariables>) => ReactNode; query: DocumentNode | TypedDocumentNode<TData, TVariables>; } -export interface QueryHookOptions<TData = any, TVariables extends OperationVariables = OperationVariables> - extends QueryFunctionOptions<TData, TVariables> { - query?: DocumentNode | TypedDocumentNode<TData, TVariables>; -} +export interface QueryHookOptions< + TData = any, + TVariables extends OperationVariables = OperationVariables +> extends QueryFunctionOptions<TData, TVariables> {} export interface LazyQueryHookOptions< TData = any, TVariables extends OperationVariables = OperationVariables -> extends Omit<QueryHookOptions<TData, TVariables>, 'skip'> {} +> extends Omit<QueryHookOptions<TData, TVariables>, "skip"> {} + +export interface LazyQueryHookExecOptions< + TData = any, + TVariables extends OperationVariables = OperationVariables +> extends LazyQueryHookOptions<TData, TVariables> { + query?: DocumentNode | TypedDocumentNode<TData, TVariables>; +} + +export type SuspenseQueryHookFetchPolicy = Extract< + WatchQueryFetchPolicy, + "cache-first" | "network-only" | "no-cache" | "cache-and-network" +>; + +export interface SuspenseQueryHookOptions< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +> extends Pick< + QueryHookOptions<TData, TVariables>, + | "client" + | "variables" + | "errorPolicy" + | "context" + | "canonizeResults" + | "returnPartialData" + | "refetchWritePolicy" + > { + fetchPolicy?: SuspenseQueryHookFetchPolicy; + suspenseCache?: SuspenseCache; + queryKey?: string | number | any[]; + + /** + * If `true`, the query is not executed. The default value is `false`. 
+ * + * @deprecated We recommend using `skipToken` in place of the `skip` option as + * it is more type-safe. + * + * @example Recommended usage of `skipToken`: + * ```ts + * import { skipToken, useSuspenseQuery } from '@apollo/client'; + * + * const { data } = useSuspenseQuery(query, id ? { variables: { id } } : skipToken); + * ``` + */ + skip?: boolean; +} + +export type BackgroundQueryHookFetchPolicy = Extract< + WatchQueryFetchPolicy, + "cache-first" | "network-only" | "no-cache" | "cache-and-network" +>; + +export interface BackgroundQueryHookOptions< + TData = unknown, + TVariables extends OperationVariables = OperationVariables +> extends Pick< + QueryHookOptions<TData, TVariables>, + | "client" + | "variables" + | "errorPolicy" + | "context" + | "canonizeResults" + | "returnPartialData" + | "refetchWritePolicy" + > { + fetchPolicy?: BackgroundQueryHookFetchPolicy; + suspenseCache?: SuspenseCache; + queryKey?: string | number | any[]; + + /** + * If `true`, the query is not executed. The default value is `false`. + * + * @deprecated We recommend using `skipToken` in place of the `skip` option as + * it is more type-safe. + * + * @example Recommended usage of `skipToken`: + * ```ts + * import { skipToken, useBackgroundQuery } from '@apollo/client'; + * + * const [queryRef] = useBackgroundQuery(query, id ? { variables: { id } } : skipToken); + * ``` + */ + skip?: boolean; +} /** * @deprecated TODO Delete this unused interface. @@ -102,22 +202,30 @@ export interface QueryLazyOptions<TVariables> { /** * @deprecated TODO Delete this unused type alias. */ -export type LazyQueryResult<TData, TVariables extends OperationVariables> = QueryResult<TData, TVariables>; +export type LazyQueryResult< + TData, + TVariables extends OperationVariables +> = QueryResult<TData, TVariables>; /** * @deprecated TODO Delete this unused type alias. 
*/ -export type QueryTuple<TData, TVariables extends OperationVariables> = - LazyQueryResultTuple<TData, TVariables>; +export type QueryTuple< + TData, + TVariables extends OperationVariables +> = LazyQueryResultTuple<TData, TVariables>; -export type LazyQueryExecFunction<TData, TVariables extends OperationVariables> = ( - options?: Partial<LazyQueryHookOptions<TData, TVariables>>, +export type LazyQueryExecFunction< + TData, + TVariables extends OperationVariables +> = ( + options?: Partial<LazyQueryHookExecOptions<TData, TVariables>> ) => Promise<QueryResult<TData, TVariables>>; -export type LazyQueryResultTuple<TData, TVariables extends OperationVariables> = [ - LazyQueryExecFunction<TData, TVariables>, - QueryResult<TData, TVariables>, -]; +export type LazyQueryResultTuple< + TData, + TVariables extends OperationVariables +> = [LazyQueryExecFunction<TData, TVariables>, QueryResult<TData, TVariables>]; /* Mutation types */ @@ -131,9 +239,9 @@ export interface BaseMutationOptions< TContext = DefaultContext, TCache extends ApolloCache<any> = ApolloCache<any> > extends Omit< - MutationOptions<TData, TVariables, TContext, TCache>, - | "mutation" -> { + MutationOptions<TData, TVariables, TContext, TCache>, + "mutation" + > { client?: ApolloClient<object>; notifyOnNetworkStatusChange?: boolean; onCompleted?: (data: TData, clientOptions?: BaseMutationOptions) => void; @@ -145,7 +253,7 @@ export interface MutationFunctionOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache<any> = ApolloCache<any> > extends BaseMutationOptions<TData, TVariables, TContext, TCache> { mutation?: DocumentNode | TypedDocumentNode<TData, TVariables>; } @@ -163,7 +271,7 @@ export declare type MutationFunction< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache<any> = ApolloCache<any> > = ( options?: MutationFunctionOptions<TData, TVariables, TContext, TCache> ) => Promise<FetchResult<TData>>; @@ -172,16 +280,14 @@ export interface MutationHookOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, -> extends BaseMutationOptions<TData, TVariables, TContext, TCache> { - mutation?: DocumentNode | TypedDocumentNode<TData, TVariables>; -} + TCache extends ApolloCache<any> = ApolloCache<any> +> extends BaseMutationOptions<TData, TVariables, TContext, TCache> {} export interface MutationDataOptions< TData = any, TVariables = OperationVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache<any> = ApolloCache<any> > extends BaseMutationOptions<TData, TVariables, TContext, TCache> { mutation: DocumentNode | TypedDocumentNode<TData, TVariables>; } @@ -190,14 +296,14 @@ export type MutationTuple< TData, TVariables, TContext = DefaultContext, - TCache extends ApolloCache<any> = ApolloCache<any>, + TCache extends ApolloCache<any> = ApolloCache<any> > = [ ( options?: MutationFunctionOptions<TData, TVariables, TContext, TCache> // TODO This FetchResult<TData> seems strange here, as opposed to an // ApolloQueryResult<TData> ) => Promise<FetchResult<TData>>, - MutationResult<TData>, + MutationResult<TData> ]; /* Subscription types */ @@ -227,13 +333,13 @@ export interface BaseSubscriptionOptions< onComplete?: () => void; onData?: (options: OnDataOptions<TData>) => any; /** - * 
@deprecated Use onData instead - */ + * @deprecated Use onData instead + */ onSubscriptionData?: (options: OnSubscriptionDataOptions<TData>) => any; onError?: (error: ApolloError) => void; /** - * @deprecated Use onComplete instead - */ + * @deprecated Use onComplete instead + */ onSubscriptionComplete?: () => void; } @@ -249,9 +355,7 @@ export interface SubscriptionResult<TData = any, TVariables = any> { export interface SubscriptionHookOptions< TData = any, TVariables extends OperationVariables = OperationVariables -> extends BaseSubscriptionOptions<TData, TVariables> { - subscription?: DocumentNode | TypedDocumentNode<TData, TVariables>; -} +> extends BaseSubscriptionOptions<TData, TVariables> {} export interface SubscriptionDataOptions< TData = any, @@ -265,3 +369,30 @@ export interface SubscriptionCurrentObservable { query?: Observable<any>; subscription?: ObservableSubscription; } + +/** +Helper type that allows using a type in a way that cannot be "widened" by inference on the value it is used on. + +This type was first suggested [in this Github discussion](https://github.com/microsoft/TypeScript/issues/14829#issuecomment-504042546). + +Example usage: +```ts +export function useQuery< + TData = any, + TVariables extends OperationVariables = OperationVariables, +>( + query: DocumentNode | TypedDocumentNode<TData, TVariables>, + options: QueryHookOptions<NoInfer<TData>, NoInfer<TVariables>> = Object.create(null), +) +``` +In this case, `TData` and `TVariables` should be inferred from `query`, but never widened from something in `options`. + +So, in this code example: +```ts +declare const typedNode: TypedDocumentNode<{ foo: string}, { bar: number }> +const { variables } = useQuery(typedNode, { variables: { bar: 4, nonExistingVariable: "string" } }); +``` +Without the use of `NoInfer`, `variables` would now be of the type `{ bar: number, nonExistingVariable: "string" }`. +With `NoInfer`, it will instead give an error on `nonExistingVariable`. + */ +export type NoInfer<T> = [T][T extends any ? 
0 : never]; diff --git a/src/utilities/common/canUse.ts b/src/utilities/common/canUse.ts --- a/src/utilities/common/canUse.ts +++ b/src/utilities/common/canUse.ts @@ -1,4 +1,4 @@ -import { maybe } from "../globals"; +import { maybe } from "../globals/index.js"; export const canUseWeakMap = typeof WeakMap === 'function' && diff --git a/src/utilities/common/compact.ts b/src/utilities/common/compact.ts --- a/src/utilities/common/compact.ts +++ b/src/utilities/common/compact.ts @@ -1,4 +1,4 @@ -import { TupleToIntersection } from './mergeDeep'; +import type { TupleToIntersection } from './mergeDeep.js'; /** * Merges the provided objects shallowly and removes diff --git a/src/utilities/common/errorHandling.ts b/src/utilities/common/errorHandling.ts --- a/src/utilities/common/errorHandling.ts +++ b/src/utilities/common/errorHandling.ts @@ -1,6 +1,6 @@ -import { FetchResult } from "../../link/core"; -import { isNonEmptyArray } from "../../utilities/common/arrays"; -import { isExecutionPatchIncrementalResult } from "../../utilities/common/incrementalResult"; +import type { FetchResult } from "../../link/core/index.js"; +import { isNonEmptyArray } from "./arrays.js"; +import { isExecutionPatchIncrementalResult } from "./incrementalResult.js"; export function graphQLResultHasError<T>(result: FetchResult<T>): boolean { const errors = getGraphQLErrorsFromResult(result); diff --git a/src/utilities/common/incrementalResult.ts b/src/utilities/common/incrementalResult.ts --- a/src/utilities/common/incrementalResult.ts +++ b/src/utilities/common/incrementalResult.ts @@ -1,13 +1,13 @@ -import { +import type { ExecutionPatchIncrementalResult, ExecutionPatchInitialResult, ExecutionPatchResult, ApolloPayloadResult, FetchResult, -} from "../../link/core"; -import { isNonNullObject } from "./objects"; -import { isNonEmptyArray } from "./arrays"; -import { DeepMerger } from "./mergeDeep"; +} from "../../link/core/index.js"; +import { isNonNullObject } from "./objects.js"; +import { isNonEmptyArray } from "./arrays.js"; +import { DeepMerger } from "./mergeDeep.js"; export function isExecutionPatchIncrementalResult<T>( value: FetchResult<T> diff --git a/src/utilities/common/maybeDeepFreeze.ts b/src/utilities/common/maybeDeepFreeze.ts --- a/src/utilities/common/maybeDeepFreeze.ts +++ b/src/utilities/common/maybeDeepFreeze.ts @@ -1,5 +1,4 @@ -import '../globals'; // For __DEV__ -import { isNonNullObject } from './objects'; +import { isNonNullObject } from './objects.js'; function deepFreeze(value: any) { const workSet = new Set([value]); diff --git a/src/utilities/common/mergeDeep.ts b/src/utilities/common/mergeDeep.ts --- a/src/utilities/common/mergeDeep.ts +++ b/src/utilities/common/mergeDeep.ts @@ -1,4 +1,4 @@ -import { isNonNullObject } from "./objects"; +import { isNonNullObject } from "./objects.js"; const { hasOwnProperty } = Object.prototype; diff --git a/src/utilities/common/mergeOptions.ts b/src/utilities/common/mergeOptions.ts --- a/src/utilities/common/mergeOptions.ts +++ b/src/utilities/common/mergeOptions.ts @@ -3,9 +3,9 @@ import type { WatchQueryOptions, MutationOptions, OperationVariables, -} from "../../core"; +} from "../../core/index.js"; -import { compact } from "./compact"; +import { compact } from "./compact.js"; type OptionsUnion<TData, TVariables extends OperationVariables, TContext> = | WatchQueryOptions<TVariables, TData> @@ -13,15 +13,16 @@ type OptionsUnion<TData, TVariables extends OperationVariables, TContext> = | MutationOptions<TData, TVariables, TContext>; export function 
mergeOptions< - TOptions extends Partial<OptionsUnion<any, any, any>> + TDefaultOptions extends Partial<OptionsUnion<any, any, any>>, + TOptions extends TDefaultOptions >( - defaults: TOptions | Partial<TOptions> | undefined, + defaults: TDefaultOptions | Partial<TDefaultOptions> | undefined, options: TOptions | Partial<TOptions>, -): TOptions { +): TOptions & TDefaultOptions { return compact(defaults, options, options.variables && { - variables: { + variables: compact({ ...(defaults && defaults.variables), ...options.variables, - }, + }), }); } diff --git a/src/utilities/common/objects.ts b/src/utilities/common/objects.ts --- a/src/utilities/common/objects.ts +++ b/src/utilities/common/objects.ts @@ -1,3 +1,12 @@ export function isNonNullObject(obj: any): obj is Record<string | number, any> { return obj !== null && typeof obj === 'object'; } + +export function isPlainObject(obj: any): obj is Record<string | number, any> { + return ( + obj !== null && + typeof obj === 'object' && + (Object.getPrototypeOf(obj) === Object.prototype || + Object.getPrototypeOf(obj) === null) + ); +} diff --git a/src/utilities/common/omitDeep.ts b/src/utilities/common/omitDeep.ts new file mode 100644 --- /dev/null +++ b/src/utilities/common/omitDeep.ts @@ -0,0 +1,55 @@ +import type { DeepOmit } from "../types/DeepOmit.js"; +import { isPlainObject } from "./objects.js"; + +export function omitDeep<T, K extends string>(value: T, key: K) { + return __omitDeep(value, key); +} + +function __omitDeep<T, K extends string>( + value: T, + key: K, + known = new Map<any, any>() +): DeepOmit<T, K> { + if (known.has(value)) { + return known.get(value); + } + + let modified = false; + + if (Array.isArray(value)) { + const array: any[] = []; + known.set(value, array); + + value.forEach((value, index) => { + const result = __omitDeep(value, key, known); + modified ||= result !== value; + + array[index] = result; + }); + + if (modified) { + return array as DeepOmit<T, K>; + } + } else if (isPlainObject(value)) { + const obj = Object.create(Object.getPrototypeOf(value)); + known.set(value, obj); + + Object.keys(value).forEach((k) => { + if (k === key) { + modified = true; + return; + } + + const result = __omitDeep(value[k], key, known); + modified ||= result !== value[k]; + + obj[k] = result; + }); + + if (modified) { + return obj; + } + } + + return value as DeepOmit<T, K>; +} diff --git a/src/utilities/common/responseIterator.ts b/src/utilities/common/responseIterator.ts --- a/src/utilities/common/responseIterator.ts +++ b/src/utilities/common/responseIterator.ts @@ -1,6 +1,6 @@ -import { Response as NodeResponse } from "node-fetch"; -import { Readable as NodeReadableStream } from "stream"; -import { canUseAsyncIteratorSymbol } from "./canUse"; +import type { Response as NodeResponse } from "node-fetch"; +import type { Readable as NodeReadableStream } from "stream"; +import { canUseAsyncIteratorSymbol } from "./canUse.js"; export function isNodeResponse(value: any): value is NodeResponse { return !!(value as NodeResponse).body; diff --git a/src/utilities/common/stringifyForDisplay.ts b/src/utilities/common/stringifyForDisplay.ts --- a/src/utilities/common/stringifyForDisplay.ts +++ b/src/utilities/common/stringifyForDisplay.ts @@ -1,8 +1,8 @@ -import { makeUniqueId } from "./makeUniqueId"; +import { makeUniqueId } from "./makeUniqueId.js"; -export function stringifyForDisplay(value: any): string { +export function stringifyForDisplay(value: any, space = 0): string { const undefId = makeUniqueId("stringifyForDisplay"); return 
JSON.stringify(value, (key, value) => { return value === void 0 ? undefId : value; - }).split(JSON.stringify(undefId)).join("<undefined>"); + }, space).split(JSON.stringify(undefId)).join("<undefined>"); } diff --git a/src/utilities/common/stripTypename.ts b/src/utilities/common/stripTypename.ts new file mode 100644 --- /dev/null +++ b/src/utilities/common/stripTypename.ts @@ -0,0 +1,5 @@ +import { omitDeep } from "./omitDeep.js"; + +export function stripTypename<T>(value: T) { + return omitDeep(value, "__typename"); +} diff --git a/src/utilities/globals/DEV.ts b/src/utilities/globals/DEV.ts deleted file mode 100644 --- a/src/utilities/globals/DEV.ts +++ /dev/null @@ -1,29 +0,0 @@ -import global from "./global"; -import { maybe } from "./maybe"; - -// To keep string-based find/replace minifiers from messing with __DEV__ inside -// string literals or properties like global.__DEV__, we construct the "__DEV__" -// string in a roundabout way that won't be altered by find/replace strategies. -const __ = "__"; -const GLOBAL_KEY = [__, __].join("DEV"); - -function getDEV() { - try { - return Boolean(__DEV__); - } catch { - Object.defineProperty(global, GLOBAL_KEY, { - // In a buildless browser environment, maybe(() => process.env.NODE_ENV) - // evaluates as undefined, so __DEV__ becomes true by default, but can be - // initialized to false instead by a script/module that runs earlier. - value: maybe(() => process.env.NODE_ENV) !== "production", - enumerable: false, - configurable: true, - writable: true, - }); - // Using computed property access rather than global.__DEV__ here prevents - // string-based find/replace strategies from munging this to global.false: - return (global as any)[GLOBAL_KEY]; - } -} - -export default getDEV(); diff --git a/src/utilities/globals/fix-graphql.ts b/src/utilities/globals/fix-graphql.ts deleted file mode 100644 --- a/src/utilities/globals/fix-graphql.ts +++ /dev/null @@ -1,14 +0,0 @@ -// The ordering of these imports is important, because it ensures the temporary -// process.env.NODE_ENV polyfill is defined globally (if necessary) before we -// import { Source } from 'graphql'. The instanceOf function that we really care -// about (the one that uses process.env.NODE_ENV) is not exported from the -// top-level graphql package, but graphql/language/source uses instanceOf, and -// has relatively few dependencies, so importing it here should not increase -// bundle sizes as much as other options. -import { remove } from 'ts-invariant/process'; -import { Source } from 'graphql'; - -export function removeTemporaryGlobals() { - // Using Source here here just to make sure it won't be tree-shaken away. - return typeof Source === "function" ? remove() : remove(); -} diff --git a/src/utilities/globals/global.ts b/src/utilities/globals/global.ts --- a/src/utilities/globals/global.ts +++ b/src/utilities/globals/global.ts @@ -1,23 +1,10 @@ -import { maybe } from "./maybe"; +import { maybe } from "./maybe.js"; declare global { - // Despite our attempts to reuse the React Native __DEV__ constant instead of - // inventing something new and Apollo-specific, declaring a useful type for - // __DEV__ unfortunately conflicts (TS2451) with the global declaration in - // @types/react-native/index.d.ts. - // - // To hide that harmless conflict, we @ts-ignore this line, which should - // continue to provide a type for __DEV__ elsewhere in the Apollo Client - // codebase, even when @types/react-native is not in use. 
- // - // However, because TypeScript drops @ts-ignore comments when generating .d.ts - // files (https://github.com/microsoft/TypeScript/issues/38628), we also - // sanitize the dist/utilities/globals/global.d.ts file to avoid declaring - // __DEV__ globally altogether when @apollo/client is installed in the - // node_modules directory of an application. - // - // @ts-ignore - const __DEV__: boolean | undefined; + const __DEV__: boolean; // will be removed in `dist` by the `postprocessDist` script + interface Window { + __DEV__?: boolean; + } } export default ( @@ -32,6 +19,4 @@ export default ( // improve your static analysis to detect this obfuscation, think again. This // is an arms race you cannot win, at least not in JavaScript. maybe(function() { return maybe.constructor("return this")() }) -) as typeof globalThis & { - __DEV__: typeof __DEV__; -}; +) as typeof globalThis & Window; diff --git a/src/utilities/globals/index.ts b/src/utilities/globals/index.ts --- a/src/utilities/globals/index.ts +++ b/src/utilities/globals/index.ts @@ -1,25 +1,15 @@ -import { invariant, InvariantError } from "ts-invariant"; - -// Just in case the graphql package switches from process.env.NODE_ENV to -// __DEV__, make sure __DEV__ is polyfilled before importing graphql. -import DEV from "./DEV"; -export { DEV } -export function checkDEV() { - invariant("boolean" === typeof DEV, DEV); -} - -// Import graphql/jsutils/instanceOf safely, working around its unchecked usage -// of process.env.NODE_ENV and https://github.com/graphql/graphql-js/pull/2894. -import { removeTemporaryGlobals } from "./fix-graphql"; - -// Synchronously undo the global process.env.NODE_ENV polyfill that we created -// temporarily while importing the offending graphql/jsutils/instanceOf module. -removeTemporaryGlobals(); - -export { maybe } from "./maybe"; -export { default as global } from "./global"; -export { invariant, InvariantError } - -// Ensure __DEV__ was properly initialized, and prevent tree-shaking bundlers -// from mistakenly pruning the ./DEV module (see issue #8674). -checkDEV(); +import { invariant, newInvariantError, InvariantError } from "./invariantWrappers.js"; + +export { maybe } from "./maybe.js"; +export { default as global } from "./global.js"; +export { invariant, newInvariantError, InvariantError } + +/** + * @deprecated we do not use this internally anymore, + * it is just exported for backwards compatibility + */ +// this file is extempt from automatic `__DEV__` replacement +// so we have to write it out here +// @ts-ignore +export const DEV = globalThis.__DEV__ !== false; +export { DEV as __DEV__ }; \ No newline at end of file diff --git a/src/utilities/globals/invariantWrappers.ts b/src/utilities/globals/invariantWrappers.ts new file mode 100644 --- /dev/null +++ b/src/utilities/globals/invariantWrappers.ts @@ -0,0 +1,128 @@ +import { invariant as originalInvariant, InvariantError } from "ts-invariant"; +import { version } from "../../version.js"; +import global from "./global.js"; +import type { ErrorCodes } from "../../invariantErrorCodes.js"; +import { stringifyForDisplay } from "../common/stringifyForDisplay.js"; + +function wrap(fn: (msg?: string, ...args: any[]) => void) { + return function (message: string | number, ...args: any[]) { + fn(typeof message === "number" ? getErrorMsg(message) : message, ...args); + }; +} + +type LogFunction = { + /** + * Logs a `$level` message if the user used `ts-invariant`'s `setVerbosity` to set + * a verbosity level of `$level` or lower. (defaults to `"log"`). 
+ * + * The user will either be presented with a link to the documentation for the message, + * or they can use the `loadDevMessages` to add the message strings to the bundle. + * The documentation will display the message without argument substitution. + * Instead, the arguments will be printed on the console after the link. + * + * `message` can only be a string, a concatenation of strings, or a ternary statement + * that results in a string. This will be enforced on build, where the message will + * be replaced with a message number. + * + * String substitutions like %s, %o, %d or %f are supported. + */ + (message?: any, ...optionalParams: unknown[]): void; +}; + +type WrappedInvariant = { + /** + * Throws and InvariantError with the given message if the condition is false. + * + * `message` can only be a string, a concatenation of strings, or a ternary statement + * that results in a string. This will be enforced on build, where the message will + * be replaced with a message number. + * + * The user will either be presented with a link to the documentation for the message, + * or they can use the `loadErrorMessages` to add the message strings to the bundle. + * The documentation will display the message with the arguments substituted. + * + * String substitutions with %s are supported and will also return + * pretty-stringified objects. + * Excess `optionalParams` will be swallowed. + */ + ( + condition: any, + message?: string | number, + ...optionalParams: unknown[] + ): asserts condition; + + debug: LogFunction; + log: LogFunction; + warn: LogFunction; + error: LogFunction; +}; +const invariant: WrappedInvariant = Object.assign( + function invariant( + condition: any, + message?: string | number, + ...args: unknown[] + ): asserts condition { + if (!condition) { + originalInvariant(condition, getErrorMsg(message, args)); + } + }, + { + debug: wrap(originalInvariant.debug), + log: wrap(originalInvariant.log), + warn: wrap(originalInvariant.warn), + error: wrap(originalInvariant.error), + } +); + +/** + * Returns an InvariantError. + * + * `message` can only be a string, a concatenation of strings, or a ternary statement + * that results in a string. This will be enforced on build, where the message will + * be replaced with a message number. + * String substitutions with %s are supported and will also return + * pretty-stringified objects. + * Excess `optionalParams` will be swallowed. + */ +function newInvariantError( + message?: string | number, + ...optionalParams: unknown[] +) { + return new InvariantError(getErrorMsg(message, optionalParams)); +} + +const ApolloErrorMessageHandler = Symbol.for( + "ApolloErrorMessageHandler_" + version +); +declare global { + interface Window { + [ApolloErrorMessageHandler]?: { + (message: string | number, args: unknown[]): string | undefined; + } & ErrorCodes; + } +} + +function getErrorMsg(message?: string | number, messageArgs: unknown[] = []) { + if (!message) return; + const args = messageArgs.map((arg) => + typeof arg == "string" ? arg : stringifyForDisplay(arg, 2).slice(0, 1000) + ); + return ( + (global[ApolloErrorMessageHandler] && + global[ApolloErrorMessageHandler](message, args)) || + `An error occured! 
For more details, see the full error text at https://go.apollo.dev/c/err#${encodeURIComponent( + JSON.stringify({ + version, + message, + args, + }) + )}` + ); +} + +export { + invariant, + InvariantError, + newInvariantError, + ApolloErrorMessageHandler, +}; diff --git a/src/utilities/graphql/DocumentTransform.ts b/src/utilities/graphql/DocumentTransform.ts new file mode 100644 --- /dev/null +++ b/src/utilities/graphql/DocumentTransform.ts @@ -0,0 +1,132 @@ +import { Trie } from "@wry/trie"; +import { canUseWeakMap, canUseWeakSet } from "../common/canUse.js"; +import { checkDocument } from "./getFromAST.js"; +import { invariant } from "../globals/index.js"; +import type { DocumentNode } from "graphql"; + +export type DocumentTransformCacheKey = ReadonlyArray<unknown>; + +type TransformFn = (document: DocumentNode) => DocumentNode; + +interface DocumentTransformOptions { + cache?: boolean; + getCacheKey?: ( + document: DocumentNode + ) => DocumentTransformCacheKey | undefined; +} + +function identity(document: DocumentNode) { + return document; +} + +export class DocumentTransform { + private readonly transform: TransformFn; + + private readonly resultCache = canUseWeakSet + ? new WeakSet<DocumentNode>() + : new Set<DocumentNode>(); + + private stableCacheKeys: + | Trie<{ key: DocumentTransformCacheKey; value?: DocumentNode }> + | undefined; + + // This default implementation of getCacheKey can be overridden by providing + // options.getCacheKey to the DocumentTransform constructor. In general, a + // getCacheKey function may either return an array of keys (often including + // the document) to be used as a cache key, or undefined to indicate the + // transform for this document should not be cached. + private getCacheKey( + document: DocumentNode + ): DocumentTransformCacheKey | undefined { + return [document]; + } + + static identity() { + // No need to cache this transform since it just returns the document + // unchanged. This should save a bit of memory that would otherwise be + // needed to populate the `documentCache` of this transform. + return new DocumentTransform(identity, { cache: false }); + } + + static split( + predicate: (document: DocumentNode) => boolean, + left: DocumentTransform, + right: DocumentTransform = DocumentTransform.identity() + ) { + return new DocumentTransform( + (document) => { + const documentTransform = predicate(document) ? left : right; + + return documentTransform.transformDocument(document); + }, + // Reasonably assume both `left` and `right` transforms handle their own caching + { cache: false } + ); + } + + constructor( + transform: TransformFn, + options: DocumentTransformOptions = Object.create(null) + ) { + this.transform = transform; + + if (options.getCacheKey) { + // Override default `getCacheKey` function, which returns [document]. + this.getCacheKey = options.getCacheKey; + } + + if (options.cache !== false) { + this.stableCacheKeys = new Trie(canUseWeakMap, (key) => ({ key })); + } + } + + transformDocument(document: DocumentNode) { + // If a user passes an already transformed result back to this function, + // immediately return it. 
+ if (this.resultCache.has(document)) { + return document; + } + + const cacheEntry = this.getStableCacheEntry(document); + + if (cacheEntry && cacheEntry.value) { + return cacheEntry.value; + } + + checkDocument(document); + + const transformedDocument = this.transform(document); + + this.resultCache.add(transformedDocument); + + if (cacheEntry) { + cacheEntry.value = transformedDocument; + } + + return transformedDocument; + } + + concat(otherTransform: DocumentTransform) { + return new DocumentTransform( + (document) => { + return otherTransform.transformDocument( + this.transformDocument(document) + ); + }, + // Reasonably assume both transforms handle their own caching + { cache: false } + ); + } + + getStableCacheEntry(document: DocumentNode) { + if (!this.stableCacheKeys) return; + const cacheKeys = this.getCacheKey(document); + if (cacheKeys) { + invariant( + Array.isArray(cacheKeys), + "`getCacheKey` must return an array or undefined" + ); + return this.stableCacheKeys.lookupArray(cacheKeys); + } + } +} diff --git a/src/utilities/graphql/directives.ts b/src/utilities/graphql/directives.ts --- a/src/utilities/graphql/directives.ts +++ b/src/utilities/graphql/directives.ts @@ -1,8 +1,8 @@ -import { invariant } from '../globals'; +import { invariant } from '../globals/index.js'; // Provides the methods that allow QueryManager to handle the `skip` and // `include` directives within GraphQL. -import { +import type { SelectionNode, VariableNode, BooleanValueNode, @@ -10,7 +10,8 @@ import { DocumentNode, ArgumentNode, ValueNode, - ASTNode, + ASTNode} from 'graphql'; +import { visit, BREAK, } from 'graphql'; @@ -34,7 +35,8 @@ export function shouldInclude( evaledValue = variables && variables[(ifArgument.value as VariableNode).name.value]; invariant( evaledValue !== void 0, - `Invalid variable referenced in @${directive.name.value} directive.`, + `Invalid variable referenced in @%s directive.`, + directive.name.value ); } else { evaledValue = (ifArgument.value as BooleanValueNode).value; @@ -116,13 +118,15 @@ export function getInclusionDirectives( invariant( directiveArguments && directiveArguments.length === 1, - `Incorrect number of arguments for the @${directiveName} directive.`, + `Incorrect number of arguments for the @%s directive.`, + directiveName ); const ifArgument = directiveArguments![0]; invariant( ifArgument.name && ifArgument.name.value === 'if', - `Invalid argument for the @${directiveName} directive.`, + `Invalid argument for the @%s directive.`, + directiveName ); const ifValue: ValueNode = ifArgument.value; @@ -131,7 +135,8 @@ export function getInclusionDirectives( invariant( ifValue && (ifValue.kind === 'Variable' || ifValue.kind === 'BooleanValue'), - `Argument for the @${directiveName} directive must be a variable or a boolean value.`, + `Argument for the @%s directive must be a variable or a boolean value.`, + directiveName ); result.push({ directive, ifArgument }); diff --git a/src/utilities/graphql/fragments.ts b/src/utilities/graphql/fragments.ts --- a/src/utilities/graphql/fragments.ts +++ b/src/utilities/graphql/fragments.ts @@ -1,6 +1,6 @@ -import { invariant, InvariantError } from '../globals'; +import { invariant, newInvariantError } from '../globals/index.js'; -import { +import type { DocumentNode, FragmentDefinitionNode, InlineFragmentNode, @@ -46,11 +46,11 @@ export function getFragmentQueryDocument( // Throw an error if we encounter an operation definition because we will // define our own operation definition later on. 
if (definition.kind === 'OperationDefinition') { - throw new InvariantError( - `Found a ${definition.operation} operation${ - definition.name ? ` named '${definition.name.value}'` : '' - }. ` + + throw newInvariantError( + `Found a %s operation%s. ` + 'No operations are allowed when using a fragment as a query. Only fragments are allowed.', + definition.operation, + definition.name ? ` named '${definition.name.value}'` : '' ); } // Add our definition to the fragments array if it is a fragment @@ -65,9 +65,8 @@ export function getFragmentQueryDocument( if (typeof actualFragmentName === 'undefined') { invariant( fragments.length === 1, - `Found ${ - fragments.length - } fragments. \`fragmentName\` must be provided when there is not exactly 1 fragment.`, + `Found %s fragments. \`fragmentName\` must be provided when there is not exactly 1 fragment.`, + fragments.length ); actualFragmentName = fragments[0].name.value; } @@ -136,7 +135,7 @@ export function getFragmentFromSelection( return fragmentMap(fragmentName); } const fragment = fragmentMap && fragmentMap[fragmentName]; - invariant(fragment, `No fragment named ${fragmentName}`); + invariant(fragment, `No fragment named %s`, fragmentName); return fragment || null; } default: diff --git a/src/utilities/graphql/getFromAST.ts b/src/utilities/graphql/getFromAST.ts --- a/src/utilities/graphql/getFromAST.ts +++ b/src/utilities/graphql/getFromAST.ts @@ -1,13 +1,13 @@ -import { invariant, InvariantError } from '../globals'; +import { invariant, newInvariantError } from '../globals/index.js'; -import { +import type { DocumentNode, OperationDefinitionNode, FragmentDefinitionNode, ValueNode, } from 'graphql'; -import { valueToObjectRepresentation } from './storeUtils'; +import { valueToObjectRepresentation } from './storeUtils.js'; type OperationDefinitionWithName = OperationDefinitionNode & { name: NonNullable<OperationDefinitionNode['name']>; @@ -25,10 +25,9 @@ string in a "gql" tag? http://docs.apollostack.com/apollo-client/core.html#gql`, .filter(d => d.kind !== 'FragmentDefinition') .map(definition => { if (definition.kind !== 'OperationDefinition') { - throw new InvariantError( - `Schema type definitions not allowed in queries. Found: "${ - definition.kind - }"`, + throw newInvariantError( + `Schema type definitions not allowed in queries. Found: "%s"`, + definition.kind ); } return definition; @@ -36,7 +35,8 @@ string in a "gql" tag? 
http://docs.apollostack.com/apollo-client/core.html#gql`, invariant( operations.length <= 1, - `Ambiguous GraphQL document: contains ${operations.length} operations`, + `Ambiguous GraphQL document: contains %s operations`, + operations.length ); return doc; @@ -142,7 +142,7 @@ export function getMainDefinition( return fragmentDefinition; } - throw new InvariantError( + throw newInvariantError( 'Expected a parsed GraphQL query with a query, mutation, subscription, or a fragment.', ); } diff --git a/src/utilities/graphql/operations.ts b/src/utilities/graphql/operations.ts new file mode 100644 --- /dev/null +++ b/src/utilities/graphql/operations.ts @@ -0,0 +1,21 @@ +import type { DocumentNode } from "../../core/index.js"; +import { getOperationDefinition } from "./getFromAST.js"; + +function isOperation( + document: DocumentNode, + operation: "query" | "mutation" | "subscription" +) { + return getOperationDefinition(document)?.operation === operation; +} + +export function isMutationOperation(document: DocumentNode) { + return isOperation(document, "mutation"); +} + +export function isQueryOperation(document: DocumentNode) { + return isOperation(document, "query"); +} + +export function isSubscriptionOperation(document: DocumentNode) { + return isOperation(document, "subscription"); +} diff --git a/src/utilities/graphql/print.ts b/src/utilities/graphql/print.ts new file mode 100644 --- /dev/null +++ b/src/utilities/graphql/print.ts @@ -0,0 +1,14 @@ +import { print as origPrint } from "graphql"; +import { canUseWeakMap } from "../common/canUse.js"; + +const printCache = canUseWeakMap ? new WeakMap() : undefined; +export const print: typeof origPrint = (ast) => { + let result; + result = printCache?.get(ast); + + if (!result) { + result = origPrint(ast); + printCache?.set(ast, result); + } + return result; +}; diff --git a/src/utilities/graphql/storeUtils.ts b/src/utilities/graphql/storeUtils.ts --- a/src/utilities/graphql/storeUtils.ts +++ b/src/utilities/graphql/storeUtils.ts @@ -1,6 +1,6 @@ -import { InvariantError } from '../globals'; +import { newInvariantError } from '../globals/index.js'; -import { +import type { DirectiveNode, FieldNode, IntValueNode, @@ -21,8 +21,9 @@ import { FragmentSpreadNode, } from 'graphql'; -import { isNonNullObject } from '../common/objects'; -import { FragmentMap, getFragmentFromSelection } from './fragments'; +import { isNonNullObject } from '../common/objects.js'; +import type { FragmentMap} from './fragments.js'; +import { getFragmentFromSelection } from './fragments.js'; export interface Reference { readonly __ref: string; @@ -131,10 +132,11 @@ export function valueToObjectRepresentation( } else if (isNullValue(value)) { argObj[name.value] = null; } else { - throw new InvariantError( - `The inline argument "${name.value}" of kind "${(value as any).kind}"` + + throw newInvariantError( + `The inline argument "%s" of kind "%s"` + 'is not supported. 
Use variables instead of inline arguments to ' + 'overcome this limitation.', + name.value, (value as any).kind ); } } @@ -186,6 +188,7 @@ const KNOWN_DIRECTIVES: string[] = [ 'client', 'rest', 'export', + 'nonreactive', ]; export const getStoreKeyName = Object.assign(function ( diff --git a/src/utilities/graphql/transform.ts b/src/utilities/graphql/transform.ts --- a/src/utilities/graphql/transform.ts +++ b/src/utilities/graphql/transform.ts @@ -1,6 +1,6 @@ -import { invariant } from '../globals'; +import { invariant } from '../globals/index.js'; -import { +import type { DocumentNode, SelectionNode, SelectionSetNode, @@ -11,11 +11,12 @@ import { ArgumentNode, FragmentSpreadNode, VariableDefinitionNode, - visit, ASTNode, - Kind, ASTVisitor, - InlineFragmentNode, + InlineFragmentNode} from 'graphql'; +import { + visit, + Kind } from 'graphql'; import { @@ -24,13 +25,14 @@ import { getFragmentDefinition, getFragmentDefinitions, getMainDefinition, -} from './getFromAST'; -import { isField } from './storeUtils'; +} from './getFromAST.js'; +import { isField } from './storeUtils.js'; +import type { + FragmentMap} from './fragments.js'; import { - createFragmentMap, - FragmentMap, -} from './fragments'; -import { isArray } from '../common/arrays'; + createFragmentMap +} from './fragments.js'; +import { isArray, isNonEmptyArray } from '../common/arrays.js'; export type RemoveNodeConfig<N> = { name?: string; @@ -83,22 +85,39 @@ function nullIfDocIsEmpty(doc: DocumentNode) { } function getDirectiveMatcher( - directives: (RemoveDirectiveConfig | GetDirectiveConfig)[], + configs: (RemoveDirectiveConfig | GetDirectiveConfig)[], ) { - const nameSet = new Set<string>(); - const tests: Array<(directive: DirectiveNode) => boolean> = []; - directives.forEach(directive => { - if (directive.name) { - nameSet.add(directive.name); - } else if (directive.test) { - tests.push(directive.test); + const names = new Map< + string, + RemoveDirectiveConfig | GetDirectiveConfig + >(); + + const tests = new Map< + (directive: DirectiveNode) => boolean, + RemoveDirectiveConfig | GetDirectiveConfig + >(); + + configs.forEach(directive => { + if (directive) { + if (directive.name) { + names.set(directive.name, directive); + } else if (directive.test) { + tests.set(directive.test, directive); + } } }); - return (directive: DirectiveNode) => ( - nameSet.has(directive.name.value) || - tests.some(test => test(directive)) - ); + return (directive: DirectiveNode) => { + let config = names.get(directive.name.value); + if (!config && tests.size) { + tests.forEach((testConfig, test) => { + if (test(directive)) { + config = testConfig; + } + }); + } + return config; + }; } // Helper interface and function used by removeDirectivesFromDocument to keep @@ -138,6 +157,8 @@ export function removeDirectivesFromDocument( directives: RemoveDirectiveConfig[], doc: DocumentNode, ): DocumentNode | null { + checkDocument(doc); + // Passing empty strings to makeInUseGetterFunction means we handle anonymous // operations as if their names were "". 
Anonymous fragment definitions are // not supposed to be possible, but the same default naming strategy seems @@ -173,13 +194,10 @@ export function removeDirectivesFromDocument( } const directiveMatcher = getDirectiveMatcher(directives); - const hasRemoveDirective = directives.some(directive => directive.remove); - const shouldRemoveField = ( - nodeDirectives: FieldNode["directives"] - ) => ( - hasRemoveDirective && - nodeDirectives && - nodeDirectives.some(directiveMatcher) + const shouldRemoveField = (nodeDirectives: FieldNode["directives"]) => ( + isNonEmptyArray(nodeDirectives) && + nodeDirectives.map(directiveMatcher).some( + (config: RemoveDirectiveConfig | undefined) => config && config.remove) ); const originalFragmentDefsByPath = new Map<string, FragmentDefinitionNode>(); diff --git a/src/utilities/index.ts b/src/utilities/index.ts --- a/src/utilities/index.ts +++ b/src/utilities/index.ts @@ -1,8 +1,10 @@ -export { DEV, maybe } from './globals'; +export { DEV, maybe } from "./globals/index.js"; -export { +export type { DirectiveInfo, InclusionDirectives, +} from "./graphql/directives.js"; +export { shouldInclude, hasDirectives, hasAnyDirectives, @@ -10,15 +12,17 @@ export { hasClientExports, getDirectiveNames, getInclusionDirectives, -} from './graphql/directives'; +} from "./graphql/directives.js"; +export type { DocumentTransformCacheKey } from "./graphql/DocumentTransform.js"; +export { DocumentTransform } from "./graphql/DocumentTransform.js"; + +export type { FragmentMap, FragmentMapFunction } from "./graphql/fragments.js"; export { - FragmentMap, - FragmentMapFunction, createFragmentMap, getFragmentQueryDocument, getFragmentFromSelection, -} from './graphql/fragments'; +} from "./graphql/fragments.js"; export { checkDocument, @@ -29,14 +33,18 @@ export { getFragmentDefinition, getMainDefinition, getDefaultValues, -} from './graphql/getFromAST'; +} from "./graphql/getFromAST.js"; -export { +export { print } from "./graphql/print.js"; + +export type { StoreObject, Reference, StoreValue, Directives, VariableValue, +} from "./graphql/storeUtils.js"; +export { makeReference, isDocumentNode, isReference, @@ -48,9 +56,9 @@ export { resultKeyNameFromField, getStoreKeyName, getTypenameFromResult, -} from './graphql/storeUtils'; +} from "./graphql/storeUtils.js"; -export { +export type { RemoveNodeConfig, GetNodeConfig, RemoveDirectiveConfig, @@ -60,6 +68,8 @@ export { RemoveFragmentSpreadConfig, RemoveFragmentDefinitionConfig, RemoveVariableDefinitionConfig, +} from "./graphql/transform.js"; +export { addTypenameToDocument, buildQueryFromSelectionSet, removeDirectivesFromDocument, @@ -67,34 +77,54 @@ export { removeArgumentsFromDocument, removeFragmentSpreadFromDocument, removeClientSetsFromDocument, -} from './graphql/transform'; +} from "./graphql/transform.js"; + +export { + isMutationOperation, + isQueryOperation, + isSubscriptionOperation, +} from "./graphql/operations.js"; export { concatPagination, offsetLimitPagination, relayStylePagination, -} from './policies/pagination'; +} from "./policies/pagination.js"; -export { - Observable, +export type { Observer, - ObservableSubscription -} from './observables/Observable'; + ObservableSubscription, +} from "./observables/Observable.js"; +export { Observable } from "./observables/Observable.js"; + +export { + isStatefulPromise, + createFulfilledPromise, + createRejectedPromise, + wrapPromiseWithState, +} from "./promises/decoration.js"; + +export * from "./common/mergeDeep.js"; +export * from "./common/cloneDeep.js"; +export * from 
"./common/maybeDeepFreeze.js"; +export * from "./observables/iteration.js"; +export * from "./observables/asyncMap.js"; +export * from "./observables/Concast.js"; +export * from "./observables/subclassing.js"; +export * from "./common/arrays.js"; +export * from "./common/objects.js"; +export * from "./common/errorHandling.js"; +export * from "./common/canUse.js"; +export * from "./common/compact.js"; +export * from "./common/makeUniqueId.js"; +export * from "./common/stringifyForDisplay.js"; +export * from "./common/mergeOptions.js"; +export * from "./common/responseIterator.js"; +export * from "./common/incrementalResult.js"; -export * from './common/mergeDeep'; -export * from './common/cloneDeep'; -export * from './common/maybeDeepFreeze'; -export * from './observables/iteration'; -export * from './observables/asyncMap'; -export * from './observables/Concast'; -export * from './observables/subclassing'; -export * from './common/arrays'; -export * from './common/objects'; -export * from './common/errorHandling'; -export * from './common/canUse'; -export * from './common/compact'; -export * from './common/makeUniqueId'; -export * from './common/stringifyForDisplay'; -export * from './common/mergeOptions'; +export { omitDeep } from "./common/omitDeep.js"; +export { stripTypename } from "./common/stripTypename.js"; -export * from './types/IsStrictlyAny'; +export * from "./types/IsStrictlyAny.js"; +export type { DeepOmit } from "./types/DeepOmit.js"; +export type { DeepPartial } from "./types/DeepPartial.js"; diff --git a/src/utilities/observables/Concast.ts b/src/utilities/observables/Concast.ts --- a/src/utilities/observables/Concast.ts +++ b/src/utilities/observables/Concast.ts @@ -1,6 +1,7 @@ -import { Observable, Observer, ObservableSubscription, Subscriber } from "./Observable"; -import { iterateObserversSafely } from "./iteration"; -import { fixObservableSubclass } from "./subclassing"; +import type { Observer, ObservableSubscription, Subscriber } from "./Observable.js"; +import { Observable } from "./Observable.js"; +import { iterateObserversSafely } from "./iteration.js"; +import { fixObservableSubclass } from "./subclassing.js"; type MaybeAsync<T> = T | PromiseLike<T>; diff --git a/src/utilities/observables/Observable.ts b/src/utilities/observables/Observable.ts --- a/src/utilities/observables/Observable.ts +++ b/src/utilities/observables/Observable.ts @@ -1,8 +1,9 @@ -import { - Observable, +import type { Observer, Subscription as ObservableSubscription, - Subscriber, + Subscriber} from 'zen-observable-ts'; +import { + Observable } from 'zen-observable-ts'; // This simplified polyfill attempts to follow the ECMAScript Observable diff --git a/src/utilities/observables/asyncMap.ts b/src/utilities/observables/asyncMap.ts --- a/src/utilities/observables/asyncMap.ts +++ b/src/utilities/observables/asyncMap.ts @@ -1,4 +1,5 @@ -import { Observable, Observer } from "./Observable"; +import type { Observer } from "./Observable.js"; +import { Observable } from "./Observable.js"; // Like Observable.prototype.map, except that the mapping function can // optionally return a Promise (or be async). 
diff --git a/src/utilities/observables/iteration.ts b/src/utilities/observables/iteration.ts --- a/src/utilities/observables/iteration.ts +++ b/src/utilities/observables/iteration.ts @@ -1,4 +1,4 @@ -import { Observer } from "./Observable"; +import type { Observer } from "./Observable.js"; export function iterateObserversSafely<E, A>( observers: Set<Observer<E>>, diff --git a/src/utilities/observables/subclassing.ts b/src/utilities/observables/subclassing.ts --- a/src/utilities/observables/subclassing.ts +++ b/src/utilities/observables/subclassing.ts @@ -1,5 +1,5 @@ -import { Observable } from "./Observable"; -import { canUseSymbol } from "../common/canUse"; +import { Observable } from "./Observable.js"; +import { canUseSymbol } from "../common/canUse.js"; // Generic implementations of Observable.prototype methods like map and // filter need to know how to create a new Observable from an Observable diff --git a/src/utilities/policies/pagination.ts b/src/utilities/policies/pagination.ts --- a/src/utilities/policies/pagination.ts +++ b/src/utilities/policies/pagination.ts @@ -1,7 +1,7 @@ import { __rest } from "tslib"; -import { FieldPolicy, Reference } from '../../cache'; -import { mergeDeep } from '../common/mergeDeep'; +import type { FieldPolicy, Reference } from '../../cache/index.js'; +import { mergeDeep } from '../common/mergeDeep.js'; type KeyArgs = FieldPolicy<any>["keyArgs"]; diff --git a/src/utilities/promises/decoration.ts b/src/utilities/promises/decoration.ts new file mode 100644 --- /dev/null +++ b/src/utilities/promises/decoration.ts @@ -0,0 +1,79 @@ +export interface PendingPromise<TValue> extends Promise<TValue> { + status: "pending"; +} + +export interface FulfilledPromise<TValue> extends Promise<TValue> { + status: "fulfilled"; + value: TValue; +} + +export interface RejectedPromise<TValue> extends Promise<TValue> { + status: "rejected"; + reason: unknown; +} + +export type PromiseWithState<TValue> = + | PendingPromise<TValue> + | FulfilledPromise<TValue> + | RejectedPromise<TValue>; + +export function createFulfilledPromise<TValue>(value: TValue) { + const promise = Promise.resolve(value) as FulfilledPromise<TValue>; + + promise.status = "fulfilled"; + promise.value = value; + + return promise; +} + +export function createRejectedPromise<TValue = unknown>(reason: unknown) { + const promise = Promise.reject(reason) as RejectedPromise<TValue>; + + // prevent potential edge cases leaking unhandled error rejections + promise.catch(() => {}); + + promise.status = "rejected"; + promise.reason = reason; + + return promise; +} + +export function isStatefulPromise<TValue>( + promise: Promise<TValue> +): promise is PromiseWithState<TValue> { + return "status" in promise; +} + +export function wrapPromiseWithState<TValue>( + promise: Promise<TValue> +): PromiseWithState<TValue> { + if (isStatefulPromise(promise)) { + return promise; + } + + const pendingPromise = promise as PendingPromise<TValue>; + pendingPromise.status = "pending"; + + pendingPromise.then( + (value) => { + if (pendingPromise.status === "pending") { + const fulfilledPromise = + pendingPromise as unknown as FulfilledPromise<TValue>; + + fulfilledPromise.status = "fulfilled"; + fulfilledPromise.value = value; + } + }, + (reason: unknown) => { + if (pendingPromise.status === "pending") { + const rejectedPromise = + pendingPromise as unknown as RejectedPromise<TValue>; + + rejectedPromise.status = "rejected"; + rejectedPromise.reason = reason; + } + } + ); + + return promise as PromiseWithState<TValue>; +} diff --git 
a/src/utilities/types/DeepOmit.ts b/src/utilities/types/DeepOmit.ts new file mode 100644 --- /dev/null +++ b/src/utilities/types/DeepOmit.ts @@ -0,0 +1,32 @@ +import type { Primitive } from "./Primitive.js"; + +// DeepOmit primitives include functions since these are unmodified. +type DeepOmitPrimitive = Primitive | Function; + +export type DeepOmitArray<T extends any[], K> = { + [P in keyof T]: DeepOmit<T[P], K>; +}; + +// Unfortunately there is one major flaw in this type: This will omit properties +// from class instances in the return type even though our omitDeep helper +// ignores class instances, therefore resulting in a type mismatch between +// the return value and the runtime value. +// +// It is not currently possible with TypeScript to distinguish between plain +// objects and class instances. +// https://github.com/microsoft/TypeScript/issues/29063 +// +// This should be fine as of the time of this writing until omitDeep gets +// broader use since this utility is only used to strip __typename from +// `variables`; a case in which class instances are invalid anyways. +export type DeepOmit<T, K> = T extends DeepOmitPrimitive + ? T + : { + [P in Exclude<keyof T, K>]: T[P] extends infer TP + ? TP extends DeepOmitPrimitive + ? TP + : TP extends any[] + ? DeepOmitArray<TP, K> + : DeepOmit<TP, K> + : never; + }; diff --git a/src/utilities/types/DeepPartial.ts b/src/utilities/types/DeepPartial.ts new file mode 100644 --- /dev/null +++ b/src/utilities/types/DeepPartial.ts @@ -0,0 +1,58 @@ +// Inspired by type-fest PartialDeep: https://github.com/sindresorhus/type-fest/blob/9feb8c89be9a0f2f688bf2f497230298a8e2472e/source/partial-deep.d.ts +// +// We're including the license to give credit to the original implementation. +// https://github.com/sindresorhus/type-fest/blob/main/license-mit + +/* + * MIT License + * + * Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ +import type { Primitive } from "./Primitive.js"; + +type DeepPartialPrimitive = Primitive | Date | RegExp; + +export type DeepPartial<T> = T extends DeepPartialPrimitive + ? T + : T extends Map<infer TKey, infer TValue> + ? DeepPartialMap<TKey, TValue> + : T extends ReadonlyMap<infer TKey, infer TValue> + ? DeepPartialReadonlyMap<TKey, TValue> + : T extends Set<infer TItem> + ? DeepPartialSet<TItem> + : T extends ReadonlySet<infer TItem> + ? DeepPartialReadonlySet<TItem> + : T extends (...args: any[]) => unknown + ? T | undefined + : T extends object + ? 
T extends ReadonlyArray<infer TItem> // Test for arrays/tuples + ? TItem[] extends T // Test for non-tuples + ? readonly TItem[] extends T + ? ReadonlyArray<DeepPartial<TItem | undefined>> + : Array<DeepPartial<TItem | undefined>> + : DeepPartialObject<T> + : DeepPartialObject<T> + : unknown; + +type DeepPartialMap<TKey, TValue> = {} & Map< + DeepPartial<TKey>, + DeepPartial<TValue> +>; + +type DeepPartialReadonlyMap<TKey, TValue> = {} & ReadonlyMap< + DeepPartial<TKey>, + DeepPartial<TValue> +>; + +type DeepPartialSet<T> = {} & Set<DeepPartial<T>>; +type DeepPartialReadonlySet<T> = {} & ReadonlySet<DeepPartial<T>>; + +type DeepPartialObject<T extends object> = { + [K in keyof T]?: DeepPartial<T[K]>; +}; diff --git a/src/utilities/types/Primitive.ts b/src/utilities/types/Primitive.ts new file mode 100644 --- /dev/null +++ b/src/utilities/types/Primitive.ts @@ -0,0 +1,9 @@ +// Matches any primitive value: https://developer.mozilla.org/en-US/docs/Glossary/Primitive. +export type Primitive = + | null + | undefined + | string + | number + | boolean + | symbol + | bigint;
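// --- Illustrative sketch (TypeScript), not taken verbatim from the patch above ---
// A small type-level example of the DeepOmit and DeepPartial helpers added in
// src/utilities/types, imported through the utilities entry point as re-exported
// in the index.ts changes earlier in this diff. The `Book` shape and its values
// are placeholders chosen only to show the compile-time behaviour.
import type { DeepOmit, DeepPartial } from "@apollo/client/utilities";

interface Book {
  __typename: "Book";
  title: string;
  author: { __typename: "Author"; name: string };
}

// Recursively drops `__typename` at every level, matching the shape produced by
// helpers such as stripTypename/omitDeep for variables.
type BookVariables = DeepOmit<Book, "__typename">;

// Recursively makes every property optional.
type PartialBook = DeepPartial<Book>;

// Both assignments type-check against the derived shapes.
const variables: BookVariables = {
  title: "1984",
  author: { name: "George Orwell" },
};

const partial: PartialBook = { author: {} };
// --- end sketch ---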
diff --git a/integration-tests/browser-esm/tests/playwright/jsdeliver-esm.test.ts b/integration-tests/browser-esm/tests/playwright/jsdeliver-esm.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/browser-esm/tests/playwright/jsdeliver-esm.test.ts @@ -0,0 +1,10 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture"; + +test("Basic Test", async ({ page, withHar }) => { + await page.goto("http://localhost:3000/jsdeliver-esm.html"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible({ timeout: 10000 }); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/browser-esm/tests/playwright/jspm-prepared.test.ts b/integration-tests/browser-esm/tests/playwright/jspm-prepared.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/browser-esm/tests/playwright/jspm-prepared.test.ts @@ -0,0 +1,10 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture"; + +test("Basic Test", async ({ page, withHar }) => { + await page.goto("http://localhost:3000/jspm-prepared.html"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible({ timeout: 10000 }); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/browser-esm/tests/playwright/unpkg-unmangled.test.ts b/integration-tests/browser-esm/tests/playwright/unpkg-unmangled.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/browser-esm/tests/playwright/unpkg-unmangled.test.ts @@ -0,0 +1,12 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture"; + +test("Basic Test", async ({ withHar }) => { + await withHar.goto("http://localhost:3000/unpkg-unmangled.html"); + + await expect(withHar.getByText("loading")).toBeVisible(); + await expect(withHar.getByText("loading")).not.toBeVisible({ + timeout: 10000, + }); + await expect(withHar.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/cra4/tests/playwright/apollo-client.test.ts b/integration-tests/cra4/tests/playwright/apollo-client.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/cra4/tests/playwright/apollo-client.test.ts @@ -0,0 +1,10 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture"; + +test("Basic Test", async ({ page, withHar }) => { + await page.goto("http://localhost:3000"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible(); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/cra5/tests/playwright/apollo-client.test.ts b/integration-tests/cra5/tests/playwright/apollo-client.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/cra5/tests/playwright/apollo-client.test.ts @@ -0,0 +1,10 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture"; + +test("Basic Test", async ({ page, withHar }) => { + await page.goto("http://localhost:3000"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible(); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/next/tests/playwright/apollo-client.test.ts b/integration-tests/next/tests/playwright/apollo-client.test.ts new file mode 100644 --- 
/dev/null +++ b/integration-tests/next/tests/playwright/apollo-client.test.ts @@ -0,0 +1,28 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture"; + +test("RSC", async ({ page, blockRequest }) => { + await page.goto("http://localhost:3000"); + + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); + +test("CC", async ({ page, blockRequest }) => { + await page.goto("http://localhost:3000/cc"); + + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); + +test("Pages", async ({ page, blockRequest }) => { + await page.goto("http://localhost:3000/pages"); + + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); + +test("Pages without SSR", async ({ page, withHar }) => { + await page.goto("http://localhost:3000/pages-no-ssr"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible(); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/vite-swc/tests/playwright/apollo-client.test.ts b/integration-tests/vite-swc/tests/playwright/apollo-client.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/vite-swc/tests/playwright/apollo-client.test.ts @@ -0,0 +1,10 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture.ts"; + +test("Basic Test", async ({ page, withHar }) => { + await page.goto("http://localhost:3000"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible(); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/integration-tests/vite/tests/playwright/apollo-client.test.ts b/integration-tests/vite/tests/playwright/apollo-client.test.ts new file mode 100644 --- /dev/null +++ b/integration-tests/vite/tests/playwright/apollo-client.test.ts @@ -0,0 +1,10 @@ +import { expect } from "@playwright/test"; +import { test } from "shared/fixture.ts"; + +test("Basic Test", async ({ page, withHar }) => { + await page.goto("http://localhost:3000"); + + await expect(page.getByText("loading")).toBeVisible(); + await expect(page.getByText("loading")).not.toBeVisible(); + await expect(page.getByText("Soft Warm Apollo Beanie")).toBeVisible(); +}); diff --git a/src/__tests__/ApolloClient.ts b/src/__tests__/ApolloClient.ts --- a/src/__tests__/ApolloClient.ts +++ b/src/__tests__/ApolloClient.ts @@ -2,6 +2,7 @@ import gql from 'graphql-tag'; import { ApolloClient, + ApolloError, DefaultOptions, FetchPolicy, QueryOptions, @@ -15,7 +16,7 @@ import { HttpLink } from '../link/http'; import { InMemoryCache } from '../cache'; import { itAsync, withErrorSpy } from '../testing'; import { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { invariant } from 'ts-invariant'; +import { invariant } from '../utilities/globals'; describe('ApolloClient', () => { describe('constructor', () => { @@ -2370,7 +2371,7 @@ describe('ApolloClient', () => { setTimeout(() => { try { expect(invariantDebugSpy).toHaveBeenCalledTimes(1); - expect(invariantDebugSpy).toHaveBeenCalledWith('In client.refetchQueries, Promise.all promise rejected with error ApolloError: refetch failed'); + expect(invariantDebugSpy).toHaveBeenCalledWith('In client.refetchQueries, Promise.all promise rejected with error %o', new ApolloError({errorMessage:"refetch failed"})); resolve(); } catch (err) { reject(err); diff --git a/src/__tests__/__snapshots__/ApolloClient.ts.snap 
b/src/__tests__/__snapshots__/ApolloClient.ts.snap --- a/src/__tests__/__snapshots__/ApolloClient.ts.snap +++ b/src/__tests__/__snapshots__/ApolloClient.ts.snap @@ -213,10 +213,12 @@ exports[`ApolloClient writeFragment should warn when the data provided does not [MockFunction] { "calls": Array [ Array [ - "Missing field 'e' while writing result { - \\"__typename\\": \\"Bar\\", - \\"i\\": 10 -}", + "Missing field '%s' while writing result %o", + "e", + Object { + "__typename": "Bar", + "i": 10, + }, ], ], "results": Array [ @@ -382,11 +384,13 @@ exports[`ApolloClient writeQuery should warn when the data provided does not mat [MockFunction] { "calls": Array [ Array [ - "Missing field 'description' while writing result { - \\"id\\": \\"1\\", - \\"name\\": \\"Todo 1\\", - \\"__typename\\": \\"Todo\\" -}", + "Missing field '%s' while writing result %o", + "description", + Object { + "__typename": "Todo", + "id": "1", + "name": "Todo 1", + }, ], ], "results": Array [ diff --git a/src/__tests__/__snapshots__/client.ts.snap b/src/__tests__/__snapshots__/client.ts.snap --- a/src/__tests__/__snapshots__/client.ts.snap +++ b/src/__tests__/__snapshots__/client.ts.snap @@ -46,12 +46,14 @@ exports[`client should warn if server returns wrong data 1`] = ` [MockFunction] { "calls": Array [ Array [ - "Missing field 'description' while writing result { - \\"id\\": \\"1\\", - \\"name\\": \\"Todo 1\\", - \\"price\\": 100, - \\"__typename\\": \\"Todo\\" -}", + "Missing field '%s' while writing result %o", + "description", + Object { + "__typename": "Todo", + "id": "1", + "name": "Todo 1", + "price": 100, + }, ], ], "results": Array [ diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -9,6 +9,7 @@ Array [ "ApolloLink", "ApolloProvider", "Cache", + "DocumentTransform", "DocumentType", "HttpLink", "InMemoryCache", @@ -16,6 +17,7 @@ Array [ "NetworkStatus", "Observable", "ObservableQuery", + "SuspenseCache", "checkFetcher", "concat", "createHttpLink", @@ -34,6 +36,7 @@ Array [ "getApolloContext", "gql", "isApolloError", + "isNetworkRequestSettled", "isReference", "makeReference", "makeVar", @@ -49,16 +52,20 @@ Array [ "selectURI", "serializeFetchParameter", "setLogVerbosity", + "skipToken", "split", "throwServerError", "toPromise", "useApolloClient", - "useFragment_experimental", + "useBackgroundQuery", + "useFragment", "useLazyQuery", "useMutation", "useQuery", "useReactiveVar", + "useReadQuery", "useSubscription", + "useSuspenseQuery", ] `; @@ -88,6 +95,7 @@ Array [ "ApolloError", "ApolloLink", "Cache", + "DocumentTransform", "HttpLink", "InMemoryCache", "MissingFieldError", @@ -111,6 +119,7 @@ Array [ "fromPromise", "gql", "isApolloError", + "isNetworkRequestSettled", "isReference", "makeReference", "makeVar", @@ -129,6 +138,14 @@ Array [ ] `; +exports[`exports of public entry points @apollo/client/dev 1`] = ` +Array [ + "loadDevMessages", + "loadErrorMessageHandler", + "loadErrorMessages", +] +`; + exports[`exports of public entry points @apollo/client/errors 1`] = ` Array [ "ApolloError", @@ -200,6 +217,13 @@ Array [ ] `; +exports[`exports of public entry points @apollo/client/link/remove-typename 1`] = ` +Array [ + "KEEP", + "removeTypenameFromVariables", +] +`; + exports[`exports of public entry points @apollo/client/link/retry 1`] = ` Array [ "RetryLink", @@ -221,6 +245,7 @@ Array [ exports[`exports of public entry points @apollo/client/link/utils 1`] = ` 
Array [ "createOperation", + "filterOperationVariables", "fromError", "fromPromise", "throwServerError", @@ -241,17 +266,22 @@ Array [ "ApolloConsumer", "ApolloProvider", "DocumentType", + "SuspenseCache", "getApolloContext", "operationName", "parser", "resetApolloContext", + "skipToken", "useApolloClient", - "useFragment_experimental", + "useBackgroundQuery", + "useFragment", "useLazyQuery", "useMutation", "useQuery", "useReactiveVar", + "useReadQuery", "useSubscription", + "useSuspenseQuery", ] `; @@ -284,13 +314,17 @@ Array [ exports[`exports of public entry points @apollo/client/react/hooks 1`] = ` Array [ + "skipToken", "useApolloClient", - "useFragment_experimental", + "useBackgroundQuery", + "useFragment", "useLazyQuery", "useMutation", "useQuery", "useReactiveVar", + "useReadQuery", "useSubscription", + "useSuspenseQuery", ] `; @@ -352,6 +386,7 @@ Array [ "Concast", "DEV", "DeepMerger", + "DocumentTransform", "Observable", "addTypenameToDocument", "argumentsObjectFromField", @@ -368,6 +403,8 @@ Array [ "compact", "concatPagination", "createFragmentMap", + "createFulfilledPromise", + "createRejectedPromise", "fixObservableSubclass", "getDefaultValues", "getDirectiveNames", @@ -388,13 +425,28 @@ Array [ "hasAnyDirectives", "hasClientExports", "hasDirectives", + "isApolloPayloadResult", "isArray", + "isAsyncIterableIterator", + "isBlob", "isDocumentNode", + "isExecutionPatchIncrementalResult", + "isExecutionPatchInitialResult", + "isExecutionPatchResult", "isField", "isInlineFragment", + "isMutationOperation", + "isNodeReadableStream", + "isNodeResponse", "isNonEmptyArray", "isNonNullObject", + "isPlainObject", + "isQueryOperation", + "isReadableStream", "isReference", + "isStatefulPromise", + "isStreamableBlob", + "isSubscriptionOperation", "iterateObserversSafely", "makeReference", "makeUniqueId", @@ -402,8 +454,11 @@ Array [ "maybeDeepFreeze", "mergeDeep", "mergeDeepArray", + "mergeIncrementalData", "mergeOptions", "offsetLimitPagination", + "omitDeep", + "print", "relayStylePagination", "removeArgumentsFromDocument", "removeClientSetsFromDocument", @@ -414,7 +469,9 @@ Array [ "shouldInclude", "storeKeyNameFromField", "stringifyForDisplay", + "stripTypename", "valueToObjectRepresentation", + "wrapPromiseWithState", ] `; @@ -422,9 +479,10 @@ exports[`exports of public entry points @apollo/client/utilities/globals 1`] = ` Array [ "DEV", "InvariantError", - "checkDEV", + "__DEV__", "global", "invariant", "maybe", + "newInvariantError", ] `; diff --git a/src/__tests__/__snapshots__/mutationResults.ts.snap b/src/__tests__/__snapshots__/mutationResults.ts.snap --- a/src/__tests__/__snapshots__/mutationResults.ts.snap +++ b/src/__tests__/__snapshots__/mutationResults.ts.snap @@ -4,11 +4,13 @@ exports[`mutation results should warn when the result fields don't match the que [MockFunction] { "calls": Array [ Array [ - "Missing field 'description' while writing result { - \\"id\\": \\"2\\", - \\"name\\": \\"Todo 2\\", - \\"__typename\\": \\"createTodo\\" -}", + "Missing field '%s' while writing result %o", + "description", + Object { + "__typename": "createTodo", + "id": "2", + "name": "Todo 2", + }, ], ], "results": Array [ diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -1,5 +1,5 @@ import { cloneDeep, assign } from 'lodash'; -import { GraphQLError, ExecutionResult, DocumentNode } from 'graphql'; +import { GraphQLError, ExecutionResult, DocumentNode, Kind, print, visit } from 'graphql'; import gql from 
'graphql-tag'; import { @@ -8,12 +8,14 @@ import { WatchQueryFetchPolicy, QueryOptions, ObservableQuery, + Operation, TypedDocumentNode, + NetworkStatus, } from '../core'; -import { Observable, ObservableSubscription } from '../utilities'; +import { DocumentTransform, Observable, ObservableSubscription, offsetLimitPagination, removeDirectivesFromDocument } from '../utilities'; import { ApolloLink } from '../link/core'; -import { InMemoryCache, makeVar, PossibleTypesMap } from '../cache'; +import { createFragmentRegistry, InMemoryCache, makeVar, PossibleTypesMap } from '../cache'; import { ApolloError } from '../errors'; import { @@ -21,7 +23,10 @@ import { subscribeAndCount, mockSingleLink, withErrorSpy, + MockLink, + wait, } from '../testing'; +import { waitFor } from '@testing-library/react'; describe('client', () => { it('can be loaded via require', () => { @@ -940,6 +945,54 @@ describe('client', () => { .then(resolve, reject); }); + it('removes @client fields from the query before it reaches the link', async () => { + const result: { current: Operation | undefined } = { + current: undefined + } + + const query = gql` + query { + author { + firstName + lastName + isInCollection @client + } + } + `; + + const transformedQuery = gql` + query { + author { + firstName + lastName + } + } + `; + + const link = new ApolloLink((operation) => { + result.current = operation; + + return Observable.of({ + data: { + author: { + firstName: 'John', + lastName: 'Smith', + __typename: 'Author', + } + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ addTypename: false }), + }); + + await client.query({ query }); + + expect(print(result.current!.query)).toEqual(print(transformedQuery)); + }); + itAsync('should handle named fragments on mutations', (resolve, reject) => { const mutation = gql` mutation { @@ -3545,6 +3598,2507 @@ describe('@connection', () => { }); }); +describe('custom document transforms', () => { + it('runs custom document transform when calling `query`', async () => { + const query = gql` + query TestQuery { + dogs { + id + name + breed @custom + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! 
+ }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + dogs: [ + { + id: 1, + name: 'Buddy', + breed: 'German Shepard', + __typename: 'Dog' + } + ], + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + documentTransform, + }); + + const { data } = await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + dogs { + id + name + breed + __typename + } + } + `); + + expect(data).toEqual({ + dogs: [ + { + id: 1, + name: 'Buddy', + breed: 'German Shepard', + __typename: 'Dog', + } + ] + }); + }); + + it('requests and caches fields added from custom document transforms when calling `query`', async () => { + const query = gql` + query TestQuery { + dogs { + name + breed + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'dogs' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + { + kind: Kind.FIELD, + name: { kind: Kind.NAME, value: 'id' }, + }, + ...node.selectionSet.selections, + ] + } + } + } + } + }) + }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + dogs: [ + { + id: 1, + name: 'Buddy', + breed: 'German Shepard', + __typename: 'Dog' + } + ], + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + documentTransform, + }); + + const { data } = await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + dogs { + id + name + breed + __typename + } + } + `); + + expect(data).toEqual({ + dogs: [ + { + id: 1, + name: 'Buddy', + breed: 'German Shepard', + __typename: 'Dog' + } + ] + }); + + const cache = client.cache.extract(); + + expect(cache['Dog:1']).toEqual({ + id: 1, + name: 'Buddy', + breed: 'German Shepard', + __typename: 'Dog', + }); + }); + + it('runs document transforms before reading from the cache when calling `query`', async () => { + const query = gql` + query TestQuery { + product { + id + name + } + } + `; + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'product' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + ...node.selectionSet.selections, + { + kind: Kind.FRAGMENT_SPREAD, + name: { kind: Kind.NAME, value: 'ProductFields' } + } + ] + } + } + } + } + }); + }); + + const link = new ApolloLink(() => { + return Observable.of({ + data: { + product: { + __typename: 'Product', + id: 2, + name: 'unused', + description: 'unused', + } + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + fragments: createFragmentRegistry(gql` + fragment ProductFields on Product { + description + } + `) + }), + documentTransform, + }); + + // Use the transformed document to write to the cache to ensure it contains + // the fragment spread + client.writeQuery({ + query: documentTransform.transformDocument(query), + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Cached product', + description: 'Cached product description' + } + } + }); + + const { data } = await client.query({ query }); + + expect(data).toEqual({ + product: { + __typename: 'Product', + id: 1, + name: 'Cached product', + description: 'Cached product description', + } + }); + 
}); + + it('runs @client directives added from custom transforms through local state', async () => { + const query = gql` + query TestQuery { + currentUser { + id + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'currentUser' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + ...node.selectionSet.selections, + { + kind: Kind.FIELD, + name: { kind: Kind.NAME, value: 'isLoggedIn' }, + directives: [ + { + kind: Kind.DIRECTIVE, + name: { kind: Kind.NAME, value: 'client' } + } + ] + }, + ] + } + } + } + } + }) + }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + currentUser: { + id: 1, + __typename: 'User' , + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache({ + typePolicies: { + User: { + fields: { + isLoggedIn: { + read() { + return true; + } + } + } + } + } + }), + }); + + const { data } = await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + currentUser { + id + __typename + } + } + `); + + expect(data).toEqual({ + currentUser: { + id: 1, + isLoggedIn: true, + __typename: 'User' + } + }); + }); + + it('runs custom transform only once when calling `query`', async () => { + const query = gql` + query TestQuery { + currentUser { + id + } + } + `; + + const transform = jest.fn((document: DocumentNode) => document); + const documentTransform = new DocumentTransform(transform, { cache: false }); + + const link = new ApolloLink(() => { + return Observable.of({ + data: { + currentUser: { + id: 1, + __typename: 'User' , + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache(), + }); + + await client.query({ query }); + + expect(transform).toHaveBeenCalledTimes(1); + }); + + it('runs default transforms with no custom document transform when calling `query`', async () => { + const query = gql` + query TestQuery { + currentUser @nonreactive { + id + isLoggedIn @client + favoriteFlavors @connection { + flavor + } + } + } + `; + + let document: DocumentNode; + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of(); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + currentUser { + id + favoriteFlavors { + flavor + __typename + } + __typename + } + } + `); + }); + + it('runs custom transform when calling `mutate`', async () => { + const mutation = gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + id + username @custom + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! 
+ }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + changeUsername: { + id: 1, + username: operation.variables.username, + __typename: 'User' , + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache(), + }); + + const { data } = await client.mutate({ + mutation, + variables: { username: 'foo' } + }); + + expect(document!).toMatchDocument(gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + id + username + __typename + } + } + `); + + expect(data).toEqual({ + changeUsername: { + id: 1, + username: 'foo', + __typename: 'User' , + }, + }) + }); + + it('runs custom transform on queries defined in refetchQueries using legacy option when calling `mutate`', async () => { + const mutation = gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + id + username @custom + } + } + `; + + const query = gql` + query TestQuery { + currentUser { + id + username @custom + } + } + `; + + const requests: Operation[] = []; + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! + }); + + const mocks = [ + { + request: { + query: documentTransform.transformDocument(mutation), + variables: { username: 'foo' } + }, + result: { + data: { + changeUsername: { __typename: 'User', id: 1, username: 'foo' } + } + } + }, + { + request: { query: documentTransform.transformDocument(query) }, + result: { + data: { + currentUser: { __typename: 'User', id: 1, username: 'foo' } + } + } + } + ]; + + const link = new ApolloLink((operation, forward) => { + requests.push(operation); + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + documentTransform, + cache: new InMemoryCache(), + }); + + const { data } = await client.mutate({ + mutation, + variables: { username: 'foo' }, + refetchQueries: [{ query }], + awaitRefetchQueries: true, + }); + + expect(data).toEqual({ + changeUsername: { + id: 1, + username: 'foo', + __typename: 'User' , + }, + }) + + expect(requests[0].query).toMatchDocument(gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + id + username + __typename + } + } + `); + + expect(requests[1].query).toMatchDocument(gql` + query TestQuery { + currentUser { + id + username + __typename + } + } + `); + }); + + it('requests and caches fields added from custom document transforms when calling `mutate`', async () => { + const mutation = gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + username + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'changeUsername' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + { + kind: Kind.FIELD, + name: { kind: Kind.NAME, value: 'id' } + }, + ...node.selectionSet.selections, + ] + } + } + } + } + }) + }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + changeUsername: { + id: 1, + username: operation.variables.username, + __typename: 'User' , + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache(), + }); + + const { data } = await 
client.mutate({ + mutation, + variables: { username: 'foo' } + }); + + expect(document!).toMatchDocument(gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + id + username + __typename + } + } + `); + + expect(data).toEqual({ + changeUsername: { + id: 1, + username: 'foo', + __typename: 'User' , + }, + }); + + const cache = client.cache.extract(); + + expect(cache['User:1']).toEqual({ + __typename: 'User', + id: 1, + username: 'foo' + }); + }); + + it('runs custom transforms only once when running `mutation`', async () => { + const mutation = gql` + mutation TestMutation($username: String) { + changeUsername(username: $username) { + id + username + } + } + `; + + const transform = jest.fn((document: DocumentNode) => document); + const documentTransform = new DocumentTransform(transform, { cache: false }); + + const link = new ApolloLink((operation) => { + return Observable.of({ + data: { + changeUsername: { + id: 1, + username: operation.variables.username, + __typename: 'User' , + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache(), + }); + + await client.mutate({ mutation, variables: { username: 'foo' } }); + + expect(transform).toHaveBeenCalledTimes(1); + }); + + it('runs default transforms with no custom document transform when calling `mutate`', async () => { + const mutation = gql` + mutation TestMutation { + updateProfile @nonreactive { + id + isLoggedIn @client + favoriteFlavors @connection { + flavor + } + } + } + `; + + let document: DocumentNode; + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + updateProfile: { + __typename: 'Profile', + id: 1, + favoriteFlavors: [{ __typename: 'Flavor', flavor: 'Strawberry '}] + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + await client.mutate({ mutation }); + + expect(document!).toMatchDocument(gql` + mutation TestMutation { + updateProfile { + id + favoriteFlavors { + flavor + __typename + } + __typename + } + } + `); + }); + + it('runs custom document transforms when calling `subscribe`', async () => { + const query = gql` + subscription TestSubscription { + profileUpdated { + id + username @custom + } + } + `; + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! 
+ }); + + let document: DocumentNode; + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + profileUpdated: { + id: 1, + username: 'foo', + __typename: 'Profile' + } + } + }) + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache(), + }); + + const onNext = jest.fn() + + const subscription = client + .subscribe({ query }) + .subscribe(onNext); + + await waitFor(() => subscription.closed); + + expect(document!).toMatchDocument(gql` + subscription TestSubscription { + profileUpdated { + id + username + __typename + } + } + `); + + expect(onNext).toHaveBeenLastCalledWith({ + data: { + profileUpdated: { id: 1, username: 'foo', __typename: 'Profile', } + } + }); + }); + + it('requests and caches fields added from custom document transforms when calling `subscribe`', async () => { + const query = gql` + subscription TestSubscription { + profileUpdated { + username + } + } + `; + + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'profileUpdated' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + { + kind: Kind.FIELD, + name: { kind: Kind.NAME, value: 'id' } + }, + ...node.selectionSet.selections, + ] + } + } + } + } + }) + }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + profileUpdated: { + id: 1, + username: 'foo', + __typename: 'Profile', + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + documentTransform, + cache: new InMemoryCache(), + }); + + const onNext = jest.fn(); + + const subscription = client + .subscribe({ query }) + .subscribe(onNext); + + await waitFor(() => subscription.closed); + + expect(document!).toMatchDocument(gql` + subscription TestSubscription { + profileUpdated { + id + username + __typename + } + } + `); + + expect(onNext).toHaveBeenLastCalledWith({ + data: { + profileUpdated: { id: 1, username: 'foo', __typename: 'Profile', } + } + }); + + const cache = client.cache.extract(); + + expect(cache['Profile:1']).toEqual({ + __typename: 'Profile', + id: 1, + username: 'foo' + }); + }); + + it('runs custom transforms only once when calling `subscribe`', async () => { + const query = gql` + subscription TestSubscription { + profileUpdated { + username + } + } + `; + + const transform = jest.fn((document: DocumentNode) => document); + const documentTransform = new DocumentTransform(transform, { cache: false }); + + const client = new ApolloClient({ + link: ApolloLink.empty(), + documentTransform, + cache: new InMemoryCache(), + }); + + const subscription = client + .subscribe({ query }) + .subscribe(jest.fn()); + + await waitFor(() => subscription.closed); + + expect(transform).toHaveBeenCalledTimes(1); + }); + + it('runs default transforms with no custom document transform when calling `subscribe`', async () => { + const query = gql` + subscription TestSubscription { + profileUpdated @nonreactive { + id + isLoggedIn @client + favoriteFlavors @connection { + flavor + } + } + } + `; + + let document: DocumentNode; + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + profileUpdated: { + __typename: 'Profile', + id: 1, + favoriteFlavors: [{ __typename: 'Flavor', flavor: 'Strawberry '}] + }, + }, + }); + }); + + const client = new ApolloClient({ + 
link, + cache: new InMemoryCache(), + }); + + const subscription = client + .subscribe({ query }) + .subscribe(jest.fn()); + + await waitFor(() => subscription.closed); + + expect(document!).toMatchDocument(gql` + subscription TestSubscription { + profileUpdated { + id + favoriteFlavors { + flavor + __typename + } + __typename + } + } + `); + }); + + it('runs custom document transforms when subscribing to observable after calling `watchQuery`', async () => { + const query = gql` + query TestQuery { + currentUser { + id + name @custom + } + } + `; + + const transformedQuery = gql` + query TestQuery { + currentUser { + id + name + __typename + } + } + `; + + const transform = jest.fn((document: DocumentNode) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! + }); + + const documentTransform = new DocumentTransform( + transform, + { cache: false } + ); + + let document: DocumentNode; + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { currentUser: { __typename: 'User', id: 1, name: 'John Doe' }} + }) + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + documentTransform, + }); + + const observable = client.watchQuery({ query }); + + expect(transform).toHaveBeenCalledTimes(1); + // `options.query` should always reflect the raw, untransformed query + expect(observable.options.query).toMatchDocument(query); + // The computed `query` property should always reflect the last requested + // transformed document. + expect(observable.query).toMatchDocument(transformedQuery); + + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + currentUser: { __typename: 'User', id: 1, name: 'John Doe' } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(document).toMatchDocument(transformedQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(transformedQuery); + expect(transform).toHaveBeenCalledTimes(2); + }); + }); + + it('runs default transforms with no custom document transform when calling `watchQuery`', async () => { + const query = gql` + query TestQuery @nonreactive { + currentUser { + id + isLoggedIn @client + favorites @connection { + id + } + } + } + `; + + let document: DocumentNode; + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + currentUser: { + __typename: 'User', + id: 1, + favorites: [{ __typename: 'Favorite', id: 1 }] + }, + }, + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + const observable = client.watchQuery({ query }) + + observable.subscribe(jest.fn()); + + await waitFor(() => { + expect(document!).toMatchDocument(gql` + query TestQuery { + currentUser { + id + favorites { + id + __typename + } + __typename + } + } + `); + }); + }); + + it('runs document transforms before reading from the cache when calling `watchQuery`', async () => { + const query = gql` + query TestQuery { + product { + id + name + } + } + `; + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'product' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + ...node.selectionSet.selections, + { + kind: Kind.FRAGMENT_SPREAD, + name: { kind: Kind.NAME, value: 
'ProductFields' } + } + ] + } + } + } + } + }); + }); + + const link = new ApolloLink(() => { + return Observable.of({ + data: { + product: { + __typename: 'Product', + id: 2, + name: 'unused', + description: 'unused', + } + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + fragments: createFragmentRegistry(gql` + fragment ProductFields on Product { + description + } + `) + }), + documentTransform, + }); + + // Use the transformed document to write to the cache to ensure it contains + // the fragment spread + client.writeQuery({ + query: documentTransform.transformDocument(query), + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Cached product', + description: 'Cached product description' + } + } + }); + + const observable = client.watchQuery({ query }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Cached product', + description: 'Cached product description' + } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }) + }); + }); + + it('re-runs custom document transforms when calling `refetch`', async () => { + const query = gql` + query TestQuery { + product { + id + metrics @whenEnabled + } + } + `; + + const enabledQuery = gql` + query TestQuery { + product { + id + metrics + __typename + } + } + `; + + const disabledQuery = gql` + query TestQuery { + product { + id + __typename + } + } + `; + + const mocks = [ + { + request: { query: enabledQuery }, + result: { + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + } + } + }, + { + request: { query: disabledQuery }, + result: { + data: { + product: { __typename: 'Product', id: 1 } + } + } + } + ]; + + let enabled = true; + + const documentTransform = new DocumentTransform((document: DocumentNode) => { + return removeDirectivesFromDocument( + [{ name: 'whenEnabled', remove: !enabled }], + document + )!; + }, { cache: false }); + + let document: DocumentNode; + + const link = new ApolloLink((operation, forward) => { + document = operation.query; + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + cache: new InMemoryCache(), + documentTransform, + }); + + const observable = client.watchQuery({ query }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(document).toMatchDocument(enabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(enabledQuery); + }); + + enabled = false; + + const { data } = await observable.refetch(); + + expect(document!).toMatchDocument(disabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(disabledQuery); + + expect(data).toEqual({ + product: { __typename: 'Product', id: 1 } + }) + + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { __typename: 'Product', id: 1 } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + + it('re-runs custom document transforms when calling `fetchMore`', async () => { + const query = gql` + query TestQuery($offset: Int) { + products(offset: $offset) { + id 
+ metrics @whenEnabled + } + } + `; + + const enabledQuery = gql` + query TestQuery($offset: Int) { + products(offset: $offset) { + id + metrics + __typename + } + } + `; + + const disabledQuery = gql` + query TestQuery($offset: Int) { + products(offset: $offset) { + id + __typename + } + } + `; + + const mocks = [ + { + request: { query: enabledQuery, variables: { offset: 0 } }, + result: { + data: { + products: [{ __typename: 'Product', id: 1, metrics: '1000/vpm' }] + } + } + }, + { + request: { query: disabledQuery, variables: { offset: 1 } }, + result: { + data: { + products: [{ __typename: 'Product', id: 2 }] + } + } + } + ]; + + let enabled = true; + + const documentTransform = new DocumentTransform((document: DocumentNode) => { + return removeDirectivesFromDocument( + [{ name: 'whenEnabled', remove: !enabled }], + document + )!; + }, { cache: false }); + + let document: DocumentNode; + + const link = new ApolloLink((operation, forward) => { + document = operation.query; + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + products: offsetLimitPagination() + } + } + } + }), + documentTransform, + }); + + const observable = client.watchQuery({ query, variables: { offset: 0 } }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + products: [{ __typename: 'Product', id: 1, metrics: '1000/vpm' }] + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(document).toMatchDocument(enabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(enabledQuery); + }); + + enabled = false; + + const { data } = await observable.fetchMore({ variables: { offset: 1 }}); + + expect(document!).toMatchDocument(disabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(disabledQuery); + + expect(data).toEqual({ + products: [{ __typename: 'Product', id: 2 }] + }) + + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + products: [ + { __typename: 'Product', id: 1 }, + { __typename: 'Product', id: 2 } + ] + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + + it('runs custom document transforms on the passed query and original query when calling `fetchMore` with a different query', async () => { + const initialQuery = gql` + query TestQuery($offset: Int) { + currentUser { + id + } + products(offset: $offset) { + id + metrics @whenEnabled + } + } + `; + + const enabledInitialQuery = gql` + query TestQuery($offset: Int) { + currentUser { + id + __typename + } + products(offset: $offset) { + id + metrics + __typename + } + } + `; + + const disabledInitialQuery = gql` + query TestQuery($offset: Int) { + currentUser { + id + __typename + } + products(offset: $offset) { + id + __typename + } + } + `; + + const productsQuery = gql` + query TestQuery($offset: Int) { + products(offset: $offset) { + id + metrics @whenEnabled + } + } + `; + + const transformedProductsQuery = gql` + query TestQuery($offset: Int) { + products(offset: $offset) { + id + __typename + } + } + `; + + const mocks = [ + { + request: { query: enabledInitialQuery, variables: { offset: 0 } }, + result: { + data: { + currentUser: { id: 1 }, + products: [{ __typename: 'Product', id: 1, metrics: '1000/vpm' }] + } + } + }, + { + request: { 
query: transformedProductsQuery, variables: { offset: 1 } }, + result: { + data: { + products: [{ __typename: 'Product', id: 2 }] + } + } + } + ]; + + let enabled = true; + + const documentTransform = new DocumentTransform((document: DocumentNode) => { + return removeDirectivesFromDocument( + [{ name: 'whenEnabled', remove: !enabled }], + document + )!; + }, { cache: false }); + + let document: DocumentNode; + + const link = new ApolloLink((operation, forward) => { + document = operation.query; + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + products: { + keyArgs: false, + merge(existing = [], incoming) { + return [...existing, ...incoming] + } + } + } + } + } + }), + documentTransform, + }); + + const observable = client.watchQuery({ + query: initialQuery, + variables: { offset: 0 }, + }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + currentUser: { id: 1 }, + products: [{ __typename: 'Product', id: 1, metrics: '1000/vpm' }] + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(handleNext).toHaveBeenCalledTimes(1); + expect(document).toMatchDocument(enabledInitialQuery); + expect(observable.options.query).toMatchDocument(initialQuery); + expect(observable.query).toMatchDocument(enabledInitialQuery); + }); + + enabled = false; + + const { data } = await observable.fetchMore({ + query: productsQuery, + variables: { offset: 1 }, + }); + + expect(data).toEqual({ + products: [{ __typename: 'Product', id: 2 }] + }); + + expect(document!).toMatchDocument(transformedProductsQuery); + expect(observable.options.query).toMatchDocument(initialQuery); + // Even though we pass a different query to `fetchMore`, we don't want to + // override the original query. We do however run transforms on the + // initial query to ensure the broadcasted result and the cache match + // the expected query document in case the transforms contain a runtime + // condition that impacts the query in a significant way (such as removing + // a field). + expect(observable.query).toMatchDocument(disabledInitialQuery); + + // QueryInfo.notify is run in a setTimeout, so give time for it to run + // before we make assertions on it. + await wait(0); + + expect(handleNext).toHaveBeenCalledTimes(2); + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + currentUser: { id: 1 }, + products: [ + { __typename: 'Product', id: 1 }, + { __typename: 'Product', id: 2 } + ] + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + + it('re-runs custom document transforms when calling `setVariables`', async () => { + const query = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + metrics @whenEnabled + } + } + `; + + const enabledQuery = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + metrics + __typename + } + } + `; + + const disabledQuery = gql` + query TestQuery($id: ID!) 
{ + product(id: $id) { + id + __typename + } + } + `; + + const mocks = [ + { + request: { query: enabledQuery, variables: { id: 1 } }, + result: { + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + } + } + }, + { + request: { query: disabledQuery, variables: { id: 2 } }, + result: { + data: { + product: { __typename: 'Product', id: 2 } + } + } + } + ]; + + let enabled = true; + + const documentTransform = new DocumentTransform((document: DocumentNode) => { + return removeDirectivesFromDocument( + [{ name: 'whenEnabled', remove: !enabled }], + document + )!; + }, { cache: false }); + + let document: DocumentNode; + + const link = new ApolloLink((operation, forward) => { + document = operation.query; + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + cache: new InMemoryCache(), + documentTransform, + }); + + const observable = client.watchQuery({ query, variables: { id: 1 } }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(document).toMatchDocument(enabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(enabledQuery); + }); + + enabled = false; + + const result = await observable.setVariables({ id: 2 }); + + expect(document!).toMatchDocument(disabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(disabledQuery); + + expect(result!.data).toEqual({ + product: { __typename: 'Product', id: 2 } + }) + + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { __typename: 'Product', id: 2 } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + + it('re-runs custom document transforms when calling `setOptions`', async () => { + const query = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + metrics @whenEnabled + } + } + `; + + const enabledQuery = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + metrics + __typename + } + } + `; + + const disabledQuery = gql` + query TestQuery($id: ID!) 
{ + product(id: $id) { + id + __typename + } + } + `; + + const mocks = [ + { + request: { query: enabledQuery, variables: { id: 1 } }, + result: { + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + } + } + }, + { + request: { query: disabledQuery, variables: { id: 2 } }, + result: { + data: { + product: { __typename: 'Product', id: 2 } + } + } + } + ]; + + let enabled = true; + + const documentTransform = new DocumentTransform((document: DocumentNode) => { + return removeDirectivesFromDocument( + [{ name: 'whenEnabled', remove: !enabled }], + document + )!; + }, { cache: false }); + + let document: DocumentNode; + + const link = new ApolloLink((operation, forward) => { + document = operation.query; + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + cache: new InMemoryCache(), + documentTransform, + }); + + const observable = client.watchQuery({ query, variables: { id: 1 } }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(document).toMatchDocument(enabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(enabledQuery); + }); + + enabled = false; + + const { data } = await observable.setOptions({ variables: { id: 2 }}); + + expect(document!).toMatchDocument(disabledQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(disabledQuery); + + expect(data).toEqual({ + product: { __typename: 'Product', id: 2 } + }) + + expect(handleNext).toHaveBeenLastCalledWith({ + data: { + product: { __typename: 'Product', id: 2 } + }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + + it('runs custom document transforms when passing a new query to `setOptions`', async () => { + const query = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + metrics @custom + } + } + `; + + const transformedQuery = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + metrics + __typename + } + } + `; + + const updatedQuery = gql` + query TestQuery($id: ID!) { + product(id: $id) { + id + name + metrics @custom + } + } + `; + + const transformedUpdatedQuery = gql` + query TestQuery($id: ID!) 
{ + product(id: $id) { + id + name + metrics + __typename + } + } + `; + + const mocks = [ + { + request: { query: transformedQuery, variables: { id: 1 } }, + result: { + data: { + product: { __typename: 'Product', id: 1, metrics: '1000/vpm' } + } + } + }, + { + request: { query: transformedUpdatedQuery, variables: { id: 1 } }, + result: { + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Acme Inc Product', + metrics: '1000/vpm' + } + } + } + } + ]; + + const documentTransform = new DocumentTransform((document: DocumentNode) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)!; + }); + + let document: DocumentNode; + + const link = new ApolloLink((operation, forward) => { + document = operation.query; + + return forward(operation); + }); + + const client = new ApolloClient({ + link: ApolloLink.from([link, new MockLink(mocks)]), + cache: new InMemoryCache(), + documentTransform, + }); + + const observable = client.watchQuery({ query, variables: { id: 1 } }); + const handleNext = jest.fn(); + + observable.subscribe(handleNext); + + await waitFor(() => { + expect(handleNext).toHaveBeenLastCalledWith({ + data: mocks[0].result.data, + loading: false, + networkStatus: NetworkStatus.ready, + }); + + expect(document).toMatchDocument(transformedQuery); + expect(observable.options.query).toMatchDocument(query); + expect(observable.query).toMatchDocument(transformedQuery); + }); + + const { data } = await observable.setOptions({ query: updatedQuery }); + + expect(document!).toMatchDocument(transformedUpdatedQuery); + expect(observable.options.query).toMatchDocument(updatedQuery); + expect(observable.query).toMatchDocument(transformedUpdatedQuery); + + expect(data).toEqual(mocks[1].result.data); + + expect(handleNext).toHaveBeenLastCalledWith({ + data: mocks[1].result.data, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + + it('runs custom document transforms with fragments defined in the fragment registry', async () => { + const query = gql` + query TestQuery { + product { + id + name @custom + ...ProductFields + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)!
+ }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Product', + description: 'Product description', + } + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + fragments: createFragmentRegistry(gql` + fragment ProductFields on Product { + description @custom + } + `) + }), + documentTransform, + }); + + const { data } = await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + product { + id + name + ...ProductFields + __typename + } + } + + fragment ProductFields on Product { + description + __typename + } + `); + + expect(data).toEqual({ + product: { + __typename: 'Product', + id: 1, + name: 'Product', + description: 'Product description', + } + }); + }); + + it('runs custom document transforms on fragments that override registered fragments in the fragment registry', async () => { + const query = gql` + query TestQuery { + product { + id + name @custom + ...ProductFields + } + } + + fragment ProductFields on Product { + description @custom + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! + }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Product', + description: 'Product description', + } + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + fragments: createFragmentRegistry(gql` + fragment ProductFields on Product { + unused @custom + } + `) + }), + documentTransform, + }); + + const { data } = await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + product { + id + name + ...ProductFields + __typename + } + } + + fragment ProductFields on Product { + description + __typename + } + `); + + expect(data).toEqual({ + product: { + __typename: 'Product', + id: 1, + name: 'Product', + description: 'Product description', + } + }); + }); + + it('adds fragment definitions to the query for fragment spreads added from custom document transforms', async () => { + const query = gql` + query TestQuery { + product { + id + name + } + } + `; + + let document: DocumentNode + + const documentTransform = new DocumentTransform((document) => { + return visit(document, { + Field(node) { + if (node.name.value === 'product' && node.selectionSet) { + return { + ...node, + selectionSet: { + ...node.selectionSet, + selections: [ + ...node.selectionSet.selections, + { + kind: Kind.FRAGMENT_SPREAD, + name: { kind: Kind.NAME, value: 'ProductFields' } + } + ] + } + } + } + } + }); + }); + + const link = new ApolloLink((operation) => { + document = operation.query; + + return Observable.of({ + data: { + product: { + __typename: 'Product', + id: 1, + name: 'Product', + description: 'Product description', + } + } + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + fragments: createFragmentRegistry(gql` + fragment ProductFields on Product { + description + } + `) + }), + documentTransform, + }); + + const { data } = await client.query({ query }); + + expect(document!).toMatchDocument(gql` + query TestQuery { + product { + id + name + __typename + ...ProductFields + } + } + + fragment ProductFields on Product { + description + __typename + } + `); + +
expect(data).toEqual({ + product: { + __typename: 'Product', + id: 1, + name: 'Product', + description: 'Product description', + } + }); + }); + + it('runs custom transforms on active queries when calling `refetchQueries` with "include"', async () => { + const aQuery = gql` + query A { + a @custom + } + `; + const bQuery = gql` + query B { + b @custom + } + `; + const abQuery = gql` + query AB { + a @custom + b + } + `; + + const requests: Operation[] = [] + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! + }); + + const client = new ApolloClient({ + documentTransform, + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + requests.push(operation); + + return Observable.of({ + data: operation.operationName + .split('') + .reduce<Record<string, string>>( + (memo, letter) => ({ + ...memo, + [letter.toLowerCase()]: letter.toUpperCase() + }), + {} + ) + }); + }) + }); + + client.watchQuery({ query: aQuery }).subscribe(jest.fn()); + client.watchQuery({ query: bQuery }).subscribe(jest.fn()); + // purposely avoid subscribing to prevent it from being an "active" query + client.watchQuery({ query: abQuery }); + + await waitFor(() => { + return ( + client.readQuery({ query: aQuery }) && + client.readQuery({ query: bQuery }) + ); + }); + + expect(requests.length).toBe(2); + expect(requests[0].query).toMatchDocument(gql` + query A { + a + } + `); + expect(requests[1].query).toMatchDocument(gql` + query B { + b + } + `); + + const results = await client.refetchQueries({ include: 'active' }); + + expect(results.map(r => r.data)).toEqual([ + { a: 'A' }, + { b: 'B' } + ]); + + expect(requests.length).toBe(4); + expect(requests[2].query).toMatchDocument(gql` + query A { + a + } + `); + expect(requests[3].query).toMatchDocument(gql` + query B { + b + } + `); + }); + + it('runs custom transforms on all queries when calling `refetchQueries` with "all"', async () => { + const aQuery = gql` + query A { + a @custom + } + `; + const bQuery = gql` + query B { + b @custom + } + `; + const abQuery = gql` + query AB { + a @custom + b + } + `; + + const requests: Operation[] = [] + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! 
+ }); + + const client = new ApolloClient({ + documentTransform, + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + requests.push(operation); + + return Observable.of({ + data: operation.operationName + .split('') + .reduce<Record<string, string>>( + (memo, letter) => ({ + ...memo, + [letter.toLowerCase()]: letter.toUpperCase() + }), + {} + ) + }); + }) + }); + + client.watchQuery({ query: aQuery }).subscribe(jest.fn()); + client.watchQuery({ query: bQuery }).subscribe(jest.fn()); + // purposely avoid subscribing to prevent it from being an "active" query + client.watchQuery({ query: abQuery }); + + await waitFor(() => { + return ( + client.readQuery({ query: aQuery }) && + client.readQuery({ query: bQuery }) + ); + }); + + expect(requests.length).toBe(2); + expect(requests[0].query).toMatchDocument(gql` + query A { + a + } + `); + expect(requests[1].query).toMatchDocument(gql` + query B { + b + } + `); + + const results = await client.refetchQueries({ include: 'all' }); + + expect(results.map(r => r.data)).toEqual([ + { a: 'A' }, + { b: 'B' }, + { a: 'A', b: 'B' } + ]); + + expect(requests.length).toBe(5); + expect(requests[2].query).toMatchDocument(gql` + query A { + a + } + `); + expect(requests[3].query).toMatchDocument(gql` + query B { + b + } + `); + expect(requests[4].query).toMatchDocument(gql` + query AB { + a + b + } + `); + }); + + it('runs custom transforms on matched queries when calling `refetchQueries` with string array', async () => { + const aQuery = gql` + query A { + a @custom + } + `; + const bQuery = gql` + query B { + b @custom + } + `; + + const requests: Operation[] = [] + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! + }); + + const client = new ApolloClient({ + documentTransform, + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + requests.push(operation); + + return Observable.of({ + data: operation.operationName + .split('') + .reduce<Record<string, string>>( + (memo, letter) => ({ + ...memo, + [letter.toLowerCase()]: letter.toUpperCase() + }), + {} + ) + }); + }) + }); + + client.watchQuery({ query: aQuery }).subscribe(jest.fn()); + client.watchQuery({ query: bQuery }).subscribe(jest.fn()); + + await waitFor(() => { + return ( + client.readQuery({ query: aQuery }) && + client.readQuery({ query: bQuery }) + ); + }); + + expect(requests.length).toBe(2); + expect(requests[0].query).toMatchDocument(gql` + query A { + a + } + `); + expect(requests[1].query).toMatchDocument(gql` + query B { + b + } + `); + + const results = await client.refetchQueries({ + include: ['B'] + }); + + expect(results.map(r => r.data)).toEqual([ + { b: 'B' }, + ]); + + expect(requests.length).toBe(3); + expect(requests[2].query).toMatchDocument(gql` + query B { + b + } + `); + }); + + it('runs custom transforms on matched queries when calling `refetchQueries` with document nodes', async () => { + const aQuery = gql` + query A { + a @custom + } + `; + const bQuery = gql` + query B { + b @custom + } + `; + + const requests: Operation[] = [] + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'custom' }], document)! 
+ }); + + const client = new ApolloClient({ + documentTransform, + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + requests.push(operation); + + return Observable.of({ + data: operation.operationName + .split('') + .reduce<Record<string, string>>( + (memo, letter) => ({ + ...memo, + [letter.toLowerCase()]: letter.toUpperCase() + }), + {} + ) + }); + }) + }); + + client.watchQuery({ query: aQuery }).subscribe(jest.fn()); + client.watchQuery({ query: bQuery }).subscribe(jest.fn()); + + await waitFor(() => { + return ( + client.readQuery({ query: aQuery }) && + client.readQuery({ query: bQuery }) + ); + }); + + expect(requests.length).toBe(2); + expect(requests[0].query).toMatchDocument(gql` + query A { + a + } + `); + expect(requests[1].query).toMatchDocument(gql` + query B { + b + } + `); + + const results = await client.refetchQueries({ + include: [bQuery] + }); + + expect(results.map(r => r.data)).toEqual([ + { b: 'B' }, + ]); + + expect(requests.length).toBe(3); + expect(requests[2].query).toMatchDocument(gql` + query B { + b + } + `); + }); +}); + function clientRoundtrip( resolve: (result: any) => any, reject: (reason: any) => any, diff --git a/src/__tests__/exports.ts b/src/__tests__/exports.ts --- a/src/__tests__/exports.ts +++ b/src/__tests__/exports.ts @@ -6,6 +6,7 @@ import * as cache from "../cache"; import * as client from ".."; import * as core from "../core"; +import * as dev from '../dev'; import * as errors from "../errors"; import * as linkBatch from "../link/batch"; import * as linkBatchHTTP from "../link/batch-http"; @@ -15,6 +16,7 @@ import * as linkError from "../link/error"; import * as linkHTTP from "../link/http"; import * as linkPersistedQueries from "../link/persisted-queries"; import * as linkRetry from "../link/retry"; +import * as linkRemoveTypename from "../link/remove-typename"; import * as linkSchema from "../link/schema"; import * as linkSubscriptions from "../link/subscriptions"; import * as linkUtils from "../link/utils"; @@ -48,6 +50,7 @@ describe('exports of public entry points', () => { check("@apollo/client", client); check("@apollo/client/cache", cache); check("@apollo/client/core", core); + check("@apollo/client/dev", dev); check("@apollo/client/errors", errors); check("@apollo/client/link/batch", linkBatch); check("@apollo/client/link/batch-http", linkBatchHTTP); @@ -56,6 +59,7 @@ describe('exports of public entry points', () => { check("@apollo/client/link/error", linkError); check("@apollo/client/link/http", linkHTTP); check("@apollo/client/link/persisted-queries", linkPersistedQueries); + check("@apollo/client/link/remove-typename", linkRemoveTypename); check("@apollo/client/link/retry", linkRetry); check("@apollo/client/link/schema", linkSchema); check("@apollo/client/link/subscriptions", linkSubscriptions); diff --git a/src/__tests__/local-state/__snapshots__/export.ts.snap b/src/__tests__/local-state/__snapshots__/export.ts.snap --- a/src/__tests__/local-state/__snapshots__/export.ts.snap +++ b/src/__tests__/local-state/__snapshots__/export.ts.snap @@ -4,14 +4,18 @@ exports[`@client @export tests should NOT refetch if an @export variable has not [MockFunction] { "calls": Array [ Array [ - "Missing field 'postCount' while writing result { - \\"currentAuthorId\\": 101 -}", + "Missing field '%s' while writing result %o", + "postCount", + Object { + "currentAuthorId": 101, + }, ], Array [ - "Missing field 'postCount' while writing result { - \\"currentAuthorId\\": 100 -}", + "Missing field '%s' while writing result %o", + 
"postCount", + Object { + "currentAuthorId": 100, + }, ], ], "results": Array [ @@ -31,13 +35,15 @@ exports[`@client @export tests should allow @client @export variables to be used [MockFunction] { "calls": Array [ Array [ - "Missing field 'postCount' while writing result { - \\"currentAuthor\\": { - \\"name\\": \\"John Smith\\", - \\"authorId\\": 100, - \\"__typename\\": \\"Author\\" - } -}", + "Missing field '%s' while writing result %o", + "postCount", + Object { + "currentAuthor": Object { + "__typename": "Author", + "authorId": 100, + "name": "John Smith", + }, + }, ], ], "results": Array [ @@ -53,63 +59,79 @@ exports[`@client @export tests should refetch if an @export variable changes, th [MockFunction] { "calls": Array [ Array [ - "Missing field 'postCount' while writing result { - \\"appContainer\\": { - \\"systemDetails\\": { - \\"currentAuthor\\": { - \\"name\\": \\"John Smith\\", - \\"authorId\\": 100, - \\"__typename\\": \\"Author\\" + "Missing field '%s' while writing result %o", + "postCount", + Object { + "appContainer": Object { + "__typename": "AppContainer", + "systemDetails": Object { + "__typename": "SystemDetails", + "currentAuthor": Object { + "__typename": "Author", + "authorId": 100, + "name": "John Smith", + }, + }, + }, }, - \\"__typename\\": \\"SystemDetails\\" - }, - \\"__typename\\": \\"AppContainer\\" - } -}", ], Array [ - "Missing field 'title' while writing result { - \\"loggedInReviewerId\\": 100, - \\"__typename\\": \\"Post\\", - \\"id\\": 10 -}", + "Missing field '%s' while writing result %o", + "title", + Object { + "__typename": "Post", + "id": 10, + "loggedInReviewerId": 100, + }, ], Array [ - "Missing field 'reviewerDetails' while writing result { - \\"postRequiringReview\\": { - \\"loggedInReviewerId\\": 100, - \\"__typename\\": \\"Post\\", - \\"id\\": 10 - } -}", + "Missing field '%s' while writing result %o", + "reviewerDetails", + Object { + "postRequiringReview": Object { + "__typename": "Post", + "id": 10, + "loggedInReviewerId": 100, + }, + }, ], Array [ - "Missing field 'id' while writing result { - \\"__typename\\": \\"Post\\" -}", + "Missing field '%s' while writing result %o", + "id", + Object { + "__typename": "Post", + }, ], Array [ - "Missing field 'title' while writing result { - \\"__typename\\": \\"Post\\" -}", + "Missing field '%s' while writing result %o", + "title", + Object { + "__typename": "Post", + }, ], Array [ - "Missing field 'reviewerDetails' while writing result { - \\"postRequiringReview\\": { - \\"__typename\\": \\"Post\\" - } -}", + "Missing field '%s' while writing result %o", + "reviewerDetails", + Object { + "postRequiringReview": Object { + "__typename": "Post", + }, + }, ], Array [ - "Missing field 'post' while writing result { - \\"primaryReviewerId\\": 100, - \\"secondaryReviewerId\\": 200 -}", + "Missing field '%s' while writing result %o", + "post", + Object { + "primaryReviewerId": 100, + "secondaryReviewerId": 200, + }, ], Array [ - "Missing field 'postCount' while writing result { - \\"currentAuthorId\\": 100 -}", + "Missing field '%s' while writing result %o", + "postCount", + Object { + "currentAuthorId": 100, + }, ], ], "results": Array [ diff --git a/src/__tests__/local-state/__snapshots__/general.ts.snap b/src/__tests__/local-state/__snapshots__/general.ts.snap --- a/src/__tests__/local-state/__snapshots__/general.ts.snap +++ b/src/__tests__/local-state/__snapshots__/general.ts.snap @@ -8,9 +8,11 @@ exports[`Combining client and server state/operations should handle a simple que [MockFunction] { 
"calls": Array [ Array [ - "Missing field 'lastCount' while writing result { - \\"count\\": 0 -}", + "Missing field '%s' while writing result %o", + "lastCount", + Object { + "count": 0, + }, ], ], "results": Array [ @@ -26,11 +28,13 @@ exports[`Combining client and server state/operations should support nested quer [MockFunction] { "calls": Array [ Array [ - "Missing field 'lastName' while writing result { - \\"__typename\\": \\"User\\", - \\"id\\": 123, - \\"firstName\\": \\"John\\" -}", + "Missing field '%s' while writing result %o", + "lastName", + Object { + "__typename": "User", + "firstName": "John", + "id": 123, + }, ], ], "results": Array [ diff --git a/src/cache/core/__tests__/cache.ts b/src/cache/core/__tests__/cache.ts --- a/src/cache/core/__tests__/cache.ts +++ b/src/cache/core/__tests__/cache.ts @@ -2,7 +2,7 @@ import gql from 'graphql-tag'; import { ApolloCache } from '../cache'; import { Cache, DataProxy } from '../..'; import { Reference } from '../../../utilities/graphql/storeUtils'; - +import { expectTypeOf } from 'expect-type' class TestCache extends ApolloCache<unknown> { constructor() { super(); @@ -308,3 +308,142 @@ describe('abstract cache', () => { }); }); }); + +describe.skip('Cache type tests', () => { + describe('modify', () => { + test('field types are inferred correctly from passed entity type', () => { + const cache = new TestCache(); + cache.modify<{ + prop1: string; + prop2: number; + child: { + someObject: true + }, + children: { + anotherObject: false + }[] + }>({ + fields: { + prop1(field) { + expectTypeOf(field).toEqualTypeOf<string>(); + return field; + }, + prop2(field) { + expectTypeOf(field).toEqualTypeOf<number>(); + return field; + }, + child(field) { + expectTypeOf(field).toEqualTypeOf<{ someObject: true } | Reference>(); + return field; + }, + children(field) { + expectTypeOf(field).toEqualTypeOf<(ReadonlyArray<{ anotherObject: false }>) | ReadonlyArray<Reference>>(); + return field; + } + } + }) + }) + test('field method needs to return a value of the correct type', () => { + const cache = new TestCache(); + cache.modify<{ p1: string, p2: string, p3: string, p4: string, p5: string }>({ + fields: { + p1() { return "" }, + // @ts-expect-error returns wrong type + p2() { return 1 }, + // @ts-expect-error needs return statement + p3() {}, + p4(_, { DELETE }) { return DELETE }, + p5(_, { INVALIDATE }) { return INVALIDATE }, + } + }) + }) + test('passing a function as `field` should infer all entity properties as possible input (interfaces)', () => { + interface ParentEntity { + prop1: string; + prop2: number; + child: ChildEntity; + } + interface ChildEntity { + prop1: boolean; + prop2: symbol; + children: OtherChildEntry[]; + } + interface OtherChildEntry { + foo: false + } + + const cache = new TestCache(); + // with reference + cache.modify<ParentEntity>({ + id: 'foo', + fields(field) { + expectTypeOf(field).toEqualTypeOf<string | number | ChildEntity | Reference>(); + return field; + } + }) + // without reference + cache.modify<ChildEntity>({ + id: 'foo', + fields(field) { + expectTypeOf(field).toEqualTypeOf<boolean | symbol | readonly OtherChildEntry[] | readonly Reference[]>(); + return field; + } + }) + }) + test('passing a function as `field` should infer all entity properties as possible input (types)', () => { + type ParentEntity = { + prop1: string; + prop2: number; + child: ChildEntity; + } + type ChildEntity = { + prop1: boolean; + prop2: symbol; + children: OtherChildEntry[]; + } + type OtherChildEntry = { + foo: false + } + + const 
cache = new TestCache(); + // with reference + cache.modify<ParentEntity>({ + id: 'foo', + fields(field) { + expectTypeOf(field).toEqualTypeOf<string | number | ChildEntity | Reference>(); + return field; + } + }) + // without reference + cache.modify<ChildEntity>({ + id: 'foo', + fields(field) { + expectTypeOf(field).toEqualTypeOf<boolean | symbol | readonly OtherChildEntry[] | readonly Reference[]>(); + return field; + } + }) + }) + test('passing a function as `field` w/o specifying an entity type', () => { + const cache = new TestCache(); + cache.modify({ + id: 'foo', + fields(field) { + expectTypeOf(field).toEqualTypeOf<any>(); + return field; + } + }); + }); + test('passing a function as `field` property w/o specifying an entity type', () => { + const cache = new TestCache(); + cache.modify({ + id: 'foo', + fields: { + p1(field) { + expectTypeOf(field).toEqualTypeOf<any>(); + return field; + } + } + }); + }); + }); +}); diff --git a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap --- a/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/policies.ts.snap @@ -54,29 +54,33 @@ exports[`type policies complains about missing key fields 1`] = ` [MockFunction] { "calls": Array [ Array [ - "Missing field 'title' while writing result { - \\"year\\": 2011, - \\"theInformationBookData\\": { - \\"__typename\\": \\"Book\\", - \\"isbn\\": \\"1400096235\\", - \\"title\\": \\"The Information\\", - \\"subtitle\\": \\"A History, a Theory, a Flood\\", - \\"author\\": { - \\"name\\": \\"James Gleick\\" - } - } -}", + "Missing field '%s' while writing result %o", + "title", + Object { + "theInformationBookData": Object { + "__typename": "Book", + "author": Object { + "name": "James Gleick", + }, + "isbn": "1400096235", + "subtitle": "A History, a Theory, a Flood", + "title": "The Information", + }, + "year": 2011, + }, ], Array [ - "Missing field 'year' while writing result { - \\"__typename\\": \\"Book\\", - \\"isbn\\": \\"1400096235\\", - \\"title\\": \\"The Information\\", - \\"subtitle\\": \\"A History, a Theory, a Flood\\", - \\"author\\": { - \\"name\\": \\"James Gleick\\" - } -}", + "Missing field '%s' while writing result %o", + "year", + Object { + "__typename": "Book", + "author": Object { + "name": "James Gleick", + }, + "isbn": "1400096235", + "subtitle": "A History, a Theory, a Flood", + "title": "The Information", + }, ], ], "results": Array [ @@ -1280,11 +1284,13 @@ exports[`type policies field policies readField helper function calls custom rea [MockFunction] { "calls": Array [ Array [ - "Missing field 'blockers' while writing result { - \\"__typename\\": \\"Task\\", - \\"id\\": 4, - \\"description\\": \\"grandchild task\\" -}", + "Missing field '%s' while writing result %o", + "blockers", + Object { + "__typename": "Task", + "description": "grandchild task", + "id": 4, + }, ], ], "results": Array [ @@ -1300,34 +1306,40 @@ exports[`type policies field policies runs nested merge functions as well as anc [MockFunction] { "calls": Array [ Array [ - "Missing field 'time' while writing result { - \\"__typename\\": \\"Event\\", - \\"id\\": 123 -}", + "Missing field '%s' while writing result %o", + "time", + Object { + "__typename": "Event", + "id": 123, + }, ], Array [ - "Missing field 'time' while writing result { - \\"__typename\\": \\"Event\\", - \\"id\\": 345, - \\"name\\": \\"Rooftop dog party\\", - \\"attendees\\": [ - { - \\"__typename\\": \\"Attendee\\", - 
\\"id\\": 456, - \\"name\\": \\"Inspector Beckett\\" - }, - { - \\"__typename\\": \\"Attendee\\", - \\"id\\": 234 - } - ] -}", + "Missing field '%s' while writing result %o", + "time", + Object { + "__typename": "Event", + "attendees": Array [ + Object { + "__typename": "Attendee", + "id": 456, + "name": "Inspector Beckett", + }, + Object { + "__typename": "Attendee", + "id": 234, + }, + ], + "id": 345, + "name": "Rooftop dog party", + }, ], Array [ - "Missing field 'name' while writing result { - \\"__typename\\": \\"Attendee\\", - \\"id\\": 234 -}", + "Missing field '%s' while writing result %o", + "name", + Object { + "__typename": "Attendee", + "id": 234, + }, ], ], "results": Array [ @@ -1351,10 +1363,12 @@ exports[`type policies readField warns if explicitly passed undefined \`from\` o [MockFunction] { "calls": Array [ Array [ - "Undefined 'from' passed to readField with arguments [{\\"fieldName\\":\\"firstName\\",\\"from\\":<undefined>}]", + "Undefined 'from' passed to readField with arguments %s", + "[{\\"fieldName\\":\\"firstName\\",\\"from\\":<undefined>}]", ], Array [ - "Undefined 'from' passed to readField with arguments [\\"lastName\\",<undefined>]", + "Undefined 'from' passed to readField with arguments %s", + "[\\"lastName\\",<undefined>]", ], ], "results": Array [ diff --git a/src/cache/inmemory/__tests__/__snapshots__/roundtrip.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/roundtrip.ts.snap --- a/src/cache/inmemory/__tests__/__snapshots__/roundtrip.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/roundtrip.ts.snap @@ -4,11 +4,13 @@ exports[`roundtrip fragments should throw an error on two of the same inline fra [MockFunction] { "calls": Array [ Array [ - "Missing field 'rank' while writing result { - \\"__typename\\": \\"Jedi\\", - \\"name\\": \\"Luke Skywalker\\", - \\"side\\": \\"bright\\" -}", + "Missing field '%s' while writing result %o", + "rank", + Object { + "__typename": "Jedi", + "name": "Luke Skywalker", + "side": "bright", + }, ], ], "results": Array [ @@ -24,11 +26,13 @@ exports[`roundtrip fragments should throw on error on two of the same spread fra [MockFunction] { "calls": Array [ Array [ - "Missing field 'rank' while writing result { - \\"__typename\\": \\"Jedi\\", - \\"name\\": \\"Luke Skywalker\\", - \\"side\\": \\"bright\\" -}", + "Missing field '%s' while writing result %o", + "rank", + Object { + "__typename": "Jedi", + "name": "Luke Skywalker", + "side": "bright", + }, ], ], "results": Array [ diff --git a/src/cache/inmemory/__tests__/__snapshots__/writeToStore.ts.snap b/src/cache/inmemory/__tests__/__snapshots__/writeToStore.ts.snap --- a/src/cache/inmemory/__tests__/__snapshots__/writeToStore.ts.snap +++ b/src/cache/inmemory/__tests__/__snapshots__/writeToStore.ts.snap @@ -75,10 +75,12 @@ exports[`writing to the store should not keep reference when type of mixed inlin [MockFunction] { "calls": Array [ Array [ - "Missing field 'id' while writing result { - \\"__typename\\": \\"Cat\\", - \\"name\\": \\"cat\\" -}", + "Missing field '%s' while writing result %o", + "id", + Object { + "__typename": "Cat", + "name": "cat", + }, ], ], "results": Array [ @@ -205,12 +207,14 @@ exports[`writing to the store writeResultToStore shape checking should warn when [MockFunction] { "calls": Array [ Array [ - "Missing field 'price' while writing result { - \\"id\\": \\"1\\", - \\"name\\": \\"Todo 1\\", - \\"description\\": \\"Description 1\\", - \\"__typename\\": \\"ShoppingCartItem\\" -}", + "Missing field '%s' while writing result %o", + "price", + 
Object { + "__typename": "ShoppingCartItem", + "description": "Description 1", + "id": "1", + "name": "Todo 1", + }, ], ], "results": Array [ @@ -226,10 +230,12 @@ exports[`writing to the store writeResultToStore shape checking should warn when [MockFunction] { "calls": Array [ Array [ - "Missing field 'description' while writing result { - \\"id\\": \\"1\\", - \\"name\\": \\"Todo 1\\" -}", + "Missing field '%s' while writing result %o", + "description", + Object { + "id": "1", + "name": "Todo 1", + }, ], ], "results": Array [ @@ -245,10 +251,12 @@ exports[`writing to the store writeResultToStore shape checking should write the [MockFunction] { "calls": Array [ Array [ - "Missing field 'description' while writing result { - \\"id\\": \\"1\\", - \\"name\\": \\"Todo 1\\" -}", + "Missing field '%s' while writing result %o", + "description", + Object { + "id": "1", + "name": "Todo 1", + }, ], ], "results": Array [ diff --git a/src/cache/inmemory/__tests__/cache.ts b/src/cache/inmemory/__tests__/cache.ts --- a/src/cache/inmemory/__tests__/cache.ts +++ b/src/cache/inmemory/__tests__/cache.ts @@ -1,4 +1,5 @@ import gql, { disableFragmentWarnings } from 'graphql-tag'; +import { expectTypeOf } from 'expect-type' import { cloneDeep } from '../../../utilities/common/cloneDeep'; import { makeReference, Reference, makeVar, TypedDocumentNode, isReference, DocumentNode } from '../../../core'; @@ -6,8 +7,6 @@ import { Cache } from '../../../cache'; import { InMemoryCache } from '../inMemoryCache'; import { InMemoryCacheConfig } from '../types'; -jest.mock('optimism'); -import { wrap } from 'optimism'; import { StoreReader } from '../readFromStore'; import { StoreWriter } from '../writeToStore'; import { ObjectCanon } from '../object-canon'; @@ -2011,32 +2010,21 @@ describe('Cache', () => { }); describe('resultCacheMaxSize', () => { - let wrapSpy: jest.Mock = wrap as jest.Mock; - beforeEach(() => { - wrapSpy.mockClear(); - }); - - it("does not set max size on caches if resultCacheMaxSize is not configured", () => { - new InMemoryCache(); - expect(wrapSpy).toHaveBeenCalled(); + const defaultMaxSize = Math.pow(2, 16); - // The first wrap call is for getFragmentQueryDocument which intentionally - // does not have a max set since it's not expected to grow. - wrapSpy.mock.calls.splice(1).forEach(([, { max }]) => { - expect(max).toBeUndefined(); - }) + it("uses default max size on caches if resultCacheMaxSize is not configured", () => { + const cache = new InMemoryCache(); + expect(cache["maybeBroadcastWatch"].options.max).toBe(defaultMaxSize); + expect(cache["storeReader"]["executeSelectionSet"].options.max).toBe(defaultMaxSize); + expect(cache["getFragmentDoc"].options.max).toBe(defaultMaxSize); }); it("configures max size on caches when resultCacheMaxSize is set", () => { const resultCacheMaxSize = 12345; - new InMemoryCache({ resultCacheMaxSize }); - expect(wrapSpy).toHaveBeenCalled(); - - // The first wrap call is for getFragmentQueryDocument which intentionally - // does not have a max set since it's not expected to grow. 
- wrapSpy.mock.calls.splice(1).forEach(([, { max }]) => { - expect(max).toBe(resultCacheMaxSize); - }) + const cache = new InMemoryCache({ resultCacheMaxSize }); + expect(cache["maybeBroadcastWatch"].options.max).toBe(resultCacheMaxSize); + expect(cache["storeReader"]["executeSelectionSet"].options.max).toBe(resultCacheMaxSize); + expect(cache["getFragmentDoc"].options.max).toBe(defaultMaxSize); }); }); @@ -2830,7 +2818,7 @@ describe("InMemoryCache#modify", () => { cache.modify({ fields: { - comments(comments: Reference[], { readField }) { + comments(comments: readonly Reference[], { readField }) { expect(Object.isFrozen(comments)).toBe(true); expect(comments.length).toBe(3); const filtered = comments.filter(comment => { @@ -2915,6 +2903,7 @@ describe("InMemoryCache#modify", () => { expect(fieldName).not.toBe("b"); if (fieldName === "a") expect(value).toBe(1); if (fieldName === "c") expect(value).toBe(3); + return value; }, optimistic: true, }); @@ -3918,18 +3907,43 @@ describe('TypedDocumentNode<Data, Variables>', () => { } `; - it('should determine Data and Variables types of {write,read}{Query,Fragment}', () => { - const cache = new InMemoryCache({ + // We need to define these objects separately from calling writeQuery, + // because passing them directly to writeQuery will trigger excess property + // warnings due to the extra __typename and isbn fields. Internally, we + // almost never pass object literals to writeQuery or writeFragment, so + // excess property checks should not be a problem in practice. + const jcmAuthor = { + __typename: "Author", + name: "John C. Mitchell", + }; + + const ffplBook = { + __typename: "Book", + isbn: "0262133210", + title: "Foundations for Programming Languages", + author: jcmAuthor, + }; + + const ffplVariables = { + isbn: "0262133210", + }; + + function getBookCache() { + return new InMemoryCache({ typePolicies: { Query: { fields: { book(existing, { args, toReference }) { - return existing ?? (args && toReference({ - __typename: "Book", - isbn: args.isbn, - })); - } - } + return ( + existing ?? + (args && + toReference({ + __typename: "Book", + isbn: args.isbn, + })) + ); + }, + }, }, Book: { @@ -3941,27 +3955,10 @@ describe('TypedDocumentNode<Data, Variables>', () => { }, }, }); + } - // We need to define these objects separately from calling writeQuery, - // because passing them directly to writeQuery will trigger excess property - // warnings due to the extra __typename and isbn fields. Internally, we - // almost never pass object literals to writeQuery or writeFragment, so - // excess property checks should not be a problem in practice. - const jcmAuthor = { - __typename: "Author", - name: "John C. 
Mitchell", - }; - - const ffplBook = { - __typename: "Book", - isbn: "0262133210", - title: "Foundations for Programming Languages", - author: jcmAuthor, - }; - - const ffplVariables = { - isbn: "0262133210", - }; + it("should determine Data and Variables types of {write,read}{Query,Fragment}", () => { + const cache = getBookCache(); cache.writeQuery({ query, @@ -4041,4 +4038,40 @@ describe('TypedDocumentNode<Data, Variables>', () => { }, }); }); + + it.skip("should infer the types of modifier fields", () => { + const cache = getBookCache(); + + cache.writeQuery({ + query, + variables: ffplVariables, + data: { + book: ffplBook, + }, + }); + + cache.modify<Book>({ + id: cache.identify(ffplBook), + fields: { + isbn: (value) => { + expectTypeOf(value).toEqualTypeOf<string>(); + return value; + }, + title: (value, { INVALIDATE }) => { + expectTypeOf(value).toEqualTypeOf<string>(); + return INVALIDATE; + }, + author: (value, { DELETE, isReference }) => { + expectTypeOf(value).toEqualTypeOf<Reference | Book["author"]>(); + if (isReference(value)) { + expectTypeOf(value).toEqualTypeOf<Reference>(); + } else { + expectTypeOf(value).toEqualTypeOf<Book["author"]>(); + } + + return DELETE; + }, + }, + }); + }); }); diff --git a/src/cache/inmemory/__tests__/entityStore.ts b/src/cache/inmemory/__tests__/entityStore.ts --- a/src/cache/inmemory/__tests__/entityStore.ts +++ b/src/cache/inmemory/__tests__/entityStore.ts @@ -8,6 +8,8 @@ import { Cache } from '../../core/types/Cache'; import { Reference, makeReference, isReference, StoreValue } from '../../../utilities/graphql/storeUtils'; import { MissingFieldError } from '../..'; import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import { stringifyForDisplay } from '../../../utilities'; +import { InvariantError } from '../../../utilities/globals'; describe('EntityStore', () => { it('should support result caching if so configured', () => { @@ -1782,11 +1784,11 @@ describe('EntityStore', () => { try { expect(cache.identify(ABCs)).toBeUndefined(); expect(consoleWarnSpy).toHaveBeenCalledTimes(1); - expect(consoleWarnSpy).toHaveBeenCalledWith( - new Error(`Missing field 'b' while extracting keyFields from ${ - JSON.stringify(ABCs) - }`), - ); + expect(consoleWarnSpy).toHaveBeenCalledWith(new InvariantError( + `Missing field 'b' while extracting keyFields from ${ + stringifyForDisplay(ABCs, 2) + }`, + )); } finally { consoleWarnSpy.mockRestore(); } diff --git a/src/cache/inmemory/__tests__/fragmentMatcher.ts b/src/cache/inmemory/__tests__/fragmentMatcher.ts --- a/src/cache/inmemory/__tests__/fragmentMatcher.ts +++ b/src/cache/inmemory/__tests__/fragmentMatcher.ts @@ -234,8 +234,8 @@ describe("policies.fragmentMatches", () => { beforeEach(() => { warnings.length = 0; - console.warn = function (message: any) { - warnings.push(message); + console.warn = function (...args: any) { + warnings.push(args); }; }); @@ -384,9 +384,9 @@ describe("policies.fragmentMatches", () => { }); expect(warnings).toEqual([ - "Inferring subtype E of supertype C", - "Inferring subtype F of supertype C", - "Inferring subtype G of supertype C", + ["Inferring subtype %s of supertype %s", "E", "C"], + ["Inferring subtype %s of supertype %s", "F", "C"], + ["Inferring subtype %s of supertype %s", "G", "C"], // Note that TooLong is not inferred here. 
]); diff --git a/src/cache/inmemory/__tests__/fragmentRegistry.ts b/src/cache/inmemory/__tests__/fragmentRegistry.ts --- a/src/cache/inmemory/__tests__/fragmentRegistry.ts +++ b/src/cache/inmemory/__tests__/fragmentRegistry.ts @@ -90,6 +90,7 @@ describe("FragmentRegistry", () => { loading: true, networkStatus: NetworkStatus.loading, data: { + __typename: 'Query', source: "local", }, }); @@ -99,6 +100,7 @@ describe("FragmentRegistry", () => { loading: false, networkStatus: NetworkStatus.ready, data: { + __typename: 'Query', source: "link", }, }); diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -4,7 +4,7 @@ import { InMemoryCache } from "../inMemoryCache"; import { ReactiveVar, makeVar } from "../reactiveVars"; import { Reference, StoreObject, ApolloClient, NetworkStatus, TypedDocumentNode, DocumentNode } from "../../../core"; import { MissingFieldError } from "../.."; -import { relayStylePagination } from "../../../utilities"; +import { relayStylePagination, stringifyForDisplay } from "../../../utilities"; import { FieldPolicy, StorageType } from "../policies"; import { itAsync, @@ -443,8 +443,9 @@ describe("type policies", function () { }, }); }).toThrowError( - `Missing field 'year' while extracting keyFields from ${JSON.stringify( - theInformationBookData + `Missing field 'year' while extracting keyFields from ${stringifyForDisplay( + theInformationBookData, + 2 )}`, ); }); @@ -627,6 +628,182 @@ describe("type policies", function () { })).toBe('DeathAdder:{"tagId":"LethalAbacus666"}'); }); + it("typePolicies can be inherited from supertypes with fuzzy possibleTypes", () => { + const cache = new InMemoryCache({ + possibleTypes: { + EntitySupertype: [".*Entity"], + }, + typePolicies: { + Query: { + fields: { + coworkers: { + merge(existing, incoming) { + return existing ? existing.concat(incoming) : incoming; + }, + }, + }, + }, + + // The point of this test is to ensure keyFields: ["uid"] can be + // registered for all __typename strings matching the RegExp /.*Entity/, + // without manually enumerating all of them. 
+ EntitySupertype: { + keyFields: ["uid"], + }, + }, + }); + + type Coworker = { + __typename: "CoworkerEntity" | "ManagerEntity"; + uid: string; + name: string; + } + + const query: TypedDocumentNode<{ + coworkers: Coworker[]; + }> = gql` + query { + coworkers { + uid + name + } + } + `; + + cache.writeQuery({ + query, + data: { + coworkers: [ + { __typename: "CoworkerEntity", uid: "qwer", name: "Alessia" }, + { __typename: "CoworkerEntity", uid: "asdf", name: "Jerel" }, + { __typename: "CoworkerEntity", uid: "zxcv", name: "Lenz" }, + { __typename: "ManagerEntity", uid: "uiop", name: "Jeff" }, + ], + }, + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + coworkers: [ + { __ref: 'CoworkerEntity:{"uid":"qwer"}' }, + { __ref: 'CoworkerEntity:{"uid":"asdf"}' }, + { __ref: 'CoworkerEntity:{"uid":"zxcv"}' }, + { __ref: 'ManagerEntity:{"uid":"uiop"}' }, + ], + }, + 'CoworkerEntity:{"uid":"qwer"}': { + __typename: "CoworkerEntity", + uid: "qwer", + name: "Alessia", + }, + 'CoworkerEntity:{"uid":"asdf"}': { + __typename: "CoworkerEntity", + uid: "asdf", + name: "Jerel", + }, + 'CoworkerEntity:{"uid":"zxcv"}': { + __typename: "CoworkerEntity", + uid: "zxcv", + name: "Lenz", + }, + 'ManagerEntity:{"uid":"uiop"}': { + __typename: "ManagerEntity", + uid: "uiop", + name: "Jeff", + }, + }); + + interface CoworkerWithAlias extends Omit<Coworker, "uid"> { + idAlias: string; + } + + const queryWithAlias: TypedDocumentNode<{ + coworkers: CoworkerWithAlias[]; + }> = gql` + query { + coworkers { + idAlias: uid + name + } + } + `; + + expect(cache.readQuery({ query: queryWithAlias })).toEqual({ + coworkers: [ + { __typename: "CoworkerEntity", idAlias: "qwer", name: "Alessia" }, + { __typename: "CoworkerEntity", idAlias: "asdf", name: "Jerel" }, + { __typename: "CoworkerEntity", idAlias: "zxcv", name: "Lenz" }, + { __typename: "ManagerEntity", idAlias: "uiop", name: "Jeff" }, + ], + }); + + cache.writeQuery({ + query: queryWithAlias, + data: { + coworkers: [ + { __typename: "CoworkerEntity", idAlias: "hjkl", name: "Martijn" }, + { __typename: "ManagerEntity", idAlias: "vbnm", name: "Hugh" }, + ], + }, + }); + + expect(cache.readQuery({ query })).toEqual({ + coworkers: [ + { __typename: "CoworkerEntity", uid: "qwer", name: "Alessia" }, + { __typename: "CoworkerEntity", uid: "asdf", name: "Jerel" }, + { __typename: "CoworkerEntity", uid: "zxcv", name: "Lenz" }, + { __typename: "ManagerEntity", uid: "uiop", name: "Jeff" }, + { __typename: "CoworkerEntity", uid: "hjkl", name: "Martijn" }, + { __typename: "ManagerEntity", uid: "vbnm", name: "Hugh" }, + ], + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + coworkers: [ + { __ref: 'CoworkerEntity:{"uid":"qwer"}' }, + { __ref: 'CoworkerEntity:{"uid":"asdf"}' }, + { __ref: 'CoworkerEntity:{"uid":"zxcv"}' }, + { __ref: 'ManagerEntity:{"uid":"uiop"}' }, + { __ref: 'CoworkerEntity:{"uid":"hjkl"}' }, + { __ref: 'ManagerEntity:{"uid":"vbnm"}' }, + ], + }, + 'CoworkerEntity:{"uid":"qwer"}': { + __typename: "CoworkerEntity", + uid: "qwer", + name: "Alessia", + }, + 'CoworkerEntity:{"uid":"asdf"}': { + __typename: "CoworkerEntity", + uid: "asdf", + name: "Jerel", + }, + 'CoworkerEntity:{"uid":"zxcv"}': { + __typename: "CoworkerEntity", + uid: "zxcv", + name: "Lenz", + }, + 'ManagerEntity:{"uid":"uiop"}': { + __typename: "ManagerEntity", + uid: "uiop", + name: "Jeff", + }, + 'CoworkerEntity:{"uid":"hjkl"}': { + __typename: "CoworkerEntity", + uid: "hjkl", + name: "Martijn", + }, + 'ManagerEntity:{"uid":"vbnm"}': 
{ + __typename: "ManagerEntity", + uid: "vbnm", + name: "Hugh", + }, + }); + }); + describe("field policies", function () { it(`can filter arguments using keyArgs`, function () { const cache = new InMemoryCache({ diff --git a/src/cache/inmemory/__tests__/readFromStore.ts b/src/cache/inmemory/__tests__/readFromStore.ts --- a/src/cache/inmemory/__tests__/readFromStore.ts +++ b/src/cache/inmemory/__tests__/readFromStore.ts @@ -18,37 +18,22 @@ import { TypedDocumentNode, } from '../../../core'; -jest.mock('optimism'); -import { wrap } from 'optimism'; - describe('resultCacheMaxSize', () => { const cache = new InMemoryCache(); - let wrapSpy: jest.Mock = wrap as jest.Mock; - beforeEach(() => { - wrapSpy.mockClear(); - }); - - it("does not set max size on caches if resultCacheMaxSize is not configured", () => { - new StoreReader({ cache }); - expect(wrapSpy).toHaveBeenCalled(); + const defaultMaxSize = Math.pow(2, 16); - wrapSpy.mock.calls.forEach(([, { max }]) => { - expect(max).toBeUndefined(); - }) + it("uses default max size on caches if resultCacheMaxSize is not configured", () => { + const reader = new StoreReader({ cache }); + expect(reader["executeSelectionSet"].options.max).toBe(defaultMaxSize); }); it("configures max size on caches when resultCacheMaxSize is set", () => { const resultCacheMaxSize = 12345; - new StoreReader({ cache, resultCacheMaxSize }); - expect(wrapSpy).toHaveBeenCalled(); - - wrapSpy.mock.calls.forEach(([, { max }]) => { - expect(max).toBe(resultCacheMaxSize); - }) + const reader = new StoreReader({ cache, resultCacheMaxSize }); + expect(reader["executeSelectionSet"].options.max).toBe(resultCacheMaxSize); }); }); - describe('reading from the store', () => { const reader = new StoreReader({ cache: new InMemoryCache(), diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -2,11 +2,16 @@ import gql from "graphql-tag"; import { GraphQLError } from "graphql"; import { TypedDocumentNode } from "@graphql-typed-document-node/core"; -import { ApolloClient, ApolloQueryResult, NetworkStatus, WatchQueryFetchPolicy } from "../../core"; +import { + ApolloClient, + ApolloQueryResult, + NetworkStatus, + WatchQueryFetchPolicy +} from "../../core"; import { ObservableQuery } from "../ObservableQuery"; import { QueryManager } from "../QueryManager"; -import { Observable } from "../../utilities"; +import { DocumentTransform, Observable, removeDirectivesFromDocument } from "../../utilities"; import { ApolloLink, FetchResult } from "../../link/core"; import { InMemoryCache, NormalizedCacheObject } from "../../cache"; import { ApolloError } from "../../errors"; @@ -21,18 +26,16 @@ import { SubscriptionObserver } from "zen-observable-ts"; import { waitFor } from "@testing-library/react"; export const mockFetchQuery = (queryManager: QueryManager<any>) => { - const fetchQueryObservable = queryManager.fetchQueryObservable; const fetchConcastWithInfo = queryManager['fetchConcastWithInfo']; const fetchQueryByPolicy: QueryManager<any>["fetchQueryByPolicy"] = (queryManager as any) .fetchQueryByPolicy; - const mock = <T extends typeof fetchQueryObservable | typeof fetchConcastWithInfo | typeof fetchQueryByPolicy>(original: T) => + const mock = <T extends typeof fetchConcastWithInfo | typeof fetchQueryByPolicy>(original: T) => jest.fn<ReturnType<T>, Parameters<T>>(function () { return original.apply(queryManager, arguments); }); const mocks = { - fetchQueryObservable: 
mock(fetchQueryObservable), fetchConcastWithInfo: mock(fetchConcastWithInfo), fetchQueryByPolicy: mock(fetchQueryByPolicy), }; @@ -1471,9 +1474,11 @@ describe("ObservableQuery", () => { expect(consoleWarnSpy).toHaveBeenCalledTimes(1); expect(consoleWarnSpy).toHaveBeenCalledWith( [ - 'Called refetch({"variables":["d","e"]}) for query QueryWithoutVariables, which does not declare a $variables variable.', + 'Called refetch(%o) for query %o, which does not declare a $variables variable.', "Did you mean to call refetch(variables) instead of refetch({ variables })?", - ].join("\n") + ].join("\n"), + {"variables": ["d", "e"]}, + "QueryWithoutVariables" ); consoleWarnSpy.mockRestore(); @@ -1578,9 +1583,11 @@ describe("ObservableQuery", () => { expect(consoleWarnSpy).toHaveBeenCalledTimes(1); expect(consoleWarnSpy).toHaveBeenCalledWith( [ - 'Called refetch({"variables":{"vars":["d","e"]}}) for query QueryWithVarsVar, which does not declare a $variables variable.', + 'Called refetch(%o) for query %o, which does not declare a $variables variable.', "Did you mean to call refetch(variables) instead of refetch({ variables })?", - ].join("\n") + ].join("\n"), + {"variables":{"vars":["d","e"]}}, + "QueryWithVarsVar" ); consoleWarnSpy.mockRestore(); @@ -2685,6 +2692,132 @@ describe("ObservableQuery", () => { }); }); + describe('.query computed property', () => { + it('is equal to transformed query when instantiating via `watchQuery`', () => { + const query = gql` + query { + currentUser { + id + } + } + `; + + const client = new ApolloClient({ + link: ApolloLink.empty(), + cache: new InMemoryCache(), + }); + + const observable = client.watchQuery({ query }); + + expect(observable.query).toMatchDocument(gql` + query { + currentUser { + id + __typename + } + } + `); + }); + + it('is referentially stable', () => { + const query = gql` + query { + currentUser { + id + } + } + `; + + const client = new ApolloClient({ + link: ApolloLink.empty(), + cache: new InMemoryCache(), + }); + + const observable = client.watchQuery({ query }); + const result = observable.query; + + expect(observable.query).toBe(result); + }); + + it('is updated with transformed query when `setOptions` changes the query', () => { + const query = gql` + query { + currentUser { + id + } + } + `; + + const updatedQuery = gql` + query { + product { + id + } + } + ` + + const client = new ApolloClient({ + link: ApolloLink.empty(), + cache: new InMemoryCache(), + }); + + const observable = client.watchQuery({ query }); + + expect(observable.query).toMatchDocument(gql` + query { + currentUser { + id + __typename + } + } + `); + + observable.setOptions({ query: updatedQuery }); + + expect(observable.query).toMatchDocument(gql` + query { + product { + id + __typename + } + } + `); + }); + + it('reflects query run through custom transforms', () => { + const query = gql` + query { + currentUser { + id + name @client + } + } + `; + + const documentTransform = new DocumentTransform((document) => { + return removeDirectivesFromDocument([{ name: 'client' }], document)! 
+ }); + + const client = new ApolloClient({ + link: ApolloLink.empty(), + cache: new InMemoryCache(), + documentTransform, + }); + + const observable = client.watchQuery({ query }); + + expect(observable.query).toMatchDocument(gql` + query { + currentUser { + id + name + __typename + } + } + `); + }); + }); + itAsync("QueryInfo does not notify for !== but deep-equal results", (resolve, reject) => { const queryManager = mockQueryManager({ request: { query, variables }, diff --git a/src/core/__tests__/QueryManager/index.ts b/src/core/__tests__/QueryManager/index.ts --- a/src/core/__tests__/QueryManager/index.ts +++ b/src/core/__tests__/QueryManager/index.ts @@ -4847,7 +4847,7 @@ describe('QueryManager', () => { result => { expect(result.data).toEqual(secondReqData); expect(consoleWarnSpy).toHaveBeenLastCalledWith( - 'Unknown query named "fakeQuery" requested in refetchQueries options.include array' + 'Unknown query named "%s" requested in refetchQueries options.include array', "fakeQuery" ); }, ).then(resolve, reject); @@ -4914,7 +4914,7 @@ describe('QueryManager', () => { }); }).then(() => { expect(consoleWarnSpy).toHaveBeenLastCalledWith( - 'Unknown query named "getAuthors" requested in refetchQueries options.include array' + 'Unknown query named "%s" requested in refetchQueries options.include array', "getAuthors" ); }).then(resolve, reject); }); diff --git a/src/core/__tests__/QueryManager/links.ts b/src/core/__tests__/QueryManager/links.ts --- a/src/core/__tests__/QueryManager/links.ts +++ b/src/core/__tests__/QueryManager/links.ts @@ -1,5 +1,6 @@ // externals import gql from 'graphql-tag'; +import { print } from 'graphql' import { Observable, ObservableSubscription } from '../../../utilities/observables/Observable'; import { ApolloLink } from '../../../link/core'; @@ -360,4 +361,51 @@ describe('Link interactions', () => { }); }); }); + + it('removes @client fields from the query before it reaches the link', async () => { + const result: { current: Operation | undefined } = { + current: undefined + }; + + const query = gql` + query { + books { + id + title + isRead @client + } + } + `; + + const expectedQuery = gql` + query { + books { + id + title + } + } + `; + + const link = new ApolloLink((operation) => { + result.current = operation; + + return Observable.of({ + data: { + books: [ + { id: 1, title: 'Woo', __typename: 'Book' }, + { id: 2, title: 'Foo', __typename: 'Book' }, + ], + } + }); + }); + + const queryManager = new QueryManager({ + link, + cache: new InMemoryCache({ addTypename: false }), + }); + + await queryManager.query({ query }); + + expect(print(result.current!.query)).toEqual(print(expectedQuery)) + }); }); diff --git a/src/core/__tests__/equalByQuery.ts b/src/core/__tests__/equalByQuery.ts new file mode 100644 --- /dev/null +++ b/src/core/__tests__/equalByQuery.ts @@ -0,0 +1,722 @@ +import { GraphQLError } from "graphql"; +import { TypedDocumentNode, gql } from "../index"; +import { equalByQuery } from "../equalByQuery"; + +describe("equalByQuery", () => { + it("is importable and a function", () => { + expect(typeof equalByQuery).toBe("function"); + }); + + it("works with a basic single-field query", () => { + const query = gql` + query { + hello + } + `; + + expect( + equalByQuery(query, { data: { hello: "hi" } }, { data: { hello: "hi" } }) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { hello: "hi", unrelated: 1 } }, + { data: { hello: "hi", unrelated: 100 } } + ) + ).toBe(true); + + expect( + equalByQuery(query, { data: { hello: "hi" } }, { data: 
{ hello: "hey" } }) + ).toBe(false); + + expect(equalByQuery(query, { data: {} }, { data: { hello: "hi" } })).toBe( + false + ); + + expect(equalByQuery(query, { data: { hello: "hi" } }, { data: {} })).toBe( + false + ); + + expect(equalByQuery(query, { data: { hello: "hi" } }, { data: null })).toBe( + false + ); + + expect(equalByQuery(query, { data: null }, { data: { hello: "hi" } })).toBe( + false + ); + + expect(equalByQuery(query, { data: null }, { data: null })).toBe(true); + + expect(equalByQuery(query, { data: {} }, { data: {} })).toBe(true); + + expect( + equalByQuery( + query, + { data: { unrelated: "whatever" } }, + { data: { unrelated: "no matter" } } + ) + ).toBe(true); + }); + + it("is not confused by properties in different orders", () => { + const query = gql` + query { + a + b + c + } + `; + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { b: 2, c: 3, a: 1 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { d: "bogus", a: 1, b: 2, c: 3 } }, + { data: { b: 2, c: 3, a: 1, d: "also bogus" } } + ) + ).toBe(true); + }); + + it("respects the @nonreactive directive on fields", () => { + const query = gql` + query { + a + b + c @nonreactive + } + `; + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: "different" } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: "different", b: 2, c: 4 } } + ) + ).toBe(false); + }); + + describe("@skip and @include directives", () => { + // The @skip and @include directives use query variables to determine + // whether subtrees of the query should be executed at all, so they can + // influence the comparison of results in ways similar to @nonreactive. The + // key difference is that @skip and @include will be sent to the server, + // whereas @nonreactive is a client-only directive, and does not prevent + // execution of nonreactive fields/subtrees on the server. + it("respects @skip directive, depending on variables", () => { + const skipQuery = gql` + query SkipC($condition: Boolean!) { + a + b + c @skip(if: $condition) + } + `; + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: 3 } }, + { condition: false } + ) + ).toBe(true); + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2 } }, + { condition: false } + ) + ).toBe(false); + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2 } }, + { condition: true } + ) + ).toBe(true); + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2, c: 3 } }, + { condition: false } + ) + ).toBe(false); + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2, c: 3 } }, + { condition: true } + ) + ).toBe(true); + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2 } }, + { condition: false } + ) + ).toBe(true); + + expect( + equalByQuery( + skipQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2 } }, + { condition: true } + ) + ).toBe(true); + }); + + it("respects @include directive, depending on variables", () => { + const includeQuery = gql` + query IncludeC($condition: Boolean!) 
{ + a + b + c @include(if: $condition) + } + `; + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: 3 } }, + { condition: true } + ) + ).toBe(true); + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2 } }, + { condition: true } + ) + ).toBe(false); + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2 } }, + { condition: false } + ) + ).toBe(true); + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2, c: 3 } }, + { condition: true } + ) + ).toBe(false); + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2, c: 3 } }, + { condition: false } + ) + ).toBe(true); + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2 } }, + { condition: true } + ) + ).toBe(true); + + expect( + equalByQuery( + includeQuery, + { data: { a: 1, b: 2 } }, + { data: { a: 1, b: 2 } }, + { condition: false } + ) + ).toBe(true); + }); + }); + + it("considers errors as well as data", () => { + const query = gql` + query { + a + b @nonreactive + c + } + `; + + const data123 = { a: 1, b: 2, c: 3 }; + const oopsError = new GraphQLError("oops"); + const differentError = new GraphQLError("different"); + + expect( + equalByQuery( + query, + { data: data123 }, + { data: data123, errors: [oopsError] } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: data123, errors: [oopsError] }, + { data: data123 } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: data123, errors: [oopsError] }, + { data: data123, errors: [oopsError] } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: data123, errors: [oopsError] }, + { data: data123, errors: [differentError] } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: data123, errors: [oopsError] }, + { data: data123, errors: [oopsError] } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: data123, errors: [oopsError] }, + { data: { ...data123, b: 100 }, errors: [oopsError] } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: data123, errors: [] }, + { data: data123, errors: [] } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: data123, errors: [] }, + { data: { ...data123, b: 100 }, errors: [] } + ) + ).toBe(true); + }); + + it("respects the @nonreactive directive on inline fragments", () => { + const query = gql` + query { + a + ... 
@nonreactive { + b + c + } + } + `; + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 30 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 10, b: 20, c: 30 } } + ) + ).toBe(false); + }); + + it("respects the @nonreactive directive on named fragment ...spreads", () => { + const query = gql` + query { + a + ...BCFragment @nonreactive + } + + fragment BCFragment on Query { + b + c + } + `; + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: 30 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 3 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 30 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 10, b: 20, c: 30 } } + ) + ).toBe(false); + }); + + it("respects the @nonreactive directive on named fragment definitions", () => { + const query = gql` + query { + a + ...BCFragment + } + + fragment BCFragment on Query @nonreactive { + b + c + } + `; + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: 30 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 3 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 30 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 10, b: 20, c: 30 } } + ) + ).toBe(false); + }); + + it("traverses fragments without @nonreactive", () => { + const query = gql` + query { + a + ...BCFragment + } + + fragment BCFragment on Query { + b + c + } + `; + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: 3 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { c: 3, a: 1, b: 2 } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 2, c: 30 } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 3 } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 1, b: 20, c: 30 } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { a: 1, b: 2, c: 3 } }, + { data: { a: 10, b: 20, c: 30 } } + ) + ).toBe(false); + }); + + type Thing = { + __typename: "Thing"; + id: string; + stable: number; + volatile: number; + }; + + it.each<TypedDocumentNode<Thing>>([ + gql` + query { + things { + __typename + id + stable + volatile @nonreactive + } + } + `, + gql` + query { + things { + __typename + id + ...ThingDetails + } + } + + fragment ThingDetails on Thing { + stable + volatile @nonreactive + } + `, + gql` + query { + things { + __typename + id + ... on Thing { + stable + volatile @nonreactive + } + } + } + `, + gql` + query { + things { + __typename + id + stable + ... on Thing @nonreactive { + volatile + } + } + } + `, + gql` + query { + things { + __typename + id + stable + ...Volatile @nonreactive + } + } + + fragment Volatile on Thing { + volatile + } + `, + gql` + query { + things { + __typename + id + stable + ... 
@nonreactive { + volatile + } + } + } + `, + ])( + "iterates over array-valued result fields ignoring @nonreactive (%#)", + (query) => { + let nextVolatileIntegerPart = 0; + const makeThing = (id: string, stable = 1): Thing => ({ + __typename: "Thing", + id, + stable, + // Thing.volatile is always a different randomized number, which normally + // would threaten any deep comparison of Thing objects. These test cases + // demonstrate (among other things) that we can make the result comparison + // insensitive to this volatility by marking the volatile field with the + // @nonreactive directive. + volatile: nextVolatileIntegerPart++ + Math.random(), + }); + + const makeThings = ( + lettersToSplit: string, + stable: number = 1 + ): Thing[] => lettersToSplit.split("").map((id) => makeThing(id, stable)); + + expect( + equalByQuery( + query, + { data: { things: makeThings("abc") } }, + { data: { things: [makeThing("a"), makeThing("b"), makeThing("c")] } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { things: makeThings("abcdefg", 2) } }, + { data: { things: makeThings("abcdefg") } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { things: makeThings("abcdefg", 2) } }, + { data: { things: makeThings("abcdefg", 3) } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { things: makeThings("abcdefg", 3) } }, + { data: { things: makeThings("abcdefg", 3) } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { things: makeThings("ab", 2345) } }, + { data: { things: [makeThing("a"), makeThing("b", 2345)] } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { things: makeThings("ab", 3456) } }, + { data: { things: [makeThing("a", 3456), makeThing("b")] } } + ) + ).toBe(false); + + expect( + equalByQuery( + query, + { data: { things: makeThings("ab", 3456) } }, + { data: { things: [makeThing("a", 3456), makeThing("b", 3456)] } } + ) + ).toBe(true); + + expect( + equalByQuery( + query, + { data: { things: makeThings("abc") } }, + { data: { things: "not an array" } } + ) + ).toBe(false); + + expect( + equalByQuery(query, { data: { things: {} } }, { data: { things: [] } }) + ).toBe(false); + + expect( + equalByQuery(query, { data: { things: [] } }, { data: { things: {} } }) + ).toBe(false); + + expect( + equalByQuery(query, { data: { things: [] } }, { data: { things: [] } }) + ).toBe(true); + + expect( + equalByQuery( + query, + // There's nothing inherently array-like about the Query.things field as + // it's represented in query syntax, since `query { things { id } }` could + // (depending on the server/schema) return a single object for the things + // field, rather than an array. Although this might seem like a strange + // edge case to test, it demonstrates the equalByQuery function can handle + // any combination of array/non-array values.
+ { data: { things: {} } }, + { data: { things: {} } } + ) + ).toBe(true); + } + ); +}); diff --git a/src/link/batch-http/__tests__/batchHttpLink.ts b/src/link/batch-http/__tests__/batchHttpLink.ts --- a/src/link/batch-http/__tests__/batchHttpLink.ts +++ b/src/link/batch-http/__tests__/batchHttpLink.ts @@ -4,9 +4,10 @@ import { ASTNode, print, stripIgnoredCharacters } from 'graphql'; import { ApolloLink } from '../../core/ApolloLink'; import { execute } from '../../core/execute'; -import { Observable } from '../../../utilities/observables/Observable'; +import { Observable, Observer } from '../../../utilities/observables/Observable'; import { BatchHttpLink } from '../batchHttpLink'; import { itAsync } from '../../../testing'; +import { FetchResult } from '../../core'; const sampleQuery = gql` query SampleQuery { @@ -298,7 +299,7 @@ describe('SharedHttpTest', () => { it('raises warning if called with concat', () => { const link = createHttpLink(); const _warn = console.warn; - console.warn = (warning: any) => expect(warning['message']).toBeDefined(); + console.warn = (...args: any) => expect(args).toEqual(["You are calling concat on a terminating link, which will have no effect %o", link]); expect(link.concat((operation, forward) => forward(operation))).toEqual( link, ); @@ -352,6 +353,62 @@ describe('SharedHttpTest', () => { ); }); + itAsync('strips unused variables, respecting nested fragments', (resolve, reject) => { + const link = createHttpLink({ uri: '/data' }); + + const query = gql` + query PEOPLE ( + $declaredAndUsed: String, + $declaredButUnused: Int, + ) { + people( + surprise: $undeclared, + noSurprise: $declaredAndUsed, + ) { + ... on Doctor { + specialty(var: $usedByInlineFragment) + } + ...LawyerFragment + } + } + fragment LawyerFragment on Lawyer { + caseCount(var: $usedByNamedFragment) + } + `; + + const variables = { + unused: 'strip', + declaredButUnused: 'strip', + declaredAndUsed: 'keep', + undeclared: 'keep', + usedByInlineFragment: 'keep', + usedByNamedFragment: 'keep', + }; + + execute(link, { + query, + variables, + }).subscribe({ + next: makeCallback(resolve, reject, () => { + const [uri, options] = fetchMock.lastCall()!; + const { method, body } = options!; + expect(JSON.parse(body as string)).toEqual([{ + operationName: "PEOPLE", + query: print(query), + variables: { + declaredAndUsed: 'keep', + undeclared: 'keep', + usedByInlineFragment: 'keep', + usedByNamedFragment: 'keep', + }, + }]); + expect(method).toBe('POST'); + expect(uri).toBe('/data'); + }), + error: error => reject(error), + }); + }); + itAsync('unsubscribes without calling subscriber', (resolve, reject) => { const link = createHttpLink({ uri: '/data' }); const observable = execute(link, { @@ -371,6 +428,7 @@ describe('SharedHttpTest', () => { link: ApolloLink, after: () => void, includeExtensions: boolean, + includeUnusedVariables: boolean, reject: (e: Error) => void, ) => { const next = jest.fn(); @@ -389,7 +447,7 @@ describe('SharedHttpTest', () => { try { let body = convertBatchedBody(fetchMock.lastCall()![1]!.body); expect(body.query).toBe(print(sampleMutation)); - expect(body.variables).toEqual(variables); + expect(body.variables).toEqual(includeUnusedVariables ? 
variables : {}); expect(body.context).not.toBeDefined(); if (includeExtensions) { expect(body.extensions).toBeDefined(); @@ -410,8 +468,9 @@ describe('SharedHttpTest', () => { const link = createHttpLink({ uri: '/data', includeExtensions: true }); verifyRequest( link, - () => verifyRequest(link, resolve, true, reject), + () => verifyRequest(link, resolve, true, false, reject), true, + false, reject, ); }); @@ -420,7 +479,8 @@ describe('SharedHttpTest', () => { const link = createHttpLink({ uri: '/data' }); verifyRequest( link, - () => verifyRequest(link, resolve, false, reject), + () => verifyRequest(link, resolve, false, false, reject), + false, false, reject, ); @@ -582,7 +642,7 @@ describe('SharedHttpTest', () => { const variables = { params: 'stub' }; const link = createHttpLink({ uri: '/data', - headers: { + headers: { authorization: '1234', AUTHORIZATION: '1234', 'CONTENT-TYPE': 'application/json', @@ -898,4 +958,186 @@ describe('SharedHttpTest', () => { () => {}, ); }); + + it('removes @client fields from the query before sending it to the server', async () => { + fetchMock.mock('https://example.com/graphql', { + status: 200, + body: JSON.stringify({ + data: { + author: { __typename: 'Author', name: 'Test User' } + } + }), + headers: { 'content-type': 'application/json' } + }); + + const query = gql` + query { + author { + name + isInCollection @client + } + } + `; + + const serverQuery = gql` + query { + author { + name + } + } + `; + + const link = createHttpLink({ uri: 'https://example.com/graphql' }); + + await new Promise((resolve, reject) => { + execute(link, { query }).subscribe({ + next: resolve, + error: reject + }); + }); + + const [, options] = fetchMock.lastCall()!; + const { body } = options! + + expect(JSON.parse(body!.toString())).toEqual([ + { + query: print(serverQuery), + variables: {} + } + ]); + }); + + it('responds with error when trying to send a client-only query', async () => { + const errorHandler = jest.fn() + const query = gql` + query { + author @client { + name + } + } + `; + + const link = createHttpLink({ uri: 'https://example.com/graphql' }); + + await new Promise<void>((resolve, reject) => { + execute(link, { query }).subscribe({ + next: reject, + error: errorHandler.mockImplementation(resolve) + }); + }); + + expect(errorHandler).toHaveBeenCalledWith( + new Error('BatchHttpLink: Trying to send a client-only query to the server. 
To send to the server, ensure a non-client field is added to the query or enable the `transformOptions.removeClientFields` option.') + ); + }); + + describe('AbortController', () => { + const originalAbortController = globalThis.AbortController; + afterEach(() => { + globalThis.AbortController = originalAbortController; + }); + + function trackGlobalAbortControllers() { + const instances: AbortController[] = [] + class AbortControllerMock { + constructor() { + const instance = new originalAbortController() + instances.push(instance) + return instance + } + } + + globalThis.AbortController = AbortControllerMock as any; + return instances; + } + + const failingObserver: Observer<FetchResult> = { + next: () => { + fail('result should not have been called'); + }, + error: e => { + fail(e); + }, + complete: () => { + fail('complete should not have been called'); + }, + } + + function mockFetch() { + const text = jest.fn(async () => '{ "data": { "stub": { "id": "foo" } } }'); + const fetch = jest.fn(async (uri, options) => ({ text })); + return { text, fetch } + } + + it("aborts the request when unsubscribing before the request has completed", () => { + const { fetch } = mockFetch(); + const abortControllers = trackGlobalAbortControllers(); + + const link = createHttpLink({ uri: 'data', fetch: fetch as any }); + + const sub = execute(link, { query: sampleQuery }).subscribe(failingObserver); + sub.unsubscribe(); + + expect(abortControllers.length).toBe(1); + expect(abortControllers[0].signal.aborted).toBe(true); + }); + + it('a passed-in signal will be forwarded to the `fetch` call and not be overwritten by an internally-created one', () => { + const { fetch } = mockFetch(); + const externalAbortController = new AbortController(); + + const link = createHttpLink({ uri: 'data', fetch: fetch as any, fetchOptions: { signal: externalAbortController.signal } }); + + const sub = execute(link, { query: sampleQuery } ).subscribe(failingObserver); + sub.unsubscribe(); + + expect(fetch.mock.calls.length).toBe(1); + expect(fetch.mock.calls[0][1]).toEqual(expect.objectContaining({ signal: externalAbortController.signal })) + }); + + it('aborting the internal signal will not cause an error', async () => { + try { + fetchMock.restore(); + fetchMock.postOnce('data', async () => '{ "data": { "stub": { "id": "foo" } } }'); + const abortControllers = trackGlobalAbortControllers(); + + const link = createHttpLink({ uri: '/data' }); + execute(link, { query: sampleQuery } ).subscribe(failingObserver); + abortControllers[0].abort(); + } finally { + fetchMock.restore(); + } + }); + + it('resolving fetch does not cause the AbortController to be aborted', async () => { + const { text, fetch } = mockFetch(); + const abortControllers = trackGlobalAbortControllers(); + text.mockResolvedValueOnce('{ "data": { "hello": "world" } }'); + + // (the request is already finished at that point) + const link = createHttpLink({ uri: 'data', fetch: fetch as any }); + + await new Promise<void>(resolve => execute(link, { query: sampleQuery }).subscribe({ + complete: resolve + })); + + expect(abortControllers.length).toBe(1); + expect(abortControllers[0].signal.aborted).toBe(false); + }); + + it('an unsuccessful fetch does not cause the AbortController to be aborted', async () => { + const { fetch } = mockFetch(); + const abortControllers = trackGlobalAbortControllers(); + fetch.mockRejectedValueOnce("This is an error!") + // the request would be closed by the browser in the case of an error anyways + const link = createHttpLink({ uri: 
'data', fetch: fetch as any }); + + await new Promise<void>(resolve => execute(link, { query: sampleQuery }).subscribe({ + error: resolve + })); + + expect(abortControllers.length).toBe(1); + expect(abortControllers[0].signal.aborted).toBe(false); + }); + }); }); diff --git a/src/link/batch/__tests__/batchLink.ts b/src/link/batch/__tests__/batchLink.ts --- a/src/link/batch/__tests__/batchLink.ts +++ b/src/link/batch/__tests__/batchLink.ts @@ -793,9 +793,9 @@ describe('BatchLink', () => { }); const link_no_op = new BatchLink({ batchHandler: () => Observable.of() }); const _warn = console.warn; - console.warn = (warning: any) => { + console.warn = (...args: any) => { calls++; - expect(warning.message).toBeDefined(); + expect(args).toEqual(["You are calling concat on a terminating link, which will have no effect %o", expect.any(BatchLink)]); }; expect( link_one_op.concat((operation, forward) => forward(operation)), diff --git a/src/link/core/__tests__/ApolloLink.ts b/src/link/core/__tests__/ApolloLink.ts --- a/src/link/core/__tests__/ApolloLink.ts +++ b/src/link/core/__tests__/ApolloLink.ts @@ -921,9 +921,9 @@ describe('ApolloClient', () => { describe('Terminating links', () => { const _warn = console.warn; - const warningStub = jest.fn(warning => { - expect(warning.message).toBe( - `You are calling concat on a terminating link, which will have no effect`, + const warningStub = jest.fn((warning, link) => { + expect(warning).toBe( + `You are calling concat on a terminating link, which will have no effect %o`, ); }); const data = { @@ -1017,7 +1017,7 @@ describe('ApolloClient', () => { link, ); expect(warningStub).toHaveBeenCalledTimes(1); - expect(warningStub.mock.calls[0][0].link).toEqual(link); + expect(warningStub.mock.calls[0][1]).toEqual(link); }); it('should warn if attempting to concat to a terminating Link', () => { @@ -1026,7 +1026,7 @@ describe('ApolloClient', () => { link, ); expect(warningStub).toHaveBeenCalledTimes(1); - expect(warningStub.mock.calls[0][0].link).toEqual(link); + expect(warningStub.mock.calls[0][1]).toEqual(link); }); it('should not warn if attempting concat a terminating Link at end', () => { diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -13,7 +13,7 @@ import { HttpLink } from '../HttpLink'; import { createHttpLink } from '../createHttpLink'; import { ClientParseError } from '../serializeFetchParameter'; import { ServerParseError } from '../parseAndCheckHttpResponse'; -import { ServerError } from '../../..'; +import { FetchResult, ServerError } from '../../..'; import { voidFetchDuringEachTest } from './helpers'; import { itAsync } from '../../../testing'; @@ -454,7 +454,7 @@ describe('HttpLink', () => { it('raises warning if called with concat', () => { const link = createHttpLink(); const _warn = console.warn; - console.warn = (warning: any) => expect(warning['message']).toBeDefined(); + console.warn = (...args: any) => expect(args).toEqual(["You are calling concat on a terminating link, which will have no effect %o", link]); expect(link.concat((operation, forward) => forward(operation))).toEqual( link, ); @@ -1041,6 +1041,76 @@ describe('HttpLink', () => { () => {}, ); }); + + it('removes @client fields from the query before sending it to the server', async () => { + fetchMock.mock('https://example.com/graphql', { + status: 200, + body: JSON.stringify({ + data: { + author: { __typename: 'Author', name: 'Test User' } + } + }), + 
headers: { 'content-type': 'application/json' } + }); + + const query = gql` + query { + author { + name + isInCollection @client + } + } + `; + + const serverQuery = gql` + query { + author { + name + } + } + `; + + const link = createHttpLink({ uri: 'https://example.com/graphql' }); + + await new Promise((resolve, reject) => { + execute(link, { query }).subscribe({ + next: resolve, + error: reject + }); + }); + + const [, options] = fetchMock.lastCall()!; + const { body } = options! + + expect(JSON.parse(body!.toString())).toEqual({ + query: print(serverQuery), + variables: {} + }); + }); + + it('responds with error when trying to send a client-only query', async () => { + const errorHandler = jest.fn() + const query = gql` + query { + author @client { + name + } + } + `; + + const link = createHttpLink({ uri: 'https://example.com/graphql' }); + + await new Promise<void>((resolve, reject) => { + execute(link, { query }).subscribe({ + next: reject, + error: errorHandler.mockImplementation(resolve) + }); + }); + + expect(errorHandler).toHaveBeenCalledWith( + new Error('HttpLink: Trying to send a client-only query to the server. To send to the server, ensure a non-client field is added to the query or set the `transformOptions.removeClientFields` option to `true`.') + ); + }); }); describe('Dev warnings', () => { @@ -1255,46 +1325,123 @@ describe('HttpLink', () => { }), ); }); - itAsync('supports being cancelled and does not throw', (resolve, reject) => { - let called = false; - class AbortController { - signal: {}; - abort = () => { - called = true; - }; - } - (global as any).AbortController = AbortController; + describe('AbortController', () => { + const originalAbortController = globalThis.AbortController; + afterEach(() => { + globalThis.AbortController = originalAbortController; + }); - fetch.mockReturnValueOnce(Promise.resolve({ text })); - text.mockReturnValueOnce( - Promise.resolve('{ "data": { "hello": "world" } }'), - ); + function trackGlobalAbortControllers() { + const instances: AbortController[] = [] + class AbortControllerMock { + constructor() { + const instance = new originalAbortController() + instances.push(instance) + return instance + } + } - const link = createHttpLink({ uri: 'data', fetch: fetch as any }); + globalThis.AbortController = AbortControllerMock as any; + return instances; + } - const sub = execute(link, { query: sampleQuery }).subscribe({ - next: result => { - reject('result should not have been called'); + const failingObserver: Observer<FetchResult> = { + next: () => { + fail('result should not have been called'); }, error: e => { - reject(e); + fail(e); }, complete: () => { - reject('complete should not have been called'); + fail('complete should not have been called'); }, + } + + function mockFetch() { + const text = jest.fn(async () => '{ "data": { "stub": { "id": "foo" } } }'); + const fetch = jest.fn(async (uri, options) => ({ text })); + return { text, fetch } + } + + it("aborts the request when unsubscribing before the request has completed", () => { + const { fetch } = mockFetch(); + const abortControllers = trackGlobalAbortControllers(); + + const link = createHttpLink({ uri: 'data', fetch: fetch as any }); + + const sub = execute(link, { query: sampleQuery }).subscribe(failingObserver); + sub.unsubscribe(); + + expect(abortControllers.length).toBe(1); + expect(abortControllers[0].signal.aborted).toBe(true); }); - sub.unsubscribe(); - setTimeout( - makeCallback(resolve, reject, () => { - delete (global as any).AbortController; - 
expect(called).toBe(true); - fetch.mockReset(); - text.mockReset(); - }), - 150, - ); + it('a passed-in signal will be forwarded to the `fetch` call and not be overwritten by an internally-created one', () => { + const { fetch } = mockFetch(); + const externalAbortController = new AbortController(); + + const link = createHttpLink({ uri: 'data', fetch: fetch as any, fetchOptions: { signal: externalAbortController.signal } }); + + const sub = execute(link, { query: sampleQuery } ).subscribe(failingObserver); + sub.unsubscribe(); + + expect(fetch.mock.calls.length).toBe(1); + expect(fetch.mock.calls[0][1]).toEqual(expect.objectContaining({ signal: externalAbortController.signal })) + }); + + it('a passed-in signal that is cancelled will fail the observable with an `AbortError`', async () => { + try { + fetchMock.restore(); + fetchMock.postOnce('data', async () => '{ "data": { "stub": { "id": "foo" } } }'); + + const externalAbortController = new AbortController(); + + const link = createHttpLink({ uri: '/data', fetchOptions: { signal: externalAbortController.signal } }); + + const error = await new Promise<Error>(resolve => { + execute(link, { query: sampleQuery } ).subscribe({ + ...failingObserver, + error: resolve, + }); + externalAbortController.abort(); + }); + expect(error.name).toBe("AbortError") + } finally { + fetchMock.restore(); + } + }); + + it('resolving fetch does not cause the AbortController to be aborted', async () => { + const { text, fetch } = mockFetch(); + const abortControllers = trackGlobalAbortControllers(); + text.mockResolvedValueOnce('{ "data": { "hello": "world" } }'); + + // (the request is already finished at that point) + const link = createHttpLink({ uri: 'data', fetch: fetch as any }); + + await new Promise<void>(resolve => execute(link, { query: sampleQuery }).subscribe({ + complete: resolve + })); + + expect(abortControllers.length).toBe(1); + expect(abortControllers[0].signal.aborted).toBe(false); + }); + + it('an unsuccessful fetch does not cause the AbortController to be aborted', async () => { + const { fetch } = mockFetch(); + const abortControllers = trackGlobalAbortControllers(); + fetch.mockRejectedValueOnce("This is an error!") + // the request would be closed by the browser in the case of an error anyways + const link = createHttpLink({ uri: 'data', fetch: fetch as any }); + + await new Promise<void>(resolve => execute(link, { query: sampleQuery }).subscribe({ + error: resolve + })); + + expect(abortControllers.length).toBe(1); + expect(abortControllers[0].signal.aborted).toBe(false); + }); }); const body = '{'; diff --git a/src/link/remove-typename/__tests__/removeTypenameFromVariables.ts b/src/link/remove-typename/__tests__/removeTypenameFromVariables.ts new file mode 100644 --- /dev/null +++ b/src/link/remove-typename/__tests__/removeTypenameFromVariables.ts @@ -0,0 +1,540 @@ +import { + KEEP, + removeTypenameFromVariables, +} from "../removeTypenameFromVariables"; +import { ApolloLink, Operation } from "../../core"; +import { Observable, gql } from "../../../core"; +import { createOperation, toPromise } from "../../utils"; + +type PartialOperation = Partial<Pick<Operation, "variables">> & + Pick<Operation, "query">; + +// Since this link modifies the `operation` and we only care to test against +// the changed operation, we use a custom `execute` helper here instead of the +// version exported by the `core` module, which expects a well-formed response. 
+async function execute(link: ApolloLink, operation: PartialOperation) { + function forward(operation: Operation) { + // use the `data` key to satisfy the TypeScript types required by + // `forward`'s' return value + return Observable.of({ data: operation }); + } + + const { data } = await toPromise( + link.request(createOperation({}, operation), forward)! + ); + + return data as Operation; +} + +test("strips all __typename keys by default", async () => { + const query = gql` + query Test($foo: FooInput!, $bar: BarInput!) { + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables(); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + foo: true, + bar: "Bar", + baz: { __typename: "Baz", baz: true }, + qux: [{ __typename: "Qux", qux: 0 }], + }, + bar: [{ __typename: "Bar", bar: true }], + }, + }); + + expect(variables).toStrictEqual({ + foo: { + foo: true, + bar: "Bar", + baz: { baz: true }, + qux: [{ qux: 0 }], + }, + bar: [{ bar: true }], + }); +}); + +test("does nothing when no variables are passed", async () => { + const query = gql` + query Test { + foo { + bar + } + } + `; + + const link = removeTypenameFromVariables(); + + const operation = { query }; + const resultOperation = await execute(link, operation); + + expect(resultOperation).toBe(operation); +}); + +test("does nothing when no variables are passed even if variables are declared in the document", async () => { + const query = gql` + query Test($unused: Boolean) { + foo { + bar + } + } + `; + + const link = removeTypenameFromVariables(); + + const operation = { query }; + const resultOperation = await execute(link, operation); + + expect(resultOperation).toBe(operation); +}); + +test("keeps __typename for variables with types defined by `except`", async () => { + const query = gql` + query Test($foo: JSON, $bar: BarInput) { + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + foo: true, + baz: { __typename: "Baz", baz: true }, + }, + bar: { __typename: "Bar", bar: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + __typename: "Foo", + foo: true, + baz: { __typename: "Baz", baz: true }, + }, + bar: { bar: true }, + }); +}); + +test("keeps __typename in all variables with types configured with `except`", async () => { + const query = gql` + query Test($foo: JSON, $bar: Config, $baz: BazInput) { + someField(foo: $foo, bar: $bar, baz: $baz) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + Config: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { __typename: "Foo", foo: true }, + bar: { __typename: "Bar", bar: true }, + baz: { __typename: "Baz", baz: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { __typename: "Foo", foo: true }, + bar: { __typename: "Bar", bar: true }, + baz: { baz: true }, + }); +}); + +test("handles variable declarations declared as non null and list types", async () => { + const query = gql` + query Test($foo: JSON!, $bar: [JSON], $baz: [JSON!]!, $qux: QuxInput!) 
{ + someField(foo: $foo, bar: $bar, baz: $baz) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { __typename: "Foo", foo: true }, + bar: [ + { __typename: "Bar", bar: true, baz: { __typename: "Baz", baz: true } }, + ], + baz: [ + { __typename: "Baz", baz: true }, + { __typename: "Baz", baz: true }, + ], + qux: { __typename: "Qux", qux: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { __typename: "Foo", foo: true }, + bar: [ + { __typename: "Bar", bar: true, baz: { __typename: "Baz", baz: true } }, + ], + baz: [ + { __typename: "Baz", baz: true }, + { __typename: "Baz", baz: true }, + ], + qux: { qux: true }, + }); +}); + +test("keeps __typename at configured fields under input object types", async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: KEEP, + baz: KEEP, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + aa: true, + bar: { + __typename: "Bar", + bb: true, + }, + baz: { + __typename: "Baz", + cc: true, + }, + qux: { + __typename: "Qux", + dd: true, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + aa: true, + bar: { + __typename: "Bar", + bb: true, + }, + baz: { + __typename: "Baz", + cc: true, + }, + qux: { + dd: true, + }, + }, + }); +}); + +test("keeps __typename at a deeply nested field", async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: { + baz: { + qux: KEEP, + }, + }, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + bar: { + __typename: "Bar", + baz: { + __typename: "Baz", + qux: { + __typename: "Qux", + quux: true, + }, + }, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + bar: { + baz: { + qux: { + __typename: "Qux", + quux: true, + }, + }, + }, + }, + }); +}); + +test("handles configured fields varying nesting levels", async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: KEEP, + baz: { + qux: KEEP, + }, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + bar: { + __typename: "Bar", + aa: true, + }, + baz: { + __typename: "Baz", + qux: { + __typename: "Qux", + quux: true, + }, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + bar: { + __typename: "Bar", + aa: true, + }, + baz: { + qux: { + __typename: "Qux", + quux: true, + }, + }, + }, + }); +}); + +test("handles multiple configured types with fields", async () => { + const query = gql` + query Test($foo: FooInput, $baz: BazInput) { + someField(foo: $foo, baz: $baz) + } + `; + + const link = removeTypenameFromVariables({ + except: { + FooInput: { + bar: KEEP, + }, + BazInput: { + qux: KEEP, + }, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + bar: { + __typename: "Bar", + aa: true, + }, + }, + baz: { + __typename: "Bar", + qux: { + __typename: "Qux", + bb: true, + }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + bar: { + __typename: "Bar", + aa: true, + }, + }, + 
baz: { + qux: { + __typename: "Qux", + bb: true, + }, + }, + }); +}); + +test("handles when __typename is not present in all paths", async () => { + const query = gql` + query Test($foo: JSON, $bar: BarInput) { + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + foo: true, + baz: { __typename: "Baz", baz: true }, + }, + bar: { bar: true }, + qux: { __typename: "Qux", bar: true }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + foo: true, + baz: { __typename: "Baz", baz: true }, + }, + bar: { bar: true }, + qux: { bar: true }, + }); +}); + +test("handles when __typename is not present in variables", async () => { + const query = gql` + query Test($foo: JSON, $bar: BarInput) { + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + foo: true, + baz: { baz: true }, + }, + bar: { bar: true }, + qux: [{ foo: true }], + }, + }); + + expect(variables).toStrictEqual({ + foo: { + foo: true, + baz: { baz: true }, + }, + bar: { bar: true }, + qux: [{ foo: true }], + }); +}); + +test("handles when declared variables are unused", async () => { + const query = gql` + query Test($foo: FooInput, $unused: JSON) { + someField(foo: $foo, bar: $bar) + } + `; + + const link = removeTypenameFromVariables({ + except: { + JSON: KEEP, + }, + }); + + const { variables } = await execute(link, { + query, + variables: { + foo: { + __typename: "Foo", + foo: true, + baz: { __typename: "Bar", baz: true }, + }, + }, + }); + + expect(variables).toStrictEqual({ + foo: { + foo: true, + baz: { baz: true }, + }, + }); +}); + +test("ensures operation.getContext and operation.setContext functions are properly forwarded", async () => { + const query = gql` + query Test($foo: FooInput) { + someField(foo: $foo) + } + `; + + const link = removeTypenameFromVariables(); + + const operationWithoutVariables = await execute(link, { query }); + const operationWithVariables = await execute(link, { + query, + variables: { foo: { __typename: "FooInput", bar: true } }, + }); + + expect(typeof operationWithoutVariables.getContext).toBe("function"); + expect(typeof operationWithoutVariables.setContext).toBe("function"); + expect(typeof operationWithVariables.getContext).toBe("function"); + expect(typeof operationWithVariables.setContext).toBe("function"); +}); diff --git a/src/link/schema/__tests__/schemaLink.ts b/src/link/schema/__tests__/schemaLink.ts --- a/src/link/schema/__tests__/schemaLink.ts +++ b/src/link/schema/__tests__/schemaLink.ts @@ -29,7 +29,7 @@ describe('SchemaLink', () => { it('raises warning if called with concat', () => { const link = new SchemaLink({ schema }); const _warn = console.warn; - console.warn = (warning: any) => expect(warning['message']).toBeDefined(); + console.warn = (...args) => expect(args).toEqual(["You are calling concat on a terminating link, which will have no effect %o", link]); expect(link.concat((operation, forward) => forward(operation))).toEqual( link, ); diff --git a/src/link/utils/__tests__/filterOperationVariables.ts b/src/link/utils/__tests__/filterOperationVariables.ts new file mode 100644 --- /dev/null +++ b/src/link/utils/__tests__/filterOperationVariables.ts @@ -0,0 +1,38 @@ +import gql from 'graphql-tag'; +import { filterOperationVariables } from 
'../filterOperationVariables'; + +const sampleQueryWithVariables = gql` + query MyQuery($a: Int!) { + stub(a: $a) { + id + } + } +`; + +const sampleQueryWithoutVariables = gql` + query MyQuery { + stub { + id + } + } +`; + +describe('filterOperationVariables', () => { + it('filters unused variables', () => { + const variables = { a: 1, b: 2, c: 3 }; + const result = filterOperationVariables( + variables, + sampleQueryWithoutVariables + ); + expect(result).toEqual({}); + }); + + it('does not filter used variables', () => { + const variables = { a: 1, b: 2, c: 3 }; + const result = filterOperationVariables( + variables, + sampleQueryWithVariables + ); + expect(result).toEqual({ a: 1 }); + }); +}); diff --git a/src/react/components/__tests__/client/Mutation.test.tsx b/src/react/components/__tests__/client/Mutation.test.tsx --- a/src/react/components/__tests__/client/Mutation.test.tsx +++ b/src/react/components/__tests__/client/Mutation.test.tsx @@ -1,23 +1,21 @@ -import React, { useState, PropsWithChildren } from 'react'; -import gql from 'graphql-tag'; -import { ExecutionResult, GraphQLError } from 'graphql'; -import userEvent from '@testing-library/user-event'; -import { render, screen, waitFor, act } from '@testing-library/react'; - -import { ApolloClient } from '../../../../core'; -import { ApolloError } from '../../../../errors'; -import { DataProxy, InMemoryCache as Cache } from '../../../../cache'; -import { ApolloProvider } from '../../../context'; +import React, { useState, PropsWithChildren } from "react"; +import gql from "graphql-tag"; +import { ExecutionResult, GraphQLError } from "graphql"; +import userEvent from "@testing-library/user-event"; +import { render, screen, waitFor, act } from "@testing-library/react"; + +import { ApolloClient } from "../../../../core"; +import { ApolloError } from "../../../../errors"; +import { DataProxy, InMemoryCache as Cache } from "../../../../cache"; +import { ApolloProvider } from "../../../context"; import { itAsync, MockedProvider, MockLink, mockSingleLink, -} from '../../../../testing'; -import { Query } from '../../Query'; -import { Mutation } from '../../Mutation'; - -const IS_REACT_18 = React.version.startsWith('18'); +} from "../../../../testing"; +import { Query } from "../../Query"; +import { Mutation } from "../../Mutation"; const mutation = gql` mutation createTodo($text: String!) 
{ @@ -43,79 +41,79 @@ type Data = { const data: Data = { createTodo: { - __typename: 'Todo', - id: '99', - text: 'This one was created with a mutation.', - completed: true + __typename: "Todo", + id: "99", + text: "This one was created with a mutation.", + completed: true, }, - __typename: 'Mutation' + __typename: "Mutation", }; const data2: Data = { createTodo: { - __typename: 'Todo', - id: '100', - text: 'This one was created with a mutation.', - completed: true + __typename: "Todo", + id: "100", + text: "This one was created with a mutation.", + completed: true, }, - __typename: 'Mutation' + __typename: "Mutation", }; const mocks = [ { request: { query: mutation }, - result: { data } + result: { data }, }, { request: { query: mutation }, - result: { data: data2 } - } + result: { data: data2 }, + }, ]; const cache = new Cache({ addTypename: false }); -describe('General Mutation testing', () => { - it('pick prop client over context client', async () => { +describe("General Mutation testing", () => { + it("pick prop client over context client", async () => { const mock = (text: string) => [ { request: { query: mutation }, result: { data: { createTodo: { - __typename: 'Todo', - id: '99', + __typename: "Todo", + id: "99", text, - completed: true + completed: true, }, - __typename: 'Mutation' - } - } + __typename: "Mutation", + }, + }, }, { request: { query: mutation }, result: { data: { createTodo: { - __typename: 'Todo', - id: '100', + __typename: "Todo", + id: "100", text, - completed: true + completed: true, }, - __typename: 'Mutation' - } - } - } + __typename: "Mutation", + }, + }, + }, ]; - const mocksProps = mock('This is the result of the prop client mutation.'); + const mocksProps = mock("This is the result of the prop client mutation."); const mocksContext = mock( - 'This is the result of the context client mutation.' + "This is the result of the context client mutation." 
); function mockClient(m: any) { return new ApolloClient({ link: new MockLink(m, false), - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); } @@ -136,48 +134,66 @@ describe('General Mutation testing', () => { }; const { rerender } = render(<Component />); - await waitFor(() => { - screen.getByText('Create'); - }, { interval: 1 }); + await waitFor( + () => { + screen.getByText("Create"); + }, + { interval: 1 } + ); // context client mutation - await userEvent.click(screen.getByText('Create')); + await userEvent.click(screen.getByText("Create")); - await waitFor(() => { - expect(spy).toHaveBeenCalledWith(mocksContext[0].result); - }, { interval: 1 }); + await waitFor( + () => { + expect(spy).toHaveBeenCalledWith(mocksContext[0].result); + }, + { interval: 1 } + ); // props client mutation rerender(<Component propsClient={propsClient} />); - await userEvent.click(screen.getByText('Create')); + await userEvent.click(screen.getByText("Create")); - await waitFor(() => { - expect(spy).toHaveBeenCalledWith(mocksProps[0].result); - }, { interval: 1 }); + await waitFor( + () => { + expect(spy).toHaveBeenCalledWith(mocksProps[0].result); + }, + { interval: 1 } + ); // context client mutation rerender(<Component propsClient={undefined} />); - await userEvent.click(screen.getByText('Create')); + await userEvent.click(screen.getByText("Create")); - await waitFor(() => { - expect(spy).toHaveBeenCalledWith(mocksContext[1].result); - }, { interval: 1 }); + await waitFor( + () => { + expect(spy).toHaveBeenCalledWith(mocksContext[1].result); + }, + { interval: 1 } + ); // props client mutation rerender(<Component propsClient={propsClient} />); - await userEvent.click(screen.getByText('Create')); + await userEvent.click(screen.getByText("Create")); - await waitFor(() => { - expect(spy).toHaveBeenCalledWith(mocksProps[1].result); - }, { interval: 1 }); + await waitFor( + () => { + expect(spy).toHaveBeenCalledWith(mocksProps[1].result); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(spy).toHaveBeenCalledTimes(4); - }, { interval: 1 }); + await waitFor( + () => { + expect(spy).toHaveBeenCalledTimes(4); + }, + { interval: 1 } + ); }); - itAsync('performs a mutation', (resolve, reject) => { + itAsync("performs a mutation", (resolve, reject) => { let count = 0; const Component = () => ( <Mutation mutation={mutation}> @@ -215,37 +231,40 @@ describe('General Mutation testing', () => { }).then(resolve, reject); }); - itAsync('can bind only the mutation and not rerender by props', (resolve, reject) => { - let count = 0; - const Component = () => ( - <Mutation mutation={mutation} ignoreResults> - {(createTodo: any, result: any) => { - if (count === 0) { - expect(result.loading).toEqual(false); - expect(result.called).toEqual(false); - setTimeout(() => { - createTodo().then((r: any) => { - expect(r!.data).toEqual(data); - resolve(); + itAsync( + "can bind only the mutation and not rerender by props", + (resolve, reject) => { + let count = 0; + const Component = () => ( + <Mutation mutation={mutation} ignoreResults> + {(createTodo: any, result: any) => { + if (count === 0) { + expect(result.loading).toEqual(false); + expect(result.called).toEqual(false); + setTimeout(() => { + createTodo().then((r: any) => { + expect(r!.data).toEqual(data); + resolve(); + }); }); - }); - } else if (count === 1) { - reject('rerender happened with ignoreResults turned on'); - } - count++; - return <div />; - }} - </Mutation> - ); + } else if (count === 1) { + reject("rerender happened 
with ignoreResults turned on"); + } + count++; + return <div />; + }} + </Mutation> + ); - render( - <MockedProvider mocks={mocks}> - <Component /> - </MockedProvider> - ); - }); + render( + <MockedProvider mocks={mocks}> + <Component /> + </MockedProvider> + ); + } + ); - it('returns a resolved promise when calling the mutation function', async () => { + it("returns a resolved promise when calling the mutation function", async () => { let called = false; let result: any; const Component = () => ( @@ -253,7 +272,7 @@ describe('General Mutation testing', () => { {(createTodo: any) => { if (!called) { createTodo().then((_result: any) => { - result = _result + result = _result; }); } called = true; @@ -271,10 +290,10 @@ describe('General Mutation testing', () => { await waitFor(() => { expect(result!.data).toEqual(data); - }) + }); }); - it('returns rejected promise when calling the mutation function', async () => { + it("returns rejected promise when calling the mutation function", async () => { let done = false; let called = false; const Component = () => ( @@ -282,7 +301,7 @@ describe('General Mutation testing', () => { {(createTodo: any) => { if (!called) { createTodo().catch((error: any) => { - expect(error).toEqual(new Error('Error 1')); + expect(error).toEqual(new Error("Error 1")); done = true; }); } @@ -296,8 +315,8 @@ describe('General Mutation testing', () => { const mocksWithErrors = [ { request: { query: mutation }, - error: new Error('Error 1') - } + error: new Error("Error 1"), + }, ]; render( @@ -311,7 +330,7 @@ describe('General Mutation testing', () => { }); }); - it('only shows result for the latest mutation that is in flight', async () => { + it("only shows result for the latest mutation that is in flight", async () => { let count = 0; const onCompleted = (dataMutation: Data) => { @@ -354,14 +373,14 @@ describe('General Mutation testing', () => { }); }); - it('only shows the error for the latest mutation in flight', async () => { + it("only shows the error for the latest mutation in flight", async () => { let count = 0; const onError = (error: Error) => { if (count === 1) { - expect(error).toEqual(new Error('Error 1')); + expect(error).toEqual(new Error("Error 1")); } else if (count === 3) { - expect(error).toEqual(new Error('Error 2')); + expect(error).toEqual(new Error("Error 2")); } }; const Component = () => ( @@ -379,7 +398,7 @@ describe('General Mutation testing', () => { expect(result.loading).toEqual(false); expect(result.data).toEqual(undefined); expect(result.called).toEqual(true); - expect(result.error).toEqual(new Error('Error 2')); + expect(result.error).toEqual(new Error("Error 2")); } count++; return <div />; @@ -390,12 +409,12 @@ describe('General Mutation testing', () => { const mocksWithErrors = [ { request: { query: mutation }, - error: new Error('Error 2') + error: new Error("Error 2"), }, { request: { query: mutation }, - error: new Error('Error 2') - } + error: new Error("Error 2"), + }, ]; render( @@ -409,19 +428,19 @@ describe('General Mutation testing', () => { }); }); - it('calls the onCompleted prop as soon as the mutation is complete', async () => { + it("calls the onCompleted prop as soon as the mutation is complete", async () => { let onCompletedCalled = false; class Component extends React.Component { state = { - mutationDone: false + mutationDone: false, }; onCompleted = (mutationData: Data) => { expect(mutationData).toEqual(data); onCompletedCalled = true; this.setState({ - mutationDone: true + mutationDone: true, }); }; @@ -450,11 +469,11 
@@ describe('General Mutation testing', () => { ); await waitFor(() => { - expect(onCompletedCalled).toEqual(true) + expect(onCompletedCalled).toEqual(true); }); }); - it('renders result of the children render prop', () => { + it("renders result of the children render prop", () => { const Component = () => ( <Mutation mutation={mutation}>{() => <div>result</div>}</Mutation> ); @@ -464,26 +483,24 @@ describe('General Mutation testing', () => { <Component /> </MockedProvider> ); - expect(screen.getByText('result')).toBeTruthy(); + expect(screen.getByText("result")).toBeTruthy(); // unmount here or else the mutation will resolve later and schedule an update that's not wrapped in act. - unmount() + unmount(); }); - it('renders an error state', async () => { + it("renders an error state", async () => { let count = 0; const Component = () => ( <Mutation mutation={mutation}> {(createTodo: any, result: any) => { if (count === 0) { createTodo().catch((err: any) => { - expect(err).toEqual(new Error('error occurred')); + expect(err).toEqual(new Error("error occurred")); }); } else if (count === 1) { expect(result.loading).toBeTruthy(); } else if (count === 2) { - expect(result.error).toEqual( - new Error('error occurred') - ); + expect(result.error).toEqual(new Error("error occurred")); } count++; return <div />; @@ -494,8 +511,8 @@ describe('General Mutation testing', () => { const mockError = [ { request: { query: mutation }, - error: new Error('error occurred') - } + error: new Error("error occurred"), + }, ]; render( @@ -509,11 +526,11 @@ describe('General Mutation testing', () => { }); }); - it('renders an error state and throws when encountering graphql errors', async () => { + it("renders an error state and throws when encountering graphql errors", async () => { let count = 0; const expectedError = new ApolloError({ - graphQLErrors: [new GraphQLError('error occurred')] + graphQLErrors: [new GraphQLError("error occurred")], }); const Component = () => ( @@ -522,7 +539,7 @@ describe('General Mutation testing', () => { if (count === 0) { createTodo() .then(() => { - throw new Error('Did not expect a result'); + throw new Error("Did not expect a result"); }) .catch((e: any) => { expect(e).toEqual(expectedError); @@ -542,9 +559,9 @@ describe('General Mutation testing', () => { { request: { query: mutation }, result: { - errors: [new GraphQLError('error occurred')] - } - } + errors: [new GraphQLError("error occurred")], + }, + }, ]; render( @@ -558,7 +575,7 @@ describe('General Mutation testing', () => { }); }); - it('renders an error state and does not throw when encountering graphql errors when errorPolicy=all', async () => { + it("renders an error state and does not throw when encountering graphql errors when errorPolicy=all", async () => { let count = 0; const Component = () => ( <Mutation mutation={mutation}> @@ -569,7 +586,7 @@ describe('General Mutation testing', () => { if (fetchResult && fetchResult.errors) { expect(fetchResult.errors.length).toEqual(1); expect(fetchResult.errors[0]).toEqual( - new GraphQLError('error occurred') + new GraphQLError("error occurred") ); } else { throw new Error( @@ -585,7 +602,7 @@ describe('General Mutation testing', () => { } else if (count === 2) { expect(result.error).toEqual( new ApolloError({ - graphQLErrors: [new GraphQLError('error occurred')] + graphQLErrors: [new GraphQLError("error occurred")], }) ); } @@ -599,14 +616,14 @@ describe('General Mutation testing', () => { { request: { query: mutation }, result: { - errors: [new GraphQLError('error 
occurred')] - } - } + errors: [new GraphQLError("error occurred")], + }, + }, ]; render( <MockedProvider - defaultOptions={{ mutate: { errorPolicy: 'all' } }} + defaultOptions={{ mutate: { errorPolicy: "all" } }} mocks={mockError} > <Component /> @@ -618,10 +635,10 @@ describe('General Mutation testing', () => { }); }); - it('renders an error state and throws when encountering network errors when errorPolicy=all', async () => { + it("renders an error state and throws when encountering network errors when errorPolicy=all", async () => { let count = 0; const expectedError = new ApolloError({ - networkError: new Error('network error') + networkError: new Error("network error"), }); const Component = () => ( <Mutation mutation={mutation}> @@ -629,7 +646,7 @@ describe('General Mutation testing', () => { if (count === 0) { createTodo() .then(() => { - throw new Error('Did not expect a result'); + throw new Error("Did not expect a result"); }) .catch((e: any) => { expect(e).toEqual(expectedError); @@ -648,13 +665,13 @@ describe('General Mutation testing', () => { const mockError = [ { request: { query: mutation }, - error: new Error('network error') - } + error: new Error("network error"), + }, ]; render( <MockedProvider - defaultOptions={{ mutate: { errorPolicy: 'all' } }} + defaultOptions={{ mutate: { errorPolicy: "all" } }} mocks={mockError} > <Component /> @@ -666,16 +683,16 @@ describe('General Mutation testing', () => { }); }); - it('calls the onError prop if the mutation encounters an error', async () => { + it("calls the onError prop if the mutation encounters an error", async () => { let onRenderCalled = false; class Component extends React.Component { state = { - mutationError: false + mutationError: false, }; onError = (error: Error) => { - expect(error.message).toMatch('error occurred'); + expect(error.message).toMatch("error occurred"); onRenderCalled = true; this.setState({ mutationError: true }); }; @@ -703,8 +720,8 @@ describe('General Mutation testing', () => { const mockError = [ { request: { query: mutation }, - error: new Error('error occurred') - } + error: new Error("error occurred"), + }, ]; render( @@ -718,9 +735,9 @@ describe('General Mutation testing', () => { }); }); - it('performs a mutation with variables prop', async () => { + it("performs a mutation with variables prop", async () => { const variables = { - text: 'play tennis' + text: "play tennis", }; let count = 0; @@ -746,8 +763,8 @@ describe('General Mutation testing', () => { const mocks1 = [ { request: { query: mutation, variables }, - result: { data } - } + result: { data }, + }, ]; render( @@ -761,9 +778,9 @@ describe('General Mutation testing', () => { }); }); - it('allows passing a variable to the mutate function', async () => { + it("allows passing a variable to the mutate function", async () => { const variables = { - text: 'play tennis' + text: "play tennis", }; let count = 0; @@ -789,8 +806,8 @@ describe('General Mutation testing', () => { const mocks1 = [ { request: { query: mutation, variables }, - result: { data } - } + result: { data }, + }, ]; render( @@ -804,21 +821,21 @@ describe('General Mutation testing', () => { }); }); - it('allows an optimistic response prop', async () => { + it("allows an optimistic response prop", async () => { const link = mockSingleLink(...mocks); const client = new ApolloClient({ link, - cache + cache, }); const optimisticResponse = { createTodo: { - id: '99', - text: 'This is an optimistic response', + id: "99", + text: "This is an optimistic response", completed: 
false, - __typename: 'Todo' + __typename: "Todo", }, - __typename: 'Mutation' + __typename: "Mutation", }; let count = 0; @@ -828,7 +845,7 @@ describe('General Mutation testing', () => { if (count === 0) { createTodo(); const dataInStore = client.cache.extract(true); - expect(dataInStore['Todo:99']).toEqual( + expect(dataInStore["Todo:99"]).toEqual( optimisticResponse.createTodo ); } else if (count === 1) { @@ -856,21 +873,21 @@ describe('General Mutation testing', () => { }); }); - it('allows passing an optimistic response to the mutate function', async () => { + it("allows passing an optimistic response to the mutate function", async () => { const link = mockSingleLink(...mocks); const client = new ApolloClient({ link, - cache + cache, }); const optimisticResponse = { createTodo: { - id: '99', - text: 'This is an optimistic response', + id: "99", + text: "This is an optimistic response", completed: false, - __typename: 'Todo' + __typename: "Todo", }, - __typename: 'Mutation' + __typename: "Mutation", }; let count = 0; @@ -880,7 +897,7 @@ describe('General Mutation testing', () => { if (count === 0) { createTodo({ optimisticResponse }); const dataInStore = client.cache.extract(true); - expect(dataInStore['Todo:99']).toEqual( + expect(dataInStore["Todo:99"]).toEqual( optimisticResponse.createTodo ); } else if (count === 2) { @@ -905,7 +922,7 @@ describe('General Mutation testing', () => { }); }); - it('allows a refetchQueries prop', async () => { + it("allows a refetchQueries prop", async () => { const query = gql` query getTodo { todo { @@ -920,30 +937,30 @@ describe('General Mutation testing', () => { const queryData = { todo: { - id: '1', - text: 'todo from query', + id: "1", + text: "todo from query", completed: false, - __typename: 'Todo' + __typename: "Todo", }, - __typename: 'Query' + __typename: "Query", }; const mocksWithQuery = [ ...mocks, { request: { query }, - result: { data: queryData } + result: { data: queryData }, }, { request: { query }, - result: { data: queryData } + result: { data: queryData }, }, ]; const refetchQueries = [ { - query - } + query, + }, ]; let renderCount = 0; @@ -978,7 +995,7 @@ describe('General Mutation testing', () => { ); }); - it('allows a refetchQueries prop as string and variables have updated', async () => new Promise((resolve, reject) => { + it("allows a refetchQueries prop as string and variables have updated", async () => { const query = gql` query people($first: Int) { allPeople(first: $first) { @@ -991,43 +1008,46 @@ describe('General Mutation testing', () => { const peopleData1 = { allPeople: { - people: [{ name: 'Luke Skywalker', __typename: 'Person' }], - __typename: 'People' - } + people: [{ name: "Luke Skywalker", __typename: "Person" }], + __typename: "People", + }, }; const peopleData2 = { allPeople: { - people: [{ name: 'Han Solo', __typename: 'Person' }], - __typename: 'People' - } + people: [{ name: "Han Solo", __typename: "Person" }], + __typename: "People", + }, }; const peopleData3 = { allPeople: { - people: [{ name: 'Lord Vader', __typename: 'Person' }], - __typename: 'People' - } + people: [{ name: "Lord Vader", __typename: "Person" }], + __typename: "People", + }, }; const peopleMocks = [ ...mocks, { request: { query, variables: { first: 1 } }, - result: { data: peopleData1 } + result: { data: peopleData1 }, }, { request: { query, variables: { first: 2 } }, - result: { data: peopleData2 } + result: { data: peopleData2 }, }, { request: { query, variables: { first: 2 } }, - result: { data: peopleData3 } - } + result: { data: 
peopleData3 }, + }, ]; - const refetchQueries = ['people']; + const refetchQueries = ["people"]; let count = 0; + let testFailures: any[] = []; - const Component: React.FC<PropsWithChildren<PropsWithChildren<any>>> = props => { + const Component: React.FC<PropsWithChildren<PropsWithChildren<any>>> = ( + props + ) => { const [variables, setVariables] = useState(props.variables); return ( <Mutation mutation={mutation} refetchQueries={refetchQueries}> @@ -1055,19 +1075,20 @@ describe('General Mutation testing', () => { // mutation loading expect(resultMutation.loading).toBe(true); } else if (count === 5) { - // mutation loaded - expect(resultMutation.loading).toBe(false); + // query refetched or mutation loaded + // or both finished batched together + // hard to make assumptions here } else if (count === 6) { - // query refetched + // both loaded expect(resultQuery.loading).toBe(false); expect(resultMutation.loading).toBe(false); expect(resultQuery.data).toEqual(peopleData3); } else { - reject(`Too many renders (${count})`); + throw new Error(`Too many renders (${count})`); } count++; } catch (err) { - reject(err); + testFailures.push(err); } return null; }} @@ -1083,93 +1104,97 @@ describe('General Mutation testing', () => { </MockedProvider> ); - waitFor(() => { - expect(count).toEqual(IS_REACT_18 ? 6 : 7); - }).then(resolve, reject); - })); + await waitFor(() => { + if (testFailures.length > 0) { + throw testFailures[0]; + } + expect(count).toEqual(7); + }); + }); - it('allows refetchQueries to be passed to the mutate function', () => new Promise((resolve, reject) => { - const query = gql` - query getTodo { - todo { - id - text - completed + it("allows refetchQueries to be passed to the mutate function", () => + new Promise((resolve, reject) => { + const query = gql` + query getTodo { + todo { + id + text + completed + __typename + } __typename } - __typename - } - `; - - const queryData = { - todo: { - id: '1', - text: 'todo from query', - completed: false, - __typename: 'Todo' - }, - __typename: 'Query' - }; - - const mocksWithQuery = [ - ...mocks, - { - request: { query }, - result: { data: queryData } - }, - { - request: { query }, - result: { data: queryData } - }, - ]; - - const refetchQueries = [ - { - query - } - ]; + `; + + const queryData = { + todo: { + id: "1", + text: "todo from query", + completed: false, + __typename: "Todo", + }, + __typename: "Query", + }; - let count = 0; - const Component = () => ( - <Mutation mutation={mutation}> - {(createTodo: any, resultMutation: any) => ( - <Query query={query}> - {(resultQuery: any) => { - try { - if (count === 0) { - setTimeout(() => createTodo({ refetchQueries }), 10); - } else if (count === 1) { - expect(resultMutation.loading).toBe(false); - expect(resultQuery.loading).toBe(false); - } else if (count === 2) { - expect(resultMutation.loading).toBe(true); - expect(resultQuery.data).toEqual(queryData); - } else if (count === 3) { - expect(resultMutation.loading).toBe(false); + const mocksWithQuery = [ + ...mocks, + { + request: { query }, + result: { data: queryData }, + }, + { + request: { query }, + result: { data: queryData }, + }, + ]; + + const refetchQueries = [ + { + query, + }, + ]; + + let count = 0; + const Component = () => ( + <Mutation mutation={mutation}> + {(createTodo: any, resultMutation: any) => ( + <Query query={query}> + {(resultQuery: any) => { + try { + if (count === 0) { + setTimeout(() => createTodo({ refetchQueries }), 10); + } else if (count === 1) { + expect(resultMutation.loading).toBe(false); + 
expect(resultQuery.loading).toBe(false); + } else if (count === 2) { + expect(resultMutation.loading).toBe(true); + expect(resultQuery.data).toEqual(queryData); + } else if (count === 3) { + expect(resultMutation.loading).toBe(false); + } + count++; + } catch (err) { + reject(err); } - count++; - } catch (err) { - reject(err); - } - return null; - }} - </Query> - )} - </Mutation> - ); + return null; + }} + </Query> + )} + </Mutation> + ); - render( - <MockedProvider mocks={mocksWithQuery}> - <Component /> - </MockedProvider> - ); + render( + <MockedProvider mocks={mocksWithQuery}> + <Component /> + </MockedProvider> + ); - waitFor(() => { - expect(count).toBe(4); - }).then(resolve, reject); - })); + waitFor(() => { + expect(count).toBe(4); + }).then(resolve, reject); + })); - it('has an update prop for updating the store after the mutation', async () => { + it("has an update prop for updating the store after the mutation", async () => { const update = (_proxy: DataProxy, response: ExecutionResult) => { expect(response.data).toEqual(data); }; @@ -1200,7 +1225,7 @@ describe('General Mutation testing', () => { }); }); - it('allows update to be passed to the mutate function', async () => { + it("allows update to be passed to the mutate function", async () => { const update = (_proxy: DataProxy, response: ExecutionResult) => { expect(response.data).toEqual(data); }; @@ -1227,17 +1252,17 @@ describe('General Mutation testing', () => { ); await waitFor(() => { - expect(count).toBe(3) + expect(count).toBe(3); }); }); - it('allows for overriding the options passed in the props by passing them in the mutate function', async () => { + it("allows for overriding the options passed in the props by passing them in the mutate function", async () => { const variablesProp = { - text: 'play tennis' + text: "play tennis", }; const variablesMutateFn = { - text: 'go swimming' + text: "go swimming", }; let count = 0; @@ -1260,12 +1285,12 @@ describe('General Mutation testing', () => { const mocks1 = [ { request: { query: mutation, variables: variablesProp }, - result: { data } + result: { data }, }, { request: { query: mutation, variables: variablesMutateFn }, - result: { data: data2 } - } + result: { data: data2 }, + }, ]; render( @@ -1279,40 +1304,40 @@ describe('General Mutation testing', () => { }); }); - it('updates if the client changes', async () => { + it("updates if the client changes", async () => { const link1 = mockSingleLink({ request: { query: mutation }, - result: { data } + result: { data }, }); const client1 = new ApolloClient({ link: link1, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const data3 = { createTodo: { - __typename: 'Todo', - id: '100', - text: 'After updating client.', - completed: false + __typename: "Todo", + id: "100", + text: "After updating client.", + completed: false, }, - __typename: 'Mutation' + __typename: "Mutation", }; const link2 = mockSingleLink({ request: { query: mutation }, - result: { data: data3 } + result: { data: data3 }, }); const client2 = new ApolloClient({ link: link2, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; class Component extends React.Component { state = { - client: client1 + client: client1, }; render() { @@ -1328,7 +1353,7 @@ describe('General Mutation testing', () => { expect(result.data).toEqual(data); setTimeout(() => { this.setState({ - client: client2 + client: client2, }); }); } else if (count === 3) { @@ -1353,23 +1378,23 @@ 
describe('General Mutation testing', () => { }); }); - it('uses client from props instead of one provided by context', () => { + it("uses client from props instead of one provided by context", () => { const link1 = mockSingleLink({ request: { query: mutation }, - result: { data } + result: { data }, }); const client1 = new ApolloClient({ link: link1, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const link2 = mockSingleLink({ request: { query: mutation }, - result: { data: data2 } + result: { data: data2 }, }); const client2 = new ApolloClient({ link: link2, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; @@ -1402,7 +1427,7 @@ describe('General Mutation testing', () => { }); }); - it('errors if a query is passed instead of a mutation', () => { + it("errors if a query is passed instead of a mutation", () => { const query = gql` query todos { todos { @@ -1422,14 +1447,14 @@ describe('General Mutation testing', () => { </MockedProvider> ); }).toThrowError( - 'Running a Mutation requires a graphql Mutation, but a Query was used ' + - 'instead.' + "Running a Mutation requires a graphql Mutation, but a Query was used " + + "instead." ); console.log = errorLogger; }); - it('errors when changing from mutation to a query', async () => { + it("errors when changing from mutation to a query", async () => { let didError = false; const query = gql` query todos { @@ -1441,14 +1466,14 @@ describe('General Mutation testing', () => { class Component extends React.Component { state = { - query: mutation + query: mutation, }; componentDidCatch(e: Error) { expect(e).toEqual( new Error( - 'Running a Mutation requires a graphql Mutation, but a Query ' + - 'was used instead.' + "Running a Mutation requires a graphql Mutation, but a Query " + + "was used instead." ) ); didError = true; @@ -1459,7 +1484,7 @@ describe('General Mutation testing', () => { {() => { setTimeout(() => { this.setState({ - query + query, }); }); return null; @@ -1492,7 +1517,7 @@ describe('General Mutation testing', () => { console.log = errorLogger; }); - it('errors if a subscription is passed instead of a mutation', () => { + it("errors if a subscription is passed instead of a mutation", () => { const subscription = gql` subscription todos { todos { @@ -1512,14 +1537,14 @@ describe('General Mutation testing', () => { </MockedProvider> ); }).toThrowError( - 'Running a Mutation requires a graphql Mutation, but a Subscription ' + - 'was used instead.' + "Running a Mutation requires a graphql Mutation, but a Subscription " + + "was used instead." ); console.log = errorLogger; }); - it('errors when changing from mutation to a subscription', async () => { + it("errors when changing from mutation to a subscription", async () => { let didError = false; const subscription = gql` subscription todos { @@ -1531,14 +1556,14 @@ describe('General Mutation testing', () => { class Component extends React.Component { state = { - query: mutation + query: mutation, }; componentDidCatch(e: Error) { expect(e).toEqual( new Error( - 'Running a Mutation requires a graphql Mutation, but a ' + - 'Subscription was used instead.' + "Running a Mutation requires a graphql Mutation, but a " + + "Subscription was used instead." 
) ); didError = true; @@ -1550,7 +1575,7 @@ describe('General Mutation testing', () => { {() => { setTimeout(() => { this.setState({ - query: subscription + query: subscription, }); }); return null; @@ -1582,22 +1607,25 @@ describe('General Mutation testing', () => { console.log = errorLogger; }); - describe('after it has been unmounted', () => { - it('calls the onCompleted prop after the mutation is complete', async () => { + describe("after it has been unmounted", () => { + it("calls the onCompleted prop after the mutation is complete", async () => { let finished = false; let success = false; - const context = { "foo": "bar" } + const context = { foo: "bar" }; const onCompletedFn = jest.fn(); const checker = () => { setTimeout(() => { success = true; - expect(onCompletedFn).toHaveBeenCalledWith(data, expect.objectContaining({ context })); + expect(onCompletedFn).toHaveBeenCalledWith( + data, + expect.objectContaining({ context }) + ); }, 100); }; class Component extends React.Component { state = { - called: false + called: false, }; render() { @@ -1612,7 +1640,7 @@ describe('General Mutation testing', () => { createTodo({ context }).finally(() => { finished = true; }); - expect(onCompletedFn).toHaveBeenCalledWith + expect(onCompletedFn).toHaveBeenCalledWith; // eslint-disable-next-line testing-library/await-async-utils this.setState({ called: true }, checker); }); @@ -1630,32 +1658,38 @@ describe('General Mutation testing', () => { </MockedProvider> ); - await waitFor(() => { - // TODO(fixme): The following line fixes the RTL lint rule error: - // - // expect(waitFor(() => finished)).resolves.toBe(true); - // - // ...however it also causes the test to fail against React 17. - // eslint-disable-next-line testing-library/await-async-utils - expect(finished).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - // TODO(fixme): The following line fixes the RTL lint rule error: - // - // expect(waitFor(() => success)).resolves.toBe(true); - // - // ...however it also causes the test to fail against React 17. - // eslint-disable-next-line testing-library/await-async-utils - expect(success).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + // TODO(fixme): The following line fixes the RTL lint rule error: + // + // expect(waitFor(() => finished)).resolves.toBe(true); + // + // ...however it also causes the test to fail against React 17. + // eslint-disable-next-line testing-library/await-async-utils + expect(finished).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + // TODO(fixme): The following line fixes the RTL lint rule error: + // + // expect(waitFor(() => success)).resolves.toBe(true); + // + // ...however it also causes the test to fail against React 17. 
+ // eslint-disable-next-line testing-library/await-async-utils + expect(success).toBe(true); + }, + { interval: 1 } + ); }); }); - it('calls the onError prop if the mutation encounters an error', async () => { + it("calls the onError prop if the mutation encounters an error", async () => { let finished = false; let onErrorCalled = false; function onError(error: ApolloError) { - expect(error.message).toEqual('error occurred'); + expect(error.message).toEqual("error occurred"); onErrorCalled = true; } @@ -1677,8 +1711,8 @@ describe('General Mutation testing', () => { const mockError = [ { request: { query: mutation }, - error: new Error('error occurred') - } + error: new Error("error occurred"), + }, ]; render( @@ -1687,23 +1721,29 @@ describe('General Mutation testing', () => { </MockedProvider> ); - await waitFor(() => { - // TODO(fixme): The following line fixes the RTL lint rule error: - // - // expect(waitFor(() => onErrorCalled)).resolves.toBe(true); - // - // ...however it also causes the test to fail against React 17. - // eslint-disable-next-line testing-library/await-async-utils - expect(onErrorCalled).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - // TODO(fixme): The following line fixes the RTL lint rule error: - // - // expect(waitFor(() => finished)).resolves.toBe(true); - // - // ...however it also causes the test to fail against React 17. - // eslint-disable-next-line testing-library/await-async-utils - expect(finished).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + // TODO(fixme): The following line fixes the RTL lint rule error: + // + // expect(waitFor(() => onErrorCalled)).resolves.toBe(true); + // + // ...however it also causes the test to fail against React 17. + // eslint-disable-next-line testing-library/await-async-utils + expect(onErrorCalled).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + // TODO(fixme): The following line fixes the RTL lint rule error: + // + // expect(waitFor(() => finished)).resolves.toBe(true); + // + // ...however it also causes the test to fail against React 17. 
+ // eslint-disable-next-line testing-library/await-async-utils + expect(finished).toBe(true); + }, + { interval: 1 } + ); }); }); diff --git a/src/react/components/__tests__/client/Query.test.tsx b/src/react/components/__tests__/client/Query.test.tsx --- a/src/react/components/__tests__/client/Query.test.tsx +++ b/src/react/components/__tests__/client/Query.test.tsx @@ -1,17 +1,15 @@ -import React from 'react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; -import { render, screen, waitFor } from '@testing-library/react'; - -import { ApolloClient, NetworkStatus } from '../../../../core'; -import { ApolloError } from '../../../../errors'; -import { ApolloLink } from '../../../../link/core'; -import { InMemoryCache } from '../../../../cache'; -import { ApolloProvider } from '../../../context'; -import { itAsync, MockedProvider, mockSingleLink } from '../../../../testing'; -import { Query } from '../../Query'; - -const IS_REACT_18 = React.version.startsWith('18'); +import React from "react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; +import { render, screen, waitFor } from "@testing-library/react"; + +import { ApolloClient, NetworkStatus } from "../../../../core"; +import { ApolloError } from "../../../../errors"; +import { ApolloLink } from "../../../../link/core"; +import { InMemoryCache } from "../../../../cache"; +import { ApolloProvider } from "../../../context"; +import { itAsync, MockedProvider, mockSingleLink } from "../../../../testing"; +import { Query } from "../../Query"; const allPeopleQuery: DocumentNode = gql` query people { @@ -30,7 +28,7 @@ interface Data { } const allPeopleData: Data = { - allPeople: { people: [{ name: 'Luke Skywalker' }] }, + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; const allPeopleMocks = [ { @@ -41,8 +39,8 @@ const allPeopleMocks = [ const AllPeopleQuery = Query; -describe('Query component', () => { - itAsync('calls the children prop', (resolve, reject) => { +describe("Query component", () => { + itAsync("calls the children prop", (resolve, reject) => { let finished = false; const link = mockSingleLink({ request: { query: allPeopleQuery }, @@ -113,11 +111,11 @@ describe('Query component', () => { ); waitFor(() => { - expect(finished).toBe(true) + expect(finished).toBe(true); }).then(resolve, reject); }); - it('renders using the children prop', async () => { + it("renders using the children prop", async () => { const Component = () => ( <Query query={allPeopleQuery}>{(_: any) => <div>test</div>}</Query> ); @@ -129,11 +127,11 @@ describe('Query component', () => { ); await waitFor(() => { - expect(screen.getByText('test')).toBeTruthy(); + expect(screen.getByText("test")).toBeTruthy(); }); }); - describe('result provides', () => { + describe("result provides", () => { let consoleWarn = console.warn; beforeAll(() => { console.warn = () => null; @@ -143,7 +141,7 @@ describe('Query component', () => { console.warn = consoleWarn; }); - itAsync('client', (resolve, reject) => { + itAsync("client", (resolve, reject) => { let count = 0; const queryWithVariables: DocumentNode = gql` query people($first: Int) { @@ -197,12 +195,12 @@ describe('Query component', () => { }).then(resolve, reject); }); - itAsync('error', (resolve, reject) => { + itAsync("error", (resolve, reject) => { let finished = false; const mockError = [ { request: { query: allPeopleQuery }, - error: new Error('error occurred'), + error: new Error("error occurred"), }, ]; @@ -213,9 +211,7 @@ describe('Query component', () => { 
return null; } try { - expect(result.error).toEqual( - new Error('error occurred') - ); + expect(result.error).toEqual(new Error("error occurred")); finished = true; } catch (error) { reject(error); @@ -236,7 +232,7 @@ describe('Query component', () => { }).then(resolve, reject); }); - itAsync('refetch', (resolve, reject) => { + itAsync("refetch", (resolve, reject) => { const queryRefetch: DocumentNode = gql` query people($first: Int) { allPeople(first: $first) { @@ -247,9 +243,9 @@ describe('Query component', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; - const data3 = { allPeople: { people: [{ name: 'Darth Vader' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; + const data3 = { allPeople: { people: [{ name: "Darth Vader" }] } }; const refetchVariables = { first: 1, @@ -293,11 +289,7 @@ describe('Query component', () => { } if (count === 3) { // second data - if (IS_REACT_18) { - expect(data).toEqual(data3); - } else { - expect(data).toEqual(data2); - } + expect(data).toEqual(data2); } if (count === 5) { // third data @@ -338,17 +330,13 @@ describe('Query component', () => { ); waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(4); - } else { - expect(count).toBe(6); - } + expect(count).toBe(6); }).then(resolve, reject); }); - itAsync('fetchMore', (resolve, reject) => { - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + itAsync("fetchMore", (resolve, reject) => { + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const variables = { first: 2, @@ -425,10 +413,10 @@ describe('Query component', () => { waitFor(() => expect(count).toBe(2)).then(resolve, reject); }); - itAsync('startPolling', (resolve, reject) => { - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; - const data3 = { allPeople: { people: [{ name: 'Darth Vader' }] } }; + itAsync("startPolling", (resolve, reject) => { + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; + const data3 = { allPeople: { people: [{ name: "Darth Vader" }] } }; const mocks = [ { @@ -490,10 +478,10 @@ describe('Query component', () => { waitFor(() => expect(count).toBe(3)).then(resolve, reject); }); - itAsync('stopPolling', (resolve, reject) => { - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; - const data3 = { allPeople: { people: [{ name: 'Darth Vader' }] } }; + itAsync("stopPolling", (resolve, reject) => { + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; + const data3 = { allPeople: { people: [{ name: "Darth Vader" }] } }; const mocks = [ { @@ -541,9 +529,9 @@ describe('Query component', () => { waitFor(() => expect(count).toBe(POLL_COUNT)).then(resolve, reject); }); - itAsync('updateQuery', (resolve, reject) => { - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + itAsync("updateQuery", (resolve, reject) => { + const 
data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const variables = { first: 2, }; @@ -604,11 +592,11 @@ describe('Query component', () => { }); }); - describe('props allow', () => { - it('custom fetch-policy', async () => { + describe("props allow", () => { + it("custom fetch-policy", async () => { let count = 0; const Component = () => ( - <Query query={allPeopleQuery} fetchPolicy={'cache-only'}> + <Query query={allPeopleQuery} fetchPolicy={"cache-only"}> {(result: any) => { if (!result.loading) { expect(result.networkStatus).toBe(NetworkStatus.ready); @@ -630,7 +618,7 @@ describe('Query component', () => { }); }); - it('default fetch-policy', async () => { + it("default fetch-policy", async () => { let count = 0; const Component = () => ( <Query query={allPeopleQuery}> @@ -646,7 +634,7 @@ describe('Query component', () => { render( <MockedProvider - defaultOptions={{ watchQuery: { fetchPolicy: 'cache-only' } }} + defaultOptions={{ watchQuery: { fetchPolicy: "cache-only" } }} mocks={allPeopleMocks} > <Component /> @@ -658,9 +646,9 @@ describe('Query component', () => { }); }); - itAsync('notifyOnNetworkStatusChange', (resolve, reject) => { - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + itAsync("notifyOnNetworkStatusChange", (resolve, reject) => { + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const mocks = [ { @@ -688,11 +676,7 @@ describe('Query component', () => { }); } if (count === 2) { - if (IS_REACT_18) { - expect(result.loading).toBeFalsy(); - } else { - expect(result.loading).toBeTruthy(); - } + expect(result.loading).toBeTruthy(); } if (count === 3) { expect(result.loading).toBeFalsy(); @@ -714,18 +698,14 @@ describe('Query component', () => { ); waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(3) - } else { - expect(count).toBe(4) - } + expect(count).toBe(4); }).then(resolve, reject); }); - itAsync('pollInterval', (resolve, reject) => { - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; - const data3 = { allPeople: { people: [{ name: 'Darth Vader' }] } }; + itAsync("pollInterval", (resolve, reject) => { + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; + const data3 = { allPeople: { people: [{ name: "Darth Vader" }] } }; const mocks = [ { @@ -775,7 +755,7 @@ describe('Query component', () => { waitFor(() => expect(count).toBe(POLL_COUNT)).then(resolve, reject); }); - itAsync('skip', (resolve, reject) => { + itAsync("skip", (resolve, reject) => { let finished = false; const Component = () => ( <Query query={allPeopleQuery} skip> @@ -804,7 +784,7 @@ describe('Query component', () => { }).then(resolve, reject); }); - it('onCompleted with data', async () => { + it("onCompleted with data", async () => { const query = gql` query people($first: Int) { allPeople(first: $first) { @@ -815,8 +795,8 @@ describe('Query component', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const mocks = [ { 
request: { query, variables: { first: 1 } }, @@ -878,9 +858,9 @@ describe('Query component', () => { }); }); - itAsync('onError with data', (resolve, reject) => { + itAsync("onError with data", (resolve, reject) => { let finished = false; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const mocks = [ { @@ -919,8 +899,8 @@ describe('Query component', () => { }); }); - describe('props disallow', () => { - it('Mutation provided as query', () => { + describe("props disallow", () => { + it("Mutation provided as query", () => { const mutation = gql` mutation submitRepository { submitRepository(repoFullName: "apollographql/apollo-client") { @@ -939,14 +919,14 @@ describe('Query component', () => { </MockedProvider> ); }).toThrowError( - 'Running a Query requires a graphql Query, but a Mutation was used ' + - 'instead.' + "Running a Query requires a graphql Query, but a Mutation was used " + + "instead." ); console.error = errorLogger; }); - it('Subscription provided as query', () => { + it("Subscription provided as query", () => { const subscription = gql` subscription onCommentAdded($repoFullName: String!) { commentAdded(repoFullName: $repoFullName) { @@ -966,19 +946,19 @@ describe('Query component', () => { </MockedProvider> ); }).toThrowError( - 'Running a Query requires a graphql Query, but a Subscription was ' + - 'used instead.' + "Running a Query requires a graphql Query, but a Subscription was " + + "used instead." ); console.error = errorLogger; }); - itAsync('onCompleted with error', (resolve, reject) => { + itAsync("onCompleted with error", (resolve, reject) => { let finished = false; const mockError = [ { request: { query: allPeopleQuery }, - error: new Error('error occurred'), + error: new Error("error occurred"), }, ]; @@ -1007,9 +987,9 @@ describe('Query component', () => { }).then(resolve, reject); }); - it('onError with error', async () => { + it("onError with error", async () => { let finished = false; - const error = new Error('error occurred'); + const error = new Error("error occurred"); const mockError = [ { request: { query: allPeopleQuery }, @@ -1042,8 +1022,8 @@ describe('Query component', () => { }); }); - describe('should update', () => { - itAsync('if props change', (resolve, reject) => { + describe("should update", () => { + itAsync("if props change", (resolve, reject) => { const query = gql` query people($first: Int) { allPeople(first: $first) { @@ -1054,8 +1034,8 @@ describe('Query component', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const mocks = [ { request: { query, variables: { first: 1 } }, @@ -1134,7 +1114,7 @@ describe('Query component', () => { waitFor(() => expect(count).toBe(4)).then(resolve, reject); }); - itAsync('if the query changes', (resolve, reject) => { + itAsync("if the query changes", (resolve, reject) => { const query1 = allPeopleQuery; const query2 = gql` query people { @@ -1147,8 +1127,8 @@ describe('Query component', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo', id: '1' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han 
Solo", id: "1" }] } }; const mocks = [ { request: { query: query1 }, @@ -1211,7 +1191,7 @@ describe('Query component', () => { waitFor(() => expect(count).toBe(2)).then(resolve, reject); }); - it('with data while loading', async () => { + it("with data while loading", async () => { const query = gql` query people($first: Int) { allPeople(first: $first) { @@ -1224,11 +1204,11 @@ describe('Query component', () => { const data1 = { allPeople: { - people: [{ name: 'Luke Skywalker' }], + people: [{ name: "Luke Skywalker" }], }, }; const data2 = { - allPeople: { people: [{ name: 'Han Solo' }] }, + allPeople: { people: [{ name: "Han Solo" }] }, }; const mocks = [ { @@ -1274,7 +1254,9 @@ describe('Query component', () => { case 3: expect(result.loading).toBe(true); expect(result.data).toBeUndefined(); - expect(result.networkStatus).toBe(NetworkStatus.setVariables); + expect(result.networkStatus).toBe( + NetworkStatus.setVariables + ); break; case 4: expect(result.loading).toBe(false); @@ -1299,133 +1281,128 @@ describe('Query component', () => { ); }); - itAsync('should update if a manual `refetch` is triggered after a state change', (resolve, reject) => { - const query: DocumentNode = gql` - query { - allPeople { - people { - name + itAsync( + "should update if a manual `refetch` is triggered after a state change", + (resolve, reject) => { + const query: DocumentNode = gql` + query { + allPeople { + people { + name + } } } - } - `; + `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; - const link = mockSingleLink( - { - request: { query }, - result: { data: data1 }, - }, - { - request: { query }, - result: { data: data1 }, - }, - { - request: { query }, - result: { data: data1 }, - } - ); + const link = mockSingleLink( + { + request: { query }, + result: { data: data1 }, + }, + { + request: { query }, + result: { data: data1 }, + }, + { + request: { query }, + result: { data: data1 }, + } + ); - const client = new ApolloClient({ - link, - cache: new InMemoryCache({ addTypename: false }), - }); + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ addTypename: false }), + }); - let count = 0; + let count = 0; - class SomeComponent extends React.Component { - constructor(props: any) { - super(props); - this.state = { - open: false, - }; - this.toggle = this.toggle.bind(this); - } + class SomeComponent extends React.Component { + constructor(props: any) { + super(props); + this.state = { + open: false, + }; + this.toggle = this.toggle.bind(this); + } - toggle() { - this.setState((prevState: any) => ({ - open: !prevState.open, - })); - } + toggle() { + this.setState((prevState: any) => ({ + open: !prevState.open, + })); + } - render() { - const { open } = this.state as any; - return ( - <Query client={client} query={query} notifyOnNetworkStatusChange> - {(props: any) => { - try { - switch (count) { - case 0: - // Loading first response - expect(props.loading).toBe(true); - expect(open).toBe(false); - break; - case 1: - // First response loaded, change state value - expect(props.data).toEqual(data1); - expect(open).toBe(false); - setTimeout(() => { - this.toggle(); - }); - break; - case 2: - // State value changed, fire a refetch - expect(open).toBe(true); - setTimeout(() => { - props.refetch(); - }); - break; - case 3: - // Second response loading - if (IS_REACT_18) { - expect(props.loading).toBe(false); - } else { + render() { + const { open } = this.state as any; + return ( + <Query 
client={client} query={query} notifyOnNetworkStatusChange> + {(props: any) => { + try { + switch (count) { + case 0: + // Loading first response expect(props.loading).toBe(true); - } - break; - case 4: - // Second response received, fire another refetch - expect(props.data).toEqual(data1); - setTimeout(() => { - props.refetch(); - }); - break; - case 5: - // Third response loading - expect(props.loading).toBe(true); - break; - case 6: - // Third response received - expect(props.data).toEqual(data1); - break; - default: - reject('Unknown count'); + expect(open).toBe(false); + break; + case 1: + // First response loaded, change state value + expect(props.data).toEqual(data1); + expect(open).toBe(false); + setTimeout(() => { + this.toggle(); + }); + break; + case 2: + // State value changed, fire a refetch + expect(open).toBe(true); + setTimeout(() => { + props.refetch(); + }); + break; + case 3: + // Second response loading + expect(props.loading).toBe(true); + break; + case 4: + // Second response received, fire another refetch + expect(props.data).toEqual(data1); + setTimeout(() => { + props.refetch(); + }); + break; + case 5: + // Third response loading + expect(props.loading).toBe(true); + break; + case 6: + // Third response received + expect(props.data).toEqual(data1); + break; + default: + reject("Unknown count"); + } + count += 1; + } catch (error) { + reject(error); } - count += 1; - } catch (error) { - reject(error); - } - return null; - }} - </Query> - ); + return null; + }} + </Query> + ); + } } - } - render(<SomeComponent />); + render(<SomeComponent />); - waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(4) - } else { - expect(count).toBe(7) - } - }).then(resolve, reject); - }); + waitFor(() => { + expect(count).toBe(7); + }).then(resolve, reject); + } + ); }); - it('should error if the query changes type to a subscription', async () => { + it("should error if the query changes type to a subscription", async () => { let finished = false; const subscription = gql` subscription onCommentAdded($repoFullName: String!) { @@ -1445,8 +1422,8 @@ describe('Query component', () => { componentDidCatch(error: any) { const expectedError = new Error( - 'Running a Query requires a graphql Query, but a Subscription was ' + - 'used instead.' + "Running a Query requires a graphql Query, but a Subscription was " + + "used instead." 
); expect(error).toEqual(expectedError); finished = true; @@ -1472,13 +1449,16 @@ describe('Query component', () => { </MockedProvider> ); - await waitFor(() => { - expect(finished).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(finished).toBe(true); + }, + { interval: 1 } + ); console.error = errorLog; }); - it('should be able to refetch after there was a network error', async () => { + it("should be able to refetch after there was a network error", async () => { const query: DocumentNode = gql` query somethingelse { allPeople(first: 1) { @@ -1489,12 +1469,12 @@ describe('Query component', () => { } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const dataTwo = { allPeople: { people: [{ name: 'Princess Leia' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const dataTwo = { allPeople: { people: [{ name: "Princess Leia" }] } }; const link = mockSingleLink( { request: { query }, result: { data } }, - { request: { query }, error: new Error('This is an error!') }, - { request: { query }, result: { data: dataTwo }, delay: 10 }, + { request: { query }, error: new Error("This is an error!") }, + { request: { query }, result: { data: dataTwo }, delay: 10 } ); const client = new ApolloClient({ link, @@ -1502,6 +1482,7 @@ describe('Query component', () => { }); let count = 0; + let testFailures: any[] = []; const noop = () => null; const AllPeopleQuery2 = Query; @@ -1519,27 +1500,21 @@ describe('Query component', () => { case 1: // First result is loaded, run a refetch to get the second result // which is an error. - expect(result.data.allPeople).toEqual( - data.allPeople - ); + expect(result.data.allPeople).toEqual(data.allPeople); setTimeout(() => { result.refetch().then(() => { - fail('Expected error value on first refetch.'); + fail("Expected error value on first refetch."); }, noop); }, 0); break; case 2: // Waiting for the second result to load - if (IS_REACT_18) { - expect(result.loading).toBe(false); - } else { - expect(result.loading).toBe(true); - } + expect(result.loading).toBe(true); break; case 3: setTimeout(() => { result.refetch().catch(() => { - fail('Expected good data on second refetch.'); + fail("Expected good data on second refetch."); }); }, 0); // fallthrough @@ -1558,10 +1533,13 @@ describe('Query component', () => { expect(result.data.allPeople).toEqual(dataTwo.allPeople); break; default: - throw new Error('Unexpected fall through'); + throw new Error("Unexpected fall through"); } } catch (e) { - fail(e); + // if we throw the error inside the component, + // we will get more rerenders in the test, but the `expect` error + // might not propagate anyways + testFailures.push(e); } return null; }} @@ -1576,16 +1554,15 @@ describe('Query component', () => { ); await waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(3) - } else { - expect(count).toBe(6) + if (testFailures.length > 0) { + throw testFailures[0]; } + expect(count).toBe(6); }); }); itAsync( - 'should not persist previous result errors when a subsequent valid result is received', + "should not persist previous result errors when a subsequent valid result is received", (resolve, reject) => { const query: DocumentNode = gql` query somethingelse($variable: Boolean) { @@ -1597,7 +1574,7 @@ describe('Query component', () => { } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const variableGood = { variable: true }; const variableBad 
= { variable: false }; @@ -1617,7 +1594,7 @@ describe('Query component', () => { variables: variableBad, }, result: { - errors: [new Error('This is an error!')], + errors: [new Error("This is an error!")], }, }, { @@ -1649,11 +1626,7 @@ describe('Query component', () => { // Change query variables to trigger bad result. setTimeout(() => { render( - <Query - client={client} - query={query} - variables={variableBad} - > + <Query client={client} query={query} variables={variableBad}> {(result: any) => { return <DummyComp id="dummyId" {...result} />; }} @@ -1670,11 +1643,7 @@ describe('Query component', () => { // Change query variables to trigger a good result. setTimeout(() => { render( - <Query - client={client} - query={query} - variables={variableGood} - > + <Query client={client} query={query} variables={variableGood}> {(result: any) => { return <DummyComp id="dummyId" {...result} />; }} @@ -1688,7 +1657,7 @@ describe('Query component', () => { expect(props.data.allPeople).toBeTruthy(); break; default: - reject('Unknown count'); + reject("Unknown count"); } } catch (error) { reject(error); @@ -1708,7 +1677,7 @@ describe('Query component', () => { } ); - it('should support mixing setState and onCompleted', async () => { + it("should support mixing setState and onCompleted", async () => { const query = gql` query people($first: Int) { allPeople(first: $first) { @@ -1719,8 +1688,8 @@ describe('Query component', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const mocks = [ { request: { query, variables: { first: 1 } }, @@ -1808,61 +1777,63 @@ describe('Query component', () => { }); }); - itAsync('should not repeatedly call onError if setState in it', (resolve, reject) => { - const mockError = [ - { - request: { query: allPeopleQuery, variables: { first: 1 } }, - error: new Error('error occurred'), - }, - ]; - - let unmount: any; - let onErrorCallCount = 0; - class Component extends React.Component { - state = { - variables: { - first: 1, + itAsync( + "should not repeatedly call onError if setState in it", + (resolve, reject) => { + const mockError = [ + { + request: { query: allPeopleQuery, variables: { first: 1 } }, + error: new Error("error occurred"), }, - }; - onError = () => { - onErrorCallCount += 1; - this.setState({ causeUpdate: true }); - }; - render() { - return ( - <Query - query={allPeopleQuery} - variables={this.state.variables} - onError={this.onError} - > - {({ loading }: any) => { - if (!loading) { - setTimeout(unmount); - } - return null; - }} - </Query> - ); + ]; + + let unmount: any; + let onErrorCallCount = 0; + class Component extends React.Component { + state = { + variables: { + first: 1, + }, + }; + onError = () => { + onErrorCallCount += 1; + this.setState({ causeUpdate: true }); + }; + render() { + return ( + <Query + query={allPeopleQuery} + variables={this.state.variables} + onError={this.onError} + > + {({ loading }: any) => { + if (!loading) { + setTimeout(unmount); + } + return null; + }} + </Query> + ); + } } - } - unmount = render( - <MockedProvider mocks={mockError} addTypename={false}> - <Component /> - </MockedProvider> - ).unmount; + unmount = render( + <MockedProvider mocks={mockError} addTypename={false}> + <Component /> + </MockedProvider> + ).unmount; - waitFor(() => { - expect(onErrorCallCount).toBe(1); - 
}).then(resolve, reject); - }); + waitFor(() => { + expect(onErrorCallCount).toBe(1); + }).then(resolve, reject); + } + ); - describe('Partial refetching', () => { + describe("Partial refetching", () => { let errorSpy!: ReturnType<typeof jest.spyOn>; beforeEach(() => { - errorSpy = jest.spyOn(console, 'error') - .mockImplementation(() => {}); + errorSpy = jest.spyOn(console, "error").mockImplementation(() => {}); }); afterAll(() => { @@ -1873,10 +1844,10 @@ describe('Query component', () => { // error calls no matter what I try and I do not want to care about it // anymore :) itAsync.skip( - 'should attempt a refetch when the query result was marked as being ' + - 'partial, the returned data was reset to an empty Object by the ' + - 'Apollo Client QueryManager (due to a cache miss), and the ' + - '`partialRefetch` prop is `true`', + "should attempt a refetch when the query result was marked as being " + + "partial, the returned data was reset to an empty Object by the " + + "Apollo Client QueryManager (due to a cache miss), and the " + + "`partialRefetch` prop is `true`", (resolve, reject) => { const allPeopleQuery: DocumentNode = gql` query people { @@ -1890,7 +1861,7 @@ describe('Query component', () => { let count = 0; const allPeopleData = { - allPeople: { people: [{ name: 'Luke Skywalker' }] }, + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; const query = allPeopleQuery; const link = mockSingleLink( @@ -1912,7 +1883,7 @@ describe('Query component', () => { if (!loading) { expect(data).toEqual(allPeopleData); expect(errorSpy).toHaveBeenCalledTimes(1); - expect(errorSpy.mock.calls[0][0]).toMatch('Missing field'); + expect(errorSpy.mock.calls[0][0]).toMatch("Missing field"); } } catch (err) { reject(err); @@ -1935,8 +1906,8 @@ describe('Query component', () => { ); itAsync.skip( - 'should not refetch when an empty partial is returned if the ' + - '`partialRefetch` prop is false/not set', + "should not refetch when an empty partial is returned if the " + + "`partialRefetch` prop is false/not set", (resolve, reject) => { let finished = false; const query = allPeopleQuery; @@ -1967,16 +1938,16 @@ describe('Query component', () => { </ApolloProvider> ); - waitFor(() => { - expect(finished).toBe(true); - }).then(resolve, reject); + waitFor(() => { + expect(finished).toBe(true); + }).then(resolve, reject); } ); }); itAsync( - 'should keep data for a `Query` component using `no-cache` when the ' + - 'tree is re-rendered', + "should keep data for a `Query` component using `no-cache` when the " + + "tree is re-rendered", (resolve, reject) => { const query1 = allPeopleQuery; @@ -1998,14 +1969,14 @@ describe('Query component', () => { const allThingsData: ThingData = { allThings: { - thing: [{ description: 'Thing 1' }, { description: 'Thing 2' }], + thing: [{ description: "Thing 1" }, { description: "Thing 2" }], }, }; const link = mockSingleLink( { request: { query: query1 }, result: { data: allPeopleData } }, { request: { query: query2 }, result: { data: allThingsData } }, - { request: { query: query1 }, result: { data: allPeopleData } }, + { request: { query: query1 }, result: { data: allPeopleData } } ); const client = new ApolloClient({ @@ -2056,7 +2027,7 @@ describe('Query component', () => { } ); - describe('Return partial data', () => { + describe("Return partial data", () => { const origConsoleWarn = console.warn; beforeAll(() => { @@ -2067,7 +2038,7 @@ describe('Query component', () => { console.warn = origConsoleWarn; }); - it('should not return partial cache data when 
`returnPartialData` is false', async () => { + it("should not return partial cache data when `returnPartialData` is false", async () => { let finished = false; const cache = new InMemoryCache(); const client = new ApolloClient({ @@ -2093,15 +2064,15 @@ describe('Query component', () => { data: { cars: [ { - __typename: 'Car', - make: 'Ford', - model: 'Mustang', - vin: 'PONY123', + __typename: "Car", + make: "Ford", + model: "Mustang", + vin: "PONY123", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', - description: 'Could not get after it.', + __typename: "Repair", + date: "2019-05-08", + description: "Could not get after it.", }, ], }, @@ -2135,11 +2106,11 @@ describe('Query component', () => { render(<App />); await waitFor(() => { - expect(finished).toBe(true) - }) + expect(finished).toBe(true); + }); }); - it('should return partial cache data when `returnPartialData` is true', async () => { + it("should return partial cache data when `returnPartialData` is true", async () => { let finished = false; const cache = new InMemoryCache(); const client = new ApolloClient({ @@ -2165,15 +2136,15 @@ describe('Query component', () => { data: { cars: [ { - __typename: 'Car', - make: 'Ford', - model: 'Mustang', - vin: 'PONY123', + __typename: "Car", + make: "Ford", + model: "Mustang", + vin: "PONY123", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', - description: 'Could not get after it.', + __typename: "Repair", + date: "2019-05-08", + description: "Could not get after it.", }, ], }, @@ -2199,11 +2170,11 @@ describe('Query component', () => { expect(data).toEqual({ cars: [ { - __typename: 'Car', + __typename: "Car", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', + __typename: "Repair", + date: "2019-05-08", }, ], }, diff --git a/src/react/components/__tests__/client/Subscription.test.tsx b/src/react/components/__tests__/client/Subscription.test.tsx --- a/src/react/components/__tests__/client/Subscription.test.tsx +++ b/src/react/components/__tests__/client/Subscription.test.tsx @@ -1,21 +1,21 @@ -import React from 'react'; -import gql from 'graphql-tag'; -import { render, waitFor } from '@testing-library/react'; +import React from "react"; +import gql from "graphql-tag"; +import { render, waitFor } from "@testing-library/react"; -import { ApolloClient } from '../../../../core'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloProvider } from '../../../context'; -import { ApolloLink, Operation } from '../../../../link/core'; -import { itAsync, MockSubscriptionLink } from '../../../../testing'; -import { Subscription } from '../../Subscription'; +import { ApolloClient } from "../../../../core"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { ApolloProvider } from "../../../context"; +import { ApolloLink, Operation } from "../../../../link/core"; +import { itAsync, MockSubscriptionLink } from "../../../../testing"; +import { Subscription } from "../../Subscription"; const results = [ - 'Luke Skywalker', - 'Han Solo', - 'Darth Vader', - 'Leia Skywalker' -].map(name => ({ - result: { data: { user: { name } } } + "Luke Skywalker", + "Han Solo", + "Darth Vader", + "Leia Skywalker", +].map((name) => ({ + result: { data: { user: { name } } }, })); beforeEach(() => { @@ -34,10 +34,10 @@ const cache = new Cache({ addTypename: false }); const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache + cache, }); -itAsync('executes the subscription', (resolve, reject) => { +itAsync("executes the 
subscription", (resolve, reject) => { let renderCount = 0; const Component = () => ( <Subscription subscription={subscription}> @@ -88,7 +88,7 @@ itAsync('executes the subscription', (resolve, reject) => { waitFor(() => expect(renderCount).toBe(5)).then(resolve, reject); }); -it('calls onData if given', async () => { +it("calls onData if given", async () => { let count = 0; const Component = () => ( @@ -117,8 +117,10 @@ it('calls onData if given', async () => { await waitFor(() => expect(count).toBe(4)); }); -it('calls onSubscriptionData with deprecation warning if given', async () => { - const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); +it("calls onSubscriptionData with deprecation warning if given", async () => { + const consoleWarnSpy = jest + .spyOn(console, "warn") + .mockImplementation(() => {}); let count = 0; const Component = () => ( @@ -149,12 +151,12 @@ it('calls onSubscriptionData with deprecation warning if given', async () => { if (count >= 3) clearInterval(interval); }, 10); - await waitFor(() => expect(count).toBe(4)) + await waitFor(() => expect(count).toBe(4)); consoleWarnSpy.mockRestore(); }); -it('should call onComplete if specified', async () => { +it("should call onComplete if specified", async () => { let count = 0; let done = false; @@ -184,8 +186,10 @@ it('should call onComplete if specified', async () => { await waitFor(() => expect(done).toBeTruthy()); }); -it('should call onSubscriptionComplete with deprecation warning if specified', async () => { - const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); +it("should call onSubscriptionComplete with deprecation warning if specified", async () => { + const consoleWarnSpy = jest + .spyOn(console, "warn") + .mockImplementation(() => {}); let count = 0; let done = false; @@ -222,73 +226,76 @@ it('should call onSubscriptionComplete with deprecation warning if specified', a consoleWarnSpy.mockRestore(); }); -itAsync('executes subscription for the variables passed in the props', (resolve, reject) => { - const subscriptionWithVariables = gql` - subscription UserInfo($name: String) { - user(name: $name) { - name +itAsync( + "executes subscription for the variables passed in the props", + (resolve, reject) => { + const subscriptionWithVariables = gql` + subscription UserInfo($name: String) { + user(name: $name) { + name + } } - } - `; + `; - const variables = { name: 'Luke Skywalker' }; + const variables = { name: "Luke Skywalker" }; - class MockSubscriptionLinkOverride extends MockSubscriptionLink { - request(req: Operation) { - try { - expect(req.variables).toEqual(variables); - } catch (error) { - reject(error); + class MockSubscriptionLinkOverride extends MockSubscriptionLink { + request(req: Operation) { + try { + expect(req.variables).toEqual(variables); + } catch (error) { + reject(error); + } + return super.request(req); } - return super.request(req); } - } - - const mockLink = new MockSubscriptionLinkOverride(); - const mockClient = new ApolloClient({ - link: mockLink, - cache - }); + const mockLink = new MockSubscriptionLinkOverride(); - let count = 0; + const mockClient = new ApolloClient({ + link: mockLink, + cache, + }); - const Component = () => ( - <Subscription - subscription={subscriptionWithVariables} - variables={variables} - > - {(result: any) => { - const { loading, data } = result; + let count = 0; - try { - if (count === 0) { - expect(loading).toBe(true); - } else if (count === 1) { - expect(loading).toBe(false); - 
expect(data).toEqual(results[0].result.data); + const Component = () => ( + <Subscription + subscription={subscriptionWithVariables} + variables={variables} + > + {(result: any) => { + const { loading, data } = result; + + try { + if (count === 0) { + expect(loading).toBe(true); + } else if (count === 1) { + expect(loading).toBe(false); + expect(data).toEqual(results[0].result.data); + } + } catch (error) { + reject(error); } - } catch (error) { - reject(error); - } - count++; - return null; - }} - </Subscription> - ); + count++; + return null; + }} + </Subscription> + ); - render( - <ApolloProvider client={mockClient}> - <Component /> - </ApolloProvider> - ); + render( + <ApolloProvider client={mockClient}> + <Component /> + </ApolloProvider> + ); - mockLink.simulateResult(results[0]); + mockLink.simulateResult(results[0]); - waitFor(() => expect(count).toBe(2)).then(resolve, reject); -}); + waitFor(() => expect(count).toBe(2)).then(resolve, reject); + } +); -itAsync('does not execute if variables have not changed', (resolve, reject) => { +itAsync("does not execute if variables have not changed", (resolve, reject) => { const subscriptionWithVariables = gql` subscription UserInfo($name: String) { user(name: $name) { @@ -297,7 +304,7 @@ itAsync('does not execute if variables have not changed', (resolve, reject) => { } `; - const name = 'Luke Skywalker'; + const name = "Luke Skywalker"; class MockSubscriptionLinkOverride extends MockSubscriptionLink { request(req: Operation) { @@ -314,7 +321,7 @@ itAsync('does not execute if variables have not changed', (resolve, reject) => { const mockClient = new ApolloClient({ link: mockLink, - cache + cache, }); let count = 0; @@ -359,7 +366,7 @@ itAsync('does not execute if variables have not changed', (resolve, reject) => { waitFor(() => expect(count).toBe(3)).then(resolve, reject); }); -itAsync('renders an error', (resolve, reject) => { +itAsync("renders an error", (resolve, reject) => { const subscriptionWithVariables = gql` subscription UserInfo($name: String) { user(name: $name) { @@ -369,11 +376,11 @@ itAsync('renders an error', (resolve, reject) => { `; const variables = { - name: 'Luke Skywalker' + name: "Luke Skywalker", }; const subscriptionError = { - error: new Error('error occurred') + error: new Error("error occurred"), }; let count = 0; @@ -390,7 +397,7 @@ itAsync('renders an error', (resolve, reject) => { expect(error).toBeUndefined(); } else if (count === 1) { expect(loading).toBe(false); - expect(error).toEqual(new Error('error occurred')); + expect(error).toEqual(new Error("error occurred")); expect(data).toBeUndefined(); } } catch (error) { @@ -414,19 +421,20 @@ itAsync('renders an error', (resolve, reject) => { waitFor(() => expect(count).toBe(2)).then(resolve, reject); }); -describe('should update', () => { - itAsync('if the client changes', (resolve, reject) => { +describe("should update", () => { + it("if the client changes", async () => { const link2 = new MockSubscriptionLink(); const client2 = new ApolloClient({ link: link2, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; + let testFailures: any[] = []; class Component extends React.Component { state = { - client: client + client: client, }; render() { @@ -436,7 +444,7 @@ describe('should update', () => { {(result: any) => { const { loading, data } = result; try { - switch (count) { + switch (count++) { case 0: expect(loading).toBeTruthy(); expect(data).toBeUndefined(); @@ -445,14 +453,14 @@ describe('should update', () 
=> { setTimeout(() => { this.setState( { - client: client2 + client: client2, }, () => { link2.simulateResult(results[1]); } ); }); - // fallthrough + // fallthrough case 2: expect(loading).toBeFalsy(); expect(data).toEqual(results[0].result.data); @@ -465,12 +473,12 @@ describe('should update', () => { expect(loading).toBeFalsy(); expect(data).toEqual(results[1].result.data); break; + default: + throw new Error("too many rerenders"); } } catch (error) { - reject(error); + testFailures.push(error); } - - count++; return null; }} </Subscription> @@ -483,10 +491,15 @@ describe('should update', () => { link.simulateResult(results[0]); - waitFor(() => expect(count).toBe(5)).then(resolve, reject); + await waitFor(() => { + if (testFailures.length > 0) { + throw testFailures[0]; + } + expect(count).toBe(5); + }); }); - itAsync('if the query changes', (resolve, reject) => { + itAsync("if the query changes", (resolve, reject) => { const subscriptionHero = gql` subscription HeroInfo { hero { @@ -499,30 +512,30 @@ describe('should update', () => { result: { data: { hero: { - name: 'Chewie' - } - } - } + name: "Chewie", + }, + }, + }, }; const userLink = new MockSubscriptionLink(); const heroLink = new MockSubscriptionLink(); const linkCombined = new ApolloLink((o, f) => (f ? f(o) : null)).split( - ({ operationName }) => operationName === 'HeroInfo', + ({ operationName }) => operationName === "HeroInfo", heroLink, userLink ); const mockClient = new ApolloClient({ link: linkCombined, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; class Component extends React.Component { state = { - subscription + subscription, }; render() { @@ -540,14 +553,14 @@ describe('should update', () => { setTimeout(() => { this.setState( { - subscription: subscriptionHero + subscription: subscriptionHero, }, () => { heroLink.simulateResult(heroResult); } ); }); - // fallthrough + // fallthrough case 2: expect(loading).toBeFalsy(); expect(data).toEqual(results[0].result.data); @@ -583,7 +596,7 @@ describe('should update', () => { waitFor(() => expect(count).toBe(5)).then(resolve, reject); }); - itAsync('if the variables change', (resolve, reject) => { + itAsync("if the variables change", (resolve, reject) => { const subscriptionWithVariables = gql` subscription UserInfo($name: String) { user(name: $name) { @@ -592,33 +605,33 @@ describe('should update', () => { } `; - const variablesLuke = { name: 'Luke Skywalker' }; - const variablesHan = { name: 'Han Solo' }; + const variablesLuke = { name: "Luke Skywalker" }; + const variablesHan = { name: "Han Solo" }; const dataLuke = { user: { - name: 'Luke Skywalker' - } + name: "Luke Skywalker", + }, }; const dataHan = { user: { - name: 'Han Solo' - } + name: "Han Solo", + }, }; const mockLink = new MockSubscriptionLink(); const mockClient = new ApolloClient({ link: mockLink, - cache + cache, }); let count = 0; class Component extends React.Component { state = { - variables: variablesLuke + variables: variablesLuke, }; render() { @@ -639,14 +652,16 @@ describe('should update', () => { setTimeout(() => { this.setState( { - variables: variablesHan + variables: variablesHan, }, () => { - mockLink.simulateResult({ result: { data: dataHan } }); + mockLink.simulateResult({ + result: { data: dataHan }, + }); } ); }); - // fallthrough + // fallthrough case 2: expect(loading).toBeFalsy(); expect(data).toEqual(dataLuke); @@ -684,20 +699,20 @@ describe('should update', () => { }); }); -describe('should not update', () => { - const 
variablesLuke = { name: 'Luke Skywalker' }; - const variablesHan = { name: 'Han Solo' }; +describe("should not update", () => { + const variablesLuke = { name: "Luke Skywalker" }; + const variablesHan = { name: "Han Solo" }; const dataLuke = { user: { - name: 'Luke Skywalker' - } + name: "Luke Skywalker", + }, }; const dataHan = { user: { - name: 'Han Solo' - } + name: "Han Solo", + }, }; class MockSubscriptionLinkOverride extends MockSubscriptionLink { @@ -708,23 +723,23 @@ describe('should not update', () => { } simulateResult() { - if (this.variables.name === 'Luke Skywalker') { + if (this.variables.name === "Luke Skywalker") { return super.simulateResult({ result: { - data: dataLuke - } + data: dataLuke, + }, }); - } else if (this.variables.name === 'Han Solo') { + } else if (this.variables.name === "Han Solo") { return super.simulateResult({ result: { - data: dataHan - } + data: dataHan, + }, }); } } } - itAsync('if shouldResubscribe is false', (resolve, reject) => { + itAsync("if shouldResubscribe is false", (resolve, reject) => { const subscriptionWithVariables = gql` subscription UserInfo($name: String) { user(name: $name) { @@ -737,14 +752,14 @@ describe('should not update', () => { const mockClient = new ApolloClient({ link: mockLink, - cache + cache, }); let count = 0; class Component extends React.Component { state = { - variables: variablesLuke + variables: variablesLuke, }; render() { @@ -766,7 +781,7 @@ describe('should not update', () => { setTimeout(() => { this.setState( { - variables: variablesHan + variables: variablesHan, }, () => { mockLink.simulateResult(); @@ -800,7 +815,7 @@ describe('should not update', () => { waitFor(() => expect(count).toBe(4)).then(resolve, reject); }); - itAsync('if shouldResubscribe returns false', (resolve, reject) => { + itAsync("if shouldResubscribe returns false", (resolve, reject) => { const subscriptionWithVariables = gql` subscription UserInfo($name: String) { user(name: $name) { @@ -813,14 +828,14 @@ describe('should not update', () => { const mockClient = new ApolloClient({ link: mockLink, - cache + cache, }); let count = 0; class Component extends React.Component { state = { - variables: variablesLuke + variables: variablesLuke, }; render() { @@ -842,7 +857,7 @@ describe('should not update', () => { setTimeout(() => { this.setState( { - variables: variablesHan + variables: variablesHan, }, () => { mockLink.simulateResult(); diff --git a/src/react/components/__tests__/ssr/getDataFromTree.test.tsx b/src/react/components/__tests__/ssr/getDataFromTree.test.tsx --- a/src/react/components/__tests__/ssr/getDataFromTree.test.tsx +++ b/src/react/components/__tests__/ssr/getDataFromTree.test.tsx @@ -1,22 +1,28 @@ /** @jest-environment node */ -import React from 'react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; +import React from "react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; -import { ApolloClient } from '../../../../core'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloProvider, getApolloContext, ApolloContextValue } from '../../../context'; -import { getDataFromTree } from '../../../ssr'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { Query } from '../../Query'; +import { ApolloClient } from "../../../../core"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { + ApolloProvider, + getApolloContext, + ApolloContextValue, +} from "../../../context"; +import { getDataFromTree } from "../../../ssr"; 
+import { itAsync, mockSingleLink } from "../../../../testing"; +import { Query } from "../../Query"; -describe('SSR', () => { - describe('`getDataFromTree`', () => { - it('should support passing a root context', () => { - const apolloContext = getApolloContext() as unknown as React.Context<ApolloContextValue & { text: string }>; +describe("SSR", () => { + describe("`getDataFromTree`", () => { + it("should support passing a root context", () => { + const apolloContext = getApolloContext() as unknown as React.Context< + ApolloContextValue & { text: string } + >; class Consumer extends React.Component { static contextType = apolloContext; - declare context: React.ContextType<typeof apolloContext> + declare context: React.ContextType<typeof apolloContext>; render() { return <div>{this.context.text}</div>; @@ -24,13 +30,13 @@ describe('SSR', () => { } return getDataFromTree(<Consumer />, { - text: 'oyez' - }).then(html => { - expect(html).toEqual('<div>oyez</div>'); + text: "oyez", + }).then((html) => { + expect(html).toEqual("<div>oyez</div>"); }); }); - it('should run through all of the queries (also defined via Query component) that want SSR', () => { + it("should run through all of the queries (also defined via Query component) that want SSR", () => { const query = gql` { currentUser { @@ -38,15 +44,15 @@ describe('SSR', () => { } } `; - const data1 = { currentUser: { firstName: 'James' } }; + const data1 = { currentUser: { firstName: "James" } }; const link = mockSingleLink({ request: { query }, result: { data: data1 }, - delay: 50 + delay: 50, }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Data { @@ -59,7 +65,7 @@ describe('SSR', () => { <Query query={query}> {({ data, loading }: { data: Data; loading: boolean }) => ( <div> - {loading || !data ? 'loading' : data.currentUser!.firstName} + {loading || !data ? 
"loading" : data.currentUser!.firstName} </div> )} </Query> @@ -71,57 +77,60 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - itAsync('should pass any GraphQL errors in props along with data during a SSR when errorPolicy="all"', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople { - people { - name + itAsync( + 'should pass any GraphQL errors in props along with data during a SSR when errorPolicy="all"', + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople { + people { + name + } } } - } - `; - const link = mockSingleLink({ - request: { query }, - result: { - data: { - allPeople: { - people: null - } + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { + allPeople: { + people: null, + }, + }, + errors: [new Error("this is an error")], }, - errors: [new Error('this is an error')] - } - }); + }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const app = ( - <ApolloProvider client={client}> - <Query query={query} errorPolicy="all"> - {({ loading, data, error }: any) => { - if (!loading) { - expect(data).toMatchObject({ allPeople: { people: null } }); - expect(error).toBeDefined(); - expect(error.graphQLErrors[0].message).toEqual( - 'this is an error' - ); - resolve(); - } - return null; - }} - </Query> - </ApolloProvider> - ); + const app = ( + <ApolloProvider client={client}> + <Query query={query} errorPolicy="all"> + {({ loading, data, error }: any) => { + if (!loading) { + expect(data).toMatchObject({ allPeople: { people: null } }); + expect(error).toBeDefined(); + expect(error.graphQLErrors[0].message).toEqual( + "this is an error" + ); + resolve(); + } + return null; + }} + </Query> + </ApolloProvider> + ); - getDataFromTree(app); - }); + getDataFromTree(app); + } + ); }); }); diff --git a/src/react/components/__tests__/ssr/server.test.tsx b/src/react/components/__tests__/ssr/server.test.tsx --- a/src/react/components/__tests__/ssr/server.test.tsx +++ b/src/react/components/__tests__/ssr/server.test.tsx @@ -1,5 +1,5 @@ /** @jest-environment node */ -import React from 'react'; +import React from "react"; import { print, graphql as execute, @@ -7,111 +7,111 @@ import { GraphQLObjectType, GraphQLList, GraphQLString, - GraphQLID -} from 'graphql'; -import gql from 'graphql-tag'; + GraphQLID, +} from "graphql"; +import gql from "graphql-tag"; -import { ApolloClient } from '../../../../core'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloProvider } from '../../../context'; -import { ApolloLink } from '../../../../link/core'; -import { Observable } from '../../../../utilities'; -import { renderToStringWithData } from '../../../ssr'; -import { Query } from '../../Query'; +import { ApolloClient } from "../../../../core"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { ApolloProvider } from "../../../context"; +import { ApolloLink } from "../../../../link/core"; +import { Observable } from "../../../../utilities"; +import { renderToStringWithData } from "../../../ssr"; +import { Query } from "../../Query"; -const planetMap = new Map([['Planet:1', { id: 'Planet:1', name: 'Tatooine' }]]); +const planetMap = new Map([["Planet:1", { id: "Planet:1", name: "Tatooine" }]]); 
const shipMap = new Map([ [ - 'Ship:2', + "Ship:2", { - id: 'Ship:2', - name: 'CR90 corvette', - films: ['Film:4', 'Film:6', 'Film:3'] - } + id: "Ship:2", + name: "CR90 corvette", + films: ["Film:4", "Film:6", "Film:3"], + }, ], [ - 'Ship:3', + "Ship:3", { - id: 'Ship:3', - name: 'Star Destroyer', - films: ['Film:4', 'Film:5', 'Film:6'] - } - ] + id: "Ship:3", + name: "Star Destroyer", + films: ["Film:4", "Film:5", "Film:6"], + }, + ], ]); const filmMap = new Map([ - ['Film:3', { id: 'Film:3', title: 'Revenge of the Sith' }], - ['Film:4', { id: 'Film:4', title: 'A New Hope' }], - ['Film:5', { id: 'Film:5', title: 'the Empire Strikes Back' }], - ['Film:6', { id: 'Film:6', title: 'Return of the Jedi' }] + ["Film:3", { id: "Film:3", title: "Revenge of the Sith" }], + ["Film:4", { id: "Film:4", title: "A New Hope" }], + ["Film:5", { id: "Film:5", title: "the Empire Strikes Back" }], + ["Film:6", { id: "Film:6", title: "Return of the Jedi" }], ]); const PlanetType = new GraphQLObjectType({ - name: 'Planet', + name: "Planet", fields: { id: { type: GraphQLID }, - name: { type: GraphQLString } - } + name: { type: GraphQLString }, + }, }); const FilmType = new GraphQLObjectType({ - name: 'Film', + name: "Film", fields: { id: { type: GraphQLID }, - title: { type: GraphQLString } - } + title: { type: GraphQLString }, + }, }); const ShipType = new GraphQLObjectType({ - name: 'Ship', + name: "Ship", fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, films: { type: new GraphQLList(FilmType), - resolve: ({ films }) => films.map((id: string) => filmMap.get(id)) - } - } + resolve: ({ films }) => films.map((id: string) => filmMap.get(id)), + }, + }, }); const QueryType = new GraphQLObjectType({ - name: 'Query', + name: "Query", fields: { allPlanets: { type: new GraphQLList(PlanetType), - resolve: () => Array.from(planetMap.values()) + resolve: () => Array.from(planetMap.values()), }, allShips: { type: new GraphQLList(ShipType), - resolve: () => Array.from(shipMap.values()) + resolve: () => Array.from(shipMap.values()), }, ship: { type: ShipType, args: { id: { type: GraphQLID } }, - resolve: (_, { id }) => shipMap.get(id) + resolve: (_, { id }) => shipMap.get(id), }, film: { type: FilmType, args: { id: { type: GraphQLID } }, - resolve: (_, { id }) => filmMap.get(id) - } - } + resolve: (_, { id }) => filmMap.get(id), + }, + }, }); const Schema = new GraphQLSchema({ query: QueryType }); -describe('SSR', () => { - it('should work with React.createContext', async () => { - let defaultValue = 'default'; +describe("SSR", () => { + it("should work with React.createContext", async () => { + let defaultValue = "default"; let Context = React.createContext(defaultValue); - let providerValue = 'provider'; + let providerValue = "provider"; expect( await renderToStringWithData( <React.Fragment> <Context.Provider value={providerValue} /> <Context.Consumer> - {val => { + {(val) => { expect(val).toBe(defaultValue); return val; }} @@ -123,7 +123,7 @@ describe('SSR', () => { await renderToStringWithData( <Context.Provider value={providerValue}> <Context.Consumer> - {val => { + {(val) => { expect(val).toBe(providerValue); return val; }} @@ -134,7 +134,7 @@ describe('SSR', () => { expect( await renderToStringWithData( <Context.Consumer> - {val => { + {(val) => { expect(val).toBe(defaultValue); return val; }} @@ -147,34 +147,34 @@ describe('SSR', () => { await renderToStringWithData( <ContextForUndefined.Provider value={undefined}> <ContextForUndefined.Consumer> - {val => { + {(val) => { 
expect(val).toBeUndefined(); - return val === undefined ? 'works' : 'broken'; + return val === undefined ? "works" : "broken"; }} </ContextForUndefined.Consumer> </ContextForUndefined.Provider> ) - ).toBe('works'); + ).toBe("works"); const apolloClient = new ApolloClient({ - link: new ApolloLink(config => { - return new Observable(observer => { + link: new ApolloLink((config) => { + return new Observable((observer) => { execute({ schema: Schema, source: print(config.query), variableValues: config.variables, operationName: config.operationName, }) - .then(result => { + .then((result) => { observer.next(result); observer.complete(); }) - .catch(e => { + .catch((e) => { observer.error(e); }); }); }), - cache: new Cache() + cache: new Cache(), }); expect( @@ -192,7 +192,7 @@ describe('SSR', () => { > {() => ( <Context.Consumer> - {val => { + {(val) => { expect(val).toBe(providerValue); return val; }} diff --git a/src/react/context/__tests__/ApolloConsumer.test.tsx b/src/react/context/__tests__/ApolloConsumer.test.tsx --- a/src/react/context/__tests__/ApolloConsumer.test.tsx +++ b/src/react/context/__tests__/ApolloConsumer.test.tsx @@ -1,25 +1,25 @@ -import React from 'react'; -import { render, screen } from '@testing-library/react'; +import React from "react"; +import { render, screen } from "@testing-library/react"; -import { ApolloLink } from '../../../link/core'; -import { ApolloClient } from '../../../core'; -import { InMemoryCache as Cache } from '../../../cache'; -import { ApolloProvider } from '../ApolloProvider'; -import { ApolloConsumer } from '../ApolloConsumer'; -import { getApolloContext } from '../ApolloContext'; -import { itAsync } from '../../../testing'; +import { ApolloLink } from "../../../link/core"; +import { ApolloClient } from "../../../core"; +import { InMemoryCache as Cache } from "../../../cache"; +import { ApolloProvider } from "../ApolloProvider"; +import { ApolloConsumer } from "../ApolloConsumer"; +import { getApolloContext } from "../ApolloContext"; +import { itAsync } from "../../../testing"; const client = new ApolloClient({ cache: new Cache(), - link: new ApolloLink((o, f) => (f ? f(o) : null)) + link: new ApolloLink((o, f) => (f ? f(o) : null)), }); -describe('<ApolloConsumer /> component', () => { - itAsync('has a render prop', (resolve, reject) => { +describe("<ApolloConsumer /> component", () => { + itAsync("has a render prop", (resolve, reject) => { render( <ApolloProvider client={client}> <ApolloConsumer> - {clientRender => { + {(clientRender) => { try { expect(clientRender).toBe(client); resolve(); @@ -33,17 +33,17 @@ describe('<ApolloConsumer /> component', () => { ); }); - it('renders the content in the children prop', () => { + it("renders the content in the children prop", () => { render( <ApolloProvider client={client}> <ApolloConsumer>{() => <div>Test</div>}</ApolloConsumer> </ApolloProvider> ); - expect(screen.getByText('Test')).toBeTruthy(); + expect(screen.getByText("Test")).toBeTruthy(); }); - it('errors if there is no client in the context', () => { + it("errors if there is no client in the context", () => { // Prevent Error about missing context type from appearing in the console. 
const errorLogger = console.error; console.error = () => {}; diff --git a/src/react/context/__tests__/ApolloProvider.test.tsx b/src/react/context/__tests__/ApolloProvider.test.tsx --- a/src/react/context/__tests__/ApolloProvider.test.tsx +++ b/src/react/context/__tests__/ApolloProvider.test.tsx @@ -1,39 +1,44 @@ -import React, { useContext } from 'react'; -import { render, screen } from '@testing-library/react'; +import React, { useContext } from "react"; +import { render, screen } from "@testing-library/react"; -import { ApolloLink } from '../../../link/core'; -import { ApolloClient } from '../../../core'; -import { InMemoryCache as Cache } from '../../../cache'; -import { ApolloProvider } from '../ApolloProvider'; -import { getApolloContext } from '../ApolloContext'; +import { ApolloLink } from "../../../link/core"; +import { ApolloClient } from "../../../core"; +import { InMemoryCache as Cache } from "../../../cache"; +import { ApolloProvider, ApolloProviderProps } from "../ApolloProvider"; +import { ApolloContextValue, getApolloContext } from "../ApolloContext"; -describe('<ApolloProvider /> Component', () => { +describe("<ApolloProvider /> Component", () => { const client = new ApolloClient({ cache: new Cache(), - link: new ApolloLink((o, f) => (f ? f(o) : null)) + link: new ApolloLink((o, f) => (f ? f(o) : null)), }); - it('should render children components', () => { + const anotherClient = new ApolloClient({ + cache: new Cache(), + link: new ApolloLink((o, f) => (f ? f(o) : null)), + }); + + it("should render children components", () => { render( <ApolloProvider client={client}> <div className="unique">Test</div> </ApolloProvider> ); - expect(screen.getByText('Test')).toBeTruthy(); + expect(screen.getByText("Test")).toBeTruthy(); }); - it('should support the 2.0', () => { + it("should support the 2.0", () => { render( <ApolloProvider client={{} as ApolloClient<any>}> <div className="unique">Test</div> </ApolloProvider> ); - expect(screen.getByText('Test')).toBeTruthy(); + expect(screen.getByText("Test")).toBeTruthy(); }); - it('should require a client', () => { + it("should require a client", () => { const originalConsoleError = console.error; console.error = () => { /* noop */ @@ -50,22 +55,22 @@ describe('<ApolloProvider /> Component', () => { </ApolloContext.Provider> ); }).toThrowError( - 'ApolloProvider was not passed a client instance. Make ' + + "ApolloProvider was not passed a client instance. Make " + 'sure you pass in your client via the "client" prop.' ); console.error = originalConsoleError; }); - it('should not require a store', () => { + it("should not require a store", () => { render( <ApolloProvider client={client}> <div className="unique">Test</div> </ApolloProvider> ); - expect(screen.getByText('Test')).toBeTruthy(); + expect(screen.getByText("Test")).toBeTruthy(); }); - it('should add the client to the children context', () => { + it("should add the client to the children context", () => { const TestChild = () => { const context = useContext(getApolloContext()); expect(context.client).toEqual(client); @@ -79,7 +84,7 @@ describe('<ApolloProvider /> Component', () => { ); }); - it('should update props when the client changes', () => { + it("should update props when the client changes", () => { let clientToCheck = client; const TestChild = () => { @@ -95,7 +100,7 @@ describe('<ApolloProvider /> Component', () => { const newClient = new ApolloClient({ cache: new Cache(), - link: new ApolloLink((o, f) => (f ? f(o) : null)) + link: new ApolloLink((o, f) => (f ? 
f(o) : null)), }); clientToCheck = newClient; rerender( @@ -104,4 +109,69 @@ describe('<ApolloProvider /> Component', () => { </ApolloProvider> ); }); + + describe.each< + [ + string, + Omit<ApolloProviderProps<any>, "children">, + Omit<ApolloProviderProps<any>, "children">, + ] + >([["client", { client }, { client: anotherClient }]])( + "context value stability, %s prop", + (prop, value, childValue) => { + it(`should not recreate the context value if the ${prop} prop didn't change`, () => { + let lastContext: ApolloContextValue | undefined; + + const TestChild = () => { + lastContext = useContext(getApolloContext()); + return null; + }; + + const { rerender } = render( + <ApolloProvider {...value}> + <TestChild /> + </ApolloProvider> + ); + + const firstContextValue = lastContext; + + rerender( + <ApolloProvider {...value}> + <TestChild /> + </ApolloProvider> + ); + + expect(lastContext).toBe(firstContextValue); + }); + + it(`should not recreate the context if the parent context value differs, but the ${prop} prop didn't change`, () => { + let lastContext: ApolloContextValue | undefined; + + const TestChild = () => { + lastContext = useContext(getApolloContext()); + return null; + }; + + const { rerender } = render( + <ApolloProvider {...value}> + <ApolloProvider {...childValue}> + <TestChild /> + </ApolloProvider> + </ApolloProvider> + ); + + const firstContextValue = lastContext; + + rerender( + <ApolloProvider {...value}> + <ApolloProvider {...childValue}> + <TestChild /> + </ApolloProvider> + </ApolloProvider> + ); + + expect(lastContext).toBe(firstContextValue); + }); + } + ); }); diff --git a/src/react/hoc/__tests__/client-option.test.tsx b/src/react/hoc/__tests__/client-option.test.tsx --- a/src/react/hoc/__tests__/client-option.test.tsx +++ b/src/react/hoc/__tests__/client-option.test.tsx @@ -1,17 +1,17 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../core'; -import { ApolloProvider } from '../../context'; -import { InMemoryCache as Cache } from '../../../cache'; -import { itAsync, mockSingleLink } from '../../../testing'; -import { graphql } from '../graphql'; -import { ChildProps } from '../types'; - -describe('client option', () => { - it('renders with client from options', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../core"; +import { ApolloProvider } from "../../context"; +import { InMemoryCache as Cache } from "../../../cache"; +import { itAsync, mockSingleLink } from "../../../testing"; +import { graphql } from "../graphql"; +import { ChildProps } from "../types"; + +describe("client option", () => { + it("renders with client from options", () => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -21,22 +21,22 @@ describe('client option', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const config = { options: { - client - } + client, + }, }; const ContainerWithData = graphql<{}, 
Data>(query, config)(() => null); const { unmount } = render( @@ -44,7 +44,7 @@ describe('client option', () => { client={ new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }) } > @@ -54,7 +54,7 @@ describe('client option', () => { unmount(); }); - itAsync('doesnt require a recycler', (resolve, reject) => { + itAsync("doesnt require a recycler", (resolve, reject) => { const query = gql` query people { allPeople(first: 1) { @@ -64,24 +64,27 @@ describe('client option', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const config = { options: { - client - } + client, + }, }; - let renderCount = 0 - const ContainerWithData = graphql<{}, Data>(query, config)(() => { + let renderCount = 0; + const ContainerWithData = graphql<{}, Data>( + query, + config + )(() => { renderCount += 1; return null; }); @@ -92,72 +95,77 @@ describe('client option', () => { }).then(resolve, reject); }); - itAsync('ignores client from context if client from options is present', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "ignores client from context if client from options is present", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } + `; + const dataProvider = { + allPeople: { people: [{ name: "Leia Organa Solo" }] }, + }; + + type Data = typeof dataProvider; + const linkProvider = mockSingleLink({ + request: { query }, + result: { data: dataProvider }, + }); + const clientProvider = new ApolloClient({ + link: linkProvider, + cache: new Cache({ addTypename: false }), + }); + const dataOptions = { + allPeople: { people: [{ name: "Luke Skywalker" }] }, + }; + const linkOptions = mockSingleLink({ + request: { query }, + result: { data: dataOptions }, + }); + const clientOptions = new ApolloClient({ + link: linkOptions, + cache: new Cache({ addTypename: false }), + }); + + const config = { + options: { + client: clientOptions, + }, + }; + + class Container extends React.Component<ChildProps<{}, Data>> { + componentDidUpdate() { + const { data } = this.props; + expect(data!.loading).toBeFalsy(); // first data + expect(data!.allPeople).toEqual({ + people: [{ name: "Luke Skywalker" }], + }); + done = true; + } + render() { + return null; + } } - `; - const dataProvider = { - allPeople: { people: [{ name: 'Leia Organa Solo' }] } - }; - - type Data = typeof dataProvider; - const linkProvider = mockSingleLink({ - request: { query }, - result: { data: dataProvider } - }); - const clientProvider = new ApolloClient({ - link: linkProvider, - cache: new Cache({ addTypename: false }) - }); - const dataOptions = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const linkOptions = mockSingleLink({ - request: { query }, - result: { data: dataOptions } - }); - const clientOptions = new ApolloClient({ - link: linkOptions, - cache: new Cache({ addTypename: false }) - }); - - const config = { - options: { - client: clientOptions - } - }; - - class Container extends React.Component<ChildProps<{}, Data>> { - 
componentDidUpdate() { - const { data } = this.props; - expect(data!.loading).toBeFalsy(); // first data - expect(data!.allPeople).toEqual({ - people: [{ name: 'Luke Skywalker' }] - }); - done = true; - } - render() { - return null; - } + const ContainerWithData = graphql<{}, Data>(query, config)(Container); + render( + <ApolloProvider client={clientProvider}> + <ContainerWithData /> + </ApolloProvider> + ); + + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); } - const ContainerWithData = graphql<{}, Data>(query, config)(Container); - render( - <ApolloProvider client={clientProvider}> - <ContainerWithData /> - </ApolloProvider> - ); - - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + ); - itAsync('exposes refetch as part of the props api', (resolve, reject) => { + itAsync("exposes refetch as part of the props api", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query people($first: Int) { @@ -171,16 +179,16 @@ describe('client option', () => { const variables = { first: 1 }; type Variables = typeof variables; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data1; const link = mockSingleLink({ request: { query, variables }, - result: { data: data1 } + result: { data: data1 }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Variables, Data, Variables>(query)( diff --git a/src/react/hoc/__tests__/fragments.test.tsx b/src/react/hoc/__tests__/fragments.test.tsx --- a/src/react/hoc/__tests__/fragments.test.tsx +++ b/src/react/hoc/__tests__/fragments.test.tsx @@ -1,18 +1,18 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; -import { ApolloClient } from '../../../core'; -import { ApolloProvider } from '../../context'; -import { InMemoryCache as Cache } from '../../../cache'; -import { itAsync, mockSingleLink } from '../../../testing'; -import { graphql } from '../graphql'; -import { ChildProps } from '../types'; +import { ApolloClient } from "../../../core"; +import { ApolloProvider } from "../../context"; +import { InMemoryCache as Cache } from "../../../cache"; +import { itAsync, mockSingleLink } from "../../../testing"; +import { graphql } from "../graphql"; +import { ChildProps } from "../types"; -describe('fragments', () => { +describe("fragments", () => { // XXX in a later version, we should support this for composition - it('throws if you only pass a fragment', () => { + it("throws if you only pass a fragment", () => { const query: DocumentNode = gql` fragment Failure on PeopleConnection { people { @@ -21,17 +21,17 @@ describe('fragments', () => { } `; const expectedData = { - allPeople: { people: [{ name: 'Luke Skywalker' }] } + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; type Data = typeof expectedData; const link = mockSingleLink({ request: { query }, - result: { data: expectedData } + result: { data: expectedData }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); try { @@ -40,9 +40,7 @@ describe('fragments', () => { 
componentDidUpdate() { const { props } = this; expect(props.data!.loading).toBeFalsy(); - expect(props.data!.allPeople).toEqual( - expectedData.allPeople - ); + expect(props.data!.allPeople).toEqual(expectedData.allPeople); } render() { return null; @@ -61,63 +59,64 @@ describe('fragments', () => { } }); - itAsync('correctly fetches a query with inline fragments', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - __typename - ...person + itAsync( + "correctly fetches a query with inline fragments", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + __typename + ...person + } } - } - fragment person on PeopleConnection { - people { - name + fragment person on PeopleConnection { + people { + name + } } - } - `; - const data = { - allPeople: { - __typename: 'PeopleConnection', - people: [{ name: 'Luke Skywalker' }] - } - }; + `; + const data = { + allPeople: { + __typename: "PeopleConnection", + people: [{ name: "Luke Skywalker" }], + }, + }; - type Data = typeof data; + type Data = typeof data; - const link = mockSingleLink({ - request: { query }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const Container = graphql<{}, Data>(query)( - class extends React.Component<ChildProps<{}, Data>> { - componentDidUpdate() { - expect(this.props.data!.loading).toBeFalsy(); - expect(this.props.data!.allPeople).toEqual( - data.allPeople - ); - done = true; - } - render() { - return null; + const Container = graphql<{}, Data>(query)( + class extends React.Component<ChildProps<{}, Data>> { + componentDidUpdate() { + expect(this.props.data!.loading).toBeFalsy(); + expect(this.props.data!.allPeople).toEqual(data.allPeople); + done = true; + } + render() { + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); }); diff --git a/src/react/hoc/__tests__/mutations/index.test.tsx b/src/react/hoc/__tests__/mutations/index.test.tsx --- a/src/react/hoc/__tests__/mutations/index.test.tsx +++ b/src/react/hoc/__tests__/mutations/index.test.tsx @@ -1,18 +1,14 @@ -import React from 'react'; -import { render } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { - createMockClient, - itAsync, - MockedProvider, -} from '../../../../testing'; -import { NormalizedCacheObject } from '../../../../cache'; -import { ApolloProvider } from '../../../context'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; +import React from "react"; +import { render } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { createMockClient, itAsync, MockedProvider } from "../../../../testing"; +import { NormalizedCacheObject } from "../../../../cache"; +import { ApolloProvider } from 
"../../../context"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; const query: DocumentNode = gql` mutation addPerson { @@ -35,10 +31,10 @@ interface Variables { } const expectedData = { - allPeople: { people: [{ name: 'Luke Skywalker' }] } + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; -describe('graphql(mutation)', () => { +describe("graphql(mutation)", () => { let error: typeof console.error; let client: ApolloClient<NormalizedCacheObject>; beforeEach(() => { @@ -51,12 +47,12 @@ describe('graphql(mutation)', () => { console.error = error; }); - it('binds a mutation to props', () => { + it("binds a mutation to props", () => { const ContainerWithData = graphql(query)(({ mutate, result }) => { expect(mutate).toBeTruthy(); expect(result).toBeTruthy(); - expect(typeof mutate).toBe('function'); - expect(typeof result).toBe('object'); + expect(typeof mutate).toBe("function"); + expect(typeof result).toBe("object"); return null; }); @@ -67,7 +63,7 @@ describe('graphql(mutation)', () => { ); }); - it('binds a mutation result to props', () => { + it("binds a mutation result to props", () => { type InjectedProps = { result: any; }; @@ -77,7 +73,7 @@ describe('graphql(mutation)', () => { )(({ result }) => { const { loading, error } = result; expect(result).toBeTruthy(); - expect(typeof loading).toBe('boolean'); + expect(typeof loading).toBe("boolean"); expect(error).toBeFalsy(); return null; @@ -90,7 +86,7 @@ describe('graphql(mutation)', () => { ); }); - it('binds a mutation to props with a custom name', () => { + it("binds a mutation to props with a custom name", () => { interface Props {} type InjectedProps = { @@ -100,12 +96,12 @@ describe('graphql(mutation)', () => { const ContainerWithData = graphql<Props, Data, Variables, InjectedProps>( query, - { name: 'customMutation' } + { name: "customMutation" } )(({ customMutation, customMutationResult }) => { expect(customMutation).toBeTruthy(); expect(customMutationResult).toBeTruthy(); - expect(typeof customMutation).toBe('function'); - expect(typeof customMutationResult).toBe('object'); + expect(typeof customMutation).toBe("function"); + expect(typeof customMutationResult).toBe("object"); return null; }); @@ -116,7 +112,7 @@ describe('graphql(mutation)', () => { ); }); - it('binds a mutation to custom props', () => { + it("binds a mutation to custom props", () => { interface Props { methodName: string; } @@ -128,12 +124,12 @@ describe('graphql(mutation)', () => { { props: ({ ownProps, mutate: addPerson }) => ({ [ownProps.methodName]: (name: string) => - addPerson!({ variables: { name } }) - }) + addPerson!({ variables: { name } }), + }), } )(({ myInjectedMutationMethod }) => { expect(myInjectedMutationMethod).toBeTruthy(); - expect(typeof myInjectedMutationMethod).toBe('function'); + expect(typeof myInjectedMutationMethod).toBe("function"); return null; }); @@ -144,7 +140,7 @@ describe('graphql(mutation)', () => { ); }); - itAsync('does not swallow children errors', (resolve, reject) => { + itAsync("does not swallow children errors", (resolve, reject) => { let bar: any; const ContainerWithData = graphql(query)(() => { bar(); // this will throw @@ -173,11 +169,11 @@ describe('graphql(mutation)', () => { ); }); - itAsync('can execute a mutation', (resolve, reject) => { + itAsync("can execute a mutation", (resolve, reject) => { const Container = graphql(query)( class extends React.Component<ChildProps> { componentDidMount() { - this.props.mutate!().then(result => { + this.props.mutate!().then((result) 
=> { expect(result && result.data).toEqual(expectedData); resolve(); }); @@ -195,95 +191,101 @@ describe('graphql(mutation)', () => { ); }); - itAsync('can execute a mutation with variables from props', (resolve, reject) => { - const queryWithVariables = gql` - mutation addPerson($first: Int) { - allPeople(first: $first) { - people { - name + itAsync( + "can execute a mutation with variables from props", + (resolve, reject) => { + const queryWithVariables = gql` + mutation addPerson($first: Int) { + allPeople(first: $first) { + people { + name + } } } - } - `; - client = createMockClient(expectedData, queryWithVariables, { - first: 1 - }); - - interface Props { - first: number; - } + `; + client = createMockClient(expectedData, queryWithVariables, { + first: 1, + }); - const Container = graphql<Props>(queryWithVariables)( - class extends React.Component<ChildProps<Props>> { - componentDidMount() { - this.props.mutate!().then(result => { - expect(result && result.data).toEqual(expectedData); - resolve(); - }); - } - render() { - return null; - } + interface Props { + first: number; } - ); - render( - <ApolloProvider client={client}> - <Container first={1} /> - </ApolloProvider> - ); - }); - - itAsync('can execute a mutation with variables from BOTH options and arguments', (resolve, reject) => { - const queryWithVariables = gql` - mutation addPerson($first: Int!, $second: Int!) { - allPeople(first: $first) { - people { - name + const Container = graphql<Props>(queryWithVariables)( + class extends React.Component<ChildProps<Props>> { + componentDidMount() { + this.props.mutate!().then((result) => { + expect(result && result.data).toEqual(expectedData); + resolve(); + }); + } + render() { + return null; } } - } - `; + ); - const mocks = [ - { - request: { - query: queryWithVariables, - variables: { - first: 1, - second: 2 + render( + <ApolloProvider client={client}> + <Container first={1} /> + </ApolloProvider> + ); + } + ); + + itAsync( + "can execute a mutation with variables from BOTH options and arguments", + (resolve, reject) => { + const queryWithVariables = gql` + mutation addPerson($first: Int!, $second: Int!) 
{ + allPeople(first: $first) { + people { + name + } } - }, - result: { data: expectedData }, - } - ]; - - interface Props {} - - const Container = graphql<Props>(queryWithVariables, { - options: () => ({ - variables: { first: 1 } - }) - })( - class extends React.Component<ChildProps<Props>> { - componentDidMount() { - this.props.mutate!({ - variables: { second: 2 } - }).then(result => { - expect(result && result.data).toEqual(expectedData); - resolve(); - }); } - render() { - return null; + `; + + const mocks = [ + { + request: { + query: queryWithVariables, + variables: { + first: 1, + second: 2, + }, + }, + result: { data: expectedData }, + }, + ]; + + interface Props {} + + const Container = graphql<Props>(queryWithVariables, { + options: () => ({ + variables: { first: 1 }, + }), + })( + class extends React.Component<ChildProps<Props>> { + componentDidMount() { + this.props.mutate!({ + variables: { second: 2 }, + }).then((result) => { + expect(result && result.data).toEqual(expectedData); + resolve(); + }); + } + render() { + return null; + } } - } - ); + ); - render( - <MockedProvider mocks={mocks}> - <Container /> - </MockedProvider> - ); - }); + render( + <MockedProvider mocks={mocks}> + <Container /> + </MockedProvider> + ); + } + ); }); diff --git a/src/react/hoc/__tests__/mutations/lifecycle.test.tsx b/src/react/hoc/__tests__/mutations/lifecycle.test.tsx --- a/src/react/hoc/__tests__/mutations/lifecycle.test.tsx +++ b/src/react/hoc/__tests__/mutations/lifecycle.test.tsx @@ -1,11 +1,11 @@ -import React from 'react'; -import { render, cleanup } from '@testing-library/react'; -import gql from 'graphql-tag'; +import React from "react"; +import { render, cleanup } from "@testing-library/react"; +import gql from "graphql-tag"; -import { ApolloProvider } from '../../../context/ApolloProvider'; -import { itAsync, createMockClient } from '../../../../testing/core'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; +import { ApolloProvider } from "../../../context/ApolloProvider"; +import { itAsync, createMockClient } from "../../../../testing/core"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; const query = gql` mutation addPerson($id: Int) { @@ -17,40 +17,43 @@ const query = gql` } `; const expectedData = { - allPeople: { people: [{ name: 'Luke Skywalker' }] } + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; -describe('graphql(mutation) lifecycle', () => { +describe("graphql(mutation) lifecycle", () => { afterEach(cleanup); - itAsync('allows falsy values in the mapped variables from props', (resolve, reject) => { - const client = createMockClient(expectedData, query, { id: null }); + itAsync( + "allows falsy values in the mapped variables from props", + (resolve, reject) => { + const client = createMockClient(expectedData, query, { id: null }); - interface Props { - id: string | null; - } - - const Container = graphql<Props>(query)( - class extends React.Component<ChildProps<Props>> { - componentDidMount() { - this.props.mutate!().then(result => { - expect(result && result.data).toEqual(expectedData); - resolve(); - }); - } + interface Props { + id: string | null; + } - render() { - return null; + const Container = graphql<Props>(query)( + class extends React.Component<ChildProps<Props>> { + componentDidMount() { + this.props.mutate!().then((result) => { + expect(result && result.data).toEqual(expectedData); + resolve(); + }); + } + + render() { + return null; + } } - } - ); + ); - render( - 
<ApolloProvider client={client}> - <Container id={null} /> - </ApolloProvider> - ); - }); + render( + <ApolloProvider client={client}> + <Container id={null} /> + </ApolloProvider> + ); + } + ); it("errors if the passed props don't contain the needed variables", () => { const client = createMockClient(expectedData, query, { first: 1 }); @@ -69,53 +72,56 @@ describe('graphql(mutation) lifecycle', () => { } }); - itAsync('rebuilds the mutation on prop change when using `options`', (resolve, reject) => { - const client = createMockClient(expectedData, query, { - id: 2 - }); + itAsync( + "rebuilds the mutation on prop change when using `options`", + (resolve, reject) => { + const client = createMockClient(expectedData, query, { + id: 2, + }); - interface Props { - listId: number; - } + interface Props { + listId: number; + } - function options(props: Props) { - return { - variables: { - id: props.listId - } - }; - } + function options(props: Props) { + return { + variables: { + id: props.listId, + }, + }; + } - class Container extends React.Component<ChildProps<Props>> { - render() { - if (this.props.listId !== 2) return null; - this.props.mutate!().then(() => resolve()); - return null; + class Container extends React.Component<ChildProps<Props>> { + render() { + if (this.props.listId !== 2) return null; + this.props.mutate!().then(() => resolve()); + return null; + } } - } - const ContainerWithMutate = graphql<Props>(query, { options })(Container); + const ContainerWithMutate = graphql<Props>(query, { options })(Container); - class ChangingProps extends React.Component<{}, { listId: number }> { - state = { listId: 1 }; + class ChangingProps extends React.Component<{}, { listId: number }> { + state = { listId: 1 }; - componentDidMount() { - setTimeout(() => this.setState({ listId: 2 }), 50); - } + componentDidMount() { + setTimeout(() => this.setState({ listId: 2 }), 50); + } - render() { - return <ContainerWithMutate listId={this.state.listId} />; + render() { + return <ContainerWithMutate listId={this.state.listId} />; + } } - } - render( - <ApolloProvider client={client}> - <ChangingProps /> - </ApolloProvider> - ); - }); + render( + <ApolloProvider client={client}> + <ChangingProps /> + </ApolloProvider> + ); + } + ); - itAsync('can execute a mutation with custom variables', (resolve, reject) => { + itAsync("can execute a mutation with custom variables", (resolve, reject) => { const client = createMockClient(expectedData, query, { id: 1 }); interface Variables { id: number; @@ -124,7 +130,7 @@ describe('graphql(mutation) lifecycle', () => { const Container = graphql<{}, {}, Variables>(query)( class extends React.Component<ChildProps<{}, {}, Variables>> { componentDidMount() { - this.props.mutate!({ variables: { id: 1 } }).then(result => { + this.props.mutate!({ variables: { id: 1 } }).then((result) => { expect(result && result.data).toEqual(expectedData); resolve(); }); diff --git a/src/react/hoc/__tests__/mutations/queries.test.tsx b/src/react/hoc/__tests__/mutations/queries.test.tsx --- a/src/react/hoc/__tests__/mutations/queries.test.tsx +++ b/src/react/hoc/__tests__/mutations/queries.test.tsx @@ -1,90 +1,91 @@ -import React from 'react'; -import { act, render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient, MutationUpdaterFunction, ApolloCache } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { - 
itAsync, - createMockClient, - mockSingleLink, -} from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -const IS_REACT_18 = React.version.startsWith('18'); +import React from "react"; +import { act, render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; -describe('graphql(mutation) query integration', () => { - itAsync('allows for passing optimisticResponse for a mutation', (resolve, reject) => { - const query: DocumentNode = gql` - mutation createTodo { - createTodo { - id - text - completed +import { + ApolloClient, + MutationUpdaterFunction, + ApolloCache, +} from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, createMockClient, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("graphql(mutation) query integration", () => { + itAsync( + "allows for passing optimisticResponse for a mutation", + (resolve, reject) => { + const query: DocumentNode = gql` + mutation createTodo { + createTodo { + id + text + completed + __typename + } __typename } - __typename - } - `; - - const data = { - __typename: 'Mutation', - createTodo: { - __typename: 'Todo', - id: '99', - text: 'This one was created with a mutation.', - completed: true - } - }; - - type Data = typeof data; - - let mutateFired = false; - const client = createMockClient(data, query); - const Container = graphql<{}, Data>(query)( - class extends React.Component<ChildProps<{}, Data>> { - componentDidMount() { - const optimisticResponse = { - __typename: 'Mutation', - createTodo: { - __typename: 'Todo', - id: '99', - text: 'Optimistically generated', - completed: true - } - }; + `; + + const data = { + __typename: "Mutation", + createTodo: { + __typename: "Todo", + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, + }; - this.props.mutate!({ optimisticResponse }).then(result => { - expect(result && result.data).toEqual(data); - mutateFired = true; - }); + type Data = typeof data; + + let mutateFired = false; + const client = createMockClient(data, query); + const Container = graphql<{}, Data>(query)( + class extends React.Component<ChildProps<{}, Data>> { + componentDidMount() { + const optimisticResponse = { + __typename: "Mutation", + createTodo: { + __typename: "Todo", + id: "99", + text: "Optimistically generated", + completed: true, + }, + }; + + this.props.mutate!({ optimisticResponse }).then((result) => { + expect(result && result.data).toEqual(data); + mutateFired = true; + }); - const dataInStore = client.cache.extract(true); - expect(dataInStore['Todo:99']).toEqual( - optimisticResponse.createTodo - ); - } - render() { - return null; + const dataInStore = client.cache.extract(true); + expect(dataInStore["Todo:99"]).toEqual( + optimisticResponse.createTodo + ); + } + render() { + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(mutateFired).toBeTruthy(); - }).then(resolve, reject); - }); + waitFor(() => { + expect(mutateFired).toBeTruthy(); + }).then(resolve, reject); + } + ); - itAsync('allows for updating queries from a mutation', (resolve, reject) => { + itAsync("allows for updating queries from 
a mutation", (resolve, reject) => { const query: DocumentNode = gql` query todos { todo_list { @@ -111,20 +112,20 @@ describe('graphql(mutation) query integration', () => { const mutationData = { createTodo: { - id: '99', - text: 'This one was created with a mutation.', - completed: true - } + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, }; type MutationData = typeof mutationData; const optimisticResponse = { createTodo: { - id: '99', - text: 'Optimistically generated', - completed: true - } + id: "99", + text: "Optimistically generated", + completed: true, + }, }; interface QueryData { todo_list: { @@ -148,13 +149,13 @@ describe('graphql(mutation) query integration', () => { }; const expectedData = { - todo_list: { id: '123', title: 'how to apollo', tasks: [] } + todo_list: { id: "123", title: "how to apollo", tasks: [] }, }; const link = mockSingleLink( { request: { query }, - result: { data: expectedData } + result: { data: expectedData }, }, { request: { query: mutation }, result: { data: mutationData } } ); @@ -165,7 +166,7 @@ describe('graphql(mutation) query integration', () => { type WithQueryChildProps = ChildProps<{}, QueryData>; const withMutation = graphql<WithQueryChildProps, MutationData>(mutation, { - options: () => ({ optimisticResponse, update }) + options: () => ({ optimisticResponse, update }), }); let renderCount = 0; @@ -176,7 +177,7 @@ describe('graphql(mutation) query integration', () => { if (!this.props.data || !this.props.data.todo_list) return null; if (!this.props.data.todo_list.tasks.length) { act(() => { - this.props.mutate!().then(result => { + this.props.mutate!().then((result) => { expect(result && result.data).toEqual(mutationData); }); }); @@ -186,23 +187,14 @@ describe('graphql(mutation) query integration', () => { switch (++renderCount) { case 1: expect(this.props.data.todo_list.tasks).toEqual([ - optimisticResponse.createTodo + optimisticResponse.createTodo, ]); break; case 2: expect(this.props.data.todo_list.tasks).toEqual([ - mutationData.createTodo + mutationData.createTodo, ]); break; - case 3: - if (IS_REACT_18) { - expect(this.props.data.todo_list.tasks).toEqual([ - mutationData.createTodo - ]); - } else { - reject(`too many renders (${renderCount})`); - } - break; default: reject(`too many renders (${renderCount})`); } @@ -220,151 +212,138 @@ describe('graphql(mutation) query integration', () => { ); waitFor(() => { - if (IS_REACT_18) { - expect(renderCount).toBe(3); - } else { - expect(renderCount).toBe(2); - } + expect(renderCount).toBe(2); }).then(resolve, reject); }); - itAsync('allows for updating queries from a mutation automatically', (resolve, reject) => { - const query: DocumentNode = gql` - query getMini($id: ID!) { - mini(id: $id) { - __typename - id - cover(maxWidth: 600, maxHeight: 400) + itAsync( + "allows for updating queries from a mutation automatically", + (resolve, reject) => { + const query: DocumentNode = gql` + query getMini($id: ID!) { + mini(id: $id) { + __typename + id + cover(maxWidth: 600, maxHeight: 400) + } } - } - `; - - const queryData = { - mini: { - id: 1, - __typename: 'Mini', - cover: 'image1' - } - }; + `; + + const queryData = { + mini: { + id: 1, + __typename: "Mini", + cover: "image1", + }, + }; - type Data = typeof queryData; + type Data = typeof queryData; - const variables = { id: 1 }; + const variables = { id: 1 }; - type Variables = typeof variables; + type Variables = typeof variables; - const mutation: DocumentNode = gql` - mutation($signature: String!) 
{ - mini: submitMiniCoverS3DirectUpload(signature: $signature) { - __typename - id - cover(maxWidth: 600, maxHeight: 400) + const mutation: DocumentNode = gql` + mutation ($signature: String!) { + mini: submitMiniCoverS3DirectUpload(signature: $signature) { + __typename + id + cover(maxWidth: 600, maxHeight: 400) + } } - } - `; - - const mutationData = { - mini: { - id: 1, - cover: 'image2', - __typename: 'Mini' - } - }; - - type MutationData = typeof mutationData; + `; + + const mutationData = { + mini: { + id: 1, + cover: "image2", + __typename: "Mini", + }, + }; - interface MutationVariables { - signature: string; - } + type MutationData = typeof mutationData; - const link = mockSingleLink( - { request: { query, variables }, result: { data: queryData } }, - { - request: { query: mutation, variables: { signature: '1233' } }, - result: { data: mutationData } + interface MutationVariables { + signature: string; } - ); - const cache = new Cache({ addTypename: false }); - const client = new ApolloClient({ link, cache }); - class Boundary extends React.Component<React.PropsWithChildren> { - componentDidCatch(e: any) { - reject(e); - } - render() { - // eslint-disable-next-line testing-library/no-node-access - return this.props.children; - } - } + const link = mockSingleLink( + { request: { query, variables }, result: { data: queryData } }, + { + request: { query: mutation, variables: { signature: "1233" } }, + result: { data: mutationData }, + } + ); + const cache = new Cache({ addTypename: false }); + const client = new ApolloClient({ link, cache }); - let count = 0; - const MutationContainer = graphql<MutationVariables, MutationData>( - mutation - )( - class extends React.Component< - ChildProps<MutationVariables, MutationData> - > { + class Boundary extends React.Component<React.PropsWithChildren> { + componentDidCatch(e: any) { + reject(e); + } render() { - if (count === 1) { - setTimeout(() => { - this.props.mutate!() - .then(result => { - expect(result && result.data).toEqual( - mutationData - ); - }) - .catch(reject); - }); - } - return null; + // eslint-disable-next-line testing-library/no-node-access + return this.props.children; } } - ); - const Container = graphql<Variables, Data>(query)( - class extends React.Component<ChildProps<Variables, Data>> { - render() { - if (count === 1) { - if (IS_REACT_18) { - expect(this.props.data!.mini).toEqual(mutationData.mini); - } else { - expect(this.props.data!.mini).toEqual(queryData.mini); + let count = 0; + const MutationContainer = graphql<MutationVariables, MutationData>( + mutation + )( + class extends React.Component< + ChildProps<MutationVariables, MutationData> + > { + render() { + if (count === 1) { + setTimeout(() => { + this.props.mutate!() + .then((result) => { + expect(result && result.data).toEqual(mutationData); + }) + .catch(reject); + }); } + return null; } - if (count === 2) { - expect(this.props.data!.mini).toEqual( - mutationData.mini + } + ); + + const Container = graphql<Variables, Data>(query)( + class extends React.Component<ChildProps<Variables, Data>> { + render() { + if (count === 1) { + expect(this.props.data!.mini).toEqual(queryData.mini); + } + if (count === 2) { + expect(this.props.data!.mini).toEqual(mutationData.mini); + } + count++; + + return ( + <MutationContainer {...this.props.data!.mini} signature="1233" /> ); } - count++; - - return ( - <MutationContainer {...this.props.data!.mini} signature="1233" /> - ); } - } - ); + ); - render( - <ApolloProvider client={client}> - <Boundary> - <Container id={1} /> - 
</Boundary> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Boundary> + <Container id={1} /> + </Boundary> + </ApolloProvider> + ); - waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(2); - } else { + waitFor(() => { expect(count).toBe(3); - } - }).then(resolve, reject); - }); + }).then(resolve, reject); + } + ); - it('should be able to override the internal `ignoreResults` setting', async () => { + it("should be able to override the internal `ignoreResults` setting", async () => { const mutation: DocumentNode = gql` - mutation($signature: String!) { + mutation ($signature: String!) { mini: submitMiniCoverS3DirectUpload(signature: $signature) { __typename id @@ -376,9 +355,9 @@ describe('graphql(mutation) query integration', () => { const mutationData = { mini: { id: 1, - cover: 'image2', - __typename: 'Mini' - } + cover: "image2", + __typename: "Mini", + }, }; type MutationData = typeof mutationData; @@ -388,8 +367,8 @@ describe('graphql(mutation) query integration', () => { } const link = mockSingleLink({ - request: { query: mutation, variables: { signature: '1233' } }, - result: { data: mutationData } + request: { query: mutation, variables: { signature: "1233" } }, + result: { data: mutationData }, }); const cache = new Cache({ addTypename: false }); @@ -408,10 +387,8 @@ describe('graphql(mutation) query integration', () => { case 0: expect(this.props.result!.loading).toBeFalsy(); setTimeout(() => { - this.props.mutate!().then(result => { - expect(result && result.data).toEqual( - mutationData - ); + this.props.mutate!().then((result) => { + expect(result && result.data).toEqual(mutationData); }); }); break; diff --git a/src/react/hoc/__tests__/mutations/recycled-queries.test.tsx b/src/react/hoc/__tests__/mutations/recycled-queries.test.tsx --- a/src/react/hoc/__tests__/mutations/recycled-queries.test.tsx +++ b/src/react/hoc/__tests__/mutations/recycled-queries.test.tsx @@ -1,19 +1,21 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloCache, ApolloClient, MutationUpdaterFunction } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { MutationFunction } from '../../../types/types'; -import { mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -const IS_REACT_18 = React.version.startsWith('18') - -describe('graphql(mutation) update queries', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { + ApolloCache, + ApolloClient, + MutationUpdaterFunction, +} from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { MutationFunction } from "../../../types/types"; +import { mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("graphql(mutation) update queries", () => { // This is a long test that keeps track of a lot of stuff. It is testing // whether or not the `options.update` reducers will run even when a given // container component is unmounted. 
@@ -33,7 +35,7 @@ describe('graphql(mutation) update queries', () => { // // There are also a lot more assertions on the way to make sure everything is // going as smoothly as planned. - it('will run `update` for a previously mounted component', async () => { + it("will run `update` for a previously mounted component", async () => { const query: DocumentNode = gql` query todos { todo_list { @@ -68,10 +70,10 @@ describe('graphql(mutation) update queries', () => { const mutationData = { createTodo: { - id: '99', - text: 'This one was created with a mutation.', - completed: true - } + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, }; type MutationData = typeof mutationData; @@ -84,35 +86,33 @@ describe('graphql(mutation) update queries', () => { > = (proxy, result) => { todoUpdateQueryCount++; const data = JSON.parse( - JSON.stringify( - proxy.readQuery<QueryData>({ query }) - ) // read from cache + JSON.stringify(proxy.readQuery<QueryData>({ query })) // read from cache ); data!.todo_list.tasks.push(result.data!.createTodo); // update value proxy.writeQuery({ query, data }); // write to cache }; const expectedData = { - todo_list: { id: '123', title: 'how to apollo', tasks: [] } + todo_list: { id: "123", title: "how to apollo", tasks: [] }, }; const link = mockSingleLink( { request: { query }, - result: { data: expectedData } + result: { data: expectedData }, }, { request: { query: mutation }, result: { data: mutationData } }, { request: { query: mutation }, result: { data: mutationData } } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let mutate: MutationFunction<MutationData>; const MyMutation = graphql<{}, MutationData>(mutation, { - options: () => ({ update }) + options: () => ({ update }), })( class extends React.Component<ChildProps<{}, MutationData>> { componentDidMount() { @@ -128,6 +128,7 @@ describe('graphql(mutation) update queries', () => { let queryMountCount = 0; let queryUnmountCount = 0; let queryRenderCount = 0; + const testFailures: any[] = []; const MyQuery = graphql<{}, QueryData>(query)( class extends React.Component<ChildProps<{}, QueryData>> { @@ -140,55 +141,60 @@ describe('graphql(mutation) update queries', () => { } render() { - switch (queryRenderCount) { - case 0: - expect(this.props.data!.loading).toBeTruthy(); - expect(this.props.data!.todo_list).toBeFalsy(); - break; - case 1: - expect(this.props.data!.loading).toBeFalsy(); - if (!IS_REACT_18) { + try { + switch (queryRenderCount) { + case 0: + expect(this.props.data!.loading).toBeTruthy(); + expect(this.props.data!.todo_list).toBeFalsy(); + break; + case 1: + expect(this.props.data!.loading).toBeFalsy(); expect(this.props.data!.todo_list).toEqual({ - id: '123', - title: 'how to apollo', - tasks: [] + id: "123", + title: "how to apollo", + tasks: [], }); - } - break; - case 2: - expect(this.props.data!.loading).toBeFalsy(); - expect(queryMountCount).toBe(1); - expect(this.props.data!.todo_list).toEqual({ - id: '123', - title: 'how to apollo', - tasks: [ - { - id: '99', - text: 'This one was created with a mutation.', - completed: true - } - ] - }); - break; - case 3: - expect(this.props.data!.todo_list).toEqual({ - id: '123', - title: 'how to apollo', - tasks: [ - { - id: '99', - text: 'This one was created with a mutation.', - completed: true - }, - { - id: '99', - text: 'This one was created with a mutation.', - completed: true - } - ] - }); - break; - default: + break; + case 2: + 
expect(this.props.data!.loading).toBeFalsy(); + expect(queryMountCount).toBe(1); + expect(this.props.data!.todo_list).toEqual({ + id: "123", + title: "how to apollo", + tasks: [ + { + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, + ], + }); + break; + case 3: + expect(this.props.data!.loading).toBeFalsy(); + expect(queryMountCount).toBe(1); + expect(this.props.data!.todo_list).toEqual({ + id: "123", + title: "how to apollo", + tasks: [ + { + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, + { + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, + ], + }); + break; + default: + throw new Error("too many rerenders"); + } + } catch (e) { + testFailures.push(e); } queryRenderCount += 1; @@ -209,35 +215,47 @@ describe('graphql(mutation) update queries', () => { </ApolloProvider> ); - setTimeout(() => { - mutate(); + let resolveLastTimeout: () => void; + const allTimeoutsFinished = new Promise<void>((r) => { + resolveLastTimeout = r; + }); - setTimeout(() => { - if (IS_REACT_18) { - expect(queryUnmountCount).toBe(1); - } else { - expect(queryUnmountCount).toBe(0); + const catchingSetTimeout = (cb: (args: void) => void, ms: number) => { + return setTimeout(() => { + try { + cb(); + } catch (e) { + testFailures.push(e); } + }, ms); + }; + + catchingSetTimeout(() => { + mutate(); + + catchingSetTimeout(() => { + expect(queryUnmountCount).toBe(0); query1Unmount(); expect(queryUnmountCount).toBe(1); - setTimeout(() => { + catchingSetTimeout(() => { mutate(); - setTimeout(() => { + catchingSetTimeout(() => { const { unmount: query2Unmount } = render( <ApolloProvider client={client}> <MyQuery /> </ApolloProvider> ); - setTimeout(() => { + catchingSetTimeout(() => { mutationUnmount(); query2Unmount(); expect(todoUpdateQueryCount).toBe(2); expect(queryMountCount).toBe(2); expect(queryUnmountCount).toBe(2); + resolveLastTimeout!(); }, 5); }, 5); }, 5); @@ -245,13 +263,15 @@ describe('graphql(mutation) update queries', () => { }, 5); await waitFor(() => { - if (!IS_REACT_18) { - expect(queryRenderCount).toBe(4); - } + expect(queryRenderCount).toBe(4); }); + await allTimeoutsFinished; + if (testFailures.length > 0) { + throw testFailures[0]; + } }); - it('will run `refetchQueries` for a recycled queries', async () => { + it("will run `refetchQueries` for a recycled queries", async () => { const mutation: DocumentNode = gql` mutation createTodo { createTodo { @@ -264,10 +284,10 @@ describe('graphql(mutation) update queries', () => { const mutationData = { createTodo: { - id: '99', - text: 'This one was created with a mutation.', - completed: true - } + id: "99", + text: "This one was created with a mutation.", + completed: true, + }, }; type MutationData = typeof mutationData; @@ -299,28 +319,28 @@ describe('graphql(mutation) update queries', () => { } const data = { - todo_list: { id: '123', title: 'how to apollo', tasks: [] } + todo_list: { id: "123", title: "how to apollo", tasks: [] }, }; const updatedData = { todo_list: { - id: '123', - title: 'how to apollo', - tasks: [mutationData.createTodo] - } + id: "123", + title: "how to apollo", + tasks: [mutationData.createTodo], + }, }; const link = mockSingleLink( - { request: { query, variables: { id: '123' } }, result: { data } }, + { request: { query, variables: { id: "123" } }, result: { data } }, { request: { query: mutation }, result: { data: mutationData } }, { - request: { query, variables: { id: '123' } }, - result: { data: updatedData } + request: 
{ query, variables: { id: "123" } }, + result: { data: updatedData }, } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let mutate: MutationFunction<MutationData>; @@ -357,23 +377,19 @@ describe('graphql(mutation) update queries', () => { case 1: expect(this.props.data!.loading).toBeFalsy(); expect(this.props.data!.todo_list).toEqual({ - id: '123', - title: 'how to apollo', - tasks: [] + id: "123", + title: "how to apollo", + tasks: [], }); break; case 2: expect(this.props.data!.loading).toBeFalsy(); expect(queryMountCount).toBe(1); - expect(this.props.data!.todo_list).toEqual( - updatedData.todo_list - ); + expect(this.props.data!.todo_list).toEqual(updatedData.todo_list); break; case 3: expect(this.props.data!.loading).toBeFalsy(); - expect(this.props.data!.todo_list).toEqual( - updatedData.todo_list - ); + expect(this.props.data!.todo_list).toEqual(updatedData.todo_list); break; default: } @@ -396,7 +412,7 @@ describe('graphql(mutation) update queries', () => { ); setTimeout(() => { - mutate({ refetchQueries: [{ query, variables: { id: '123' } }] }).then( + mutate({ refetchQueries: [{ query, variables: { id: "123" } }] }).then( () => { setTimeout(() => { render( diff --git a/src/react/hoc/__tests__/queries/api.test.tsx b/src/react/hoc/__tests__/queries/api.test.tsx --- a/src/react/hoc/__tests__/queries/api.test.tsx +++ b/src/react/hoc/__tests__/queries/api.test.tsx @@ -1,16 +1,16 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -describe('[queries] api', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("[queries] api", () => { const consoleWarn = console.warn; beforeAll(() => { console.warn = () => null; @@ -20,7 +20,7 @@ describe('[queries] api', () => { console.warn = consoleWarn; }); - itAsync('exposes refetch as part of the props api', (resolve, reject) => { + itAsync("exposes refetch as part of the props api", (resolve, reject) => { const query: DocumentNode = gql` query people($first: Int) { allPeople(first: $first) { @@ -31,7 +31,7 @@ describe('[queries] api', () => { } `; const variables = { first: 1 }; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const link = mockSingleLink( { request: { query, variables }, result: { data: data1 } }, { request: { query, variables }, result: { data: data1 } }, @@ -39,7 +39,7 @@ describe('[queries] api', () => { ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let hasRefetched = false, @@ -76,9 +76,7 @@ 
describe('[queries] api', () => { .refetch({ first: 2 }) // new variables .then((response: any) => { expect(response.data).toEqual(data1); - expect(data!.allPeople).toEqual( - data1.allPeople - ); + expect(data!.allPeople).toEqual(data1.allPeople); done = true; }); }) @@ -105,51 +103,56 @@ describe('[queries] api', () => { waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); }); - itAsync('exposes subscribeToMore as part of the props api', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "exposes subscribeToMore as part of the props api", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const link = mockSingleLink({ - request: { query }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - // example of loose typing - const Container = graphql(query)( - class extends React.Component<ChildProps> { - render() { - const { data } = this.props; - if (data && !data.loading) { - expect(data!.subscribeToMore).toBeTruthy(); - expect(data!.subscribeToMore instanceof Function).toBeTruthy(); - done = true; + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { allPeople: { people: [{ name: "Luke Skywalker" }] } }, + }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + // example of loose typing + const Container = graphql(query)( + class extends React.Component<ChildProps> { + render() { + const { data } = this.props; + if (data && !data.loading) { + expect(data!.subscribeToMore).toBeTruthy(); + expect(data!.subscribeToMore instanceof Function).toBeTruthy(); + done = true; + } + return null; } - return null; } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + } + ); - itAsync('exposes fetchMore as part of the props api', (resolve, reject) => { + itAsync("exposes fetchMore as part of the props api", (resolve, reject) => { const query: DocumentNode = gql` query people($skip: Int, $first: Int) { allPeople(first: $first, skip: $skip) { @@ -159,8 +162,8 @@ describe('[queries] api', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data1 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data1 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; type Data = typeof data; @@ -175,12 +178,12 @@ describe('[queries] api', () => { ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; const Container = graphql<{}, Data, Variables>(query, { - options: () => ({ variables }) + options: () => ({ variables }), })( class extends React.Component<ChildProps<{}, Data, Variables>> { componentDidUpdate() { @@ -188,16 +191,16 @@ describe('[queries] api', () => { if (count === 0) { expect(props.data!.fetchMore).toBeTruthy(); expect(props.data!.fetchMore instanceof Function).toBeTruthy(); - props.data! 
- .fetchMore({ + props + .data!.fetchMore({ variables: { skip: 2 }, updateQuery: (prev: any, { fetchMoreResult }) => ({ allPeople: { people: prev.allPeople.people.concat( fetchMoreResult!.allPeople.people - ) - } - }) + ), + }, + }), }) .then((result: any) => { expect(result.data.allPeople.people).toEqual( @@ -214,7 +217,7 @@ describe('[queries] api', () => { // This ends the test (passing). setTimeout(() => resolve(), 20); } else { - throw new Error('should not reach this point'); + throw new Error("should not reach this point"); } count++; } @@ -234,100 +237,101 @@ describe('[queries] api', () => { ); }); - itAsync('reruns props function after query results change via fetchMore', (resolve, reject) => { - const query: DocumentNode = gql` - query people($cursor: Int) { - allPeople(cursor: $cursor) { - cursor - people { - name + itAsync( + "reruns props function after query results change via fetchMore", + (resolve, reject) => { + const query: DocumentNode = gql` + query people($cursor: Int) { + allPeople(cursor: $cursor) { + cursor + people { + name + } } } - } - `; - const vars1 = { cursor: undefined }; - const data1 = { - allPeople: { cursor: 1, people: [{ name: 'Luke Skywalker' }] } - }; - const vars2 = { cursor: 1 }; - const data2 = { - allPeople: { cursor: 2, people: [{ name: 'Leia Skywalker' }] } - }; - - type Data = typeof data1; - type Variables = { cursor: number | undefined }; - - const link = mockSingleLink( - { request: { query, variables: vars1 }, result: { data: data1 } }, - { request: { query, variables: vars2 }, result: { data: data2 } } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - let isUpdated = false; - - type FinalProps = { - loading: boolean; - people?: { name: string }[]; - getMorePeople?: () => void; - }; - - let done = false; - const Container = graphql<{}, Data, Variables, FinalProps>(query, { - props({ data }) { - const { loading, allPeople, fetchMore } = data!; - - if (loading) return { loading }; - const { cursor, people } = allPeople!; - return { - loading: false, - people, - getMorePeople: () => - fetchMore({ - variables: { cursor }, - updateQuery(prev, { fetchMoreResult }) { - const { - allPeople: { cursor, people } - } = fetchMoreResult!; - return { - allPeople: { - cursor, - people: [...people, ...prev.allPeople.people] - } - }; + `; + const vars1 = { cursor: undefined }; + const data1 = { + allPeople: { cursor: 1, people: [{ name: "Luke Skywalker" }] }, + }; + const vars2 = { cursor: 1 }; + const data2 = { + allPeople: { cursor: 2, people: [{ name: "Leia Skywalker" }] }, + }; + + type Data = typeof data1; + type Variables = { cursor: number | undefined }; + + const link = mockSingleLink( + { request: { query, variables: vars1 }, result: { data: data1 } }, + { request: { query, variables: vars2 }, result: { data: data2 } } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let isUpdated = false; + + type FinalProps = { + loading: boolean; + people?: { name: string }[]; + getMorePeople?: () => void; + }; + + let done = false; + const Container = graphql<{}, Data, Variables, FinalProps>(query, { + props({ data }) { + const { loading, allPeople, fetchMore } = data!; + + if (loading) return { loading }; + const { cursor, people } = allPeople!; + return { + loading: false, + people, + getMorePeople: () => + fetchMore({ + variables: { cursor }, + updateQuery(prev, { fetchMoreResult }) { + const { + allPeople: { cursor, people }, + } = fetchMoreResult!; + 
return { + allPeople: { + cursor, + people: [...people, ...prev.allPeople.people], + }, + }; + }, + }), + }; + }, + })( + class extends React.Component<FinalProps> { + render() { + if (!this.props.loading) { + if (isUpdated) { + expect(this.props.people!.length).toBe(2); + done = true; + } else { + isUpdated = true; + expect(this.props.people).toEqual(data1.allPeople.people); + this.props.getMorePeople!(); } - }) - }; - } - })( - class extends React.Component<FinalProps> { - render() { - if (!this.props.loading) { - if (isUpdated) { - expect(this.props.people!.length).toBe(2); - done = true; - } else { - isUpdated = true; - expect(this.props.people).toEqual( - data1.allPeople.people - ); - this.props.getMorePeople!(); } - } - return null; + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => expect(done).toBe(true)).then(resolve, reject); - }); + waitFor(() => expect(done).toBe(true)).then(resolve, reject); + } + ); }); diff --git a/src/react/hoc/__tests__/queries/errors.test.tsx b/src/react/hoc/__tests__/queries/errors.test.tsx --- a/src/react/hoc/__tests__/queries/errors.test.tsx +++ b/src/react/hoc/__tests__/queries/errors.test.tsx @@ -1,21 +1,19 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { withState } from './recomposeWithState'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { QueryResult } from '../../../types/types'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { Query } from '../../../components/Query'; -import { graphql } from '../../graphql'; -import { ChildProps, DataValue } from '../../types'; - -const IS_REACT_18 = React.version.startsWith('18'); - -describe('[queries] errors', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { withState } from "./recomposeWithState"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { QueryResult } from "../../../types/types"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { Query } from "../../../components/Query"; +import { graphql } from "../../graphql"; +import { ChildProps, DataValue } from "../../types"; + +describe("[queries] errors", () => { let error: typeof console.error; beforeEach(() => { error = console.error; @@ -26,7 +24,7 @@ describe('[queries] errors', () => { }); // errors - itAsync('does not swallow children errors', (resolve, reject) => { + itAsync("does not swallow children errors", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query people { @@ -37,14 +35,14 @@ describe('[queries] errors', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); class 
ErrorBoundary extends React.Component<React.PropsWithChildren> { @@ -77,7 +75,7 @@ describe('[queries] errors', () => { }).then(resolve, reject); }); - it('can unmount without error', () => { + it("can unmount without error", () => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -87,14 +85,14 @@ describe('[queries] errors', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const ContainerWithData = graphql(query)(() => null); @@ -112,8 +110,8 @@ describe('[queries] errors', () => { } }); - itAsync('passes any GraphQL errors in props', (resolve, reject) => { - let done = false + itAsync("passes any GraphQL errors in props", (resolve, reject) => { + let done = false; const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -125,11 +123,11 @@ describe('[queries] errors', () => { `; const link = mockSingleLink({ request: { query }, - error: new Error('boo') + error: new Error("boo"), }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const ErrorContainer = graphql(query)( @@ -138,7 +136,7 @@ describe('[queries] errors', () => { const { data } = this.props; expect(data!.error).toBeTruthy(); expect(data!.error!.networkError).toBeTruthy(); - done = true + done = true; } render() { return null; @@ -157,7 +155,7 @@ describe('[queries] errors', () => { }).then(resolve, reject); }); - describe('uncaught exceptions', () => { + describe("uncaught exceptions", () => { const consoleWarn = console.warn; beforeAll(() => { console.warn = () => null; @@ -173,13 +171,13 @@ describe('[queries] errors', () => { } beforeEach(() => { unhandled = []; - process.on('unhandledRejection', handle); + process.on("unhandledRejection", handle); }); afterEach(() => { - process.removeListener('unhandledRejection', handle); + process.removeListener("unhandledRejection", handle); }); - it('does not log when you change variables resulting in an error', async () => { + it("does not log when you change variables resulting in an error", async () => { const query: DocumentNode = gql` query people($var: Int) { allPeople(first: $var) { @@ -190,21 +188,21 @@ describe('[queries] errors', () => { } `; const var1 = { var: 1 }; - const data = { allPeople: { people: { name: 'Luke Skywalker' } } }; + const data = { allPeople: { people: { name: "Luke Skywalker" } } }; const var2 = { var: 2 }; const link = mockSingleLink( { request: { query, variables: var1 }, - result: { data } + result: { data }, }, { request: { query, variables: var2 }, - error: new Error('boo') + error: new Error("boo"), } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); type Data = typeof data; @@ -217,7 +215,11 @@ describe('[queries] errors', () => { let iteration = 0; let done = false; - const ErrorContainer = withState('var', 'setVar', 1)( + const ErrorContainer = withState( + "var", + "setVar", + 1 + )( graphql<Props, Data, Vars>(query)( class extends React.Component<ChildProps<Props, Data, Vars>> { componentDidUpdate() { @@ -260,14 +262,14 @@ describe('[queries] errors', () => { await waitFor(() => { expect(iteration).toBe(3); - 
}) + }); await waitFor(() => { expect(done).toBeTruthy(); }); }); }); - it('will not log a warning when there is an error that is not caught in the render method when using query', () => + it("will not log a warning when there is an error that is not caught in the render method when using query", () => new Promise<void>((resolve, reject) => { const query: DocumentNode = gql` query people { @@ -287,11 +289,11 @@ describe('[queries] errors', () => { const link = mockSingleLink({ request: { query }, - error: new Error('oops') + error: new Error("oops"), }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const origError = console.error; @@ -315,7 +317,7 @@ describe('[queries] errors', () => { expect(errorMock.mock.calls.length).toBe(0); break; default: - throw new Error('Too many renders.'); + throw new Error("Too many renders."); } } catch (error) { reject(error); @@ -337,91 +339,80 @@ describe('[queries] errors', () => { }).then(resolve, reject); })); - itAsync('passes any cached data when there is a GraphQL error', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "passes any cached data when there is a GraphQL error", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - type Data = typeof data; - const link = mockSingleLink( - { request: { query }, result: { data } }, - { request: { query }, error: new Error('No Network Connection') } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + type Data = typeof data; + const link = mockSingleLink( + { request: { query }, result: { data } }, + { request: { query }, error: new Error("No Network Connection") } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let count = 0; - const Container = graphql<{}, Data>(query, { - options: { notifyOnNetworkStatusChange: true } - })( - class extends React.Component<ChildProps<{}, Data>> { - componentDidUpdate() { - const { props } = this; - try { - switch (count++) { - case 0: - expect(props.data!.allPeople).toEqual( - data.allPeople - ); - setTimeout(() => { - props.data!.refetch().catch(() => null); - }); - break; - case 1: - if (IS_REACT_18) { - expect(props.data!.loading).toBeFalsy(); - } else { + let count = 0; + const Container = graphql<{}, Data>(query, { + options: { notifyOnNetworkStatusChange: true }, + })( + class extends React.Component<ChildProps<{}, Data>> { + componentDidUpdate() { + const { props } = this; + try { + switch (count++) { + case 0: + expect(props.data!.allPeople).toEqual(data.allPeople); + setTimeout(() => { + props.data!.refetch().catch(() => null); + }); + break; + case 1: expect(props.data!.loading).toBeTruthy(); - } - expect(props.data!.allPeople).toEqual( - data.allPeople - ); - break; - case 2: - expect(props.data!.loading).toBeFalsy(); - expect(props.data!.error).toBeTruthy(); - expect(props.data!.allPeople).toEqual( - data.allPeople - ); - break; - default: - throw new Error('Unexpected fall through'); + expect(props.data!.allPeople).toEqual(data.allPeople); + break; + case 2: + expect(props.data!.loading).toBeFalsy(); + expect(props.data!.error).toBeTruthy(); + 
expect(props.data!.allPeople).toEqual(data.allPeople); + break; + default: + throw new Error("Unexpected fall through"); + } + } catch (e) { + reject(e); } - } catch (e) { - reject(e); } - } - render() { - return null; + render() { + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(2); - } else { - expect(count).toBe(3) - } - }).then(resolve, reject); - }); + waitFor(() => { + expect(count).toBe(3); + }).then(resolve, reject); + } + ); - itAsync('can refetch after there was a network error', (resolve, reject) => { + itAsync("can refetch after there was a network error", (resolve, reject) => { const query: DocumentNode = gql` query somethingelse { allPeople(first: 1) { @@ -431,24 +422,24 @@ describe('[queries] errors', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const dataTwo = { allPeople: { people: [{ name: 'Princess Leia' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const dataTwo = { allPeople: { people: [{ name: "Princess Leia" }] } }; type Data = typeof data; const link = mockSingleLink( { request: { query }, result: { data } }, - { request: { query }, error: new Error('This is an error!') }, + { request: { query }, error: new Error("This is an error!") }, { request: { query }, result: { data: dataTwo } } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; const noop = () => null; const Container = graphql<{}, Data>(query, { - options: { notifyOnNetworkStatusChange: true } + options: { notifyOnNetworkStatusChange: true }, })( class extends React.Component<ChildProps<{}, Data>> { componentDidUpdate() { @@ -459,16 +450,12 @@ describe('[queries] errors', () => { props .data!.refetch() .then(() => { - reject('Expected error value on first refetch.'); + reject("Expected error value on first refetch."); }) .catch(noop); break; case 1: - if (IS_REACT_18) { - expect(props.data!.loading).toBeFalsy(); - } else { - expect(props.data!.loading).toBeTruthy(); - } + expect(props.data!.loading).toBeTruthy(); break; case 2: expect(props.data!.loading).toBeFalsy(); @@ -477,7 +464,7 @@ describe('[queries] errors', () => { .data!.refetch() .then(noop) .catch(() => { - reject('Expected good data on second refetch.'); + reject("Expected good data on second refetch."); }); break; case 3: @@ -486,12 +473,10 @@ describe('[queries] errors', () => { case 4: expect(props.data!.loading).toBeFalsy(); expect(props.data!.error).toBeFalsy(); - expect(props.data!.allPeople).toEqual( - dataTwo.allPeople - ); + expect(props.data!.allPeople).toEqual(dataTwo.allPeople); break; default: - throw new Error('Unexpected fall through'); + throw new Error("Unexpected fall through"); } } catch (e) { reject(e); @@ -511,232 +496,139 @@ describe('[queries] errors', () => { ); waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(2) - } else { - expect(count).toBe(5) - } + expect(count).toBe(5); }).then(resolve, reject); }); - itAsync('does not throw/console.err an error after a component that received a network error is unmounted', (resolve, reject) => { - const query: DocumentNode = gql` - query somethingelse { - allPeople(first: 1) { - people { - name + itAsync( + "does not throw/console.err an error after a component that received a 
network error is unmounted", + (resolve, reject) => { + const query: DocumentNode = gql` + query somethingelse { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; - type Data = typeof data; - const link = mockSingleLink( - { request: { query }, result: { data } }, - { request: { query }, error: new Error('This is an error!') } - ); + type Data = typeof data; + const link = mockSingleLink( + { request: { query }, result: { data } }, + { request: { query }, error: new Error("This is an error!") } + ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - let count = 0; - const noop = () => null; + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + let count = 0; + const noop = () => null; - interface ContainerOwnProps { - hideContainer: Function; - } + interface ContainerOwnProps { + hideContainer: Function; + } - interface QueryChildProps { - data: DataValue<Data>; - hideContainer: Function; - } + interface QueryChildProps { + data: DataValue<Data>; + hideContainer: Function; + } - let done = false; - const Container = graphql<ContainerOwnProps, Data, {}, QueryChildProps>( - query, - { - options: { notifyOnNetworkStatusChange: true }, - props: something => { - return { - data: something.data!, - hideContainer: something!.ownProps.hideContainer - }; + let done = false; + const Container = graphql<ContainerOwnProps, Data, {}, QueryChildProps>( + query, + { + options: { notifyOnNetworkStatusChange: true }, + props: (something) => { + return { + data: something.data!, + hideContainer: something!.ownProps.hideContainer, + }; + }, } - } - )( - class extends React.Component<ChildProps<QueryChildProps, Data>> { - componentDidUpdate() { - const { props } = this; - try { - switch (count++) { - case 0: - props - .data!.refetch() - .then(() => { - reject('Expected error value on first refetch.'); - }) - .catch(noop); - break; - case 2: - expect(props.data!.loading).toBeFalsy(); - expect(props.data!.error).toBeTruthy(); - const origError = console.error; - const errorMock = jest.fn(); - console.error = errorMock; - props.hideContainer(); - setTimeout(() => { - expect(errorMock.mock.calls.length).toEqual(0); - console.error = origError; - done = true; - }, 100); - break; - default: - if (count < 2) { - throw new Error('Unexpected fall through'); - } + )( + class extends React.Component<ChildProps<QueryChildProps, Data>> { + componentDidUpdate() { + const { props } = this; + try { + switch (count++) { + case 0: + props + .data!.refetch() + .then(() => { + reject("Expected error value on first refetch."); + }) + .catch(noop); + break; + case 2: + expect(props.data!.loading).toBeFalsy(); + expect(props.data!.error).toBeTruthy(); + const origError = console.error; + const errorMock = jest.fn(); + console.error = errorMock; + props.hideContainer(); + setTimeout(() => { + expect(errorMock.mock.calls.length).toEqual(0); + console.error = origError; + done = true; + }, 100); + break; + default: + if (count < 2) { + throw new Error("Unexpected fall through"); + } + } + } catch (err) { + reject(err); } - } catch (err) { - reject(err); } - } - render() { - return null; - } - } - ); - - class Switcher extends React.Component<any, any> { - constructor(props: any) { - super(props); - this.state = { - showContainer: true - }; - } - render() { - const { - state: { showContainer } - 
} = this; - if (showContainer) { - return ( - <Container - hideContainer={() => this.setState({ showContainer: false })} - /> - ); - } - return null; - } - } - - render( - <ApolloProvider client={client}> - <Switcher /> - </ApolloProvider> - ); - - waitFor(() => { - if (!IS_REACT_18) { - expect(done).toBeTruthy() - } - }).then(resolve, reject); - }); - - itAsync('correctly sets loading state on remount after a network error', (resolve, reject) => { - const query: DocumentNode = gql` - query somethingelse { - allPeople(first: 1) { - people { - name + render() { + return null; } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const dataTwo = { allPeople: { people: [{ name: 'Princess Leia' }] } }; - - type Data = typeof data; - const link = mockSingleLink( - { request: { query }, error: new Error('This is an error!') }, - { request: { query }, result: { data: dataTwo } } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + ); - let count = 0; - type ContainerOwnProps = { toggle: () => void }; - const Container = graphql<ContainerOwnProps, Data>(query, { - options: { notifyOnNetworkStatusChange: true } - })( - class extends React.Component<ChildProps<ContainerOwnProps, Data>> { + class Switcher extends React.Component<any, any> { + constructor(props: any) { + super(props); + this.state = { + showContainer: true, + }; + } render() { - switch (count) { - case 0: - expect(this.props.data!.loading).toBe(true); - break; - case 1: - expect(this.props.data!.loading).toBe(false); - expect(this.props.data!.error!.networkError!.message).toMatch( - /This is an error/ - ); - // unmount this component - setTimeout(() => { - this.props.toggle(); - }, 0); - setTimeout(() => { - // remount after 50 ms - this.props.toggle(); - }, 50); - break; - case 2: - expect(this.props.data!.loading).toBe(true); - break; - case 3: - expect(this.props.data!.loading).toBe(false); - expect(this.props.data!.allPeople).toEqual(dataTwo.allPeople); - break; - default: - throw new Error('Too many renders.'); + const { + state: { showContainer }, + } = this; + if (showContainer) { + return ( + <Container + hideContainer={() => this.setState({ showContainer: false })} + /> + ); } - count += 1; - return null; } } - ); - type Toggle = () => void; - type OwnProps = { children: (toggle: Toggle) => any }; - class Manager extends React.Component<OwnProps, { show: boolean }> { - constructor(props: any) { - super(props); - this.state = { show: true }; - } - render() { - if (!this.state.show) return null; - // eslint-disable-next-line testing-library/no-node-access - return this.props.children(() => - this.setState(({ show }) => ({ show: !show })) - ); - } - } - - render( - <ApolloProvider client={client}> - <Manager>{(toggle: Toggle) => <Container toggle={toggle} />}</Manager> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Switcher /> + </ApolloProvider> + ); - waitFor(() => expect(count).toBe(4)).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBeTruthy(); + }).then(resolve, reject); + } + ); - describe('errorPolicy', () => { - itAsync('passes any GraphQL errors in props along with data', (resolve, reject) => { - let done = false; + itAsync( + "correctly sets loading state on remount after a network error", + (resolve, reject) => { const query: DocumentNode = gql` - query people { + query somethingelse { allPeople(first: 1) { people { name @@ -744,107 +636,206 @@ describe('[queries] errors', () => { } } `; - const 
link = mockSingleLink({ - request: { query }, - result: { - data: { - allPeople: { - people: null - } - }, - errors: [new Error('this is an error')] - } - }); + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const dataTwo = { allPeople: { people: [{ name: "Princess Leia" }] } }; + type Data = typeof data; + const link = mockSingleLink( + { request: { query }, error: new Error("This is an error!") }, + { request: { query }, result: { data: dataTwo } } + ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); - const ErrorContainer = graphql(query, { - options: { errorPolicy: 'all' } + let count = 0; + type ContainerOwnProps = { toggle: () => void }; + const Container = graphql<ContainerOwnProps, Data>(query, { + options: { notifyOnNetworkStatusChange: true }, })( - class extends React.Component<ChildProps> { - componentDidUpdate() { - const { data } = this.props; - expect(data!.error).toBeTruthy(); - expect(data!.error!.graphQLErrors[0].message).toEqual( - 'this is an error' - ); - expect(data).toMatchObject({ allPeople: { people: null } }); - done = true; - } + class extends React.Component<ChildProps<ContainerOwnProps, Data>> { render() { + switch (count) { + case 0: + expect(this.props.data!.loading).toBe(true); + break; + case 1: + expect(this.props.data!.loading).toBe(false); + expect(this.props.data!.error!.networkError!.message).toMatch( + /This is an error/ + ); + // unmount this component + setTimeout(() => { + this.props.toggle(); + }, 0); + setTimeout(() => { + // remount after 50 ms + this.props.toggle(); + }, 50); + break; + case 2: + expect(this.props.data!.loading).toBe(true); + break; + case 3: + expect(this.props.data!.loading).toBe(false); + expect(this.props.data!.allPeople).toEqual(dataTwo.allPeople); + break; + default: + throw new Error("Too many renders."); + } + count += 1; + return null; } } ); + type Toggle = () => void; + type OwnProps = { children: (toggle: Toggle) => any }; + class Manager extends React.Component<OwnProps, { show: boolean }> { + constructor(props: any) { + super(props); + this.state = { show: true }; + } + render() { + if (!this.state.show) return null; + // eslint-disable-next-line testing-library/no-node-access + return this.props.children(() => + this.setState(({ show }) => ({ show: !show })) + ); + } + } + render( <ApolloProvider client={client}> - <ErrorContainer /> + <Manager>{(toggle: Toggle) => <Container toggle={toggle} />}</Manager> </ApolloProvider> ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => expect(count).toBe(4)).then(resolve, reject); + } + ); + + describe("errorPolicy", () => { + itAsync( + "passes any GraphQL errors in props along with data", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } + } + } + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { + allPeople: { + people: null, + }, + }, + errors: [new Error("this is an error")], + }, + }); - itAsync('passes any GraphQL errors in props along with data [component]', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const ErrorContainer = graphql(query, { + options: { errorPolicy: "all" }, + })( + class extends 
React.Component<ChildProps> { + componentDidUpdate() { + const { data } = this.props; + expect(data!.error).toBeTruthy(); + expect(data!.error!.graphQLErrors[0].message).toEqual( + "this is an error" + ); + expect(data).toMatchObject({ allPeople: { people: null } }); + done = true; + } + render() { + return null; } } - } - `; - const link = mockSingleLink({ - request: { query }, - result: { - data: { - allPeople: { - people: null + ); + + render( + <ApolloProvider client={client}> + <ErrorContainer /> + </ApolloProvider> + ); + + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); + + itAsync( + "passes any GraphQL errors in props along with data [component]", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } + } + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { + allPeople: { + people: null, + }, + }, + errors: [new Error("this is an error")], }, - errors: [new Error('this is an error')] - } - }); + }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - class ErrorContainer extends React.Component<QueryResult> { - componentDidUpdate() { - const { props } = this; - expect(props.error).toBeTruthy(); - expect(props.error!.graphQLErrors[0].message).toEqual( - 'this is an error' - ); - expect(props.data!.allPeople!).toMatchObject({ people: null }); - done = true; - } - render() { - return null; + class ErrorContainer extends React.Component<QueryResult> { + componentDidUpdate() { + const { props } = this; + expect(props.error).toBeTruthy(); + expect(props.error!.graphQLErrors[0].message).toEqual( + "this is an error" + ); + expect(props.data!.allPeople!).toMatchObject({ people: null }); + done = true; + } + render() { + return null; + } } - } - render( - <ApolloProvider client={client}> - <Query query={query} errorPolicy="all"> - {(props: any) => <ErrorContainer {...props} />} - </Query> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Query query={query} errorPolicy="all"> + {(props: any) => <ErrorContainer {...props} />} + </Query> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); }); }); diff --git a/src/react/hoc/__tests__/queries/index.test.tsx b/src/react/hoc/__tests__/queries/index.test.tsx --- a/src/react/hoc/__tests__/queries/index.test.tsx +++ b/src/react/hoc/__tests__/queries/index.test.tsx @@ -1,18 +1,18 @@ -import React from 'react'; -import PropTypes from 'prop-types'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloLink } from '../../../../link/core'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps, DataProps } from '../../types'; - -describe('queries', () => { +import React from "react"; +import PropTypes from "prop-types"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from 
"../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { ApolloLink } from "../../../../link/core"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps, DataProps } from "../../types"; + +describe("queries", () => { let error: typeof console.error; beforeEach(() => { error = console.error; @@ -24,7 +24,7 @@ describe('queries', () => { }); // general api - it('binds a query to props', async() => { + it("binds a query to props", async () => { let done = false; const query: DocumentNode = gql` query people { @@ -37,11 +37,11 @@ describe('queries', () => { `; const link = mockSingleLink({ request: { query }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } + result: { data: { allPeople: { people: [{ name: "Luke Skywalker" }] } } }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Data { @@ -50,13 +50,13 @@ describe('queries', () => { }; } - const ContainerWithData = graphql<any, Data>(query)( - ({ data }: DataProps<Data>) => { - expect(data).toBeTruthy(); - done = true; - return null; - } - ); + const ContainerWithData = graphql<any, Data>(query)(({ + data, + }: DataProps<Data>) => { + expect(data).toBeTruthy(); + done = true; + return null; + }); render( <ApolloProvider client={client}> @@ -69,7 +69,7 @@ describe('queries', () => { }); }); - itAsync('includes the variables in the props', (resolve, reject) => { + itAsync("includes the variables in the props", (resolve, reject) => { const TIME_SCALE = 5000; let renderCount = 0; const query: DocumentNode = gql` @@ -85,11 +85,11 @@ describe('queries', () => { const variables = { first: 1 }; const link = mockSingleLink({ request: { query, variables }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } + result: { data: { allPeople: { people: [{ name: "Luke Skywalker" }] } } }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); // Ensure variable types work correctly here @@ -106,14 +106,14 @@ describe('queries', () => { first: number; } - const ContainerWithData = graphql<Variables, Data, Variables>(query)( - ({ data }: ChildProps<Variables, Data, Variables>) => { - expect(data).toBeTruthy(); - expect(data!.variables).toEqual(variables); - renderCount += 1; - return null; - } - ); + const ContainerWithData = graphql<Variables, Data, Variables>(query)(({ + data, + }: ChildProps<Variables, Data, Variables>) => { + expect(data).toBeTruthy(); + expect(data!.variables).toEqual(variables); + renderCount += 1; + return null; + }); render( <ApolloProvider client={client}> @@ -121,96 +121,104 @@ describe('queries', () => { </ApolloProvider> ); - waitFor(() => { - expect(renderCount).toBe(2); - }, {timeout: TIME_SCALE}).then(resolve, reject); + waitFor( + () => { + expect(renderCount).toBe(2); + }, + { timeout: TIME_SCALE } + ).then(resolve, reject); }); - itAsync('should update query variables when props change', (resolve, reject) => { - const query: DocumentNode = gql` - query people($someId: ID) { - allPeople(someId: $someId) { - people { - name + itAsync( + "should update query variables when props change", + (resolve, reject) => { + const query: DocumentNode = gql` + query people($someId: ID) { + allPeople(someId: $someId) { + people { + name + } } } - } - `; + `; - 
const link = mockSingleLink( - { - request: { query, variables: { someId: 1 } }, - result: { - data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } + const link = mockSingleLink( + { + request: { query, variables: { someId: 1 } }, + result: { + data: { allPeople: { people: [{ name: "Luke Skywalker" }] } }, + }, + }, + { + request: { query, variables: { someId: 2 } }, + result: { + data: { allPeople: { people: [{ name: "Darth Vader" }] } }, + }, } - }, - { - request: { query, variables: { someId: 2 } }, - result: { data: { allPeople: { people: [{ name: 'Darth Vader' }] } } } - } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - interface Data { - allPeople: { - people: { - name: string; + interface Data { + allPeople: { + people: { + name: string; + }; }; - }; - } + } - interface Variables { - someId: number; - } + interface Variables { + someId: number; + } - const options = { - options: {} - }; + const options = { + options: {}, + }; - let count = 0; - const ContainerWithData = graphql<Variables, Data, Variables>( - query, - options - )(({ data }: ChildProps<Variables, Data, Variables>) => { - expect(data).toBeTruthy(); - switch (++count) { - case 1: - expect(data!.loading).toBe(true); - expect(data!.variables).toEqual({ someId: 1 }); - break; - case 2: - expect(data!.loading).toBe(true); - expect(data!.variables).toEqual({ someId: 2 }); - break; - case 3: - expect(data!.loading).toBe(false); - expect(data!.variables).toEqual({ someId: 2 }); - break; - default: - reject(`too many renders (${count})`); - } + let count = 0; + const ContainerWithData = graphql<Variables, Data, Variables>( + query, + options + )(({ data }: ChildProps<Variables, Data, Variables>) => { + expect(data).toBeTruthy(); + switch (++count) { + case 1: + expect(data!.loading).toBe(true); + expect(data!.variables).toEqual({ someId: 1 }); + break; + case 2: + expect(data!.loading).toBe(true); + expect(data!.variables).toEqual({ someId: 2 }); + break; + case 3: + expect(data!.loading).toBe(false); + expect(data!.variables).toEqual({ someId: 2 }); + break; + default: + reject(`too many renders (${count})`); + } - return null; - }); + return null; + }); - const { rerender } = render( - <ApolloProvider client={client}> - <ContainerWithData someId={1} /> - </ApolloProvider> - ); - rerender( - <ApolloProvider client={client}> - <ContainerWithData someId={2} /> - </ApolloProvider> - ); + const { rerender } = render( + <ApolloProvider client={client}> + <ContainerWithData someId={1} /> + </ApolloProvider> + ); + rerender( + <ApolloProvider client={client}> + <ContainerWithData someId={2} /> + </ApolloProvider> + ); - waitFor(() => { - expect(count).toBe(3); - }).then(resolve, reject); - }); + waitFor(() => { + expect(count).toBe(3); + }).then(resolve, reject); + } + ); it("shouldn't warn about fragments", () => { const oldWarn = console.warn; @@ -218,20 +226,18 @@ describe('queries', () => { console.warn = (str: any) => warnings.push(str); try { - graphql( - gql` - query foo { - bar - } - ` - ); + graphql(gql` + query foo { + bar + } + `); expect(warnings.length).toEqual(0); } finally { console.warn = oldWarn; } }); - itAsync('executes a query', (resolve, reject) => { + itAsync("executes a query", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query people { @@ -242,16 +248,16 @@ describe('queries', () => { } } `; - const data = { allPeople: { 
people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<{}, Data>(query)( @@ -279,7 +285,7 @@ describe('queries', () => { }).then(resolve, reject); }); - itAsync('executes a query with two root fields', (resolve, reject) => { + itAsync("executes a query with two root fields", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query people { @@ -296,18 +302,18 @@ describe('queries', () => { } `; const data = { - allPeople: { people: [{ name: 'Luke Skywalker' }] }, - otherPeople: { people: [{ name: 'Luke Skywalker' }] } + allPeople: { people: [{ name: "Luke Skywalker" }] }, + otherPeople: { people: [{ name: "Luke Skywalker" }] }, }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<{}, Data>(query)( @@ -316,9 +322,7 @@ describe('queries', () => { const { props } = this; expect(props.data!.loading).toBeFalsy(); expect(props.data!.allPeople).toEqual(data.allPeople); - expect(props.data!.otherPeople).toEqual( - data.otherPeople - ); + expect(props.data!.otherPeople).toEqual(data.otherPeople); done = true; } render() { @@ -338,7 +342,7 @@ describe('queries', () => { }).then(resolve, reject); }); - itAsync('maps props as variables if they match', (resolve, reject) => { + itAsync("maps props as variables if they match", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query people($first: Int) { @@ -349,7 +353,7 @@ describe('queries', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const variables = { first: 1 }; @@ -357,11 +361,11 @@ describe('queries', () => { const link = mockSingleLink({ request: { query, variables }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Vars, Data, Vars>(query)( @@ -370,9 +374,7 @@ describe('queries', () => { const { props } = this; expect(props.data!.loading).toBeFalsy(); expect(props.data!.allPeople).toEqual(data.allPeople); - expect(props.data!.variables).toEqual( - this.props.data!.variables - ); + expect(props.data!.variables).toEqual(this.props.data!.variables); done = true; } render() { @@ -392,129 +394,135 @@ describe('queries', () => { }).then(resolve, reject); }); - itAsync("doesn't care about the order of variables in a request", (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people($first: Int, $jedi: Boolean) { - allPeople(first: $first, jedi: $jedi) { - people { - name + itAsync( + "doesn't care about the order of variables in a request", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people($first: Int, $jedi: Boolean) { + allPeople(first: $first, jedi: $jedi) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - type Data = typeof data; - 
const variables = { jedi: true, first: 1 }; - type Vars = typeof variables; - - const mocks = [ - { - request: { - query, - variables + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + type Data = typeof data; + const variables = { jedi: true, first: 1 }; + type Vars = typeof variables; + + const mocks = [ + { + request: { + query, + variables, + }, + result: { + data, + }, }, - result: { - data - } - } - ]; - const link = mockSingleLink.apply(null, mocks); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - const options = { - options: { - variables: { - jedi: true, - first: 1 - } - } - }; + ]; + const link = mockSingleLink.apply(null, mocks); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + const options = { + options: { + variables: { + jedi: true, + first: 1, + }, + }, + }; - const Container = graphql<{}, Data, Vars>( - query, - options - )( - class extends React.Component<ChildProps<{}, Data, Vars>> { - componentDidUpdate() { - const { props } = this; - try { - expect(props.data!.loading).toBeFalsy(); - expect(props.data!.allPeople).toEqual(data.allPeople); - done = true; - } catch (error) { - reject(error); + const Container = graphql<{}, Data, Vars>( + query, + options + )( + class extends React.Component<ChildProps<{}, Data, Vars>> { + componentDidUpdate() { + const { props } = this; + try { + expect(props.data!.loading).toBeFalsy(); + expect(props.data!.allPeople).toEqual(data.allPeople); + done = true; + } catch (error) { + reject(error); + } + } + render() { + return null; } } - render() { - return null; - } - } - ); - - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - itAsync('allows falsy values in the mapped variables from props', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people($first: Int) { - allPeople(first: $first) { - people { - name + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); + + itAsync( + "allows falsy values in the mapped variables from props", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - type Data = typeof data; + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + type Data = typeof data; - const variables = { first: null }; - type Vars = typeof variables; + const variables = { first: null }; + type Vars = typeof variables; - const link = mockSingleLink({ - request: { query, variables }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const link = mockSingleLink({ + request: { query, variables }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const Container = graphql<Partial<Vars>, Data, Vars>(query)( - class extends React.Component<ChildProps<Partial<Vars>, Data, Vars>> { - componentDidUpdate() { - const { props } = this; - expect(props.data!.loading).toBeFalsy(); - expect(props.data!.allPeople).toEqual(data.allPeople); - done = true; - } - render() { - return null; + const Container = 
graphql<Partial<Vars>, Data, Vars>(query)( + class extends React.Component<ChildProps<Partial<Vars>, Data, Vars>> { + componentDidUpdate() { + const { props } = this; + expect(props.data!.loading).toBeFalsy(); + expect(props.data!.allPeople).toEqual(data.allPeople); + done = true; + } + render() { + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container first={null} /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container first={null} /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); it("doesn't error on optional required props", () => { const query: DocumentNode = gql` @@ -526,7 +534,7 @@ describe('queries', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const variables = { first: 1 }; @@ -534,11 +542,11 @@ describe('queries', () => { const link = mockSingleLink({ request: { query, variables }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Vars, Data>(query)(() => null); @@ -549,7 +557,7 @@ describe('queries', () => { <Container first={1} /> </ApolloProvider> ); - unmount() + unmount(); } catch (e) { errorCaught = e; } @@ -558,7 +566,7 @@ describe('queries', () => { }); // context - itAsync('allows context through updates', (resolve, reject) => { + itAsync("allows context through updates", (resolve, reject) => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -568,16 +576,16 @@ describe('queries', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<{}, Data>(query)( @@ -594,10 +602,13 @@ describe('queries', () => { } ); - class ContextContainer extends React.Component<React.PropsWithChildren, { color: string }> { + class ContextContainer extends React.Component< + React.PropsWithChildren, + { color: string } + > { constructor(props: {}) { super(props); - this.state = { color: 'purple' }; + this.state = { color: "purple" }; } getChildContext() { @@ -606,7 +617,7 @@ describe('queries', () => { componentDidMount() { setTimeout(() => { - this.setState({ color: 'green' }); + this.setState({ color: "green" }); }, 50); } @@ -617,7 +628,7 @@ describe('queries', () => { } (ContextContainer as any).childContextTypes = { - color: PropTypes.string + color: PropTypes.string, }; let count = 0; @@ -625,9 +636,9 @@ describe('queries', () => { class ChildContextContainer extends React.Component<React.PropsWithChildren> { render() { const { color } = this.context as any; - if (count === 0) expect(color).toBe('purple'); + if (count === 0) expect(color).toBe("purple"); if (count === 1) { - expect(color).toBe('green'); + expect(color).toBe("green"); done = true; } @@ -638,7 +649,7 @@ describe('queries', () => { } (ChildContextContainer as any).contextTypes = { - color: PropTypes.string + color: PropTypes.string, }; render( 
@@ -655,7 +666,7 @@ describe('queries', () => { }); // meta - it('stores the component name', () => { + it("stores the component name", () => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -665,7 +676,7 @@ describe('queries', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const Container = graphql<{}, Data>(query)( class Container extends React.Component<ChildProps<{}, Data>> { @@ -675,7 +686,7 @@ describe('queries', () => { } ); - expect(Container.displayName).toEqual('Apollo(Container)'); + expect(Container.displayName).toEqual("Apollo(Container)"); }); it("uses a custom wrapped component name when 'alias' is specified", () => { @@ -689,7 +700,7 @@ describe('queries', () => { } `; @graphql(query, { - alias: 'withFoo' + alias: "withFoo", }) class Container extends React.Component { render(): React.ReactNode { @@ -699,10 +710,10 @@ describe('queries', () => { // ); // Not sure why I have to cast Container to any - expect((Container as any).displayName).toEqual('withFoo(Container)'); + expect((Container as any).displayName).toEqual("withFoo(Container)"); }); - itAsync('passes context to the link', (resolve, reject) => { + itAsync("passes context to the link", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query people { @@ -721,13 +732,13 @@ describe('queries', () => { mockSingleLink({ request: { query }, result: { - data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } - } + data: { allPeople: { people: [{ name: "Luke Skywalker" }] } }, + }, }) ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Data { @@ -737,7 +748,7 @@ describe('queries', () => { } const ContainerWithData = graphql<any, Data>(query, { - options: props => ({ context: { fromProps: props.context } }) + options: (props) => ({ context: { fromProps: props.context } }), })(() => null); render( @@ -751,21 +762,21 @@ describe('queries', () => { }).then(resolve, reject); }); - describe('Return partial data', () => { + describe("Return partial data", () => { const consoleWarn = console.warn; beforeAll(() => { - console.warn = () => null + console.warn = () => null; }); afterAll(() => { console.warn = consoleWarn; }); - it('should not return partial cache data when `returnPartialData` is false', () => { + it("should not return partial cache data when `returnPartialData` is false", () => { const cache = new Cache(); const client = new ApolloClient({ cache, - link: ApolloLink.empty() + link: ApolloLink.empty(), }); const fullQuery = gql` @@ -786,20 +797,20 @@ describe('queries', () => { data: { cars: [ { - __typename: 'Car', - make: 'Ford', - model: 'Mustang', - vin: 'PONY123', + __typename: "Car", + make: "Ford", + model: "Mustang", + vin: "PONY123", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', - description: 'Could not get after it.' 
- } - ] - } - ] - } + __typename: "Repair", + date: "2019-05-08", + description: "Could not get after it.", + }, + ], + }, + ], + }, }); const partialQuery = gql` @@ -831,11 +842,11 @@ describe('queries', () => { render(<App />); }); - it('should return partial cache data when `returnPartialData` is true', () => { + it("should return partial cache data when `returnPartialData` is true", () => { const cache = new Cache(); const client = new ApolloClient({ cache, - link: ApolloLink.empty() + link: ApolloLink.empty(), }); const fullQuery = gql` @@ -856,20 +867,20 @@ describe('queries', () => { data: { cars: [ { - __typename: 'Car', - make: 'Ford', - model: 'Mustang', - vin: 'PONY123', + __typename: "Car", + make: "Ford", + model: "Mustang", + vin: "PONY123", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', - description: 'Could not get after it.' - } - ] - } - ] - } + __typename: "Repair", + date: "2019-05-08", + description: "Could not get after it.", + }, + ], + }, + ], + }, }); const partialQuery = gql` @@ -885,22 +896,22 @@ describe('queries', () => { const ComponentWithData = graphql<any, any>(partialQuery, { options: { - returnPartialData: true - } + returnPartialData: true, + }, })( class Compnent extends React.Component<any> { render() { if (!this.props.data.loading) { expect(this.props.data.cars).toEqual([ { - __typename: 'Car', + __typename: "Car", repairs: [ { - __typename: 'Repair', - date: '2019-05-08' - } - ] - } + __typename: "Repair", + date: "2019-05-08", + }, + ], + }, ]); } return null; diff --git a/src/react/hoc/__tests__/queries/lifecycle.test.tsx b/src/react/hoc/__tests__/queries/lifecycle.test.tsx --- a/src/react/hoc/__tests__/queries/lifecycle.test.tsx +++ b/src/react/hoc/__tests__/queries/lifecycle.test.tsx @@ -1,21 +1,19 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { mockSingleLink } from '../../../../testing'; -import { Query as QueryComponent } from '../../../components'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -const IS_REACT_18 = React.version.startsWith('18'); - -describe('[queries] lifecycle', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { mockSingleLink } from "../../../../testing"; +import { Query as QueryComponent } from "../../../components"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("[queries] lifecycle", () => { // lifecycle - it('reruns the query if it changes', async () => { + it("reruns the query if it changes", async () => { let count = 0; const query: DocumentNode = gql` query people($first: Int) { @@ -27,12 +25,12 @@ describe('[queries] lifecycle', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data1; const variables1 = { first: 1 }; type Vars = typeof variables1; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; 
+ const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; const variables2 = { first: 2 }; const link = mockSingleLink( @@ -42,14 +40,14 @@ describe('[queries] lifecycle', () => { const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Vars, Data, Vars>(query, { - options: props => ({ + options: (props) => ({ variables: props, - fetchPolicy: count === 0 ? 'cache-and-network' : 'cache-first' - }) + fetchPolicy: count === 0 ? "cache-and-network" : "cache-first", + }), })( class extends React.Component<ChildProps<Vars, Data, Vars>> { componentDidUpdate(prevProps: ChildProps<Vars, Data, Vars>) { @@ -110,7 +108,7 @@ describe('[queries] lifecycle', () => { await waitFor(() => expect(count).toBe(3)); }); - it('rebuilds the queries on prop change when using `options`', async () => { + it("rebuilds the queries on prop change when using `options`", async () => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -120,16 +118,16 @@ describe('[queries] lifecycle', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let firstRun = true; @@ -176,7 +174,7 @@ describe('[queries] lifecycle', () => { }); }); - it('reruns the query if just the variables change', async () => { + it("reruns the query if just the variables change", async () => { let count = 0; const query: DocumentNode = gql` query people($first: Int) { @@ -188,13 +186,13 @@ describe('[queries] lifecycle', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data1; const variables1 = { first: 1 }; type Vars = typeof variables1; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; const variables2 = { first: 2 }; const link = mockSingleLink( @@ -204,11 +202,11 @@ describe('[queries] lifecycle', () => { const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Vars, Data, Vars>(query, { - options: props => ({ variables: props }) + options: (props) => ({ variables: props }), })( class extends React.Component<ChildProps<Vars, Data, Vars>> { componentDidUpdate(prevProps: ChildProps<Vars, Data, Vars>) { @@ -270,7 +268,7 @@ describe('[queries] lifecycle', () => { await waitFor(() => expect(count).toBe(3)); }); - it('reruns the queries on prop change when using passed props', async () => { + it("reruns the queries on prop change when using passed props", async () => { let count = 0; const query: DocumentNode = gql` query people($first: Int) { @@ -282,13 +280,13 @@ describe('[queries] lifecycle', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data1; const variables1 = { first: 1 }; type Vars = typeof variables1; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + const data2 = { allPeople: { people: [{ 
name: "Leia Skywalker" }] } }; const variables2 = { first: 2 }; const link = mockSingleLink( @@ -298,7 +296,7 @@ describe('[queries] lifecycle', () => { const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Vars, Data, Vars>(query)( @@ -359,7 +357,7 @@ describe('[queries] lifecycle', () => { await waitFor(() => expect(count).toBe(3)); }); - it('stays subscribed to updates after irrelevant prop changes', async () => { + it("stays subscribed to updates after irrelevant prop changes", async () => { const query: DocumentNode = gql` query people($first: Int) { allPeople(first: $first) { @@ -371,17 +369,17 @@ describe('[queries] lifecycle', () => { `; const variables = { first: 1 }; type Vars = typeof variables; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data1; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; const link = mockSingleLink( { request: { query, variables }, result: { data: data1 } }, { request: { query, variables }, result: { data: data2 } } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props { @@ -391,7 +389,7 @@ describe('[queries] lifecycle', () => { let count = 0; const Container = graphql<Props, Data, Vars>(query, { - options: { variables, notifyOnNetworkStatusChange: false } + options: { variables, notifyOnNetworkStatusChange: false }, })( class extends React.Component<ChildProps<Props, Data, Vars>> { componentDidUpdate() { @@ -401,23 +399,17 @@ describe('[queries] lifecycle', () => { if (count === 1) { expect(props.foo).toEqual(42); expect(props.data!.loading).toEqual(false); - expect(props.data!.allPeople).toEqual( - data1.allPeople - ); + expect(props.data!.allPeople).toEqual(data1.allPeople); props.changeState(); } else if (count === 2) { expect(props.foo).toEqual(43); expect(props.data!.loading).toEqual(false); - expect(props.data!.allPeople).toEqual( - data1.allPeople - ); + expect(props.data!.allPeople).toEqual(data1.allPeople); props.data!.refetch(); } else if (count === 3) { expect(props.foo).toEqual(43); expect(props.data!.loading).toEqual(false); - expect(props.data!.allPeople).toEqual( - data2.allPeople - ); + expect(props.data!.allPeople).toEqual(data2.allPeople); } } catch (e) { fail(e); @@ -450,7 +442,7 @@ describe('[queries] lifecycle', () => { await waitFor(() => expect(count).toBe(3)); }); - it('correctly rebuilds props on remount', async () => { + it("correctly rebuilds props on remount", async () => { const query: DocumentNode = gql` query pollingPeople { allPeople(first: 1) { @@ -460,7 +452,7 @@ describe('[queries] lifecycle', () => { } } `; - const data = { allPeople: { people: [{ name: 'Darth Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Darth Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, @@ -468,14 +460,14 @@ describe('[queries] lifecycle', () => { newData: () => ({ data: { allPeople: { - people: [{ name: `Darth Skywalker - ${Math.random()}` }] - } - } - }) + people: [{ name: `Darth Skywalker - ${Math.random()}` }], + }, + }, + }), }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let app: 
React.ReactElement<any>, count = 0; @@ -483,7 +475,7 @@ describe('[queries] lifecycle', () => { let done = false; let rerender: any; const Container = graphql<{}, Data>(query, { - options: { pollInterval: 10, notifyOnNetworkStatusChange: false } + options: { pollInterval: 10, notifyOnNetworkStatusChange: false }, })( class extends React.Component<ChildProps<{}, Data>> { componentDidUpdate() { @@ -512,13 +504,11 @@ describe('[queries] lifecycle', () => { rerender = render(app).rerender; await waitFor(() => { - if (!IS_REACT_18) { - expect(done).toBeTruthy() - } + expect(done).toBeTruthy(); }); }); - it('will re-execute a query when the client changes', async () => { + it("will re-execute a query when the client changes", async () => { const query: DocumentNode = gql` { a @@ -529,38 +519,38 @@ describe('[queries] lifecycle', () => { const link1 = mockSingleLink( { request: { query }, - result: { data: { a: 1, b: 2, c: 3 } } + result: { data: { a: 1, b: 2, c: 3 } }, }, { request: { query }, - result: { data: { a: 1, b: 2, c: 3 } } + result: { data: { a: 1, b: 2, c: 3 } }, } ); const link2 = mockSingleLink( { request: { query }, - result: { data: { a: 4, b: 5, c: 6 } } + result: { data: { a: 4, b: 5, c: 6 } }, }, { request: { query }, - result: { data: { a: 4, b: 5, c: 6 } } + result: { data: { a: 4, b: 5, c: 6 } }, } ); const link3 = mockSingleLink({ request: { query }, - result: { data: { a: 7, b: 8, c: 9 } } + result: { data: { a: 7, b: 8, c: 9 } }, }); const client1 = new ApolloClient({ link: link1, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const client2 = new ApolloClient({ link: link2, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const client3 = new ApolloClient({ link: link3, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Data { @@ -571,9 +561,10 @@ describe('[queries] lifecycle', () => { let switchClient: (client: ApolloClient<any>) => void; let refetchQuery: () => void; let count = 0; + let testFailures: any[] = []; const Query = graphql<{}, Data>(query, { - options: { notifyOnNetworkStatusChange: true } + options: { notifyOnNetworkStatusChange: true }, })( class extends React.Component<ChildProps<{}, Data>> { componentDidMount() { @@ -597,20 +588,16 @@ describe('[queries] lifecycle', () => { loading: false, a: 1, b: 2, - c: 3 + c: 3, }); refetchQuery!(); break; case 3: - if (IS_REACT_18) { - expect({ loading }).toEqual({ loading: false }); - } else { - expect({ loading }).toEqual({ loading: true }); - } + expect({ loading }).toEqual({ loading: true }); expect({ a, b, c }).toEqual({ a: 1, b: 2, - c: 3 + c: 3, }); break; case 4: @@ -618,7 +605,7 @@ describe('[queries] lifecycle', () => { loading: false, a: 1, b: 2, - c: 3 + c: 3, }); setTimeout(() => { switchClient!(client2); @@ -637,7 +624,7 @@ describe('[queries] lifecycle', () => { loading: false, a: 4, b: 5, - c: 6 + c: 6, }); refetchQuery!(); break; @@ -646,7 +633,7 @@ describe('[queries] lifecycle', () => { loading: true, a: 4, b: 5, - c: 6 + c: 6, }); break; case 8: @@ -654,7 +641,7 @@ describe('[queries] lifecycle', () => { loading: false, a: 4, b: 5, - c: 6 + c: 6, }); setTimeout(() => { switchClient!(client3); @@ -673,7 +660,7 @@ describe('[queries] lifecycle', () => { loading: false, a: 7, b: 8, - c: 9 + c: 9, }); setTimeout(() => { switchClient!(client1); @@ -684,7 +671,7 @@ describe('[queries] lifecycle', () => { loading: false, a: 1, b: 2, - c: 3 + c: 3, }); setTimeout(() => 
{ switchClient!(client3); @@ -702,7 +689,7 @@ describe('[queries] lifecycle', () => { fail(`Unexpectedly many renders (${count})`); } } catch (err) { - fail(err); + testFailures.push(err); } return null; @@ -712,11 +699,11 @@ describe('[queries] lifecycle', () => { class ClientSwitcher extends React.Component<any, any> { state = { - client: client1 + client: client1, }; componentDidMount() { - switchClient = newClient => { + switchClient = (newClient) => { this.setState({ client: newClient }); }; } @@ -733,15 +720,14 @@ describe('[queries] lifecycle', () => { render(<ClientSwitcher />); await waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(3) - } else { - expect(count).toBe(12) + if (testFailures.length > 0) { + throw testFailures[0]; } + expect(count).toBe(12); }); }); - it('handles synchronous racecondition with prefilled data from the server', async () => { + it("handles synchronous racecondition with prefilled data from the server", async () => { const query: DocumentNode = gql` query GetUser($first: Int) { user(first: $first) { @@ -751,29 +737,29 @@ describe('[queries] lifecycle', () => { `; const variables = { first: 1 }; type Vars = typeof variables; - const data2 = { user: { name: 'Luke Skywalker' } }; + const data2 = { user: { name: "Luke Skywalker" } }; type Data = typeof data2; const link = mockSingleLink({ request: { query, variables }, result: { data: data2 }, - delay: 10 + delay: 10, }); const initialState = { apollo: { data: { ROOT_QUERY: { - 'user({"first":1})': null - } - } - } + 'user({"first":1})': null, + }, + }, + }, }; const client = new ApolloClient({ link, // prefill the store (like SSR would) // @see https://github.com/zeit/next.js/blob/master/examples/with-apollo/lib/initApollo.js - cache: new Cache({ addTypename: false }).restore(initialState) + cache: new Cache({ addTypename: false }).restore(initialState), }); let count = 0; @@ -781,8 +767,8 @@ describe('[queries] lifecycle', () => { const Container = graphql<Vars, Data>(query)( class extends React.Component<ChildProps<Vars, Data>> { componentDidMount() { - this.props.data!.refetch().then(result => { - expect(result.data!.user.name).toBe('Luke Skywalker'); + this.props.data!.refetch().then((result) => { + expect(result.data!.user.name).toBe("Luke Skywalker"); done = true; }); } @@ -790,9 +776,9 @@ describe('[queries] lifecycle', () => { render() { count++; const user = this.props.data!.user; - const name = user ? user.name : ''; + const name = user ? 
user.name : ""; if (count === 2) { - expect(name).toBe('Luke Skywalker'); + expect(name).toBe("Luke Skywalker"); } return null; } @@ -808,7 +794,7 @@ describe('[queries] lifecycle', () => { await waitFor(() => expect(done).toBeTruthy()); }); - it('handles asynchronous racecondition with prefilled data from the server', async () => { + it("handles asynchronous racecondition with prefilled data from the server", async () => { const query: DocumentNode = gql` query Q { books { @@ -821,42 +807,42 @@ describe('[queries] lifecycle', () => { const ssrResult = { books: [ { - name: 'ssrfirst', - __typename: 'Book' - } - ] + name: "ssrfirst", + __typename: "Book", + }, + ], }; const result = { books: [ { - name: 'first', - __typename: 'Book' - } - ] + name: "first", + __typename: "Book", + }, + ], }; const ssrLink = mockSingleLink({ request: { query } as any, - result: { data: ssrResult } + result: { data: ssrResult }, }); const link = mockSingleLink({ request: { query } as any, - result: { data: result } + result: { data: result }, }); const ssrClient = new ApolloClient({ cache: new Cache(), - link: ssrLink + link: ssrLink, }); await ssrClient.query({ query, - variables: {} + variables: {}, }); const client = new ApolloClient({ cache: new Cache().restore(ssrClient.extract()), // --- this is the "SSR" bit - link + link, }); //try to render the app / call refetch / etc @@ -869,7 +855,7 @@ describe('[queries] lifecycle', () => { {({ loading, data, refetch }: any) => { if (!loading) { if (!refetched) { - expect(data.books[0].name).toEqual('ssrfirst'); + expect(data.books[0].name).toEqual("ssrfirst"); //setTimeout allows component to mount, which often happens //when waiting ideally we should be able to call refetch //immediately However the subscription needs to start before @@ -879,13 +865,13 @@ describe('[queries] lifecycle', () => { //data you get is fresh, so one would wait for an interaction setTimeout(() => { refetch().then((refetchResult: any) => { - expect(refetchResult.data.books[0].name).toEqual('first'); + expect(refetchResult.data.books[0].name).toEqual("first"); done = true; }); }); refetched = true; } else { - expect(data.books[0].name).toEqual('first'); + expect(data.books[0].name).toEqual("first"); } } return <p> stub </p>; diff --git a/src/react/hoc/__tests__/queries/loading.test.tsx b/src/react/hoc/__tests__/queries/loading.test.tsx --- a/src/react/hoc/__tests__/queries/loading.test.tsx +++ b/src/react/hoc/__tests__/queries/loading.test.tsx @@ -1,64 +1,71 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient, NetworkStatus, WatchQueryFetchPolicy } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -const IS_REACT_18 = React.version.startsWith('18'); - -describe('[queries] loading', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { + ApolloClient, + NetworkStatus, + WatchQueryFetchPolicy, +} from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; 
+import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("[queries] loading", () => { // networkStatus / loading - itAsync('exposes networkStatus as a part of the props api', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "exposes networkStatus as a part of the props api", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const link = mockSingleLink({ - request: { query }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { allPeople: { people: [{ name: "Luke Skywalker" }] } }, + }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let done = false; - const Container = graphql(query, { - options: { notifyOnNetworkStatusChange: true } - })( - class extends React.Component<ChildProps> { - componentDidUpdate() { - const { data } = this.props; - expect(data!.networkStatus).toBeTruthy(); - done = true; - } - render() { - return null; + let done = false; + const Container = graphql(query, { + options: { notifyOnNetworkStatusChange: true }, + })( + class extends React.Component<ChildProps> { + componentDidUpdate() { + const { data } = this.props; + expect(data!.networkStatus).toBeTruthy(); + done = true; + } + render() { + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + } + ); - it('should set the initial networkStatus to 1 (loading)', () => { + it("should set the initial networkStatus to 1 (loading)", () => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -68,14 +75,14 @@ describe('[queries] loading', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); @graphql(query, { options: { notifyOnNetworkStatusChange: true } }) @@ -97,250 +104,260 @@ describe('[queries] loading', () => { </ApolloProvider> ); - unmount() + unmount(); }); - itAsync('should set the networkStatus to 7 (ready) when the query is loaded', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "should set the networkStatus to 7 (ready) when the query is loaded", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const link = mockSingleLink({ - request: { query }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + 
const link = mockSingleLink({ + request: { query }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const Container = graphql(query, { - options: { notifyOnNetworkStatusChange: true } - })( - class extends React.Component<ChildProps> { - componentDidUpdate() { - expect(this.props.data!.networkStatus).toBe(7); - done = true; - } + const Container = graphql(query, { + options: { notifyOnNetworkStatusChange: true }, + })( + class extends React.Component<ChildProps> { + componentDidUpdate() { + expect(this.props.data!.networkStatus).toBe(7); + done = true; + } - render() { - return null; + render() { + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); - itAsync('should set the networkStatus to 2 (setVariables) when the query variables are changed', (resolve, reject) => { - let count = 0; - const query: DocumentNode = gql` - query people($first: Int) { - allPeople(first: $first) { - people { - name + itAsync( + "should set the networkStatus to 2 (setVariables) when the query variables are changed", + (resolve, reject) => { + let count = 0; + const query: DocumentNode = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name + } } } - } - `; + `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const variables1 = { first: 1 }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const variables1 = { first: 1 }; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; - const variables2 = { first: 2 }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; + const variables2 = { first: 2 }; - type Data = typeof data1; - type Vars = typeof variables1; + type Data = typeof data1; + type Vars = typeof variables1; - const link = mockSingleLink( - { request: { query, variables: variables1 }, result: { data: data1 } }, - { request: { query, variables: variables2 }, result: { data: data2 } } - ); + const link = mockSingleLink( + { request: { query, variables: variables1 }, result: { data: data1 } }, + { request: { query, variables: variables2 }, result: { data: data2 } } + ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let done = false; - const Container = graphql<Vars, Data, Vars>(query, { - options: props => ({ - variables: props, - notifyOnNetworkStatusChange: true - }) - })( - class extends React.Component<ChildProps<Vars, Data, Vars>> { - componentDidUpdate(prevProps: ChildProps<Vars, Data, Vars>) { - try { - // variables changed, new query is loading, but old data is still there - switch (++count) { - case 1: - expect(prevProps.data!.loading).toBe(true); - expect(prevProps.data!.variables).toEqual(variables1); - expect(prevProps.data!.allPeople).toBe(undefined); - expect(prevProps.data!.error).toBe(undefined); - expect(prevProps.data!.networkStatus).toBe(NetworkStatus.loading); - expect(this.props.data!.loading).toBe(false); - expect(this.props.data!.variables).toEqual(variables1); - expect(this.props.data!.allPeople).toEqual(data1.allPeople); - 
expect(this.props.data!.error).toBe(undefined); - expect(this.props.data!.networkStatus).toBe(NetworkStatus.ready); - break; - case 2: - expect(prevProps.data!.loading).toBe(false); - expect(prevProps.data!.variables).toEqual(variables1); - expect(prevProps.data!.allPeople).toEqual(data1.allPeople); - expect(prevProps.data!.error).toBe(undefined); - expect(this.props.data!.loading).toBe(true); - expect(this.props.data!.variables).toEqual(variables2); - expect(this.props.data!.allPeople).toBe(undefined); - expect(this.props.data!.error).toBe(undefined); - expect(this.props.data!.networkStatus).toBe(NetworkStatus.setVariables); - break; - case 3: - expect(prevProps.data!.loading).toBe(true); - expect(prevProps.data!.variables).toEqual(variables2); - expect(prevProps.data!.allPeople).toBe(undefined); - expect(prevProps.data!.error).toBe(undefined); - expect(prevProps.data!.networkStatus).toBe(NetworkStatus.setVariables); - expect(this.props.data!.loading).toBe(false); - expect(this.props.data!.variables).toEqual(variables2); - expect(this.props.data!.allPeople).toEqual(data2.allPeople); - expect(this.props.data!.error).toBe(undefined); - expect(this.props.data!.networkStatus).toBe(NetworkStatus.ready); - done = true; - break; + let done = false; + const Container = graphql<Vars, Data, Vars>(query, { + options: (props) => ({ + variables: props, + notifyOnNetworkStatusChange: true, + }), + })( + class extends React.Component<ChildProps<Vars, Data, Vars>> { + componentDidUpdate(prevProps: ChildProps<Vars, Data, Vars>) { + try { + // variables changed, new query is loading, but old data is still there + switch (++count) { + case 1: + expect(prevProps.data!.loading).toBe(true); + expect(prevProps.data!.variables).toEqual(variables1); + expect(prevProps.data!.allPeople).toBe(undefined); + expect(prevProps.data!.error).toBe(undefined); + expect(prevProps.data!.networkStatus).toBe( + NetworkStatus.loading + ); + expect(this.props.data!.loading).toBe(false); + expect(this.props.data!.variables).toEqual(variables1); + expect(this.props.data!.allPeople).toEqual(data1.allPeople); + expect(this.props.data!.error).toBe(undefined); + expect(this.props.data!.networkStatus).toBe( + NetworkStatus.ready + ); + break; + case 2: + expect(prevProps.data!.loading).toBe(false); + expect(prevProps.data!.variables).toEqual(variables1); + expect(prevProps.data!.allPeople).toEqual(data1.allPeople); + expect(prevProps.data!.error).toBe(undefined); + expect(this.props.data!.loading).toBe(true); + expect(this.props.data!.variables).toEqual(variables2); + expect(this.props.data!.allPeople).toBe(undefined); + expect(this.props.data!.error).toBe(undefined); + expect(this.props.data!.networkStatus).toBe( + NetworkStatus.setVariables + ); + break; + case 3: + expect(prevProps.data!.loading).toBe(true); + expect(prevProps.data!.variables).toEqual(variables2); + expect(prevProps.data!.allPeople).toBe(undefined); + expect(prevProps.data!.error).toBe(undefined); + expect(prevProps.data!.networkStatus).toBe( + NetworkStatus.setVariables + ); + expect(this.props.data!.loading).toBe(false); + expect(this.props.data!.variables).toEqual(variables2); + expect(this.props.data!.allPeople).toEqual(data2.allPeople); + expect(this.props.data!.error).toBe(undefined); + expect(this.props.data!.networkStatus).toBe( + NetworkStatus.ready + ); + done = true; + break; + } + } catch (err) { + reject(err); } - } catch (err) { - reject(err); + } + render() { + return null; } } - render() { - return null; - } - } - ); + ); - class ChangingProps extends 
React.Component<any, any> { - state = { first: 1 }; + class ChangingProps extends React.Component<any, any> { + state = { first: 1 }; - componentDidMount() { - setTimeout(() => { - this.setState({ first: 2 }); - }, 50); - } + componentDidMount() { + setTimeout(() => { + this.setState({ first: 2 }); + }, 50); + } - render() { - return <Container first={this.state.first} />; + render() { + return <Container first={this.state.first} />; + } } - } - render( - <ApolloProvider client={client}> - <ChangingProps /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <ChangingProps /> + </ApolloProvider> + ); - waitFor(() => expect(done).toBe(true)).then(resolve, reject); - }); + waitFor(() => expect(done).toBe(true)).then(resolve, reject); + } + ); - itAsync('resets the loading state after a refetched query', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "resets the loading state after a refetched query", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; - type Data = typeof data; + type Data = typeof data; - const link = mockSingleLink( - { request: { query }, result: { data } }, - { request: { query }, result: { data: data2 } } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const link = mockSingleLink( + { request: { query }, result: { data } }, + { request: { query }, result: { data: data2 } } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let count = 0; - const Container = graphql<{}, Data>(query, { - options: { notifyOnNetworkStatusChange: true } - })( - class extends React.Component<ChildProps<{}, Data>> { - componentDidUpdate() { - const { data } = this.props; - switch (count++) { - case 0: - expect(data!.networkStatus).toBe(7); - // this isn't reloading fully - setTimeout(() => { - data!.refetch(); - }); - break; - case 1: - if (IS_REACT_18) { - expect(data!.loading).toBeFalsy(); - expect(data!.networkStatus).toBe(NetworkStatus.ready); - } else { + let count = 0; + const Container = graphql<{}, Data>(query, { + options: { notifyOnNetworkStatusChange: true }, + })( + class extends React.Component<ChildProps<{}, Data>> { + componentDidUpdate() { + const { data } = this.props; + switch (count++) { + case 0: + expect(data!.networkStatus).toBe(7); + // this isn't reloading fully + setTimeout(() => { + data!.refetch(); + }); + break; + case 1: expect(data!.loading).toBeTruthy(); expect(data!.networkStatus).toBe(NetworkStatus.refetch); - } - expect(data!.allPeople).toEqual(data!.allPeople); - break; - case 2: - expect(data!.loading).toBeFalsy(); - expect(data!.networkStatus).toBe(7); - expect(data!.allPeople).toEqual(data2.allPeople); - break; - default: - reject(new Error('Too many props updates')); + expect(data!.allPeople).toEqual(data!.allPeople); + break; + case 2: + expect(data!.loading).toBeFalsy(); + expect(data!.networkStatus).toBe(7); + expect(data!.allPeople).toEqual(data2.allPeople); + break; + default: + reject(new Error("Too many props updates")); + } } - } - render() { - return null; + render() { + 
return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(2) - } else { - expect(count).toBe(3) - } - }).then(resolve, reject); - }); + waitFor(() => { + expect(count).toBe(3); + }).then(resolve, reject); + } + ); - it('correctly sets loading state on remounted network-only query', async () => { + it("correctly sets loading state on remounted network-only query", async () => { const query: DocumentNode = gql` query pollingPeople { allPeople(first: 1) { @@ -350,7 +367,7 @@ describe('[queries] loading', () => { } } `; - const data = { allPeople: { people: [{ name: 'Darth Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Darth Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ @@ -359,16 +376,16 @@ describe('[queries] loading', () => { newData: () => ({ data: { allPeople: { - people: [{ name: `Darth Skywalker - ${Math.random()}` }] - } - } - }) + people: [{ name: `Darth Skywalker - ${Math.random()}` }], + }, + }, + }), }); const client = new ApolloClient({ link, cache: new Cache({ addTypename: false }), - queryDeduplication: false + queryDeduplication: false, }); let count = 0; @@ -376,7 +393,7 @@ describe('[queries] loading', () => { const usedFetchPolicies: WatchQueryFetchPolicy[] = []; const Container = graphql<{}, Data>(query, { options: { - fetchPolicy: 'network-only', + fetchPolicy: "network-only", nextFetchPolicy(currentFetchPolicy, info) { if (info.reason === "variables-changed") { return info.initialFetchPolicy; @@ -386,7 +403,7 @@ describe('[queries] loading', () => { return "cache-first"; } return currentFetchPolicy; - } + }, }, })( class extends React.Component<ChildProps<{}, Data>> { @@ -426,292 +443,319 @@ describe('[queries] loading', () => { render(App); - await waitFor(() => { - expect(usedFetchPolicies).toEqual([ - "network-only", - "network-only", - "cache-first", - ]); - }, { interval: 1 }); - await waitFor(() => { + await waitFor( + () => { + expect(usedFetchPolicies).toEqual([ + "network-only", + "network-only", + "cache-first", + ]); + }, + { interval: 1 } + ); + await waitFor( + () => { expect(count).toBe(5); - }, { interval: 1 }); + }, + { interval: 1 } + ); }); - itAsync('correctly sets loading state on remounted component with changed variables', (resolve, reject) => { - const query: DocumentNode = gql` - query remount($first: Int) { - allPeople(first: $first) { - people { - name + itAsync( + "correctly sets loading state on remounted component with changed variables", + (resolve, reject) => { + const query: DocumentNode = gql` + query remount($first: Int) { + allPeople(first: $first) { + people { + name + } } } + `; + + interface Data { + allPeople: { + people: { name: string }[]; + }; } - `; + const data = { allPeople: null }; + const variables = { first: 1 }; + const variables2 = { first: 2 }; - interface Data { - allPeople: { - people: { name: string }[]; - }; - } - const data = { allPeople: null }; - const variables = { first: 1 }; - const variables2 = { first: 2 }; + type Vars = typeof variables; - type Vars = typeof variables; + const link = mockSingleLink( + { request: { query, variables }, result: { data }, delay: 10 }, + { + request: { query, variables: variables2 }, + result: { data }, + delay: 10, + } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + let 
renderFn: (num: number) => React.ReactElement<any>, + count = 0; + const testFailures: any[] = []; - const link = mockSingleLink( - { request: { query, variables }, result: { data }, delay: 10 }, - { - request: { query, variables: variables2 }, - result: { data }, - delay: 10 + interface Props { + first: number; } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - let renderFn: (num: number) => React.ReactElement<any>, - count = 0; + const Container = graphql<Props, Data, Vars>(query, { + options: ({ first }) => ({ variables: { first } }), + })( + class extends React.Component<ChildProps<Props, Data, Vars>> { + componentDidUpdate() { + try { + if (count === 0) { + // has data + unmount(); + setTimeout(() => { + render(renderFn(2)); + }, 5); + } - interface Props { - first: number; - } - const Container = graphql<Props, Data, Vars>(query, { - options: ({ first }) => ({ variables: { first } }) - })( - class extends React.Component<ChildProps<Props, Data, Vars>> { - componentDidUpdate() { - if (count === 0) { - // has data - unmount(); - setTimeout(() => { - render(renderFn(2)); - }, 5); + if (count === 2) { + // remounted data after fetch + expect(this.props.data!.loading).toBeFalsy(); + } + count++; + } catch (e) { + testFailures.push(e); + } } - if (count === 2) { - // remounted data after fetch - expect(this.props.data!.loading).toBeFalsy(); - } - count++; - } + render() { + try { + if (count === 1) { + expect(this.props.data!.loading).toBeTruthy(); // on remount + count++; + } + } catch (e) { + testFailures.push(e); + } - render() { - if (count === 1) { - expect(this.props.data!.loading).toBeTruthy(); // on remount - count++; + return null; } - - return null; } - } - ); + ); - renderFn = (first: number) => ( - <ApolloProvider client={client}> - <Container first={first} /> - </ApolloProvider> - ); - const { unmount } = render(renderFn(1)); - waitFor(() => { - if (IS_REACT_18) { - expect(count).toBe(0) - } else { - expect(count).toBe(3) - } - }).then(resolve, reject); - }); + renderFn = (first: number) => ( + <ApolloProvider client={client}> + <Container first={first} /> + </ApolloProvider> + ); + const { unmount } = render(renderFn(1)); + waitFor(() => { + if (testFailures.length > 0) { + throw testFailures[0]; + } + expect(count).toBe(3); + }).then(resolve, reject); + } + ); - itAsync('correctly sets loading state on remounted component with changed variables (alt)', (resolve, reject) => { - const query: DocumentNode = gql` - query remount($name: String) { - allPeople(name: $name) { - people { - name + itAsync( + "correctly sets loading state on remounted component with changed variables (alt)", + (resolve, reject) => { + const query: DocumentNode = gql` + query remount($name: String) { + allPeople(name: $name) { + people { + name + } } } - } - `; + `; - interface Data { - allPeople: { - people: { name: string }[]; - }; - } - const data = { allPeople: null }; - const variables = { name: 'does-not-exist' }; - const variables2 = { name: 'nothing-either' }; + interface Data { + allPeople: { + people: { name: string }[]; + }; + } + const data = { allPeople: null }; + const variables = { name: "does-not-exist" }; + const variables2 = { name: "nothing-either" }; - type Vars = typeof variables; + type Vars = typeof variables; - const link = mockSingleLink( - { request: { query, variables }, result: { data } }, - { - request: { query, variables: variables2 }, - result: { data } - } - ); - const client = new ApolloClient({ - link, - cache: new 
Cache({ addTypename: false }) - }); + const link = mockSingleLink( + { request: { query, variables }, result: { data } }, + { + request: { query, variables: variables2 }, + result: { data }, + } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let count = 0; + let count = 0; - const Container = graphql<Vars, Data, Vars>(query)( - class extends React.Component<ChildProps<Vars, Data, Vars>> { - render() { - const { loading } = this.props.data!; - if (count === 0) expect(loading).toBeTruthy(); - if (count === 1) { - expect(loading).toBeFalsy(); - setTimeout(() => { - unmount(); - render( - <ApolloProvider client={client}> - <Container {...variables2} /> - </ApolloProvider> - ); - }, 0); - } - if (count === 2) expect(loading).toBeTruthy(); - if (count === 3) { - expect(loading).toBeFalsy(); - } - count++; - return null; + const Container = graphql<Vars, Data, Vars>(query)( + class extends React.Component<ChildProps<Vars, Data, Vars>> { + render() { + const { loading } = this.props.data!; + if (count === 0) expect(loading).toBeTruthy(); + if (count === 1) { + expect(loading).toBeFalsy(); + setTimeout(() => { + unmount(); + render( + <ApolloProvider client={client}> + <Container {...variables2} /> + </ApolloProvider> + ); + }, 0); + } + if (count === 2) expect(loading).toBeTruthy(); + if (count === 3) { + expect(loading).toBeFalsy(); + } + count++; + return null; + } } - } - ); + ); - const { unmount } = render( - <ApolloProvider client={client}> - <Container {...variables} /> - </ApolloProvider> - ); + const { unmount } = render( + <ApolloProvider client={client}> + <Container {...variables} /> + </ApolloProvider> + ); - waitFor(() => expect(count).toBe(4)).then(resolve, reject); - }); + waitFor(() => expect(count).toBe(4)).then(resolve, reject); + } + ); - itAsync('correctly sets loading state on component with changed variables and unchanged result', (resolve, reject) => { - const query: DocumentNode = gql` - query remount($first: Int) { - allPeople(first: $first) { - people { - name + itAsync( + "correctly sets loading state on component with changed variables and unchanged result", + (resolve, reject) => { + const query: DocumentNode = gql` + query remount($first: Int) { + allPeople(first: $first) { + people { + name + } } } + `; + interface Data { + allPeople: { + people: { name: string }[]; + }; } - `; - interface Data { - allPeople: { - people: { name: string }[]; - }; - } - const data = { allPeople: null }; - const variables = { first: 1 }; - const variables2 = { first: 2 }; + const data = { allPeople: null }; + const variables = { first: 1 }; + const variables2 = { first: 2 }; - type Vars = typeof variables; - const link = mockSingleLink( - { request: { query, variables }, result: { data } }, - { - request: { query, variables: variables2 }, - result: { data } + type Vars = typeof variables; + const link = mockSingleLink( + { request: { query, variables }, result: { data } }, + { + request: { query, variables: variables2 }, + result: { data }, + } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + let count = 0; + + interface Props extends Vars { + setFirst: (first: number) => void; } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - let count = 0; - interface Props extends Vars { - setFirst: (first: number) => void; - } + const connect = ( + component: React.ComponentType< + React.PropsWithChildren<React.PropsWithChildren<Props>> + > + 
): React.ComponentType< + React.PropsWithChildren<React.PropsWithChildren<{}>> + > => { + return class extends React.Component<{}, { first: number }> { + constructor(props: {}) { + super(props); - const connect = ( - component: React.ComponentType<React.PropsWithChildren<React.PropsWithChildren<Props>>> - ): React.ComponentType<React.PropsWithChildren<React.PropsWithChildren<{}>>> => { - return class extends React.Component<{}, { first: number }> { - constructor(props: {}) { - super(props); - - this.state = { - first: 1 - }; - this.setFirst = this.setFirst.bind(this); - } + this.state = { + first: 1, + }; + this.setFirst = this.setFirst.bind(this); + } - setFirst(first: number) { - this.setState({ first }); - } + setFirst(first: number) { + this.setState({ first }); + } - render() { - return React.createElement(component, { - first: this.state.first, - setFirst: this.setFirst - }); - } + render() { + return React.createElement(component, { + first: this.state.first, + setFirst: this.setFirst, + }); + } + }; }; - }; - const Container = connect( - graphql<Props, Data, Vars>(query, { - options: ({ first }) => ({ variables: { first } }) - })( - class extends React.Component<ChildProps<Props, Data, Vars>> { - render() { - try { - switch (count) { - case 0: - expect(this.props.data!.loading).toBe(true); - expect(this.props.data!.allPeople).toBeUndefined(); - break; - case 1: - expect(this.props.data!.loading).toBe(false); - expect(this.props.data!.allPeople).toBe(data.allPeople); - setTimeout(() => { - this.props.setFirst(2); - }); - break; - case 2: - expect(this.props.data!.loading).toBe(true); // on variables change - expect(this.props.data!.allPeople).toBeUndefined(); - break; - case 4: - // new data after fetch - expect(this.props.data!.loading).toBe(false); - expect(this.props.data!.allPeople).toBe(data.allPeople); - break; + const Container = connect( + graphql<Props, Data, Vars>(query, { + options: ({ first }) => ({ variables: { first } }), + })( + class extends React.Component<ChildProps<Props, Data, Vars>> { + render() { + try { + switch (count) { + case 0: + expect(this.props.data!.loading).toBe(true); + expect(this.props.data!.allPeople).toBeUndefined(); + break; + case 1: + expect(this.props.data!.loading).toBe(false); + expect(this.props.data!.allPeople).toBe(data.allPeople); + setTimeout(() => { + this.props.setFirst(2); + }); + break; + case 2: + expect(this.props.data!.loading).toBe(true); // on variables change + expect(this.props.data!.allPeople).toBeUndefined(); + break; + case 4: + // new data after fetch + expect(this.props.data!.loading).toBe(false); + expect(this.props.data!.allPeople).toBe(data.allPeople); + break; + } + } catch (err) { + reject(err); } - } catch (err) { - reject(err); - } - count++; + count++; - return null; + return null; + } } - } - ) - ); + ) + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => expect(count).toBe(4)).then(resolve, reject); - }); + waitFor(() => expect(count).toBe(4)).then(resolve, reject); + } + ); itAsync( - 'correctly sets loading state on component with changed variables, ' + - 'unchanged result, and network-only', + "correctly sets loading state on component with changed variables, " + + "unchanged result, and network-only", (resolve, reject) => { const query: DocumentNode = gql` query remount($first: Int) { @@ -728,7 +772,7 @@ describe('[queries] loading', () => { }; } - const data = { 
allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const variables = { first: 1 }; const variables2 = { first: 2 }; @@ -738,12 +782,12 @@ describe('[queries] loading', () => { { request: { query, variables: variables2 }, result: { data }, - delay: 10 + delay: 10, } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; @@ -752,14 +796,18 @@ describe('[queries] loading', () => { } const connect = ( - component: React.ComponentType<React.PropsWithChildren<React.PropsWithChildren<Props>>> - ): React.ComponentType<React.PropsWithChildren<React.PropsWithChildren<{}>>> => { + component: React.ComponentType< + React.PropsWithChildren<React.PropsWithChildren<Props>> + > + ): React.ComponentType< + React.PropsWithChildren<React.PropsWithChildren<{}>> + > => { return class extends React.Component<{}, { first: number }> { constructor(props: {}) { super(props); this.state = { - first: 1 + first: 1, }; this.setFirst = this.setFirst.bind(this); } @@ -771,7 +819,7 @@ describe('[queries] loading', () => { render() { return React.createElement(component, { first: this.state.first, - setFirst: this.setFirst + setFirst: this.setFirst, }); } }; @@ -781,8 +829,8 @@ describe('[queries] loading', () => { graphql<Props, Data, Vars>(query, { options: ({ first }) => ({ variables: { first }, - fetchPolicy: 'network-only' - }) + fetchPolicy: "network-only", + }), })( class extends React.Component<ChildProps<Props, Data, Vars>> { render() { diff --git a/src/react/hoc/__tests__/queries/observableQuery.test.tsx b/src/react/hoc/__tests__/queries/observableQuery.test.tsx --- a/src/react/hoc/__tests__/queries/observableQuery.test.tsx +++ b/src/react/hoc/__tests__/queries/observableQuery.test.tsx @@ -1,19 +1,19 @@ -import React from 'react'; -import userEvent from '@testing-library/user-event'; -import { render, waitFor, screen } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -describe('[queries] observableQuery', () => { +import React from "react"; +import userEvent from "@testing-library/user-event"; +import { render, waitFor, screen } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("[queries] observableQuery", () => { // observableQuery - it('will recycle `ObservableQuery`s when re-rendering the entire tree', async () => { + it("will recycle `ObservableQuery`s when re-rendering the entire tree", async () => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -23,7 +23,7 @@ describe('[queries] observableQuery', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; 
const link = mockSingleLink( @@ -32,7 +32,7 @@ describe('[queries] observableQuery', () => { ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; @@ -41,32 +41,30 @@ describe('[queries] observableQuery', () => { const keys = Array.from( ((client as any).queryManager as any).queries.keys() ); - await waitFor(() => expect(keys).toEqual(['1']), { interval: 1 }); + await waitFor(() => expect(keys).toEqual(["1"]), { interval: 1 }); }; const assert2 = async () => { const keys = Array.from( ((client as any).queryManager as any).queries.keys() ); - await waitFor(() => expect(keys).toEqual(['1']), { interval: 1 }); + await waitFor(() => expect(keys).toEqual(["1"]), { interval: 1 }); }; let done = false; const Container = graphql<{}, Data>(query, { - options: { fetchPolicy: 'cache-and-network' } + options: { fetchPolicy: "cache-and-network" }, })( class extends React.Component<ChildProps<{}, Data>> { async componentDidUpdate() { if (count === 2) { expect(this.props.data!.loading).toBeFalsy(); - expect(this.props.data!.allPeople).toEqual( - data.allPeople - ); + expect(this.props.data!.allPeople).toEqual(data.allPeople); // ensure first assertion and umount tree await assert1(); - userEvent.click(screen.getByText('Break things')); + userEvent.click(screen.getByText("Break things")); // ensure cleanup await assert2(); @@ -87,9 +85,7 @@ describe('[queries] observableQuery', () => { // be present; if (count === 3) { expect(this.props.data!.loading).toBeFalsy(); - expect(this.props.data!.allPeople).toEqual( - data.allPeople - ); + expect(this.props.data!.allPeople).toEqual(data.allPeople); } count++; return null; @@ -109,7 +105,7 @@ describe('[queries] observableQuery', () => { class AppWrapper extends React.Component<{}, { renderRedirect: boolean }> { state = { - renderRedirect: false + renderRedirect: false, }; goToRedirect = () => { @@ -147,129 +143,131 @@ describe('[queries] observableQuery', () => { }); }); - itAsync("will recycle `ObservableQuery`s when re-rendering a portion of the tree but not return stale data if variables don't match", (resolve, reject) => { - const query: DocumentNode = gql` - query people($first: Int!) { - allPeople(first: $first) { - people { - name - friends(id: $first) { + itAsync( + "will recycle `ObservableQuery`s when re-rendering a portion of the tree but not return stale data if variables don't match", + (resolve, reject) => { + const query: DocumentNode = gql` + query people($first: Int!) 
{ + allPeople(first: $first) { + people { name + friends(id: $first) { + name + } } } } - } - `; - const variables1 = { first: 1 }; - const variables2 = { first: 2 }; - const data = { - allPeople: { - people: [{ name: 'Luke Skywalker', friends: [{ name: 'r2d2' }] }] - } - }; - const data2 = { - allPeople: { - people: [{ name: 'Leia Skywalker', friends: [{ name: 'luke' }] }] - } - }; + `; + const variables1 = { first: 1 }; + const variables2 = { first: 2 }; + const data = { + allPeople: { + people: [{ name: "Luke Skywalker", friends: [{ name: "r2d2" }] }], + }, + }; + const data2 = { + allPeople: { + people: [{ name: "Leia Skywalker", friends: [{ name: "luke" }] }], + }, + }; - type Data = typeof data; - type Vars = typeof variables1; + type Data = typeof data; + type Vars = typeof variables1; - const link = mockSingleLink( - { request: { query, variables: variables1 }, result: { data } }, - { request: { query, variables: variables2 }, result: { data: data2 } } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - let remount: any; + const link = mockSingleLink( + { request: { query, variables: variables1 }, result: { data } }, + { request: { query, variables: variables2 }, result: { data: data2 } } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + let remount: any; + + const Container = graphql<Vars, Data, Vars>(query)( + class extends React.Component<ChildProps<Vars, Data, Vars>> { + render() { + try { + const { variables, loading, allPeople } = this.props.data!; + // first variable render + if (variables.first === 1) { + if (loading) expect(allPeople).toBeUndefined(); + if (!loading) { + expect(allPeople).toEqual(data.allPeople); + } + } - const Container = graphql<Vars, Data, Vars>(query)( - class extends React.Component<ChildProps<Vars, Data, Vars>> { - render() { - try { - const { variables, loading, allPeople } = this.props.data!; - // first variable render - if (variables.first === 1) { - if (loading) expect(allPeople).toBeUndefined(); - if (!loading) { - expect(allPeople).toEqual(data.allPeople); + if (variables.first === 2) { + // second variables render + if (loading) expect(allPeople).toBeUndefined(); + if (!loading) expect(allPeople).toEqual(data2.allPeople); } + } catch (e) { + reject(e); } - if (variables.first === 2) { - // second variables render - if (loading) expect(allPeople).toBeUndefined(); - if (!loading) - expect(allPeople).toEqual(data2.allPeople); - } - } catch (e) { - reject(e); + return null; } - - return null; } - } - ); - - class Remounter extends React.Component< - { render: typeof Container }, - { showChildren: boolean; variables: Vars } - > { - state = { - showChildren: true, - variables: variables1 - }; + ); - componentDidMount() { - remount = () => { - this.setState({ showChildren: false }, () => { - setTimeout(() => { - this.setState({ - showChildren: true, - variables: variables2 - }); - }, 10); - }); + class Remounter extends React.Component< + { render: typeof Container }, + { showChildren: boolean; variables: Vars } + > { + state = { + showChildren: true, + variables: variables1, }; - } - render() { - if (!this.state.showChildren) return null; - const Thing = this.props.render; - return <Thing first={this.state.variables.first} />; + componentDidMount() { + remount = () => { + this.setState({ showChildren: false }, () => { + setTimeout(() => { + this.setState({ + showChildren: true, + variables: variables2, + }); + }, 10); + }); + }; + } + + render() { + 
if (!this.state.showChildren) return null; + const Thing = this.props.render; + return <Thing first={this.state.variables.first} />; + } } - } - // the initial mount fires off the query - // the same as episode id = 1 - render( - <ApolloProvider client={client}> - <Remounter render={Container} /> - </ApolloProvider> - ); + // the initial mount fires off the query + // the same as episode id = 1 + render( + <ApolloProvider client={client}> + <Remounter render={Container} /> + </ApolloProvider> + ); - // after the initial data has been returned - // the user navigates to a different page - // but the query is recycled - let done = false; - setTimeout(() => { - // move to the "home" page from the "episode" page - remount(); + // after the initial data has been returned + // the user navigates to a different page + // but the query is recycled + let done = false; setTimeout(() => { - // move to a new "epsiode" page - // epsiode id = 2 - // wait to verify the data isn't stale then end - done = true; - }, 20); - }, 5); - - return waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); + // move to the "home" page from the "episode" page + remount(); + setTimeout(() => { + // move to a new "epsiode" page + // epsiode id = 2 + // wait to verify the data isn't stale then end + done = true; + }, 20); + }, 5); + + return waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + } + ); - it('not overly rerender', async () => { + it("not overly rerender", async () => { const query: DocumentNode = gql` query people($first: Int!) { allPeople(first: $first) { @@ -286,20 +284,20 @@ describe('[queries] observableQuery', () => { const variables = { first: 1 }; const data = { allPeople: { - people: [{ name: 'Luke Skywalker', friends: [{ name: 'r2d2' }] }] - } + people: [{ name: "Luke Skywalker", friends: [{ name: "r2d2" }] }], + }, }; type Data = typeof data; type Vars = typeof variables; const link = mockSingleLink({ request: { query, variables }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let remount: any; @@ -335,7 +333,7 @@ describe('[queries] observableQuery', () => { > { state = { showChildren: true, - variables + variables, }; componentDidMount() { @@ -382,93 +380,96 @@ describe('[queries] observableQuery', () => { }); }); - itAsync('does rerender if query returns differnt result', (resolve, reject) => { - const query: DocumentNode = gql` - query people($first: Int!) { - allPeople(first: $first) { - people { - name - friends(id: $first) { + itAsync( + "does rerender if query returns differnt result", + (resolve, reject) => { + const query: DocumentNode = gql` + query people($first: Int!) 
{ + allPeople(first: $first) { + people { name + friends(id: $first) { + name + } } } } - } - `; - - const variables = { first: 1 }; - const dataOne = { - allPeople: { - people: [{ name: 'Luke Skywalker', friends: [{ name: 'r2d2' }] }] - } - }; - const dataTwo = { - allPeople: { - people: [ - { name: 'Luke Skywalker', friends: [{ name: 'Leia Skywalker' }] } - ] - } - }; - - type Data = typeof dataOne; - type Vars = typeof variables; + `; - const link = mockSingleLink( - { - request: { query, variables }, - result: { data: dataOne } - }, - { - request: { query, variables }, - result: { data: dataTwo } - } - ); + const variables = { first: 1 }; + const dataOne = { + allPeople: { + people: [{ name: "Luke Skywalker", friends: [{ name: "r2d2" }] }], + }, + }; + const dataTwo = { + allPeople: { + people: [ + { name: "Luke Skywalker", friends: [{ name: "Leia Skywalker" }] }, + ], + }, + }; - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + type Data = typeof dataOne; + type Vars = typeof variables; + + const link = mockSingleLink( + { + request: { query, variables }, + result: { data: dataOne }, + }, + { + request: { query, variables }, + result: { data: dataTwo }, + } + ); - let count = 0; - const Container = graphql<Vars, Data, Vars>(query)( - class extends React.Component<ChildProps<Vars, Data, Vars>> { - render() { - count++; - try { - const { loading, allPeople, refetch } = this.props.data!; - // first variable render - if (count === 1) { - expect(loading).toBe(true); - } - if (count === 2) { - expect(loading).toBe(false); - expect(allPeople).toEqual(dataOne.allPeople); - refetch(); - } - if (count === 3) { - expect(loading).toBe(false); - expect(allPeople).toEqual(dataTwo.allPeople); - } - if (count > 3) { - throw new Error('too many renders'); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let count = 0; + const Container = graphql<Vars, Data, Vars>(query)( + class extends React.Component<ChildProps<Vars, Data, Vars>> { + render() { + count++; + try { + const { loading, allPeople, refetch } = this.props.data!; + // first variable render + if (count === 1) { + expect(loading).toBe(true); + } + if (count === 2) { + expect(loading).toBe(false); + expect(allPeople).toEqual(dataOne.allPeople); + refetch(); + } + if (count === 3) { + expect(loading).toBe(false); + expect(allPeople).toEqual(dataTwo.allPeople); + } + if (count > 3) { + throw new Error("too many renders"); + } + } catch (e) { + reject(e); } - } catch (e) { - reject(e); - } - return null; + return null; + } } - } - ); + ); - // the initial mount fires off the query - // the same as episode id = 1 - render( - <ApolloProvider client={client}> - <Container first={1} /> - </ApolloProvider> - ); + // the initial mount fires off the query + // the same as episode id = 1 + render( + <ApolloProvider client={client}> + <Container first={1} /> + </ApolloProvider> + ); - return waitFor(() => expect(count).toBe(3)).then(resolve, reject); - }); + return waitFor(() => expect(count).toBe(3)).then(resolve, reject); + } + ); }); diff --git a/src/react/hoc/__tests__/queries/polling.test.tsx b/src/react/hoc/__tests__/queries/polling.test.tsx --- a/src/react/hoc/__tests__/queries/polling.test.tsx +++ b/src/react/hoc/__tests__/queries/polling.test.tsx @@ -1,17 +1,17 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient, 
ApolloLink } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { Observable } from '../../../../utilities'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -describe('[queries] polling', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient, ApolloLink } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { Observable } from "../../../../utilities"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("[queries] polling", () => { let error: typeof console.error; beforeEach(() => { @@ -25,7 +25,7 @@ describe('[queries] polling', () => { }); // polling - itAsync('allows a polling query to be created', (resolve, reject) => { + itAsync("allows a polling query to be created", (resolve, reject) => { const POLL_INTERVAL = 5; const query: DocumentNode = gql` query people { @@ -36,14 +36,14 @@ describe('[queries] polling', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; const link = mockSingleLink( { request: { query }, result: { data } }, { request: { query }, result: { data: data2 } }, { request: { query }, result: { data } } ); - const cache = new Cache({ addTypename: false }) + const cache = new Cache({ addTypename: false }); const client = new ApolloClient({ link, cache, @@ -54,12 +54,12 @@ describe('[queries] polling', () => { options: () => ({ pollInterval: POLL_INTERVAL, notifyOnNetworkStatusChange: false, - }) + }), })(({ data }) => { count++; if (count === 4) { data!.stopPolling(); - expect(cache.readQuery({query})).toBeTruthy(); + expect(cache.readQuery({ query })).toBeTruthy(); resolve(); } return null; @@ -74,55 +74,58 @@ describe('[queries] polling', () => { waitFor(() => expect(count).toBe(4)).then(resolve, reject); }); - itAsync('ensures polling respects no-cache fetchPolicy', (resolve, reject) => { - const POLL_INTERVAL = 5; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "ensures polling respects no-cache fetchPolicy", + (resolve, reject) => { + const POLL_INTERVAL = 5; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; - const link = mockSingleLink( - { request: { query }, result: { data } }, - { request: { query }, result: { data: data2 } }, - { request: { query }, result: { data } } - ); - const cache = new Cache({ addTypename: false }) - const client = new ApolloClient({ - link, - cache, - }); - - let count = 0; - const Container = graphql(query, { - options: () => ({ - pollInterval: POLL_INTERVAL, - notifyOnNetworkStatusChange: false, - fetchPolicy: 'no-cache' - }) - })(({ data }) => { - count++; - if (count === 4) { - 
data!.stopPolling(); - expect(cache.readQuery({query})).toBeNull(); - resolve(); - } - return null; - }); + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; + const link = mockSingleLink( + { request: { query }, result: { data } }, + { request: { query }, result: { data: data2 } }, + { request: { query }, result: { data } } + ); + const cache = new Cache({ addTypename: false }); + const client = new ApolloClient({ + link, + cache, + }); + + let count = 0; + const Container = graphql(query, { + options: () => ({ + pollInterval: POLL_INTERVAL, + notifyOnNetworkStatusChange: false, + fetchPolicy: "no-cache", + }), + })(({ data }) => { + count++; + if (count === 4) { + data!.stopPolling(); + expect(cache.readQuery({ query })).toBeNull(); + resolve(); + } + return null; + }); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => expect(count).toBe(4)).then(resolve, reject); - }); + waitFor(() => expect(count).toBe(4)).then(resolve, reject); + } + ); const allPeopleQuery: DocumentNode = gql` query people { @@ -134,25 +137,26 @@ describe('[queries] polling', () => { } `; - const lukeLink = new ApolloLink(operation => new Observable(observer => { - expect(operation.query).toBe(allPeopleQuery); - observer.next({ - data: { - allPeople: { - people: [ - { name: "Luke Skywalker" }, - ], - }, - }, - }); - observer.complete(); - })); + const lukeLink = new ApolloLink( + (operation) => + new Observable((observer) => { + expect(operation.query).toBe(allPeopleQuery); + observer.next({ + data: { + allPeople: { + people: [{ name: "Luke Skywalker" }], + }, + }, + }); + observer.complete(); + }) + ); - itAsync('exposes stopPolling as part of the props api', (resolve, reject) => { + itAsync("exposes stopPolling as part of the props api", (resolve, reject) => { let done = false; const client = new ApolloClient({ link: lukeLink, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql(allPeopleQuery)( @@ -184,39 +188,44 @@ describe('[queries] polling', () => { }).then(resolve, reject); }); - itAsync('exposes startPolling as part of the props api', (resolve, reject) => { - let done = false; - const client = new ApolloClient({ - link: lukeLink, - cache: new Cache({ addTypename: false }) - }); - - const Container = graphql(allPeopleQuery, { options: { pollInterval: 10 } })( - class extends React.Component<ChildProps> { - componentDidUpdate() { - try { - const { data } = this.props; - expect(data!.startPolling).toBeTruthy(); - expect(data!.startPolling instanceof Function).toBeTruthy(); - done = true; - } catch (e) { - reject(e); + itAsync( + "exposes startPolling as part of the props api", + (resolve, reject) => { + let done = false; + const client = new ApolloClient({ + link: lukeLink, + cache: new Cache({ addTypename: false }), + }); + + const Container = graphql(allPeopleQuery, { + options: { pollInterval: 10 }, + })( + class extends React.Component<ChildProps> { + componentDidUpdate() { + try { + const { data } = this.props; + expect(data!.startPolling).toBeTruthy(); + expect(data!.startPolling instanceof Function).toBeTruthy(); + done = true; + } catch (e) { + reject(e); + } + } + render() { + return null; } } - render() { - return null; - } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - 
</ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); }); diff --git a/src/react/hoc/__tests__/queries/recomposeWithState.ts b/src/react/hoc/__tests__/queries/recomposeWithState.ts --- a/src/react/hoc/__tests__/queries/recomposeWithState.ts +++ b/src/react/hoc/__tests__/queries/recomposeWithState.ts @@ -2,7 +2,6 @@ // to avoid incurring an indirect dependency on ua-parser-js via fbjs. import React, { createFactory, Component } from "react"; -import '../../../../utilities/globals'; // For __DEV__ const setStatic = (key: string, value: string) => (BaseComponent: React.ComponentClass) => { @@ -35,7 +34,10 @@ export const withState = (stateName: string, stateUpdaterName: string, initialState: unknown) => (BaseComponent: React.ComponentClass) => { const factory = createFactory(BaseComponent); - class WithState extends Component<Record<string, unknown>, { stateValue: unknown }> { + class WithState extends Component< + Record<string, unknown>, + { stateValue: unknown } + > { state = { stateValue: typeof initialState === "function" diff --git a/src/react/hoc/__tests__/queries/reducer.test.tsx b/src/react/hoc/__tests__/queries/reducer.test.tsx --- a/src/react/hoc/__tests__/queries/reducer.test.tsx +++ b/src/react/hoc/__tests__/queries/reducer.test.tsx @@ -1,18 +1,18 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { DataValue } from '../../types'; - -describe('[queries] reducer', () => { +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { DataValue } from "../../types"; + +describe("[queries] reducer", () => { // props reducer - itAsync('allows custom mapping of a result to props', (resolve, reject) => { + itAsync("allows custom mapping of a result to props", (resolve, reject) => { const query: DocumentNode = gql` query thing { getThing { @@ -23,11 +23,11 @@ describe('[queries] reducer', () => { const result = { getThing: { thing: true } }; const link = mockSingleLink({ request: { query }, - result: { data: result } + result: { data: result }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); type Data = typeof result; @@ -36,7 +36,7 @@ describe('[queries] reducer', () => { let count = 0; const ContainerWithData = graphql<{}, Data, {}, ChildProps>(query, { - props: ({ data }) => ({ ...data! }) + props: ({ data }) => ({ ...data! 
}), })(({ getThing, loading }) => { count++; if (count === 1) expect(loading).toBe(true); @@ -55,60 +55,63 @@ describe('[queries] reducer', () => { waitFor(() => expect(count).toBe(2)).then(resolve, reject); }); - itAsync('allows custom mapping of a result to props that includes the passed props', (resolve, reject) => { - const query: DocumentNode = gql` - query thing { - getThing { - thing + itAsync( + "allows custom mapping of a result to props that includes the passed props", + (resolve, reject) => { + const query: DocumentNode = gql` + query thing { + getThing { + thing + } } + `; + const link = mockSingleLink({ + request: { query }, + result: { data: { getThing: { thing: true } } }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + interface Data { + getThing: { thing: boolean }; } - `; - const link = mockSingleLink({ - request: { query }, - result: { data: { getThing: { thing: true } } } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - interface Data { - getThing: { thing: boolean }; - } - interface Props { - sample: number; - } - - type FinalProps = { - showSpinner: boolean; - }; - - let count = 0; - const ContainerWithData = graphql<Props, Data, {}, FinalProps>(query, { - props: ({ data, ownProps }) => { - expect(ownProps.sample).toBe(1); - return { showSpinner: data!.loading }; - } - })(({ showSpinner }: FinalProps) => { - if (count === 0) { - expect(showSpinner).toBeTruthy(); + interface Props { + sample: number; } - count += 1; - return null; - }); - render( - <ApolloProvider client={client}> - <ContainerWithData sample={1} /> - </ApolloProvider> - ); + type FinalProps = { + showSpinner: boolean; + }; + + let count = 0; + const ContainerWithData = graphql<Props, Data, {}, FinalProps>(query, { + props: ({ data, ownProps }) => { + expect(ownProps.sample).toBe(1); + return { showSpinner: data!.loading }; + }, + })(({ showSpinner }: FinalProps) => { + if (count === 0) { + expect(showSpinner).toBeTruthy(); + } + count += 1; + return null; + }); - waitFor(() => { - expect(count).toBe(2); - }).then(resolve, reject); - }); + render( + <ApolloProvider client={client}> + <ContainerWithData sample={1} /> + </ApolloProvider> + ); + + waitFor(() => { + expect(count).toBe(2); + }).then(resolve, reject); + } + ); - itAsync('allows custom mapping of a result to props 2', (resolve, reject) => { + itAsync("allows custom mapping of a result to props 2", (resolve, reject) => { let done = false; const query: DocumentNode = gql` query thing { @@ -120,11 +123,11 @@ describe('[queries] reducer', () => { const expectedData = { getThing: { thing: true } }; const link = mockSingleLink({ request: { query }, - result: { data: expectedData } + result: { data: expectedData }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Data { @@ -136,7 +139,7 @@ describe('[queries] reducer', () => { } const withData = graphql<{}, Data, {}, FinalProps>(query, { - props: ({ data }) => ({ thingy: data!.getThing! }) + props: ({ data }) => ({ thingy: data!.getThing! 
}), }); class Container extends React.Component<FinalProps> { @@ -158,85 +161,89 @@ describe('[queries] reducer', () => { ); waitFor(() => { - expect(done).toBe(true) + expect(done).toBe(true); }).then(resolve, reject); }); - itAsync('passes the prior props to the result-props mapper', (resolve, reject) => { - const query: DocumentNode = gql` - query thing { - getThing { - thing + itAsync( + "passes the prior props to the result-props mapper", + (resolve, reject) => { + const query: DocumentNode = gql` + query thing { + getThing { + thing + } + other } - other - } - `; - const expectedData = { getThing: { thing: true }, other: false }; - const expectedDataAfterRefetch = { getThing: { thing: true }, other: true }; - const link = mockSingleLink( - { - request: { query }, - result: { data: expectedData } - }, - { - request: { query }, - result: { data: expectedDataAfterRefetch } - } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - type Data = typeof expectedData; - interface FinalProps { - wrapper: { thingy: { thing: boolean } }; - refetch: () => any; - } - - const withData = graphql<{}, Data, {}, FinalProps>(query, { - props: ({ data }, lastProps) => { - const refetch = data!.refetch!; - let wrapper = { thingy: data!.getThing! }; - - // If the current thingy is equal to the last thingy, - // reuse the wrapper (to preserve referential equality). - if (lastProps && lastProps.wrapper.thingy === wrapper.thingy) { - wrapper = lastProps!.wrapper!; + `; + const expectedData = { getThing: { thing: true }, other: false }; + const expectedDataAfterRefetch = { + getThing: { thing: true }, + other: true, + }; + const link = mockSingleLink( + { + request: { query }, + result: { data: expectedData }, + }, + { + request: { query }, + result: { data: expectedDataAfterRefetch }, } - - return { wrapper, refetch }; + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + type Data = typeof expectedData; + interface FinalProps { + wrapper: { thingy: { thing: boolean } }; + refetch: () => any; } - }); - let counter = 0; - let done = false; - class Container extends React.Component<FinalProps> { - componentDidUpdate(nextProps: FinalProps) { - expect(this.props.wrapper.thingy).toEqual( - expectedData.getThing - ); - if (counter === 1) { - expect(this.props.wrapper).toEqual(nextProps.wrapper); - done = true; - } else { - counter++; - this.props.refetch(); + const withData = graphql<{}, Data, {}, FinalProps>(query, { + props: ({ data }, lastProps) => { + const refetch = data!.refetch!; + let wrapper = { thingy: data!.getThing! }; + + // If the current thingy is equal to the last thingy, + // reuse the wrapper (to preserve referential equality). 
+ if (lastProps && lastProps.wrapper.thingy === wrapper.thingy) { + wrapper = lastProps!.wrapper!; + } + + return { wrapper, refetch }; + }, + }); + + let counter = 0; + let done = false; + class Container extends React.Component<FinalProps> { + componentDidUpdate(nextProps: FinalProps) { + expect(this.props.wrapper.thingy).toEqual(expectedData.getThing); + if (counter === 1) { + expect(this.props.wrapper).toEqual(nextProps.wrapper); + done = true; + } else { + counter++; + this.props.refetch(); + } + } + render() { + return null; } } - render() { - return null; - } - } - const ContainerWithData = withData(Container); + const ContainerWithData = withData(Container); - render( - <ApolloProvider client={client}> - <ContainerWithData /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <ContainerWithData /> + </ApolloProvider> + ); - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + } + ); }); diff --git a/src/react/hoc/__tests__/queries/skip.test.tsx b/src/react/hoc/__tests__/queries/skip.test.tsx --- a/src/react/hoc/__tests__/queries/skip.test.tsx +++ b/src/react/hoc/__tests__/queries/skip.test.tsx @@ -1,755 +1,772 @@ -import React from 'react'; -import { render, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloLink } from '../../../../link/core'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; - -const IS_REACT_18 = React.version.startsWith('18'); - -describe('[queries] skip', () => { - itAsync('allows you to skip a query without running it', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name +import React from "react"; +import { render, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { ApolloLink } from "../../../../link/core"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; + +describe("[queries] skip", () => { + itAsync( + "allows you to skip a query without running it", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const link = mockSingleLink({ - request: { query }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - interface Props { - skip: boolean; - } - - let queryExecuted = false; - const Container = graphql<Props>(query, { - skip: ({ skip }) => skip - })( - class extends React.Component<ChildProps<Props>> { - componentDidUpdate() { - queryExecuted = true; - } - render() { - expect(this.props.data).toBeUndefined(); - return null; - } - } - ); - - render( - <ApolloProvider client={client}> - <Container skip={true} /> - </ApolloProvider> - ); - - let done = false; - setTimeout(() => { - if (!queryExecuted) { 
- done = true; - return; - } - reject(new Error('query ran even though skip present')); - }, 25); - - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); - - itAsync('continues to not subscribe to a skipped query when props change', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + interface Props { + skip: boolean; + } + + let queryExecuted = false; + const Container = graphql<Props>(query, { + skip: ({ skip }) => skip, + })( + class extends React.Component<ChildProps<Props>> { + componentDidUpdate() { + queryExecuted = true; + } + render() { + expect(this.props.data).toBeUndefined(); + return null; } } - } - `; + ); - const link = new ApolloLink((o, f) => { - reject(new Error('query ran even though skip present')); - return f ? f(o) : null; - }).concat(mockSingleLink()); - // const oldQuery = link.query; - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - interface Props { - foo: number; - } + render( + <ApolloProvider client={client}> + <Container skip={true} /> + </ApolloProvider> + ); - let done = false; - const Container = graphql<Props>(query, { skip: true })( - class extends React.Component<ChildProps<Props>> { - componentDidUpdate() { + let done = false; + setTimeout(() => { + if (!queryExecuted) { done = true; + return; } - render() { - return null; - } - } - ); - - class Parent extends React.Component<{}, { foo: number }> { - state = { foo: 42 }; + reject(new Error("query ran even though skip present")); + }, 25); - componentDidMount() { - this.setState({ foo: 43 }); - } - render() { - return <Container foo={this.state.foo} />; - } + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); } - - render( - <ApolloProvider client={client}> - <Parent /> - </ApolloProvider> - ); - - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); - - itAsync('supports using props for skipping which are used in options', (resolve, reject) => { - const query: DocumentNode = gql` - query people($id: ID!) { - allPeople(first: $id) { - people { - id + ); + + itAsync( + "continues to not subscribe to a skipped query when props change", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - - const data = { - allPeople: { people: { id: 1 } } - }; - - type Data = typeof data; + `; - const variables = { id: 1 }; - type Vars = typeof variables; + const link = new ApolloLink((o, f) => { + reject(new Error("query ran even though skip present")); + return f ? 
f(o) : null; + }).concat(mockSingleLink()); + // const oldQuery = link.query; + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const link = mockSingleLink({ - request: { query, variables }, - result: { data } - }); + interface Props { + foo: number; + } - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + let done = false; + const Container = graphql<Props>(query, { skip: true })( + class extends React.Component<ChildProps<Props>> { + componentDidUpdate() { + done = true; + } + render() { + return null; + } + } + ); - let count = 0; - let renderCount = 0; + class Parent extends React.Component<{}, { foo: number }> { + state = { foo: 42 }; - interface Props { - person: { id: number } | null; - } - const Container = graphql<Props, Data, Vars>(query, { - skip: ({ person }) => !person, - options: ({ person }) => ({ - variables: { - id: person!.id - } - }) - })( - class extends React.Component<ChildProps<Props, Data, Vars>> { - componentDidUpdate() { - try { - const { props } = this; - switch (++count) { - case 1: - expect(props.data!.loading).toBe(true); - break; - case 2: - expect(props.data!.loading).toBe(false); - expect(props.data!.allPeople).toEqual(data.allPeople); - expect(renderCount).toBe(3); - break; - default: - reject(`Too many renders (${count})`); - } - } catch (err) { - reject(err); - } + componentDidMount() { + this.setState({ foo: 43 }); } render() { - renderCount++; - return null; + return <Container foo={this.state.foo} />; } } - ); - class Parent extends React.Component< - {}, - { person: { id: number } | null } - > { - state = { person: null }; + render( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); - componentDidMount() { - this.setState({ person: { id: 1 } }); - } - render() { - return <Container person={this.state.person} />; - } + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); } - - render( - <ApolloProvider client={client}> - <Parent /> - </ApolloProvider> - ); - - waitFor(() => expect(count).toBe(2)).then(resolve, reject); - }); - - itAsync("doesn't run options or props when skipped, including option.client", (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + ); + + itAsync( + "supports using props for skipping which are used in options", + (resolve, reject) => { + const query: DocumentNode = gql` + query people($id: ID!) 
{ + allPeople(first: $id) { + people { + id + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const link = mockSingleLink({ - request: { query }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - let queryExecuted = false; - let optionsCalled = false; - - interface Props { - skip: boolean; - pollInterval?: number; - } + `; + + const data = { + allPeople: { people: { id: 1 } }, + }; + + type Data = typeof data; + + const variables = { id: 1 }; + type Vars = typeof variables; + + const link = mockSingleLink({ + request: { query, variables }, + result: { data }, + }); + + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let count = 0; + let renderCount = 0; + + interface Props { + person: { id: number } | null; + } + const Container = graphql<Props, Data, Vars>(query, { + skip: ({ person }) => !person, + options: ({ person }) => ({ + variables: { + id: person!.id, + }, + }), + })( + class extends React.Component<ChildProps<Props, Data, Vars>> { + componentDidUpdate() { + try { + const { props } = this; + switch (++count) { + case 1: + expect(props.data!.loading).toBe(true); + break; + case 2: + expect(props.data!.loading).toBe(false); + expect(props.data!.allPeople).toEqual(data.allPeople); + expect(renderCount).toBe(3); + break; + default: + reject(`Too many renders (${count})`); + } + } catch (err) { + reject(err); + } + } + render() { + renderCount++; + return null; + } + } + ); - interface FinalProps { - pollInterval: number; - data?: {}; - } + class Parent extends React.Component< + {}, + { person: { id: number } | null } + > { + state = { person: null }; - const Container = graphql<Props, {}, {}, FinalProps>(query, { - skip: ({ skip }) => skip, - options: props => { - optionsCalled = true; - return { - pollInterval: props.pollInterval - }; - }, - props: props => ({ - // intentionally incorrect - pollInterval: (props as any).willThrowIfAccesed.pollInterval - }) - })( - class extends React.Component<FinalProps & Props> { - componentDidUpdate() { - queryExecuted = true; + componentDidMount() { + this.setState({ person: { id: 1 } }); } render() { - expect(this.props.data).toBeFalsy(); - return null; + return <Container person={this.state.person} />; } } - ); - - render( - <ApolloProvider client={client}> - <Container skip={true} /> - </ApolloProvider> - ); - let done = false; - setTimeout(() => { - if (!queryExecuted) { - done = true; - return; - } - if (optionsCalled) { - reject(new Error('options ran even though skip present')); - return; - } - reject(new Error('query ran even though skip present')); - }, 25); - - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); + render( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); - itAsync("doesn't run options or props when skipped even if the component updates", (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + waitFor(() => expect(count).toBe(2)).then(resolve, reject); + } + ); + + itAsync( + "doesn't run options or props when skipped, including option.client", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - - const link = mockSingleLink({ - request: { query }, - result: {} - }); - - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - let 
queryWasSkipped = true; + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let queryExecuted = false; + let optionsCalled = false; + + interface Props { + skip: boolean; + pollInterval?: number; + } + + interface FinalProps { + pollInterval: number; + data?: {}; + } + + const Container = graphql<Props, {}, {}, FinalProps>(query, { + skip: ({ skip }) => skip, + options: (props) => { + optionsCalled = true; + return { + pollInterval: props.pollInterval, + }; + }, + props: (props) => ({ + // intentionally incorrect + pollInterval: (props as any).willThrowIfAccesed.pollInterval, + }), + })( + class extends React.Component<FinalProps & Props> { + componentDidUpdate() { + queryExecuted = true; + } + render() { + expect(this.props.data).toBeFalsy(); + return null; + } + } + ); - interface Props { - foo: string; - } + render( + <ApolloProvider client={client}> + <Container skip={true} /> + </ApolloProvider> + ); - let done = false; - const Container = graphql<Props>(query, { - skip: true, - options: () => { - queryWasSkipped = false; - return {}; - }, - props: () => { - queryWasSkipped = false; - return {}; - } - })( - class extends React.Component<ChildProps<Props>> { - componentDidUpdate() { - expect(queryWasSkipped).toBeTruthy(); + let done = false; + setTimeout(() => { + if (!queryExecuted) { done = true; + return; } - render() { - return null; + if (optionsCalled) { + reject(new Error("options ran even though skip present")); + return; } - } - ); + reject(new Error("query ran even though skip present")); + }, 25); - class Parent extends React.Component<{}, { foo: string }> { - state = { foo: 'bar' }; - componentDidMount() { - this.setState({ foo: 'baz' }); - } - render() { - return <Container foo={this.state.foo} />; - } + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); } - - render( - <ApolloProvider client={client}> - <Parent /> - </ApolloProvider> - ); - - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); - - itAsync('allows you to skip a query without running it (alternate syntax)', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + ); + + itAsync( + "doesn't run options or props when skipped even if the component updates", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const link = mockSingleLink({ - request: { query }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + `; - let queryExecuted = false; - const Container = graphql(query, { skip: true })( - class extends React.Component<ChildProps> { - componentDidUpdate() { - queryExecuted = true; - } - render() { - expect(this.props.data).toBeFalsy(); - return null; - } - } - ); + const link = mockSingleLink({ + request: { query }, + result: {}, + }); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let done = false; - setTimeout(() => { - if (!queryExecuted) { - done = true; - return; - } - reject(new Error('query ran even though skip present')); - }, 25); + let queryWasSkipped 
= true; - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); + interface Props { + foo: string; + } - // test the case of skip:false -> skip:true -> skip:false to make sure things - // are cleaned up properly - itAsync('allows you to skip then unskip a query with top-level syntax', (resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + let done = false; + const Container = graphql<Props>(query, { + skip: true, + options: () => { + queryWasSkipped = false; + return {}; + }, + props: () => { + queryWasSkipped = false; + return {}; + }, + })( + class extends React.Component<ChildProps<Props>> { + componentDidUpdate() { + expect(queryWasSkipped).toBeTruthy(); + done = true; + } + render() { + return null; } } - } - `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const link = mockSingleLink({ - request: { query }, - result: { data } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - let hasSkipped = false; + ); - interface Props { - skip: boolean; - setSkip: (skip: boolean) => void; - } - - const Container = graphql<Props>(query, { skip: ({ skip }) => skip })( - class extends React.Component<ChildProps<Props>> { - componentDidUpdate(prevProps: ChildProps<Props>) { - if (this.props.skip) { - hasSkipped = true; - prevProps.setSkip(false); - } else { - if (!hasSkipped) { - prevProps.setSkip(true); - } - } + class Parent extends React.Component<{}, { foo: string }> { + state = { foo: "bar" }; + componentDidMount() { + this.setState({ foo: "baz" }); } render() { - return null; + return <Container foo={this.state.foo} />; } } - ); - class Parent extends React.Component<any, any> { - state = { skip: false }; - render() { - return ( - <Container - skip={this.state.skip} - setSkip={skip => this.setState({ skip })} - /> - ); - } - } - - render( - <ApolloProvider client={client}> - <Parent /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); - waitFor(() => expect(hasSkipped).toBeTruthy()).then(resolve, reject); - }); - - itAsync('allows you to skip then unskip a query with new options (top-level syntax)', (resolve, reject) => { - const query: DocumentNode = gql` - query people($first: Int) { - allPeople(first: $first) { - people { - name + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + } + ); + + itAsync( + "allows you to skip a query without running it (alternate syntax)", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const dataOne = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const dataTwo = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let queryExecuted = false; + const Container = graphql(query, { skip: true })( + class extends React.Component<ChildProps> { + componentDidUpdate() { + queryExecuted = true; + } + render() { + expect(this.props.data).toBeFalsy(); + return null; + } + } + ); - type Data = typeof dataOne; - type Vars = { first: number }; + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - const link = mockSingleLink( - { - request: { query, variables: 
{ first: 1 } }, - result: { data: dataOne } - }, - { - request: { query, variables: { first: 2 } }, - result: { data: dataTwo } - }, - { - request: { query, variables: { first: 2 } }, - result: { data: dataTwo } - } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); - - let hasSkipped = false; + let done = false; + setTimeout(() => { + if (!queryExecuted) { + done = true; + return; + } + reject(new Error("query ran even though skip present")); + }, 25); - interface Props { - skip: boolean; - first: number; - setState: <K extends 'skip' | 'first'>( - state: Pick<{ skip: boolean; first: number }, K> - ) => void; + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); } + ); - let done = false; - const Container = graphql<Props, Data, Vars>(query, { - skip: ({ skip }) => skip - })( - class extends React.Component<ChildProps<Props, Data, Vars>> { - componentDidUpdate(prevProps: ChildProps<Props, Data, Vars>) { - if (this.props.skip) { - hasSkipped = true; - // change back to skip: false, with a different variable - prevProps.setState({ skip: false, first: 2 }); - } else { - if (hasSkipped) { - if (!this.props.data!.loading) { - expect(this.props.data!.allPeople).toEqual( - dataTwo.allPeople - ); - done = true; - } + // test the case of skip:false -> skip:true -> skip:false to make sure things + // are cleaned up properly + itAsync( + "allows you to skip then unskip a query with top-level syntax", + (resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } + } + } + `; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const link = mockSingleLink({ + request: { query }, + result: { data }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let hasSkipped = false; + + interface Props { + skip: boolean; + setSkip: (skip: boolean) => void; + } + + const Container = graphql<Props>(query, { skip: ({ skip }) => skip })( + class extends React.Component<ChildProps<Props>> { + componentDidUpdate(prevProps: ChildProps<Props>) { + if (this.props.skip) { + hasSkipped = true; + prevProps.setSkip(false); } else { - expect(this.props.data!.allPeople).toEqual( - dataOne.allPeople - ); - prevProps.setState({ skip: true }); + if (!hasSkipped) { + prevProps.setSkip(true); + } } } + render() { + return null; + } } + ); + + class Parent extends React.Component<any, any> { + state = { skip: false }; render() { - return null; + return ( + <Container + skip={this.state.skip} + setSkip={(skip) => this.setState({ skip })} + /> + ); } } - ); - class Parent extends React.Component<{}, { skip: boolean; first: number }> { - state = { skip: false, first: 1 }; - render() { - return ( - <Container - skip={this.state.skip} - first={this.state.first} - setState={state => this.setState(state)} - /> - ); - } - } - - render( - <ApolloProvider client={client}> - <Parent /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); - }); - - it('allows you to skip then unskip a query with opts syntax', () => new Promise((resolve, reject) => { - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + waitFor(() => expect(hasSkipped).toBeTruthy()).then(resolve, reject); + } + ); + + itAsync( + "allows you to skip then unskip a query with new options (top-level syntax)", + (resolve, reject) => 
{ + const query: DocumentNode = gql` + query people($first: Int) { + allPeople(first: $first) { + people { + name + } } } - } - `; - - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const nextData = { allPeople: { people: [{ name: 'Anakin Skywalker' }] } }; - const finalData = { allPeople: { people: [{ name: 'Darth Vader' }] } }; + `; + const dataOne = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const dataTwo = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; - let ranQuery = 0; + type Data = typeof dataOne; + type Vars = { first: number }; - const link = new ApolloLink((o, f) => { - ranQuery++; - return f ? f(o) : null; - }).concat( - mockSingleLink( + const link = mockSingleLink( { - request: { query }, - result: { data }, + request: { query, variables: { first: 1 } }, + result: { data: dataOne }, }, { - request: { query }, - result: { data: nextData }, + request: { query, variables: { first: 2 } }, + result: { data: dataTwo }, }, { - request: { query }, - result: { data: finalData }, - }, - ) - ); - - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }), - queryDeduplication: false - }); + request: { query, variables: { first: 2 } }, + result: { data: dataTwo }, + } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + let hasSkipped = false; + + interface Props { + skip: boolean; + first: number; + setState: <K extends "skip" | "first">( + state: Pick<{ skip: boolean; first: number }, K> + ) => void; + } + + let done = false; + const Container = graphql<Props, Data, Vars>(query, { + skip: ({ skip }) => skip, + })( + class extends React.Component<ChildProps<Props, Data, Vars>> { + componentDidUpdate(prevProps: ChildProps<Props, Data, Vars>) { + if (this.props.skip) { + hasSkipped = true; + // change back to skip: false, with a different variable + prevProps.setState({ skip: false, first: 2 }); + } else { + if (hasSkipped) { + if (!this.props.data!.loading) { + expect(this.props.data!.allPeople).toEqual(dataTwo.allPeople); + done = true; + } + } else { + expect(this.props.data!.allPeople).toEqual(dataOne.allPeople); + prevProps.setState({ skip: true }); + } + } + } + render() { + return null; + } + } + ); - let count = 0; - const Container = graphql<any>(query, { - options: { - fetchPolicy: 'network-only', - nextFetchPolicy: 'cache-first', - notifyOnNetworkStatusChange: true - }, - skip: ({ skip }) => skip - })( - class extends React.Component<any> { + class Parent extends React.Component< + {}, + { skip: boolean; first: number } + > { + state = { skip: false, first: 1 }; render() { - expect(this.props.data?.error).toBeUndefined(); - - try { - switch (++count) { - case 1: - expect(this.props.data.loading).toBe(true); - expect(ranQuery).toBe(0); - break; - case 2: - // The first batch of data is fetched over the network, and - // verified here, followed by telling the component we want to - // skip running subsequent queries. - expect(this.props.data.loading).toBe(false); - expect(this.props.data.allPeople).toEqual(data.allPeople); - expect(ranQuery).toBe(1); - setTimeout(() => { - this.props.setSkip(true); - }, 10); - break; - case 3: - // This render is triggered after setting skip to true. Now - // let's set skip to false to re-trigger the query. 
- setTimeout(() => { - this.props.setSkip(false); - }, 10); - expect(this.props.skip).toBe(true); - expect(this.props.data).toBeUndefined(); - expect(ranQuery).toBe(1); - break; - case 4: - expect(this.props.skip).toBe(false); - expect(this.props.data!.loading).toBe(false); - expect(this.props.data.allPeople).toEqual(data.allPeople); - expect(ranQuery).toBe(2); - break; - case 5: - expect(this.props.skip).toBe(false); - expect(this.props.data!.loading).toBe(false); - expect(this.props.data.allPeople).toEqual(nextData.allPeople); - expect(ranQuery).toBe(2); - // Since the `nextFetchPolicy` was set to `cache-first`, our - // query isn't loading as it's able to find the result of the - // query directly from the cache. Let's trigger a refetch - // to manually load the next batch of data. - setTimeout(() => { - this.props.data.refetch(); - }, 10); - break; - case 6: - expect(this.props.skip).toBe(false); - if (IS_REACT_18) { + return ( + <Container + skip={this.state.skip} + first={this.state.first} + setState={(state) => this.setState(state)} + /> + ); + } + } + + render( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + + waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + } + ); + + it("allows you to skip then unskip a query with opts syntax", () => + new Promise((resolve, reject) => { + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } + } + } + `; + + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const nextData = { + allPeople: { people: [{ name: "Anakin Skywalker" }] }, + }; + const finalData = { allPeople: { people: [{ name: "Darth Vader" }] } }; + + let ranQuery = 0; + + const link = new ApolloLink((o, f) => { + ranQuery++; + return f ? f(o) : null; + }).concat( + mockSingleLink( + { + request: { query }, + result: { data }, + }, + { + request: { query }, + result: { data: nextData }, + }, + { + request: { query }, + result: { data: finalData }, + } + ) + ); + + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + queryDeduplication: false, + }); + + let count = 0; + const Container = graphql<any>(query, { + options: { + fetchPolicy: "network-only", + nextFetchPolicy: "cache-first", + notifyOnNetworkStatusChange: true, + }, + skip: ({ skip }) => skip, + })( + class extends React.Component<any> { + render() { + expect(this.props.data?.error).toBeUndefined(); + + try { + switch (++count) { + case 1: + expect(this.props.data.loading).toBe(true); + expect(ranQuery).toBe(0); + break; + case 2: + // The first batch of data is fetched over the network, and + // verified here, followed by telling the component we want to + // skip running subsequent queries. + expect(this.props.data.loading).toBe(false); + expect(this.props.data.allPeople).toEqual(data.allPeople); + expect(ranQuery).toBe(1); + setTimeout(() => { + this.props.setSkip(true); + }, 10); + break; + case 3: + // This render is triggered after setting skip to true. Now + // let's set skip to false to re-trigger the query. 
+ setTimeout(() => { + this.props.setSkip(false); + }, 10); + expect(this.props.skip).toBe(true); + expect(this.props.data).toBeUndefined(); + expect(ranQuery).toBe(1); + break; + case 4: + expect(this.props.skip).toBe(false); expect(this.props.data!.loading).toBe(false); - expect(this.props.data.allPeople).toEqual(finalData.allPeople); - } else { + expect(this.props.data.allPeople).toEqual(data.allPeople); + expect(ranQuery).toBe(2); + break; + case 5: + expect(this.props.skip).toBe(false); + expect(this.props.data!.loading).toBe(false); + expect(this.props.data.allPeople).toEqual(nextData.allPeople); + expect(ranQuery).toBe(2); + // Since the `nextFetchPolicy` was set to `cache-first`, our + // query isn't loading as it's able to find the result of the + // query directly from the cache. Let's trigger a refetch + // to manually load the next batch of data. + setTimeout(() => { + this.props.data.refetch(); + }, 10); + break; + case 6: + expect(this.props.skip).toBe(false); expect(ranQuery).toBe(3); expect(this.props.data.allPeople).toEqual(nextData.allPeople); expect(this.props.data!.loading).toBe(true); - } - break; - case 7: - // The next batch of data has loaded. - expect(this.props.skip).toBe(false); - expect(this.props.data!.loading).toBe(false); - expect(this.props.data.allPeople).toEqual(finalData.allPeople); - expect(ranQuery).toBe(3); - break; - default: - throw new Error(`too many renders (${count})`); + break; + case 7: + // The next batch of data has loaded. + expect(this.props.skip).toBe(false); + expect(this.props.data!.loading).toBe(false); + expect(this.props.data.allPeople).toEqual( + finalData.allPeople + ); + expect(ranQuery).toBe(3); + break; + default: + throw new Error(`too many renders (${count})`); + } + } catch (err) { + reject(err); } - } catch (err) { - reject(err); - } - return null; + return null; + } } - }, - ); + ); - class Parent extends React.Component<{}, { skip: boolean }> { - state = { skip: false }; - render() { - return ( - <Container - skip={this.state.skip} - setSkip={(skip: boolean) => this.setState({ skip })} - /> - ); + class Parent extends React.Component<{}, { skip: boolean }> { + state = { skip: false }; + render() { + return ( + <Container + skip={this.state.skip} + setSkip={(skip: boolean) => this.setState({ skip })} + /> + ); + } } - } - render( - <ApolloProvider client={client}> - <Parent /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); - waitFor(() => { - if (IS_REACT_18) { - expect(count).toEqual(6) - } else { - expect(count).toEqual(7) - } - }).then(resolve, reject); - })); + waitFor(() => { + expect(count).toEqual(7); + }).then(resolve, reject); + })); // This test might have value, but is currently broken (the count === 0 test // is never hit, for example, because count++ happens the first time before // componentDidUpdate is called), so we are skipping it for now. 
- it.skip('removes the injected props if skip becomes true', async () => { + it.skip("removes the injected props if skip becomes true", async () => { let count = 0; const query: DocumentNode = gql` query people($first: Int) { @@ -761,13 +778,13 @@ describe('[queries] skip', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const variables1 = { first: 1 }; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; const variables2 = { first: 2 }; - const data3 = { allPeople: { people: [{ name: 'Anakin Skywalker' }] } }; + const data3 = { allPeople: { people: [{ name: "Anakin Skywalker" }] } }; const variables3 = { first: 3 }; type Data = typeof data1; @@ -781,18 +798,17 @@ describe('[queries] skip', () => { const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const Container = graphql<Vars, Data>(query, { - skip: () => count === 1 + skip: () => count === 1, })( class extends React.Component<ChildProps<Vars, Data>> { componentDidUpdate() { const { data } = this.props; // loading is true, but data still there - if (count === 0) - expect(data!.allPeople).toEqual(data1.allPeople); + if (count === 0) expect(data!.allPeople).toEqual(data1.allPeople); if (count === 1) expect(data).toBeUndefined(); if (count === 2 && !data!.loading) { expect(data!.allPeople).toEqual(data3.allPeople); @@ -834,7 +850,7 @@ describe('[queries] skip', () => { }); }); - itAsync('allows you to unmount a skipped query', (resolve, reject) => { + itAsync("allows you to unmount a skipped query", (resolve, reject) => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -847,7 +863,7 @@ describe('[queries] skip', () => { const link = mockSingleLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props { @@ -856,7 +872,7 @@ describe('[queries] skip', () => { let done = false; const Container = graphql<Props>(query, { - skip: true + skip: true, })( class extends React.Component<ChildProps<Props>> { componentDidMount() { diff --git a/src/react/hoc/__tests__/queries/updateQuery.test.tsx b/src/react/hoc/__tests__/queries/updateQuery.test.tsx --- a/src/react/hoc/__tests__/queries/updateQuery.test.tsx +++ b/src/react/hoc/__tests__/queries/updateQuery.test.tsx @@ -1,18 +1,18 @@ -import React from 'react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; -import { render, waitFor } from '@testing-library/react'; +import React from "react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; +import { render, waitFor } from "@testing-library/react"; -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; -describe('[queries] updateQuery', () => { 
+describe("[queries] updateQuery", () => { // updateQuery - itAsync('exposes updateQuery as part of the props api', (resolve, reject) => { + itAsync("exposes updateQuery as part of the props api", (resolve, reject) => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -24,11 +24,11 @@ describe('[queries] updateQuery', () => { `; const link = mockSingleLink({ request: { query }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } + result: { data: { allPeople: { people: [{ name: "Luke Skywalker" }] } } }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let done = false; @@ -39,7 +39,9 @@ describe('[queries] updateQuery', () => { expect(data!.updateQuery).toBeTruthy(); expect(data!.updateQuery instanceof Function).toBeTruthy(); try { - data!.updateQuery(() => { done = true; }); + data!.updateQuery(() => { + done = true; + }); } catch (error) { reject(error); } @@ -59,213 +61,229 @@ describe('[queries] updateQuery', () => { waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); }); - itAsync('exposes updateQuery as part of the props api during componentWillMount', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "exposes updateQuery as part of the props api during componentWillMount", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const link = mockSingleLink({ - request: { query }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } - }); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { allPeople: { people: [{ name: "Luke Skywalker" }] } }, + }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const Container = graphql(query)( - class extends React.Component<ChildProps> { - render() { - expect(this.props.data!.updateQuery).toBeTruthy(); - expect(this.props.data!.updateQuery instanceof Function).toBeTruthy(); - done = true; - return null; + const Container = graphql(query)( + class extends React.Component<ChildProps> { + render() { + expect(this.props.data!.updateQuery).toBeTruthy(); + expect( + this.props.data!.updateQuery instanceof Function + ).toBeTruthy(); + done = true; + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true) - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); - itAsync('updateQuery throws if called before data has returned', (resolve, reject) => { - let renderCount = 0; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "updateQuery throws if called before data has returned", + (resolve, reject) => { + let renderCount = 0; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const link = mockSingleLink({ - request: { query }, - result: { data: { allPeople: { people: [{ name: 'Luke Skywalker' }] } } } - }); - const client = new ApolloClient({ - link, - 
cache: new Cache({ addTypename: false }) - }); + `; + const link = mockSingleLink({ + request: { query }, + result: { + data: { allPeople: { people: [{ name: "Luke Skywalker" }] } }, + }, + }); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - const Container = graphql(query)( - class extends React.Component<ChildProps> { - render() { - expect(this.props.data!.updateQuery).toBeTruthy(); - expect(this.props.data!.updateQuery instanceof Function).toBeTruthy(); - try { - this.props.data!.updateQuery(p => p); - } catch (e) { - // TODO: branch never hit in test - expect(e.toString()).toMatch( - /ObservableQuery with this id doesn't exist:/ - ); - } - renderCount += 1; + const Container = graphql(query)( + class extends React.Component<ChildProps> { + render() { + expect(this.props.data!.updateQuery).toBeTruthy(); + expect( + this.props.data!.updateQuery instanceof Function + ).toBeTruthy(); + try { + this.props.data!.updateQuery((p) => p); + } catch (e) { + // TODO: branch never hit in test + expect(e.toString()).toMatch( + /ObservableQuery with this id doesn't exist:/ + ); + } + renderCount += 1; - return null; + return null; + } } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(renderCount).toBe(2) - }).then(resolve, reject); - }); + waitFor(() => { + expect(renderCount).toBe(2); + }).then(resolve, reject); + } + ); - itAsync('allows updating query results after query has finished (early binding)', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "allows updating query results after query has finished (early binding)", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - type Data = typeof data1; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; - const link = mockSingleLink( - { request: { query }, result: { data: data1 } }, - { request: { query }, result: { data: data2 } } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + `; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + type Data = typeof data1; + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; + const link = mockSingleLink( + { request: { query }, result: { data: data1 } }, + { request: { query }, result: { data: data2 } } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let isUpdated = false; - const Container = graphql<{}, Data>(query)( - class extends React.Component<ChildProps<{}, Data>> { - public updateQuery: any; - componentDidUpdate() { - if (isUpdated) { - expect(this.props.data!.allPeople).toEqual( - data2.allPeople - ); - done = true; - return; - } else { - isUpdated = true; - this.updateQuery(() => { - return data2; - }); + let isUpdated = false; + const Container = graphql<{}, Data>(query)( + class extends React.Component<ChildProps<{}, Data>> { + public updateQuery: any; + componentDidUpdate() { + if (isUpdated) { + expect(this.props.data!.allPeople).toEqual(data2.allPeople); + done = true; + return; + } else { + isUpdated = true; + this.updateQuery(() => { + return 
data2; + }); + } + } + render() { + this.updateQuery = this.props.data!.updateQuery; + return null; } } - render() { - this.updateQuery = this.props.data!.updateQuery; - return null; - } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true); - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); - itAsync('allows updating query results after query has finished', (resolve, reject) => { - let done = false; - const query: DocumentNode = gql` - query people { - allPeople(first: 1) { - people { - name + itAsync( + "allows updating query results after query has finished", + (resolve, reject) => { + let done = false; + const query: DocumentNode = gql` + query people { + allPeople(first: 1) { + people { + name + } } } - } - `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - type Data = typeof data1; + `; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + type Data = typeof data1; - const data2 = { allPeople: { people: [{ name: 'Leia Skywalker' }] } }; - const link = mockSingleLink( - { request: { query }, result: { data: data1 } }, - { request: { query }, result: { data: data2 } } - ); - const client = new ApolloClient({ - link, - cache: new Cache({ addTypename: false }) - }); + const data2 = { allPeople: { people: [{ name: "Leia Skywalker" }] } }; + const link = mockSingleLink( + { request: { query }, result: { data: data1 } }, + { request: { query }, result: { data: data2 } } + ); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); - let isUpdated = false; - const Container = graphql<{}, Data>(query)( - class extends React.Component<ChildProps<{}, Data>> { - componentDidUpdate() { - if (isUpdated) { - expect(this.props.data!.allPeople).toEqual( - data2.allPeople - ); - done = true - return; - } else { - isUpdated = true; - this.props.data!.updateQuery(() => { - return data2; - }); + let isUpdated = false; + const Container = graphql<{}, Data>(query)( + class extends React.Component<ChildProps<{}, Data>> { + componentDidUpdate() { + if (isUpdated) { + expect(this.props.data!.allPeople).toEqual(data2.allPeople); + done = true; + return; + } else { + isUpdated = true; + this.props.data!.updateQuery(() => { + return data2; + }); + } + } + render() { + return null; } } - render() { - return null; - } - } - ); + ); - render( - <ApolloProvider client={client}> - <Container /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Container /> + </ApolloProvider> + ); - waitFor(() => { - expect(done).toBe(true) - }).then(resolve, reject); - }); + waitFor(() => { + expect(done).toBe(true); + }).then(resolve, reject); + } + ); }); diff --git a/src/react/hoc/__tests__/shared-operations.test.tsx b/src/react/hoc/__tests__/shared-operations.test.tsx --- a/src/react/hoc/__tests__/shared-operations.test.tsx +++ b/src/react/hoc/__tests__/shared-operations.test.tsx @@ -1,41 +1,41 @@ -import React from 'react'; -import { render, cleanup } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../core'; -import { ApolloProvider } from '../../context'; -import { InMemoryCache as Cache } from '../../../cache'; -import { ApolloLink } from '../../../link/core'; -import { itAsync, 
mockSingleLink } from '../../../testing'; -import { graphql } from '../graphql'; -import { ChildProps, DataValue } from '../types'; -import { withApollo } from '../withApollo'; +import React from "react"; +import { render, cleanup } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../core"; +import { ApolloProvider } from "../../context"; +import { InMemoryCache as Cache } from "../../../cache"; +import { ApolloLink } from "../../../link/core"; +import { itAsync, mockSingleLink } from "../../../testing"; +import { graphql } from "../graphql"; +import { ChildProps, DataValue } from "../types"; +import { withApollo } from "../withApollo"; function compose(...funcs: Function[]) { const functions = funcs.reverse(); - return function(...args: any[]) { + return function (...args: any[]) { const [firstFunction, ...restFunctions] = functions; let result = firstFunction.apply(null, args); - restFunctions.forEach(fnc => { + restFunctions.forEach((fnc) => { result = fnc.call(null, result); }); return result; }; } -describe('shared operations', () => { +describe("shared operations", () => { afterEach(cleanup); - describe('withApollo', () => { - it('passes apollo-client to props', () => { + describe("withApollo", () => { + it("passes apollo-client to props", () => { const client = new ApolloClient({ link: new ApolloLink((o, f) => (f ? f(o) : null)), - cache: new Cache() + cache: new Cache(), }); @withApollo - class ContainerWithData extends React.Component<any > { + class ContainerWithData extends React.Component<any> { render(): React.ReactNode { expect(this.props.client).toEqual(client); return null; @@ -50,7 +50,7 @@ describe('shared operations', () => { }); }); - it('binds two queries to props', () => { + it("binds two queries to props", () => { const peopleQuery: DocumentNode = gql` query people { allPeople(first: 1) { @@ -60,7 +60,7 @@ describe('shared operations', () => { } } `; - const peopleData = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const peopleData = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; interface PeopleData { allPeople: { people: [{ name: string }] }; } @@ -74,7 +74,7 @@ describe('shared operations', () => { } } `; - const shipsData = { allships: { ships: [{ name: 'Tie Fighter' }] } }; + const shipsData = { allships: { ships: [{ name: "Tie Fighter" }] } }; interface ShipsData { allShips: { ships: [{ name: string }] }; } @@ -85,7 +85,7 @@ describe('shared operations', () => { ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface PeopleChildProps { @@ -97,7 +97,7 @@ describe('shared operations', () => { const withPeople: any = graphql<{}, PeopleData, {}, PeopleChildProps>( peopleQuery, { - name: 'people' + name: "people", } ); @@ -107,7 +107,7 @@ describe('shared operations', () => { const withShips: any = graphql<{}, ShipsData, {}, ShipsChildProps>( shipsQuery, { - name: 'ships' + name: "ships", } ); @@ -134,7 +134,7 @@ describe('shared operations', () => { unmount(); }); - it('binds two queries to props with different syntax', () => { + it("binds two queries to props with different syntax", () => { const peopleQuery: DocumentNode = gql` query people { allPeople(first: 1) { @@ -144,7 +144,7 @@ describe('shared operations', () => { } } `; - const peopleData = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const peopleData = { allPeople: { people: [{ name: 
"Luke Skywalker" }] } }; interface PeopleData { allPeople: { people: [{ name: string }] }; } @@ -157,7 +157,7 @@ describe('shared operations', () => { } } `; - const shipsData = { allships: { ships: [{ name: 'Tie Fighter' }] } }; + const shipsData = { allships: { ships: [{ name: "Tie Fighter" }] } }; interface ShipsData { allShips: { ships: [{ name: string }] }; } @@ -168,7 +168,7 @@ describe('shared operations', () => { ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface PeopleChildProps { @@ -178,7 +178,7 @@ describe('shared operations', () => { const withPeople = graphql<{}, PeopleData, {}, PeopleChildProps>( peopleQuery, { - name: 'people' + name: "people", } ); @@ -191,7 +191,7 @@ describe('shared operations', () => { {}, ShipsAndPeopleChildProps >(shipsQuery, { - name: 'ships' + name: "ships", }); const ContainerWithData = withPeople( @@ -216,7 +216,7 @@ describe('shared operations', () => { unmount(); }); - it('binds two operations to props', () => { + it("binds two operations to props", () => { const peopleQuery: DocumentNode = gql` query people { allPeople(first: 1) { @@ -226,7 +226,7 @@ describe('shared operations', () => { } } `; - const peopleData = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const peopleData = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; const peopleMutation: DocumentNode = gql` mutation addPerson { @@ -238,23 +238,23 @@ describe('shared operations', () => { } `; const peopleMutationData = { - allPeople: { people: [{ name: 'Leia Skywalker' }] } + allPeople: { people: [{ name: "Leia Skywalker" }] }, }; const link = mockSingleLink( { request: { query: peopleQuery }, result: { data: peopleData } }, { request: { query: peopleMutation }, - result: { data: peopleMutationData } + result: { data: peopleMutationData }, } ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); - const withPeople = graphql(peopleQuery, { name: 'people' }); - const withPeopleMutation = graphql(peopleMutation, { name: 'addPerson' }); + const withPeople = graphql(peopleQuery, { name: "people" }); + const withPeopleMutation = graphql(peopleMutation, { name: "addPerson" }); const ContainerWithData = withPeople( withPeopleMutation( @@ -281,7 +281,7 @@ describe('shared operations', () => { unmount(); }); - itAsync('allows options to take an object', (resolve, reject) => { + itAsync("allows options to take an object", (resolve, reject) => { const query: DocumentNode = gql` query people { allPeople(first: 1) { @@ -291,16 +291,16 @@ describe('shared operations', () => { } } `; - const data = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; + const data = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; type Data = typeof data; const link = mockSingleLink({ request: { query }, - result: { data } + result: { data }, }); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let queryExecuted = false; @@ -327,12 +327,12 @@ describe('shared operations', () => { resolve(); return; } - reject(new Error('query ran even though skip present')); + reject(new Error("query ran even though skip present")); }, 25); }); - describe('compose', () => { - it('binds two queries to props with different syntax', () => { + describe("compose", () => { + it("binds two queries to props with different syntax", () => { const peopleQuery: 
DocumentNode = gql` query people { allPeople(first: 1) { @@ -343,7 +343,7 @@ describe('shared operations', () => { } `; const peopleData = { - allPeople: { people: [{ name: 'Luke Skywalker' }] } + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; type PeopleData = typeof peopleData; @@ -357,7 +357,7 @@ describe('shared operations', () => { } } `; - const shipsData = { allships: { ships: [{ name: 'Tie Fighter' }] } }; + const shipsData = { allships: { ships: [{ name: "Tie Fighter" }] } }; type ShipsData = typeof shipsData; @@ -367,7 +367,7 @@ describe('shared operations', () => { ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface PeopleChildProps { @@ -381,12 +381,12 @@ describe('shared operations', () => { const enhanced = compose( graphql<{}, PeopleData, {}, PeopleChildProps>(peopleQuery, { - name: 'people' + name: "people", }), graphql<PeopleChildProps, ShipsData, {}, ShipsAndPeopleChildProps>( shipsQuery, { - name: 'ships' + name: "ships", } ) ); diff --git a/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx b/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx --- a/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx +++ b/src/react/hoc/__tests__/ssr/getDataFromTree.test.tsx @@ -1,21 +1,21 @@ /** @jest-environment node */ -import React from 'react'; -import PropTypes from 'prop-types'; -import ReactDOM from 'react-dom/server'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { itAsync, mockSingleLink } from '../../../../testing'; -import { Query } from '../../../components'; -import { getDataFromTree, getMarkupFromTree } from '../../../ssr'; -import { graphql } from '../../graphql'; -import { ChildProps, DataValue } from '../../types'; - -describe('SSR', () => { - describe('`getDataFromTree`', () => { +import React from "react"; +import PropTypes from "prop-types"; +import ReactDOM from "react-dom/server"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { itAsync, mockSingleLink } from "../../../../testing"; +import { Query } from "../../../components"; +import { getDataFromTree, getMarkupFromTree } from "../../../ssr"; +import { graphql } from "../../graphql"; +import { ChildProps, DataValue } from "../../types"; + +describe("SSR", () => { + describe("`getDataFromTree`", () => { const consoleWarn = console.warn; beforeAll(() => { console.warn = () => null; @@ -25,7 +25,7 @@ describe('SSR', () => { console.warn = consoleWarn; }); - it('should run through all of the queries that want SSR', async () => { + it("should run through all of the queries that want SSR", async () => { const query = gql` { currentUser { @@ -33,14 +33,14 @@ describe('SSR', () => { } } `; - const data1 = { currentUser: { firstName: 'James' } }; + const data1 = { currentUser: { firstName: "James" } }; const link = mockSingleLink({ request: { query }, result: { data: data1 }, }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -49,15 +49,15 @@ describe('SSR', () => { firstName: string; }; } - const WrappedElement = 
graphql<Props, Data>( - query - )(({ data }: ChildProps<Props, Data>) => ( - <div> - {!data || data.loading || !data.currentUser - ? 'loading' - : data.currentUser.firstName} - </div> - )); + const WrappedElement = graphql<Props, Data>(query)( + ({ data }: ChildProps<Props, Data>) => ( + <div> + {!data || data.loading || !data.currentUser + ? "loading" + : data.currentUser.firstName} + </div> + ) + ); const app = ( <ApolloProvider client={apolloClient}> @@ -65,19 +65,19 @@ describe('SSR', () => { </ApolloProvider> ); - await getDataFromTree(app).then(markup => { + await getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); await getMarkupFromTree({ tree: app, - renderFunction: ReactDOM.renderToString - }).then(markup => { + renderFunction: ReactDOM.renderToString, + }).then((markup) => { expect(markup).toMatch(/James/); }); }); - it('should allow network-only fetchPolicy as an option and still render prefetched data', () => { + it("should allow network-only fetchPolicy as an option and still render prefetched data", () => { const query = gql` { currentUser { @@ -87,12 +87,12 @@ describe('SSR', () => { `; const link = mockSingleLink({ request: { query }, - result: { data: { currentUser: { firstName: 'James' } } }, + result: { data: { currentUser: { firstName: "James" } } }, }); const apolloClient = new ApolloClient({ link, cache: new Cache({ addTypename: false }), - ssrMode: true + ssrMode: true, }); interface Props {} @@ -102,11 +102,11 @@ describe('SSR', () => { }; } const WrappedElement = graphql<Props, Data>(query, { - options: { fetchPolicy: 'network-only' } + options: { fetchPolicy: "network-only" }, })(({ data }: ChildProps<Props, Data>) => ( <div> {!data || data.loading || !data.currentUser - ? 'loading' + ? "loading" : data.currentUser.firstName} </div> )); @@ -117,12 +117,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - it('should allow cache-and-network fetchPolicy as an option and still render prefetched data', () => { + it("should allow cache-and-network fetchPolicy as an option and still render prefetched data", () => { const query = gql` { currentUser { @@ -132,11 +132,11 @@ describe('SSR', () => { `; const link = mockSingleLink({ request: { query }, - result: { data: { currentUser: { firstName: 'James' } } } + result: { data: { currentUser: { firstName: "James" } } }, }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -146,10 +146,10 @@ describe('SSR', () => { }; } const WrappedElement = graphql<Props, Data>(query, { - options: { fetchPolicy: 'cache-and-network' } + options: { fetchPolicy: "cache-and-network" }, })(({ data }: ChildProps<Props, Data>) => ( <div> - {data && data.currentUser ? data.currentUser.firstName : 'loading'} + {data && data.currentUser ? 
data.currentUser.firstName : "loading"} </div> )); @@ -159,12 +159,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - it('should pick up queries deep in the render tree', () => { + it("should pick up queries deep in the render tree", () => { const query = gql` { currentUser { @@ -174,11 +174,11 @@ describe('SSR', () => { `; const link = mockSingleLink({ request: { query }, - result: { data: { currentUser: { firstName: 'James' } } }, + result: { data: { currentUser: { firstName: "James" } } }, }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -188,15 +188,15 @@ describe('SSR', () => { }; } - const WrappedElement = graphql<Props, Data>( - query - )(({ data }: ChildProps<Props, Data>) => ( - <div> - {!data || data.loading || !data.currentUser - ? 'loading' - : data.currentUser.firstName} - </div> - )); + const WrappedElement = graphql<Props, Data>(query)( + ({ data }: ChildProps<Props, Data>) => ( + <div> + {!data || data.loading || !data.currentUser + ? "loading" + : data.currentUser.firstName} + </div> + ) + ); const Page = () => ( <div> @@ -213,12 +213,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - it('should handle nested queries that depend on each other', () => { + it("should handle nested queries that depend on each other", () => { const idQuery: DocumentNode = gql` { currentUser { @@ -226,7 +226,7 @@ describe('SSR', () => { } } `; - const idData = { currentUser: { id: '1234' } }; + const idData = { currentUser: { id: "1234" } }; const userQuery: DocumentNode = gql` query getUser($id: String) { user(id: $id) { @@ -234,8 +234,8 @@ describe('SSR', () => { } } `; - const variables = { id: '1234' }; - const userData = { user: { firstName: 'James' } }; + const variables = { id: "1234" }; + const userData = { user: { firstName: "James" } }; const link = mockSingleLink( { request: { query: idQuery }, result: { data: idData } }, { @@ -245,7 +245,7 @@ describe('SSR', () => { ); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -275,15 +275,15 @@ describe('SSR', () => { >(userQuery, { skip: ({ data }) => data!.loading, options: ({ data }) => ({ - variables: { id: data!.currentUser!.id } - }) + variables: { id: data!.currentUser!.id }, + }), }); - const Component: React.FunctionComponent<React.PropsWithChildren<React.PropsWithChildren<any>>> = ({ - data - }) => ( + const Component: React.FunctionComponent< + React.PropsWithChildren<React.PropsWithChildren<any>> + > = ({ data }) => ( <div> {!data || data.loading || !data.user - ? 'loading' + ? 
"loading" : data.user.firstName} </div> ); @@ -296,12 +296,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - it.skip('should return the first of multiple errors thrown by nested wrapped components', () => { + it.skip("should return the first of multiple errors thrown by nested wrapped components", () => { const lastNameQuery = gql` { currentUser { @@ -328,9 +328,9 @@ describe('SSR', () => { result: { data: { currentUser: { - lastName: 'Tester' + lastName: "Tester", }, - } + }, }, }, { @@ -338,15 +338,15 @@ describe('SSR', () => { result: { data: { currentUser: { - firstName: 'James' + firstName: "James", }, - } + }, }, } ); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -354,19 +354,19 @@ describe('SSR', () => { type WithLastNameProps = ChildProps<Props, LastNameData>; const withLastName = graphql<Props, LastNameData>(lastNameQuery); - const fooError = new Error('foo'); + const fooError = new Error("foo"); const BorkedComponent = () => { throw fooError; }; const WrappedBorkedComponent = withLastName(BorkedComponent); - const ContainerComponent: React.FunctionComponent<React.PropsWithChildren<React.PropsWithChildren<WithLastNameProps>>> = ({ - data - }) => ( + const ContainerComponent: React.FunctionComponent< + React.PropsWithChildren<React.PropsWithChildren<WithLastNameProps>> + > = ({ data }) => ( <div> {!data || data.loading || !data.currentUser - ? 'loading' + ? "loading" : data.currentUser.lastName} <WrappedBorkedComponent /> <WrappedBorkedComponent /> @@ -385,16 +385,16 @@ describe('SSR', () => { return getDataFromTree(app).then( () => { - throw new Error('Should have thrown an error'); + throw new Error("Should have thrown an error"); }, - e => { - expect(e.toString()).toEqual('Error: foo'); + (e) => { + expect(e.toString()).toEqual("Error: foo"); expect(e).toBe(fooError); } ); }); - it('should handle errors thrown by queries', () => { + it("should handle errors thrown by queries", () => { const query = gql` { currentUser { @@ -404,11 +404,11 @@ describe('SSR', () => { `; const link = mockSingleLink({ request: { query }, - error: new Error('Failed to fetch'), + error: new Error("Failed to fetch"), }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -417,11 +417,13 @@ describe('SSR', () => { firstName: string; }; } - const WrappedElement = graphql<Props, Data>( - query - )(({ data }: ChildProps<Props, Data>) => ( - <div>{!data || data.loading ? 'loading' : data.error?.toString()}</div> - )); + const WrappedElement = graphql<Props, Data>(query)( + ({ data }: ChildProps<Props, Data>) => ( + <div> + {!data || data.loading ? 
"loading" : data.error?.toString()} + </div> + ) + ); const Page = () => ( <div> @@ -438,13 +440,13 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).catch(e => { + return getDataFromTree(app).catch((e) => { expect(e).toBeTruthy(); expect(e.toString()).toMatch(/Failed to fetch/); }); }); - it('should correctly skip queries (deprecated)', () => { + it("should correctly skip queries (deprecated)", () => { const query = gql` { currentUser { @@ -454,11 +456,11 @@ describe('SSR', () => { `; const link = mockSingleLink({ request: { query }, - result: { data: { currentUser: { firstName: 'James' } } }, + result: { data: { currentUser: { firstName: "James" } } }, }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -468,9 +470,9 @@ describe('SSR', () => { }; } const WrappedElement = graphql<Props, Data>(query, { - skip: true + skip: true, })(({ data }: ChildProps<Props, Data>) => ( - <div>{!data ? 'skipped' : 'dang'}</div> + <div>{!data ? "skipped" : "dang"}</div> )); const app = ( @@ -479,12 +481,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/skipped/); }); }); - it('should use the correct default props for a query', () => { + it("should use the correct default props for a query", () => { const query = gql` query user($id: ID) { currentUser(id: $id) { @@ -492,8 +494,8 @@ describe('SSR', () => { } } `; - const resultData = { currentUser: { firstName: 'James' } }; - const variables = { id: '1' }; + const resultData = { currentUser: { firstName: "James" } }; + const variables = { id: "1" }; const link = mockSingleLink({ request: { query, variables }, result: { data: resultData }, @@ -502,7 +504,7 @@ describe('SSR', () => { const cache = new Cache({ addTypename: false }); const apolloClient = new ApolloClient({ link, - cache + cache, }); interface Props { @@ -516,19 +518,19 @@ describe('SSR', () => { interface Variables { id: string; } - const Element = graphql<Props, Data, Variables>( - query, - )(({ data }: ChildProps<Props, Data, Variables>) => ( - <div> - {!data || data.loading || !data.currentUser - ? 'loading' - : data.currentUser.firstName} - </div> - )); + const Element = graphql<Props, Data, Variables>(query)( + ({ data }: ChildProps<Props, Data, Variables>) => ( + <div> + {!data || data.loading || !data.currentUser + ? 
"loading" + : data.currentUser.firstName} + </div> + ) + ); const app = ( <ApolloProvider client={apolloClient}> - <Element id={'1'} /> + <Element id={"1"} /> </ApolloProvider> ); @@ -541,85 +543,88 @@ describe('SSR', () => { }); }); - itAsync('should allow for setting state in a component', (resolve, reject) => { - const query = gql` - query user($id: ID) { - currentUser(id: $id) { - firstName + itAsync( + "should allow for setting state in a component", + (resolve, reject) => { + const query = gql` + query user($id: ID) { + currentUser(id: $id) { + firstName + } } + `; + const resultData = { currentUser: { firstName: "James" } }; + const variables = { id: "1" }; + const link = mockSingleLink({ + request: { query, variables }, + result: { data: resultData }, + }); + + const cache = new Cache({ addTypename: false }); + const apolloClient = new ApolloClient({ + link, + cache, + }); + + interface Props { + id: string; } - `; - const resultData = { currentUser: { firstName: 'James' } }; - const variables = { id: '1' }; - const link = mockSingleLink({ - request: { query, variables }, - result: { data: resultData }, - }); - - const cache = new Cache({ addTypename: false }); - const apolloClient = new ApolloClient({ - link, - cache - }); - - interface Props { - id: string; - } - interface Data { - currentUser: { - firstName: string; - }; - } - interface Variables { - id: string; - } - - class Element extends React.Component< - ChildProps<Props, Data, Variables>, - { thing: number } - > { - state = { thing: 1 }; - - static getDerivedStateFromProps() { - return { - thing: 2 + interface Data { + currentUser: { + firstName: string; }; } - - render() { - const { data } = this.props; - expect(this.state.thing).toBe(2); - return ( - <div> - {!data || data.loading || !data.currentUser - ? 'loading' - : data.currentUser.firstName} - </div> - ); + interface Variables { + id: string; } - } - const ElementWithData = graphql<Props, Data, Variables>(query)(Element); + class Element extends React.Component< + ChildProps<Props, Data, Variables>, + { thing: number } + > { + state = { thing: 1 }; - const app = ( - <ApolloProvider client={apolloClient}> - <ElementWithData id={'1'} /> - </ApolloProvider> - ); + static getDerivedStateFromProps() { + return { + thing: 2, + }; + } - getDataFromTree(app) - .then(() => { - const initialState = cache.extract(); - expect(initialState).toBeTruthy(); - expect( - initialState.ROOT_QUERY!['currentUser({"id":"1"})'] - ).toBeTruthy(); - resolve(); - }) - .catch(console.error); - }); + render() { + const { data } = this.props; + expect(this.state.thing).toBe(2); + return ( + <div> + {!data || data.loading || !data.currentUser + ? 
"loading" + : data.currentUser.firstName} + </div> + ); + } + } + + const ElementWithData = graphql<Props, Data, Variables>(query)(Element); + + const app = ( + <ApolloProvider client={apolloClient}> + <ElementWithData id={"1"} /> + </ApolloProvider> + ); + + getDataFromTree(app) + .then(() => { + const initialState = cache.extract(); + expect(initialState).toBeTruthy(); + expect( + initialState.ROOT_QUERY!['currentUser({"id":"1"})'] + ).toBeTruthy(); + resolve(); + }) + .catch(console.error); + } + ); - it('should correctly initialize an empty state to null', () => { + it("should correctly initialize an empty state to null", () => { class Element extends React.Component<any, any> { render() { expect(this.state).toBeNull(); @@ -630,15 +635,15 @@ describe('SSR', () => { return getDataFromTree(<Element />); }); - it('should maintain any state set in the element constructor', () => { + it("should maintain any state set in the element constructor", () => { class Element extends React.Component<{}, { foo: string }> { constructor(props: {}) { super(props); - this.state = { foo: 'bar' }; + this.state = { foo: "bar" }; } render() { - expect(this.state).toEqual({ foo: 'bar' }); + expect(this.state).toEqual({ foo: "bar" }); return null; } } @@ -646,7 +651,7 @@ describe('SSR', () => { return getDataFromTree(<Element />); }); - itAsync('should allow prepping state from props', (resolve, reject) => { + itAsync("should allow prepping state from props", (resolve, reject) => { const query = gql` query user($id: ID) { currentUser(id: $id) { @@ -654,8 +659,8 @@ describe('SSR', () => { } } `; - const resultData = { currentUser: { firstName: 'James' } }; - const variables = { id: '1' }; + const resultData = { currentUser: { firstName: "James" } }; + const variables = { id: "1" }; const link = mockSingleLink({ request: { query, variables }, result: { data: resultData }, @@ -663,8 +668,8 @@ describe('SSR', () => { const apolloClient = new ApolloClient({ link, cache: new Cache({ - addTypename: false - }) + addTypename: false, + }), }); interface Props { id: string; @@ -691,14 +696,14 @@ describe('SSR', () => { state: State = { thing: 1, userId: null, - client: null + client: null, }; static getDerivedStateFromProps(props: Props, state: State) { return { thing: state.thing + 1, userId: props.id, - client: apolloClient + client: apolloClient, }; } @@ -710,7 +715,7 @@ describe('SSR', () => { return ( <div> {!data || data.loading || !data.currentUser - ? 'loading' + ? "loading" : data.currentUser.firstName} </div> ); @@ -721,7 +726,7 @@ describe('SSR', () => { const app = ( <ApolloProvider client={apolloClient}> - <ElementWithData id={'1'} /> + <ElementWithData id={"1"} /> </ApolloProvider> ); @@ -745,8 +750,8 @@ describe('SSR', () => { } } `; - const resultData = { currentUser: { firstName: 'James' } }; - const variables = { id: '1' }; + const resultData = { currentUser: { firstName: "James" } }; + const variables = { id: "1" }; const link = mockSingleLink({ request: { query, variables }, result: { data: resultData }, @@ -755,7 +760,7 @@ describe('SSR', () => { const cache = new Cache({ addTypename: false }); const apolloClient = new ApolloClient({ link, - cache + cache, }); interface Data { @@ -777,18 +782,18 @@ describe('SSR', () => { } const Element = graphql<Props, Data, Variables>(query, { - options: props => ({ variables: props, ssr: false }) + options: (props) => ({ variables: props, ssr: false }), })(({ data }) => ( <div> {!data || data.loading || !data.currentUser - ? 'loading' + ? 
"loading" : data.currentUser.firstName} </div> )); const app = ( <ApolloProvider client={apolloClient}> - <Element id={'1'} /> + <Element id={"1"} /> </ApolloProvider> ); @@ -807,8 +812,8 @@ describe('SSR', () => { } } `; - const resultData = { currentUser: { firstName: 'James' } }; - const variables = { id: '1' }; + const resultData = { currentUser: { firstName: "James" } }; + const variables = { id: "1" }; const link = mockSingleLink({ request: { query, variables }, result: { data: resultData }, @@ -817,7 +822,7 @@ describe('SSR', () => { const cache = new Cache({ addTypename: false }); const apolloClient = new ApolloClient({ link, - cache + cache, }); interface Data { @@ -830,7 +835,7 @@ describe('SSR', () => { <Query query={query} ssr={false} variables={props}> {({ data, loading }: { data: Data; loading: boolean }) => ( <div> - {loading || !data ? 'loading' : data.currentUser!.firstName} + {loading || !data ? "loading" : data.currentUser!.firstName} </div> )} </Query> @@ -838,7 +843,7 @@ describe('SSR', () => { const app = ( <ApolloProvider client={apolloClient}> - <Element id={'1'} /> + <Element id={"1"} /> </ApolloProvider> ); @@ -849,7 +854,7 @@ describe('SSR', () => { }); }); - it('should correctly handle SSR mutations', () => { + it("should correctly handle SSR mutations", () => { const query = gql` { currentUser { @@ -857,7 +862,7 @@ describe('SSR', () => { } } `; - const data1 = { currentUser: { firstName: 'James' } }; + const data1 = { currentUser: { firstName: "James" } }; const mutation = gql` mutation { @@ -866,7 +871,7 @@ describe('SSR', () => { } } `; - const mutationData = { logRoutes: { id: 'foo' } }; + const mutationData = { logRoutes: { id: "foo" } }; const link = mockSingleLink( { request: { query }, result: { data: data1 } }, @@ -877,7 +882,7 @@ describe('SSR', () => { ); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Data { @@ -900,9 +905,9 @@ describe('SSR', () => { expect(data!.refetch).toBeTruthy(); return { refetchQuery: data!.refetch, - data: data! + data: data!, }; - } + }, } ); @@ -913,23 +918,29 @@ describe('SSR', () => { { action: (variables: {}) => Promise<any> } >(mutation, { props: ({ ownProps, mutate }: any) => { - if (ownProps.loading || typeof ownProps.loading === 'undefined') + if (ownProps.loading || typeof ownProps.loading === "undefined") return { action: () => Promise.resolve() }; expect(ownProps.refetchQuery).toBeTruthy(); return { action(variables: {}) { return mutate!({ variables }).then(() => ownProps.refetchQuery()); - } + }, }; - } + }, }); - const Element: React.FunctionComponent<React.PropsWithChildren<React.PropsWithChildren<QueryChildProps & { - action: (variables: {}) => Promise<any>; - }>>> = ({ data }) => ( + const Element: React.FunctionComponent< + React.PropsWithChildren< + React.PropsWithChildren< + QueryChildProps & { + action: (variables: {}) => Promise<any>; + } + > + > + > = ({ data }) => ( <div> {!data || data.loading || !data.currentUser - ? 'loading' + ? 
"loading" : data.currentUser.firstName} </div> ); @@ -942,12 +953,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - it('should correctly handle SSR mutations, reverse order', () => { + it("should correctly handle SSR mutations, reverse order", () => { const query = gql` { currentUser { @@ -979,16 +990,16 @@ describe('SSR', () => { const link = mockSingleLink( { request: { query }, - result: { data: { currentUser: { firstName: 'James' } } }, + result: { data: { currentUser: { firstName: "James" } } }, }, { request: { query: mutation }, - result: { data: { logRoutes: { id: 'foo' } } }, + result: { data: { logRoutes: { id: "foo" } } }, } ); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const withMutation = graphql<Props, MutationData>(mutation); @@ -999,19 +1010,21 @@ describe('SSR', () => { props: ({ ownProps, data }) => { expect(ownProps.mutate).toBeTruthy(); return { - data + data, }; - } + }, }); - const Element: React.FunctionComponent<React.PropsWithChildren<React.PropsWithChildren<ChildProps< - ChildProps<Props, MutationData>, - QueryData, - {} - >>>> = ({ data }) => ( + const Element: React.FunctionComponent< + React.PropsWithChildren< + React.PropsWithChildren< + ChildProps<ChildProps<Props, MutationData>, QueryData, {}> + > + > + > = ({ data }) => ( <div> {!data || data.loading || !data.currentUser - ? 'loading' + ? "loading" : data.currentUser.firstName} </div> ); @@ -1024,12 +1037,12 @@ describe('SSR', () => { </ApolloProvider> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); - it('should not require `ApolloProvider` to be the root component', () => { + it("should not require `ApolloProvider` to be the root component", () => { const query = gql` { currentUser { @@ -1045,27 +1058,30 @@ describe('SSR', () => { const link = mockSingleLink({ request: { query }, - result: { data: { currentUser: { firstName: 'James' } } }, + result: { data: { currentUser: { firstName: "James" } } }, }); const apolloClient = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); - const WrappedElement = graphql<{}, Data>( - query - )(({ data }: ChildProps<{}, Data>) => ( - <div> - {!data || data.loading || !data.currentUser - ? 'loading' - : data.currentUser.firstName} - </div> - )); + const WrappedElement = graphql<{}, Data>(query)( + ({ data }: ChildProps<{}, Data>) => ( + <div> + {!data || data.loading || !data.currentUser + ? 
"loading" + : data.currentUser.firstName} + </div> + ) + ); - class MyRootContainer extends React.Component<React.PropsWithChildren, { color: string }> { + class MyRootContainer extends React.Component< + React.PropsWithChildren, + { color: string } + > { constructor(props: {}) { super(props); - this.state = { color: 'purple' }; + this.state = { color: "purple" }; } getChildContext() { @@ -1078,7 +1094,7 @@ describe('SSR', () => { } (MyRootContainer as any).childContextTypes = { - color: PropTypes.string + color: PropTypes.string, }; const app = ( @@ -1089,7 +1105,7 @@ describe('SSR', () => { </MyRootContainer> ); - return getDataFromTree(app).then(markup => { + return getDataFromTree(app).then((markup) => { expect(markup).toMatch(/James/); }); }); diff --git a/src/react/hoc/__tests__/ssr/server.test.tsx b/src/react/hoc/__tests__/ssr/server.test.tsx --- a/src/react/hoc/__tests__/ssr/server.test.tsx +++ b/src/react/hoc/__tests__/ssr/server.test.tsx @@ -1,5 +1,5 @@ /** @jest-environment node */ -import React from 'react'; +import React from "react"; import { print, graphql as execute, @@ -8,134 +8,136 @@ import { GraphQLList, GraphQLString, GraphQLID, - DocumentNode -} from 'graphql'; -import gql from 'graphql-tag'; + DocumentNode, +} from "graphql"; +import gql from "graphql-tag"; -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloLink } from '../../../../link/core'; -import { Observable } from '../../../../utilities'; -import { renderToStringWithData } from '../../../ssr'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { ApolloLink } from "../../../../link/core"; +import { Observable } from "../../../../utilities"; +import { renderToStringWithData } from "../../../ssr"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; -const planetMap = new Map([['Planet:1', { id: 'Planet:1', name: 'Tatooine' }]]); +const planetMap = new Map([["Planet:1", { id: "Planet:1", name: "Tatooine" }]]); const shipMap = new Map([ [ - 'Ship:2', + "Ship:2", { - id: 'Ship:2', - name: 'CR90 corvette', - films: ['Film:4', 'Film:6', 'Film:3'] - } + id: "Ship:2", + name: "CR90 corvette", + films: ["Film:4", "Film:6", "Film:3"], + }, ], [ - 'Ship:3', + "Ship:3", { - id: 'Ship:3', - name: 'Star Destroyer', - films: ['Film:4', 'Film:5', 'Film:6'] - } - ] + id: "Ship:3", + name: "Star Destroyer", + films: ["Film:4", "Film:5", "Film:6"], + }, + ], ]); const filmMap = new Map([ - ['Film:3', { id: 'Film:3', title: 'Revenge of the Sith' }], - ['Film:4', { id: 'Film:4', title: 'A New Hope' }], - ['Film:5', { id: 'Film:5', title: 'the Empire Strikes Back' }], - ['Film:6', { id: 'Film:6', title: 'Return of the Jedi' }] + ["Film:3", { id: "Film:3", title: "Revenge of the Sith" }], + ["Film:4", { id: "Film:4", title: "A New Hope" }], + ["Film:5", { id: "Film:5", title: "the Empire Strikes Back" }], + ["Film:6", { id: "Film:6", title: "Return of the Jedi" }], ]); const PlanetType = new GraphQLObjectType({ - name: 'Planet', + name: "Planet", fields: { id: { type: GraphQLID }, - name: { type: GraphQLString } - } + name: { type: GraphQLString }, + }, }); const FilmType = new GraphQLObjectType({ - name: 'Film', + name: "Film", fields: { id: { type: GraphQLID }, - title: { 
type: GraphQLString } - } + title: { type: GraphQLString }, + }, }); const ShipType = new GraphQLObjectType({ - name: 'Ship', + name: "Ship", fields: { id: { type: GraphQLID }, name: { type: GraphQLString }, films: { type: new GraphQLList(FilmType), - resolve: ({ films }) => films.map((id: string) => filmMap.get(id)) - } - } + resolve: ({ films }) => films.map((id: string) => filmMap.get(id)), + }, + }, }); const QueryType = new GraphQLObjectType({ - name: 'Query', + name: "Query", fields: { allPlanets: { type: new GraphQLList(PlanetType), - resolve: () => Array.from(planetMap.values()) + resolve: () => Array.from(planetMap.values()), }, allShips: { type: new GraphQLList(ShipType), - resolve: () => Array.from(shipMap.values()) + resolve: () => Array.from(shipMap.values()), }, ship: { type: ShipType, args: { id: { type: GraphQLID } }, - resolve: (_, { id }) => shipMap.get(id) + resolve: (_, { id }) => shipMap.get(id), }, film: { type: FilmType, args: { id: { type: GraphQLID } }, - resolve: (_, { id }) => filmMap.get(id) - } - } + resolve: (_, { id }) => filmMap.get(id), + }, + }, }); const Schema = new GraphQLSchema({ query: QueryType }); -describe('SSR', () => { - describe('`renderToStringWithData`', () => { +describe("SSR", () => { + describe("`renderToStringWithData`", () => { // XXX break into smaller tests // XXX mock all queries - it('should work on a non trivial example', function() { + it("should work on a non trivial example", function () { const apolloClient = new ApolloClient({ - link: new ApolloLink(config => { - return new Observable(observer => { + link: new ApolloLink((config) => { + return new Observable((observer) => { execute({ schema: Schema, source: print(config.query), variableValues: config.variables, - operationName: config.operationName + operationName: config.operationName, }) - .then(result => { + .then((result) => { observer.next(result); observer.complete(); }) - .catch(e => { + .catch((e) => { observer.error(e); }); }); }), - cache: new Cache() + cache: new Cache(), }); - @graphql(gql` - query data($id: ID!) { - film(id: $id) { - title + @graphql( + gql` + query data($id: ID!) { + film(id: $id) { + title + } } - } - ` as DocumentNode) + ` as DocumentNode + ) class Film extends React.Component<any, any> { render(): React.ReactNode { const { data } = this.props; @@ -156,16 +158,18 @@ describe('SSR', () => { id: string; } - @graphql<ShipVariables, ShipData, ShipVariables>(gql` - query data($id: ID!) { - ship(id: $id) { - name - films { - id + @graphql<ShipVariables, ShipData, ShipVariables>( + gql` + query data($id: ID!) 
{ + ship(id: $id) { + name + films { + id + } } } - } - ` as DocumentNode) + ` as DocumentNode + ) class Starship extends React.Component< ChildProps<ShipVariables, ShipData, ShipVariables> > { @@ -193,13 +197,15 @@ describe('SSR', () => { allShips: { id: string }[]; } - @graphql<{}, AllShipsData>(gql` - query data { - allShips { - id + @graphql<{}, AllShipsData>( + gql` + query data { + allShips { + id + } } - } - ` as DocumentNode) + ` as DocumentNode + ) class AllShips extends React.Component<ChildProps<{}, AllShipsData>> { render(): React.ReactNode { const { data } = this.props; @@ -222,13 +228,15 @@ describe('SSR', () => { allPlanets: { name: string }[]; } - @graphql<{}, AllPlanetsData>(gql` - query data { - allPlanets { - name + @graphql<{}, AllPlanetsData>( + gql` + query data { + allPlanets { + name + } } - } - ` as DocumentNode) + ` as DocumentNode + ) class AllPlanets extends React.Component<ChildProps<{}, AllPlanetsData>> { render(): React.ReactNode { const { data } = this.props; @@ -267,7 +275,7 @@ describe('SSR', () => { </ApolloProvider> ); - return renderToStringWithData(app).then(markup => { + return renderToStringWithData(app).then((markup) => { expect(markup).toMatch(/CR90 corvette/); expect(markup).toMatch(/Return of the Jedi/); expect(markup).toMatch(/A New Hope/); diff --git a/src/react/hoc/__tests__/statics.test.tsx b/src/react/hoc/__tests__/statics.test.tsx --- a/src/react/hoc/__tests__/statics.test.tsx +++ b/src/react/hoc/__tests__/statics.test.tsx @@ -1,7 +1,7 @@ -import React from 'react'; -import gql from 'graphql-tag'; +import React from "react"; +import gql from "graphql-tag"; -import { graphql } from '../graphql'; +import { graphql } from "../graphql"; let sampleOperation = gql` { @@ -11,33 +11,33 @@ let sampleOperation = gql` } `; -describe('statics', () => { - it('should be preserved', () => { +describe("statics", () => { + it("should be preserved", () => { const ApolloContainer = graphql(sampleOperation)( class extends React.Component<any, any> { - static veryStatic = 'such global'; + static veryStatic = "such global"; } ); - expect((ApolloContainer as any).veryStatic).toBe('such global'); + expect((ApolloContainer as any).veryStatic).toBe("such global"); }); - it('exposes a debuggable displayName', () => { + it("exposes a debuggable displayName", () => { @graphql(sampleOperation) class ApolloContainer extends React.Component<any, any> {} expect((ApolloContainer as any).displayName).toBe( - 'Apollo(ApolloContainer)' + "Apollo(ApolloContainer)" ); }); - it('honors custom display names', () => { + it("honors custom display names", () => { const ApolloContainer = graphql(sampleOperation)( class extends React.Component<any, any> { - static displayName = 'Foo'; + static displayName = "Foo"; } ); - expect((ApolloContainer as any).displayName).toBe('Apollo(Foo)'); + expect((ApolloContainer as any).displayName).toBe("Apollo(Foo)"); }); }); diff --git a/src/react/hoc/__tests__/subscriptions/subscriptions.test.tsx b/src/react/hoc/__tests__/subscriptions/subscriptions.test.tsx --- a/src/react/hoc/__tests__/subscriptions/subscriptions.test.tsx +++ b/src/react/hoc/__tests__/subscriptions/subscriptions.test.tsx @@ -1,19 +1,19 @@ -import React from 'react'; -import { act, render } from '@testing-library/react'; -import gql from 'graphql-tag'; -import { DocumentNode } from 'graphql'; - -import { ApolloClient } from '../../../../core'; -import { ApolloProvider } from '../../../context'; -import { InMemoryCache as Cache } from '../../../../cache'; -import { ApolloLink } from 
'../../../../link/core'; -import { itAsync, MockSubscriptionLink } from '../../../../testing'; -import { graphql } from '../../graphql'; -import { ChildProps } from '../../types'; +import React from "react"; +import { act, render } from "@testing-library/react"; +import gql from "graphql-tag"; +import { DocumentNode } from "graphql"; + +import { ApolloClient } from "../../../../core"; +import { ApolloProvider } from "../../../context"; +import { InMemoryCache as Cache } from "../../../../cache"; +import { ApolloLink } from "../../../../link/core"; +import { itAsync, MockSubscriptionLink } from "../../../../testing"; +import { graphql } from "../../graphql"; +import { ChildProps } from "../../types"; const IS_REACT_18 = React.version.startsWith("18"); -describe('subscriptions', () => { +describe("subscriptions", () => { let error: typeof console.error; beforeEach(() => { @@ -27,16 +27,16 @@ describe('subscriptions', () => { }); const results = [ - 'James Baxley', - 'John Pinkerton', - 'Sam Claridge', - 'Ben Coleman' - ].map(name => ({ + "James Baxley", + "John Pinkerton", + "Sam Claridge", + "Ben Coleman", + ].map((name) => ({ result: { data: { user: { name } } }, - delay: 10 + delay: 10, })); - it('binds a subscription to props', () => { + it("binds a subscription to props", () => { const query: DocumentNode = gql` subscription UserInfo { user { @@ -47,7 +47,7 @@ describe('subscriptions', () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Props {} @@ -55,14 +55,14 @@ describe('subscriptions', () => { user: { name: string }; } - const ContainerWithData = graphql<Props, Data>(query)( - ({ data }: ChildProps<Props, Data>) => { - expect(data).toBeTruthy(); - expect(data!.user).toBeFalsy(); - expect(data!.loading).toBeTruthy(); - return null; - } - ); + const ContainerWithData = graphql<Props, Data>(query)(({ + data, + }: ChildProps<Props, Data>) => { + expect(data).toBeTruthy(); + expect(data!.user).toBeFalsy(); + expect(data!.loading).toBeTruthy(); + return null; + }); render( <ApolloProvider client={client}> @@ -71,7 +71,7 @@ describe('subscriptions', () => { ); }); - it('includes the variables in the props', () => { + it("includes the variables in the props", () => { const query: DocumentNode = gql` subscription UserInfo($name: String) { user(name: $name) { @@ -79,11 +79,11 @@ describe('subscriptions', () => { } } `; - const variables = { name: 'James Baxley' }; + const variables = { name: "James Baxley" }; const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); interface Variables { @@ -94,22 +94,22 @@ describe('subscriptions', () => { user: { name: string }; } - const ContainerWithData = graphql<Variables, Data>(query)( - ({ data }: ChildProps<Variables, Data>) => { - expect(data).toBeTruthy(); - expect(data!.variables).toEqual(variables); - return null; - } - ); + const ContainerWithData = graphql<Variables, Data>(query)(({ + data, + }: ChildProps<Variables, Data>) => { + expect(data).toBeTruthy(); + expect(data!.variables).toEqual(variables); + return null; + }); render( <ApolloProvider client={client}> - <ContainerWithData name={'James Baxley'} /> + <ContainerWithData name={"James Baxley"} /> </ApolloProvider> ); }); - itAsync('does not swallow children errors', (resolve, reject) => { + itAsync("does not swallow children errors", 
(resolve, reject) => { const query: DocumentNode = gql` subscription UserInfo { user { @@ -120,7 +120,7 @@ describe('subscriptions', () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let bar: any; @@ -151,7 +151,7 @@ describe('subscriptions', () => { ); }); - itAsync('executes a subscription', (resolve, reject) => { + itAsync("executes a subscription", (resolve, reject) => { jest.useFakeTimers(); const query: DocumentNode = gql` @@ -168,7 +168,7 @@ describe('subscriptions', () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; @@ -218,34 +218,34 @@ describe('subscriptions', () => { act(() => { jest.advanceTimersByTime(230); - }) + }); }); - itAsync('resubscribes to a subscription', (resolve, reject) => { + itAsync("resubscribes to a subscription", (resolve, reject) => { //we make an extra Hoc which will trigger the inner HoC to resubscribe //these are the results for the outer subscription const triggerResults = [ - '0', - 'trigger resubscribe', - '3', - '4', - '5', - '6', - '7' - ].map(trigger => ({ + "0", + "trigger resubscribe", + "3", + "4", + "5", + "6", + "7", + ].map((trigger) => ({ result: { data: { trigger } }, - delay: 10 + delay: 10, })); //These are the results from the resubscription const results3 = [ - 'NewUser: 1', - 'NewUser: 2', - 'NewUser: 3', - 'NewUser: 4' - ].map(name => ({ + "NewUser: 1", + "NewUser: 2", + "NewUser: 3", + "NewUser: 4", + ].map((name) => ({ result: { data: { user: { name } } }, - delay: 10 + delay: 10, })); const query: DocumentNode = gql` @@ -271,14 +271,14 @@ describe('subscriptions', () => { const userLink = new MockSubscriptionLink(); const triggerLink = new MockSubscriptionLink(); const link = new ApolloLink((o, f) => (f ? 
f(o) : null)).split( - ({ operationName }) => operationName === 'UserInfo', + ({ operationName }) => operationName === "UserInfo", userLink, triggerLink ); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let count = 0; @@ -288,9 +288,9 @@ describe('subscriptions', () => { const Container = graphql<{}, TriggerData>(triggerQuery)( graphql<TriggerQueryChildProps, QueryData>(query, { - shouldResubscribe: nextProps => { - return nextProps.data!.trigger === 'trigger resubscribe'; - } + shouldResubscribe: (nextProps) => { + return nextProps.data!.trigger === "trigger resubscribe"; + }, })( class Query extends React.Component<ComposedProps> { componentDidUpdate() { diff --git a/src/react/hooks/__tests__/useApolloClient.test.tsx b/src/react/hooks/__tests__/useApolloClient.test.tsx --- a/src/react/hooks/__tests__/useApolloClient.test.tsx +++ b/src/react/hooks/__tests__/useApolloClient.test.tsx @@ -1,22 +1,18 @@ -import React from 'react'; -import { render } from '@testing-library/react'; -import { InvariantError } from 'ts-invariant'; +import React from "react"; +import { render } from "@testing-library/react"; +import { InvariantError } from "ts-invariant"; -import { ApolloClient } from '../../../core'; -import { ApolloLink } from '../../../link/core'; -import { ApolloProvider, resetApolloContext } from '../../context'; -import { InMemoryCache } from '../../../cache'; -import { useApolloClient } from '../useApolloClient'; +import { ApolloClient } from "../../../core"; +import { ApolloLink } from "../../../link/core"; +import { ApolloProvider } from "../../context"; +import { InMemoryCache } from "../../../cache"; +import { useApolloClient } from "../useApolloClient"; -describe('useApolloClient Hook', () => { - afterEach(() => { - resetApolloContext(); - }); - - it('should return a client instance from the context if available', () => { +describe("useApolloClient Hook", () => { + it("should return a client instance from the context if available", () => { const client = new ApolloClient({ cache: new InMemoryCache(), - link: ApolloLink.empty() + link: ApolloLink.empty(), }); function App() { diff --git a/src/react/hooks/__tests__/useBackgroundQuery.test.tsx b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx new file mode 100644 --- /dev/null +++ b/src/react/hooks/__tests__/useBackgroundQuery.test.tsx @@ -0,0 +1,5436 @@ +import React, { Fragment, StrictMode, Suspense } from "react"; +import { + act, + render, + screen, + renderHook, + RenderHookOptions, + waitFor, +} from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { ErrorBoundary, ErrorBoundaryProps } from "react-error-boundary"; +import { expectTypeOf } from "expect-type"; +import { GraphQLError } from "graphql"; +import { + gql, + ApolloError, + DocumentNode, + ApolloClient, + ErrorPolicy, + NormalizedCacheObject, + NetworkStatus, + ApolloCache, + TypedDocumentNode, + ApolloLink, + Observable, + FetchMoreQueryOptions, + OperationVariables, + ApolloQueryResult, +} from "../../../core"; +import { + MockedResponse, + MockedProvider, + MockLink, + MockSubscriptionLink, + mockSingleLink, +} from "../../../testing"; +import { + concatPagination, + offsetLimitPagination, + DeepPartial, +} from "../../../utilities"; +import { useBackgroundQuery } from "../useBackgroundQuery"; +import { useReadQuery } from "../useReadQuery"; +import { ApolloProvider } from "../../context"; +import { unwrapQueryRef, QueryReference } from 
"../../cache/QueryReference"; +import { InMemoryCache } from "../../../cache"; +import { + SuspenseQueryHookFetchPolicy, + SuspenseQueryHookOptions, +} from "../../types/types"; +import equal from "@wry/equality"; +import { RefetchWritePolicy } from "../../../core/watchQueryOptions"; +import { skipToken } from "../constants"; + +function renderIntegrationTest({ + client, +}: { + client?: ApolloClient<NormalizedCacheObject>; +} = {}) { + const query: TypedDocumentNode<QueryData> = gql` + query SimpleQuery { + foo { + bar + } + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { foo: { bar: "hello" } } }, + }, + ]; + const _client = + client || + new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + }; + const errorBoundaryProps: ErrorBoundaryProps = { + fallback: <div>Error</div>, + onError: (error) => { + renders.errorCount++; + renders.errors.push(error); + }, + }; + + interface QueryData { + foo: { bar: string }; + } + + function SuspenseFallback() { + renders.suspenseCount++; + return <div>loading</div>; + } + + function Child({ queryRef }: { queryRef: QueryReference<QueryData> }) { + const { data } = useReadQuery(queryRef); + // count renders in the child component + renders.count++; + return <div>{data.foo.bar}</div>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(query); + return <Child queryRef={queryRef} />; + } + + function ParentWithVariables() { + const [queryRef] = useBackgroundQuery(query); + return <Child queryRef={queryRef} />; + } + + function App({ variables }: { variables?: Record<string, unknown> }) { + return ( + <ApolloProvider client={_client}> + <ErrorBoundary {...errorBoundaryProps}> + <Suspense fallback={<SuspenseFallback />}> + {variables ? <ParentWithVariables /> : <Parent />} + </Suspense> + </ErrorBoundary> + </ApolloProvider> + ); + } + + const { ...rest } = render(<App />); + return { ...rest, query, client: _client, renders }; +} + +interface VariablesCaseData { + character: { + id: string; + name: string; + }; +} + +interface VariablesCaseVariables { + id: string; +} + +function useVariablesIntegrationTestCase() { + const query: TypedDocumentNode< + VariablesCaseData, + VariablesCaseVariables + > = gql` + query CharacterQuery($id: ID!) 
{ + character(id: $id) { + id + name + } + } + `; + const CHARACTERS = ["Spider-Man", "Black Widow", "Iron Man", "Hulk"]; + let mocks = [...CHARACTERS].map((name, index) => ({ + request: { query, variables: { id: String(index + 1) } }, + result: { data: { character: { id: String(index + 1), name } } }, + })); + return { mocks, query }; +} + +function renderVariablesIntegrationTest({ + variables, + mocks, + errorPolicy, + options, + cache, +}: { + mocks?: { + request: { query: DocumentNode; variables: { id: string } }; + result: { + data?: { + character: { + id: string; + name: string | null; + }; + }; + }; + }[]; + variables: { id: string }; + options?: SuspenseQueryHookOptions; + cache?: InMemoryCache; + errorPolicy?: ErrorPolicy; +}) { + let { mocks: _mocks, query } = useVariablesIntegrationTestCase(); + + // duplicate mocks with (updated) in the name for refetches + _mocks = [..._mocks, ..._mocks, ..._mocks].map( + ({ request, result }, index) => { + return { + request: request, + result: { + data: { + character: { + ...result.data.character, + name: + index > 3 + ? index > 7 + ? `${result.data.character.name} (updated again)` + : `${result.data.character.name} (updated)` + : result.data.character.name, + }, + }, + }, + }; + } + ); + const client = new ApolloClient({ + cache: cache || new InMemoryCache(), + link: new MockLink(mocks || _mocks), + }); + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: { + data: VariablesCaseData; + networkStatus: NetworkStatus; + error: ApolloError | undefined; + }[]; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const errorBoundaryProps: ErrorBoundaryProps = { + fallback: <div>Error</div>, + onError: (error) => { + renders.errorCount++; + renders.errors.push(error); + }, + }; + + function SuspenseFallback() { + renders.suspenseCount++; + return <div>loading</div>; + } + + function Child({ + refetch, + variables: _variables, + queryRef, + }: { + variables: VariablesCaseVariables; + refetch: ( + variables?: Partial<OperationVariables> | undefined + ) => Promise<ApolloQueryResult<VariablesCaseData>>; + queryRef: QueryReference<VariablesCaseData>; + }) { + const { data, error, networkStatus } = useReadQuery(queryRef); + const [variables, setVariables] = React.useState(_variables); + // count renders in the child component + renders.count++; + renders.frames.push({ data, networkStatus, error }); + + return ( + <div> + {error ? 
<div>{error.message}</div> : null} + <button + onClick={() => { + refetch(variables); + }} + > + Refetch + </button> + <button + onClick={() => { + setVariables({ id: "2" }); + }} + > + Set variables to id: 2 + </button> + {data?.character.id} - {data?.character.name} + </div> + ); + } + + function ParentWithVariables({ + variables, + errorPolicy = "none", + }: { + variables: VariablesCaseVariables; + errorPolicy?: ErrorPolicy; + }) { + const [queryRef, { refetch }] = useBackgroundQuery(query, { + ...options, + variables, + errorPolicy, + }); + return ( + <Child refetch={refetch} variables={variables} queryRef={queryRef} /> + ); + } + + function App({ + variables, + errorPolicy, + }: { + variables: VariablesCaseVariables; + errorPolicy?: ErrorPolicy; + }) { + return ( + <ApolloProvider client={client}> + <ErrorBoundary {...errorBoundaryProps}> + <Suspense fallback={<SuspenseFallback />}> + <ParentWithVariables + variables={variables} + errorPolicy={errorPolicy} + /> + </Suspense> + </ErrorBoundary> + </ApolloProvider> + ); + } + + const { ...rest } = render( + <App errorPolicy={errorPolicy} variables={variables} /> + ); + const rerender = ({ variables }: { variables: VariablesCaseVariables }) => { + return rest.rerender(<App variables={variables} />); + }; + return { ...rest, query, rerender, client, renders, mocks: mocks || _mocks }; +} + +function renderPaginatedIntegrationTest({ + updateQuery, + fieldPolicies, +}: { + fieldPolicies?: boolean; + updateQuery?: boolean; + mocks?: { + request: { + query: DocumentNode; + variables: { offset: number; limit: number }; + }; + result: { + data: { + letters: { + letter: string; + position: number; + }[]; + }; + }; + }[]; +} = {}) { + interface QueryData { + letters: { + letter: string; + position: number; + }[]; + } + + interface Variables { + limit?: number; + offset?: number; + } + + const query: TypedDocumentNode<QueryData, Variables> = gql` + query letters($limit: Int, $offset: Int) { + letters(limit: $limit) { + letter + position + } + } + `; + + const data = "ABCDEFG" + .split("") + .map((letter, index) => ({ letter, position: index + 1 })); + + const link = new ApolloLink((operation) => { + const { offset = 0, limit = 2 } = operation.variables; + const letters = data.slice(offset, offset + limit); + + return new Observable((observer) => { + setTimeout(() => { + observer.next({ data: { letters } }); + observer.complete(); + }, 10); + }); + }); + + const cacheWithTypePolicies = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + letters: concatPagination(), + }, + }, + }, + }); + const client = new ApolloClient({ + cache: fieldPolicies ? cacheWithTypePolicies : new InMemoryCache(), + link, + }); + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + }; + + const errorBoundaryProps: ErrorBoundaryProps = { + fallback: <div>Error</div>, + onError: (error) => { + renders.errorCount++; + renders.errors.push(error); + }, + }; + + function SuspenseFallback() { + renders.suspenseCount++; + return <div>loading</div>; + } + + function Child({ + queryRef, + onFetchMore, + }: { + onFetchMore: (options: FetchMoreQueryOptions<Variables, QueryData>) => void; + queryRef: QueryReference<QueryData>; + }) { + const { data, error } = useReadQuery(queryRef); + // count renders in the child component + renders.count++; + return ( + <div> + {error ? 
<div>{error.message}</div> : null} + <button + onClick={() => { + const fetchMoreOpts: FetchMoreQueryOptions<Variables, QueryData> & { + updateQuery?: ( + previousQueryResult: QueryData, + options: { + fetchMoreResult: QueryData; + variables: Variables; + } + ) => QueryData; + } = { variables: { offset: 2, limit: 2 } }; + + if (updateQuery) { + fetchMoreOpts.updateQuery = (prev, { fetchMoreResult }) => ({ + letters: prev.letters.concat(fetchMoreResult.letters), + }); + } + + onFetchMore(fetchMoreOpts); + }} + > + Fetch more + </button> + <ul> + {data.letters.map(({ letter, position }) => ( + <li data-testid="letter" key={position}> + {letter} + </li> + ))} + </ul> + </div> + ); + } + + function ParentWithVariables() { + const [queryRef, { fetchMore }] = useBackgroundQuery(query, { + variables: { limit: 2, offset: 0 }, + }); + return <Child onFetchMore={fetchMore} queryRef={queryRef} />; + } + + function App() { + return ( + <ApolloProvider client={client}> + <ErrorBoundary {...errorBoundaryProps}> + <Suspense fallback={<SuspenseFallback />}> + <ParentWithVariables /> + </Suspense> + </ErrorBoundary> + </ApolloProvider> + ); + } + + const { ...rest } = render(<App />); + return { ...rest, data, query, client, renders }; +} + +type RenderSuspenseHookOptions<Props, TSerializedCache = {}> = Omit< + RenderHookOptions<Props>, + "wrapper" +> & { + client?: ApolloClient<TSerializedCache>; + link?: ApolloLink; + cache?: ApolloCache<TSerializedCache>; + mocks?: MockedResponse[]; + strictMode?: boolean; +}; + +interface Renders<Result> { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: Result[]; +} + +interface SimpleQueryData { + greeting: string; +} + +function renderSuspenseHook<Result, Props>( + render: (initialProps: Props) => Result, + options: RenderSuspenseHookOptions<Props> = Object.create(null) +) { + function SuspenseFallback() { + renders.suspenseCount++; + + return <div>loading</div>; + } + + const renders: Renders<Result> = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const { mocks = [], strictMode, ...renderHookOptions } = options; + + const client = + options.client || + new ApolloClient({ + cache: options.cache || new InMemoryCache(), + link: options.link || new MockLink(mocks), + }); + + const view = renderHook( + (props) => { + renders.count++; + + const view = render(props); + + renders.frames.push(view); + + return view; + }, + { + ...renderHookOptions, + wrapper: ({ children }) => { + const Wrapper = strictMode ? 
StrictMode : Fragment; + + return ( + <Wrapper> + <Suspense fallback={<SuspenseFallback />}> + <ErrorBoundary + fallback={<div>Error</div>} + onError={(error) => { + renders.errorCount++; + renders.errors.push(error); + }} + > + <ApolloProvider client={client}>{children}</ApolloProvider> + </ErrorBoundary> + </Suspense> + </Wrapper> + ); + }, + } + ); + + return { ...view, renders }; +} + +describe("useBackgroundQuery", () => { + it("fetches a simple query with minimal config", async () => { + const query = gql` + query { + hello + } + `; + const mocks = [ + { + request: { query }, + result: { data: { hello: "world 1" } }, + }, + ]; + const { result } = renderHook(() => useBackgroundQuery(query), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + expect(_result).toEqual({ + data: { hello: "world 1" }, + loading: false, + networkStatus: 7, + }); + }); + + it("allows the client to be overridden", async () => { + const query: TypedDocumentNode<SimpleQueryData> = gql` + query UserQuery { + greeting + } + `; + + const globalClient = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "global hello" } }) + ), + cache: new InMemoryCache(), + }); + + const localClient = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "local hello" } }) + ), + cache: new InMemoryCache(), + }); + + const { result } = renderSuspenseHook( + () => useBackgroundQuery(query, { client: localClient }), + { client: globalClient } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + await waitFor(() => { + expect(_result).toEqual({ + data: { greeting: "local hello" }, + loading: false, + networkStatus: NetworkStatus.ready, + }); + }); + }); + + it("passes context to the link", async () => { + const query = gql` + query ContextQuery { + context + } + `; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + const { valueA, valueB } = operation.getContext(); + + observer.next({ data: { context: { valueA, valueB } } }); + observer.complete(); + }); + }); + + const { result } = renderHook( + () => + useBackgroundQuery(query, { + context: { valueA: "A", valueB: "B" }, + }), + { + wrapper: ({ children }) => ( + <MockedProvider link={link}>{children}</MockedProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + await waitFor(() => { + expect(_result).toMatchObject({ + data: { context: { valueA: "A", valueB: "B" } }, + networkStatus: NetworkStatus.ready, + }); + }); + }); + + it('enables canonical results when canonizeResults is "true"', async () => { + interface Result { + __typename: string; + value: number; + } + + const cache = new InMemoryCache({ + typePolicies: { + Result: { + keyFields: false, + }, + }, + }); + + const query: TypedDocumentNode<{ results: Result[] }> = gql` + query { + results { + value + } + } + `; + + const results: Result[] = [ + { __typename: "Result", value: 0 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 2 }, + { __typename: "Result", value: 3 }, + { __typename: "Result", value: 5 }, + ]; + + cache.writeQuery({ + query, + data: { results }, + }); + + const { result } = renderHook( + () => + useBackgroundQuery(query, { + canonizeResults: true, + }), + { + 
wrapper: ({ children }) => ( + <MockedProvider cache={cache}>{children}</MockedProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + const resultSet = new Set(_result.data.results); + const values = Array.from(resultSet).map((item) => item.value); + + expect(_result.data).toEqual({ results }); + expect(_result.data.results.length).toBe(6); + expect(resultSet.size).toBe(5); + expect(values).toEqual([0, 1, 2, 3, 5]); + }); + + it("can disable canonical results when the cache's canonizeResults setting is true", async () => { + interface Result { + __typename: string; + value: number; + } + + const cache = new InMemoryCache({ + canonizeResults: true, + typePolicies: { + Result: { + keyFields: false, + }, + }, + }); + + const query: TypedDocumentNode<{ results: Result[] }> = gql` + query { + results { + value + } + } + `; + + const results: Result[] = [ + { __typename: "Result", value: 0 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 2 }, + { __typename: "Result", value: 3 }, + { __typename: "Result", value: 5 }, + ]; + + cache.writeQuery({ + query, + data: { results }, + }); + + const { result } = renderHook( + () => + useBackgroundQuery(query, { + canonizeResults: false, + }), + { + wrapper: ({ children }) => ( + <MockedProvider cache={cache}>{children}</MockedProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + const resultSet = new Set(_result.data.results); + const values = Array.from(resultSet).map((item) => item.value); + + expect(_result.data).toEqual({ results }); + expect(_result.data.results.length).toBe(6); + expect(resultSet.size).toBe(6); + expect(values).toEqual([0, 1, 1, 2, 3, 5]); + }); + + // TODO(FIXME): test fails, should return cache data first if it exists + it.skip("returns initial cache data followed by network data when the fetch policy is `cache-and-network`", async () => { + const query = gql` + { + hello + } + `; + const cache = new InMemoryCache(); + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + delay: 20, + }); + + const client = new ApolloClient({ + link, + cache, + }); + + cache.writeQuery({ query, data: { hello: "from cache" } }); + + const { result } = renderHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-and-network" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + expect(_result).toEqual({ + data: { hello: "from link" }, + loading: false, + networkStatus: 7, + }); + }); + + it("all data is present in the cache, no network request is made", async () => { + const query = gql` + { + hello + } + `; + const cache = new InMemoryCache(); + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + delay: 20, + }); + + const client = new ApolloClient({ + link, + cache, + }); + + cache.writeQuery({ query, data: { hello: "from cache" } }); + + const { result } = renderHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-first" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + expect(_result).toEqual({ + data: { hello: 
"from cache" }, + loading: false, + networkStatus: 7, + }); + }); + it("partial data is present in the cache so it is ignored and network request is made", async () => { + const query = gql` + { + hello + foo + } + `; + const cache = new InMemoryCache(); + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link", foo: "bar" } }, + delay: 20, + }); + + const client = new ApolloClient({ + link, + cache, + }); + + // we expect a "Missing field 'foo' while writing result..." error + // when writing hello to the cache, so we'll silence the console.error + const originalConsoleError = console.error; + console.error = () => { + /* noop */ + }; + cache.writeQuery({ query, data: { hello: "from cache" } }); + console.error = originalConsoleError; + + const { result } = renderHook( + () => useBackgroundQuery(query, { fetchPolicy: "cache-first" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + expect(_result).toEqual({ + data: { foo: "bar", hello: "from link" }, + loading: false, + networkStatus: 7, + }); + }); + + it("existing data in the cache is ignored", async () => { + const query = gql` + { + hello + } + `; + const cache = new InMemoryCache(); + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + delay: 20, + }); + + const client = new ApolloClient({ + link, + cache, + }); + + cache.writeQuery({ query, data: { hello: "from cache" } }); + + const { result } = renderHook( + () => useBackgroundQuery(query, { fetchPolicy: "network-only" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + expect(_result).toEqual({ + data: { hello: "from link" }, + loading: false, + networkStatus: 7, + }); + expect(client.cache.extract()).toEqual({ + ROOT_QUERY: { __typename: "Query", hello: "from link" }, + }); + }); + + it("fetches data from the network but does not update the cache", async () => { + const query = gql` + { + hello + } + `; + const cache = new InMemoryCache(); + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + delay: 20, + }); + + const client = new ApolloClient({ + link, + cache, + }); + + cache.writeQuery({ query, data: { hello: "from cache" } }); + + const { result } = renderHook( + () => useBackgroundQuery(query, { fetchPolicy: "no-cache" }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); + + const [queryRef] = result.current; + + const _result = await unwrapQueryRef(queryRef).promise; + + expect(_result).toEqual({ + data: { hello: "from link" }, + loading: false, + networkStatus: 7, + }); + // ...but not updated in the cache + expect(client.cache.extract()).toEqual({ + ROOT_QUERY: { __typename: "Query", hello: "from cache" }, + }); + }); + + describe("integration tests with useReadQuery", () => { + it("suspends and renders hello", async () => { + const { renders } = renderIntegrationTest(); + // ensure the hook suspends immediately + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + // the parent component re-renders when promise fulfilled + expect(await screen.findByText("hello")).toBeInTheDocument(); + 
expect(renders.count).toBe(1); + }); + + it("works with startTransition to change variables", async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) { + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { + todo: { id: "2", name: "Take out trash", completed: true }, + }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Parent() { + const [id, setId] = React.useState("1"); + const [queryRef] = useBackgroundQuery(query, { + variables: { id }, + }); + return <Todo queryRef={queryRef} onChange={setId} />; + } + + function Todo({ + queryRef, + onChange, + }: { + queryRef: QueryReference<Data>; + onChange: (id: string) => void; + }) { + const { data } = useReadQuery(queryRef); + const [isPending, startTransition] = React.useTransition(); + const { todo } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + onChange("2"); + }); + }} + > + Refresh + </button> + <div data-testid="todo" aria-busy={isPending}> + {todo.name} + {todo.completed && " (completed)"} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("todo")).toBeInTheDocument(); + + const todo = screen.getByTestId("todo"); + const button = screen.getByText("Refresh"); + + expect(todo).toHaveTextContent("Clean room"); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todo).toHaveAttribute("aria-busy", "true"); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo).toHaveTextContent("Clean room"); + + // Eventually we should see the updated todo content once its done + // suspending. + await waitFor(() => { + expect(todo).toHaveTextContent("Take out trash (completed)"); + }); + }); + + it('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + interface Data { + greeting: { + __typename: string; + message: string; + recipient: { name: string; __typename: string }; + }; + } + + const query: TypedDocumentNode<Data> = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + const client = new ApolloClient({ cache, link }); + let renders = 0; + let suspenseCount = 0; + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + suspenseCount++; + return <p>Loading</p>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(query, { + fetchPolicy: "cache-and-network", + }); + return <Todo queryRef={queryRef} />; + } + + function Todo({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data, networkStatus, error } = useReadQuery(queryRef); + const { greeting } = data; + renders++; + + return ( + <> + <div>Message: {greeting.message}</div> + <div>Recipient: {greeting.recipient.name}</div> + <div>Network status: {networkStatus}</div> + <div>Error: {error ? error.message : "none"}</div> + </> + ); + } + + render(<App />); + + expect(screen.getByText(/Message/i)).toHaveTextContent( + "Message: Hello cached" + ); + expect(screen.getByText(/Recipient/i)).toHaveTextContent( + "Recipient: Cached Alice" + ); + expect(screen.getByText(/Network status/i)).toHaveTextContent( + "Network status: 1" // loading + ); + expect(screen.getByText(/Error/i)).toHaveTextContent("none"); + + link.simulateResult({ + result: { + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(screen.getByText(/Message/i)).toHaveTextContent( + "Message: Hello world" + ); + }); + expect(screen.getByText(/Recipient/i)).toHaveTextContent( + "Recipient: Cached Alice" + ); + expect(screen.getByText(/Network status/i)).toHaveTextContent( + "Network status: 7" // ready + ); + expect(screen.getByText(/Error/i)).toHaveTextContent("none"); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(screen.getByText(/Recipient/i)).toHaveTextContent( + "Recipient: Alice" + ); + }); + expect(screen.getByText(/Message/i)).toHaveTextContent( + "Message: Hello world" + ); + expect(screen.getByText(/Network status/i)).toHaveTextContent( + "Network status: 7" // ready + ); + expect(screen.getByText(/Error/i)).toHaveTextContent("none"); + + expect(renders).toBe(3); + expect(suspenseCount).toBe(0); + }); + }); + + it("reacts to cache updates", async () => { + const { renders, client, query } = renderIntegrationTest(); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + // the parent component re-renders when promise fulfilled + expect(await screen.findByText("hello")).toBeInTheDocument(); + expect(renders.count).toBe(1); + + client.writeQuery({ + query, + data: { foo: { bar: "baz" } }, + }); + + // the parent component re-renders when promise fulfilled + expect(await screen.findByText("baz")).toBeInTheDocument(); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + + client.writeQuery({ + query, + data: { foo: { bar: "bat" } }, + }); + + expect(await 
screen.findByText("bat")).toBeInTheDocument(); + + expect(renders.suspenseCount).toBe(1); + }); + + it("reacts to variables updates", async () => { + const { renders, rerender } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + }); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + expect(await screen.findByText("1 - Spider-Man")).toBeInTheDocument(); + + rerender({ variables: { id: "2" } }); + + expect(renders.suspenseCount).toBe(2); + expect(screen.getByText("loading")).toBeInTheDocument(); + + expect(await screen.findByText("2 - Black Widow")).toBeInTheDocument(); + }); + + it("does not suspend when `skip` is true", async () => { + interface Data { + greeting: string; + } + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(query, { skip: true }); + + return ( + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(screen.queryByText("Loading...")).not.toBeInTheDocument(); + expect(screen.queryByTestId("greeting")).not.toBeInTheDocument(); + }); + + it("does not suspend when using `skipToken` in options", async () => { + interface Data { + greeting: string; + } + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(query, skipToken); + + return ( + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(screen.queryByText("Loading...")).not.toBeInTheDocument(); + expect(screen.queryByTestId("greeting")).not.toBeInTheDocument(); + }); + + it("suspends when `skip` becomes `false` after it was `true`", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [queryRef] = useBackgroundQuery(query, { skip }); 
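+ // While skip is true, no request is started and queryRef is left undefined,
+ // which is why the JSX below only renders <Greeting> behind a queryRef && guard.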
+ + return ( + <> + <button onClick={() => setSkip(false)}>Run query</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(screen.queryByText("Loading...")).not.toBeInTheDocument(); + expect(screen.queryByTestId("greeting")).not.toBeInTheDocument(); + + await act(() => user.click(screen.getByText("Run query"))); + + expect(screen.getByText("Loading...")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + }); + + it("suspends when switching away from `skipToken` in options", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [queryRef] = useBackgroundQuery( + query, + skip ? skipToken : undefined + ); + + return ( + <> + <button onClick={() => setSkip(false)}>Run query</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(screen.queryByText("Loading...")).not.toBeInTheDocument(); + expect(screen.queryByTestId("greeting")).not.toBeInTheDocument(); + + await act(() => user.click(screen.getByText("Run query"))); + + expect(screen.getByText("Loading...")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + }); + + it("renders skip result, does not suspend, and maintains `data` when `skip` becomes `true` after it was `false`", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(false); + const [queryRef] = useBackgroundQuery(query, { skip }); + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + 
return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(screen.getByText("Loading...")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + it("renders skip result, does not suspend, and maintains `data` when switching back to `skipToken`", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(false); + const [queryRef] = useBackgroundQuery( + query, + skip ? skipToken : undefined + ); + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(screen.getByText("Loading...")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + it("does not make network requests when `skip` is `true`", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + let fetchCount = 0; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks.find(({ request }) => + equal(request.query, operation.query) + ); + + if (!mock) { + throw new Error("Could not find mock for operation"); + } + + observer.next(mock.result); + observer.complete(); + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [queryRef] = useBackgroundQuery(query, { skip }); + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(fetchCount).toBe(0); + + // Toggle skip to `false` + await act(() => 
user.click(screen.getByText("Toggle skip"))); + + expect(fetchCount).toBe(1); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + // Toggle skip to `true` + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(fetchCount).toBe(1); + }); + + it("does not make network requests when `skipToken` is used", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + let fetchCount = 0; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks.find(({ request }) => + equal(request.query, operation.query) + ); + + if (!mock) { + throw new Error("Could not find mock for operation"); + } + + observer.next(mock.result); + observer.complete(); + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [queryRef] = useBackgroundQuery( + query, + skip ? skipToken : undefined + ); + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(fetchCount).toBe(0); + + // Toggle skip to `false` + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(fetchCount).toBe(1); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + // Toggle skip to `true` + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(fetchCount).toBe(1); + }); + + it("`skip` result is referentially stable", async () => { + interface Data { + greeting: string; + } + + interface CurrentResult { + current: Data | undefined; + } + + const user = userEvent.setup(); + + const result: CurrentResult = { + current: undefined, + }; + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [queryRef] = useBackgroundQuery(query, { skip }); + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + result.current = data; + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + const { rerender } = render(<App 
/>); + + const skipResult = result.current; + + rerender(<App />); + + expect(result.current).toBe(skipResult); + + // Toggle skip to `false` + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(screen.getByText("Loading...")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + const fetchedResult = result.current; + + rerender(<App />); + + expect(result.current).toBe(fetchedResult); + }); + + it("`skip` result is referentially stable when using `skipToken`", async () => { + interface Data { + greeting: string; + } + + interface CurrentResult { + current: Data | undefined; + } + + const user = userEvent.setup(); + + const result: CurrentResult = { + current: undefined, + }; + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [queryRef] = useBackgroundQuery( + query, + skip ? skipToken : undefined + ); + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + result.current = data; + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + const { rerender } = render(<App />); + + const skipResult = result.current; + + rerender(<App />); + + expect(result.current).toBe(skipResult); + + // Toggle skip to `false` + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(screen.getByText("Loading...")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + + const fetchedResult = result.current; + + rerender(<App />); + + expect(result.current).toBe(fetchedResult); + }); + + it("`skip` option works with `startTransition`", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [isPending, startTransition] = React.useTransition(); + const [queryRef] = useBackgroundQuery(query, { skip }); + + return ( + <> + <button + disabled={isPending} + onClick={() => + startTransition(() => { + setSkip((skip) => !skip); + }) + } + > + Toggle skip + </button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + 
<ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + const button = screen.getByText("Toggle skip"); + + // Toggle skip to `false` + await act(() => user.click(button)); + + expect(screen.queryByText("Loading...")).not.toBeInTheDocument(); + expect(button).toBeDisabled(); + expect(screen.queryByTestId("greeting")).not.toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + }); + + it("`skipToken` works with `startTransition`", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [skip, setSkip] = React.useState(true); + const [isPending, startTransition] = React.useTransition(); + const [queryRef] = useBackgroundQuery( + query, + skip ? skipToken : undefined + ); + + return ( + <> + <button + disabled={isPending} + onClick={() => + startTransition(() => { + setSkip((skip) => !skip); + }) + } + > + Toggle skip + </button> + <Suspense fallback={<SuspenseFallback />}> + {queryRef && <Greeting queryRef={queryRef} />} + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="greeting">{data.greeting}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + const button = screen.getByText("Toggle skip"); + + // Toggle skip to `false` + await act(() => user.click(button)); + + expect(screen.queryByText("Loading...")).not.toBeInTheDocument(); + expect(button).toBeDisabled(); + expect(screen.queryByTestId("greeting")).not.toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("greeting")).toHaveTextContent("Hello"); + }); + }); + + it("applies `errorPolicy` on next fetch when it changes between renders", async () => { + interface Data { + greeting: string; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + { + request: { query }, + result: { + errors: [new GraphQLError("oops")], + }, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [errorPolicy, setErrorPolicy] = React.useState<ErrorPolicy>("none"); + const [queryRef, { refetch }] = useBackgroundQuery(query, { + errorPolicy, + }); + + return ( + <> + <button onClick={() => setErrorPolicy("all")}> + Change error policy + </button> + <button onClick={() => refetch()}>Refetch greeting</button> + <Suspense fallback={<SuspenseFallback />}> + <Greeting queryRef={queryRef} /> + </Suspense> + </> + ); + } + + function Greeting({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data, error } = useReadQuery(queryRef); + + return error ? 
( + <div data-testid="error">{error.message}</div> + ) : ( + <div data-testid="greeting">{data.greeting}</div> + ); + } + + function App() { + return ( + <ApolloProvider client={client}> + <ErrorBoundary + fallback={<div data-testid="error">Error boundary</div>} + > + <Parent /> + </ErrorBoundary> + </ApolloProvider> + ); + } + + render(<App />); + + expect(await screen.findByTestId("greeting")).toHaveTextContent("Hello"); + + await act(() => user.click(screen.getByText("Change error policy"))); + await act(() => user.click(screen.getByText("Refetch greeting"))); + + // Ensure we aren't rendering the error boundary and instead rendering the + // error message in the Greeting component. + expect(await screen.findByTestId("error")).toHaveTextContent("oops"); + }); + + it("applies `context` on next fetch when it changes between renders", async () => { + interface Data { + context: Record<string, any>; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + context + } + `; + + const link = new ApolloLink((operation) => { + return Observable.of({ + data: { + context: operation.getContext(), + }, + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [phase, setPhase] = React.useState("initial"); + const [queryRef, { refetch }] = useBackgroundQuery(query, { + context: { phase }, + }); + + return ( + <> + <button onClick={() => setPhase("rerender")}>Update context</button> + <button onClick={() => refetch()}>Refetch</button> + <Suspense fallback={<SuspenseFallback />}> + <Context queryRef={queryRef} /> + </Suspense> + </> + ); + } + + function Context({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <div data-testid="context">{data.context.phase}</div>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + expect(await screen.findByTestId("context")).toHaveTextContent("initial"); + + await act(() => user.click(screen.getByText("Update context"))); + await act(() => user.click(screen.getByText("Refetch"))); + + expect(await screen.findByTestId("context")).toHaveTextContent("rerender"); + }); + + // NOTE: We only test the `false` -> `true` path here. If the option changes + // from `true` -> `false`, the data has already been canonized, so it has no + // effect on the output. 
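+ // For context on the test below: with canonizeResults enabled, structurally
+ // equal result objects share a single reference, so the Set built from
+ // data.results collapses the six items down to five unique values.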
+ it("returns canonical results immediately when `canonizeResults` changes from `false` to `true` between renders", async () => { + interface Result { + __typename: string; + value: number; + } + + interface Data { + results: Result[]; + } + + const cache = new InMemoryCache({ + typePolicies: { + Result: { + keyFields: false, + }, + }, + }); + + const query: TypedDocumentNode<Data> = gql` + query { + results { + value + } + } + `; + + const results: Result[] = [ + { __typename: "Result", value: 0 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 2 }, + { __typename: "Result", value: 3 }, + { __typename: "Result", value: 5 }, + ]; + + const user = userEvent.setup(); + + cache.writeQuery({ + query, + data: { results }, + }); + + const client = new ApolloClient({ + link: new MockLink([]), + cache, + }); + + const result: { current: Data | null } = { + current: null, + }; + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [canonizeResults, setCanonizeResults] = React.useState(false); + const [queryRef] = useBackgroundQuery(query, { + canonizeResults, + }); + + return ( + <> + <button onClick={() => setCanonizeResults(true)}> + Canonize results + </button> + <Suspense fallback={<SuspenseFallback />}> + <Results queryRef={queryRef} /> + </Suspense> + </> + ); + } + + function Results({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + result.current = data; + + return null; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + function verifyCanonicalResults(data: Data, canonized: boolean) { + const resultSet = new Set(data.results); + const values = Array.from(resultSet).map((item) => item.value); + + expect(data).toEqual({ results }); + + if (canonized) { + expect(data.results.length).toBe(6); + expect(resultSet.size).toBe(5); + expect(values).toEqual([0, 1, 2, 3, 5]); + } else { + expect(data.results.length).toBe(6); + expect(resultSet.size).toBe(6); + expect(values).toEqual([0, 1, 1, 2, 3, 5]); + } + } + + verifyCanonicalResults(result.current!, false); + + await act(() => user.click(screen.getByText("Canonize results"))); + + verifyCanonicalResults(result.current!, true); + }); + + it("applies changed `refetchWritePolicy` to next fetch when changing between renders", async () => { + interface Data { + primes: number[]; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, { min: number; max: number }> = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + { + request: { query, variables: { min: 30, max: 50 } }, + result: { data: { primes: [31, 37, 41, 43, 47] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? 
existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [refetchWritePolicy, setRefetchWritePolicy] = + React.useState<RefetchWritePolicy>("merge"); + + const [queryRef, { refetch }] = useBackgroundQuery(query, { + refetchWritePolicy, + variables: { min: 0, max: 12 }, + }); + + return ( + <> + <button onClick={() => setRefetchWritePolicy("overwrite")}> + Change refetch write policy + </button> + <button onClick={() => refetch({ min: 12, max: 30 })}> + Refetch next + </button> + <button onClick={() => refetch({ min: 30, max: 50 })}> + Refetch last + </button> + <Suspense fallback={<SuspenseFallback />}> + <Primes queryRef={queryRef} /> + </Suspense> + </> + ); + } + + function Primes({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <span data-testid="primes">{data.primes.join(", ")}</span>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + const primes = await screen.findByTestId("primes"); + + expect(primes).toHaveTextContent("2, 3, 5, 7, 11"); + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + await act(() => user.click(screen.getByText("Refetch next"))); + + await waitFor(() => { + expect(primes).toHaveTextContent("2, 3, 5, 7, 11, 13, 17, 19, 23, 29"); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + ]); + + await act(() => + user.click(screen.getByText("Change refetch write policy")) + ); + + await act(() => user.click(screen.getByText("Refetch last"))); + + await waitFor(() => { + expect(primes).toHaveTextContent("31, 37, 41, 43, 47"); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + [undefined, [31, 37, 41, 43, 47]], + ]); + }); + + it("applies `returnPartialData` on next fetch when it changes between renders", async () => { + interface Data { + character: { + __typename: "Character"; + id: string; + name: string; + }; + } + + interface PartialData { + character: { + __typename: "Character"; + id: string; + }; + } + + const user = userEvent.setup(); + + const fullQuery: TypedDocumentNode<Data> = gql` + query { + character { + __typename + id + name + } + } + `; + + const partialQuery: TypedDocumentNode<PartialData> = gql` + query { + character { + __typename + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange", + }, + }, + }, + }, + { + request: { query: fullQuery }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange (refetched)", + }, + }, + }, + delay: 100, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { __typename: "Character", id: "1" } }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [returnPartialData, setReturnPartialData] = React.useState(false); + + const [queryRef] = useBackgroundQuery(fullQuery, { + returnPartialData, + }); + + return ( + <> + <button onClick={() => 
setReturnPartialData(true)}> + Update partial data + </button> + <Suspense fallback={<SuspenseFallback />}> + <Character queryRef={queryRef} /> + </Suspense> + </> + ); + } + + function Character({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return ( + <span data-testid="character">{data.character.name ?? "unknown"}</span> + ); + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + const character = await screen.findByTestId("character"); + + expect(character).toHaveTextContent("Doctor Strange"); + + await act(() => user.click(screen.getByText("Update partial data"))); + + cache.modify({ + id: cache.identify({ __typename: "Character", id: "1" }), + fields: { + name: (_, { DELETE }) => DELETE, + }, + }); + + await waitFor(() => { + expect(character).toHaveTextContent("unknown"); + }); + + await waitFor(() => { + expect(character).toHaveTextContent("Doctor Strange (refetched)"); + }); + }); + + it("applies updated `fetchPolicy` on next fetch when it changes between renders", async () => { + interface Data { + character: { + __typename: "Character"; + id: string; + name: string; + }; + } + + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data> = gql` + query { + character { + __typename + id + name + } + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange", + }, + }, + }, + delay: 10, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [fetchPolicy, setFetchPolicy] = + React.useState<SuspenseQueryHookFetchPolicy>("cache-first"); + + const [queryRef, { refetch }] = useBackgroundQuery(query, { + fetchPolicy, + }); + + return ( + <> + <button onClick={() => setFetchPolicy("no-cache")}> + Change fetch policy + </button> + <button onClick={() => refetch()}>Refetch</button> + <Suspense fallback={<SuspenseFallback />}> + <Character queryRef={queryRef} /> + </Suspense> + </> + ); + } + + function Character({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data } = useReadQuery(queryRef); + + return <span data-testid="character">{data.character.name}</span>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + const character = await screen.findByTestId("character"); + + expect(character).toHaveTextContent("Doctor Strangecache"); + + await act(() => user.click(screen.getByText("Change fetch policy"))); + await act(() => user.click(screen.getByText("Refetch"))); + await waitFor(() => { + expect(character).toHaveTextContent("Doctor Strange"); + }); + + // Because we switched to a `no-cache` fetch policy, we should not see the + // newly fetched data in the cache after the fetch occurred.
+ expect(cache.readQuery({ query })).toEqual({ + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }); + }); + + it("properly handles changing options along with changing `variables`", async () => { + interface Data { + character: { + __typename: "Character"; + id: string; + name: string; + }; + } + + const user = userEvent.setup(); + const query: TypedDocumentNode<Data, { id: string }> = gql` + query ($id: ID!) { + character(id: $id) { + __typename + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("oops")], + }, + delay: 10, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { + character: { + __typename: "Character", + id: "2", + name: "Hulk", + }, + }, + }, + delay: 10, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + variables: { + id: "1", + }, + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function SuspenseFallback() { + return <div>Loading...</div>; + } + + function Parent() { + const [id, setId] = React.useState("1"); + + const [queryRef, { refetch }] = useBackgroundQuery(query, { + errorPolicy: id === "1" ? "all" : "none", + variables: { id }, + }); + + return ( + <> + <button onClick={() => setId("1")}>Get first character</button> + <button onClick={() => setId("2")}>Get second character</button> + <button onClick={() => refetch()}>Refetch</button> + <ErrorBoundary + fallback={<div data-testid="error">Error boundary</div>} + > + <Suspense fallback={<SuspenseFallback />}> + <Character queryRef={queryRef} /> + </Suspense> + </ErrorBoundary> + </> + ); + } + + function Character({ queryRef }: { queryRef: QueryReference<Data> }) { + const { data, error } = useReadQuery(queryRef); + + return error ? ( + <div data-testid="error">{error.message}</div> + ) : ( + <span data-testid="character">{data.character.name}</span> + ); + } + + function App() { + return ( + <ApolloProvider client={client}> + <Parent /> + </ApolloProvider> + ); + } + + render(<App />); + + const character = await screen.findByTestId("character"); + + expect(character).toHaveTextContent("Doctor Strangecache"); + + await act(() => user.click(screen.getByText("Get second character"))); + + await waitFor(() => { + expect(character).toHaveTextContent("Hulk"); + }); + + await act(() => user.click(screen.getByText("Get first character"))); + + await waitFor(() => { + expect(character).toHaveTextContent("Doctor Strangecache"); + }); + + await act(() => user.click(screen.getByText("Refetch"))); + + // Ensure we render the inline error instead of the error boundary, which + // tells us the error policy was properly applied. 
+ expect(await screen.findByTestId("error")).toHaveTextContent("oops"); + }); + + describe("refetch", () => { + it("re-suspends when calling `refetch`", async () => { + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + }); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + expect(await screen.findByText("1 - Spider-Man")).toBeInTheDocument(); + + const button = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(2); + expect(renders.count).toBe(2); + + expect( + await screen.findByText("1 - Spider-Man (updated)") + ).toBeInTheDocument(); + }); + it("re-suspends when calling `refetch` with new variables", async () => { + interface QueryData { + character: { + id: string; + name: string; + }; + } + + interface QueryVariables { + id: string; + } + const query: TypedDocumentNode<QueryData, QueryVariables> = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { character: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { character: { id: "2", name: "Captain America" } }, + }, + }, + ]; + + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + mocks, + }); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + expect(await screen.findByText("1 - Captain Marvel")).toBeInTheDocument(); + + const newVariablesRefetchButton = screen.getByText( + "Set variables to id: 2" + ); + const refetchButton = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(newVariablesRefetchButton)); + await act(() => user.click(refetchButton)); + + expect( + await screen.findByText("2 - Captain America") + ).toBeInTheDocument(); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(2); + expect(renders.count).toBe(3); + + // extra render puts an additional frame into the array + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + it("re-suspends multiple times when calling `refetch` multiple times", async () => { + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + }); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + expect(await screen.findByText("1 - Spider-Man")).toBeInTheDocument(); + + const button = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(2); + expect(renders.count).toBe(2); + + expect( + await screen.findByText("1 - Spider-Man (updated)") + ).toBeInTheDocument(); + + await act(() => user.click(button)); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(3); + expect(renders.count).toBe(3); + + expect( + await screen.findByText("1 - Spider-Man (updated again)") + ).toBeInTheDocument(); + }); + it("throws errors when errors are returned after calling `refetch`", async () => { + 
const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + interface QueryData { + character: { + id: string; + name: string; + }; + } + + interface QueryVariables { + id: string; + } + const query: TypedDocumentNode<QueryData, QueryVariables> = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { character: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + mocks, + }); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + expect(await screen.findByText("1 - Captain Marvel")).toBeInTheDocument(); + + const button = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + await waitFor(() => { + expect(renders.errorCount).toBe(1); + }); + + expect(renders.errors).toEqual([ + new ApolloError({ + graphQLErrors: [new GraphQLError("Something went wrong")], + }), + ]); + + consoleSpy.mockRestore(); + }); + it('ignores errors returned after calling `refetch` when errorPolicy is set to "ignore"', async () => { + interface QueryData { + character: { + id: string; + name: string; + }; + } + + interface QueryVariables { + id: string; + } + const query: TypedDocumentNode<QueryData, QueryVariables> = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { character: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + errorPolicy: "ignore", + mocks, + }); + + expect(await screen.findByText("1 - Captain Marvel")).toBeInTheDocument(); + + const button = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + }); + it('returns errors after calling `refetch` when errorPolicy is set to "all"', async () => { + interface QueryData { + character: { + id: string; + name: string; + }; + } + + interface QueryVariables { + id: string; + } + const query: TypedDocumentNode<QueryData, QueryVariables> = gql` + query CharacterQuery($id: ID!) 
{ + character(id: $id) { + id + name + } + } + `; + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { character: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + errorPolicy: "all", + mocks, + }); + + expect(await screen.findByText("1 - Captain Marvel")).toBeInTheDocument(); + + const button = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + + expect( + await screen.findByText("Something went wrong") + ).toBeInTheDocument(); + }); + it('handles partial data results after calling `refetch` when errorPolicy is set to "all"', async () => { + interface QueryData { + character: { + id: string; + name: string; + }; + } + + interface QueryVariables { + id: string; + } + const query: TypedDocumentNode<QueryData, QueryVariables> = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { character: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { character: { id: "1", name: null } }, + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { renders } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + errorPolicy: "all", + mocks, + }); + + expect(await screen.findByText("1 - Captain Marvel")).toBeInTheDocument(); + + const button = screen.getByText("Refetch"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + + expect( + await screen.findByText("Something went wrong") + ).toBeInTheDocument(); + + const expectedError = new ApolloError({ + graphQLErrors: [new GraphQLError("Something went wrong")], + }); + + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: mocks[1].result.data, + networkStatus: NetworkStatus.error, + error: expectedError, + }, + ]); + }); + it("`refetch` works with startTransition to allow React to show stale UI until finished suspending", async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: true } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Parent() { + const [id, setId] = React.useState("1"); + const [queryRef, { refetch }] = useBackgroundQuery(query, { + variables: { id }, + }); + const onRefetchHandler = () => { + refetch(); + }; + return ( + <Todo + onRefetch={onRefetchHandler} + queryRef={queryRef} + onChange={setId} + /> + ); + } + + function Todo({ + queryRef, + onRefetch, + }: { + onRefetch: () => void; + queryRef: QueryReference<Data>; + onChange: (id: string) => void; + }) { + const { data } = useReadQuery(queryRef); + const [isPending, startTransition] = React.useTransition(); + const { todo } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + onRefetch(); + }); + }} + > + Refresh + </button> + <div data-testid="todo" aria-busy={isPending}> + {todo.name} + {todo.completed && " (completed)"} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("todo")).toBeInTheDocument(); + + const todo = screen.getByTestId("todo"); + const button = screen.getByText("Refresh"); + + expect(todo).toHaveTextContent("Clean room"); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todo).toHaveAttribute("aria-busy", "true"); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo).toHaveTextContent("Clean room"); + + // Eventually we should see the updated todo content once its done + // suspending. 
+ await waitFor(() => { + expect(todo).toHaveTextContent("Clean room (completed)"); + }); + }); + }); + + describe("fetchMore", () => { + function getItemTexts() { + return screen.getAllByTestId(/letter/).map( + // eslint-disable-next-line testing-library/no-node-access + (li) => li.firstChild!.textContent + ); + } + it("re-suspends when calling `fetchMore` with different variables", async () => { + const { renders } = renderPaginatedIntegrationTest(); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + const items = await screen.findAllByTestId(/letter/i); + expect(items).toHaveLength(2); + expect(getItemTexts()).toStrictEqual(["A", "B"]); + + const button = screen.getByText("Fetch more"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(2); + await waitFor(() => { + expect(renders.count).toBe(2); + }); + + expect(getItemTexts()).toStrictEqual(["C", "D"]); + }); + it("properly uses `updateQuery` when calling `fetchMore`", async () => { + const { renders } = renderPaginatedIntegrationTest({ + updateQuery: true, + }); + + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + const items = await screen.findAllByTestId(/letter/i); + + expect(items).toHaveLength(2); + expect(getItemTexts()).toStrictEqual(["A", "B"]); + + const button = screen.getByText("Fetch more"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(2); + await waitFor(() => { + expect(renders.count).toBe(2); + }); + + const moreItems = await screen.findAllByTestId(/letter/i); + expect(moreItems).toHaveLength(4); + expect(getItemTexts()).toStrictEqual(["A", "B", "C", "D"]); + }); + it("properly uses cache field policies when calling `fetchMore` without `updateQuery`", async () => { + const { renders } = renderPaginatedIntegrationTest({ + fieldPolicies: true, + }); + expect(renders.suspenseCount).toBe(1); + expect(screen.getByText("loading")).toBeInTheDocument(); + + const items = await screen.findAllByTestId(/letter/i); + + expect(items).toHaveLength(2); + expect(getItemTexts()).toStrictEqual(["A", "B"]); + + const button = screen.getByText("Fetch more"); + const user = userEvent.setup(); + await act(() => user.click(button)); + + // parent component re-suspends + expect(renders.suspenseCount).toBe(2); + await waitFor(() => { + expect(renders.count).toBe(2); + }); + + const moreItems = await screen.findAllByTestId(/letter/i); + expect(moreItems).toHaveLength(4); + expect(getItemTexts()).toStrictEqual(["A", "B", "C", "D"]); + }); + it("`fetchMore` works with startTransition to allow React to show stale UI until finished suspending", async () => { + type Variables = { + offset: number; + }; + + interface Todo { + __typename: "Todo"; + id: string; + name: string; + completed: boolean; + } + interface Data { + todos: Todo[]; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodosQuery($offset: Int!) 
{ + todos(offset: $offset) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { offset: 0 } }, + result: { + data: { + todos: [ + { + __typename: "Todo", + id: "1", + name: "Clean room", + completed: false, + }, + ], + }, + }, + delay: 10, + }, + { + request: { query, variables: { offset: 1 } }, + result: { + data: { + todos: [ + { + __typename: "Todo", + id: "2", + name: "Take out trash", + completed: true, + }, + ], + }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + todos: offsetLimitPagination(), + }, + }, + }, + }), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Parent() { + const [queryRef, { fetchMore }] = useBackgroundQuery(query, { + variables: { offset: 0 }, + }); + const onFetchMoreHandler = (variables: Variables) => { + fetchMore({ variables }); + }; + return <Todo onFetchMore={onFetchMoreHandler} queryRef={queryRef} />; + } + + function Todo({ + queryRef, + onFetchMore, + }: { + onFetchMore: (variables: Variables) => void; + queryRef: QueryReference<Data>; + }) { + const { data } = useReadQuery(queryRef); + const [isPending, startTransition] = React.useTransition(); + const { todos } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + onFetchMore({ offset: 1 }); + }); + }} + > + Load more + </button> + <div data-testid="todos" aria-busy={isPending}> + {todos.map((todo) => ( + <div data-testid={`todo:${todo.id}`} key={todo.id}> + {todo.name} + {todo.completed && " (completed)"} + </div> + ))} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("todos")).toBeInTheDocument(); + + const todos = screen.getByTestId("todos"); + const todo1 = screen.getByTestId("todo:1"); + const button = screen.getByText("Load more"); + + expect(todo1).toBeInTheDocument(); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todos).toHaveAttribute("aria-busy", "true"); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo1).toHaveTextContent("Clean room"); + + // Eventually we should see the updated todos content once its done + // suspending. 
+ await waitFor(() => { + expect(screen.getByTestId("todo:2")).toHaveTextContent( + "Take out trash (completed)" + ); + expect(todo1).toHaveTextContent("Clean room"); + }); + }); + + it('honors refetchWritePolicy set to "merge"', async () => { + const user = userEvent.setup(); + + const query: TypedDocumentNode< + { primes: number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + interface QueryData { + primes: number[]; + } + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + function SuspenseFallback() { + return <div>loading</div>; + } + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function Child({ + refetch, + queryRef, + }: { + refetch: ( + variables?: Partial<OperationVariables> | undefined + ) => Promise<ApolloQueryResult<QueryData>>; + queryRef: QueryReference<QueryData>; + }) { + const { data, error, networkStatus } = useReadQuery(queryRef); + + return ( + <div> + <button + onClick={() => { + refetch({ min: 12, max: 30 }); + }} + > + Refetch + </button> + <div data-testid="primes">{data?.primes.join(", ")}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </div> + ); + } + + function Parent() { + const [queryRef, { refetch }] = useBackgroundQuery(query, { + variables: { min: 0, max: 12 }, + refetchWritePolicy: "merge", + }); + return <Child refetch={refetch} queryRef={queryRef} />; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + render(<App />); + + await waitFor(() => { + expect(screen.getByTestId("primes")).toHaveTextContent( + "2, 3, 5, 7, 11" + ); + }); + expect(screen.getByTestId("network-status")).toHaveTextContent( + "7" // ready + ); + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + await act(() => user.click(screen.getByText("Refetch"))); + + await waitFor(() => { + expect(screen.getByTestId("primes")).toHaveTextContent( + "2, 3, 5, 7, 11, 13, 17, 19, 23, 29" + ); + }); + expect(screen.getByTestId("network-status")).toHaveTextContent( + "7" // ready + ); + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + ]); + }); + + it('defaults refetchWritePolicy to "overwrite"', async () => { + const user = userEvent.setup(); + + const query: TypedDocumentNode< + { primes: number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + interface QueryData { + primes: number[]; + } + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { 
primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + function SuspenseFallback() { + return <div>loading</div>; + } + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function Child({ + refetch, + queryRef, + }: { + refetch: ( + variables?: Partial<OperationVariables> | undefined + ) => Promise<ApolloQueryResult<QueryData>>; + queryRef: QueryReference<QueryData>; + }) { + const { data, error, networkStatus } = useReadQuery(queryRef); + + return ( + <div> + <button + onClick={() => { + refetch({ min: 12, max: 30 }); + }} + > + Refetch + </button> + <div data-testid="primes">{data?.primes.join(", ")}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </div> + ); + } + + function Parent() { + const [queryRef, { refetch }] = useBackgroundQuery(query, { + variables: { min: 0, max: 12 }, + }); + return <Child refetch={refetch} queryRef={queryRef} />; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + render(<App />); + + await waitFor(() => { + expect(screen.getByTestId("primes")).toHaveTextContent( + "2, 3, 5, 7, 11" + ); + }); + expect(screen.getByTestId("network-status")).toHaveTextContent( + "7" // ready + ); + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + await act(() => user.click(screen.getByText("Refetch"))); + + await waitFor(() => { + expect(screen.getByTestId("primes")).toHaveTextContent( + "13, 17, 19, 23, 29" + ); + }); + expect(screen.getByTestId("network-status")).toHaveTextContent( + "7" // ready + ); + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [undefined, [13, 17, 19, 23, 29]], + ]); + }); + + it('does not suspend when partial data is in the cache and using a "cache-first" fetch policy with returnPartialData', async () => { + interface Data { + character: { + id: string; + name: string; + }; + } + + const fullQuery: TypedDocumentNode<Data> = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + }; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function 
SuspenseFallback() { + renders.suspenseCount++; + return <p>Loading</p>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(fullQuery, { + fetchPolicy: "cache-first", + returnPartialData: true, + }); + return <Todo queryRef={queryRef} />; + } + + function Todo({ + queryRef, + }: { + queryRef: QueryReference<DeepPartial<Data>>; + }) { + const { data, networkStatus, error } = useReadQuery(queryRef); + renders.count++; + + return ( + <> + <div data-testid="character-id">{data.character?.id}</div> + <div data-testid="character-name">{data.character?.name}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </> + ); + } + + render(<App />); + + expect(renders.suspenseCount).toBe(0); + expect(screen.getByTestId("character-id")).toHaveTextContent("1"); + expect(screen.getByTestId("character-name")).toHaveTextContent(""); + expect(screen.getByTestId("network-status")).toHaveTextContent("1"); // loading + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + await waitFor(() => { + expect(screen.getByTestId("character-name")).toHaveTextContent( + "Doctor Strange" + ); + }); + expect(screen.getByTestId("character-id")).toHaveTextContent("1"); + expect(screen.getByTestId("network-status")).toHaveTextContent("7"); // ready + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(0); + }); + + it('suspends and does not use partial data when changing variables and using a "cache-first" fetch policy with returnPartialData', async () => { + const partialQuery = gql` + query ($id: ID!) { + character(id: $id) { + id + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + variables: { id: "1" }, + }); + + const { renders, mocks, rerender } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + cache, + options: { + fetchPolicy: "cache-first", + returnPartialData: true, + }, + }); + expect(renders.suspenseCount).toBe(0); + + expect(await screen.findByText("1 - Spider-Man")).toBeInTheDocument(); + + rerender({ variables: { id: "2" } }); + + expect(await screen.findByText("2 - Black Widow")).toBeInTheDocument(); + + expect(renders.frames[2]).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends when partial data is in the cache and using a "network-only" fetch policy with returnPartialData', async () => { + interface Data { + character: { + id: string; + name: string; + }; + } + + const fullQuery: TypedDocumentNode<Data> = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: { + data: DeepPartial<Data>; + networkStatus: NetworkStatus; + 
error: ApolloError | undefined; + }[]; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + renders.suspenseCount++; + return <p>Loading</p>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(fullQuery, { + fetchPolicy: "network-only", + returnPartialData: true, + }); + + return <Todo queryRef={queryRef} />; + } + + function Todo({ + queryRef, + }: { + queryRef: QueryReference<DeepPartial<Data>>; + }) { + const { data, networkStatus, error } = useReadQuery(queryRef); + renders.frames.push({ data, networkStatus, error }); + renders.count++; + return ( + <> + <div data-testid="character-id">{data.character?.id}</div> + <div data-testid="character-name">{data.character?.name}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </> + ); + } + + render(<App />); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(screen.getByTestId("character-name")).toHaveTextContent( + "Doctor Strange" + ); + }); + expect(screen.getByTestId("character-id")).toHaveTextContent("1"); + expect(screen.getByTestId("network-status")).toHaveTextContent("7"); // ready + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + expect(renders.count).toBe(1); + expect(renders.suspenseCount).toBe(1); + + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends when partial data is in the cache and using a "no-cache" fetch policy with returnPartialData', async () => { + const consoleSpy = jest.spyOn(console, "warn").mockImplementation(); + interface Data { + character: { + id: string; + name: string; + }; + } + + const fullQuery: TypedDocumentNode<Data> = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: { + data: DeepPartial<Data>; + networkStatus: NetworkStatus; + error: ApolloError | undefined; + }[]; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + renders.suspenseCount++; + return <p>Loading</p>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(fullQuery, { + fetchPolicy: "no-cache", + returnPartialData: true, + }); + + return <Todo queryRef={queryRef} />; + } + + function Todo({ + queryRef, + }: { + 
queryRef: QueryReference<DeepPartial<Data>>; + }) { + const { data, networkStatus, error } = useReadQuery(queryRef); + renders.frames.push({ data, networkStatus, error }); + renders.count++; + return ( + <> + <div data-testid="character-id">{data.character?.id}</div> + <div data-testid="character-name">{data.character?.name}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </> + ); + } + + render(<App />); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(screen.getByTestId("character-name")).toHaveTextContent( + "Doctor Strange" + ); + }); + expect(screen.getByTestId("character-id")).toHaveTextContent("1"); + expect(screen.getByTestId("network-status")).toHaveTextContent("7"); // ready + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + expect(renders.count).toBe(1); + expect(renders.suspenseCount).toBe(1); + + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + + consoleSpy.mockRestore(); + }); + + it('warns when using returnPartialData with a "no-cache" fetch policy', async () => { + const consoleSpy = jest.spyOn(console, "warn").mockImplementation(); + + const query: TypedDocumentNode<SimpleQueryData> = gql` + query UserQuery { + greeting + } + `; + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + renderSuspenseHook( + () => + useBackgroundQuery(query, { + fetchPolicy: "no-cache", + returnPartialData: true, + }), + { mocks } + ); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + "Using `returnPartialData` with a `no-cache` fetch policy has no effect. To read partial data from the cache, consider using an alternate fetch policy." 
+ ); + + consoleSpy.mockRestore(); + }); + + it('does not suspend when partial data is in the cache and using a "cache-and-network" fetch policy with returnPartialData', async () => { + interface Data { + character: { + id: string; + name: string; + }; + } + + const fullQuery: TypedDocumentNode<Data> = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: { + data: DeepPartial<Data>; + networkStatus: NetworkStatus; + error: ApolloError | undefined; + }[]; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache, + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + renders.suspenseCount++; + return <p>Loading</p>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(fullQuery, { + fetchPolicy: "cache-and-network", + returnPartialData: true, + }); + + return <Todo queryRef={queryRef} />; + } + + function Todo({ + queryRef, + }: { + queryRef: QueryReference<DeepPartial<Data>>; + }) { + const { data, networkStatus, error } = useReadQuery(queryRef); + renders.frames.push({ data, networkStatus, error }); + renders.count++; + return ( + <> + <div data-testid="character-id">{data.character?.id}</div> + <div data-testid="character-name">{data.character?.name}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </> + ); + } + + render(<App />); + + expect(renders.suspenseCount).toBe(0); + expect(screen.getByTestId("character-id")).toHaveTextContent("1"); + // name is not present yet, since it's missing in partial data + expect(screen.getByTestId("character-name")).toHaveTextContent(""); + expect(screen.getByTestId("network-status")).toHaveTextContent("1"); // loading + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + await waitFor(() => { + expect(screen.getByTestId("character-name")).toHaveTextContent( + "Doctor Strange" + ); + }); + expect(screen.getByTestId("character-id")).toHaveTextContent("1"); + expect(screen.getByTestId("network-status")).toHaveTextContent("7"); // ready + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(0); + + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends and does not use partial data when changing variables and using a "cache-and-network" fetch policy with returnPartialData', async () => { + const partialQuery = gql` + query ($id: ID!) 
{ + character(id: $id) { + id + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + variables: { id: "1" }, + }); + + const { renders, mocks, rerender } = renderVariablesIntegrationTest({ + variables: { id: "1" }, + cache, + options: { + fetchPolicy: "cache-and-network", + returnPartialData: true, + }, + }); + + expect(renders.suspenseCount).toBe(0); + + expect(await screen.findByText("1 - Spider-Man")).toBeInTheDocument(); + + rerender({ variables: { id: "2" } }); + + expect(await screen.findByText("2 - Black Widow")).toBeInTheDocument(); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + interface QueryData { + greeting: { + __typename: string; + message?: string; + recipient?: { + __typename: string; + name: string; + }; + }; + } + + const query: TypedDocumentNode<QueryData> = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + consoleSpy.mockRestore(); + + interface Renders { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: { + data: DeepPartial<QueryData>; + networkStatus: NetworkStatus; + error: ApolloError | undefined; + }[]; + } + const renders: Renders = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const client = new ApolloClient({ + link, + cache, + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Parent /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + renders.suspenseCount++; + return <p>Loading</p>; + } + + function Parent() { + const [queryRef] = useBackgroundQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }); + + return <Todo queryRef={queryRef} />; + } + + function Todo({ + queryRef, + }: { + queryRef: QueryReference<DeepPartial<QueryData>>; + }) { + const { data, networkStatus, error } = useReadQuery(queryRef); + renders.frames.push({ data, networkStatus, error }); + renders.count++; + return ( + <> + <div data-testid="message">{data.greeting?.message}</div> + <div data-testid="recipient">{data.greeting?.recipient?.name}</div> + <div data-testid="network-status">{networkStatus}</div> + <div data-testid="error">{error?.message || "undefined"}</div> + </> + ); + } + + render(<App />); + + expect(renders.suspenseCount).toBe(0); + expect(screen.getByTestId("recipient")).toHaveTextContent("Cached Alice"); + // message is not present yet, since it's missing in partial data + 
expect(screen.getByTestId("message")).toHaveTextContent(""); + expect(screen.getByTestId("network-status")).toHaveTextContent("1"); // loading + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + link.simulateResult({ + result: { + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(screen.getByTestId("message")).toHaveTextContent("Hello world"); + }); + expect(screen.getByTestId("recipient")).toHaveTextContent("Cached Alice"); + expect(screen.getByTestId("network-status")).toHaveTextContent("7"); // ready + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(screen.getByTestId("recipient").textContent).toEqual("Alice"); + }); + expect(screen.getByTestId("message")).toHaveTextContent("Hello world"); + expect(screen.getByTestId("network-status")).toHaveTextContent("7"); // ready + expect(screen.getByTestId("error")).toHaveTextContent("undefined"); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + }); + + describe.skip("type tests", () => { + it("returns unknown when TData cannot be inferred", () => { + const query = gql` + query { + hello + } + `; + + const [queryRef] = useBackgroundQuery(query); + const { data } = useReadQuery(queryRef); + + expectTypeOf(data).toEqualTypeOf<unknown>(); + }); + + it("disallows wider variables type than specified", () => { + const { query } = useVariablesIntegrationTestCase(); + + // @ts-expect-error should not allow wider TVariables type + useBackgroundQuery(query, { variables: { id: "1", foo: "bar" } }); + }); + + it("returns TData in default case", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData | undefined>(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query); + + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData | undefined>(); + }); + + it('returns TData | undefined with errorPolicy: "ignore"', () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + errorPolicy: "ignore", + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | 
undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + errorPolicy: "ignore", + }); + + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData | undefined with errorPolicy: "all"', () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + errorPolicy: "all", + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const [explicitQueryRef] = useBackgroundQuery(query, { + errorPolicy: "all", + }); + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData with errorPolicy: "none"', () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + errorPolicy: "none", + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData | undefined>(); + + const [explicitQueryRef] = useBackgroundQuery(query, { + errorPolicy: "none", + }); + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData | undefined>(); + }); + + it("returns DeepPartial<TData> with returnPartialData: true", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + returnPartialData: true, + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + }); + + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns TData with returnPartialData: false", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + returnPartialData: false, + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: false, + }); + + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + }); + + it("returns TData when passing an option that does not affect TData", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = 
useBackgroundQuery(query, { + fetchPolicy: "no-cache", + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + fetchPolicy: "no-cache", + }); + + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + }); + + it("handles combinations of options", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredPartialDataIgnoreQueryRef] = useBackgroundQuery(query, { + returnPartialData: true, + errorPolicy: "ignore", + }); + const { data: inferredPartialDataIgnore } = useReadQuery( + inferredPartialDataIgnoreQueryRef + ); + + expectTypeOf(inferredPartialDataIgnore).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + inferredPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const [explicitPartialDataIgnoreQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + errorPolicy: "ignore", + }); + + const { data: explicitPartialDataIgnore } = useReadQuery( + explicitPartialDataIgnoreQueryRef + ); + + expectTypeOf(explicitPartialDataIgnore).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + explicitPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const [inferredPartialDataNoneQueryRef] = useBackgroundQuery(query, { + returnPartialData: true, + errorPolicy: "none", + }); + + const { data: inferredPartialDataNone } = useReadQuery( + inferredPartialDataNoneQueryRef + ); + + expectTypeOf(inferredPartialDataNone).toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + expectTypeOf( + inferredPartialDataNone + ).not.toEqualTypeOf<VariablesCaseData>(); + + const [explicitPartialDataNoneQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + errorPolicy: "none", + }); + + const { data: explicitPartialDataNone } = useReadQuery( + explicitPartialDataNoneQueryRef + ); + + expectTypeOf(explicitPartialDataNone).toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + expectTypeOf( + explicitPartialDataNone + ).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns correct TData type when combined options that do not affect TData", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + fetchPolicy: "no-cache", + returnPartialData: true, + errorPolicy: "none", + }); + const { data: inferred } = useReadQuery(inferredQueryRef); + + expectTypeOf(inferred).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + fetchPolicy: "no-cache", + returnPartialData: true, + errorPolicy: "none", + }); + + const { data: explicit } = useReadQuery(explicitQueryRef); + + expectTypeOf(explicit).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns QueryReference<TData> | undefined when `skip` is present", () => { + const { query } = 
useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, { + skip: true, + }); + + expectTypeOf(inferredQueryRef).toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + expectTypeOf(inferredQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { skip: true }); + + expectTypeOf(explicitQueryRef).toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + expectTypeOf(explicitQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + + // TypeScript is too smart and using a `const` or `let` boolean variable + // for the `skip` option results in a false positive. Using an options + // object allows us to properly check for a dynamic case. + const options = { + skip: true, + }; + + const [dynamicQueryRef] = useBackgroundQuery(query, { + skip: options.skip, + }); + + expectTypeOf(dynamicQueryRef).toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + expectTypeOf(dynamicQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + }); + + it("returns `undefined` when using `skipToken` unconditionally", () => { + const { query } = useVariablesIntegrationTestCase(); + + const [inferredQueryRef] = useBackgroundQuery(query, skipToken); + + expectTypeOf(inferredQueryRef).toEqualTypeOf<undefined>(); + expectTypeOf(inferredQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, skipToken); + + expectTypeOf(explicitQueryRef).toEqualTypeOf<undefined>(); + expectTypeOf(explicitQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + }); + + it("returns QueryReference<TData> | undefined when using conditional `skipToken`", () => { + const { query } = useVariablesIntegrationTestCase(); + const options = { + skip: true, + }; + + const [inferredQueryRef] = useBackgroundQuery( + query, + options.skip ? skipToken : undefined + ); + + expectTypeOf(inferredQueryRef).toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + expectTypeOf(inferredQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + + const [explicitQueryRef] = useBackgroundQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, options.skip ? skipToken : undefined); + + expectTypeOf(explicitQueryRef).toEqualTypeOf< + QueryReference<VariablesCaseData> | undefined + >(); + expectTypeOf(explicitQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + }); + + it("returns QueryReference<DeepPartial<TData>> | undefined when using `skipToken` with `returnPartialData`", () => { + const { query } = useVariablesIntegrationTestCase(); + const options = { + skip: true, + }; + + const [inferredQueryRef] = useBackgroundQuery( + query, + options.skip ? skipToken : { returnPartialData: true } + ); + + expectTypeOf(inferredQueryRef).toEqualTypeOf< + QueryReference<DeepPartial<VariablesCaseData>> | undefined + >(); + expectTypeOf(inferredQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + + const [explicitQueryRef] = useBackgroundQuery<VariablesCaseData>( + query, + options.skip ? 
skipToken : { returnPartialData: true } + ); + + expectTypeOf(explicitQueryRef).toEqualTypeOf< + QueryReference<DeepPartial<VariablesCaseData>> | undefined + >(); + expectTypeOf(explicitQueryRef).not.toEqualTypeOf< + QueryReference<VariablesCaseData> + >(); + }); + }); +}); diff --git a/src/react/hooks/__tests__/useFragment.test.tsx b/src/react/hooks/__tests__/useFragment.test.tsx --- a/src/react/hooks/__tests__/useFragment.test.tsx +++ b/src/react/hooks/__tests__/useFragment.test.tsx @@ -1,9 +1,15 @@ import * as React from "react"; -import { render, waitFor, screen, renderHook } from "@testing-library/react"; -import userEvent from '@testing-library/user-event'; +import { + render, + waitFor, + screen, + renderHook, + within, +} from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; import { act } from "react-dom/test-utils"; -import { useFragment_experimental as useFragment } from "../useFragment"; +import { UseFragmentOptions, useFragment } from "../useFragment"; import { MockedProvider } from "../../../testing"; import { ApolloProvider } from "../../context"; import { @@ -14,8 +20,15 @@ import { ApolloClient, Observable, ApolloLink, + StoreObject, + DocumentNode, + FetchResult, } from "../../../core"; import { useQuery } from "../useQuery"; +import { concatPagination } from "../../../utilities"; +import assert from "assert"; +import { expectTypeOf } from "expect-type"; +import { SubscriptionObserver } from "zen-observable-ts"; describe("useFragment", () => { it("is importable and callable", () => { @@ -83,7 +96,7 @@ describe("useFragment", () => { { __typename: "Item", id: 5 }, ], }, - }) + }); const renders: string[] = []; @@ -92,9 +105,7 @@ describe("useFragment", () => { const { loading, data } = useQuery(listQuery); expect(loading).toBe(false); return ( - <ol> - {data!.list.map(item => <Item key={item.id} id={item.id}/>)} - </ol> + <ol>{data?.list.map((item) => <Item key={item.id} id={item.id} />)}</ol> ); } @@ -108,7 +119,7 @@ describe("useFragment", () => { id: props.id, }, }); - return <li>{complete ? data!.text : "incomplete"}</li>; + return <li>{complete ? data.text : "incomplete"}</li>; } render( @@ -120,24 +131,15 @@ describe("useFragment", () => { function getItemTexts() { return screen.getAllByText(/^Item/).map( // eslint-disable-next-line testing-library/no-node-access - li => li.firstChild!.textContent + (li) => li.firstChild!.textContent ); } await waitFor(() => { - expect(getItemTexts()).toEqual([ - "Item #1", - "Item #2", - "Item #5", - ]); + expect(getItemTexts()).toEqual(["Item #1", "Item #2", "Item #5"]); }); - expect(renders).toEqual([ - "list", - "item 1", - "item 2", - "item 5", - ]); + expect(renders).toEqual(["list", "item 1", "item 2", "item 5"]); act(() => { cache.writeFragment({ @@ -151,11 +153,7 @@ describe("useFragment", () => { }); await waitFor(() => { - expect(getItemTexts()).toEqual([ - "Item #1", - "Item #2 updated", - "Item #5", - ]); + expect(getItemTexts()).toEqual(["Item #1", "Item #2 updated", "Item #5"]); }); expect(renders).toEqual([ @@ -170,7 +168,7 @@ describe("useFragment", () => { act(() => { cache.modify({ fields: { - list(list: Reference[], { readField }) { + list(list: readonly Reference[], { readField }) { return [ ...list, cache.writeFragment({ @@ -189,10 +187,11 @@ describe("useFragment", () => { text: "Item #4 from cache.modify", }, })!, - ].sort((ref1, ref2) => ( - readField<Item["id"]>("id", ref1)! - - readField<Item["id"]>("id", ref2)! 
- )); + ].sort( + (ref1, ref2) => + readField<Item["id"]>("id", ref1)! - + readField<Item["id"]>("id", ref2)! + ); }, }, }); @@ -295,15 +294,416 @@ describe("useFragment", () => { ], }, __META: { - extraRootIds: [ - "Item:2", - "Item:3", - "Item:4", - ], + extraRootIds: ["Item:2", "Item:3", "Item:4"], }, }); }); + it("returns data on first render", () => { + const ItemFragment: TypedDocumentNode<Item> = gql` + fragment ItemFragment on Item { + id + text + } + `; + const cache = new InMemoryCache(); + const item = { __typename: "Item", id: 1, text: "Item #1" }; + cache.writeFragment({ + fragment: ItemFragment, + data: item, + }); + const client = new ApolloClient({ + cache, + }); + function Component() { + const { data } = useFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 1 }, + }); + return <>{data.text}</>; + } + render( + <ApolloProvider client={client}> + <Component /> + </ApolloProvider> + ); + + // would throw if not present synchronously + screen.getByText(/Item #1/); + }); + + it.each<TypedDocumentNode<{ list: Item[] }>>([ + // This query uses a basic field-level @nonreactive directive. + gql` + query GetItems { + list { + id + text @nonreactive + } + } + `, + // This query uses @nonreactive on an anonymous/inline ...spread directive. + gql` + query GetItems { + list { + id + ... @nonreactive { + text + } + } + } + `, + // This query uses @nonreactive on a ...spread with a type condition. + gql` + query GetItems { + list { + id + ... on Item @nonreactive { + text + } + } + } + `, + // This query uses @nonreactive directive on a named fragment ...spread. + gql` + query GetItems { + list { + id + ...ItemText @nonreactive + } + } + fragment ItemText on Item { + text + } + `, + ])( + "Parent list component can use @nonreactive to avoid rerendering", + async (query) => { + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + list: concatPagination(), + }, + }, + Item: { + keyFields: ["id"], + // Configuring keyArgs:false for Item.text is one way to prevent field + // keys like text@nonreactive, but it's not the only way. Since + // @nonreactive is now in the KNOWN_DIRECTIVES array defined in + // utilities/graphql/storeUtils.ts, the '@nonreactive' suffix won't be + // automatically appended to field keys by default. 
+ // fields: { + // text: { + // keyArgs: false, + // }, + // }, + }, + }, + }); + + const client = new ApolloClient({ + cache, + link: ApolloLink.empty(), + }); + + const renders: string[] = []; + + function List() { + const { data } = useQuery(query); + + renders.push("list"); + + return ( + <ul> + {data?.list.map((item) => <Item key={item.id} item={item} />)} + </ul> + ); + } + + function Item({ item }: { item: Item }) { + const { data } = useFragment({ + fragment: ItemFragment, + fragmentName: "ItemFragment", + from: item, + }); + + renders.push(`item ${item.id}`); + + if (!data) return null; + + return <li>{`Item #${item.id}: ${data.text}`}</li>; + } + + act(() => { + cache.writeQuery({ + query, + data: { + list: [ + { __typename: "Item", id: 1, text: "first" }, + { __typename: "Item", id: 2, text: "second" }, + { __typename: "Item", id: 3, text: "third" }, + ], + }, + }); + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + list: [ + { __ref: 'Item:{"id":1}' }, + { __ref: 'Item:{"id":2}' }, + { __ref: 'Item:{"id":3}' }, + ], + }, + 'Item:{"id":1}': { + __typename: "Item", + id: 1, + text: "first", + }, + 'Item:{"id":2}': { + __typename: "Item", + id: 2, + text: "second", + }, + 'Item:{"id":3}': { + __typename: "Item", + id: 3, + text: "third", + }, + }); + + render( + <ApolloProvider client={client}> + <List /> + </ApolloProvider> + ); + + function getItemTexts() { + return screen.getAllByText(/Item #\d+/).map((el) => el.textContent); + } + + await waitFor(() => { + expect(getItemTexts()).toEqual([ + "Item #1: first", + "Item #2: second", + "Item #3: third", + ]); + }); + + expect(renders).toEqual(["list", "item 1", "item 2", "item 3"]); + + function appendLyToText(id: number) { + act(() => { + cache.modify({ + id: cache.identify({ __typename: "Item", id })!, + fields: { + text(existing) { + return existing + "ly"; + }, + }, + }); + }); + } + + appendLyToText(2); + + await waitFor(() => { + expect(renders).toEqual([ + "list", + "item 1", + "item 2", + "item 3", + "item 2", + ]); + + expect(getItemTexts()).toEqual([ + "Item #1: first", + "Item #2: secondly", + "Item #3: third", + ]); + }); + + appendLyToText(1); + + await waitFor(() => { + expect(renders).toEqual([ + "list", + "item 1", + "item 2", + "item 3", + "item 2", + "item 1", + ]); + + expect(getItemTexts()).toEqual([ + "Item #1: firstly", + "Item #2: secondly", + "Item #3: third", + ]); + }); + + appendLyToText(3); + + await waitFor(() => { + expect(renders).toEqual([ + "list", + "item 1", + "item 2", + "item 3", + "item 2", + "item 1", + "item 3", + ]); + + expect(getItemTexts()).toEqual([ + "Item #1: firstly", + "Item #2: secondly", + "Item #3: thirdly", + ]); + }); + + act(() => { + cache.writeQuery({ + query, + data: { + list: [ + { __typename: "Item", id: 4, text: "fourth" }, + { __typename: "Item", id: 5, text: "fifth" }, + ], + }, + }); + }); + + expect(cache.extract()).toEqual({ + ROOT_QUERY: { + __typename: "Query", + list: [ + { __ref: 'Item:{"id":1}' }, + { __ref: 'Item:{"id":2}' }, + { __ref: 'Item:{"id":3}' }, + { __ref: 'Item:{"id":4}' }, + { __ref: 'Item:{"id":5}' }, + ], + }, + 'Item:{"id":1}': { + __typename: "Item", + id: 1, + text: "firstly", + }, + 'Item:{"id":2}': { + __typename: "Item", + id: 2, + text: "secondly", + }, + 'Item:{"id":3}': { + __typename: "Item", + id: 3, + text: "thirdly", + }, + 'Item:{"id":4}': { + __typename: "Item", + id: 4, + text: "fourth", + }, + 'Item:{"id":5}': { + __typename: "Item", + id: 5, + text: "fifth", + }, + }); + + await waitFor(() => { 
+ expect(renders).toEqual([ + "list", + "item 1", + "item 2", + "item 3", + "item 2", + "item 1", + "item 3", + // The whole list had to be rendered again to append 4 and 5 + "list", + "item 1", + "item 2", + "item 3", + "item 4", + "item 5", + ]); + + expect(getItemTexts()).toEqual([ + "Item #1: firstly", + "Item #2: secondly", + "Item #3: thirdly", + "Item #4: fourth", + "Item #5: fifth", + ]); + }); + + appendLyToText(5); + + await waitFor(() => { + expect(renders).toEqual([ + "list", + "item 1", + "item 2", + "item 3", + "item 2", + "item 1", + "item 3", + "list", + "item 1", + "item 2", + "item 3", + "item 4", + "item 5", + // A single new render: + "item 5", + ]); + + expect(getItemTexts()).toEqual([ + "Item #1: firstly", + "Item #2: secondly", + "Item #3: thirdly", + "Item #4: fourth", + "Item #5: fifthly", + ]); + }); + + appendLyToText(4); + + await waitFor(() => { + expect(renders).toEqual([ + "list", + "item 1", + "item 2", + "item 3", + "item 2", + "item 1", + "item 3", + "list", + "item 1", + "item 2", + "item 3", + "item 4", + "item 5", + "item 5", + // A single new render: + "item 4", + ]); + + expect(getItemTexts()).toEqual([ + "Item #1: firstly", + "Item #2: secondly", + "Item #3: thirdly", + "Item #4: fourthly", + "Item #5: fifthly", + ]); + }); + } + ); + it("List can use useFragment with ListFragment", async () => { const cache = new InMemoryCache({ typePolicies: { @@ -338,7 +738,7 @@ describe("useFragment", () => { ], extra: "from ListFragment", }, - }) + }); const renders: string[] = []; @@ -349,9 +749,12 @@ describe("useFragment", () => { from: { __typename: "Query" }, }); expect(complete).toBe(true); + assert(!!complete); return ( <ol> - {data!.list.map(item => <Item key={item.id} id={item.id}/>)} + {data.list.map((item) => ( + <Item key={item.id} id={item.id} /> + ))} </ol> ); } @@ -365,7 +768,7 @@ describe("useFragment", () => { id: props.id, }, }); - return <li>{complete ? data!.text : "incomplete"}</li>; + return <li>{complete ? data.text : "incomplete"}</li>; } render( @@ -377,24 +780,15 @@ describe("useFragment", () => { function getItemTexts() { return screen.getAllByText(/^Item/).map( // eslint-disable-next-line testing-library/no-node-access - li => li.firstChild!.textContent + (li) => li.firstChild!.textContent ); } await waitFor(() => { - expect(getItemTexts()).toEqual([ - "Item #1", - "Item #2", - "Item #5", - ]); + expect(getItemTexts()).toEqual(["Item #1", "Item #2", "Item #5"]); }); - expect(renders).toEqual([ - "list", - "item 1", - "item 2", - "item 5", - ]); + expect(renders).toEqual(["list", "item 1", "item 2", "item 5"]); act(() => { cache.writeFragment({ @@ -408,11 +802,7 @@ describe("useFragment", () => { }); await waitFor(() => { - expect(getItemTexts()).toEqual([ - "Item #1", - "Item #2 updated", - "Item #5", - ]); + expect(getItemTexts()).toEqual(["Item #1", "Item #2 updated", "Item #5"]); }); expect(renders).toEqual([ @@ -427,7 +817,7 @@ describe("useFragment", () => { act(() => { cache.modify({ fields: { - list(list: Reference[], { readField }) { + list(list: readonly Reference[], { readField }) { return [ ...list, cache.writeFragment({ @@ -444,10 +834,11 @@ describe("useFragment", () => { id: 4, }, })!, - ].sort((ref1, ref2) => ( - readField<Item["id"]>("id", ref1)! - - readField<Item["id"]>("id", ref2)! - )); + ].sort( + (ref1, ref2) => + readField<Item["id"]>("id", ref1)! - + readField<Item["id"]>("id", ref2)! 
+ ); }, }, }); @@ -550,11 +941,7 @@ describe("useFragment", () => { extra: "from ListFragment", }, __META: { - extraRootIds: [ - "Item:2", - "Item:3", - "Item:4", - ], + extraRootIds: ["Item:2", "Item:3", "Item:4"], }, }); }); @@ -572,9 +959,9 @@ describe("useFragment", () => { // filtering explicitly here, so this test won't be broken. return items && items.filter(canRead); }, - } - } - } + }, + }, + }, }); const wrapper = ({ children }: any) => ( @@ -609,19 +996,18 @@ describe("useFragment", () => { `; const { result: renderResult } = renderHook( - () => useFragment({ - fragment: ListAndItemFragments, - fragmentName: "ListFragment", - from: { __typename: "Query" }, - returnPartialData: true, - }), - { wrapper }, + () => + useFragment({ + fragment: ListAndItemFragments, + fragmentName: "ListFragment", + from: { __typename: "Query" }, + }), + { wrapper } ); function checkHistory(expectedResultCount: number) { // Temporarily disabling this check until we can come up with a better // (more opt-in) system than result.previousResult.previousResult... - // function historyToArray( // result: UseFragmentResult<QueryData>, // ): UseFragmentResult<QueryData>[] { @@ -634,7 +1020,6 @@ describe("useFragment", () => { // const all = historyToArray(renderResult.current); // expect(all.length).toBe(expectedResultCount); // expect(all).toEqual(renderResult.all); - // if (renderResult.current.complete) { // expect(renderResult.current).toBe( // renderResult.current.lastCompleteResult @@ -669,8 +1054,8 @@ describe("useFragment", () => { }); }); + await waitFor(() => expect(renderResult.current.data).toEqual(data125)); expect(renderResult.current.complete).toBe(false); - expect(renderResult.current.data).toEqual(data125); expect(renderResult.current.missing).toEqual({ list: { // Even though Query.list is actually an array in the data, data paths @@ -704,38 +1089,44 @@ describe("useFragment", () => { }); }); + await waitFor(() => + expect(renderResult.current.data).toEqual(data182WithText) + ); expect(renderResult.current.complete).toBe(true); - expect(renderResult.current.data).toEqual(data182WithText); expect(renderResult.current.missing).toBeUndefined(); checkHistory(3); - await act(async () => cache.batch({ - update(cache) { - cache.evict({ - id: cache.identify({ - __typename: "Item", - id: 8, - }), - }); - - cache.evict({ - id: cache.identify({ - __typename: "Item", - id: 2, - }), - fieldName: "text", - }); - }, - })); + await act(async () => + cache.batch({ + update(cache) { + cache.evict({ + id: cache.identify({ + __typename: "Item", + id: 8, + }), + }); + + cache.evict({ + id: cache.identify({ + __typename: "Item", + id: 2, + }), + fieldName: "text", + }); + }, + }) + ); + await waitFor(() => + expect(renderResult.current.data).toEqual({ + list: [ + { __typename: "Item", id: 1, text: "oyez1" }, + { __typename: "Item", id: 2 }, + ], + }) + ); expect(renderResult.current.complete).toBe(false); - expect(renderResult.current.data).toEqual({ - list: [ - { __typename: "Item", id: 1, text: "oyez1" }, - { __typename: "Item", id: 2 }, - ], - }); expect(renderResult.current.missing).toEqual({ // TODO Figure out why Item:8 is not represented here. 
Likely because of // auto-filtering of dangling references from arrays, but that should @@ -765,11 +1156,7 @@ describe("useFragment", () => { }, ROOT_QUERY: { __typename: "Query", - list: [ - { __ref: "Item:1" }, - { __ref: "Item:8" }, - { __ref: "Item:2" }, - ], + list: [{ __ref: "Item:1" }, { __ref: "Item:8" }, { __ref: "Item:2" }], }, }); @@ -799,27 +1186,29 @@ describe("useFragment", () => { const client = new ApolloClient({ cache, - link: new ApolloLink(operation => new Observable(observer => { - if (operation.operationName === "ListQueryWithItemFragment") { - setTimeout(() => { - observer.next({ - data: { - list: [ - { __typename: "Item", id: 1 }, - { __typename: "Item", id: 2 }, - { __typename: "Item", id: 5 }, - ], - } - }); - observer.complete(); - }, 10); - } else { - observer.error(`unexpected query ${ - operation.operationName || - operation.query - }`); - } - })), + link: new ApolloLink( + (operation) => + new Observable((observer) => { + if (operation.operationName === "ListQueryWithItemFragment") { + setTimeout(() => { + observer.next({ + data: { + list: [ + { __typename: "Item", id: 1 }, + { __typename: "Item", id: 2 }, + { __typename: "Item", id: 5 }, + ], + }, + }); + observer.complete(); + }, 10); + } else { + observer.error( + `unexpected query ${operation.operationName || operation.query}` + ); + } + }) + ), }); const listQuery: TypedDocumentNode<QueryData> = gql` @@ -843,16 +1232,24 @@ describe("useFragment", () => { return complete ? ( <> - <select onChange={(e) => { - setCurrentItem(parseInt(e.currentTarget.value)) - }}> - {data!.list.map(item => <option key={item.id} value={item.id}>Select item {item.id}</option>)} + <select + onChange={(e) => { + setCurrentItem(parseInt(e.currentTarget.value)); + }} + > + {data.list.map((item) => ( + <option key={item.id} value={item.id}> + Select item {item.id} + </option> + ))} </select> <div> <Item id={currentItem} /> </div> <ol> - {data!.list.map(item => <Item key={item.id} id={item.id}/>)} + {data.list.map((item) => ( + <Item key={item.id} id={item.id} /> + ))} </ol> </> ) : null; @@ -866,7 +1263,7 @@ describe("useFragment", () => { id, }, }); - return <li>{complete ? data!.text : "incomplete"}</li>; + return <li>{complete ? 
data.text : "incomplete"}</li>; } render( @@ -878,7 +1275,7 @@ describe("useFragment", () => { function getItemTexts() { return screen.getAllByText(/^Item/).map( // eslint-disable-next-line testing-library/no-node-access - li => li.firstChild!.textContent + (li) => li.firstChild!.textContent ); } @@ -896,8 +1293,8 @@ describe("useFragment", () => { // Select "Item #2" via <select /> const user = userEvent.setup(); await user.selectOptions( - screen.getByRole('combobox'), - screen.getByRole('option', { name: 'Select item 2' }) + screen.getByRole("combobox"), + screen.getByRole("option", { name: "Select item 2" }) ); await waitFor(() => { @@ -923,11 +1320,12 @@ describe("useFragment", () => { beforeEach(() => { cache = new InMemoryCache(); - - wrapper = ({ children }: any) => <MockedProvider cache={cache}>{children}</MockedProvider>; + wrapper = ({ children }: any) => ( + <MockedProvider cache={cache}>{children}</MockedProvider> + ); // silence the console for the incomplete fragment write - const spy = jest.spyOn(console, 'error').mockImplementation(() => {}); + const spy = jest.spyOn(console, "error").mockImplementation(() => {}); cache.writeFragment({ fragment: ItemFragment, data: { @@ -951,54 +1349,353 @@ describe("useFragment", () => { expect(result.current.data).toEqual({ __typename: "Item", id: 5 }); expect(result.current.complete).toBe(false); }); + }); - it("throws an exception with `returnPartialData: false` if only partial data is available", () => { - // this is actually not intended behavior, but it is the current behavior - // let's document it in a test until we remove `returnPartialData` in 3.8 + describe("return value `complete` property", () => { + let cache: InMemoryCache, wrapper: React.FunctionComponent; + const ItemFragment = gql` + fragment ItemFragment on Item { + id + text + } + `; - let error: Error; + beforeEach(() => { + cache = new InMemoryCache(); + wrapper = ({ children }: any) => ( + <MockedProvider cache={cache}>{children}</MockedProvider> + ); + }); - renderHook( - () => { - // we can't just `expect(() => renderHook(...)).toThrow(...)` because it will render a second time, resulting in an uncaught exception - try { - useFragment({ - fragment: ItemFragment, - from: { __typename: "Item", id: 5 }, - returnPartialData: false, - }); - } catch (e) { - error = e; - } + test("if all data is available, `complete` is `true`", () => { + cache.writeFragment({ + fragment: ItemFragment, + data: { + __typename: "Item", + id: 5, + text: "Item #5", + }, + }); + + const { result } = renderHook( + () => + useFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 5 }, + }), + { wrapper } + ); + + expect(result.current).toStrictEqual({ + data: { __typename: "Item", id: 5, text: "Item #5" }, + complete: true, + }); + }); + + test("if only partial data is available, `complete` is `false`", () => { + cache.writeFragment({ + fragment: ItemFragment, + data: { + __typename: "Item", + id: 5, }, + }); + + const { result } = renderHook( + () => + useFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 5 }, + }), { wrapper } ); - expect(error!.toString()).toMatch(`Error: Can't find field 'text' on Item:5 object`); - }); - - it("throws an exception with `returnPartialData: false` if no data is available", () => { - // this is actually not intended behavior, but it is the current behavior - // let's document it in a test until we remove `returnPartialData` in 3.8 - let error: Error; - - renderHook( - () => { - // we can't just `expect(() => 
renderHook(...)).toThrow(...)` because it will render a second time, resulting in an uncaught exception - try { - useFragment({ - fragment: ItemFragment, - from: { __typename: "Item", id: 6 }, - returnPartialData: false, - }); - } catch (e) { - error = e; - } + expect(result.current).toStrictEqual({ + data: { __typename: "Item", id: 5 }, + complete: false, + missing: { + text: "Can't find field 'text' on Item:5 object", }, + }); + }); + + test("if no data is available, `complete` is `false`", () => { + const { result } = renderHook( + () => + useFragment({ + fragment: ItemFragment, + from: { __typename: "Item", id: 5 }, + }), { wrapper } ); - expect(error!.toString()).toMatch(`Error: Dangling reference to missing Item:6 object`); + expect(result.current).toStrictEqual({ + data: {}, + complete: false, + missing: "Dangling reference to missing Item:5 object", + }); + }); + }); +}); + +describe("has the same timing as `useQuery`", () => { + const itemFragment = gql` + fragment ItemFragment on Item { + id + title + } + `; + + it("both in same component", async () => { + const initialItem = { __typename: "Item", id: 1, title: "Item #initial" }; + const updatedItem = { __typename: "Item", id: 1, title: "Item #updated" }; + + const query = gql` + query { + item { + ...ItemFragment + } + } + ${itemFragment} + `; + let observer: SubscriptionObserver<FetchResult>; + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link: new ApolloLink( + (operation) => new Observable((o) => void (observer = o)) + ), + }); + + function Component() { + const { data: queryData } = useQuery(query, { returnPartialData: true }); + const { data: fragmentData, complete } = useFragment({ + fragment: itemFragment, + from: initialItem, + }); + + if (!queryData) { + expect(fragmentData).toStrictEqual({}); + } else { + expect({ item: fragmentData }).toStrictEqual(queryData); + } + return complete ? 
JSON.stringify(fragmentData) : "loading"; + } + render(<Component />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + await screen.findByText(/loading/); + assert(observer!); + observer.next({ data: { item: initialItem } }); + observer.complete(); + await screen.findByText(/Item #initial/); + cache.writeQuery({ query, data: { item: updatedItem } }); + await screen.findByText(/Item #updated/); + await new Promise((resolve) => setTimeout(resolve, 50)); + }); + + it("`useQuery` in parent, `useFragment` in child", async () => { + const item1 = { __typename: "Item", id: 1, title: "Item #1" }; + const item2 = { __typename: "Item", id: 2, title: "Item #2" }; + const query: TypedDocumentNode<{ items: Array<typeof item1> }> = gql` + query { + items { + ...ItemFragment + } + } + ${itemFragment} + `; + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + }); + cache.writeQuery({ query, data: { items: [item1, item2] } }); + + const valuePairs: Array< + [item: string, parentCount: number, childCount: number] + > = []; + function captureDOMState() { + const parent = screen.getByTestId("parent"); + const children = screen.getByTestId("children"); + valuePairs.push([ + "Item 1", + within(parent).queryAllByText(/Item #1/).length, + within(children).queryAllByText(/Item #1/).length, + ]); + valuePairs.push([ + "Item 2", + within(parent).queryAllByText(/Item #2/).length, + within(children).queryAllByText(/Item #2/).length, + ]); + } + + function Parent() { + const { data } = useQuery(query); + if (!data) throw new Error("should never happen"); + React.useEffect(captureDOMState); + return ( + <> + <div data-testid="parent"> + <p>{JSON.stringify(data)}</p> + </div> + <div data-testid="children"> + {data.items.map((item, i) => ( + <p key={i}> + <Child id={item.id} /> + </p> + ))} + </div> + </> + ); + } + function Child({ id }: { id: number }) { + const { data } = useFragment({ + fragment: itemFragment, + from: { __typename: "Item", id }, + }); + React.useEffect(captureDOMState); + return <>{JSON.stringify({ item: data })}</>; + } + + render(<Parent />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + cache.evict({ + id: cache.identify(item2), + }); + await waitFor(() => { + expect(() => screen.getByText(/Item #2/)).toThrow(); + }); + + for (const [_item, parentChount, childCount] of valuePairs) { + expect(parentChount).toBe(childCount); + } + }); + + /** + * This would be optimal, but would only work if `useFragment` and + * `useQuery` had exactly the same timing, which is not the case with + * the current implementation. + * The best we can do is to make sure that `useFragment` is not + * faster than `useQuery` in reasonable cases (of course, `useQuery` + * could trigger a network request on cache update, which would be slower + * than `useFragment`, no matter how much we delay it). + * If we change the core implementation into a more synchronous one, + * we should try to get this test to work, too. 
+ */ + it.failing("`useFragment` in parent, `useQuery` in child", async () => { + const item1 = { __typename: "Item", id: 1, title: "Item #1" }; + const item2 = { __typename: "Item", id: 2, title: "Item #2" }; + const query: TypedDocumentNode<{ items: Array<typeof item1> }> = gql` + query { + items { + ...ItemFragment + } + } + ${itemFragment} + `; + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + }); + cache.writeQuery({ query, data: { items: [item1, item2] } }); + + const valuePairs: Array< + [item: string, parentCount: number, childCount: number] + > = []; + function captureDOMState() { + const parent = screen.getByTestId("parent"); + const children = screen.getByTestId("children"); + valuePairs.push([ + "Item 1", + within(parent).queryAllByText(/Item #1/).length, + within(children).queryAllByText(/Item #1/).length, + ]); + valuePairs.push([ + "Item 2", + within(parent).queryAllByText(/Item #2/).length, + within(children).queryAllByText(/Item #2/).length, + ]); + } + + function Parent() { + const { data: data1 } = useFragment({ + fragment: itemFragment, + from: { __typename: "Item", id: 1 }, + }); + const { data: data2 } = useFragment({ + fragment: itemFragment, + from: { __typename: "Item", id: 2 }, + }); + React.useEffect(captureDOMState); + return ( + <> + <div data-testid="parent"> + <p>{JSON.stringify(data1)}</p> + <p>{JSON.stringify(data2)}</p> + </div> + <div data-testid="children"> + <p> + <Child /> + </p> + </div> + </> + ); + } + function Child() { + const { data } = useQuery(query); + if (!data) throw new Error("should never happen"); + React.useEffect(captureDOMState); + return <>{JSON.stringify(data)}</>; + } + + render(<Parent />, { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), }); + act( + () => + void cache.evict({ + id: cache.identify(item2), + }) + ); + await waitFor(() => { + expect(() => screen.getByText(/Item #2/)).toThrow(); + }); + + for (const [_item, parentChount, childCount] of valuePairs) { + expect(parentChount).toBe(childCount); + } + }); +}); + +describe.skip("Type Tests", () => { + test("NoInfer prevents adding arbitrary additional variables", () => { + const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>; + useFragment({ + fragment: typedNode, + from: { __typename: "Query" }, + variables: { + bar: 4, + // @ts-expect-error + nonExistingVariable: "string", + }, + }); + }); + + test("UseFragmentOptions interface shape", <TData, TVars>() => { + expectTypeOf<UseFragmentOptions<TData, TVars>>().toEqualTypeOf<{ + from: string | StoreObject | Reference; + fragment: DocumentNode | TypedDocumentNode<TData, TVars>; + fragmentName?: string; + optimistic?: boolean; + variables?: TVars; + canonizeResults?: boolean; + }>(); }); }); diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -1,137 +1,135 @@ -import React from 'react'; -import { GraphQLError } from 'graphql'; -import gql from 'graphql-tag'; -import { act, renderHook, waitFor } from '@testing-library/react'; +import React from "react"; +import { GraphQLError } from "graphql"; +import gql from "graphql-tag"; +import { act, renderHook, waitFor } from "@testing-library/react"; -import { +import { ApolloClient, ApolloLink, ErrorPolicy, InMemoryCache, NetworkStatus, - TypedDocumentNode -} from '../../../core'; -import { Observable } from 
'../../../utilities'; -import { ApolloProvider, resetApolloContext } from '../../../react'; -import { + TypedDocumentNode, +} from "../../../core"; +import { Observable } from "../../../utilities"; +import { ApolloProvider } from "../../../react"; +import { MockedProvider, mockSingleLink, wait, tick, - MockSubscriptionLink -} from '../../../testing'; -import { useLazyQuery } from '../useLazyQuery'; -import { QueryResult } from '../../types/types'; + MockSubscriptionLink, +} from "../../../testing"; +import { useLazyQuery } from "../useLazyQuery"; +import { QueryResult } from "../../types/types"; const IS_REACT_18 = React.version.startsWith("18"); -describe('useLazyQuery Hook', () => { - afterEach(() => { - resetApolloContext(); - }); +describe("useLazyQuery Hook", () => { const helloQuery: TypedDocumentNode<{ hello: string; - }> = gql`query { hello }`; + }> = gql` + query { + hello + } + `; - it('should hold query execution until manually triggered', async () => { + it("should hold query execution until manually triggered", async () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, delay: 20, }, ]; - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useLazyQuery(helloQuery), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world" }); }); - it('should set `called` to false by default', async () => { + it("should set `called` to false by default", async () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, delay: 20, }, ]; - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useLazyQuery(helloQuery), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); expect(result.current[1].called).toBe(false); }); - it('should set `called` to true after calling the lazy execute function', async () => { + it("should set `called` to true after calling the lazy execute function", async () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, delay: 20, }, ]; - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => 
useLazyQuery(helloQuery), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].called).toBe(false); const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].called).toBe(true); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].called).toBe(true); }); - it('should override `skip` if lazy mode execution function is called', async () => { + it("should override `skip` if lazy mode execution function is called", async () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, delay: 20, }, ]; @@ -141,11 +139,9 @@ describe('useLazyQuery Hook', () => { () => useLazyQuery(helloQuery, { skip: true } as any), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), - }, + } ); expect(result.current[1].loading).toBe(false); @@ -153,20 +149,26 @@ describe('useLazyQuery Hook', () => { const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].called).toBe(true); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].called).toBe(true); }); - it('should use variables defined in hook options (if any), when running the lazy execution function', async () => { + it("should use variables defined in hook options (if any), when running the lazy execution function", async () => { const query = gql` - query($id: number) { + query ($id: number) { hello(id: $id) } `; @@ -174,41 +176,46 @@ describe('useLazyQuery Hook', () => { const mocks = [ { request: { query, variables: { id: 1 } }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, delay: 20, }, ]; const { result } = renderHook( - () => useLazyQuery(query, { - variables: { id: 1 }, - }), + () => + useLazyQuery(query, { + variables: { id: 1 }, + }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), - }, + } ); const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current[1].data).toEqual({ hello: 'world 1' }); + expect(result.current[1].data).toEqual({ hello: "world 1" }); }); - it('should use variables passed into lazy 
execution function, overriding similar variables defined in Hook options', async () => { + it("should use variables passed into lazy execution function, overriding similar variables defined in Hook options", async () => { const query = gql` - query($id: number) { + query ($id: number) { hello(id: $id) } `; @@ -216,52 +223,60 @@ describe('useLazyQuery Hook', () => { const mocks = [ { request: { query, variables: { id: 1 } }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, delay: 20, }, { request: { query, variables: { id: 2 } }, - result: { data: { hello: 'world 2' } }, + result: { data: { hello: "world 2" } }, delay: 20, }, ]; const { result } = renderHook( - () => useLazyQuery(query, { - variables: { id: 1 }, - }), + () => + useLazyQuery(query, { + variables: { id: 1 }, + }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), - }, + } ); const execute = result.current[0]; setTimeout(() => execute({ variables: { id: 2 } })); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 2' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 2" }); }); - it('should merge variables from original hook and execution function', async () => { - const counterQuery: TypedDocumentNode<{ - counter: number; - }, { - hookVar?: boolean; - execVar?: boolean; - localDefaultVar?: boolean; - globalDefaultVar?: boolean; - }> = gql` - query GetCounter ( + it("should merge variables from original hook and execution function", async () => { + const counterQuery: TypedDocumentNode< + { + counter: number; + }, + { + hookVar?: boolean; + execVar?: boolean; + localDefaultVar?: boolean; + globalDefaultVar?: boolean; + } + > = gql` + query GetCounter( $hookVar: Boolean $execVar: Boolean $localDefaultVar: Boolean @@ -282,23 +297,28 @@ describe('useLazyQuery Hook', () => { }, }, cache: new InMemoryCache(), - link: new ApolloLink(request => new Observable(observer => { - if (request.operationName === "GetCounter") { - observer.next({ - data: { - counter: ++count, - vars: request.variables, - }, - }); - setTimeout(() => { - observer.complete(); - }, 10); - } else { - observer.error(new Error(`Unknown query: ${ - request.operationName || request.query - }`)); - } - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + if (request.operationName === "GetCounter") { + observer.next({ + data: { + counter: ++count, + vars: request.variables, + }, + }); + setTimeout(() => { + observer.complete(); + }, 10); + } else { + observer.error( + new Error( + `Unknown query: ${request.operationName || request.query}` + ) + ); + } + }) + ), }); const { result } = renderHook( @@ -321,22 +341,29 @@ describe('useLazyQuery Hook', () => { }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - 
expect(result.current.query.called).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.data).toBeUndefined(); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.called).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.data).toBeUndefined(); + }, + { interval: 1 } + ); const expectedFinalData = { counter: 1, @@ -348,32 +375,48 @@ describe('useLazyQuery Hook', () => { }, }; - const execResult = await result.current.exec( - { - variables: { - execVar: true - } - } - ); + const execResult = await result.current.exec({ + variables: { + execVar: true, + }, + }); - await waitFor(() => { - expect(execResult.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(execResult.called).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - expect(execResult.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); - await waitFor(() => { - expect(execResult.data).toEqual(expectedFinalData); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.called).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 10 }); + await waitFor( + () => { + expect(execResult.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(execResult.called).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(execResult.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(execResult.data).toEqual(expectedFinalData); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.called).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 10 } + ); expect(result.current.query.called).toBe(true); expect(result.current.query.data).toEqual(expectedFinalData); @@ -393,20 +436,29 @@ describe('useLazyQuery Hook', () => { }, }); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.called).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.data).toEqual({ - counter: 2, - vars: { - execVar: false, - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.called).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.data).toEqual({ + counter: 2, + vars: { + execVar: false, + }, + }); + }, + { interval: 1 } + ); const execResult2 = await result.current.exec({ fetchPolicy: "cache-and-network", @@ -416,29 +468,44 @@ describe('useLazyQuery Hook', () => { }, }); - await waitFor(() => { - expect(execResult2.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(execResult2.called).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - expect(execResult2.data).toEqual({ - counter: 3, - vars: { - ...expectedFinalData.vars, - execVar: true, - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(execResult2.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(execResult2.called).toBe(true); + }, + { 
interval: 1 } + ); + await waitFor( + () => { + expect(execResult2.data).toEqual({ + counter: 3, + vars: { + ...expectedFinalData.vars, + execVar: true, + }, + }); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current.query.called).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.called).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 10 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 10 } + ); expect(result.current.query.called).toBe(true); expect(result.current.query.data).toEqual({ counter: 3, @@ -449,7 +516,6 @@ describe('useLazyQuery Hook', () => { }); }); - it("changing queries", async () => { const query1 = gql` query { @@ -465,12 +531,12 @@ describe('useLazyQuery Hook', () => { { request: { query: query1 }, result: { data: { hello: "world" } }, - delay: 20 + delay: 20, }, { request: { query: query2 }, result: { data: { name: "changed" } }, - delay: 20 + delay: 20, }, ]; @@ -526,80 +592,90 @@ describe('useLazyQuery Hook', () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, delay: 20, }, { request: { query: helloQuery }, - result: { data: { hello: 'world 2' } }, + result: { data: { hello: "world 2" } }, delay: 20, }, ]; const { result } = renderHook( - () => useLazyQuery(helloQuery, { - fetchPolicy: 'network-only', - }), + () => + useLazyQuery(helloQuery, { + fetchPolicy: "network-only", + }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), - }, + } ); expect(result.current[1].loading).toBe(false); const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 1" }); setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 1" }); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 2' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 2" }); }); - it('should persist previous data when a query is re-run', async () => { + it("should persist previous data when a query is re-run", async () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, delay: 20, }, { request: { query: helloQuery }, - result: { data: { hello: 'world 2' } }, + result: { data: { hello: "world 2" } }, delay: 20, }, ]; const { 
result } = renderHook( - () => useLazyQuery(helloQuery, { - notifyOnNetworkStatusChange: true, - }), + () => + useLazyQuery(helloQuery, { + notifyOnNetworkStatusChange: true, + }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), - }, + } ); expect(result.current[1].loading).toBe(false); @@ -608,35 +684,47 @@ describe('useLazyQuery Hook', () => { const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].data).toBe(undefined); expect(result.current[1].previousData).toBe(undefined); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 1" }); expect(result.current[1].previousData).toBe(undefined); const refetch = result.current[1].refetch; setTimeout(() => refetch!()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 1' }); - expect(result.current[1].previousData).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 1" }); + expect(result.current[1].previousData).toEqual({ hello: "world 1" }); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'world 2' }); - expect(result.current[1].previousData).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "world 2" }); + expect(result.current[1].previousData).toEqual({ hello: "world 1" }); }); - it('should allow for the query to start with polling', async () => { + it("should allow for the query to start with polling", async () => { const mocks = [ { request: { query: helloQuery }, @@ -657,58 +745,79 @@ describe('useLazyQuery Hook', () => { <MockedProvider mocks={mocks}>{children}</MockedProvider> ); - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { wrapper }, + const { result } = renderHook(() => useLazyQuery(helloQuery), { wrapper }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); expect(result.current[1].data).toBe(undefined); await tick(); result.current[1].startPolling(10); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - if (IS_REACT_18) { - expect(result.current[1].data).toEqual({ hello: "world 1" }); - } else { - expect(result.current[1].data).toEqual({ hello: "world 3" }); - } - }, { interval: 1 }); + await waitFor( + () => { + 
expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + if (IS_REACT_18) { + expect(result.current[1].data).toEqual({ hello: "world 1" }); + } else { + expect(result.current[1].data).toEqual({ hello: "world 3" }); + } + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - if (IS_REACT_18) { - expect(result.current[1].data).toEqual({ hello: "world 2" }); - } else { - expect(result.current[1].data).toEqual({ hello: "world 3" }); - } - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + if (IS_REACT_18) { + expect(result.current[1].data).toEqual({ hello: "world 2" }); + } else { + expect(result.current[1].data).toEqual({ hello: "world 3" }); + } + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toEqual({ hello: "world 3" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); result.current[1].stopPolling(); }); - it('should persist previous data when a query is re-run and variable changes', async () => { + it("should persist previous data when a query is re-run and variable changes", async () => { const CAR_QUERY_BY_ID = gql` - query($id: Int) { + query ($id: Int) { car(id: $id) { make model @@ -718,17 +827,17 @@ describe('useLazyQuery Hook', () => { const data1 = { car: { - make: 'Audi', - model: 'A4', - __typename: 'Car', + make: "Audi", + model: "A4", + __typename: "Car", }, }; const data2 = { car: { - make: 'Audi', - model: 'RS8', - __typename: 'Car', + make: "Audi", + model: "RS8", + __typename: "Car", }, }; @@ -745,76 +854,79 @@ describe('useLazyQuery Hook', () => { }, ]; - const { result } = renderHook( - () => useLazyQuery(CAR_QUERY_BY_ID), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - } - ); + const { result } = renderHook(() => useLazyQuery(CAR_QUERY_BY_ID), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); expect(result.current[1].previousData).toBe(undefined); const execute = result.current[0]; - setTimeout(() => execute({ variables: { id: 1 }})); + setTimeout(() => execute({ variables: { id: 1 } })); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].data).toBe(undefined); expect(result.current[1].previousData).toBe(undefined); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].data).toEqual(data1); expect(result.current[1].previousData).toBe(undefined); - setTimeout(() => execute({ variables: { id: 2 }})); + setTimeout(() => execute({ variables: { id: 2 } })); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + 
() => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].data).toBe(undefined); expect(result.current[1].previousData).toEqual(data1); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].data).toEqual(data2); expect(result.current[1].previousData).toEqual(data1); }); - it('should work with cache-and-network fetch policy', async () => { - const cache = new InMemoryCache(); - const link = mockSingleLink( - { - request: { query: helloQuery }, - result: { data: { hello: 'from link' } }, - delay: 20, - }, - ); + it("should work with cache-and-network fetch policy", async () => { + const cache = new InMemoryCache(); + const link = mockSingleLink({ + request: { query: helloQuery }, + result: { data: { hello: "from link" } }, + delay: 20, + }); const client = new ApolloClient({ link, cache, }); - cache.writeQuery({ query: helloQuery, data: { hello: 'from cache' }}); + cache.writeQuery({ query: helloQuery, data: { hello: "from cache" } }); const { result } = renderHook( - () => useLazyQuery(helloQuery, { fetchPolicy: 'cache-and-network' }), + () => useLazyQuery(helloQuery, { fetchPolicy: "cache-and-network" }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current[1].loading).toBe(false); @@ -822,49 +934,53 @@ describe('useLazyQuery Hook', () => { const execute = result.current[0]; setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); // TODO: FIXME - expect(result.current[1].data).toEqual({ hello: 'from cache' }); + expect(result.current[1].data).toEqual({ hello: "from cache" }); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - expect(result.current[1].data).toEqual({ hello: 'from link' }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current[1].data).toEqual({ hello: "from link" }); }); - it('should return a promise from the execution function which resolves with the result', async () => { + it("should return a promise from the execution function which resolves with the result", async () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, delay: 20, }, ]; - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useLazyQuery(helloQuery), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); const execute = result.current[0]; - const executeResult = new Promise<QueryResult<any, any>>(resolve => { + const executeResult = new Promise<QueryResult<any, any>>((resolve) => { setTimeout(() => resolve(execute())); }); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + 
expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); let latestRenderResult: QueryResult; await waitFor(() => { @@ -873,15 +989,15 @@ describe('useLazyQuery Hook', () => { }); await waitFor(() => { latestRenderResult = result.current[1]; - expect(latestRenderResult.data).toEqual({ hello: 'world' }); + expect(latestRenderResult.data).toEqual({ hello: "world" }); }); - return executeResult.then(finalResult => { + return executeResult.then((finalResult) => { expect(finalResult).toEqual(latestRenderResult); }); }); - it('should have matching results from execution function and hook', async () => { + it("should have matching results from execution function and hook", async () => { const query = gql` query GetCountries($filter: String) { countries(filter: $filter) { @@ -928,16 +1044,11 @@ describe('useLazyQuery Hook', () => { }, ]; - const { result } = renderHook( - () => useLazyQuery(query), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useLazyQuery(query), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); @@ -947,13 +1058,19 @@ describe('useLazyQuery Hook', () => { executeResult = execute({ variables: { filter: "PA" } }); }); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].data).toEqual({ countries: { code: "PA", @@ -973,17 +1090,23 @@ describe('useLazyQuery Hook', () => { executeResult = execute({ variables: { filter: "BA" } }); }); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toEqual({ - countries: { - code: "BA", - name: "Bahamas", - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toEqual({ + countries: { + code: "BA", + name: "Bahamas", + }, + }); + }, + { interval: 1 } + ); expect(executeResult).toBeInstanceOf(Promise); expect((await executeResult).data).toEqual({ @@ -994,34 +1117,29 @@ describe('useLazyQuery Hook', () => { }); }); - it('the promise should reject with errors the “way useMutation does”', async () => { + it("the promise should reject with errors the “way useMutation does”", async () => { const mocks = [ { request: { query: helloQuery }, result: { - errors: [new GraphQLError('error 1')], + errors: [new GraphQLError("error 1")], }, delay: 20, }, { request: { query: helloQuery }, result: { - errors: [new GraphQLError('error 2')], + errors: [new GraphQLError("error 2")], }, delay: 20, }, ]; - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useLazyQuery(helloQuery), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); const execute = result.current[0]; 
expect(result.current[1].loading).toBe(false); @@ -1029,129 +1147,140 @@ describe('useLazyQuery Hook', () => { const executePromise = Promise.resolve().then(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].data).toBeUndefined(); expect(result.current[1].error).toBe(undefined); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].data).toBeUndefined(); - expect(result.current[1].error).toEqual(new Error('error 1')); + expect(result.current[1].error).toEqual(new Error("error 1")); - await executePromise.then(result => { + await executePromise.then((result) => { expect(result.loading).toBe(false); expect(result.data).toBeUndefined(); - expect(result.error!.message).toBe('error 1'); + expect(result.error!.message).toBe("error 1"); }); setTimeout(() => execute()); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current[1].data).toBeUndefined(); - expect(result.current[1].error).toEqual(new Error('error 1')); + expect(result.current[1].error).toEqual(new Error("error 1")); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].data).toBe(undefined); - expect(result.current[1].error).toEqual(new Error('error 2')); + expect(result.current[1].error).toEqual(new Error("error 2")); }); - it('the promise should not cause an unhandled rejection', async () => { + it("the promise should not cause an unhandled rejection", async () => { const mocks = [ { request: { query: helloQuery }, result: { - errors: [new GraphQLError('error 1')], + errors: [new GraphQLError("error 1")], }, }, ]; - const { result } = renderHook( - () => useLazyQuery(helloQuery), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useLazyQuery(helloQuery), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); const execute = result.current[0]; - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - execute(); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toBe(undefined); - execute(); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + execute(); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toBe(undefined); + execute(); + }, + { interval: 1 } + ); // Making sure the rejection triggers a test failure. 
await wait(50); }); - it('allows in-flight requests to resolve when component unmounts', async () => { + it("allows in-flight requests to resolve when component unmounts", async () => { const link = new MockSubscriptionLink(); - const client = new ApolloClient({ link, cache: new InMemoryCache() }) + const client = new ApolloClient({ link, cache: new InMemoryCache() }); const { result, unmount } = renderHook(() => useLazyQuery(helloQuery), { - wrapper: ({ children }) => - <ApolloProvider client={client}> - {children} - </ApolloProvider> + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), }); const [execute] = result.current; - let promise: Promise<QueryResult<{ hello: string }>> + let promise: Promise<QueryResult<{ hello: string }>>; act(() => { promise = execute(); - }) + }); unmount(); - link.simulateResult({ result: { data: { hello: 'Greetings' }}}, true); + link.simulateResult({ result: { data: { hello: "Greetings" } } }, true); const queryResult = await promise!; - expect(queryResult.data).toEqual({ hello: 'Greetings' }); + expect(queryResult.data).toEqual({ hello: "Greetings" }); expect(queryResult.loading).toBe(false); expect(queryResult.networkStatus).toBe(NetworkStatus.ready); }); - it('handles resolving multiple in-flight requests when component unmounts', async () => { + it("handles resolving multiple in-flight requests when component unmounts", async () => { const link = new MockSubscriptionLink(); - const client = new ApolloClient({ link, cache: new InMemoryCache() }) + const client = new ApolloClient({ link, cache: new InMemoryCache() }); const { result, unmount } = renderHook(() => useLazyQuery(helloQuery), { - wrapper: ({ children }) => - <ApolloProvider client={client}> - {children} - </ApolloProvider> + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), }); const [execute] = result.current; - let promise1: Promise<QueryResult<{ hello: string }>> - let promise2: Promise<QueryResult<{ hello: string }>> + let promise1: Promise<QueryResult<{ hello: string }>>; + let promise2: Promise<QueryResult<{ hello: string }>>; act(() => { promise1 = execute(); promise2 = execute(); - }) + }); unmount(); - link.simulateResult({ result: { data: { hello: 'Greetings' }}}, true); + link.simulateResult({ result: { data: { hello: "Greetings" } } }, true); const expectedResult = { - data: { hello: 'Greetings' }, + data: { hello: "Greetings" }, loading: false, networkStatus: NetworkStatus.ready, }; @@ -1161,13 +1290,13 @@ describe('useLazyQuery Hook', () => { }); // https://github.com/apollographql/apollo-client/issues/9755 - it('resolves each execution of the query with the appropriate result and renders with the result from the latest execution', async () => { + it("resolves each execution of the query with the appropriate result and renders with the result from the latest execution", async () => { interface Data { - user: { id: string, name: string } + user: { id: string; name: string }; } interface Variables { - id: string + id: string; } const query: TypedDocumentNode<Data, Variables> = gql` @@ -1181,39 +1310,38 @@ describe('useLazyQuery Hook', () => { const mocks = [ { - request: { query, variables: { id: '1' } }, - result: { data: { user: { id: '1', name: 'John Doe' }}}, - delay: 20 + request: { query, variables: { id: "1" } }, + result: { data: { user: { id: "1", name: "John Doe" } } }, + delay: 20, }, { - request: { query, variables: { id: '2' } }, - result: { data: { user: { id: '2', name: 'Jane 
Doe' }}}, - delay: 20 + request: { query, variables: { id: "2" } }, + result: { data: { user: { id: "2", name: "Jane Doe" } } }, + delay: 20, }, - ] + ]; const { result } = renderHook(() => useLazyQuery(query), { - wrapper: ({ children }) => - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), }); const [execute] = result.current; await act(async () => { - const promise1 = execute({ variables: { id: '1' }}); - const promise2 = execute({ variables: { id: '2' }}); + const promise1 = execute({ variables: { id: "1" } }); + const promise2 = execute({ variables: { id: "2" } }); await expect(promise1).resolves.toMatchObject({ ...mocks[0].result, - loading: false , + loading: false, called: true, }); await expect(promise2).resolves.toMatchObject({ ...mocks[1].result, - loading: false , + loading: false, called: true, }); }); @@ -1225,13 +1353,13 @@ describe('useLazyQuery Hook', () => { }); }); - it('uses the most recent options when the hook rerenders before execution', async () => { + it("uses the most recent options when the hook rerenders before execution", async () => { interface Data { - user: { id: string, name: string } + user: { id: string; name: string }; } interface Variables { - id: string + id: string; } const query: TypedDocumentNode<Data, Variables> = gql` @@ -1245,29 +1373,28 @@ describe('useLazyQuery Hook', () => { const mocks = [ { - request: { query, variables: { id: '1' } }, - result: { data: { user: { id: '1', name: 'John Doe' }}}, - delay: 30 + request: { query, variables: { id: "1" } }, + result: { data: { user: { id: "1", name: "John Doe" } } }, + delay: 30, }, { - request: { query, variables: { id: '2' } }, - result: { data: { user: { id: '2', name: 'Jane Doe' }}}, - delay: 20 + request: { query, variables: { id: "2" } }, + result: { data: { user: { id: "2", name: "Jane Doe" } } }, + delay: 20, }, - ] + ]; const { result, rerender } = renderHook( - ({ id }) => useLazyQuery(query, { variables: { id } }), + ({ id }) => useLazyQuery(query, { variables: { id } }), { - initialProps: { id: '1' }, - wrapper: ({ children }) => - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + initialProps: { id: "1" }, + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), } ); - rerender({ id: '2' }); + rerender({ id: "2" }); const [execute] = result.current; @@ -1278,15 +1405,15 @@ describe('useLazyQuery Hook', () => { await waitFor(() => { expect(result.current[1].data).toEqual(mocks[1].result.data); - }) + }); - await expect(promise!).resolves.toMatchObject( - { data: mocks[1].result.data } - ); + await expect(promise!).resolves.toMatchObject({ + data: mocks[1].result.data, + }); }); // https://github.com/apollographql/apollo-client/issues/10198 - it('uses the most recent query document when the hook rerenders before execution', async () => { + it("uses the most recent query document when the hook rerenders before execution", async () => { const query = gql` query DummyQuery { shouldNotBeUsed @@ -1296,19 +1423,18 @@ describe('useLazyQuery Hook', () => { const mocks = [ { request: { query: helloQuery }, - result: { data: { hello: 'Greetings' } }, - delay: 20 + result: { data: { hello: "Greetings" } }, + delay: 20, }, - ] + ]; const { result, rerender } = renderHook( - ({ query }) => useLazyQuery(query), + ({ query }) => useLazyQuery(query), { initialProps: { query }, - wrapper: ({ children }) => - <MockedProvider mocks={mocks}> - 
{children} - </MockedProvider> + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), } ); @@ -1322,21 +1448,21 @@ describe('useLazyQuery Hook', () => { }); await waitFor(() => { - expect(result.current[1].data).toEqual({ hello: 'Greetings' }); - }) + expect(result.current[1].data).toEqual({ hello: "Greetings" }); + }); - await expect(promise!).resolves.toMatchObject( - { data: { hello: 'Greetings' } } - ); + await expect(promise!).resolves.toMatchObject({ + data: { hello: "Greetings" }, + }); }); - it('does not refetch when rerendering after executing query', async () => { + it("does not refetch when rerendering after executing query", async () => { interface Data { - user: { id: string, name: string } + user: { id: string; name: string }; } interface Variables { - id: string + id: string; } const query: TypedDocumentNode<Data, Variables> = gql` @@ -1354,30 +1480,29 @@ describe('useLazyQuery Hook', () => { fetchCount++; return new Observable((observer) => { setTimeout(() => { - observer.next({ - data: { user: { id: operation.variables.id, name: 'John Doe' } } + observer.next({ + data: { user: { id: operation.variables.id, name: "John Doe" } }, }); observer.complete(); - }, 20) + }, 20); }); }); const client = new ApolloClient({ link, cache: new InMemoryCache() }); const { result, rerender } = renderHook( - () => useLazyQuery(query, { variables: { id: '1' }}), + () => useLazyQuery(query, { variables: { id: "1" } }), { - initialProps: { id: '1' }, - wrapper: ({ children }) => - <ApolloProvider client={client}> - {children} - </ApolloProvider> + initialProps: { id: "1" }, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), } ); const [execute] = result.current; - await act(() => execute({ variables: { id: '2' }})); + await act(() => execute({ variables: { id: "2" } })); expect(fetchCount).toBe(1); @@ -1386,7 +1511,7 @@ describe('useLazyQuery Hook', () => { await wait(10); expect(fetchCount).toBe(1); - }) + }); describe("network errors", () => { async function check(errorPolicy: ErrorPolicy) { @@ -1394,24 +1519,26 @@ describe('useLazyQuery Hook', () => { const client = new ApolloClient({ cache: new InMemoryCache(), - link: new ApolloLink(request => new Observable(observer => { - setTimeout(() => { - observer.error(networkError); - }, 20); - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + setTimeout(() => { + observer.error(networkError); + }, 20); + }) + ), }); const { result } = renderHook( - () => useLazyQuery(helloQuery, { - errorPolicy, - }), + () => + useLazyQuery(helloQuery, { + errorPolicy, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); const execute = result.current[0]; @@ -1421,32 +1548,53 @@ describe('useLazyQuery Hook', () => { setTimeout(execute); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - if (IS_REACT_18) { - expect(result.current[1].networkStatus).toBe(NetworkStatus.loading); - } else { - expect(result.current[1].networkStatus).toBe(NetworkStatus.error); - } - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toBeUndefined(); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + if (IS_REACT_18) { + 
expect(result.current[1].networkStatus).toBe(NetworkStatus.loading); + } else { + expect(result.current[1].networkStatus).toBe(NetworkStatus.error); + } + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toBeUndefined(); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].networkStatus).toBe(NetworkStatus.error); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toBeUndefined(); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].error!.message).toBe("from the network"); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].networkStatus).toBe(NetworkStatus.error); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toBeUndefined(); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].error!.message).toBe("from the network"); + }, + { interval: 1 } + ); } // For errorPolicy:"none", we expect result.error to be defined and @@ -1477,22 +1625,27 @@ describe('useLazyQuery Hook', () => { let count = 0; const client = new ApolloClient({ cache: new InMemoryCache(), - link: new ApolloLink(request => new Observable(observer => { - if (request.operationName === "GetCounter") { - observer.next({ - data: { - counter: ++count, - }, - }); - setTimeout(() => { - observer.complete(); - }, 10); - } else { - observer.error(new Error(`Unknown query: ${ - request.operationName || request.query - }`)); - } - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + if (request.operationName === "GetCounter") { + observer.next({ + data: { + counter: ++count, + }, + }); + setTimeout(() => { + observer.complete(); + }, 10); + } else { + observer.error( + new Error( + `Unknown query: ${request.operationName || request.query}` + ) + ); + } + }) + ), }); const defaultFetchPolicy = "network-only"; @@ -1512,50 +1665,91 @@ describe('useLazyQuery Hook', () => { }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.called).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.data).toBeUndefined(); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.called).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.data).toBeUndefined(); + }, + { interval: 1 } + ); const execResult = await result.current.exec(); expect(execResult.loading).toBe(false); expect(execResult.called).toBe(true); expect(execResult.data).toEqual({ counter: 1 }); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.data).toMatchObject({ counter: 1 }); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.called).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + 
{ interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.data).toMatchObject({ counter: 1 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.called).toBe(true); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.called).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.query.data).toEqual({ counter: 1 }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.called).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.query.data).toEqual({ counter: 1 }); + }, + { interval: 1 } + ); const { options } = result.current.query.observable; expect(options.fetchPolicy).toBe(defaultFetchPolicy); }); }); }); + +describe.skip("Type Tests", () => { + test("NoInfer prevents adding arbitrary additional variables", () => { + const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>; + const [_, { variables }] = useLazyQuery(typedNode, { + variables: { + bar: 4, + // @ts-expect-error + nonExistingVariable: "string", + }, + }); + variables?.bar; + // @ts-expect-error + variables?.nonExistingVariable; + }); +}); diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -1,24 +1,36 @@ -import React, { useEffect } from 'react'; -import { GraphQLError } from 'graphql'; -import gql from 'graphql-tag'; -import { act } from 'react-dom/test-utils'; -import { render, waitFor, screen, renderHook } from '@testing-library/react'; -import userEvent from '@testing-library/user-event'; +import React, { useEffect } from "react"; +import { GraphQLError } from "graphql"; +import gql from "graphql-tag"; +import { act } from "react-dom/test-utils"; +import { render, waitFor, screen, renderHook } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; import fetchMock from "fetch-mock"; -import { ApolloClient, ApolloLink, ApolloQueryResult, Cache, NetworkStatus, Observable, ObservableQuery, TypedDocumentNode } from '../../../core'; -import { InMemoryCache } from '../../../cache'; -import { itAsync, MockedProvider, MockSubscriptionLink, mockSingleLink, subscribeAndCount } from '../../../testing'; -import { ApolloProvider, resetApolloContext } from '../../context'; -import { useQuery } from '../useQuery'; -import { useMutation } from '../useMutation'; -import { BatchHttpLink } from '../../../link/batch-http'; -import { FetchResult } from '../../../link/core'; - -describe('useMutation Hook', () => { - afterEach(() => { - resetApolloContext(); - }); +import { + ApolloClient, + ApolloLink, + ApolloQueryResult, + Cache, + NetworkStatus, + Observable, + ObservableQuery, + TypedDocumentNode, +} from "../../../core"; +import { InMemoryCache } from "../../../cache"; +import { + itAsync, + MockedProvider, + MockSubscriptionLink, + mockSingleLink, + subscribeAndCount, +} from "../../../testing"; +import { ApolloProvider } from "../../context"; +import { useQuery } from "../useQuery"; +import { useMutation } from "../useMutation"; +import { BatchHttpLink } from "../../../link/batch-http"; +import { FetchResult } from "../../../link/core"; + 
+describe("useMutation Hook", () => { interface Todo { id: number; description: string; @@ -38,38 +50,35 @@ describe('useMutation Hook', () => { const CREATE_TODO_RESULT = { createTodo: { id: 1, - description: 'Get milk!', - priority: 'High', - __typename: 'Todo' - } + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, }; - const CREATE_TODO_ERROR = 'Failed to create item'; + const CREATE_TODO_ERROR = "Failed to create item"; - describe('General use', () => { - it('should handle a simple mutation properly', async () => { + describe("General use", () => { + it("should handle a simple mutation properly", async () => { const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; - const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, - ); + const { result } = renderHook(() => useMutation(CREATE_TODO_MUTATION), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); @@ -78,31 +87,33 @@ describe('useMutation Hook', () => { expect(result.current[1].loading).toBe(true); expect(result.current[1].data).toBe(undefined); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[1].data).toEqual(CREATE_TODO_RESULT); }); - it('should be able to call mutations as an effect', async () => { + it("should be able to call mutations as an effect", async () => { const variables = { - description: 'Get milk!' 
+ description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; const useCreateTodo = () => { - const [createTodo, { loading, data }] = useMutation( - CREATE_TODO_MUTATION - ); + const [createTodo, { loading, data }] = + useMutation(CREATE_TODO_MUTATION); useEffect(() => { createTodo({ variables }); }, [variables]); @@ -110,48 +121,48 @@ describe('useMutation Hook', () => { return { loading, data }; }; - const { result } = renderHook( - () => useCreateTodo(), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, - ); + const { result } = renderHook(() => useCreateTodo(), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(CREATE_TODO_RESULT); }); - it('should ensure the mutation callback function has a stable identity no matter what', async () => { + it("should ensure the mutation callback function has a stable identity no matter what", async () => { const variables1 = { - description: 'Get milk', + description: "Get milk", }; const data1 = { createTodo: { id: 1, - description: 'Get milk!', - priority: 'High', - __typename: 'Todo', - } + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, }; const variables2 = { - description: 'Write blog post', + description: "Write blog post", }; const data2 = { createTodo: { id: 1, - description: 'Write blog post', - priority: 'High', - __typename: 'Todo', + description: "Write blog post", + priority: "High", + __typename: "Todo", }, }; @@ -176,14 +187,12 @@ describe('useMutation Hook', () => { ({ variables }) => useMutation(CREATE_TODO_MUTATION, { variables }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), initialProps: { variables: variables1, }, - }, + } ); const createTodo = result.current[0]; @@ -195,88 +204,86 @@ describe('useMutation Hook', () => { expect(result.current[1].loading).toBe(true); expect(result.current[1].data).toBe(undefined); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }) + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[0]).toBe(createTodo); expect(result.current[1].data).toEqual(data1); rerender({ variables: variables2 }); act(() => void createTodo()); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }) + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[0]).toBe(createTodo); expect(result.current[1].data).toEqual(data2); }); - it('should not call setResult on an unmounted component', async () => { + it("should not call setResult on an unmounted component", async () => { const errorSpy = jest.spyOn(console, "error"); const variables = { - description: 'Get milk!' 
+ description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; const useCreateTodo = () => { - const [createTodo, { reset }] = useMutation( - CREATE_TODO_MUTATION - ); + const [createTodo, { reset }] = useMutation(CREATE_TODO_MUTATION); return { reset, createTodo }; }; - const { result, unmount } = renderHook( - () => useCreateTodo(), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, - ); + const { result, unmount } = renderHook(() => useCreateTodo(), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); unmount(); await act(async () => { await result.current.createTodo({ variables }); await result.current.reset(); - }) + }); expect(errorSpy).not.toHaveBeenCalled(); errorSpy.mockRestore(); }); - it('should resolve mutate function promise with mutation results', async () => { + it("should resolve mutate function promise with mutation results", async () => { const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; - const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, - ); + const { result } = renderHook(() => useMutation(CREATE_TODO_MUTATION), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); await act(async () => { await expect(result.current[0]({ variables })).resolves.toEqual({ @@ -285,10 +292,10 @@ describe('useMutation Hook', () => { }); }); - describe('mutate function upon error', () => { - it('resolves with the resulting data and errors', async () => { + describe("mutate function upon error", () => { + it("resolves with the resulting data and errors", async () => { const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ @@ -301,17 +308,17 @@ describe('useMutation Hook', () => { data: CREATE_TODO_RESULT, errors: [new GraphQLError(CREATE_TODO_ERROR)], }, - } + }, ]; const onError = jest.fn(); const { result } = renderHook( () => useMutation(CREATE_TODO_MUTATION, { onError }), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; @@ -326,9 +333,9 @@ describe('useMutation Hook', () => { expect(onError.mock.calls[0][0].message).toBe(CREATE_TODO_ERROR); }); - it('should reject when there’s only an error and no error policy is set', async () => { + it("should reject when there’s only an error and no error policy is set", async () => { const variables = { - description: 'Get milk!' 
+ description: "Get milk!", }; const mocks = [ @@ -340,17 +347,14 @@ describe('useMutation Hook', () => { result: { errors: [new GraphQLError(CREATE_TODO_ERROR)], }, - } + }, ]; - const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, - ); + const { result } = renderHook(() => useMutation(CREATE_TODO_MUTATION), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); const createTodo = result.current[0]; let fetchError: any; @@ -371,62 +375,110 @@ describe('useMutation Hook', () => { it(`should reject when errorPolicy is 'none'`, async () => { const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, result: { data: CREATE_TODO_RESULT, errors: [new GraphQLError(CREATE_TODO_ERROR)], }, - } + }, ]; const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION, { errorPolicy: 'none' }), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + () => useMutation(CREATE_TODO_MUTATION, { errorPolicy: "none" }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; await act(async () => { - await expect(createTodo({ variables })).rejects.toThrow(CREATE_TODO_ERROR); + await expect(createTodo({ variables })).rejects.toThrow( + CREATE_TODO_ERROR + ); }); }); it(`should resolve with 'data' and 'error' properties when errorPolicy is 'all'`, async () => { const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, result: { data: CREATE_TODO_RESULT, errors: [new GraphQLError(CREATE_TODO_ERROR)], }, + }, + ]; + + const { result } = renderHook( + () => useMutation(CREATE_TODO_MUTATION, { errorPolicy: "all" }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), } + ); + + const createTodo = result.current[0]; + + let fetchResult: any; + await act(async () => { + fetchResult = await createTodo({ variables }); + }); + + expect(fetchResult.data).toEqual(CREATE_TODO_RESULT); + expect(fetchResult.errors[0].message).toEqual(CREATE_TODO_ERROR); + }); + + it(`should call onError when errorPolicy is 'all'`, async () => { + const variables = { + description: "Get milk!", + }; + + const mocks = [ + { + request: { + query: CREATE_TODO_MUTATION, + variables, + }, + result: { + data: CREATE_TODO_RESULT, + errors: [new GraphQLError(CREATE_TODO_ERROR)], + }, + }, ]; + const onError = jest.fn(); + const onCompleted = jest.fn(); + const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION, { errorPolicy: 'all' }), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + () => + useMutation(CREATE_TODO_MUTATION, { + errorPolicy: "all", + onError, + onCompleted, + }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; @@ -438,13 +490,17 @@ describe('useMutation Hook', () => { expect(fetchResult.data).toEqual(CREATE_TODO_RESULT); expect(fetchResult.errors[0].message).toEqual(CREATE_TODO_ERROR); + expect(onError).toHaveBeenCalledTimes(1); + 
expect(onError.mock.calls[0][0].message).toBe(CREATE_TODO_ERROR); + expect(onCompleted).not.toHaveBeenCalled(); }); it(`should ignore errors when errorPolicy is 'ignore'`, async () => { - const errorMock = jest.spyOn(console, "error") + const errorMock = jest + .spyOn(console, "error") .mockImplementation(() => {}); const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ @@ -456,16 +512,16 @@ describe('useMutation Hook', () => { result: { errors: [new GraphQLError(CREATE_TODO_ERROR)], }, - } + }, ]; const { result } = renderHook( () => useMutation(CREATE_TODO_MUTATION, { errorPolicy: "ignore" }), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; @@ -479,41 +535,74 @@ describe('useMutation Hook', () => { expect(errorMock.mock.calls[0][0]).toMatch("Missing field"); errorMock.mockRestore(); }); + + it(`should not call onError when errorPolicy is 'ignore'`, async () => { + const variables = { + description: "Get milk!", + }; + + const mocks = [ + { + request: { + query: CREATE_TODO_MUTATION, + variables, + }, + result: { + errors: [new GraphQLError(CREATE_TODO_ERROR)], + }, + }, + ]; + + const onError = jest.fn(); + + const { result } = renderHook( + () => + useMutation(CREATE_TODO_MUTATION, { + errorPolicy: "ignore", + onError, + }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } + ); + + const createTodo = result.current[0]; + let fetchResult: any; + await act(async () => { + fetchResult = await createTodo({ variables }); + }); + + expect(fetchResult).toEqual({}); + expect(onError).not.toHaveBeenCalled(); + }); }); - it('should return the current client instance in the result object', async () => { - const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider> - {children} - </MockedProvider> - )}, - ); + it("should return the current client instance in the result object", async () => { + const { result } = renderHook(() => useMutation(CREATE_TODO_MUTATION), { + wrapper: ({ children }) => <MockedProvider>{children}</MockedProvider>, + }); expect(result.current[1].client).toBeInstanceOf(ApolloClient); }); - it ('should call client passed to execute function', async () => { - const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider> - {children} - </MockedProvider> - )}, - ); + it("should call client passed to execute function", async () => { + const { result } = renderHook(() => useMutation(CREATE_TODO_MUTATION), { + wrapper: ({ children }) => <MockedProvider>{children}</MockedProvider>, + }); const link = mockSingleLink(); const cache = new InMemoryCache(); const client = new ApolloClient({ cache, - link + link, }); - const mutateSpy = jest.spyOn(client, 'mutate').mockImplementation( - () => new Promise((resolve) => { - resolve({ data: CREATE_TODO_RESULT }) - }) + const mutateSpy = jest.spyOn(client, "mutate").mockImplementation( + () => + new Promise((resolve) => { + resolve({ data: CREATE_TODO_RESULT }); + }) ); const createTodo = result.current[0]; @@ -524,13 +613,13 @@ describe('useMutation Hook', () => { expect(mutateSpy).toHaveBeenCalledTimes(1); }); - it('should merge provided variables', async () => { + it("should merge provided variables", async () => { const 
CREATE_TODO_DATA = { createTodo: { id: 1, - description: 'Get milk!', - priority: 'Low', - __typename: 'Todo', + description: "Get milk!", + priority: "Low", + __typename: "Todo", }, }; const mocks = [ @@ -538,46 +627,49 @@ describe('useMutation Hook', () => { request: { query: CREATE_TODO_MUTATION, variables: { - priority: 'Low', - description: 'Get milk.' - } + priority: "Low", + description: "Get milk.", + }, }, result: { data: CREATE_TODO_DATA, - } - } + }, + }, ]; const { result } = renderHook( - () => useMutation< - { createTodo: Todo }, - { priority?: string, description?: string } - >(CREATE_TODO_MUTATION, { - variables: { priority: 'Low' } - }), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + () => + useMutation< + { createTodo: Todo }, + { priority?: string; description?: string } + >(CREATE_TODO_MUTATION, { + variables: { priority: "Low" }, + }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; let fetchResult: any; await act(async () => { - fetchResult = await createTodo({ variables: { description: 'Get milk.' }}); + fetchResult = await createTodo({ + variables: { description: "Get milk." }, + }); }); expect(fetchResult).toEqual({ data: CREATE_TODO_DATA }); }); - it('should be possible to reset the mutation', async () => { + it("should be possible to reset the mutation", async () => { const CREATE_TODO_DATA = { createTodo: { id: 1, - priority: 'Low', - description: 'Get milk!', - __typename: 'Todo', + priority: "Low", + description: "Get milk!", + __typename: "Todo", }, }; @@ -586,33 +678,34 @@ describe('useMutation Hook', () => { request: { query: CREATE_TODO_MUTATION, variables: { - priority: 'Low', - description: 'Get milk.', - } + priority: "Low", + description: "Get milk.", + }, }, result: { data: CREATE_TODO_DATA, - } - } + }, + }, ]; const { result } = renderHook( - () => useMutation< - { createTodo: Todo }, - { priority: string, description: string } - >(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + () => + useMutation< + { createTodo: Todo }, + { priority: string; description: string } + >(CREATE_TODO_MUTATION), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; let fetchResult: any; await act(async () => { fetchResult = await createTodo({ - variables: { priority: 'Low', description: 'Get milk.' }, + variables: { priority: "Low", description: "Get milk." 
}, }); }); @@ -622,27 +715,30 @@ describe('useMutation Hook', () => { result.current[1].reset(); }); - await waitFor(() => { - expect(result.current[1].data).toBe(undefined); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[1].data).toBe(undefined); + }, + { interval: 1 } + ); }); }); - describe('Callbacks', () => { - it('should allow passing an onCompleted handler to the execution function', async () => { + describe("Callbacks", () => { + it("should allow passing an onCompleted handler to the execution function", async () => { const CREATE_TODO_DATA = { createTodo: { id: 1, - priority: 'Low', - description: 'Get milk!', - __typename: 'Todo', + priority: "Low", + description: "Get milk!", + __typename: "Todo", }, }; const variables = { - priority: 'Low', - description: 'Get milk.', - } + priority: "Low", + description: "Get milk.", + }; const mocks = [ { @@ -653,19 +749,20 @@ describe('useMutation Hook', () => { result: { data: CREATE_TODO_DATA, }, - } + }, ]; const { result } = renderHook( - () => useMutation< - { createTodo: Todo }, - { priority: string, description: string } - >(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + () => + useMutation< + { createTodo: Todo }, + { priority: string; description: string } + >(CREATE_TODO_MUTATION), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; @@ -683,23 +780,26 @@ describe('useMutation Hook', () => { expect(fetchResult).toEqual({ data: CREATE_TODO_DATA }); expect(result.current[1].data).toEqual(CREATE_TODO_DATA); expect(onCompleted).toHaveBeenCalledTimes(1); - expect(onCompleted).toHaveBeenCalledWith(CREATE_TODO_DATA, expect.objectContaining({variables})); + expect(onCompleted).toHaveBeenCalledWith( + CREATE_TODO_DATA, + expect.objectContaining({ variables }) + ); expect(onError).toHaveBeenCalledTimes(0); }); - it('prefers the onCompleted handler passed to the execution function rather than the hook', async () => { + it("prefers the onCompleted handler passed to the execution function rather than the hook", async () => { const CREATE_TODO_DATA = { createTodo: { id: 1, - priority: 'Low', - description: 'Get milk!', - __typename: 'Todo', + priority: "Low", + description: "Get milk!", + __typename: "Todo", }, }; const variables = { - priority: 'Low', - description: 'Get milk.', - } + priority: "Low", + description: "Get milk.", + }; const mocks = [ { request: { @@ -707,22 +807,21 @@ describe('useMutation Hook', () => { variables, }, result: { - data: CREATE_TODO_DATA + data: CREATE_TODO_DATA, }, - } + }, ]; const hookOnCompleted = jest.fn(); const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION, { onCompleted: hookOnCompleted }), + () => + useMutation(CREATE_TODO_MUTATION, { onCompleted: hookOnCompleted }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ) - }, + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const [createTodo] = result.current; @@ -735,12 +834,12 @@ describe('useMutation Hook', () => { expect(hookOnCompleted).not.toHaveBeenCalled(); }); - it('should allow passing an onError handler to the execution function', async () => { + it("should allow passing an onError handler to the execution function", async () => { const errors = [new GraphQLError(CREATE_TODO_ERROR)]; const variables = { - priority: 'Low', - description: 'Get milk.', - } + 
priority: "Low", + description: "Get milk.", + }; const mocks = [ { request: { @@ -750,19 +849,20 @@ describe('useMutation Hook', () => { result: { errors, }, - } + }, ]; const { result } = renderHook( - () => useMutation< - { createTodo: Todo }, - { priority: string, description: string } - >(CREATE_TODO_MUTATION), - { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - )}, + () => + useMutation< + { createTodo: Todo }, + { priority: string; description: string } + >(CREATE_TODO_MUTATION), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const createTodo = result.current[0]; @@ -785,14 +885,17 @@ describe('useMutation Hook', () => { expect(onCompleted).toHaveBeenCalledTimes(0); expect(onError).toHaveBeenCalledTimes(1); - expect(onError).toHaveBeenCalledWith(errors[0], expect.objectContaining({variables})); + expect(onError).toHaveBeenCalledWith( + errors[0], + expect.objectContaining({ variables }) + ); }); - it('prefers the onError handler passed to the execution function instead of the hook', async () => { + it("prefers the onError handler passed to the execution function instead of the hook", async () => { const variables = { - priority: 'Low', - description: 'Get milk.', - } + priority: "Low", + description: "Get milk.", + }; const mocks = [ { request: { @@ -802,7 +905,7 @@ describe('useMutation Hook', () => { result: { errors: [new GraphQLError(CREATE_TODO_ERROR)], }, - } + }, ]; const hookOnError = jest.fn(); @@ -811,11 +914,9 @@ describe('useMutation Hook', () => { () => useMutation(CREATE_TODO_MUTATION, { onError: hookOnError }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> - ) - }, + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + } ); const [createTodo] = result.current; @@ -828,12 +929,12 @@ describe('useMutation Hook', () => { expect(hookOnError).not.toHaveBeenCalled(); }); - it('should allow updating onError while mutation is executing', async () => { + it("should allow updating onError while mutation is executing", async () => { const errors = [new GraphQLError(CREATE_TODO_ERROR)]; const variables = { - priority: 'Low', - description: 'Get milk.', - } + priority: "Low", + description: "Get milk.", + }; const mocks = [ { request: { @@ -843,7 +944,7 @@ describe('useMutation Hook', () => { result: { errors, }, - } + }, ]; const onCompleted = jest.fn(); @@ -853,17 +954,15 @@ describe('useMutation Hook', () => { ({ onCompleted, onError }) => { return useMutation< { createTodo: Todo }, - { priority: string, description: string } + { priority: string; description: string } >(CREATE_TODO_MUTATION, { onCompleted, onError }); }, { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), initialProps: { onCompleted, onError }, - }, + } ); const createTodo = result.current[0]; @@ -886,34 +985,37 @@ describe('useMutation Hook', () => { expect(onCompleted).toHaveBeenCalledTimes(0); expect(onError).toHaveBeenCalledTimes(0); expect(onError1).toHaveBeenCalledTimes(1); - expect(onError1).toHaveBeenCalledWith(errors[0], expect.objectContaining({variables})); + expect(onError1).toHaveBeenCalledWith( + errors[0], + expect.objectContaining({ variables }) + ); }); - it('should never allow onCompleted handler to be stale', async () => { + it("should never allow onCompleted handler to be stale", async () => { const CREATE_TODO_DATA 
= { createTodo: { id: 1, - priority: 'Low', - description: 'Get milk!', - __typename: 'Todo', + priority: "Low", + description: "Get milk!", + __typename: "Todo", }, }; const variables = { - priority: 'Low', - description: 'Get milk2.', - } + priority: "Low", + description: "Get milk2.", + }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, result: { data: CREATE_TODO_DATA, }, - } + }, ]; const onCompleted = jest.fn(); @@ -921,17 +1023,15 @@ describe('useMutation Hook', () => { ({ onCompleted }) => { return useMutation< { createTodo: Todo }, - { priority: string, description: string } + { priority: string; description: string } >(CREATE_TODO_MUTATION, { onCompleted }); }, { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), initialProps: { onCompleted }, - }, + } ); const onCompleted1 = jest.fn(); @@ -948,34 +1048,37 @@ describe('useMutation Hook', () => { expect(result.current[1].data).toEqual(CREATE_TODO_DATA); expect(onCompleted).toHaveBeenCalledTimes(0); expect(onCompleted1).toHaveBeenCalledTimes(1); - expect(onCompleted1).toHaveBeenCalledWith(CREATE_TODO_DATA, expect.objectContaining({variables})); + expect(onCompleted1).toHaveBeenCalledWith( + CREATE_TODO_DATA, + expect.objectContaining({ variables }) + ); }); - it('should allow updating onCompleted while mutation is executing', async () => { + it("should allow updating onCompleted while mutation is executing", async () => { const CREATE_TODO_DATA = { createTodo: { id: 1, - priority: 'Low', - description: 'Get milk!', - __typename: 'Todo', + priority: "Low", + description: "Get milk!", + __typename: "Todo", }, }; const variables = { - priority: 'Low', - description: 'Get milk2.', - } + priority: "Low", + description: "Get milk2.", + }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, result: { data: CREATE_TODO_DATA, }, - } + }, ]; const onCompleted = jest.fn(); @@ -984,17 +1087,15 @@ describe('useMutation Hook', () => { ({ onCompleted }) => { return useMutation< { createTodo: Todo }, - { priority: string, description: string } + { priority: string; description: string } >(CREATE_TODO_MUTATION, { onCompleted }); }, { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), initialProps: { onCompleted }, - }, + } ); const createTodo = result.current[0]; @@ -1013,24 +1114,30 @@ describe('useMutation Hook', () => { expect(result.current[1].data).toEqual(CREATE_TODO_DATA); expect(onCompleted).toHaveBeenCalledTimes(0); expect(onCompleted1).toHaveBeenCalledTimes(1); - expect(onCompleted1).toHaveBeenCalledWith(CREATE_TODO_DATA, expect.objectContaining({variables})); + expect(onCompleted1).toHaveBeenCalledWith( + CREATE_TODO_DATA, + expect.objectContaining({ variables }) + ); }); }); - describe('ROOT_MUTATION cache data', () => { + describe("ROOT_MUTATION cache data", () => { const startTime = Date.now(); - const link = new ApolloLink(operation => new Observable(observer => { - observer.next({ - data: { - __typename: "Mutation", - doSomething: { - __typename: "MutationPayload", - time: startTime, - }, - }, - }); - observer.complete(); - })); + const link = new ApolloLink( + (operation) => + new Observable((observer) => { + observer.next({ + data: { + __typename: "Mutation", + doSomething: { + __typename: "MutationPayload", + time: startTime, + }, + }, + }); + 
observer.complete(); + }) + ); const mutation = gql` mutation DoSomething { @@ -1040,7 +1147,7 @@ describe('useMutation Hook', () => { } `; - it('should be removed by default after the mutation', async () => { + it("should be removed by default after the mutation", async () => { let timeReadCount = 0; let timeMergeCount = 0; const client = new ApolloClient({ @@ -1066,31 +1173,28 @@ describe('useMutation Hook', () => { }), }); - const { result } = renderHook( - () => useMutation(mutation), - { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - )}, - ); + const { result } = renderHook(() => useMutation(mutation), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current[1].loading).toBe(false); expect(result.current[1].called).toBe(false); expect(result.current[1].data).toBe(undefined); const mutate = result.current[0]; - let mutationResult: any + let mutationResult: any; act(() => { mutationResult = mutate({ - update(cache, { - data: { - doSomething: { - __typename, - time, + update( + cache, + { + data: { + doSomething: { __typename, time }, }, - }, - }) { + } + ) { expect(__typename).toBe("MutationPayload"); expect(time).toBeInstanceOf(Date); expect(time.getTime()).toBe(startTime); @@ -1109,28 +1213,27 @@ describe('useMutation Hook', () => { }, }); }, - }).then(({ - data: { - doSomething: { - __typename, - time, - }, - }, - }) => { - expect(__typename).toBe("MutationPayload"); - expect(time).toBeInstanceOf(Date); - expect(time.getTime()).toBe(startTime); - expect(timeReadCount).toBe(1); - expect(timeMergeCount).toBe(1); - // The contents of the ROOT_MUTATION object exist only briefly, - // for the duration of the mutation update, and are removed after - // the mutation write is finished. - expect(client.cache.extract()).toEqual({ - ROOT_MUTATION: { - __typename: "Mutation", + }).then( + ({ + data: { + doSomething: { __typename, time }, }, - }); - }); + }) => { + expect(__typename).toBe("MutationPayload"); + expect(time).toBeInstanceOf(Date); + expect(time.getTime()).toBe(startTime); + expect(timeReadCount).toBe(1); + expect(timeMergeCount).toBe(1); + // The contents of the ROOT_MUTATION object exist only briefly, + // for the duration of the mutation update, and are removed after + // the mutation write is finished. 
+ expect(client.cache.extract()).toEqual({ + ROOT_MUTATION: { + __typename: "Mutation", + }, + }); + } + ); mutationResult.catch(() => {}); }); @@ -1145,19 +1248,16 @@ describe('useMutation Hook', () => { expect(result.current[1].data).toBeDefined(); const { - doSomething: { - __typename, - time, - }, + doSomething: { __typename, time }, } = result.current[1].data; - expect(__typename).toBe('MutationPayload'); + expect(__typename).toBe("MutationPayload"); expect(time).toBeInstanceOf(Date); expect(time.getTime()).toBe(startTime); await expect(mutationResult).resolves.toBe(undefined); }); - it('can be preserved by passing keepRootFields: true', async () => { + it("can be preserved by passing keepRootFields: true", async () => { let timeReadCount = 0; let timeMergeCount = 0; @@ -1185,14 +1285,15 @@ describe('useMutation Hook', () => { }); const { result } = renderHook( - () => useMutation(mutation, { - keepRootFields: true, - }), - { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - )}, + () => + useMutation(mutation, { + keepRootFields: true, + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } ); expect(result.current[1].loading).toBe(false); @@ -1200,17 +1301,17 @@ describe('useMutation Hook', () => { expect(result.current[1].data).toBe(undefined); const mutate = result.current[0]; - let mutationResult: any + let mutationResult: any; act(() => { mutationResult = mutate({ - update(cache, { - data: { - doSomething: { - __typename, - time, + update( + cache, + { + data: { + doSomething: { __typename, time }, }, - }, - }) { + } + ) { expect(__typename).toBe("MutationPayload"); expect(time).toBeInstanceOf(Date); expect(time.getTime()).toBe(startTime); @@ -1226,29 +1327,28 @@ describe('useMutation Hook', () => { }, }); }, - }).then(({ - data: { - doSomething: { - __typename, - time, + }).then( + ({ + data: { + doSomething: { __typename, time }, }, - }, - }) => { - expect(__typename).toBe("MutationPayload"); - expect(time).toBeInstanceOf(Date); - expect(time.getTime()).toBe(startTime); - expect(timeReadCount).toBe(1); - expect(timeMergeCount).toBe(1); - expect(client.cache.extract()).toEqual({ - ROOT_MUTATION: { - __typename: "Mutation", - doSomething: { - __typename: "MutationPayload", - time: startTime, + }) => { + expect(__typename).toBe("MutationPayload"); + expect(time).toBeInstanceOf(Date); + expect(time.getTime()).toBe(startTime); + expect(timeReadCount).toBe(1); + expect(timeMergeCount).toBe(1); + expect(client.cache.extract()).toEqual({ + ROOT_MUTATION: { + __typename: "Mutation", + doSomething: { + __typename: "MutationPayload", + time: startTime, + }, }, - }, - }); - }); + }); + } + ); }); mutationResult.catch(() => {}); @@ -1263,12 +1363,9 @@ describe('useMutation Hook', () => { expect(result.current[1].data).toBeDefined(); const { - doSomething: { - __typename, - time, - }, + doSomething: { __typename, time }, } = result.current[1].data; - expect(__typename).toBe('MutationPayload'); + expect(__typename).toBe("MutationPayload"); expect(time).toBeInstanceOf(Date); expect(time.getTime()).toBe(startTime); @@ -1276,31 +1373,28 @@ describe('useMutation Hook', () => { }); }); - describe('Update function', () => { - it('should be called with the provided variables', async () => { - const variables = { description: 'Get milk!' }; + describe("Update function", () => { + it("should be called with the provided variables", async () => { + const variables = { description: "Get milk!" 
}; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; let variablesMatched = false; const Component = () => { - const [createTodo] = useMutation( - CREATE_TODO_MUTATION, - { - update(_, __, options) { - expect(options.variables).toEqual(variables); - variablesMatched = true; - } - } - ); + const [createTodo] = useMutation(CREATE_TODO_MUTATION, { + update(_, __, options) { + expect(options.variables).toEqual(variables); + variablesMatched = true; + }, + }); useEffect(() => { createTodo({ variables }); @@ -1318,35 +1412,36 @@ describe('useMutation Hook', () => { await waitFor(() => expect(variablesMatched).toBe(true)); }); - itAsync('should be called with the provided context', (resolve, reject) => { + itAsync("should be called with the provided context", (resolve, reject) => { const context = { id: 3 }; const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; let foundContext = false; const Component = () => { - const [createTodo] = useMutation<Todo, { description: string }, { id: number }>( - CREATE_TODO_MUTATION, - { - context, - update(_, __, options) { - expect(options.context).toEqual(context); - foundContext = true; - } - } - ); + const [createTodo] = useMutation< + Todo, + { description: string }, + { id: number } + >(CREATE_TODO_MUTATION, { + context, + update(_, __, options) { + expect(options.context).toEqual(context); + foundContext = true; + }, + }); useEffect(() => { createTodo({ variables }); @@ -1366,33 +1461,30 @@ describe('useMutation Hook', () => { }).then(resolve, reject); }); - describe('If context is not provided', () => { - itAsync('should be undefined', (resolve, reject) => { + describe("If context is not provided", () => { + itAsync("should be undefined", (resolve, reject) => { const variables = { - description: 'Get milk!' + description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; let checkedContext = false; const Component = () => { - const [createTodo] = useMutation( - CREATE_TODO_MUTATION, - { - update(_, __, options) { - expect(options.context).toBeUndefined(); - checkedContext = true; - } - } - ); + const [createTodo] = useMutation(CREATE_TODO_MUTATION, { + update(_, __, options) { + expect(options.context).toBeUndefined(); + checkedContext = true; + }, + }); useEffect(() => { createTodo({ variables }); @@ -1414,123 +1506,123 @@ describe('useMutation Hook', () => { }); }); - describe('Optimistic response', () => { - itAsync('should support optimistic response handling', async (resolve, reject) => { - const optimisticResponse = { - __typename: 'Mutation', - createTodo: { - id: 1, - description: 'TEMPORARY', - priority: 'High', - __typename: 'Todo' - } - }; + describe("Optimistic response", () => { + itAsync( + "should support optimistic response handling", + async (resolve, reject) => { + const optimisticResponse = { + __typename: "Mutation", + createTodo: { + id: 1, + description: "TEMPORARY", + priority: "High", + __typename: "Todo", + }, + }; - const variables = { - description: 'Get milk!' 
- }; + const variables = { + description: "Get milk!", + }; - const mocks = [ - { - request: { - query: CREATE_TODO_MUTATION, - variables + const mocks = [ + { + request: { + query: CREATE_TODO_MUTATION, + variables, + }, + result: { data: CREATE_TODO_RESULT }, }, - result: { data: CREATE_TODO_RESULT } - } - ]; - - const link = mockSingleLink(...mocks).setOnError(reject); - const cache = new InMemoryCache(); - const client = new ApolloClient({ - cache, - link - }); + ]; - let renderCount = 0; - const Component = () => { - const [createTodo, { loading, data }] = useMutation( - CREATE_TODO_MUTATION, - { optimisticResponse } - ); + const link = mockSingleLink(...mocks).setOnError(reject); + const cache = new InMemoryCache(); + const client = new ApolloClient({ + cache, + link, + }); - switch (renderCount) { - case 0: - expect(loading).toBeFalsy(); - expect(data).toBeUndefined(); - createTodo({ variables }); + let renderCount = 0; + const Component = () => { + const [createTodo, { loading, data }] = useMutation( + CREATE_TODO_MUTATION, + { optimisticResponse } + ); - const dataInStore = client.cache.extract(true); - expect(dataInStore['Todo:1']).toEqual( - optimisticResponse.createTodo - ); - - break; - case 1: - expect(loading).toBeTruthy(); - expect(data).toBeUndefined(); - break; - case 2: - expect(loading).toBeFalsy(); - expect(data).toEqual(CREATE_TODO_RESULT); - break; - default: - } - renderCount += 1; - return null; - }; + switch (renderCount) { + case 0: + expect(loading).toBeFalsy(); + expect(data).toBeUndefined(); + createTodo({ variables }); + + const dataInStore = client.cache.extract(true); + expect(dataInStore["Todo:1"]).toEqual( + optimisticResponse.createTodo + ); + + break; + case 1: + expect(loading).toBeTruthy(); + expect(data).toBeUndefined(); + break; + case 2: + expect(loading).toBeFalsy(); + expect(data).toEqual(CREATE_TODO_RESULT); + break; + default: + } + renderCount += 1; + return null; + }; - render( - <ApolloProvider client={client}> - <Component /> - </ApolloProvider> - ); + render( + <ApolloProvider client={client}> + <Component /> + </ApolloProvider> + ); - return waitFor(() => { - expect(renderCount).toBe(3); - }).then(resolve, reject); - }); + return waitFor(() => { + expect(renderCount).toBe(3); + }).then(resolve, reject); + } + ); - it('should be called with the provided context', async () => { + it("should be called with the provided context", async () => { const optimisticResponse = { - __typename: 'Mutation', + __typename: "Mutation", createTodo: { id: 1, - description: 'TEMPORARY', - priority: 'High', - __typename: 'Todo' - } + description: "TEMPORARY", + priority: "High", + __typename: "Todo", + }, }; const context = { id: 3 }; const variables = { - description: 'Get milk!' 
+ description: "Get milk!", }; const mocks = [ { request: { query: CREATE_TODO_MUTATION, - variables + variables, }, - result: { data: CREATE_TODO_RESULT } - } + result: { data: CREATE_TODO_RESULT }, + }, ]; const contextFn = jest.fn(); const Component = () => { - const [createTodo] = useMutation( - CREATE_TODO_MUTATION, - { - optimisticResponse, - context, - update(_, __, options) { - contextFn(options.context); - } - } - ); + const [createTodo] = useMutation(CREATE_TODO_MUTATION, { + optimisticResponse, + context, + update(_, __, options) { + contextFn(options.context); + }, + }); useEffect(() => { createTodo({ variables }); @@ -1552,7 +1644,7 @@ describe('useMutation Hook', () => { }); }); - describe('refetching queries', () => { + describe("refetching queries", () => { const GET_TODOS_QUERY = gql` query getTodos { todos { @@ -1567,15 +1659,15 @@ describe('useMutation Hook', () => { todos: [ { id: 2, - description: 'Walk the dog', - priority: 'Medium', - __typename: 'Todo' + description: "Walk the dog", + priority: "Medium", + __typename: "Todo", }, { id: 3, - description: 'Call mom', - priority: 'Low', - __typename: 'Todo' + description: "Call mom", + priority: "Low", + __typename: "Todo", }, ], }; @@ -1584,45 +1676,47 @@ describe('useMutation Hook', () => { todos: [ { id: 1, - description: 'Get milk!', - priority: 'High', - __typename: 'Todo' + description: "Get milk!", + priority: "High", + __typename: "Todo", }, { id: 2, - description: 'Walk the dog', - priority: 'Medium', - __typename: 'Todo' + description: "Walk the dog", + priority: "Medium", + __typename: "Todo", }, { id: 3, - description: 'Call mom', - priority: 'Low', - __typename: 'Todo' + description: "Call mom", + priority: "Low", + __typename: "Todo", }, ], }; - it('can pass onQueryUpdated to useMutation', async () => { + it("can pass onQueryUpdated to useMutation", async () => { interface TData { todoCount: number; } const countQuery: TypedDocumentNode<TData> = gql` - query Count { todoCount @client } + query Count { + todoCount @client + } `; const optimisticResponse = { - __typename: 'Mutation', + __typename: "Mutation", createTodo: { id: 1, - description: 'TEMPORARY', - priority: 'High', - __typename: 'Todo' - } + description: "TEMPORARY", + priority: "High", + __typename: "Todo", + }, }; const variables = { - description: 'Get milk!' + description: "Get milk!", }; const client = new ApolloClient({ @@ -1657,7 +1751,7 @@ describe('useMutation Hook', () => { result: ApolloQueryResult<TData>; } let resolveOnUpdate: (results: OnQueryUpdatedResults) => any; - const onUpdatePromise = new Promise<OnQueryUpdatedResults>(resolve => { + const onUpdatePromise = new Promise<OnQueryUpdatedResults>((resolve) => { resolveOnUpdate = resolve; }).then((onUpdateResult) => { expect(finishedReobserving).toBe(true); @@ -1679,28 +1773,29 @@ describe('useMutation Hook', () => { onUpdatePromise.catch(() => {}); let finishedReobserving = false; - const { result } = renderHook(() => ({ - query: useQuery(countQuery), - mutation: useMutation(CREATE_TODO_MUTATION, { - optimisticResponse, - update(cache) { - const result = cache.readQuery({ query: countQuery }); + const { result } = renderHook( + () => ({ + query: useQuery(countQuery), + mutation: useMutation(CREATE_TODO_MUTATION, { + optimisticResponse, + update(cache) { + const result = cache.readQuery({ query: countQuery }); - cache.writeQuery({ - query: countQuery, - data: { - todoCount: (result ? 
result.todoCount : 0) + 1, - }, - }); - }, + cache.writeQuery({ + query: countQuery, + data: { + todoCount: (result ? result.todoCount : 0) + 1, + }, + }); + }, + }), }), - }), { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }); + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); expect(result.current.query.loading).toBe(false); expect(result.current.query.data).toEqual({ todoCount: 0 }); @@ -1725,16 +1820,22 @@ describe('useMutation Hook', () => { expect(result.current.mutation[1].data).toBe(undefined); expect(finishedReobserving).toBe(false); - await waitFor(() => { - expect(result.current.query.data).toEqual({ todoCount: 1 }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.data).toEqual({ todoCount: 1 }); + }, + { interval: 1 } + ); expect(result.current.query.loading).toBe(false); expect(result.current.mutation[1].loading).toBe(true); expect(result.current.mutation[1].data).toBe(undefined); - await waitFor(() => { - expect(result.current.mutation[1].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.mutation[1].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.loading).toBe(false); expect(result.current.query.data).toEqual({ todoCount: 1 }); expect(result.current.mutation[1].data).toEqual(CREATE_TODO_RESULT); @@ -1743,8 +1844,8 @@ describe('useMutation Hook', () => { await expect(onUpdatePromise).resolves.toBe(undefined); }); - it('refetchQueries with operation names should update cache', async () => { - const variables = { description: 'Get milk!' }; + it("refetchQueries with operation names should update cache", async () => { + const variables = { description: "Get milk!" }; const mocks = [ { request: { @@ -1782,39 +1883,44 @@ describe('useMutation Hook', () => { }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.query.loading).toBe(true); expect(result.current.query.data).toBe(undefined); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.data).toEqual(mocks[0].result.data); const mutate = result.current.mutation[0]; act(() => { mutate({ variables, - refetchQueries: ['getTodos'], + refetchQueries: ["getTodos"], }); }); - await waitFor(() => { - expect(result.current.query.data).toEqual(mocks[2].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.data).toEqual(mocks[2].result.data); + }, + { interval: 1 } + ); expect(result.current.query.loading).toBe(false); - expect(client.readQuery({ query: GET_TODOS_QUERY})) - .toEqual(mocks[2].result.data); + expect(client.readQuery({ query: GET_TODOS_QUERY })).toEqual( + mocks[2].result.data + ); }); - it('refetchQueries with document nodes should update cache', async () => { - const variables = { description: 'Get milk!' }; + it("refetchQueries with document nodes should update cache", async () => { + const variables = { description: "Get milk!" 
}; const mocks = [ { request: { @@ -1854,18 +1960,19 @@ describe('useMutation Hook', () => { }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.query.loading).toBe(true); expect(result.current.query.data).toBe(undefined); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.data).toEqual(mocks[0].result.data); const mutate = result.current.mutation[0]; @@ -1885,16 +1992,20 @@ describe('useMutation Hook', () => { expect(result.current.query.loading).toBe(false); expect(result.current.query.data).toEqual(mocks[0].result.data); - await waitFor(() => { - expect(result.current.query.data).toEqual(mocks[2].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.data).toEqual(mocks[2].result.data); + }, + { interval: 1 } + ); expect(result.current.query.loading).toBe(false); - expect(client.readQuery({ query: GET_TODOS_QUERY })) - .toEqual(mocks[2].result.data); + expect(client.readQuery({ query: GET_TODOS_QUERY })).toEqual( + mocks[2].result.data + ); }); - it('refetchQueries should update cache after unmount', async () => { - const variables = { description: 'Get milk!' }; + it("refetchQueries should update cache after unmount", async () => { + const variables = { description: "Get milk!" }; const mocks = [ { request: { @@ -1932,33 +2043,36 @@ describe('useMutation Hook', () => { }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.query.loading).toBe(true); expect(result.current.query.data).toBe(undefined); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.data).toEqual(mocks[0].result.data); const mutate = result.current.mutation[0]; let onMutationDone: Function; - const mutatePromise = new Promise((resolve) => (onMutationDone = resolve)); + const mutatePromise = new Promise( + (resolve) => (onMutationDone = resolve) + ); setTimeout(() => { act(() => { mutate({ variables, - refetchQueries: ['getTodos'], + refetchQueries: ["getTodos"], update() { unmount(); }, - }).then(result => { + }).then((result) => { expect(result.data).toEqual(CREATE_TODO_RESULT); onMutationDone(); }); @@ -1971,304 +2085,322 @@ describe('useMutation Hook', () => { await mutatePromise; await waitFor(() => { - expect( - client.readQuery({ query: GET_TODOS_QUERY }) - ).toEqual(mocks[2].result.data); + expect(client.readQuery({ query: GET_TODOS_QUERY })).toEqual( + mocks[2].result.data + ); }); }); - itAsync("using onQueryUpdated callback should not prevent cache broadcast", async (resolve, reject) => { - // Mutating this array makes the tests below much more difficult to reason - // about, so instead we reassign the numbersArray variable to remove - // elements, without mutating the previous array object. 
- let numbersArray: ReadonlyArray<{ id: string; value: number }> = [ - { id: '1', value: 324 }, - { id: '2', value: 729 }, - { id: '3', value: 987 }, - { id: '4', value: 344 }, - { id: '5', value: 72 }, - { id: '6', value: 899 }, - { id: '7', value: 222 }, - ]; + itAsync( + "using onQueryUpdated callback should not prevent cache broadcast", + async (resolve, reject) => { + // Mutating this array makes the tests below much more difficult to reason + // about, so instead we reassign the numbersArray variable to remove + // elements, without mutating the previous array object. + let numbersArray: ReadonlyArray<{ id: string; value: number }> = [ + { id: "1", value: 324 }, + { id: "2", value: 729 }, + { id: "3", value: 987 }, + { id: "4", value: 344 }, + { id: "5", value: 72 }, + { id: "6", value: 899 }, + { id: "7", value: 222 }, + ]; - type TNumbersQuery = { - numbers: { - __typename: "NumbersResult"; - id: string; - sum: number; - numbersArray: ReadonlyArray<{ + type TNumbersQuery = { + numbers: { + __typename: "NumbersResult"; id: string; - value: number; - }>; + sum: number; + numbersArray: ReadonlyArray<{ + id: string; + value: number; + }>; + }; }; - }; - function getNumbersData(): TNumbersQuery { - return { - numbers: { - __typename: "NumbersResult", - id: "numbersId", - numbersArray, - sum: numbersArray.reduce((sum, b) => sum + b.value, 0), - }, - }; - } + function getNumbersData(): TNumbersQuery { + return { + numbers: { + __typename: "NumbersResult", + id: "numbersId", + numbersArray, + sum: numbersArray.reduce((sum, b) => sum + b.value, 0), + }, + }; + } - const link = new ApolloLink((operation) => { - return new Observable(observer => { - const { operationName } = operation; - if (operationName === "NumbersQuery") { - observer.next({ - data: getNumbersData(), - }); - } else if (operationName === "RemoveNumberMutation") { - const last = numbersArray[numbersArray.length - 1]; - numbersArray = numbersArray.slice(0, -1); - observer.next({ - data: { - removeLastNumber: last, - }, - }); - } - setTimeout(() => { - observer.complete(); - }, 50); + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + const { operationName } = operation; + if (operationName === "NumbersQuery") { + observer.next({ + data: getNumbersData(), + }); + } else if (operationName === "RemoveNumberMutation") { + const last = numbersArray[numbersArray.length - 1]; + numbersArray = numbersArray.slice(0, -1); + observer.next({ + data: { + removeLastNumber: last, + }, + }); + } + setTimeout(() => { + observer.complete(); + }, 50); + }); }); - }); - const client = new ApolloClient({ - link, - cache: new InMemoryCache({ - typePolicies: { - NumbersResult: { - fields: { - numbersArray: { merge: false }, - sum(_, { readField }) { - const numbersArray = - readField<TNumbersQuery["numbers"]["numbersArray"]>("numbersArray"); - return (numbersArray || []).reduce( - (sum, item) => sum + item.value, - 0, - ); + const client = new ApolloClient({ + link, + cache: new InMemoryCache({ + typePolicies: { + NumbersResult: { + fields: { + numbersArray: { merge: false }, + sum(_, { readField }) { + const numbersArray = + readField<TNumbersQuery["numbers"]["numbersArray"]>( + "numbersArray" + ); + return (numbersArray || []).reduce( + (sum, item) => sum + item.value, + 0 + ); + }, }, }, }, - }, - }), - }); + }), + }); - const NumbersQuery: TypedDocumentNode<TNumbersQuery> = gql` - query NumbersQuery { - numbers { - id - sum - numbersArray { + const NumbersQuery: TypedDocumentNode<TNumbersQuery> = gql` + query 
NumbersQuery { + numbers { id - value + sum + numbersArray { + id + value + } } } - } - `; + `; - const RemoveNumberMutation = gql` - mutation RemoveNumberMutation { - removeLastNumber { - id + const RemoveNumberMutation = gql` + mutation RemoveNumberMutation { + removeLastNumber { + id + } } - } - `; - - const { result } = renderHook(() => ({ - query: useQuery(NumbersQuery, { - notifyOnNetworkStatusChange: true, - }), + `; - mutation: useMutation(RemoveNumberMutation, { - update(cache) { - const oldData = cache.readQuery({ query: NumbersQuery }); - cache.writeQuery({ - query: NumbersQuery, - data: oldData ? { - ...oldData, - numbers: { - ...oldData.numbers, - numbersArray: oldData.numbers.numbersArray.slice(0, -1), - }, - } : { - numbers: { - __typename: "NumbersResult", - id: "numbersId", - sum: 0, - numbersArray: [], - }, + const { result } = renderHook( + () => ({ + query: useQuery(NumbersQuery, { + notifyOnNetworkStatusChange: true, + }), + + mutation: useMutation(RemoveNumberMutation, { + update(cache) { + const oldData = cache.readQuery({ query: NumbersQuery }); + cache.writeQuery({ + query: NumbersQuery, + data: oldData + ? { + ...oldData, + numbers: { + ...oldData.numbers, + numbersArray: oldData.numbers.numbersArray.slice( + 0, + -1 + ), + }, + } + : { + numbers: { + __typename: "NumbersResult", + id: "numbersId", + sum: 0, + numbersArray: [], + }, + }, + }); }, - }); - }, - }), - }), { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }); - - const obsQueryMap = client.getObservableQueries(); - expect(obsQueryMap.size).toBe(1); - const observedResults: Array<{ data: TNumbersQuery }> = []; - subscribeAndCount(reject, obsQueryMap.values().next().value, ( - count, - result: { data: TNumbersQuery }, - ) => { - observedResults.push(result); - expect(observedResults.length).toBe(count); - const data = getNumbersData(); - - if (count === 1) { - expect(result).toEqual({ - loading: true, - networkStatus: NetworkStatus.loading, - partial: true, - }); - - } else if (count === 2) { - expect(data.numbers.numbersArray.length).toBe(7); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data, - }); - - } else if (count === 3) { - expect(data.numbers.numbersArray.length).toBe(6); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data, - }); - - } else if (count === 4) { - expect(data.numbers.numbersArray.length).toBe(5); - expect(result).toEqual({ - loading: false, - networkStatus: NetworkStatus.ready, - data, - }); - - // This line is the only way to finish this test successfully. - setTimeout(resolve, 50); + }), + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } + ); - } else { - // If we did not return false from the final onQueryUpdated function, - // we would receive an additional result here. 
- reject(`too many renders (${count}); final result: ${ - JSON.stringify(result) - }`); - } - }); + const obsQueryMap = client.getObservableQueries(); + expect(obsQueryMap.size).toBe(1); + const observedResults: Array<{ data: TNumbersQuery }> = []; + subscribeAndCount( + reject, + obsQueryMap.values().next().value, + (count, result: { data: TNumbersQuery }) => { + observedResults.push(result); + expect(observedResults.length).toBe(count); + const data = getNumbersData(); + + if (count === 1) { + expect(result).toEqual({ + loading: true, + networkStatus: NetworkStatus.loading, + partial: true, + }); + } else if (count === 2) { + expect(data.numbers.numbersArray.length).toBe(7); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data, + }); + } else if (count === 3) { + expect(data.numbers.numbersArray.length).toBe(6); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data, + }); + } else if (count === 4) { + expect(data.numbers.numbersArray.length).toBe(5); + expect(result).toEqual({ + loading: false, + networkStatus: NetworkStatus.ready, + data, + }); + + // This line is the only way to finish this test successfully. + setTimeout(resolve, 50); + } else { + // If we did not return false from the final onQueryUpdated function, + // we would receive an additional result here. + reject( + `too many renders (${count}); final result: ${JSON.stringify( + result + )}` + ); + } + } + ); - expect(observedResults).toEqual([]); + expect(observedResults).toEqual([]); - expect(result.current.query.loading).toBe(true); - expect(result.current.query.networkStatus).toBe(NetworkStatus.loading); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(false); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + expect(result.current.query.loading).toBe(true); + expect(result.current.query.networkStatus).toBe(NetworkStatus.loading); + expect(result.current.mutation[1].loading).toBe(false); + expect(result.current.mutation[1].called).toBe(false); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(false); + expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); + expect(result.current.mutation[1].loading).toBe(false); + expect(result.current.mutation[1].called).toBe(false); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: '7', - value: 222, - }); + expect(numbersArray[numbersArray.length - 1]).toEqual({ + id: "7", + value: 222, + }); - const [mutate] = result.current.mutation; - await act(async () => { - expect(await mutate( - // Not passing an onQueryUpdated callback should allow cache - // broadcasts to propagate as normal. The point of this test is to - // demonstrate that *adding* onQueryUpdated should not prevent cache - // broadcasts (see below for where we test that). - )).toEqual({ - data: { - removeLastNumber: { - id: '7', + const [mutate] = result.current.mutation; + await act(async () => { + expect( + await mutate() + // Not passing an onQueryUpdated callback should allow cache + // broadcasts to propagate as normal. The point of this test is to + // demonstrate that *adding* onQueryUpdated should not prevent cache + // broadcasts (see below for where we test that). 
+ ).toEqual({ + data: { + removeLastNumber: { + id: "7", + }, }, - }, + }); }); - }); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: '6', - value: 899, - }); + expect(numbersArray[numbersArray.length - 1]).toEqual({ + id: "6", + value: 899, + }); - expect(result.current.query.loading).toBe(false); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(true); + expect(result.current.query.loading).toBe(false); + expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); + expect(result.current.mutation[1].loading).toBe(false); + expect(result.current.mutation[1].called).toBe(true); - await act(async () => { - expect(await mutate({ - // Adding this onQueryUpdated callback, which merely examines the - // updated query and its DiffResult, should not change the broadcast - // behavior of the ObservableQuery. - onQueryUpdated(oq, diff) { - expect(oq.queryName).toBe("NumbersQuery"); - expect(diff.result.numbers.numbersArray.length).toBe(5); - expect(diff.result.numbers.sum).toBe(2456); - }, - })).toEqual({ - data: { - removeLastNumber: { - id: '6', + await act(async () => { + expect( + await mutate({ + // Adding this onQueryUpdated callback, which merely examines the + // updated query and its DiffResult, should not change the broadcast + // behavior of the ObservableQuery. + onQueryUpdated(oq, diff) { + expect(oq.queryName).toBe("NumbersQuery"); + expect(diff.result.numbers.numbersArray.length).toBe(5); + expect(diff.result.numbers.sum).toBe(2456); + }, + }) + ).toEqual({ + data: { + removeLastNumber: { + id: "6", + }, }, - }, + }); }); - }); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: '5', - value: 72, - }); + expect(numbersArray[numbersArray.length - 1]).toEqual({ + id: "5", + value: 72, + }); - expect(result.current.query.loading).toBe(false); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(true); + expect(result.current.query.loading).toBe(false); + expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); + expect(result.current.mutation[1].loading).toBe(false); + expect(result.current.mutation[1].called).toBe(true); - await act(async () => { - expect(await mutate({ - onQueryUpdated(oq, diff) { - expect(oq.queryName).toBe("NumbersQuery"); - expect(diff.result.numbers.numbersArray.length).toBe(4); - expect(diff.result.numbers.sum).toBe(2384); - // Returning false from onQueryUpdated prevents the cache broadcast. - return false; - }, - })).toEqual({ - data: { - removeLastNumber: { - id: '5', + await act(async () => { + expect( + await mutate({ + onQueryUpdated(oq, diff) { + expect(oq.queryName).toBe("NumbersQuery"); + expect(diff.result.numbers.numbersArray.length).toBe(4); + expect(diff.result.numbers.sum).toBe(2384); + // Returning false from onQueryUpdated prevents the cache broadcast. 
+ return false; + }, + }) + ).toEqual({ + data: { + removeLastNumber: { + id: "5", + }, }, - }, + }); }); - }); - expect(numbersArray[numbersArray.length - 1]).toEqual({ - id: '4', - value: 344, - }); + expect(numbersArray[numbersArray.length - 1]).toEqual({ + id: "4", + value: 344, + }); - expect(result.current.query.loading).toBe(false); - expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); - expect(result.current.mutation[1].loading).toBe(false); - expect(result.current.mutation[1].called).toBe(true); - }); + expect(result.current.query.loading).toBe(false); + expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); + expect(result.current.mutation[1].loading).toBe(false); + expect(result.current.mutation[1].called).toBe(true); + } + ); it("refetchQueries should work with BatchHttpLink", async () => { const MUTATION_1 = gql` @@ -2290,16 +2422,20 @@ describe('useMutation Hook', () => { fetchMock.restore(); const responseBodies = [ - { data: { items: [{ id: 1 }, { id: 2 }] }}, - { data: { doSomething: { message: 'success' }}}, - { data: { items: [{ id: 1 }, { id: 2 }, { id: 3 }] }}, + { data: { items: [{ id: 1 }, { id: 2 }] } }, + { data: { doSomething: { message: "success" } } }, + { data: { items: [{ id: 1 }, { id: 2 }, { id: 3 }] } }, ]; - fetchMock.post("/graphql", (url, opts) => new Promise(resolve => { - resolve({ - body: responseBodies.shift(), - }); - })); + fetchMock.post( + "/graphql", + (url, opts) => + new Promise((resolve) => { + resolve({ + body: responseBodies.shift(), + }); + }) + ); const Test = () => { const { data } = useQuery(QUERY_1); @@ -2310,34 +2446,43 @@ describe('useMutation Hook', () => { const { items = [] } = data || {}; - return <> - <button onClick={() => { - return mutate(); - }} type="button"> - mutate - </button> - {items.map((c: any) => ( - <div key={c.id}>item {c.id}</div> - ))} - </>; + return ( + <> + <button + onClick={() => { + return mutate(); + }} + type="button" + > + mutate + </button> + {items.map((c: any) => ( + <div key={c.id}>item {c.id}</div> + ))} + </> + ); }; const client = new ApolloClient({ link: new BatchHttpLink({ - uri: '/graphql', + uri: "/graphql", batchMax: 10, }), cache: new InMemoryCache(), }); - render(<ApolloProvider client={client}><Test /></ApolloProvider>); + render( + <ApolloProvider client={client}> + <Test /> + </ApolloProvider> + ); - await waitFor(() => screen.findByText('item 1')); - await userEvent.click(screen.getByRole('button', { name: /mutate/i })); - await waitFor(() => screen.findByText('item 3')); + await waitFor(() => screen.findByText("item 1")); + await userEvent.click(screen.getByRole("button", { name: /mutate/i })); + await waitFor(() => screen.findByText("item 3")); }); }); - describe('defer', () => { + describe("defer", () => { const CREATE_TODO_MUTATION_DEFER = gql` mutation createTodo($description: String!, $priority: String) { createTodo(description: $description, priority: $priority) { @@ -2350,9 +2495,9 @@ describe('useMutation Hook', () => { } `; const variables = { - description: 'Get milk!' 
+ description: "Get milk!", }; - it('resolves a deferred mutation with the full result', async () => { + it("resolves a deferred mutation with the full result", async () => { const errorSpy = jest.spyOn(console, "error"); const link = new MockSubscriptionLink(); @@ -2373,16 +2518,11 @@ describe('useMutation Hook', () => { return { loading, data }; }; - const { result } = renderHook( - () => useCreateTodo(), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useCreateTodo(), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); @@ -2393,49 +2533,56 @@ describe('useMutation Hook', () => { data: { createTodo: { id: 1, - __typename: 'Todo', + __typename: "Todo", }, }, - hasNext: true + hasNext: true, }, }); }); setTimeout(() => { - link.simulateResult({ - result: { - incremental: [{ - data: { - description: 'Get milk!', - priority: 'High', - __typename: 'Todo', - }, - path: ['createTodo'], - }], - hasNext: false + link.simulateResult( + { + result: { + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + path: ["createTodo"], + }, + ], + hasNext: false, + }, }, - }, true); + true + ); }); - // When defer is used in a mutation, the final value resolves // in a single result - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ createTodo: { id: 1, description: "Get milk!", priority: "High", - __typename: 'Todo', + __typename: "Todo", }, }); expect(errorSpy).not.toHaveBeenCalled(); errorSpy.mockRestore(); }); - it('resolves with resulting errors and calls onError callback', async () => { + it("resolves with resulting errors and calls onError callback", async () => { const errorSpy = jest.spyOn(console, "error"); const link = new MockSubscriptionLink(); @@ -2449,11 +2596,9 @@ describe('useMutation Hook', () => { () => useMutation(CREATE_TODO_MUTATION_DEFER, { onError }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); const createTodo = result.current[0]; @@ -2466,27 +2611,30 @@ describe('useMutation Hook', () => { data: { createTodo: { id: 1, - __typename: 'Todo', + __typename: "Todo", }, }, - hasNext: true + hasNext: true, }, }); }); setTimeout(() => { - link.simulateResult({ - result: { - incremental: [{ - data: null, - errors: [ - new GraphQLError(CREATE_TODO_ERROR) + link.simulateResult( + { + result: { + incremental: [ + { + data: null, + errors: [new GraphQLError(CREATE_TODO_ERROR)], + path: ["createTodo"], + }, ], - path: ['createTodo'], - }], - hasNext: false + hasNext: false, + }, }, - }, true); + true + ); }); await act(async () => { fetchResult = await createTodo({ variables }); @@ -2509,7 +2657,7 @@ describe('useMutation Hook', () => { }); errorSpy.mockRestore(); }); - it('calls the update function with the final merged result data', async () => { + it("calls the update function with the final merged result data", async () => { const errorSpy = jest.spyOn(console, "error"); const link = new MockSubscriptionLink(); const update = jest.fn(); @@ -2519,15 +2667,12 @@ describe('useMutation Hook', 
() => { }); const { result } = renderHook( - () => useMutation(CREATE_TODO_MUTATION_DEFER, - { update }), + () => useMutation(CREATE_TODO_MUTATION_DEFER, { update }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); const [createTodo] = result.current; @@ -2542,26 +2687,31 @@ describe('useMutation Hook', () => { data: { createTodo: { id: 1, - __typename: 'Todo', + __typename: "Todo", }, }, - hasNext: true + hasNext: true, }, }); - link.simulateResult({ - result: { - incremental: [{ - data: { - description: 'Get milk!', - priority: 'High', - __typename: 'Todo', - }, - path: ['createTodo'], - }], - hasNext: false + link.simulateResult( + { + result: { + incremental: [ + { + data: { + description: "Get milk!", + priority: "High", + __typename: "Todo", + }, + path: ["createTodo"], + }, + ], + hasNext: false, + }, }, - }, true); + true + ); await act(async () => { await promiseReturnedByMutate; @@ -2579,9 +2729,9 @@ describe('useMutation Hook', () => { id: 1, description: "Get milk!", priority: "High", - __typename: 'Todo', + __typename: "Todo", }, - } + }, }), // third argument is an object containing context and variables // but we only care about variables here @@ -2595,3 +2745,16 @@ describe('useMutation Hook', () => { }); }); }); + +describe.skip("Type Tests", () => { + test("NoInfer prevents adding arbitrary additional variables", () => { + const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>; + useMutation(typedNode, { + variables: { + bar: 4, + // @ts-expect-error + nonExistingVariable: "string", + }, + }); + }); +}); diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -1,9 +1,9 @@ -import React, { Fragment, ReactNode, useEffect, useState } from 'react'; -import { DocumentNode, GraphQLError } from 'graphql'; -import gql from 'graphql-tag'; -import { act } from 'react-dom/test-utils'; -import userEvent from '@testing-library/user-event'; -import { render, screen, waitFor, renderHook } from '@testing-library/react'; +import React, { Fragment, ReactNode, useEffect, useState } from "react"; +import { DocumentNode, GraphQLError } from "graphql"; +import gql from "graphql-tag"; +import { act } from "react-dom/test-utils"; +import userEvent from "@testing-library/user-event"; +import { render, screen, waitFor, renderHook } from "@testing-library/react"; import { ApolloClient, ApolloError, @@ -11,29 +11,30 @@ import { OperationVariables, TypedDocumentNode, WatchQueryFetchPolicy, -} from '../../../core'; -import { InMemoryCache } from '../../../cache'; -import { ApolloProvider, resetApolloContext } from '../../context'; -import { Observable, Reference, concatPagination } from '../../../utilities'; -import { ApolloLink } from '../../../link/core'; +} from "../../../core"; +import { InMemoryCache } from "../../../cache"; +import { ApolloProvider } from "../../context"; +import { Observable, Reference, concatPagination } from "../../../utilities"; +import { ApolloLink } from "../../../link/core"; import { MockLink, MockedProvider, MockSubscriptionLink, mockSingleLink, tick, -} from '../../../testing'; +} from "../../../testing"; import { QueryResult } from "../../types/types"; -import { useQuery } from '../useQuery'; -import { useMutation } from '../useMutation'; +import { useQuery } from 
"../useQuery"; +import { useMutation } from "../useMutation"; -describe('useQuery Hook', () => { - afterEach(() => { - resetApolloContext(); - }); - describe('General use', () => { - it('should handle a simple query', async () => { - const query = gql`{ hello }`; +describe("useQuery Hook", () => { + describe("General use", () => { + it("should handle a simple query", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -45,31 +46,41 @@ describe('useQuery Hook', () => { <MockedProvider mocks={mocks}>{children}</MockedProvider> ); - const { result } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query), { wrapper }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world" }); }); it("useQuery result is referentially stable", async () => { - const query = gql`{ hello }`; - const mocks = [ { + const query = gql` + { + hello + } + `; + const mocks = [ + { request: { query }, result: { data: { hello: "world" } }, - } ]; - const wrapper = ({ children }: any) => <MockedProvider mocks={mocks}>{children}</MockedProvider>; - const { result, rerender } = renderHook(() => useQuery(query), { wrapper }); + }, + ]; + const wrapper = ({ children }: any) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ); + const { result, rerender } = renderHook(() => useQuery(query), { + wrapper, + }); let oldResult: QueryResult<any, OperationVariables>; await waitFor(() => { - result.current.loading === false + result.current.loading === false; }); rerender({ children: null }); @@ -85,13 +96,23 @@ describe('useQuery Hook', () => { }); it("useQuery produces the expected renders initially", async () => { - const query = gql`{ hello }`; - const mocks = [ { - request: { query }, - result: { data: { hello: "world" } }, - } ]; - const wrapper = ({ children }: any) => <MockedProvider mocks={mocks}>{children}</MockedProvider>; - const { result, rerender } = renderHook(() => useQuery(query), { wrapper }); + const query = gql` + { + hello + } + `; + const mocks = [ + { + request: { query }, + result: { data: { hello: "world" } }, + }, + ]; + const wrapper = ({ children }: any) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ); + const { result, rerender } = renderHook(() => useQuery(query), { + wrapper, + }); await waitFor(() => result.current.loading === false); @@ -100,7 +121,7 @@ describe('useQuery Hook', () => { await waitFor(() => { expect(result.current.loading).toBe(false); }); - expect(result.current.data).toEqual({ hello: "world"}); + expect(result.current.data).toEqual({ hello: "world" }); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual({ hello: "world" }); @@ -114,20 +135,25 @@ describe('useQuery Hook', () => { it("useQuery produces the expected frames when variables change", async () => { const query = gql` query ($id: Int) { - hello(id: $id) - } + hello(id: $id) + } `; - const mocks = [ { - request: { query, variables: { id: 1 } }, - result: { data: { hello: "world 1" } }, - }, { - request: { query, variables: { id: 2 } }, - result: { data: { hello: "world 2" } }, - } ]; - const wrapper = ({ children }: any) => <MockedProvider mocks={mocks}>{children}</MockedProvider>; + 
const mocks = [ + { + request: { query, variables: { id: 1 } }, + result: { data: { hello: "world 1" } }, + }, + { + request: { query, variables: { id: 2 } }, + result: { data: { hello: "world 2" } }, + }, + ]; + const wrapper = ({ children }: any) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ); const { result, rerender } = renderHook( (options) => useQuery(query, options), - { wrapper, initialProps: { variables: { id: 1 } } }, + { wrapper, initialProps: { variables: { id: 1 } } } ); await waitFor(() => result.current.loading === false); await waitFor(() => { @@ -170,8 +196,12 @@ describe('useQuery Hook', () => { }); }); - it('should read and write results from the cache', async () => { - const query = gql`{ hello }`; + it("should read and write results from the cache", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -181,19 +211,23 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); - const { result, rerender } = renderHook( - () => useQuery(query), - { wrapper } - ); + const { result, rerender } = renderHook(() => useQuery(query), { + wrapper, + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world" }); rerender(); @@ -201,8 +235,12 @@ describe('useQuery Hook', () => { expect(result.current.data).toEqual({ hello: "world" }); }); - it('should preserve functions between renders', async () => { - const query = gql`{ hello }`; + it("should preserve functions between renders", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -212,24 +250,21 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); - const { result } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query), { wrapper }); expect(result.current.loading).toBe(true); - const { - refetch, - fetchMore, - startPolling, - stopPolling, - subscribeToMore, - } = result.current; - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + const { refetch, fetchMore, startPolling, stopPolling, subscribeToMore } = + result.current; + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(refetch).toBe(result.current.refetch); expect(fetchMore).toBe(result.current.fetchMore); expect(startPolling).toBe(result.current.startPolling); @@ -237,8 +272,12 @@ describe('useQuery Hook', () => { expect(subscribeToMore).toBe(result.current.subscribeToMore); }); - it('should set called to true by default', async () => { - const query = gql`{ hello }`; + it("should set called to true by default", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -248,20 +287,25 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ 
children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); - const { result, unmount } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { result, unmount } = renderHook(() => useQuery(query), { + wrapper, + }); expect(result.current.called).toBe(true); unmount(); }); - it('should set called to false when skip option is true', async () => { - const query = gql`{ hello }`; + it("should set called to false when skip option is true", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -271,19 +315,21 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result, unmount } = renderHook( - () => useQuery(query, { skip: true }), - { wrapper }, + () => useQuery(query, { skip: true }), + { wrapper } ); expect(result.current.called).toBe(false); unmount(); }); - it('should work with variables', async () => { + it("should work with variables", async () => { const query = gql` query ($id: Int) { hello(id: $id) @@ -303,32 +349,40 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result, rerender } = renderHook( - ({ id }) => useQuery(query, { variables: { id }}), - { wrapper, initialProps: { id: 1 } }, + ({ id }) => useQuery(query, { variables: { id } }), + { wrapper, initialProps: { id: 1 } } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world 1" }); rerender({ id: 2 }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world 2" }); }); - it('should return the same results for the same variables', async () => { + it("should return the same results for the same variables", async () => { const query = gql` query ($id: Int) { hello(id: $id) @@ -348,28 +402,35 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result, rerender } = renderHook( ({ id }) => useQuery(query, { variables: { id } }), - { wrapper, initialProps: { id: 1 } }, + { wrapper, initialProps: { id: 1 } } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world 1" }); 
- rerender({ id: 2 }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world 2" }); rerender({ id: 2 }); @@ -377,7 +438,7 @@ describe('useQuery Hook', () => { expect(result.current.data).toEqual({ hello: "world 2" }); }); - it('should work with variables 2', async () => { + it("should work with variables 2", async () => { const query = gql` query ($name: String) { names(name: $name) @@ -401,42 +462,53 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result, rerender } = renderHook( ({ name }) => useQuery(query, { variables: { name } }), - { wrapper, initialProps: { name: "" } }, + { wrapper, initialProps: { name: "" } } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ names: ["Alice", "Bob", "Eve"] }); - rerender({ name: 'z' }); + rerender({ name: "z" }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ names: [] }); - rerender({ name: 'zz' }); + rerender({ name: "zz" }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ names: [] }); }); // An unsuccessful attempt to reproduce https://github.com/apollographql/apollo-client/issues/9135. 
- it('should not return stale variables when stored in state', async () => { + it("should not return stale variables when stored in state", async () => { const query = gql` query myQuery($name: String) { hello(name: $name) @@ -471,7 +543,7 @@ describe('useQuery Hook', () => { const [name, setName1] = React.useState("world 1"); setName = setName1; return [ - useQuery(query, { variables: { name }}), + useQuery(query, { variables: { name } }), useMutation(mutation, { update(cache, { data }) { cache.writeQuery({ @@ -488,15 +560,18 @@ describe('useQuery Hook', () => { {children} </MockedProvider> ), - }, + } ); expect(result.current[0].loading).toBe(true); expect(result.current[0].data).toBe(undefined); expect(result.current[0].variables).toEqual({ name: "world 1" }); - await waitFor(() => { - expect(result.current[0].loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[0].loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current[0].data).toEqual({ hello: "world 1" }); expect(result.current[0].variables).toEqual({ name: "world 1" }); @@ -523,7 +598,7 @@ describe('useQuery Hook', () => { }); // TODO: Rewrite this test - it('should not error when forcing an update with React >= 16.13.0', async () => { + it("should not error when forcing an update with React >= 16.13.0", async () => { const CAR_QUERY: DocumentNode = gql` query { cars { @@ -537,24 +612,24 @@ describe('useQuery Hook', () => { const CAR_RESULT_DATA = { cars: [ { - make: 'Audi', - model: 'RS8', - vin: 'DOLLADOLLABILL', - __typename: 'Car' - } - ] + make: "Audi", + model: "RS8", + vin: "DOLLADOLLABILL", + __typename: "Car", + }, + ], }; let wasUpdateErrorLogged = false; const consoleError = console.error; console.error = (msg: string) => { console.log(msg); - wasUpdateErrorLogged = msg.indexOf('Cannot update a component') > -1; + wasUpdateErrorLogged = msg.indexOf("Cannot update a component") > -1; }; - const CAR_MOCKS = [1, 2, 3, 4, 5, 6].map(something => ({ + const CAR_MOCKS = [1, 2, 3, 4, 5, 6].map((something) => ({ request: { query: CAR_QUERY, - variables: { something } + variables: { something }, }, result: { data: CAR_RESULT_DATA }, })); @@ -563,8 +638,8 @@ describe('useQuery Hook', () => { const InnerComponent = ({ something }: any) => { const { loading, data } = useQuery(CAR_QUERY, { - fetchPolicy: 'network-only', - variables: { something } + fetchPolicy: "network-only", + variables: { something }, }); renderCount += 1; if (loading) return null; @@ -575,7 +650,7 @@ describe('useQuery Hook', () => { function WrapperComponent({ something }: any) { const { loading } = useQuery(CAR_QUERY, { - variables: { something } + variables: { something }, }); return loading ? 
null : <InnerComponent something={something + 1} />; } @@ -592,36 +667,39 @@ describe('useQuery Hook', () => { await waitFor(() => { expect(renderCount).toBe(6); - }) + }); console.error = consoleError; }); - it('should tear down the query on unmount', async () => { - const query = gql`{ hello }`; + it("should tear down the query on unmount", async () => { + const query = gql` + { + hello + } + `; const client = new ApolloClient({ - link: new ApolloLink(() => Observable.of({ data: { hello: 'world' } })), + link: new ApolloLink(() => Observable.of({ data: { hello: "world" } })), cache: new InMemoryCache(), }); const wrapper = ({ children }: any) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ); - const { unmount } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { unmount } = renderHook(() => useQuery(query), { wrapper }); expect(client.getObservableQueries().size).toBe(1); unmount(); - await new Promise(resolve => setTimeout(resolve)); + await new Promise((resolve) => setTimeout(resolve)); expect(client.getObservableQueries().size).toBe(0); }); - it('should work with ssr: false', async () => { - const query = gql`{ hello }`; + it("should work with ssr: false", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -629,24 +707,24 @@ describe('useQuery Hook', () => { }, ]; - const { result } = renderHook( - () => useQuery(query, { ssr: false }), - { - wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}>{children}</MockedProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query, { ssr: false }), { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world" }); }); - it('should keep `no-cache` results when the tree is re-rendered', async () => { + it("should keep `no-cache` results when the tree is re-rendered", async () => { const query1 = gql` query people { allPeople(first: 1) { @@ -668,12 +746,12 @@ describe('useQuery Hook', () => { `; const allPeopleData = { - allPeople: { people: [{ name: 'Luke Skywalker' }] }, + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; const allThingsData = { allThings: { - thing: [{ description: 'Thing 1' }, { description: 'Thing 2' }], + thing: [{ description: "Thing 1" }, { description: "Thing 2" }], }, }; @@ -686,7 +764,7 @@ describe('useQuery Hook', () => { request: { query: query2 }, result: { data: allThingsData }, delay: 50, - }, + } ); const client = new ApolloClient({ @@ -695,17 +773,12 @@ describe('useQuery Hook', () => { }); const { result, rerender } = renderHook( - () => [ - useQuery(query1, { fetchPolicy: "no-cache" }), - useQuery(query2), - ], + () => [useQuery(query1, { fetchPolicy: "no-cache" }), useQuery(query2)], { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current[0].loading).toBe(true); @@ -713,31 +786,55 @@ describe('useQuery Hook', () => { expect(result.current[1].loading).toBe(true); expect(result.current[1].data).toBe(undefined); - await 
waitFor(() => { - expect(result.current[0].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[0].data).toEqual(allPeopleData); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].loading).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toBe(undefined); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[0].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[0].data).toEqual(allPeopleData); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].loading).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toBe(undefined); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current[0].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[0].data).toEqual(allPeopleData); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current[1].data).toEqual(allThingsData); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current[0].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[0].data).toEqual(allPeopleData); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current[1].data).toEqual(allThingsData); + }, + { interval: 1 } + ); rerender(); expect(result.current[0].loading).toBe(false); @@ -746,15 +843,24 @@ describe('useQuery Hook', () => { expect(result.current[1].data).toEqual(allThingsData); }); - it('changing queries', async () => { - const query1 = gql`query { hello }`; - const query2 = gql`query { hello, name }`; - const mocks = [ - { - request: { query: query1 }, - result: { data: { hello: "world" } }, - }, - { + it("changing queries", async () => { + const query1 = gql` + query { + hello + } + `; + const query2 = gql` + query { + hello + name + } + `; + const mocks = [ + { + request: { query: query1 }, + result: { data: { hello: "world" } }, + }, + { request: { query: query2 }, result: { data: { hello: "world", name: "world" } }, }, @@ -770,137 +876,157 @@ describe('useQuery Hook', () => { </MockedProvider> ), initialProps: { query: query1 }, - }, + } ); expect(result.current.loading).toBe(true); rerender({ query: query2 }); expect(result.current.loading).toBe(true); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(mocks[1].result.data); }); - it('`cache-and-network` fetch policy', async () => { - const query = gql`{ hello }`; + it("`cache-and-network` fetch policy", async () => { + const query = gql` + { + hello + } + `; const cache = new InMemoryCache(); - const link = mockSingleLink( - { - request: { query }, - result: { data: { hello: 'from link' } }, - delay: 20, - }, - ); + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + delay: 20, + }); const client = new ApolloClient({ link, cache, }); - cache.writeQuery({ query, data: { hello: 'from cache' }}); + cache.writeQuery({ query, data: { hello: "from cache" } }); const { result } = renderHook( - () => useQuery(query, { 
fetchPolicy: 'cache-and-network' }), + () => useQuery(query, { fetchPolicy: "cache-and-network" }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); // TODO: FIXME expect(result.current.loading).toBe(true); - expect(result.current.data).toEqual({ hello: 'from cache' }); + expect(result.current.data).toEqual({ hello: "from cache" }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'from link' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "from link" }); }); - it('should not use the cache when using `network-only`', async () => { - const query = gql`{ hello }`; + it("should not use the cache when using `network-only`", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'from link' } }, + result: { data: { hello: "from link" } }, }, ]; const cache = new InMemoryCache(); cache.writeQuery({ query, - data: { hello: 'from cache' }, + data: { hello: "from cache" }, }); const { result } = renderHook( - () => useQuery(query, { fetchPolicy: 'network-only' }), + () => useQuery(query, { fetchPolicy: "network-only" }), { wrapper: ({ children }) => ( <MockedProvider mocks={mocks} cache={cache}> {children} </MockedProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBeUndefined(); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'from link' }); - }); - - it('should use the cache when in ssrMode and fetchPolicy is `network-only`', async () => { - const query = gql`query { hello }`; - const link = mockSingleLink( - { - request: { query }, - result: { data: { hello: 'from link' } }, + await waitFor( + () => { + expect(result.current.loading).toBe(false); }, + { interval: 1 } ); + expect(result.current.data).toEqual({ hello: "from link" }); + }); + + it("should use the cache when in ssrMode and fetchPolicy is `network-only`", async () => { + const query = gql` + query { + hello + } + `; + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + }); const cache = new InMemoryCache(); cache.writeQuery({ query, - data: { hello: 'from cache' }, + data: { hello: "from cache" }, }); - const client = new ApolloClient({ link, cache, ssrMode: true, }); + const client = new ApolloClient({ link, cache, ssrMode: true }); const { result } = renderHook( - () => useQuery(query, { fetchPolicy: 'network-only' }), + () => useQuery(query, { fetchPolicy: "network-only" }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(false); - expect(result.current.data).toEqual({ hello: 'from cache' }); + expect(result.current.data).toEqual({ hello: "from cache" }); - await expect(waitFor(() => { - expect(result.current.data).toEqual({ hello: 'from link' }); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.data).toEqual({ hello: "from link" }); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); }); - it('should not hang when ssrMode is true 
but the cache is not populated for some reason', async () => { - const query = gql`query { hello }`; - const link = mockSingleLink( - { - request: { query }, - result: { data: { hello: 'from link' } }, - }, - ); + it("should not hang when ssrMode is true but the cache is not populated for some reason", async () => { + const query = gql` + query { + hello + } + `; + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + }); const client = new ApolloClient({ link, @@ -908,36 +1034,36 @@ describe('useQuery Hook', () => { ssrMode: true, }); - const { result } = renderHook( - () => useQuery(query), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBeUndefined(); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'from link' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "from link" }); }); }); describe("options.defaultOptions", () => { it("can provide a default fetchPolicy", async () => { - const query = gql`query { hello }`; - const link = mockSingleLink( - { - request: { query }, - result: { data: { hello: 'from link' } }, - }, - ); + const query = gql` + query { + hello + } + `; + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + }); const client = new ApolloClient({ link, @@ -960,28 +1086,27 @@ describe('useQuery Hook', () => { }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBeUndefined(); - expect(fetchPolicyLog).toEqual([ - "cache-and-network", - ]); + expect(fetchPolicyLog).toEqual(["cache-and-network"]); // Change the default fetchPolicy to verify that it is not used the second // time useQuery is called. 
defaultFetchPolicy = "network-only"; - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.data).toEqual({ hello: 'from link' }); + expect(result.current.data).toEqual({ hello: "from link" }); expect(fetchPolicyLog).toEqual([ "cache-and-network", "cache-and-network", @@ -989,23 +1114,29 @@ describe('useQuery Hook', () => { }); it("can provide individual default variables", async () => { - const query: TypedDocumentNode<{ - vars: OperationVariables, - }, OperationVariables> = gql` + const query: TypedDocumentNode< + { + vars: OperationVariables; + }, + OperationVariables + > = gql` query VarsQuery { vars } `; const client = new ApolloClient({ - link: new ApolloLink(request => new Observable(observer => { - observer.next({ - data: { - vars: request.variables, - }, - }); - observer.complete(); - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + observer.next({ + data: { + vars: request.variables, + }, + }); + observer.complete(); + }) + ), cache: new InMemoryCache(), @@ -1040,9 +1171,7 @@ describe('useQuery Hook', () => { }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), } ); @@ -1055,9 +1184,9 @@ describe('useQuery Hook', () => { mandatory: true, }); - expect( - result.current.observable.options.fetchPolicy - ).toBe("cache-and-network"); + expect(result.current.observable.options.fetchPolicy).toBe( + "cache-and-network" + ); expect( // The defaultOptions field is for useQuery options (QueryHookOptions), @@ -1065,13 +1194,14 @@ describe('useQuery Hook', () => { "defaultOptions" in result.current.observable.options ).toBe(false); - expect(fetchPolicyLog).toEqual([ - "cache-and-network", - ]); + expect(fetchPolicyLog).toEqual(["cache-and-network"]); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ vars: { @@ -1086,30 +1216,32 @@ describe('useQuery Hook', () => { "cache-and-network", ]); - const reobservePromise = act(() => result.current.observable.reobserve({ - fetchPolicy: "network-only", - nextFetchPolicy: "cache-first", - variables: { - // Since reobserve replaces the variables object rather than merging - // the individual variables together, we need to include the current - // variables manually if we want them to show up in the output below. - ...result.current.observable.variables, - sourceOfVar: "reobserve", - }, - }).then(finalResult => { - expect(finalResult.loading).toBe(false); - expect(finalResult.data).toEqual({ - vars: { - sourceOfVar: "reobserve", - isGlobal: false, - mandatory: true, - }, - }); - })); + const reobservePromise = act(() => + result.current.observable + .reobserve({ + fetchPolicy: "network-only", + nextFetchPolicy: "cache-first", + variables: { + // Since reobserve replaces the variables object rather than merging + // the individual variables together, we need to include the current + // variables manually if we want them to show up in the output below. 
+ ...result.current.observable.variables, + sourceOfVar: "reobserve", + }, + }) + .then((finalResult) => { + expect(finalResult.loading).toBe(false); + expect(finalResult.data).toEqual({ + vars: { + sourceOfVar: "reobserve", + isGlobal: false, + mandatory: true, + }, + }); + }) + ); - expect( - result.current.observable.options.fetchPolicy - ).toBe("cache-first"); + expect(result.current.observable.options.fetchPolicy).toBe("cache-first"); expect(result.current.observable.variables).toEqual({ sourceOfVar: "reobserve", @@ -1119,9 +1251,7 @@ describe('useQuery Hook', () => { await reobservePromise; - expect( - result.current.observable.options.fetchPolicy - ).toBe("cache-first"); + expect(result.current.observable.options.fetchPolicy).toBe("cache-first"); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual({ @@ -1131,9 +1261,7 @@ describe('useQuery Hook', () => { mandatory: true, }, }); - expect( - result.current.observable.variables - ).toEqual( + expect(result.current.observable.variables).toEqual( result.current.data!.vars ); @@ -1143,30 +1271,32 @@ describe('useQuery Hook', () => { "cache-first", ]); - const reobserveNoVarMergePromise = act(() => result.current.observable.reobserve({ - fetchPolicy: "network-only", - nextFetchPolicy: "cache-first", - variables: { - // This reobservation is like the one above, with no variable merging. - // ...result.current.observable.variables, - sourceOfVar: "reobserve without variable merge", - }, - }).then(finalResult => { - expect(finalResult.loading).toBe(false); - expect(finalResult.data).toEqual({ - vars: { - sourceOfVar: "reobserve without variable merge", - // Since we didn't merge in result.current.observable.variables, we - // don't see these variables anymore: - // isGlobal: false, - // mandatory: true, - }, - }); - })); + const reobserveNoVarMergePromise = act(() => + result.current.observable + .reobserve({ + fetchPolicy: "network-only", + nextFetchPolicy: "cache-first", + variables: { + // This reobservation is like the one above, with no variable merging. 
+ // ...result.current.observable.variables, + sourceOfVar: "reobserve without variable merge", + }, + }) + .then((finalResult) => { + expect(finalResult.loading).toBe(false); + expect(finalResult.data).toEqual({ + vars: { + sourceOfVar: "reobserve without variable merge", + // Since we didn't merge in result.current.observable.variables, we + // don't see these variables anymore: + // isGlobal: false, + // mandatory: true, + }, + }); + }) + ); - expect( - result.current.observable.options.fetchPolicy - ).toBe("cache-first"); + expect(result.current.observable.options.fetchPolicy).toBe("cache-first"); expect(result.current.observable.variables).toEqual({ sourceOfVar: "reobserve without variable merge", @@ -1174,9 +1304,7 @@ describe('useQuery Hook', () => { await reobserveNoVarMergePromise; - expect( - result.current.observable.options.fetchPolicy - ).toBe("cache-first"); + expect(result.current.observable.options.fetchPolicy).toBe("cache-first"); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual({ @@ -1184,9 +1312,7 @@ describe('useQuery Hook', () => { sourceOfVar: "reobserve without variable merge", }, }); - expect( - result.current.observable.variables - ).toEqual( + expect(result.current.observable.variables).toEqual( result.current.data!.vars ); @@ -1210,22 +1336,27 @@ describe('useQuery Hook', () => { let count = 0; const client = new ApolloClient({ cache: new InMemoryCache(), - link: new ApolloLink(request => new Observable(observer => { - if (request.operationName === "GetCounter") { - observer.next({ - data: { - counter: ++count, - }, - }); - setTimeout(() => { - observer.complete(); - }, 10); - } else { - observer.error(new Error(`Unknown query: ${ - request.operationName || request.query - }`)); - } - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + if (request.operationName === "GetCounter") { + observer.next({ + data: { + counter: ++count, + }, + }); + setTimeout(() => { + observer.complete(); + }, 10); + } else { + observer.error( + new Error( + `Unknown query: ${request.operationName || request.query}` + ) + ); + } + }) + ), }); const defaultFetchPolicy = "network-only"; @@ -1245,20 +1376,23 @@ describe('useQuery Hook', () => { }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.query.loading).toBe(false); expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); expect(result.current.query.data).toBeUndefined(); - await expect(waitFor(() => { - expect(result.current.query.data).toEqual({ counter: 1 }); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.query.data).toEqual({ counter: 1 }); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); act(() => { result.current.setSkip(false); @@ -1266,9 +1400,12 @@ describe('useQuery Hook', () => { expect(result.current.query.loading).toBe(true); expect(result.current.query.networkStatus).toBe(NetworkStatus.loading); expect(result.current.query.data).toBeUndefined(); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); expect(result.current.query.data).toEqual({ counter: 1 }); @@ -1282,9 +1419,14 @@ describe('useQuery Hook', 
() => { expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); expect(result.current.query.data).toBeUndefined(); - await expect(waitFor(() => { - expect(result.current.query.data).toEqual({ counter: 1 }); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.query.data).toEqual({ counter: 1 }); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); act(() => { result.current.setSkip(false); @@ -1292,9 +1434,12 @@ describe('useQuery Hook', () => { expect(result.current.query.loading).toBe(true); expect(result.current.query.networkStatus).toBe(NetworkStatus.loading); expect(result.current.query.data).toEqual({ counter: 1 }); - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.networkStatus).toBe(NetworkStatus.ready); expect(result.current.query.data).toEqual({ counter: 2 }); @@ -1303,13 +1448,15 @@ describe('useQuery Hook', () => { }); it("can provide options.client without ApolloProvider", async () => { - const query = gql`query { hello }`; - const link = mockSingleLink( - { - request: { query }, - result: { data: { hello: 'from link' } }, - }, - ); + const query = gql` + query { + hello + } + `; + const link = mockSingleLink({ + request: { query }, + result: { data: { hello: "from link" } }, + }); const client = new ApolloClient({ link, @@ -1318,7 +1465,7 @@ describe('useQuery Hook', () => { }); const { result } = renderHook( - () => useQuery(query, { client }), + () => useQuery(query, { client }) // We deliberately do not provide the usual ApolloProvider wrapper for // this test, since we are providing the client directly to useQuery. 
// { @@ -1333,54 +1480,68 @@ describe('useQuery Hook', () => { expect(result.current.loading).toBe(true); expect(result.current.data).toBeUndefined(); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.data).toEqual({ hello: 'from link' }); + expect(result.current.data).toEqual({ hello: "from link" }); }); - describe('<React.StrictMode>', () => { + describe("<React.StrictMode>", () => { it("double-rendering should not trigger duplicate network requests", async () => { const query: TypedDocumentNode<{ linkCount: number; - }> = gql`query Counter { linkCount }`; + }> = gql` + query Counter { + linkCount + } + `; let linkCount = 0; const client = new ApolloClient({ cache: new InMemoryCache(), - link: new ApolloLink(request => new Observable(observer => { - if (request.operationName === "Counter") { - observer.next({ - data: { - linkCount: ++linkCount, - }, - }); - observer.complete(); - } - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + if (request.operationName === "Counter") { + observer.next({ + data: { + linkCount: ++linkCount, + }, + }); + observer.complete(); + } + }) + ), }); const { result } = renderHook( - () => useQuery(query, { - fetchPolicy: "cache-and-network", - }), + () => + useQuery(query, { + fetchPolicy: "cache-and-network", + }), { wrapper: ({ children }) => ( <React.StrictMode> <ApolloProvider client={client}>{children}</ApolloProvider> </React.StrictMode> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ linkCount: 1, @@ -1392,7 +1553,7 @@ describe('useQuery Hook', () => { const activeSet = new Set<typeof result.current.observable>(); const inactiveSet = new Set<typeof result.current.observable>(); - obsQueries.forEach(obsQuery => { + obsQueries.forEach((obsQuery) => { if (obsQuery.hasObservers()) { expect(inactiveSet.has(obsQuery)).toBe(false); activeSet.add(obsQuery); @@ -1414,7 +1575,7 @@ describe('useQuery Hook', () => { checkObservableQueries(1); - await result.current.reobserve().then(result => { + await result.current.reobserve().then((result) => { expect(result.loading).toBe(false); expect(result.loading).toBe(false); expect(result.networkStatus).toBe(NetworkStatus.ready); @@ -1426,22 +1587,32 @@ describe('useQuery Hook', () => { await waitFor(() => { expect(result.current.loading).toBe(false); }); - await waitFor(() => { - expect(result.current.data).toEqual({ - linkCount: 2, - }); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ + linkCount: 2, + }); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); checkObservableQueries(2); }); }); - describe('polling', () => { - it('should support polling', async () => { - const query = gql`{ hello }`; + describe("polling", () => { + it("should support polling", async () => { 
+ const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -1459,44 +1630,64 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( () => useQuery(query, { pollInterval: 10 }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 1" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 2" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 2" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 3" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); const { data: previousData } = result.current; result.current.stopPolling(); - await expect(waitFor(() => { - expect(result.current.data).not.toEqual(previousData); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.data).not.toEqual(previousData); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); }); - it('should start polling when skip goes from true to false', async () => { - const query = gql`{ hello }`; + it("should start polling when skip goes from true to false", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -1524,16 +1715,19 @@ describe('useQuery Hook', () => { {children} </MockedProvider> ), - initialProps: { skip: undefined } as any - }, + initialProps: { skip: undefined } as any, + } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual({ hello: "world 1" }); @@ -1542,27 +1736,42 @@ describe('useQuery Hook', () => { expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); - await expect(waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 1" }); - }, { interval: 1, timeout: 20 })).rejects.toThrow() + await expect( + waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); rerender({ skip: false }); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual({ hello: "world 1" }); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 2" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 2" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 3" }); - }, { interval: 1 }); + 
await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); }); it("should return data from network when clients default fetch policy set to network-only", async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const data = { hello: "world" }; const mocks = [ { @@ -1587,22 +1796,26 @@ describe('useQuery Hook', () => { </MockedProvider> ); - const { result } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query), { wrapper }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data); }); - it('should stop polling when component unmounts', async () => { - const query = gql`{ hello }`; + it("should stop polling when component unmounts", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -1621,27 +1834,35 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const link = new MockLink(mocks); - const requestSpy = jest.spyOn(link, 'request'); + const requestSpy = jest.spyOn(link, "request"); const onErrorFn = jest.fn(); link.setOnError(onErrorFn); const wrapper = ({ children }: any) => ( - <MockedProvider link={link} cache={cache}>{children}</MockedProvider> + <MockedProvider link={link} cache={cache}> + {children} + </MockedProvider> ); const { result, unmount } = renderHook( () => useQuery(query, { pollInterval: 10 }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 1" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1 } + ); await waitFor(() => { expect(requestSpy).toHaveBeenCalled(); @@ -1654,10 +1875,15 @@ describe('useQuery Hook', () => { expect(requestSpy).toHaveBeenCalledTimes(requestCount); - await expect(waitFor(() => { - const newRequestCount = requestSpy.mock.calls.length; - expect(newRequestCount).toBeGreaterThan(requestCount); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + const newRequestCount = requestSpy.mock.calls.length; + expect(newRequestCount).toBeGreaterThan(requestCount); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); await waitFor(() => { expect(onErrorFn).toHaveBeenCalledTimes(0); @@ -1666,8 +1892,12 @@ describe('useQuery Hook', () => { requestSpy.mockRestore(); }); - it('should stop polling when component is unmounted in Strict Mode', async () => { - const query = gql`{ hello }`; + it("should stop polling when component is unmounted in Strict Mode", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -1688,42 +1918,59 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const link = new MockLink(mocks); - const requestSpy = jest.spyOn(link, 'request'); + const requestSpy = jest.spyOn(link, "request"); const 
onErrorFn = jest.fn(); link.setOnError(onErrorFn); const wrapper = ({ children }: any) => ( <React.StrictMode> -<MockedProvider link={link} cache={cache}>{children}</MockedProvider> + <MockedProvider link={link} cache={cache}> + {children} + </MockedProvider> </React.StrictMode> ); const { result, unmount } = renderHook( () => useQuery(query, { pollInterval: 10 }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: "world 1" }); - const requestCount = requestSpy.mock.calls.length; - expect(requestSpy).toHaveBeenCalledTimes(requestCount); + const requestSpyCallCount = requestSpy.mock.calls.length; + expect(requestSpy).toHaveBeenCalledTimes(requestSpyCallCount); unmount(); - await expect(waitFor(() => { - expect(requestSpy).toHaveBeenCalledTimes(requestCount + 1) - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + expect(requestSpy).toHaveBeenCalledTimes(requestSpyCallCount); + await expect( + waitFor( + () => { + expect(requestSpy).toHaveBeenCalledTimes(requestSpyCallCount + 1); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); + expect(requestSpy).toHaveBeenCalledTimes(requestSpyCallCount); expect(onErrorFn).toHaveBeenCalledTimes(0); + requestSpy.mockRestore(); }); - it('should start and stop polling in Strict Mode', async () => { - const query = gql`{ hello }`; + it("should start and stop polling in Strict Mode", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -1745,97 +1992,121 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const link = new MockLink(mocks); - const requestSpy = jest.spyOn(link, 'request'); + const requestSpy = jest.spyOn(link, "request"); const onErrorFn = jest.fn(); link.setOnError(onErrorFn); const wrapper = ({ children }: any) => ( <React.StrictMode> - <MockedProvider link={link} cache={cache}>{children}</MockedProvider> + <MockedProvider link={link} cache={cache}> + {children} + </MockedProvider> </React.StrictMode> ); const { result } = renderHook( () => useQuery(query, { pollInterval: 20 }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 1" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 2" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 2" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - result.current.stopPolling(); - await expect(waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 3" }); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); result.current.startPolling(20); expect(requestSpy).toHaveBeenCalledTimes(2); expect(onErrorFn).toHaveBeenCalledTimes(0); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 3" }); 
- }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: "world 4" }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 4" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(requestSpy).toHaveBeenCalledTimes(4); expect(onErrorFn).toHaveBeenCalledTimes(0); requestSpy.mockRestore(); }); - it('should not throw an error if stopPolling is called manually', async () => { - const query = gql`{ hello }`; + it("should not throw an error if stopPolling is called manually", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - data: { hello: 'world' }, - } - } + data: { hello: "world" }, + }, + }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); - const { result, unmount } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { result, unmount } = renderHook(() => useQuery(query), { + wrapper, + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.data).toEqual({ hello: 'world' }); + expect(result.current.data).toEqual({ hello: "world" }); unmount(); result.current.stopPolling(); }); }); - describe('Error handling', () => { - - it('should pass along GraphQL errors', async () => { + describe("Error handling", () => { + it("should pass along GraphQL errors", async () => { const query = gql` query TestQuery { rates(currency: "USD") { @@ -1848,106 +2119,124 @@ describe('useQuery Hook', () => { { request: { query }, result: { - errors: [new GraphQLError('error')] - } - } + errors: [new GraphQLError("error")], + }, + }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); - const { result } = renderHook( - () => useQuery(query), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query), { wrapper }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); }); - it('calls `onError` when a GraphQL error is returned', async () => { - const query = gql`{ hello }`; + it("calls `onError` when a GraphQL error is returned", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error')], + errors: [new GraphQLError("error")], }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} 
cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); - const { result } = renderHook( - () => useQuery(query, { onError }), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query, { onError }), { + wrapper, + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBeUndefined(); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); await waitFor(() => { expect(onError).toHaveBeenCalledTimes(1); }); await waitFor(() => { expect(onError).toHaveBeenCalledWith( - new ApolloError({ graphQLErrors: [new GraphQLError('error')] }) + new ApolloError({ graphQLErrors: [new GraphQLError("error")] }) ); }); }); - it('calls `onError` when a network error has occured', async () => { - const query = gql`{ hello }`; + it("calls `onError` when a network error has occured", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - error: new Error('Could not fetch') + error: new Error("Could not fetch"), }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); - const { result } = renderHook( - () => useQuery(query, { onError }), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query, { onError }), { + wrapper, + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBeUndefined(); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('Could not fetch'); + expect(result.current.error!.message).toBe("Could not fetch"); expect(result.current.error!.networkError).toEqual( - new Error('Could not fetch') + new Error("Could not fetch") ); await waitFor(() => { @@ -1955,79 +2244,96 @@ describe('useQuery Hook', () => { }); await waitFor(() => { expect(onError).toHaveBeenCalledWith( - new ApolloError({ networkError: new Error('Could not fetch') }) + new ApolloError({ networkError: new Error("Could not fetch") }) ); }); }); - it('removes partial data from result when response has errors', async () => { - const query = gql`{ hello }`; + it("removes partial data from result when response has errors", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { data: { hello: null }, - errors: [new GraphQLError('Could not fetch "hello"')] - } + errors: [new GraphQLError('Could not fetch "hello"')], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); - const { result } = 
renderHook( - () => useQuery(query, { onError }), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(query, { onError }), { + wrapper, + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBeUndefined(); expect(result.current.error).toBeInstanceOf(ApolloError); expect(result.current.error!.message).toBe('Could not fetch "hello"'); expect(result.current.error!.graphQLErrors).toEqual([ - new GraphQLError('Could not fetch "hello"') + new GraphQLError('Could not fetch "hello"'), ]); }); it('does not call `onError` when returning GraphQL errors while using an `errorPolicy` set to "ignore"', async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error')] - } + errors: [new GraphQLError("error")], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const { result } = renderHook( - () => useQuery(query, { onError, errorPolicy: 'ignore' }), - { wrapper }, + () => useQuery(query, { onError, errorPolicy: "ignore" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBeUndefined(); expect(result.current.error).toBeUndefined(); @@ -2038,38 +2344,47 @@ describe('useQuery Hook', () => { }); it('calls `onError` when a network error has occurred while using an `errorPolicy` set to "ignore"', async () => { - const query = gql`{ hello }`; - const mocks = [ + const query = gql` { - request: { query }, - error: new Error('Could not fetch') - }, + hello + } + `; + const mocks = [ + { + request: { query }, + error: new Error("Could not fetch"), + }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const { result } = renderHook( - () => useQuery(query, { onError, errorPolicy: 'ignore' }), - { wrapper }, + () => useQuery(query, { onError, errorPolicy: "ignore" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBeUndefined(); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('Could not fetch'); + expect(result.current.error!.message).toBe("Could not fetch"); expect(result.current.error!.networkError).toEqual( - new Error('Could not fetch') + new Error("Could not fetch") 
); await waitFor(() => { @@ -2077,76 +2392,94 @@ describe('useQuery Hook', () => { }); await waitFor(() => { expect(onError).toHaveBeenCalledWith( - new ApolloError({ networkError: new Error('Could not fetch') }) + new ApolloError({ networkError: new Error("Could not fetch") }) ); }); }); it('returns partial data and discards GraphQL errors when using an `errorPolicy` set to "ignore"', async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { data: { hello: null }, - errors: [new GraphQLError('Could not fetch "hello"')] - } + errors: [new GraphQLError('Could not fetch "hello"')], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( - () => useQuery(query, { errorPolicy: 'ignore' }), - { wrapper }, + () => useQuery(query, { errorPolicy: "ignore" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.data).toEqual({ hello: null }) + expect(result.current.data).toEqual({ hello: null }); expect(result.current.error).toBeUndefined(); }); it('calls `onCompleted` with partial data but avoids calling `onError` when using an `errorPolicy` set to "ignore"', async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { data: { hello: null }, - errors: [new GraphQLError('Could not fetch "hello"')] - } + errors: [new GraphQLError('Could not fetch "hello"')], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const onCompleted = jest.fn(); const { result } = renderHook( - () => useQuery(query, { onError, onCompleted, errorPolicy: 'ignore' }), - { wrapper }, + () => useQuery(query, { onError, onCompleted, errorPolicy: "ignore" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.data).toEqual({ hello: null }) + expect(result.current.data).toEqual({ hello: null }); expect(result.current.error).toBeUndefined(); await waitFor(() => { @@ -2161,39 +2494,48 @@ describe('useQuery Hook', () => { }); it('calls `onError` when returning GraphQL errors while using an `errorPolicy` set to "all"', async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error')] - } + errors: [new GraphQLError("error")], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const { result } = 
renderHook( - () => useQuery(query, { onError, errorPolicy: 'all' }), - { wrapper }, + () => useQuery(query, { onError, errorPolicy: "all" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBeUndefined(); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); expect(result.current.error!.graphQLErrors).toEqual([ - new GraphQLError('error') + new GraphQLError("error"), ]); await waitFor(() => { @@ -2201,85 +2543,103 @@ describe('useQuery Hook', () => { }); await waitFor(() => { expect(onError).toHaveBeenCalledWith( - new ApolloError({ graphQLErrors: [new GraphQLError('error')] }) + new ApolloError({ graphQLErrors: [new GraphQLError("error")] }) ); }); }); it('returns partial data when returning GraphQL errors while using an `errorPolicy` set to "all"', async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { data: { hello: null }, - errors: [new GraphQLError('Could not fetch "hello"')] - } + errors: [new GraphQLError('Could not fetch "hello"')], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const { result } = renderHook( - () => useQuery(query, { onError, errorPolicy: 'all' }), - { wrapper }, + () => useQuery(query, { onError, errorPolicy: "all" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: null }); expect(result.current.error).toBeInstanceOf(ApolloError); expect(result.current.error!.message).toBe('Could not fetch "hello"'); expect(result.current.error!.graphQLErrors).toEqual([ - new GraphQLError('Could not fetch "hello"') + new GraphQLError('Could not fetch "hello"'), ]); }); it('calls `onError` but not `onCompleted` when returning partial data with GraphQL errors while using an `errorPolicy` set to "all"', async () => { - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { data: { hello: null }, - errors: [new GraphQLError('Could not fetch "hello"')] - } + errors: [new GraphQLError('Could not fetch "hello"')], + }, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const onCompleted = jest.fn(); const { result } = renderHook( - () => useQuery(query, { onError, onCompleted, errorPolicy: 'all' }), - { wrapper }, + () => useQuery(query, { onError, onCompleted, errorPolicy: "all" }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - 
expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: null }); expect(result.current.error).toBeInstanceOf(ApolloError); expect(result.current.error!.message).toBe('Could not fetch "hello"'); expect(result.current.error!.graphQLErrors).toEqual([ - new GraphQLError('Could not fetch "hello"') + new GraphQLError('Could not fetch "hello"'), ]); await waitFor(() => { @@ -2288,7 +2648,7 @@ describe('useQuery Hook', () => { await waitFor(() => { expect(onError).toHaveBeenCalledWith( new ApolloError({ - graphQLErrors: [new GraphQLError('Could not fetch "hello"')] + graphQLErrors: [new GraphQLError('Could not fetch "hello"')], }) ); }); @@ -2297,19 +2657,23 @@ describe('useQuery Hook', () => { }); }); - it('calls `onError` a single time when refetching returns a successful result', async () => { - const query = gql`{ hello }`; + it("calls `onError` a single time when refetching returns a successful result", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error')], + errors: [new GraphQLError("error")], }, }, { request: { query }, result: { - data: { hello: 'world' }, + data: { hello: "world" }, }, delay: 10, }, @@ -2317,94 +2681,120 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const onError = jest.fn(); const { result } = renderHook( - () => useQuery(query, { - onError, - notifyOnNetworkStatusChange: true, - }), - { wrapper }, + () => + useQuery(query, { + onError, + notifyOnNetworkStatusChange: true, + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); await new Promise((resolve) => setTimeout(resolve)); expect(onError).toHaveBeenCalledTimes(1); result.current.refetch(); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); expect(onError).toHaveBeenCalledTimes(1); }); - it('should persist errors on re-render if they are still valid', async () => { - const query = gql`{ hello }`; + it("should persist errors on re-render if they are still valid", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error')] - } - } + errors: [new GraphQLError("error")], + }, + }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider 
mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); let updates = 0; const { result, rerender } = renderHook( () => (updates++, useQuery(query)), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); rerender(); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); let previousUpdates = updates; - await expect(waitFor(() => { - expect(updates).not.toEqual(previousUpdates) - }, { interval: 1, timeout: 20 })).rejects.toThrow() + await expect( + waitFor( + () => { + expect(updates).not.toEqual(previousUpdates); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); }); - it('should not return partial data from cache on refetch with errorPolicy: none (default) and notifyOnNetworkStatusChange: true', async () => { + it("should not return partial data from cache on refetch with errorPolicy: none (default) and notifyOnNetworkStatusChange: true", async () => { const query = gql` { dogs { @@ -2429,21 +2819,23 @@ describe('useQuery Hook', () => { const dogData = [ { - "id": "Z1fdFgU", - "breed": "affenpinscher", - "__typename": "Dog" + id: "Z1fdFgU", + breed: "affenpinscher", + __typename: "Dog", }, { - "id": "ZNDtCU", - "breed": "airedale", - "__typename": "Dog" + id: "ZNDtCU", + breed: "airedale", + __typename: "Dog", }, ]; const detailsMock = (breed: string) => ({ request: { query: GET_DOG_DETAILS, variables: { breed } }, result: { - errors: [new GraphQLError(`Cannot query field "unexisting" on type "Dog".`)], + errors: [ + new GraphQLError(`Cannot query field "unexisting" on type "Dog".`), + ], }, }); @@ -2454,15 +2846,15 @@ describe('useQuery Hook', () => { }, // use the same mock for the initial query on select change // and subsequent refetch() call - detailsMock('airedale'), - detailsMock('airedale'), + detailsMock("airedale"), + detailsMock("airedale"), ]; const Dogs: React.FC<{ onDogSelected: (event: React.ChangeEvent<HTMLSelectElement>) => void; }> = ({ onDogSelected }) => { - const { loading, error, data } = useQuery< - { dogs: { id: string; breed: string; }[] } - >(query); + const { loading, error, data } = useQuery<{ + dogs: { id: string; breed: string }[]; + }>(query); if (loading) return <>Loading...</>; if (error) return <>{`Error! ${error.message}`}</>; @@ -2485,24 +2877,16 @@ describe('useQuery Hook', () => { GET_DOG_DETAILS, { variables: { breed }, - notifyOnNetworkStatusChange: true + notifyOnNetworkStatusChange: true, } ); if (networkStatus === 4) return <p>Refetching!</p>; if (loading) return <p>Loading!</p>; return ( <div> - <div> - {data ? 'Partial data rendered' : null} - </div> + <div>{data ? "Partial data rendered" : null}</div> - <div> - {error ? ( - `Error!: ${error}` - ) : ( - 'Rendering!' - )} - </div> + <div>{error ? 
`Error!: ${error}` : "Rendering!"}</div> <button onClick={() => refetch()}>Refetch!</button> </div> ); @@ -2526,30 +2910,34 @@ describe('useQuery Hook', () => { render(<ParentComponent />); // on initial load, the list of dogs populates the dropdown - await screen.findByText('affenpinscher'); + await screen.findByText("affenpinscher"); // the user selects a different dog from the dropdown which // fires the GET_DOG_DETAILS query, retuning an error const user = userEvent.setup(); await user.selectOptions( - screen.getByRole('combobox'), - screen.getByRole('option', { name: 'airedale' }) + screen.getByRole("combobox"), + screen.getByRole("option", { name: "airedale" }) ); // With the default errorPolicy of 'none', the error is rendered // and partial data is not - await screen.findByText('Error!: ApolloError: Cannot query field "unexisting" on type "Dog".') + await screen.findByText( + 'Error!: ApolloError: Cannot query field "unexisting" on type "Dog".' + ); expect(screen.queryByText(/partial data rendered/i)).toBeNull(); // When we call refetch... - await user.click(screen.getByRole('button', { name: /Refetch!/i })) + await user.click(screen.getByRole("button", { name: /Refetch!/i })); // The error is still present, and partial data still not rendered - await screen.findByText('Error!: ApolloError: Cannot query field "unexisting" on type "Dog".') + await screen.findByText( + 'Error!: ApolloError: Cannot query field "unexisting" on type "Dog".' + ); expect(screen.queryByText(/partial data rendered/i)).toBeNull(); }); - it('should return partial data from cache on refetch', async () => { + it("should return partial data from cache on refetch", async () => { const GET_DOG_DETAILS = gql` query dog($breed: String!) { dog(breed: $breed) { @@ -2562,38 +2950,33 @@ describe('useQuery Hook', () => { result: { data: { dog: { - "id": "ZNDtCU", - "__typename": "Dog" - } - } + id: "ZNDtCU", + __typename: "Dog", + }, + }, }, }); const mocks = [ // use the same mock for the initial query on select change // and subsequent refetch() call - detailsMock('airedale'), - detailsMock('airedale'), + detailsMock("airedale"), + detailsMock("airedale"), ]; const DogDetails: React.FC<{ breed?: string; }> = ({ breed = "airedale" }) => { - const { data, refetch, networkStatus } = useQuery( - GET_DOG_DETAILS, - { - variables: { breed }, - notifyOnNetworkStatusChange: true - } - ); + const { data, refetch, networkStatus } = useQuery(GET_DOG_DETAILS, { + variables: { breed }, + notifyOnNetworkStatusChange: true, + }); if (networkStatus === 1) return <p>Loading!</p>; return ( // Render existing results, but dim the UI until the results // have finished loading... <div style={{ opacity: networkStatus === 4 ? 0.5 : 1 }}> - <div> - {data ? 'Data rendered' : null} - </div> + <div>{data ? "Data rendered" : null}</div> <button onClick={() => refetch()}>Refetch!</button> </div> ); @@ -2611,31 +2994,41 @@ describe('useQuery Hook', () => { const user = userEvent.setup(); - await waitFor(() => { - expect(screen.getByText('Loading!')).toBeTruthy(); - }, { interval: 1 }); + await waitFor( + () => { + expect(screen.getByText("Loading!")).toBeTruthy(); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(screen.getByText('Data rendered')).toBeTruthy(); - }, { interval: 1 }); + await waitFor( + () => { + expect(screen.getByText("Data rendered")).toBeTruthy(); + }, + { interval: 1 } + ); // When we call refetch... 
- await user.click(screen.getByRole('button', { name: /Refetch!/i })) + await user.click(screen.getByRole("button", { name: /Refetch!/i })); // Data from the cache remains onscreen while network request // is made - expect(screen.getByText('Data rendered')).toBeTruthy(); + expect(screen.getByText("Data rendered")).toBeTruthy(); }); - it('should persist errors on re-render with inline onError/onCompleted callbacks', async () => { - const query = gql`{ hello }`; + it("should persist errors on re-render with inline onError/onCompleted callbacks", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error')] - } - } + errors: [new GraphQLError("error")], + }, + }, ]; const cache = new InMemoryCache(); @@ -2650,38 +3043,49 @@ describe('useQuery Hook', () => { let updates = 0; const { result, rerender } = renderHook( - () => (updates++, useQuery(query, { onError: () => {}, onCompleted: () => {} })), - { wrapper }, + () => ( + updates++, + useQuery(query, { onError: () => {}, onCompleted: () => {} }) + ), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); rerender(); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); expect(onErrorFn).toHaveBeenCalledTimes(0); let previousUpdates = updates; - await expect(waitFor(() => { - expect(updates).not.toEqual(previousUpdates) - }, { interval: 1, timeout: 20 })).rejects.toThrow() + await expect( + waitFor( + () => { + expect(updates).not.toEqual(previousUpdates); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); }); - it('should not persist errors when variables change', async () => { + it("should not persist errors when variables change", async () => { const query = gql` query hello($id: ID) { hello(id: $id) @@ -2695,7 +3099,7 @@ describe('useQuery Hook', () => { variables: { id: 1 }, }, result: { - errors: [new GraphQLError('error')] + errors: [new GraphQLError("error")], }, }, { @@ -2704,7 +3108,7 @@ describe('useQuery Hook', () => { variables: { id: 2 }, }, result: { - data: { hello: 'world 2' }, + data: { hello: "world 2" }, }, }, { @@ -2713,7 +3117,7 @@ describe('useQuery Hook', () => { variables: { id: 1 }, }, result: { - data: { hello: 'world 1' }, + data: { hello: "world 1" }, }, }, ]; @@ -2722,35 +3126,39 @@ describe('useQuery Hook', () => { ({ id }) => useQuery(query, { variables: { id } }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), initialProps: { id: 1 }, - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + 
await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error'); + expect(result.current.error!.message).toBe("error"); rerender({ id: 2 }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world 2' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world 2" }); expect(result.current.error).toBe(undefined); rerender({ id: 1 }); @@ -2758,29 +3166,36 @@ describe('useQuery Hook', () => { expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world 1" }); expect(result.current.error).toBe(undefined); }); - it('should render multiple errors when refetching', async () => { - const query = gql`{ hello }`; + it("should render multiple errors when refetching", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('error 1')] - } + errors: [new GraphQLError("error 1")], + }, }, { request: { query }, result: { - errors: [new GraphQLError('error 2')] + errors: [new GraphQLError("error 2")], }, delay: 10, - } + }, ]; const cache = new InMemoryCache(); @@ -2792,58 +3207,71 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { notifyOnNetworkStatusChange: true }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error 1'); + expect(result.current.error!.message).toBe("error 1"); const catchFn = jest.fn(); result.current.refetch().catch(catchFn); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toBe(undefined); - expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('error 2'); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toBe(undefined); + expect(result.current.error).toBeInstanceOf(ApolloError); + expect(result.current.error!.message).toBe("error 2"); expect(catchFn.mock.calls.length).toBe(1); 
expect(catchFn.mock.calls[0].length).toBe(1); expect(catchFn.mock.calls[0][0]).toBeInstanceOf(ApolloError); - expect(catchFn.mock.calls[0][0].message).toBe('error 2'); + expect(catchFn.mock.calls[0][0].message).toBe("error 2"); }); - it('should render the same error on refetch', async () => { - const query = gql`{ hello }`; + it("should render the same error on refetch", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('same error')] - } + errors: [new GraphQLError("same error")], + }, }, { request: { query }, result: { - errors: [new GraphQLError('same error')] - } - } + errors: [new GraphQLError("same error")], + }, + }, ]; const cache = new InMemoryCache(); @@ -2855,21 +3283,23 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { notifyOnNetworkStatusChange: true }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('same error'); - + expect(result.current.error!.message).toBe("same error"); const catchFn = jest.fn(); await act(async () => { @@ -2879,37 +3309,41 @@ describe('useQuery Hook', () => { expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('same error'); + expect(result.current.error!.message).toBe("same error"); expect(catchFn.mock.calls.length).toBe(1); expect(catchFn.mock.calls[0].length).toBe(1); expect(catchFn.mock.calls[0][0]).toBeInstanceOf(ApolloError); - expect(catchFn.mock.calls[0][0].message).toBe('same error'); + expect(catchFn.mock.calls[0][0].message).toBe("same error"); }); - it('should render data and errors with refetch', async () => { - const query = gql`{ hello }`; + it("should render data and errors with refetch", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, result: { - errors: [new GraphQLError('same error')], + errors: [new GraphQLError("same error")], }, }, { request: { query }, result: { - data: { hello: 'world' }, + data: { hello: "world" }, }, delay: 10, }, { request: { query }, result: { - errors: [new GraphQLError('same error')], + errors: [new GraphQLError("same error")], }, delay: 10, - } + }, ]; const cache = new InMemoryCache(); @@ -2921,59 +3355,74 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { notifyOnNetworkStatusChange: true }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('same error'); + expect(result.current.error!.message).toBe("same error"); result.current.refetch(); - await 
waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); expect(result.current.error).toBe(undefined); const catchFn = jest.fn(); result.current.refetch().catch(catchFn); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); // TODO: Is this correct behavior here? - expect(result.current.data).toEqual({ hello: 'world' }); + expect(result.current.data).toEqual({ hello: "world" }); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.error!.message).toBe('same error'); + expect(result.current.error!.message).toBe("same error"); expect(catchFn.mock.calls.length).toBe(1); expect(catchFn.mock.calls[0].length).toBe(1); expect(catchFn.mock.calls[0][0]).toBeInstanceOf(ApolloError); - expect(catchFn.mock.calls[0][0].message).toBe('same error'); + expect(catchFn.mock.calls[0][0].message).toBe("same error"); }); - it('should call onCompleted when variables change', async () => { + it("should call onCompleted when variables change", async () => { const query = gql` query people($first: Int) { allPeople(first: $first) { @@ -2984,8 +3433,8 @@ describe('useQuery Hook', () => { } `; - const data1 = { allPeople: { people: [{ name: 'Luke Skywalker' }] } }; - const data2 = { allPeople: { people: [{ name: 'Han Solo' }] } }; + const data1 = { allPeople: { people: [{ name: "Luke Skywalker" }] } }; + const data2 = { allPeople: { people: [{ name: "Han Solo" }] } }; const mocks = [ { request: { query, variables: { first: 1 } }, @@ -3003,45 +3452,52 @@ describe('useQuery Hook', () => { ({ variables }) => useQuery(query, { variables, onCompleted }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - </MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), initialProps: { variables: { first: 1 }, }, - }, + } ); expect(result.current.loading).toBe(true); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data1); expect(onCompleted).toHaveBeenLastCalledWith(data1); rerender({ variables: { first: 2 } }); expect(result.current.loading).toBe(true); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data2); expect(onCompleted).toHaveBeenLastCalledWith(data2); rerender({ variables: { 
first: 1 } }); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual(data1); - await waitFor(() => { - expect(onCompleted).toHaveBeenLastCalledWith(data1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onCompleted).toHaveBeenLastCalledWith(data1); + }, + { interval: 1 } + ); expect(onCompleted).toHaveBeenCalledTimes(3); }); }); - describe('Pagination', () => { + describe("Pagination", () => { const query = gql` query letters($limit: Int) { letters(limit: $limit) { @@ -3052,13 +3508,13 @@ describe('useQuery Hook', () => { `; const ab = [ - { name: 'A', position: 1 }, - { name: 'B', position: 2 }, + { name: "A", position: 1 }, + { name: "B", position: 2 }, ]; const cd = [ - { name: 'C', position: 3 }, - { name: 'D', position: 4 }, + { name: "C", position: 3 }, + { name: "D", position: 4 }, ]; const mocks = [ @@ -3081,7 +3537,7 @@ describe('useQuery Hook', () => { }, ]; - it('should fetchMore with updateQuery', async () => { + it("should fetchMore with updateQuery", async () => { // TODO: Calling fetchMore with an updateQuery callback is deprecated const warnSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); @@ -3091,7 +3547,7 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { variables: { limit: 2 } }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); @@ -3104,23 +3560,29 @@ describe('useQuery Hook', () => { expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ letters: ab }); - await waitFor(() => void result.current.fetchMore({ - variables: { limit: 2 }, - updateQuery: (prev, { fetchMoreResult }) => ({ - letters: prev.letters.concat(fetchMoreResult.letters), - }), - })); + await waitFor( + () => + void result.current.fetchMore({ + variables: { limit: 2 }, + updateQuery: (prev, { fetchMoreResult }) => ({ + letters: prev.letters.concat(fetchMoreResult.letters), + }), + }) + ); - await waitFor(() => { - expect(result.current.data).toEqual({ letters: ab.concat(cd) }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ letters: ab.concat(cd) }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.networkStatus).toBe(NetworkStatus.ready); warnSpy.mockRestore(); }); - it('should fetchMore with updateQuery and notifyOnNetworkStatusChange', async () => { + it("should fetchMore with updateQuery and notifyOnNetworkStatusChange", async () => { // TODO: Calling fetchMore with an updateQuery callback is deprecated const warnSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); @@ -3129,45 +3591,55 @@ describe('useQuery Hook', () => { ); const { result } = renderHook( - () => useQuery(query, { - variables: { limit: 2 }, - notifyOnNetworkStatusChange: true, - }), - { wrapper }, + () => + useQuery(query, { + variables: { limit: 2 }, + notifyOnNetworkStatusChange: true, + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ letters: ab }); - act(() => void result.current.fetchMore({ - variables: { limit: 2 }, - updateQuery: (prev, { 
fetchMoreResult }) => ({ - letters: prev.letters.concat(fetchMoreResult.letters), - }), - })); + act( + () => + void result.current.fetchMore({ + variables: { limit: 2 }, + updateQuery: (prev, { fetchMoreResult }) => ({ + letters: prev.letters.concat(fetchMoreResult.letters), + }), + }) + ); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.fetchMore); expect(result.current.data).toEqual({ letters: ab }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ letters: ab.concat(cd) }); warnSpy.mockRestore(); }); - it('fetchMore with concatPagination', async () => { + it("fetchMore with concatPagination", async () => { const cache = new InMemoryCache({ typePolicies: { Query: { @@ -3179,33 +3651,41 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( () => useQuery(query, { variables: { limit: 2 } }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ letters: ab }); result.current.fetchMore({ variables: { limit: 2 } }); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toEqual({ letters: ab.concat(cd) }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ letters: ab.concat(cd) }); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); }); - it('fetchMore with concatPagination and notifyOnNetworkStatusChange', async () => { + it("fetchMore with concatPagination and notifyOnNetworkStatusChange", async () => { const cache = new InMemoryCache({ typePolicies: { Query: { @@ -3217,24 +3697,30 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( - () => useQuery(query, { - variables: { limit: 2 }, - notifyOnNetworkStatusChange: true, - }), - { wrapper }, + () => + useQuery(query, { + variables: { limit: 2 }, + notifyOnNetworkStatusChange: true, + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ letters: ab }); @@ -3243,9 +3729,12 @@ describe('useQuery Hook', () => { 
expect(result.current.networkStatus).toBe(NetworkStatus.fetchMore); expect(result.current.data).toEqual({ letters: ab }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ letters: ab.concat(cd) }); }); @@ -3256,18 +3745,18 @@ describe('useQuery Hook', () => { Country: { fields: { cities: { - keyArgs: ['size'], + keyArgs: ["size"], merge(existing, incoming, { args }) { - if (!args) return incoming + if (!args) return incoming; - const items = existing ? existing.slice(0) : [] + const items = existing ? existing.slice(0) : []; - const offset = args.offset ?? 0 + const offset = args.offset ?? 0; for (let i = 0; i < incoming.length; ++i) { - items[offset + i] = incoming[i] + items[offset + i] = incoming[i]; } - return items + return items; }, }, }, @@ -3306,49 +3795,51 @@ describe('useQuery Hook', () => { const countries = [ { - __typename: 'Country', + __typename: "Country", id: 123, biggestCity: { - __typename: 'City', + __typename: "City", id: 234, info: { - __typename: 'CityInfo', + __typename: "CityInfo", airQuality: 0, }, }, - smallCities: [ - { __typename: 'City', id: 345 }, - ], + smallCities: [{ __typename: "City", id: 345 }], }, ]; const wrapper = ({ children }: any) => ( - <MockedProvider mocks={[ - { - request: { query: GET_COUNTRIES }, - result: { data: { countries } }, - }, - ]} cache={cache}>{children}</MockedProvider> + <MockedProvider + mocks={[ + { + request: { query: GET_COUNTRIES }, + result: { data: { countries } }, + }, + ]} + cache={cache} + > + {children} + </MockedProvider> ); - const { result } = renderHook( - () => useQuery(GET_COUNTRIES), - { wrapper }, - ); + const { result } = renderHook(() => useQuery(GET_COUNTRIES), { wrapper }); expect(result.current.loading).toBe(true); expect(result.current.data).toBeUndefined(); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ countries }); }); }); - - describe('Refetching', () => { - it('refetching with different variables', async () => { + describe("Refetching", () => { + it("refetching with different variables", async () => { const query = gql` query ($id: Int) { hello(id: $id) @@ -3358,63 +3849,79 @@ describe('useQuery Hook', () => { const mocks = [ { request: { query, variables: { id: 1 } }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, }, { request: { query, variables: { id: 2 } }, - result: { data: { hello: 'world 2' } }, + result: { data: { hello: "world 2" } }, delay: 10, }, ]; const cache = new InMemoryCache(); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( - () => useQuery(query, { - variables: { id: 1 }, - notifyOnNetworkStatusChange: true, - }), - { wrapper }, + () => + useQuery(query, { + variables: { id: 1 }, + notifyOnNetworkStatusChange: true, + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - 
expect(result.current.data).toEqual({ hello: 'world 1' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world 1" }); result.current.refetch({ id: 2 }); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world 2' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world 2" }); }); - it('refetching after an error', async () => { - const query = gql`{ hello }`; + it("refetching after an error", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, }, { request: { query }, - error: new Error('This is an error!'), + error: new Error("This is an error!"), delay: 10, }, { request: { query }, - result: { data: { hello: 'world 2' } }, + result: { data: { hello: "world 2" } }, delay: 10, }, ]; @@ -3422,57 +3929,73 @@ describe('useQuery Hook', () => { const cache = new InMemoryCache(); const { result } = renderHook( - () => useQuery(query, { - notifyOnNetworkStatusChange: true, - }), + () => + useQuery(query, { + notifyOnNetworkStatusChange: true, + }), { wrapper: ({ children }) => ( <MockedProvider mocks={mocks} cache={cache}> {children} </MockedProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); - expect(result.current.data).toEqual({ hello: 'world 1' }); + expect(result.current.data).toEqual({ hello: "world 1" }); result.current.refetch(); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); - expect(result.current.data).toEqual({ hello: 'world 1' }); + expect(result.current.data).toEqual({ hello: "world 1" }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBeInstanceOf(ApolloError); - expect(result.current.data).toEqual({ hello: 'world 1' }); + expect(result.current.data).toEqual({ hello: "world 1" }); result.current.refetch(); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); - expect(result.current.data).toEqual({ hello: 'world 1' }); + expect(result.current.data).toEqual({ hello: "world 1" }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); 
expect(result.current.error).toBe(undefined); - expect(result.current.data).toEqual({ hello: 'world 2' }); + expect(result.current.data).toEqual({ hello: "world 2" }); }); - describe('refetchWritePolicy', () => { + describe("refetchWritePolicy", () => { const query = gql` - query GetPrimes ($min: number, $max: number) { + query GetPrimes($min: number, $max: number) { primes(min: $min, max: $max) } `; @@ -3486,8 +4009,8 @@ describe('useQuery Hook', () => { result: { data: { primes: [2, 3, 5, 7, 11], - } - } + }, + }, }, { request: { @@ -3497,7 +4020,7 @@ describe('useQuery Hook', () => { result: { data: { primes: [13, 17, 19, 23, 29], - } + }, }, delay: 10, }, @@ -3522,40 +4045,46 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( - () => useQuery(query, { - variables: { min: 0, max: 12 }, - notifyOnNetworkStatusChange: true, - // This is the key line in this test. - refetchWritePolicy: 'overwrite', - }), - { wrapper }, + () => + useQuery(query, { + variables: { min: 0, max: 12 }, + notifyOnNetworkStatusChange: true, + // This is the key line in this test. + refetchWritePolicy: "overwrite", + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); - expect(typeof result.current.refetch).toBe('function'); + expect(typeof result.current.refetch).toBe("function"); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBeUndefined(); expect(result.current.data).toEqual({ primes: [2, 3, 5, 7, 11] }); - expect(mergeParams).toEqual([ - [void 0, [2, 3, 5, 7, 11]], - ]); - + expect(mergeParams).toEqual([[void 0, [2, 3, 5, 7, 11]]]); const thenFn = jest.fn(); result.current.refetch({ min: 12, max: 30 }).then(thenFn); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toEqual({ // We get the stale data because we configured keyArgs: false. @@ -3566,9 +4095,12 @@ describe('useQuery Hook', () => { // called refetch with new variables. expect(result.current.networkStatus).toBe(NetworkStatus.setVariables); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toEqual({ primes: [13, 17, 19, 23, 29] }); @@ -3606,40 +4138,46 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( - () => useQuery(query, { - variables: { min: 0, max: 12 }, - notifyOnNetworkStatusChange: true, - // This is the key line in this test. 
- refetchWritePolicy: 'merge', - }), - { wrapper }, + () => + useQuery(query, { + variables: { min: 0, max: 12 }, + notifyOnNetworkStatusChange: true, + // This is the key line in this test. + refetchWritePolicy: "merge", + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); - expect(typeof result.current.refetch).toBe('function'); + expect(typeof result.current.refetch).toBe("function"); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBeUndefined(); expect(result.current.data).toEqual({ primes: [2, 3, 5, 7, 11] }); - expect(mergeParams).toEqual([ - [undefined, [2, 3, 5, 7, 11]], - ]); - + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); const thenFn = jest.fn(); result.current.refetch({ min: 12, max: 30 }).then(thenFn); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toEqual({ // We get the stale data because we configured keyArgs: false. @@ -3648,9 +4186,12 @@ describe('useQuery Hook', () => { // This networkStatus is setVariables instead of refetch because we // called refetch with new variables. expect(result.current.networkStatus).toBe(NetworkStatus.setVariables); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toEqual({ @@ -3659,13 +4200,19 @@ describe('useQuery Hook', () => { expect(mergeParams).toEqual([ [void 0, [2, 3, 5, 7, 11]], // This indicates concatenation happened. - [[2, 3, 5, 7, 11], [13, 17, 19, 23, 29]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], ]); expect(mergeParams).toEqual([ [undefined, [2, 3, 5, 7, 11]], // Without refetchWritePolicy: "overwrite", this array will be // all 10 primes (2 through 29) together. - [[2, 3, 5, 7, 11], [13, 17, 19, 23, 29]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], ]); expect(thenFn).toHaveBeenCalledTimes(1); @@ -3695,39 +4242,45 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <MockedProvider mocks={mocks} cache={cache}>{children}</MockedProvider> + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> ); const { result } = renderHook( - () => useQuery(query, { - variables: { min: 0, max: 12 }, - notifyOnNetworkStatusChange: true, - // Intentionally not passing refetchWritePolicy. - }), - { wrapper }, + () => + useQuery(query, { + variables: { min: 0, max: 12 }, + notifyOnNetworkStatusChange: true, + // Intentionally not passing refetchWritePolicy. 
+ }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); - expect(typeof result.current.refetch).toBe('function'); + expect(typeof result.current.refetch).toBe("function"); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBeUndefined(); expect(result.current.data).toEqual({ primes: [2, 3, 5, 7, 11] }); - expect(mergeParams).toEqual([ - [void 0, [2, 3, 5, 7, 11]], - ]); - + expect(mergeParams).toEqual([[void 0, [2, 3, 5, 7, 11]]]); const thenFn = jest.fn(); result.current.refetch({ min: 12, max: 30 }).then(thenFn); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toEqual({ // We get the stale data because we configured keyArgs: false. @@ -3738,9 +4291,12 @@ describe('useQuery Hook', () => { // called refetch with new variables. expect(result.current.networkStatus).toBe(NetworkStatus.setVariables); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toEqual({ primes: [13, 17, 19, 23, 29] }); @@ -3761,14 +4317,18 @@ describe('useQuery Hook', () => { }); }); - describe('Callbacks', () => { - it('onCompleted is called once with cached data', async () => { - const query = gql`{ hello }`; + describe("Callbacks", () => { + it("onCompleted is called once with cached data", async () => { + const query = gql` + { + hello + } + `; const cache = new InMemoryCache(); cache.writeQuery({ query, - data: { hello: 'world' }, + data: { hello: "world" }, }); const wrapper = ({ children }: any) => ( @@ -3779,34 +4339,51 @@ describe('useQuery Hook', () => { const onCompleted = jest.fn(); const { result } = renderHook( - () => (useQuery(query, { - fetchPolicy: 'cache-only', - onCompleted, - })), - { wrapper }, + () => + useQuery(query, { + fetchPolicy: "cache-only", + onCompleted, + }), + { wrapper } ); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: 'world' }); - }, { interval: 1 }); - await waitFor(() => { - expect(onCompleted).toHaveBeenCalledTimes(1); - }, { interval: 1 }); - await waitFor(() => { - expect(onCompleted).toHaveBeenCalledWith({ hello: 'world' }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world" }); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(onCompleted).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(onCompleted).toHaveBeenCalledWith({ hello: "world" }); + }, + { interval: 1 } + ); expect(onCompleted).toHaveBeenCalledTimes(1); }); - it('onCompleted is called once despite state changes', async () => { - const query = gql`{ hello }`; + it("onCompleted is called once despite state changes", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { 
query }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, ]; @@ -3819,37 +4396,45 @@ describe('useQuery Hook', () => { const onCompleted = jest.fn(); const { result, rerender } = renderHook( - () => useQuery(query, { - onCompleted, - }), - { wrapper }, + () => + useQuery(query, { + onCompleted, + }), + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); - expect(result.current.data).toEqual({ hello: 'world' }); + expect(result.current.data).toEqual({ hello: "world" }); expect(onCompleted).toHaveBeenCalledTimes(1); - expect(onCompleted).toHaveBeenCalledWith({ hello: 'world' }); + expect(onCompleted).toHaveBeenCalledWith({ hello: "world" }); rerender(); expect(result.current.loading).toBe(false); - expect(result.current.data).toEqual({ hello: 'world' }); + expect(result.current.data).toEqual({ hello: "world" }); expect(onCompleted).toHaveBeenCalledTimes(1); - expect(onCompleted).toHaveBeenCalledWith({ hello: 'world' }); + expect(onCompleted).toHaveBeenCalledWith({ hello: "world" }); expect(onCompleted).toHaveBeenCalledTimes(1); }); - it('should not call onCompleted if skip is true', async () => { - const query = gql`{ hello }`; + it("should not call onCompleted if skip is true", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, ]; @@ -3862,28 +4447,41 @@ describe('useQuery Hook', () => { const onCompleted = jest.fn(); const { result } = renderHook( - () => useQuery(query, { - skip: true, - onCompleted, - }), - { wrapper }, + () => + useQuery(query, { + skip: true, + onCompleted, + }), + { wrapper } ); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); - await expect(waitFor(() => { - expect(onCompleted).not.toHaveBeenCalledTimes(0); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + expect(onCompleted).toHaveBeenCalledTimes(0); + + await expect( + waitFor( + () => { + expect(onCompleted).toHaveBeenCalledTimes(1); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); + expect(onCompleted).toHaveBeenCalledTimes(0); }); - it('should not make extra network requests when `onCompleted` is defined with a `network-only` fetch policy', async () => { - const query = gql`{ hello }`; + it("should not make extra network requests when `onCompleted` is defined with a `network-only` fetch policy", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, ]; @@ -3899,79 +4497,170 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => { const pendingResult = useQuery(query, { - fetchPolicy: 'network-only', + fetchPolicy: "network-only", onCompleted, }); updates++; return pendingResult; }, - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); - const previousUpdates = updates - await expect(waitFor(() => { - expect(updates).not.toEqual(previousUpdates) - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await waitFor( + () => { + 
expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); + const previousUpdates = updates; + await expect( + waitFor( + () => { + expect(updates).not.toEqual(previousUpdates); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); expect(onCompleted).toHaveBeenCalledTimes(1); }); - it('onCompleted should work with polling', async () => { - const query = gql`{ hello }`; + it("onCompleted should not fire for polling queries without notifyOnNetworkStatusChange: true", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world 1' } }, + result: { data: { hello: "world 1" } }, }, { request: { query }, - result: { data: { hello: 'world 2' } }, + result: { data: { hello: "world 2" } }, }, { request: { query }, - result: { data: { hello: 'world 3' } }, + result: { data: { hello: "world 3" } }, }, ]; const cache = new InMemoryCache(); const onCompleted = jest.fn(); const { result } = renderHook( - () => useQuery(query, { - onCompleted, - pollInterval: 10, - }), + () => + useQuery(query, { + onCompleted, + pollInterval: 10, + }), { wrapper: ({ children }) => ( <MockedProvider mocks={mocks} cache={cache}> {children} </MockedProvider> ), + } + ); + + expect(result.current.loading).toBe(true); + + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1 } + ); + expect(result.current.loading).toBe(false); + expect(onCompleted).toHaveBeenCalledTimes(1); + + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 2" }); + }, + { interval: 1 } + ); + expect(result.current.loading).toBe(false); + expect(onCompleted).toHaveBeenCalledTimes(1); + + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); + expect(result.current.loading).toBe(false); + expect(onCompleted).toHaveBeenCalledTimes(1); + }); + + it("onCompleted should fire when polling with notifyOnNetworkStatusChange: true", async () => { + const query = gql` + { + hello + } + `; + const mocks = [ + { + request: { query }, + result: { data: { hello: "world 1" } }, + }, + { + request: { query }, + result: { data: { hello: "world 2" } }, + }, + { + request: { query }, + result: { data: { hello: "world 3" } }, }, + ]; + + const cache = new InMemoryCache(); + const onCompleted = jest.fn(); + const { result } = renderHook( + () => + useQuery(query, { + onCompleted, + notifyOnNetworkStatusChange: true, + pollInterval: 10, + }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> + ), + } ); expect(result.current.loading).toBe(true); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: 'world 1' }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(onCompleted).toHaveBeenCalledTimes(1); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: 'world 2' }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 2" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(onCompleted).toHaveBeenCalledTimes(2); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: 'world 3' }); - }, { interval: 1 }); + await waitFor( + () => { + 
expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(onCompleted).toHaveBeenCalledTimes(3); }); @@ -4021,29 +4710,161 @@ describe('useQuery Hook', () => { expect(errorSpy).not.toHaveBeenCalled(); errorSpy.mockRestore(); }); - }); - describe('Optimistic data', () => { - it('should display rolled back optimistic data when an error occurs', async () => { + it("onCompleted should not execute on cache writes after initial query execution", async () => { const query = gql` - query AllCars { - cars { - id - make - model - } + { + hello } `; - - const carsData = { - cars: [ - { - id: 1, - make: 'Audi', - model: 'RS8', - __typename: 'Car' - } - ] + const mocks = [ + { + request: { query }, + result: { data: { hello: "foo" } }, + }, + { + request: { query }, + result: { data: { hello: "bar" } }, + }, + ]; + const link = new MockLink(mocks); + const cache = new InMemoryCache(); + const onCompleted = jest.fn(); + + const ChildComponent: React.FC = () => { + const { data, client } = useQuery(query, { onCompleted }); + function refetchQueries() { + client.refetchQueries({ include: "active" }); + } + function writeQuery() { + client.writeQuery({ query, data: { hello: "baz" } }); + } + return ( + <div> + <span>Data: {data?.hello}</span> + <button onClick={() => refetchQueries()}>Refetch queries</button> + <button onClick={() => writeQuery()}>Update word</button> + </div> + ); + }; + + const ParentComponent: React.FC = () => { + return ( + <MockedProvider link={link} cache={cache}> + <div> + <ChildComponent /> + </div> + </MockedProvider> + ); + }; + + render(<ParentComponent />); + + await screen.findByText("Data: foo"); + await userEvent.click( + screen.getByRole("button", { name: /refetch queries/i }) + ); + expect(onCompleted).toBeCalledTimes(1); + await screen.findByText("Data: bar"); + await userEvent.click( + screen.getByRole("button", { name: /update word/i }) + ); + expect(onCompleted).toBeCalledTimes(1); + await screen.findByText("Data: baz"); + expect(onCompleted).toBeCalledTimes(1); + }); + + it("onCompleted should execute on cache writes after initial query execution with notifyOnNetworkStatusChange: true", async () => { + const query = gql` + { + hello + } + `; + const mocks = [ + { + request: { query }, + result: { data: { hello: "foo" } }, + }, + { + request: { query }, + result: { data: { hello: "bar" } }, + }, + ]; + const link = new MockLink(mocks); + const cache = new InMemoryCache(); + const onCompleted = jest.fn(); + + const ChildComponent: React.FC = () => { + const { data, client } = useQuery(query, { + onCompleted, + notifyOnNetworkStatusChange: true, + }); + function refetchQueries() { + client.refetchQueries({ include: "active" }); + } + function writeQuery() { + client.writeQuery({ query, data: { hello: "baz" } }); + } + return ( + <div> + <span>Data: {data?.hello}</span> + <button onClick={() => refetchQueries()}>Refetch queries</button> + <button onClick={() => writeQuery()}>Update word</button> + </div> + ); + }; + + const ParentComponent: React.FC = () => { + return ( + <MockedProvider link={link} cache={cache}> + <div> + <ChildComponent /> + </div> + </MockedProvider> + ); + }; + + render(<ParentComponent />); + + await screen.findByText("Data: foo"); + expect(onCompleted).toBeCalledTimes(1); + await userEvent.click( + screen.getByRole("button", { name: /refetch queries/i }) + ); + // onCompleted increments when refetch occurs since we're hitting the network... 
+ expect(onCompleted).toBeCalledTimes(2); + await screen.findByText("Data: bar"); + await userEvent.click( + screen.getByRole("button", { name: /update word/i }) + ); + // but not on direct cache write, since there's no network request to complete + expect(onCompleted).toBeCalledTimes(2); + await screen.findByText("Data: baz"); + expect(onCompleted).toBeCalledTimes(2); + }); + }); + + describe("Optimistic data", () => { + it("should display rolled back optimistic data when an error occurs", async () => { + const query = gql` + query AllCars { + cars { + id + make + model + } + } + `; + + const carsData = { + cars: [ + { + id: 1, + make: "Audi", + model: "RS8", + __typename: "Car", + }, + ], }; const mutation = gql` @@ -4058,16 +4879,13 @@ describe('useQuery Hook', () => { const carData = { id: 2, - make: 'Ford', - model: 'Pinto', - __typename: 'Car' + make: "Ford", + model: "Pinto", + __typename: "Car", }; const allCarsData = { - cars: [ - carsData.cars[0], - carData - ] + cars: [carsData.cars[0], carData], }; const mocks = [ @@ -4077,9 +4895,9 @@ describe('useQuery Hook', () => { }, { request: { query: mutation }, - error: new Error('Oh no!'), + error: new Error("Oh no!"), delay: 500, - } + }, ]; const cache = new InMemoryCache(); @@ -4100,16 +4918,21 @@ describe('useQuery Hook', () => { cars(existing, { readField }) { const newCarRef = cache.writeFragment({ data: data!.addCar, - fragment: gql`fragment NewCar on Car { - id - make - model - }`, + fragment: gql` + fragment NewCar on Car { + id + make + model + } + `, }); - if (existing.some( - (ref: Reference) => readField('id', ref) === data!.addCar.id - )) { + if ( + existing.some( + (ref: Reference) => + readField("id", ref) === data!.addCar.id + ) + ) { return existing; } @@ -4122,15 +4945,18 @@ describe('useQuery Hook', () => { }), query: useQuery(query), }), - { wrapper }, + { wrapper } ); expect(result.current.query.loading).toBe(true); const mutate = result.current.mutation[0]; - await waitFor(() => { - expect(result.current.query.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.query.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.query.loading).toBe(false); expect(result.current.query.data).toEqual(carsData); @@ -4152,7 +4978,7 @@ describe('useQuery Hook', () => { await waitFor(() => { expect(result.current.mutation[1].loading).toBe(false); - }) + }); // The mutation has completely finished, leaving the query with access to // the original cache data. 
@@ -4161,15 +4987,20 @@ describe('useQuery Hook', () => { expect(result.current.query.data).toEqual(carsData); expect(onError).toHaveBeenCalledTimes(1); - expect(onError.mock.calls[0][0].message).toBe('Oh no!'); + expect(onError.mock.calls[0][0].message).toBe("Oh no!"); }); }); - describe('Partial refetch', () => { - it('should attempt a refetch when data is missing and partialRefetch is true', async () => { - const errorSpy = jest.spyOn(console, 'error') + describe("Partial refetch", () => { + it("should attempt a refetch when data is missing and partialRefetch is true", async () => { + const errorSpy = jest + .spyOn(console, "error") .mockImplementation(() => {}); - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const link = mockSingleLink( { @@ -4190,17 +5021,16 @@ describe('useQuery Hook', () => { }); const { result } = renderHook( - () => useQuery(query, { - partialRefetch: true, - notifyOnNetworkStatusChange: true, - }), + () => + useQuery(query, { + partialRefetch: true, + notifyOnNetworkStatusChange: true, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); @@ -4208,27 +5038,33 @@ describe('useQuery Hook', () => { expect(result.current.error).toBe(undefined); expect(result.current.networkStatus).toBe(NetworkStatus.loading); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.refetch); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.refetch); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); expect(errorSpy).toHaveBeenCalledTimes(1); - expect(errorSpy.mock.calls[0][0]).toMatch('Missing field'); + expect(errorSpy.mock.calls[0][0]).toMatch("Missing field"); errorSpy.mockRestore(); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - expect(result.current.data).toEqual({ hello: 'world' }); + expect(result.current.data).toEqual({ hello: "world" }); expect(result.current.error).toBe(undefined); }); - it('should attempt a refetch when data is missing and partialRefetch is true 2', async () => { + it("should attempt a refetch when data is missing and partialRefetch is true 2", async () => { const query = gql` query people { allPeople(first: 1) { @@ -4240,10 +5076,11 @@ describe('useQuery Hook', () => { `; const data = { - allPeople: { people: [{ name: 'Luke Skywalker' }] }, + allPeople: { people: [{ name: "Luke Skywalker" }] }, }; - const errorSpy = jest.spyOn(console, 'error') + const errorSpy = jest + .spyOn(console, "error") .mockImplementation(() => {}); const link = mockSingleLink( { request: { query }, result: { data: {} }, delay: 20 }, @@ -4256,17 +5093,16 @@ describe('useQuery Hook', () => { }); const { result } = renderHook( - () => useQuery(query, { - partialRefetch: true, - notifyOnNetworkStatusChange: true, - }), + () => + useQuery(query, { + partialRefetch: true, + notifyOnNetworkStatusChange: true, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider 
client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); @@ -4274,29 +5110,40 @@ describe('useQuery Hook', () => { expect(result.current.error).toBe(undefined); expect(result.current.networkStatus).toBe(NetworkStatus.loading); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.refetch); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.refetch); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); expect(errorSpy).toHaveBeenCalledTimes(1); - expect(errorSpy.mock.calls[0][0]).toMatch('Missing field'); + expect(errorSpy.mock.calls[0][0]).toMatch("Missing field"); errorSpy.mockRestore(); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual(data); expect(result.current.error).toBe(undefined); }); - it('should attempt a refetch when data is missing, partialRefetch is true and addTypename is false for the cache', async () => { - const errorSpy = jest.spyOn(console, 'error') + it("should attempt a refetch when data is missing, partialRefetch is true and addTypename is false for the cache", async () => { + const errorSpy = jest + .spyOn(console, "error") .mockImplementation(() => {}); - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const link = mockSingleLink( { @@ -4318,17 +5165,16 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ); const { result } = renderHook( - () => useQuery(query, { - partialRefetch: true, - notifyOnNetworkStatusChange: true, - }), - { wrapper }, + () => + useQuery(query, { + partialRefetch: true, + notifyOnNetworkStatusChange: true, + }), + { wrapper } ); expect(result.current.loading).toBe(true); @@ -4336,29 +5182,35 @@ describe('useQuery Hook', () => { expect(result.current.error).toBe(undefined); expect(result.current.networkStatus).toBe(NetworkStatus.loading); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.refetch); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.refetch); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); expect(errorSpy).toHaveBeenCalledTimes(1); - expect(errorSpy.mock.calls[0][0]).toMatch('Missing field'); + expect(errorSpy.mock.calls[0][0]).toMatch("Missing field"); errorSpy.mockRestore(); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - expect(result.current.data).toEqual({ hello: 'world' }); + expect(result.current.data).toEqual({ hello: "world" }); expect(result.current.error).toBe(undefined); }); }); - describe('Client Resolvers', () => { - it('should receive up to date @client(always: true) fields on entity update', async () => { + 
describe("Client Resolvers", () => { + it("should receive up to date @client(always: true) fields on entity update", async () => { const query = gql` query GetClientData($id: ID) { clientEntity(id: $id) @client(always: true) { @@ -4388,23 +5240,23 @@ describe('useQuery Hook', () => { resolvers: { ClientData: { titleLength(data) { - return data.title.length - } + return data.title.length; + }, }, Query: { - clientEntity(_root, {id}, {cache}) { + clientEntity(_root, { id }, { cache }) { return cache.readFragment({ - id: cache.identify({id, __typename: "ClientData"}), + id: cache.identify({ id, __typename: "ClientData" }), fragment, }); }, }, Mutation: { - addOrUpdate(_root, {id, title}, {cache}) { + addOrUpdate(_root, { id, title }, { cache }) { return cache.writeFragment({ - id: cache.identify({id, __typename: "ClientData"}), + id: cache.identify({ id, __typename: "ClientData" }), fragment, - data: {id, title, __typename: "ClientData"}, + data: { id, title, __typename: "ClientData" }, }); }, }, @@ -4412,8 +5264,8 @@ describe('useQuery Hook', () => { }); const entityId = 1; - const shortTitle = 'Short'; - const longerTitle = 'A little longer'; + const shortTitle = "Short"; + const longerTitle = "A little longer"; client.mutate({ mutation, variables: { @@ -4423,28 +5275,29 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ); const { result } = renderHook( () => useQuery(query, { variables: { id: entityId } }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ clientEntity: { id: entityId, title: shortTitle, titleLength: shortTitle.length, - __typename: 'ClientData', + __typename: "ClientData", }, }); @@ -4454,45 +5307,56 @@ describe('useQuery Hook', () => { variables: { id: entityId, title: longerTitle, - } + }, }); }); - await waitFor(() => { - expect(result.current.data).toEqual({ - clientEntity: { - id: entityId, - title: longerTitle, - titleLength: longerTitle.length, - __typename: "ClientData", - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ + clientEntity: { + id: entityId, + title: longerTitle, + titleLength: longerTitle.length, + __typename: "ClientData", + }, + }); + }, + { interval: 1 } + ); }); }); - describe('Skipping', () => { - const query = gql`query greeting($someVar: Boolean) { hello }`; + describe("Skipping", () => { + const query = gql` + query greeting($someVar: Boolean) { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, { request: { query, variables: { someVar: true }, }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, ]; - it('should skip running a query when `skip` is `true`', async () => { - const query = gql`{ hello }`; + it("should skip running a query when `skip` is `true`", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, ]; @@ -4505,7 +5369,7 @@ describe('useQuery Hook', () => { const { result, rerender } = 
renderHook( ({ skip }) => useQuery(query, { skip }), - { wrapper, initialProps: { skip: true } }, + { wrapper, initialProps: { skip: true } } ); expect(result.current.loading).toBe(false); @@ -4515,13 +5379,16 @@ describe('useQuery Hook', () => { expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBeFalsy(); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current.loading).toBeFalsy(); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); }); - it('should not make network requests when `skip` is `true`', async () => { + it("should not make network requests when `skip` is `true`", async () => { const linkFn = jest.fn(); const link = new ApolloLink((o, f) => { linkFn(); @@ -4533,23 +5400,24 @@ describe('useQuery Hook', () => { }); const wrapper = ({ children }: any) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ); const { result, rerender } = renderHook( ({ skip, variables }) => useQuery(query, { skip, variables }), - { wrapper, initialProps: { skip: false, variables: undefined as any } }, + { wrapper, initialProps: { skip: false, variables: undefined as any } } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); rerender({ skip: true, variables: { someVar: true } }); expect(result.current.loading).toBe(false); @@ -4557,88 +5425,96 @@ describe('useQuery Hook', () => { expect(linkFn).toHaveBeenCalledTimes(1); }); - it('should tear down the query if `skip` is `true`', async () => { + it("should tear down the query if `skip` is `true`", async () => { const client = new ApolloClient({ - link: new ApolloLink(() => Observable.of({ data: { hello: 'world' } })), + link: new ApolloLink(() => Observable.of({ data: { hello: "world" } })), cache: new InMemoryCache(), }); const wrapper = ({ children }: any) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ); - const { unmount } = renderHook( - () => useQuery(query, { skip: true }), - { wrapper }, - ); + const { unmount } = renderHook(() => useQuery(query, { skip: true }), { + wrapper, + }); - expect(client.getObservableQueries('all').size).toBe(1); + expect(client.getObservableQueries("all").size).toBe(1); unmount(); - await new Promise(resolve => setTimeout(resolve)); - expect(client.getObservableQueries('all').size).toBe(0); + await new Promise((resolve) => setTimeout(resolve)); + expect(client.getObservableQueries("all").size).toBe(0); }); - it('should treat fetchPolicy standby like skip', async () => { - const query = gql`{ hello }`; + it("should treat fetchPolicy standby like skip", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, - result: { data: { hello: 'world' } }, + result: { data: { hello: "world" } }, }, ]; const { result, rerender } = renderHook( ({ fetchPolicy }) => useQuery(query, { fetchPolicy }), { wrapper: ({ children }) => ( - <MockedProvider mocks={mocks}> - {children} - 
</MockedProvider> + <MockedProvider mocks={mocks}>{children}</MockedProvider> ), - initialProps: { fetchPolicy: 'standby' as any }, - }, + initialProps: { fetchPolicy: "standby" as any }, + } ); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); - await expect(waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); - rerender({ fetchPolicy: 'cache-first' }); + rerender({ fetchPolicy: "cache-first" }); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBeFalsy(); - }, { interval: 1 }); - expect(result.current.data).toEqual({ hello: 'world' }); + await waitFor( + () => { + expect(result.current.loading).toBeFalsy(); + }, + { interval: 1 } + ); + expect(result.current.data).toEqual({ hello: "world" }); }); // Amusingly, #8270 thinks this is a bug, but #9101 thinks this is not. - it('should refetch when skip is true', async () => { - const query = gql`{ hello }`; - const link = new ApolloLink(() => Observable.of({ - data: { hello: 'world' }, - })); + it("should refetch when skip is true", async () => { + const query = gql` + { + hello + } + `; + const link = new ApolloLink(() => + Observable.of({ + data: { hello: "world" }, + }) + ); - const requestSpy = jest.spyOn(link, 'request'); + const requestSpy = jest.spyOn(link, "request"); const client = new ApolloClient({ cache: new InMemoryCache(), link, }); - const { result } = renderHook( - () => useQuery(query, { skip: true }), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query, { skip: true }), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); let promise; await waitFor(async () => { @@ -4646,29 +5522,41 @@ describe('useQuery Hook', () => { }); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toBe(undefined); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toBe(undefined); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toBe(undefined); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toBe(undefined); + }, + { interval: 1 } + ); expect(requestSpy).toHaveBeenCalledTimes(1); requestSpy.mockRestore(); expect(promise).resolves.toEqual({ - data: {hello: "world"}, + data: { hello: "world" }, loading: false, networkStatus: 7, }); }); - it('should set correct initialFetchPolicy even if skip:true', async () => { - const query = gql`{ hello }`; + it("should set correct initialFetchPolicy even if skip:true", async () => { + const query = gql` + { + hello + } + `; let linkCount = 0; - const link = new ApolloLink(() => Observable.of({ - data: { hello: ++linkCount }, - })); + const link = new ApolloLink(() => + Observable.of({ + data: { hello: ++linkCount }, + }) + ); const client = new ApolloClient({ cache: new InMemoryCache(), @@ -4678,26 +5566,28 @@ describe('useQuery Hook', () => { const correctInitialFetchPolicy: WatchQueryFetchPolicy = "cache-and-network"; - const { result, rerender } = renderHook<QueryResult, { - skip: boolean; - }>( - ({ skip = true }) => useQuery(query, { - // Skipping equates to 
using a fetchPolicy of "standby", but that - // should not mean we revert to standby whenever we want to go back to - // the initial fetchPolicy (e.g. when variables change). - skip, - fetchPolicy: correctInitialFetchPolicy, - }), + const { result, rerender } = renderHook< + QueryResult, + { + skip: boolean; + } + >( + ({ skip = true }) => + useQuery(query, { + // Skipping equates to using a fetchPolicy of "standby", but that + // should not mean we revert to standby whenever we want to go back to + // the initial fetchPolicy (e.g. when variables change). + skip, + fetchPolicy: correctInitialFetchPolicy, + }), { initialProps: { skip: true, }, wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(false); @@ -4705,77 +5595,75 @@ describe('useQuery Hook', () => { function check( expectedFetchPolicy: WatchQueryFetchPolicy, - expectedInitialFetchPolicy: WatchQueryFetchPolicy, + expectedInitialFetchPolicy: WatchQueryFetchPolicy ) { const { observable } = result.current; - const { - fetchPolicy, - initialFetchPolicy, - } = observable.options; + const { fetchPolicy, initialFetchPolicy } = observable.options; expect(fetchPolicy).toBe(expectedFetchPolicy); expect(initialFetchPolicy).toBe(expectedInitialFetchPolicy); } - check( - "standby", - correctInitialFetchPolicy, - ); + check("standby", correctInitialFetchPolicy); rerender({ skip: false, }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ hello: 1, }); - check( - correctInitialFetchPolicy, - correctInitialFetchPolicy, - ); + check(correctInitialFetchPolicy, correctInitialFetchPolicy); const reasons: string[] = []; - const reobservePromise = result.current.observable.reobserve({ - variables: { - newVar: true, - }, - nextFetchPolicy(currentFetchPolicy, context) { - expect(currentFetchPolicy).toBe("cache-and-network"); - expect(context.initialFetchPolicy).toBe("cache-and-network"); - reasons.push(context.reason); - return currentFetchPolicy; - }, - }).then(result => { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ hello: 2 }); - }); + const reobservePromise = result.current.observable + .reobserve({ + variables: { + newVar: true, + }, + nextFetchPolicy(currentFetchPolicy, context) { + expect(currentFetchPolicy).toBe("cache-and-network"); + expect(context.initialFetchPolicy).toBe("cache-and-network"); + reasons.push(context.reason); + return currentFetchPolicy; + }, + }) + .then((result) => { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ hello: 2 }); + }); expect(result.current.loading).toBe(false); - await waitFor(() => { - expect(result.current.data).toEqual({ - hello: 2, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual({ + hello: 2, + }); + }, + { interval: 1 } + ); await reobservePromise; - expect(reasons).toEqual([ - "variables-changed", - "after-fetch", - ]); + expect(reasons).toEqual(["variables-changed", "after-fetch"]); }); }); - describe('Missing Fields', () => { - it('should log debug messages about MissingFieldErrors from the cache', async () => { - const errorSpy = jest.spyOn(console, 'error').mockImplementation(() => {}); + describe("Missing Fields", () => { + it("should log debug messages about 
MissingFieldErrors from the cache", async () => { + const errorSpy = jest + .spyOn(console, "error") + .mockImplementation(() => {}); const carQuery: DocumentNode = gql` query cars($id: Int) { @@ -4793,18 +5681,18 @@ describe('useQuery Hook', () => { cars: [ { id: 1, - make: 'Audi', - model: 'RS8', - vine: 'DOLLADOLLABILL', - __typename: 'Car' - } - ] + make: "Audi", + model: "RS8", + vine: "DOLLADOLLABILL", + __typename: "Car", + }, + ], }; const mocks = [ { request: { query: carQuery, variables: { id: 1 } }, - result: { data: carData } + result: { data: carData }, }, ]; @@ -4817,32 +5705,37 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(carQuery, { variables: { id: 1 } }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(carData); expect(result.current.error).toBeUndefined(); - expect(errorSpy).toHaveBeenCalledTimes(1); + expect(errorSpy).toHaveBeenCalled(); expect(errorSpy).toHaveBeenLastCalledWith( - `Missing field 'vin' while writing result ${JSON.stringify({ + `Missing field '%s' while writing result %o`, + "vin", + { id: 1, make: "Audi", model: "RS8", vine: "DOLLADOLLABILL", - __typename: "Car" - }, null, 2)}` + __typename: "Car", + } ); errorSpy.mockRestore(); }); - it('should return partial cache data when `returnPartialData` is true', async () => { + it("should return partial cache data when `returnPartialData` is true", async () => { const cache = new InMemoryCache(); const client = new ApolloClient({ cache, @@ -4867,15 +5760,15 @@ describe('useQuery Hook', () => { data: { cars: [ { - __typename: 'Car', - make: 'Ford', - model: 'Mustang', - vin: 'PONY123', + __typename: "Car", + make: "Ford", + model: "Mustang", + vin: "PONY123", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', - description: 'Could not get after it.', + __typename: "Repair", + date: "2019-05-08", + description: "Could not get after it.", }, ], }, @@ -4898,22 +5791,20 @@ describe('useQuery Hook', () => { () => useQuery(partialQuery, { returnPartialData: true }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.data).toEqual({ cars: [ { - __typename: 'Car', + __typename: "Car", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', + __typename: "Repair", + date: "2019-05-08", }, ], }, @@ -4921,7 +5812,7 @@ describe('useQuery Hook', () => { }); }); - it('should not return partial cache data when `returnPartialData` is false', () => { + it("should not return partial cache data when `returnPartialData` is false", () => { const cache = new InMemoryCache(); const client = new ApolloClient({ cache, @@ -4946,15 +5837,15 @@ describe('useQuery Hook', () => { data: { cars: [ { - __typename: 'Car', - make: 'Ford', - model: 'Mustang', - vin: 'PONY123', + __typename: "Car", + make: "Ford", + model: "Mustang", + vin: "PONY123", repairs: [ { - __typename: 'Repair', - date: '2019-05-08', - description: 'Could not get after it.', + __typename: "Repair", + date: "2019-05-08", + description: "Could not get after it.", }, ], }, @@ -4977,18 +5868,16 @@ 
describe('useQuery Hook', () => { () => useQuery(partialQuery, { returnPartialData: false }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); }); - it('should not return partial cache data when `returnPartialData` is false and new variables are passed in', async () => { + it("should not return partial cache data when `returnPartialData` is false and new variables are passed in", async () => { const cache = new InMemoryCache(); const client = new ApolloClient({ cache, @@ -4997,7 +5886,7 @@ describe('useQuery Hook', () => { const query = gql` query MyCar($id: ID) { - car (id: $id) { + car(id: $id) { id make } @@ -5006,7 +5895,7 @@ describe('useQuery Hook', () => { const partialQuery = gql` query MyCar($id: ID) { - car (id: $id) { + car(id: $id) { id make model @@ -5019,10 +5908,10 @@ describe('useQuery Hook', () => { variables: { id: 1 }, data: { car: { - __typename: 'Car', + __typename: "Car", id: 1, - make: 'Ford', - model: 'Pinto', + make: "Ford", + model: "Pinto", }, }, }); @@ -5032,15 +5921,14 @@ describe('useQuery Hook', () => { variables: { id: 2 }, data: { car: { - __typename: 'Car', + __typename: "Car", id: 2, - make: 'Ford', - model: 'Pinto', + make: "Ford", + model: "Pinto", }, }, }); - let setId: any; const { result } = renderHook( () => { @@ -5054,20 +5942,18 @@ describe('useQuery Hook', () => { }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual({ car: { - __typename: 'Car', + __typename: "Car", id: 2, - make: 'Ford', - model: 'Pinto', + make: "Ford", + model: "Pinto", }, }); @@ -5075,16 +5961,19 @@ describe('useQuery Hook', () => { setId(1); }); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.data).toBe(undefined); }); }); - describe('Previous data', () => { - it('should persist previous data when a query is re-run', async () => { + describe("Previous data", () => { + it("should persist previous data when a query is re-run", async () => { const query = gql` query car { car { @@ -5097,17 +5986,17 @@ describe('useQuery Hook', () => { const data1 = { car: { id: 1, - make: 'Venturi', - __typename: 'Car', - } + make: "Venturi", + __typename: "Car", + }, }; const data2 = { car: { id: 2, - make: 'Wiesmann', - __typename: 'Car', - } + make: "Wiesmann", + __typename: "Car", + }, }; const mocks = [ @@ -5124,42 +6013,54 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { notifyOnNetworkStatusChange: true }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.previousData).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data1); expect(result.current.previousData).toBe(undefined); setTimeout(() => result.current.refetch()); - await waitFor(() => { - 
expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data1); expect(result.current.previousData).toEqual(data1); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data2); expect(result.current.previousData).toEqual(data1); }); - it('should persist result.previousData across multiple results', async () => { - const query: TypedDocumentNode<{ - car: { - id: string; - make: string; - }; - }, { - vin?: string; - }> = gql` + it("should persist result.previousData across multiple results", async () => { + const query: TypedDocumentNode< + { + car: { + id: string; + make: string; + }; + }, + { + vin?: string; + } + > = gql` query car($vin: String) { car(vin: $vin) { id @@ -5171,24 +6072,24 @@ describe('useQuery Hook', () => { const data1 = { car: { id: 1, - make: 'Venturi', - __typename: 'Car', + make: "Venturi", + __typename: "Car", }, }; const data2 = { car: { id: 2, - make: 'Wiesmann', - __typename: 'Car', + make: "Wiesmann", + __typename: "Car", }, }; const data3 = { car: { id: 3, - make: 'Beetle', - __typename: 'Car', + make: "Beetle", + __typename: "Car", }, }; @@ -5213,23 +6114,29 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { notifyOnNetworkStatusChange: true }), - { wrapper }, + { wrapper } ); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); expect(result.current.previousData).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data1); expect(result.current.previousData).toBe(undefined); setTimeout(() => result.current.refetch()); - await waitFor(() => { - expect(result.current.loading).toBe(true); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(true); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data1); expect(result.current.previousData).toEqual(data1); @@ -5238,63 +6145,82 @@ describe('useQuery Hook', () => { expect(result.current.data).toEqual(data1); expect(result.current.previousData).toEqual(data1); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(data3); expect(result.current.previousData).toEqual(data1); }); - it('should persist result.previousData even if query changes', async () => { + it("should persist result.previousData even if query changes", async () => { const aQuery: TypedDocumentNode<{ a: string; - }> = gql`query A { a }`; + }> = gql` + query A { + a + } + `; const abQuery: TypedDocumentNode<{ a: string; b: number; - }> = gql`query AB { a b }`; + }> = gql` + query AB { + a + b + } + `; const bQuery: TypedDocumentNode<{ b: number; - }> = gql`query B { b }`; + }> = gql` + query B { + b + } + `; let stringOfAs = ""; let countOfBs = 0; const client = new ApolloClient({ cache: new InMemoryCache(), - link: new ApolloLink(request => new Observable(observer => { - switch (request.operationName) { - case "A": { - observer.next({ - data: { - a: 
stringOfAs += 'a', - }, - }); - break; - } - case "AB": { - observer.next({ - data: { - a: stringOfAs += 'a', - b: countOfBs += 1, - }, - }); - break; - } - case "B": { - observer.next({ - data: { - b: countOfBs += 1, - }, - }); - break; - } - } - setTimeout(() => { - observer.complete(); - }, 10); - })), + link: new ApolloLink( + (request) => + new Observable((observer) => { + switch (request.operationName) { + case "A": { + observer.next({ + data: { + a: (stringOfAs += "a"), + }, + }); + break; + } + case "AB": { + observer.next({ + data: { + a: (stringOfAs += "a"), + b: (countOfBs += 1), + }, + }); + break; + } + case "B": { + observer.next({ + data: { + b: (countOfBs += 1), + }, + }); + break; + } + } + setTimeout(() => { + observer.complete(); + }, 10); + }) + ), }); const { result } = renderHook( @@ -5311,141 +6237,213 @@ describe('useQuery Hook', () => { }, { wrapper: ({ children }: any) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(true); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ a: "a" }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toBeUndefined(); + }, + { interval: 1 } + ); - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(true); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ a: "a" }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toBeUndefined(); - }, { interval: 1 }); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ a: "a" }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toBe(undefined); + }, + { interval: 1 } + ); + await expect( + await waitFor( + () => { + result.current.setQuery(abQuery); + }, + { interval: 1 } + ) + ); - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ a: "a" }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toBe(undefined); - }, { interval: 1 }); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ a: "aa", b: 1 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toEqual({ a: "a" }); + }, + { interval: 1 } + ); - await expect(await waitFor(() => { - result.current.setQuery(abQuery); - }, { interval: 1 })); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () 
=> { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ a: "aa", b: 1 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toEqual({ a: "a" }); + }, + { interval: 1 } + ); - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ a: "aa", b: 1 }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toEqual({ a: "a" }); - }, { interval: 1 }); + await waitFor( + () => { + result.current.useQueryResult.reobserve().then((result) => { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ a: "aaa", b: 2 }); + }); + }, + { interval: 1 } + ); - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ a: "aa", b: 1 }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toEqual({ a: "a" }); - }, { interval: 1 }); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ a: "aaa", b: 2 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toEqual({ a: "aa", b: 1 }); + }, + { interval: 1 } + ); - await waitFor(() => { - result.current.useQueryResult.reobserve().then(result => { - expect(result.loading).toBe(false); - expect(result.data).toEqual({ a: "aaa", b: 2 }); - }); - }, { interval: 1 }); + await waitFor( + () => { + result.current.setQuery(bQuery); + }, + { interval: 1 } + ); - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ a: "aaa", b: 2 }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toEqual({ a: "aa", b: 1 }); - }, { interval: 1 }); - - await waitFor(() => { - result.current.setQuery(bQuery); - }, { interval: 1 }); - - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ b: 3 }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toEqual({ b: 2 }); - }, { interval: 1 }); - await waitFor(() => { - const { loading } = result.current.useQueryResult; - expect(loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - const { data } = result.current.useQueryResult; - expect(data).toEqual({ b: 3 }); - }, { interval: 1 }); - await waitFor(() => { - const { previousData } = result.current.useQueryResult; - expect(previousData).toEqual({ b: 2 }); - }, { interval: 1 }); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(false); + }, + { interval: 1 } + ); + 
await waitFor( + () => { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ b: 3 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toEqual({ b: 2 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { loading } = result.current.useQueryResult; + expect(loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { data } = result.current.useQueryResult; + expect(data).toEqual({ b: 3 }); + }, + { interval: 1 } + ); + await waitFor( + () => { + const { previousData } = result.current.useQueryResult; + expect(previousData).toEqual({ b: 2 }); + }, + { interval: 1 } + ); }); it("should be cleared when variables change causes cache miss", async () => { const peopleData = [ - { id: 1, name: 'John Smith', gender: 'male' }, - { id: 2, name: 'Sara Smith', gender: 'female' }, - { id: 3, name: 'Budd Deey', gender: 'nonbinary' }, - { id: 4, name: 'Johnny Appleseed', gender: 'male' }, - { id: 5, name: 'Ada Lovelace', gender: 'female' }, + { id: 1, name: "John Smith", gender: "male" }, + { id: 2, name: "Sara Smith", gender: "female" }, + { id: 3, name: "Budd Deey", gender: "nonbinary" }, + { id: 4, name: "Johnny Appleseed", gender: "male" }, + { id: 5, name: "Ada Lovelace", gender: "female" }, ]; - const link = new ApolloLink(operation => { - return new Observable(observer => { + const link = new ApolloLink((operation) => { + return new Observable((observer) => { const { gender } = operation.variables; - new Promise(resolve => setTimeout(resolve, 300)).then(() => { + new Promise((resolve) => setTimeout(resolve, 300)).then(() => { observer.next({ data: { - people: gender === "all" ? peopleData : - gender ? peopleData.filter( - person => person.gender === gender - ) : peopleData, - } + people: + gender === "all" + ? peopleData + : gender + ? 
peopleData.filter((person) => person.gender === gender) + : peopleData, + }, }); observer.complete(); }); @@ -5477,61 +6475,75 @@ describe('useQuery Hook', () => { ); const { result, rerender } = renderHook( - ({ gender }) => useQuery(query, { - variables: { gender }, - fetchPolicy: 'network-only', - }), - { wrapper, initialProps: { gender: 'all' } }, + ({ gender }) => + useQuery(query, { + variables: { gender }, + fetchPolicy: "network-only", + }), + { wrapper, initialProps: { gender: "all" } } ); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ people: peopleData.map(({ gender, ...person }) => person), }); - rerender({ gender: 'female' }); + rerender({ gender: "female" }); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.setVariables); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ people: peopleData - .filter((person) => person.gender === 'female') + .filter((person) => person.gender === "female") .map(({ gender, ...person }) => person), }); - rerender({ gender: 'nonbinary' }); + rerender({ gender: "nonbinary" }); expect(result.current.loading).toBe(true); expect(result.current.networkStatus).toBe(NetworkStatus.setVariables); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.networkStatus).toBe(NetworkStatus.ready); expect(result.current.data).toEqual({ people: peopleData - .filter((person) => person.gender === 'nonbinary') + .filter((person) => person.gender === "nonbinary") .map(({ gender, ...person }) => person), }); }); }); - describe('defaultOptions', () => { - it('should allow polling options to be passed to the client', async () => { - const query = gql`{ hello }`; + describe("defaultOptions", () => { + it("should allow polling options to be passed to the client", async () => { + const query = gql` + { + hello + } + `; const cache = new InMemoryCache(); const link = mockSingleLink( { @@ -5545,7 +6557,7 @@ describe('useQuery Hook', () => { { request: { query }, result: { data: { hello: "world 3" } }, - }, + } ); const client = new ApolloClient({ @@ -5558,45 +6570,58 @@ describe('useQuery Hook', () => { link, }); - const { result } = renderHook( - () => useQuery(query), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { 
- expect(result.current.data).toEqual({ hello: 'world 1' }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 1" }); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: 'world 2' }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 2" }); + }, + { interval: 1 } + ); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ hello: 'world 3' }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ hello: "world 3" }); + }, + { interval: 1 } + ); }); }); - describe('canonical cache results', () => { - it('can be disabled via useQuery options', async () => { + describe("canonical cache results", () => { + it("can be disabled via useQuery options", async () => { const cache = new InMemoryCache({ canonizeResults: true, typePolicies: { @@ -5626,20 +6651,19 @@ describe('useQuery Hook', () => { cache.writeQuery({ query, data: { results }, - }) + }); const wrapper = ({ children }: any) => ( - <MockedProvider cache={cache}> - {children} - </MockedProvider> + <MockedProvider cache={cache}>{children}</MockedProvider> ); const { result, rerender } = renderHook( - ({ canonizeResults }) => useQuery(query, { - fetchPolicy: 'cache-only', - canonizeResults, - }), - { wrapper, initialProps: { canonizeResults: false } }, + ({ canonizeResults }) => + useQuery(query, { + fetchPolicy: "cache-only", + canonizeResults, + }), + { wrapper, initialProps: { canonizeResults: false } } ); expect(result.current.loading).toBe(false); @@ -5667,12 +6691,18 @@ describe('useQuery Hook', () => { }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ results }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ results }); + }, + { interval: 1 } + ); expect(result.current.data.results.length).toBe(7); resultSet = new Set(result.current.data.results); // Since canonization is happening now, the duplicate 1 results are @@ -5684,8 +6714,8 @@ describe('useQuery Hook', () => { }); }); - describe('canonical cache results', () => { - it('can be disabled via useQuery options', async () => { + describe("canonical cache results", () => { + it("can be disabled via useQuery options", async () => { const cache = new InMemoryCache({ canonizeResults: true, typePolicies: { @@ -5715,20 +6745,19 @@ describe('useQuery Hook', () => { cache.writeQuery({ query, data: { results }, - }) + }); const wrapper = ({ children }: any) => ( - <MockedProvider cache={cache}> - {children} - </MockedProvider> + <MockedProvider cache={cache}>{children}</MockedProvider> ); const { result, rerender } = renderHook( - ({ canonizeResults }) => useQuery(query, { - fetchPolicy: 'cache-only', - canonizeResults, - }), - { wrapper, initialProps: { 
canonizeResults: false } }, + ({ canonizeResults }) => + useQuery(query, { + fetchPolicy: "cache-only", + canonizeResults, + }), + { wrapper, initialProps: { canonizeResults: false } } ); expect(result.current.loading).toBe(false); @@ -5756,12 +6785,18 @@ describe('useQuery Hook', () => { }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ results }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ results }); + }, + { interval: 1 } + ); expect(result.current.data.results.length).toBe(7); resultSet = new Set(result.current.data.results); // Since canonization is happening now, the duplicate 1 results are @@ -5781,11 +6816,25 @@ describe('useQuery Hook', () => { const aQuery: TypedDocumentNode<{ a: ABFields; - }> = gql`query A { a { id name }}`; + }> = gql` + query A { + a { + id + name + } + } + `; const bQuery: TypedDocumentNode<{ b: ABFields; - }> = gql`query B { b { id name }}`; + }> = gql` + query B { + b { + id + name + } + } + `; const aData = { a: { @@ -5805,29 +6854,32 @@ describe('useQuery Hook', () => { function makeClient() { return new ApolloClient({ - cache: new InMemoryCache, - link: new ApolloLink(operation => new Observable(observer => { - switch (operation.operationName) { - case "A": - setTimeout(() => { - observer.next({ data: aData }); - observer.complete(); - }); - break; - case "B": - setTimeout(() => { - observer.next({ data: bData }); - observer.complete(); - }, 10); - break; - } - })), + cache: new InMemoryCache(), + link: new ApolloLink( + (operation) => + new Observable((observer) => { + switch (operation.operationName) { + case "A": + setTimeout(() => { + observer.next({ data: aData }); + observer.complete(); + }); + break; + case "B": + setTimeout(() => { + observer.next({ data: bData }); + observer.complete(); + }, 10); + break; + } + }) + ), }); } async function check( aFetchPolicy: WatchQueryFetchPolicy, - bFetchPolicy: WatchQueryFetchPolicy, + bFetchPolicy: WatchQueryFetchPolicy ) { const client = makeClient(); const { result } = renderHook( @@ -5839,7 +6891,7 @@ describe('useQuery Hook', () => { wrapper: ({ children }) => ( <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.a.loading).toBe(true); @@ -5857,55 +6909,39 @@ describe('useQuery Hook', () => { expect(result.current.b.data).toEqual(bData); } - it("cache-first for both", () => check( - "cache-first", - "cache-first", - )); - - it("cache-first first, cache-and-network second", () => check( - "cache-first", - "cache-and-network", - )); - - it("cache-first first, network-only second", () => check( - "cache-first", - "network-only", - )); - - it("cache-and-network for both", () => check( - "cache-and-network", - "cache-and-network", - )); - - it("cache-and-network first, cache-first second", () => check( - "cache-and-network", - "cache-first", - )); - - it("cache-and-network first, network-only second", () => check( - "cache-and-network", - "network-only", - )); - - it("network-only for both", () => check( - "network-only", - "network-only", - )); - - it("network-only first, cache-first second", () => check( - "network-only", - "cache-first", - )); - - it("network-only first, cache-and-network second", () => check( - "network-only", - "cache-and-network", - )); + it("cache-first for both", () => 
check("cache-first", "cache-first")); + + it("cache-first first, cache-and-network second", () => + check("cache-first", "cache-and-network")); + + it("cache-first first, network-only second", () => + check("cache-first", "network-only")); + + it("cache-and-network for both", () => + check("cache-and-network", "cache-and-network")); + + it("cache-and-network first, cache-first second", () => + check("cache-and-network", "cache-first")); + + it("cache-and-network first, network-only second", () => + check("cache-and-network", "network-only")); + + it("network-only for both", () => check("network-only", "network-only")); + + it("network-only first, cache-first second", () => + check("network-only", "cache-first")); + + it("network-only first, cache-and-network second", () => + check("network-only", "cache-and-network")); }); - describe('regression test issue #9204', () => { - it('should handle a simple query', async () => { - const query = gql`{ hello }`; + describe("regression test issue #9204", () => { + it("should handle a simple query", async () => { + const query = gql` + { + hello + } + `; const mocks = [ { request: { query }, @@ -5914,8 +6950,8 @@ describe('useQuery Hook', () => { ]; const Component = ({ query }: any) => { - const [counter, setCounter] = useState(0) - const result = useQuery(query) + const [counter, setCounter] = useState(0); + const result = useQuery(query); useEffect(() => { /** @@ -5925,17 +6961,19 @@ describe('useQuery Hook', () => { if (counter > 10) { console.error(`Too many results (${counter})`); } else { - setCounter(c => c + 1); + setCounter((c) => c + 1); } - }, [ - result, - result.data, - ]); + }, [result, result.data]); if (result.loading) return null; - return <div>{result.data.hello}{counter}</div>; - } + return ( + <div> + {result.data.hello} + {counter} + </div> + ); + }; render( <MockedProvider mocks={mocks}> @@ -5944,18 +6982,22 @@ describe('useQuery Hook', () => { ); await waitFor(() => { - expect(screen.getByText('world2')).toBeTruthy(); + expect(screen.getByText("world2")).toBeTruthy(); }); }); }); // https://github.com/apollographql/apollo-client/issues/10222 - describe('regression test issue #10222', () => { - it('maintains initial fetch policy when component unmounts and remounts', async () => { + describe("regression test issue #10222", () => { + it("maintains initial fetch policy when component unmounts and remounts", async () => { let helloCount = 1; - const query = gql`{ hello }`; + const query = gql` + { + hello + } + `; const link = new ApolloLink(() => { - return new Observable(observer => { + return new Observable((observer) => { const timer = setTimeout(() => { observer.next({ data: { hello: `hello ${helloCount++}` } }); observer.complete(); @@ -5963,29 +7005,30 @@ describe('useQuery Hook', () => { return () => { clearTimeout(timer); - } - }) - }) + }; + }); + }); const cache = new InMemoryCache(); const client = new ApolloClient({ link, - cache + cache, }); - let setShow: Function + let setShow: Function; const Toggler = ({ children }: { children: ReactNode }) => { const [show, _setShow] = useState(true); setShow = _setShow; return show ? 
<>{children}</> : null; - } + }; const { result } = renderHook( - () => useQuery(query, { - fetchPolicy: 'network-only', - nextFetchPolicy: 'cache-first' + () => + useQuery(query, { + fetchPolicy: "network-only", + nextFetchPolicy: "cache-first", }), { wrapper: ({ children }) => ( @@ -5993,7 +7036,7 @@ describe('useQuery Hook', () => { <Toggler>{children}</Toggler> </ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); @@ -6003,8 +7046,8 @@ describe('useQuery Hook', () => { expect(result.current.loading).toBe(false); }); - expect(result.current.data).toEqual({ hello: 'hello 1' }); - expect(cache.readQuery({ query })).toEqual({ hello: 'hello 1' }) + expect(result.current.data).toEqual({ hello: "hello 1" }); + expect(cache.readQuery({ query })).toEqual({ hello: "hello 1" }); act(() => { setShow(false); @@ -6019,16 +7062,15 @@ describe('useQuery Hook', () => { await waitFor(() => { expect(result.current.loading).toBe(false); - }) + }); - expect(result.current.data).toEqual({ hello: 'hello 2' }); - expect(cache.readQuery({ query })).toEqual({ hello: 'hello 2' }) + expect(result.current.data).toEqual({ hello: "hello 2" }); + expect(cache.readQuery({ query })).toEqual({ hello: "hello 2" }); }); }); - - describe('defer', () => { - it('should handle deferred queries', async () => { + describe("defer", () => { + it("should handle deferred queries", async () => { const query = gql` { greeting { @@ -6049,16 +7091,11 @@ describe('useQuery Hook', () => { cache: new InMemoryCache(), }); - const { result } = renderHook( - () => useQuery(query), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); @@ -6067,22 +7104,25 @@ describe('useQuery Hook', () => { result: { data: { greeting: { - message: 'Hello world', - __typename: 'Greeting', + message: "Hello world", + __typename: "Greeting", }, }, - hasNext: true + hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ greeting: { - message: 'Hello world', - __typename: 'Greeting', + message: "Hello world", + __typename: "Greeting", }, }); expect(result.current).not.toContain({ hasNext: true }); @@ -6090,39 +7130,47 @@ describe('useQuery Hook', () => { setTimeout(() => { link.simulateResult({ result: { - incremental: [{ - data: { - recipient: { - name: 'Alice', - __typename: 'Person', + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", }, - __typename: 'Greeting', + path: ["greeting"], }, - path: ['greeting'], - }], - hasNext: false + ], + hasNext: false, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - message: 'Hello world', - __typename: 'Greeting', - recipient: { - name: 'Alice', - __typename: 'Person', + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + message: "Hello world", + __typename: "Greeting", 
+ recipient: { + name: "Alice", + __typename: "Person", + }, }, - }, - }); - }, { interval: 1 }); + }); + }, + { interval: 1 } + ); }); - it('should handle deferred queries in lists', async () => { + it("should handle deferred queries in lists", async () => { const query = gql` { greetings { @@ -6143,16 +7191,11 @@ describe('useQuery Hook', () => { cache: new InMemoryCache(), }); - const { result } = renderHook( - () => useQuery(query), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); @@ -6161,106 +7204,125 @@ describe('useQuery Hook', () => { result: { data: { greetings: [ - { message: 'Hello world', __typename: 'Greeting' }, - { message: 'Hello again', __typename: 'Greeting' }, + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, ], }, - hasNext: true + hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual({ greetings: [ - { message: 'Hello world', __typename: 'Greeting' }, - { message: 'Hello again', __typename: 'Greeting' }, + { message: "Hello world", __typename: "Greeting" }, + { message: "Hello again", __typename: "Greeting" }, ], }); setTimeout(() => { link.simulateResult({ result: { - incremental: [{ - data: { - recipient: { - name: 'Alice', - __typename: 'Person', + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", }, - __typename: 'Greeting', + path: ["greetings", 0], }, - path: ['greetings', 0], - }], + ], hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greetings: [ - { - message: 'Hello world', - __typename: 'Greeting', - recipient: { name: 'Alice', __typename: 'Person' }, - }, - { message: 'Hello again', __typename: 'Greeting' }, - ], - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greetings: [ + { + message: "Hello world", + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + { message: "Hello again", __typename: "Greeting" }, + ], + }); + }, + { interval: 1 } + ); setTimeout(() => { link.simulateResult({ result: { - incremental: [{ - data: { - recipient: { - name: 'Bob', - __typename: 'Person', + incremental: [ + { + data: { + recipient: { + name: "Bob", + __typename: "Person", + }, + __typename: "Greeting", }, - __typename: 'Greeting', + path: ["greetings", 1], }, - path: ['greetings', 1], - }], - hasNext: false + ], + hasNext: false, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greetings: [ - { - message: 'Hello world', - __typename: 'Greeting', - recipient: { name: 'Alice', __typename: 'Person' }, - }, - { - message: 'Hello again', - __typename: 'Greeting', - recipient: { name: 'Bob', __typename: 'Person' 
}, - }, - ], - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greetings: [ + { + message: "Hello world", + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + { + message: "Hello again", + __typename: "Greeting", + recipient: { name: "Bob", __typename: "Person" }, + }, + ], + }); + }, + { interval: 1 } + ); }); - it('should handle deferred queries in lists, merging arrays', async () => { + it("should handle deferred queries in lists, merging arrays", async () => { const query = gql` query DeferVariation { allProducts { delivery { ...MyFragment @defer } - sku, + sku id } } @@ -6277,16 +7339,11 @@ describe('useQuery Hook', () => { cache: new InMemoryCache(), }); - const { result } = renderHook( - () => useQuery(query), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); @@ -6298,51 +7355,57 @@ describe('useQuery Hook', () => { { __typename: "Product", delivery: { - __typename: "DeliveryEstimates" + __typename: "DeliveryEstimates", }, id: "apollo-federation", - sku: "federation" + sku: "federation", }, { __typename: "Product", delivery: { - __typename: "DeliveryEstimates" + __typename: "DeliveryEstimates", }, id: "apollo-studio", - sku: "studio" - } - ] + sku: "studio", + }, + ], }, - hasNext: true + hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates" + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", }, - id: "apollo-federation", - sku: "federation" - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates" + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", }, - id: "apollo-studio", - sku: "studio" - } - ] - }); - }, { interval: 1 }); + ], + }); + }, + { interval: 1 } + ); setTimeout(() => { link.simulateResult({ @@ -6355,11 +7418,7 @@ describe('useQuery Hook', () => { estimatedDelivery: "6/25/2021", fastestDelivery: "6/24/2021", }, - path: [ - "allProducts", - 0, - "delivery" - ] + path: ["allProducts", 0, "delivery"], }, { data: { @@ -6367,49 +7426,51 @@ describe('useQuery Hook', () => { estimatedDelivery: "6/25/2021", fastestDelivery: "6/24/2021", }, - path: [ - "allProducts", - 1, - "delivery" - ] + path: ["allProducts", 1, "delivery"], }, - ] + ], }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - allProducts: [ - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021" + await waitFor( + () => { + 
expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-federation", + sku: "federation", }, - id: "apollo-federation", - sku: "federation" - }, - { - __typename: "Product", - delivery: { - __typename: "DeliveryEstimates", - estimatedDelivery: "6/25/2021", - fastestDelivery: "6/24/2021" + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-studio", + sku: "studio", }, - id: "apollo-studio", - sku: "studio" - } - ] - }); - }, { interval: 1 }); + ], + }); + }, + { interval: 1 } + ); }); - it('should handle deferred queries with fetch policy no-cache', async () => { + it("should handle deferred queries with fetch policy no-cache", async () => { const query = gql` { greeting { @@ -6431,14 +7492,12 @@ describe('useQuery Hook', () => { }); const { result } = renderHook( - () => useQuery(query, {fetchPolicy: 'no-cache'}), + () => useQuery(query, { fetchPolicy: "no-cache" }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); @@ -6448,63 +7507,77 @@ describe('useQuery Hook', () => { result: { data: { greeting: { - message: 'Hello world', - __typename: 'Greeting', + message: "Hello world", + __typename: "Greeting", }, }, - hasNext: true + hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - message: 'Hello world', - __typename: 'Greeting', - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + message: "Hello world", + __typename: "Greeting", + }, + }); + }, + { interval: 1 } + ); setTimeout(() => { link.simulateResult({ result: { - incremental: [{ - data: { - recipient: { - name: 'Alice', - __typename: 'Person', + incremental: [ + { + data: { + recipient: { + name: "Alice", + __typename: "Person", + }, + __typename: "Greeting", }, - __typename: 'Greeting', + path: ["greeting"], }, - path: ['greeting'], - }], - hasNext: false + ], + hasNext: false, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - message: 'Hello world', - __typename: 'Greeting', - recipient: { - name: 'Alice', - __typename: 'Person', + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { + name: "Alice", + __typename: "Person", + }, }, - }, - }); - }, { interval: 1 }); + }); + }, + { interval: 1 } + ); }); - it('should handle deferred queries with errors returned on the incremental batched result', async () => { + it("should handle deferred queries with errors returned on the incremental batched result", async () => { const query = gql` query { hero { @@ -6527,16 +7600,11 @@ 
describe('useQuery Hook', () => { cache: new InMemoryCache(), }); - const { result } = renderHook( - () => useQuery(query), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.data).toBe(undefined); @@ -6549,40 +7617,46 @@ describe('useQuery Hook', () => { heroFriends: [ { id: "1000", - name: "Luke Skywalker" + name: "Luke Skywalker", }, { id: "1003", - name: "Leia Organa" - } - ] - } + name: "Leia Organa", + }, + ], + }, }, - hasNext: true + hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - hero: { - heroFriends: [ - { - id: '1000', - name: 'Luke Skywalker' - }, - { - id: '1003', - name: 'Leia Organa' - }, - ], - name: "R2-D2" - } - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }); + }, + { interval: 1 } + ); setTimeout(() => { link.simulateResult({ @@ -6594,51 +7668,65 @@ describe('useQuery Hook', () => { new GraphQLError( "homeWorld for character with ID 1000 could not be fetched.", { path: ["hero", "heroFriends", 0, "homeWorld"] } - ) + ), ], data: { - "homeWorld": null, - } + homeWorld: null, + }, }, { path: ["hero", "heroFriends", 1], data: { - "homeWorld": "Alderaan", - } + homeWorld: "Alderaan", + }, }, ], - "hasNext": false - } + hasNext: false, + }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.error).toBeInstanceOf(ApolloError); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.error!.message).toBe('homeWorld for character with ID 1000 could not be fetched.'); - }, { interval: 1 }); - await waitFor(() => { - // since default error policy is "none", we do *not* return partial results - expect(result.current.data).toEqual({ - hero: { - heroFriends: [ - { - id: '1000', - name: 'Luke Skywalker' - }, - { - id: '1003', - name: 'Leia Organa' - }, - ], - name: "R2-D2" - } - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.error).toBeInstanceOf(ApolloError); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.error!.message).toBe( + "homeWorld for character with ID 1000 could not be fetched." 
+ ); + }, + { interval: 1 } + ); + await waitFor( + () => { + // since default error policy is "none", we do *not* return partial results + expect(result.current.data).toEqual({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }); + }, + { interval: 1 } + ); }); it('should handle deferred queries with errors returned on the incremental batched result and errorPolicy "all"', async () => { @@ -6668,11 +7756,9 @@ describe('useQuery Hook', () => { () => useQuery(query, { errorPolicy: "all" }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); @@ -6686,40 +7772,46 @@ describe('useQuery Hook', () => { heroFriends: [ { id: "1000", - name: "Luke Skywalker" + name: "Luke Skywalker", }, { id: "1003", - name: "Leia Organa" - } - ] - } + name: "Leia Organa", + }, + ], + }, }, - hasNext: true + hasNext: true, }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - hero: { - heroFriends: [ - { - id: '1000', - name: 'Luke Skywalker' - }, - { - id: '1003', - name: 'Leia Organa' - }, - ], - name: "R2-D2" - } - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }); + }, + { interval: 1 } + ); setTimeout(() => { link.simulateResult({ @@ -6731,73 +7823,93 @@ describe('useQuery Hook', () => { new GraphQLError( "homeWorld for character with ID 1000 could not be fetched.", { path: ["hero", "heroFriends", 0, "homeWorld"] } - ) + ), ], data: { - "homeWorld": null, + homeWorld: null, }, extensions: { - thing1: 'foo', - thing2: 'bar', + thing1: "foo", + thing2: "bar", }, }, { path: ["hero", "heroFriends", 1], data: { - "homeWorld": "Alderaan", + homeWorld: "Alderaan", }, extensions: { - thing1: 'foo', - thing2: 'bar', + thing1: "foo", + thing2: "bar", }, }, ], - "hasNext": false - } + hasNext: false, + }, }); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - // @ts-ignore - expect(result.current.label).toBe(undefined); - }, { interval: 1 }); - await waitFor(() => { - // @ts-ignore - expect(result.current.extensions).toBe(undefined); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.error).toBeInstanceOf(ApolloError); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.error!.message).toBe('homeWorld for character with ID 1000 could not be fetched.'); - }, { interval: 1 }); - await waitFor(() => { - // since default error policy is "all", we *do* return partial results - expect(result.current.data).toEqual({ - hero: { - heroFriends: [ - { - // the only difference with the previous test - // is that homeWorld is populated since errorPolicy: all - // populates both partial data and error.graphQLErrors - homeWorld: null, - id: '1000', - name: 'Luke Skywalker' - }, - { - // homeWorld is populated due to errorPolicy: all - homeWorld: "Alderaan", - id: '1003', - name: 'Leia Organa' - }, - ], - name: "R2-D2" - } - }); - }, { interval: 1 }); + 
await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + // @ts-ignore + expect(result.current.label).toBe(undefined); + }, + { interval: 1 } + ); + await waitFor( + () => { + // @ts-ignore + expect(result.current.extensions).toBe(undefined); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.error).toBeInstanceOf(ApolloError); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.error!.message).toBe( + "homeWorld for character with ID 1000 could not be fetched." + ); + }, + { interval: 1 } + ); + await waitFor( + () => { + // since default error policy is "all", we *do* return partial results + expect(result.current.data).toEqual({ + hero: { + heroFriends: [ + { + // the only difference with the previous test + // is that homeWorld is populated since errorPolicy: all + // populates both partial data and error.graphQLErrors + homeWorld: null, + id: "1000", + name: "Luke Skywalker", + }, + { + // homeWorld is populated due to errorPolicy: all + homeWorld: "Alderaan", + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }); + }, + { interval: 1 } + ); }); it('returns eventually consistent data from deferred queries with data in the cache while using a "cache-and-network" fetch policy', async () => { @@ -6822,20 +7934,18 @@ describe('useQuery Hook', () => { query, data: { greeting: { - __typename: 'Greeting', - message: 'Hello cached', - recipient: { __typename: 'Person', name: 'Cached Alice' }, + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, }, }, }); const { result } = renderHook( - () => useQuery(query, { fetchPolicy: 'cache-and-network' }), + () => useQuery(query, { fetchPolicy: "cache-and-network" }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), } ); @@ -6844,65 +7954,85 @@ describe('useQuery Hook', () => { expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toEqual({ greeting: { - message: 'Hello cached', - __typename: 'Greeting', - recipient: { __typename: 'Person', name: 'Cached Alice' }, + message: "Hello cached", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, }, }); link.simulateResult({ result: { - data: { greeting: { __typename: 'Greeting', message: 'Hello world' } }, + data: { + greeting: { __typename: "Greeting", message: "Hello world" }, + }, hasNext: true, }, }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - __typename: 'Greeting', - message: 'Hello world', - recipient: { __typename: 'Person', name: 'Cached Alice' }, - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }); + }, + { interval: 1 } + ); link.simulateResult({ result: { 
incremental: [ { data: { - recipient: { name: 'Alice', __typename: 'Person' }, - __typename: 'Greeting', + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", }, - path: ['greeting'], + path: ["greeting"], }, ], hasNext: false, }, }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - __typename: 'Greeting', - message: 'Hello world', - recipient: { __typename: 'Person', name: 'Alice' }, - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }); + }, + { interval: 1 } + ); }); it('returns eventually consistent data from deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { @@ -6925,13 +8055,13 @@ describe('useQuery Hook', () => { // We know we are writing partial data to the cache so suppress the console // warning. - const consoleSpy = jest.spyOn(console, 'error').mockImplementation(); + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); cache.writeQuery({ query, data: { greeting: { - __typename: 'Greeting', - recipient: { __typename: 'Person', name: 'Cached Alice' }, + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, }, }, }); @@ -6940,14 +8070,12 @@ describe('useQuery Hook', () => { const { result } = renderHook( () => useQuery(query, { - fetchPolicy: 'cache-first', - returnPartialData: true + fetchPolicy: "cache-first", + returnPartialData: true, }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), } ); @@ -6956,64 +8084,84 @@ describe('useQuery Hook', () => { expect(result.current.networkStatus).toBe(NetworkStatus.loading); expect(result.current.data).toEqual({ greeting: { - __typename: 'Greeting', - recipient: { __typename: 'Person', name: 'Cached Alice' }, + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, }, }); link.simulateResult({ result: { - data: { greeting: { message: 'Hello world', __typename: 'Greeting' } }, + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, hasNext: true, }, }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - __typename: 'Greeting', - message: 'Hello world', - recipient: { __typename: 'Person', name: 'Cached Alice' }, - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + __typename: "Greeting", + 
message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }); + }, + { interval: 1 } + ); link.simulateResult({ result: { incremental: [ { data: { - __typename: 'Greeting', - recipient: { name: 'Alice', __typename: 'Person' }, + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, }, - path: ['greeting'], + path: ["greeting"], }, ], hasNext: false, }, }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.networkStatus).toBe(NetworkStatus.ready); - }, { interval: 1 }); - await waitFor(() => { - expect(result.current.data).toEqual({ - greeting: { - __typename: 'Greeting', - message: 'Hello world', - recipient: { __typename: 'Person', name: 'Alice' }, - }, - }); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.networkStatus).toBe(NetworkStatus.ready); + }, + { interval: 1 } + ); + await waitFor( + () => { + expect(result.current.data).toEqual({ + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }); + }, + { interval: 1 } + ); }); }); @@ -7027,7 +8175,7 @@ describe('useQuery Hook', () => { fetchPolicy: WatchQueryFetchPolicy, initialQueryValue: TestQueryValue | undefined, shouldFetchOnFirstRender: boolean, - shouldFetchOnSecondRender: boolean + shouldFetchOnSecondRender: boolean, ] >([ [`cache-first`, emptyData, true, false], @@ -7044,7 +8192,12 @@ describe('useQuery Hook', () => { [`standby`, cacheData, false, false], ])( "fetchPolicy %s, cache: %p should fetch during `disableNetworkFetches`: %p and after `disableNetworkFetches` has been disabled: %p", - async (policy, initialQueryValue, shouldFetchOnFirstRender, shouldFetchOnSecondRender) => { + async ( + policy, + initialQueryValue, + shouldFetchOnFirstRender, + shouldFetchOnSecondRender + ) => { const query: TypedDocumentNode<TestQueryValue> = gql` query CallMe { something @@ -7052,10 +8205,10 @@ describe('useQuery Hook', () => { `; const link = new MockLink([ - {request: {query}, result: {data: { something: "bar" }}}, - {request: {query}, result: {data: { something: "baz" }}}, + { request: { query }, result: { data: { something: "bar" } } }, + { request: { query }, result: { data: { something: "baz" } } }, ]); - const requestSpy = jest.spyOn(link, 'request'); + const requestSpy = jest.spyOn(link, "request"); const client = new ApolloClient({ cache: new InMemoryCache(), @@ -7067,13 +8220,18 @@ describe('useQuery Hook', () => { client.disableNetworkFetches = true; const { rerender } = renderHook( - () => useQuery(query, { fetchPolicy: policy, nextFetchPolicy: policy }), + () => + useQuery(query, { fetchPolicy: policy, nextFetchPolicy: policy }), { - wrapper: ({ children }) => <ApolloProvider client={client}>{children}</ApolloProvider>, + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), } ); - expect(requestSpy).toHaveBeenCalledTimes(shouldFetchOnFirstRender ? 1 : 0); + expect(requestSpy).toHaveBeenCalledTimes( + shouldFetchOnFirstRender ? 1 : 0 + ); // We need to wait a moment before the rerender for everything to settle down. 
// This part is unfortunately bound to be flaky - but in some cases there is @@ -7084,8 +8242,26 @@ describe('useQuery Hook', () => { client.disableNetworkFetches = false; rerender(); - expect(requestSpy).toHaveBeenCalledTimes(shouldFetchOnSecondRender ? 1 : 0); + expect(requestSpy).toHaveBeenCalledTimes( + shouldFetchOnSecondRender ? 1 : 0 + ); } ); }); }); + +describe.skip("Type Tests", () => { + test("NoInfer prevents adding arbitrary additional variables", () => { + const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>; + const { variables } = useQuery(typedNode, { + variables: { + bar: 4, + // @ts-expect-error + nonExistingVariable: "string", + }, + }); + variables?.bar; + // @ts-expect-error + variables?.nonExistingVariable; + }); +}); diff --git a/src/react/hooks/__tests__/useReactiveVar.test.tsx b/src/react/hooks/__tests__/useReactiveVar.test.tsx --- a/src/react/hooks/__tests__/useReactiveVar.test.tsx +++ b/src/react/hooks/__tests__/useReactiveVar.test.tsx @@ -5,7 +5,7 @@ import { itAsync } from "../../../testing"; import { makeVar } from "../../../core"; import { useReactiveVar } from "../useReactiveVar"; -const IS_REACT_18 = React.version.startsWith('18'); +const IS_REACT_18 = React.version.startsWith("18"); describe("useReactiveVar Hook", () => { it("works with one component", async () => { @@ -36,7 +36,7 @@ describe("useReactiveVar Hook", () => { return null; } - render(<Component/>); + render(<Component />); await waitFor(() => { expect(renderCount).toBe(3); @@ -46,89 +46,92 @@ describe("useReactiveVar Hook", () => { }); }); - itAsync("works when two components share a variable", async (resolve, reject) => { - const counterVar = makeVar(0); + itAsync( + "works when two components share a variable", + async (resolve, reject) => { + const counterVar = makeVar(0); - let parentRenderCount = 0; - function Parent() { - const count = useReactiveVar(counterVar); + let parentRenderCount = 0; + function Parent() { + const count = useReactiveVar(counterVar); - switch (++parentRenderCount) { - case 1: - expect(count).toBe(0); - break; - case 2: - expect(count).toBe(1); - break; - case 3: - expect(count).toBe(11); - break; - default: - reject(`too many (${parentRenderCount}) parent renders`); + switch (++parentRenderCount) { + case 1: + expect(count).toBe(0); + break; + case 2: + expect(count).toBe(1); + break; + case 3: + expect(count).toBe(11); + break; + default: + reject(`too many (${parentRenderCount}) parent renders`); + } + + return <Child />; } - return <Child/>; - } + let childRenderCount = 0; + function Child() { + const count = useReactiveVar(counterVar); - let childRenderCount = 0; - function Child() { - const count = useReactiveVar(counterVar); + switch (++childRenderCount) { + case 1: + expect(count).toBe(0); + break; + case 2: + expect(count).toBe(1); + break; + case 3: + expect(count).toBe(11); + break; + default: + reject(`too many (${childRenderCount}) child renders`); + } - switch (++childRenderCount) { - case 1: - expect(count).toBe(0); - break; - case 2: - expect(count).toBe(1); - break; - case 3: - expect(count).toBe(11); - break; - default: - reject(`too many (${childRenderCount}) child renders`); + return null; } - return null; - } - - render(<Parent/>); + render(<Parent />); - await waitFor(() => { - expect(parentRenderCount).toBe(1); - }); + await waitFor(() => { + expect(parentRenderCount).toBe(1); + }); - await waitFor(() => { - expect(childRenderCount).toBe(1); - }); + await waitFor(() => { + expect(childRenderCount).toBe(1); + }); - 
expect(counterVar()).toBe(0); - act(() => { - counterVar(1); - }); + expect(counterVar()).toBe(0); + act(() => { + counterVar(1); + }); - await waitFor(() => { - expect(parentRenderCount).toBe(2); - }); - await waitFor(() => { - expect(childRenderCount).toBe(2); - }); + await waitFor(() => { + expect(parentRenderCount).toBe(2); + }); + await waitFor(() => { + expect(childRenderCount).toBe(2); + }); - expect(counterVar()).toBe(1); - act(() => { - counterVar(counterVar() + 10); - }); + expect(counterVar()).toBe(1); + act(() => { + counterVar(counterVar() + 10); + }); - await waitFor(() => { - expect(parentRenderCount).toBe(3); - }); - await waitFor(() => { - expect(childRenderCount).toBe(3); - }); + await waitFor(() => { + expect(parentRenderCount).toBe(3); + }); + await waitFor(() => { + expect(childRenderCount).toBe(3); + }); - expect(counterVar()).toBe(11); + expect(counterVar()).toBe(11); - resolve(); - }); + resolve(); + } + ); it("does not update if component has been unmounted", async () => { const counterVar = makeVar(0); @@ -168,20 +171,20 @@ describe("useReactiveVar Hook", () => { return error.apply(this, args); }; - const { unmount } = render(<Component/>); + const { unmount } = render(<Component />); await waitFor(() => { expect(attemptedUpdateAfterUnmount).toBe(true); }); await waitFor(() => { expect(renderCount).toBe(3); - }) + }); await waitFor(() => { expect(counterVar()).toBe(6); - }) + }); await waitFor(() => { expect(consoleErrorArgs).toEqual([]); - }) + }); console.error = error; }); @@ -196,13 +199,13 @@ describe("useReactiveVar Hook", () => { counterVar(1); }, []); - return (<div>{count}</div>); + return <div>{count}</div>; } function ComponentTwo() { const count = useReactiveVar(counterVar); - return (<div>{count}</div>); + return <div>{count}</div>; } render( @@ -223,7 +226,7 @@ describe("useReactiveVar Hook", () => { function ComponentOne() { const count = useReactiveVar(counterVar); - return (<div>{count}</div>); + return <div>{count}</div>; } function ComponentTwo() { @@ -233,7 +236,7 @@ describe("useReactiveVar Hook", () => { counterVar(1); }, []); - return (<div>{count}</div>); + return <div>{count}</div>; } render( @@ -264,9 +267,7 @@ describe("useReactiveVar Hook", () => { }); }, []); - return ( - <div /> - ); + return <div />; } render( @@ -291,33 +292,36 @@ describe("useReactiveVar Hook", () => { resolve(); }); - itAsync("works with multiple synchronous calls", async (resolve, reject) => { - const counterVar = makeVar(0); - function Component() { - const count = useReactiveVar(counterVar); - - return (<div>{count}</div>); - } - - render(<Component />); - Promise.resolve().then(() => { - counterVar(1); - counterVar(2); - counterVar(3); - counterVar(4); - counterVar(5); - counterVar(6); - counterVar(7); - counterVar(8); - counterVar(9); - counterVar(10); - }); + itAsync( + "works with multiple synchronous calls", + async (resolve, reject) => { + const counterVar = makeVar(0); + function Component() { + const count = useReactiveVar(counterVar); - await waitFor(() => { - expect(screen.getAllByText("10")).toHaveLength(1); - }); + return <div>{count}</div>; + } - resolve(); - }); + render(<Component />); + Promise.resolve().then(() => { + counterVar(1); + counterVar(2); + counterVar(3); + counterVar(4); + counterVar(5); + counterVar(6); + counterVar(7); + counterVar(8); + counterVar(9); + counterVar(10); + }); + + await waitFor(() => { + expect(screen.getAllByText("10")).toHaveLength(1); + }); + + resolve(); + } + ); }); }); diff --git 
a/src/react/hooks/__tests__/useSubscription.test.tsx b/src/react/hooks/__tests__/useSubscription.test.tsx --- a/src/react/hooks/__tests__/useSubscription.test.tsx +++ b/src/react/hooks/__tests__/useSubscription.test.tsx @@ -1,20 +1,22 @@ -import React from 'react'; -import { renderHook, waitFor } from '@testing-library/react'; -import gql from 'graphql-tag'; - -import { ApolloClient, ApolloError, ApolloLink, concat } from '../../../core'; -import { PROTOCOL_ERRORS_SYMBOL } from '../../../errors'; -import { InMemoryCache as Cache } from '../../../cache'; -import { ApolloProvider, resetApolloContext } from '../../context'; -import { MockSubscriptionLink } from '../../../testing'; -import { useSubscription } from '../useSubscription'; - -describe('useSubscription Hook', () => { - afterEach(() => { - resetApolloContext(); - }); - - it('should handle a simple subscription properly', async () => { +import React from "react"; +import { renderHook, waitFor } from "@testing-library/react"; +import gql from "graphql-tag"; + +import { + ApolloClient, + ApolloError, + ApolloLink, + concat, + TypedDocumentNode, +} from "../../../core"; +import { PROTOCOL_ERRORS_SYMBOL } from "../../../errors"; +import { InMemoryCache as Cache } from "../../../cache"; +import { ApolloProvider } from "../../context"; +import { MockSubscriptionLink } from "../../../testing"; +import { useSubscription } from "../useSubscription"; + +describe("useSubscription Hook", () => { + it("should handle a simple subscription properly", async () => { const subscription = gql` subscription { car { @@ -23,54 +25,60 @@ describe('useSubscription Hook', () => { } `; - const results = ['Audi', 'BMW', 'Mercedes', 'Hyundai'].map(make => ({ - result: { data: { car: { make } } } + const results = ["Audi", "BMW", "Mercedes", "Hyundai"].map((make) => ({ + result: { data: { car: { make } } }, })); const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); - - const { result } = renderHook( - () => useSubscription(subscription), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useSubscription(subscription), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); setTimeout(() => link.simulateResult(results[0])); - await waitFor(() => { - expect(result.current.data).toEqual(results[0].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual(results[0].result.data); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); setTimeout(() => link.simulateResult(results[1])); - await waitFor(() => { - expect(result.current.data).toEqual(results[1].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual(results[1].result.data); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); setTimeout(() => link.simulateResult(results[2])); - await waitFor(() => { - expect(result.current.data).toEqual(results[2].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual(results[2].result.data); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); setTimeout(() => 
link.simulateResult(results[3])); - await waitFor(() => { - expect(result.current.data).toEqual(results[3].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual(results[3].result.data); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); }); - it('should call onError after error results', async () => { + it("should call onError after error results", async () => { const subscription = gql` subscription { car { @@ -79,8 +87,8 @@ describe('useSubscription Hook', () => { } `; - const results = ['Audi', 'BMW', 'Mercedes', 'Hyundai'].map(make => ({ - result: { data: { car: { make } } } + const results = ["Audi", "BMW", "Mercedes", "Hyundai"].map((make) => ({ + result: { data: { car: { make } } }, })); const errorResult = { @@ -91,38 +99,41 @@ describe('useSubscription Hook', () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); - const onError = jest.fn(); const { result } = renderHook( () => useSubscription(subscription, { onError }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); setTimeout(() => link.simulateResult(results[0])); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.data).toEqual(results[0].result.data); setTimeout(() => link.simulateResult(errorResult)); - await waitFor(() => { - expect(onError).toHaveBeenCalledTimes(1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onError).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); }); - it('should call onComplete after subscription is complete', async () => { + it("should call onComplete after subscription is complete", async () => { const subscription = gql` subscription { car { @@ -131,37 +142,37 @@ describe('useSubscription Hook', () => { } `; - const results = [{ - result: { data: { car: { make: 'Audi' } } } - }]; + const results = [ + { + result: { data: { car: { make: "Audi" } } }, + }, + ]; const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const onComplete = jest.fn(); - renderHook( - () => useSubscription(subscription, { onComplete }), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + renderHook(() => useSubscription(subscription, { onComplete }), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); link.simulateResult(results[0]); setTimeout(() => link.simulateComplete()); - await waitFor(() => { - expect(onComplete).toHaveBeenCalledTimes(1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onComplete).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); }); - it('should cleanup after the subscription component has been unmounted', async () => { + it("should cleanup after the subscription component has been unmounted", async () => { const subscription = gql` subscription { car { @@ -172,37 +183,39 @@ 
describe('useSubscription Hook', () => { const results = [ { - result: { data: { car: { make: 'Pagani' } } } - } + result: { data: { car: { make: "Pagani" } } }, + }, ]; const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const onData = jest.fn(); const { result, unmount } = renderHook( - () => useSubscription(subscription, { - onData, - }), + () => + useSubscription(subscription, { + onData, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); setTimeout(() => link.simulateResult(results[0])); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(results[0].result.data); setTimeout(() => { @@ -219,7 +232,7 @@ describe('useSubscription Hook', () => { expect(onData).toHaveBeenCalledTimes(1); }); - it('should never execute a subscription with the skip option', async () => { + it("should never execute a subscription with the skip option", async () => { const subscription = gql` subscription { car { @@ -233,46 +246,50 @@ describe('useSubscription Hook', () => { link.onSetup(onSetup); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const onData = jest.fn(); const { result, unmount, rerender } = renderHook( - ({ variables }) => useSubscription(subscription, { - variables, - skip: true, - onData, - }), + ({ variables }) => + useSubscription(subscription, { + variables, + skip: true, + onData, + }), { initialProps: { variables: { - foo: 'bar' - } + foo: "bar", + }, }, wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ) - }, + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } ); expect(result.current.loading).toBe(false); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); - rerender({ variables: { foo: 'bar2' }}); - await expect(waitFor(() => { - expect(result.current.data).not.toBe(undefined); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + rerender({ variables: { foo: "bar2" } }); + await expect( + waitFor( + () => { + expect(result.current.data).not.toBe(undefined); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); expect(onSetup).toHaveBeenCalledTimes(0); expect(onData).toHaveBeenCalledTimes(0); unmount(); }); - it('should create a subscription after skip has changed from true to a falsy value', async () => { + it("should create a subscription after skip has changed from true to a falsy value", async () => { const subscription = gql` subscription { car { @@ -283,28 +300,26 @@ describe('useSubscription Hook', () => { const results = [ { - result: { data: { car: { make: 'Pagani' } } } + result: { data: { car: { make: "Pagani" } } }, }, { - result: { data: { car: { make: 'Scoop' } } } - } + result: { data: { car: { make: "Scoop" } } }, + }, ]; const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: 
false }), }); const { result, rerender } = renderHook( ({ skip }) => useSubscription(subscription, { skip }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), initialProps: { skip: true }, - }, + } ); expect(result.current.loading).toBe(false); @@ -320,9 +335,12 @@ describe('useSubscription Hook', () => { link.simulateResult(results[0]); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(results[0].result.data); expect(result.current.error).toBe(undefined); @@ -338,9 +356,14 @@ describe('useSubscription Hook', () => { expect(result.current.data).toBe(undefined); expect(result.current.error).toBe(undefined); - await expect(waitFor(() => { - expect(result.current.data).not.toBe(undefined); - }, { interval: 1, timeout: 20 })).rejects.toThrow(); + await expect( + waitFor( + () => { + expect(result.current.data).not.toBe(undefined); + }, + { interval: 1, timeout: 20 } + ) + ).rejects.toThrow(); // ensure state persists across rerenders rerender({ skip: false }); @@ -352,14 +375,17 @@ describe('useSubscription Hook', () => { link.simulateResult(results[1]); }); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.data).toEqual(results[1].result.data); expect(result.current.error).toBe(undefined); }); - it('should share context set in options', async () => { + it("should share context set in options", async () => { const subscription = gql` subscription { car { @@ -368,32 +394,31 @@ describe('useSubscription Hook', () => { } `; - const results = ['Audi', 'BMW'].map(make => ({ - result: { data: { car: { make } } } + const results = ["Audi", "BMW"].map((make) => ({ + result: { data: { car: { make } } }, })); let context: string; const link = new MockSubscriptionLink(); const contextLink = new ApolloLink((operation, forward) => { - context = operation.getContext()?.make + context = operation.getContext()?.make; return forward(operation); }); const client = new ApolloClient({ link: concat(contextLink, link), - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const { result } = renderHook( - () => useSubscription(subscription, { - context: { make: 'Audi' }, - }), + () => + useSubscription(subscription, { + context: { make: "Audi" }, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); @@ -403,9 +428,12 @@ describe('useSubscription Hook', () => { link.simulateResult(results[0]); }, 100); - await waitFor(() => { - expect(result.current.data).toEqual(results[0].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.data).toEqual(results[0].result.data); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.error).toBe(undefined); @@ -413,16 +441,19 @@ describe('useSubscription Hook', () => { link.simulateResult(results[1]); }); - await waitFor(() => { - expect(result.current.data).toEqual(results[1].result.data); - }, { interval: 1 }); + await waitFor( + () => { + 
expect(result.current.data).toEqual(results[1].result.data); + }, + { interval: 1 } + ); expect(result.current.loading).toBe(false); expect(result.current.error).toBe(undefined); - expect(context!).toBe('Audi'); + expect(context!).toBe("Audi"); }); - it('should handle multiple subscriptions properly', async () => { + it("should handle multiple subscriptions properly", async () => { const subscription = gql` subscription { car { @@ -431,14 +462,14 @@ describe('useSubscription Hook', () => { } `; - const results = ['Audi', 'BMW'].map(make => ({ - result: { data: { car: { make } } } + const results = ["Audi", "BMW"].map((make) => ({ + result: { data: { car: { make } } }, })); const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); const { result } = renderHook( @@ -448,11 +479,9 @@ describe('useSubscription Hook', () => { }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.sub1.loading).toBe(true); @@ -466,9 +495,12 @@ describe('useSubscription Hook', () => { link.simulateResult(results[0]); }); - await waitFor(() => { - expect(result.current.sub1.data).toEqual(results[0].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.sub1.data).toEqual(results[0].result.data); + }, + { interval: 1 } + ); expect(result.current.sub1.loading).toBe(false); expect(result.current.sub1.error).toBe(undefined); expect(result.current.sub2.loading).toBe(false); @@ -479,9 +511,12 @@ describe('useSubscription Hook', () => { link.simulateResult(results[1]); }); - await waitFor(() => { - expect(result.current.sub1.data).toEqual(results[1].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.sub1.data).toEqual(results[1].result.data); + }, + { interval: 1 } + ); expect(result.current.sub1.loading).toBe(false); expect(result.current.sub1.error).toBe(undefined); expect(result.current.sub2.loading).toBe(false); @@ -489,8 +524,8 @@ describe('useSubscription Hook', () => { expect(result.current.sub2.data).toEqual(results[1].result.data); }); - it('should handle immediate completions gracefully', async () => { - const errorSpy = jest.spyOn(console, 'error').mockImplementation(() => {}); + it("should handle immediate completions gracefully", async () => { + const errorSpy = jest.spyOn(console, "error").mockImplementation(() => {}); const subscription = gql` subscription { @@ -503,19 +538,14 @@ describe('useSubscription Hook', () => { const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); - const { result } = renderHook( - () => useSubscription(subscription), - { - wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ), - }, - ); + const { result } = renderHook(() => useSubscription(subscription), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); setTimeout(() => { // Simulating the behavior of HttpLink, which calls next and complete in sequence. 
@@ -525,21 +555,26 @@ describe('useSubscription Hook', () => { expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); - await waitFor(() => { - expect(result.current.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(null); expect(errorSpy).toHaveBeenCalledTimes(1); - expect(errorSpy.mock.calls[0][0]).toBe( - "Missing field 'car' while writing result {}", - ); + expect(errorSpy.mock.calls[0]).toStrictEqual([ + "Missing field '%s' while writing result %o", + "car", + Object.create(null), + ]); errorSpy.mockRestore(); }); - it('should handle immediate completions with multiple subscriptions gracefully', async () => { - const errorSpy = jest.spyOn(console, 'error').mockImplementation(() => {}); + it("should handle immediate completions with multiple subscriptions gracefully", async () => { + const errorSpy = jest.spyOn(console, "error").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -562,11 +597,9 @@ describe('useSubscription Hook', () => { }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.sub1.loading).toBe(true); @@ -584,9 +617,12 @@ describe('useSubscription Hook', () => { link.simulateResult({ result: { data: null } }, /* complete */ true); }); - await waitFor(() => { - expect(result.current.sub1.loading).toBe(false); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.sub1.loading).toBe(false); + }, + { interval: 1 } + ); expect(result.current.sub1.error).toBe(undefined); expect(result.current.sub1.data).toBe(null); @@ -598,20 +634,26 @@ describe('useSubscription Hook', () => { expect(result.current.sub3.data).toBe(null); expect(errorSpy).toHaveBeenCalledTimes(3); - expect(errorSpy.mock.calls[0][0]).toBe( - "Missing field 'car' while writing result {}", - ); - expect(errorSpy.mock.calls[1][0]).toBe( - "Missing field 'car' while writing result {}", - ); - expect(errorSpy.mock.calls[2][0]).toBe( - "Missing field 'car' while writing result {}", - ); + expect(errorSpy.mock.calls[0]).toStrictEqual([ + "Missing field '%s' while writing result %o", + "car", + Object.create(null), + ]); + expect(errorSpy.mock.calls[1]).toStrictEqual([ + "Missing field '%s' while writing result %o", + "car", + Object.create(null), + ]); + expect(errorSpy.mock.calls[2]).toStrictEqual([ + "Missing field '%s' while writing result %o", + "car", + Object.create(null), + ]); errorSpy.mockRestore(); }); test("should warn when using 'onSubscriptionData' and 'onData' together", () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -627,26 +669,29 @@ describe('useSubscription Hook', () => { }); renderHook( - () => useSubscription(subscription, { - onData: jest.fn(), - onSubscriptionData: jest.fn(), - }), + () => + useSubscription(subscription, { + onData: jest.fn(), + onSubscriptionData: jest.fn(), + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); 
expect(warningSpy).toHaveBeenCalledTimes(1); - expect(warningSpy).toHaveBeenCalledWith(expect.stringContaining("supports only the 'onSubscriptionData' or 'onData' option")); + expect(warningSpy).toHaveBeenCalledWith( + expect.stringContaining( + "supports only the 'onSubscriptionData' or 'onData' option" + ) + ); warningSpy.mockRestore(); }); test("prefers 'onData' when using 'onSubscriptionData' and 'onData' together", async () => { - jest.spyOn(console, 'warn').mockImplementation(() => {}); + jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -657,8 +702,8 @@ describe('useSubscription Hook', () => { const results = [ { - result: { data: { car: { make: 'Pagani' } } } - } + result: { data: { car: { make: "Pagani" } } }, + }, ]; const link = new MockSubscriptionLink(); @@ -671,28 +716,30 @@ describe('useSubscription Hook', () => { const onSubscriptionData = jest.fn(); renderHook( - () => useSubscription(subscription, { - onData, - onSubscriptionData, - }), + () => + useSubscription(subscription, { + onData, + onSubscriptionData, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); setTimeout(() => link.simulateResult(results[0])); - await waitFor(() => { - expect(onData).toHaveBeenCalledTimes(1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onData).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); expect(onSubscriptionData).toHaveBeenCalledTimes(0); }); test("uses 'onSubscriptionData' when 'onData' is absent", async () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -703,8 +750,8 @@ describe('useSubscription Hook', () => { const results = [ { - result: { data: { car: { make: 'Pagani' } } } - } + result: { data: { car: { make: "Pagani" } } }, + }, ]; const link = new MockSubscriptionLink(); @@ -716,27 +763,29 @@ describe('useSubscription Hook', () => { const onSubscriptionData = jest.fn(); renderHook( - () => useSubscription(subscription, { - onSubscriptionData, - }), + () => + useSubscription(subscription, { + onSubscriptionData, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); setTimeout(() => link.simulateResult(results[0])); - await waitFor(() => { - expect(onSubscriptionData).toHaveBeenCalledTimes(1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onSubscriptionData).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); warningSpy.mockRestore(); }); test("only warns once using `onSubscriptionData`", () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -752,16 +801,15 @@ describe('useSubscription Hook', () => { }); const { rerender } = renderHook( - () => useSubscription(subscription, { - onSubscriptionData: jest.fn(), - }), + () => + useSubscription(subscription, { + onSubscriptionData: jest.fn(), + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); rerender(); @@ -771,7 +819,7 @@ 
describe('useSubscription Hook', () => { }); test("should warn when using 'onComplete' and 'onSubscriptionComplete' together", () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -787,26 +835,29 @@ describe('useSubscription Hook', () => { }); renderHook( - () => useSubscription(subscription, { - onComplete: jest.fn(), - onSubscriptionComplete: jest.fn(), - }), + () => + useSubscription(subscription, { + onComplete: jest.fn(), + onSubscriptionComplete: jest.fn(), + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(warningSpy).toHaveBeenCalledTimes(1); - expect(warningSpy).toHaveBeenCalledWith(expect.stringContaining("supports only the 'onSubscriptionComplete' or 'onComplete' option")); + expect(warningSpy).toHaveBeenCalledWith( + expect.stringContaining( + "supports only the 'onSubscriptionComplete' or 'onComplete' option" + ) + ); warningSpy.mockRestore(); }); test("prefers 'onComplete' when using 'onComplete' and 'onSubscriptionComplete' together", async () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -815,9 +866,11 @@ describe('useSubscription Hook', () => { } `; - const results = [{ - result: { data: { car: { make: 'Audi' } } } - }]; + const results = [ + { + result: { data: { car: { make: "Audi" } } }, + }, + ]; const link = new MockSubscriptionLink(); const client = new ApolloClient({ @@ -829,31 +882,33 @@ describe('useSubscription Hook', () => { const onSubscriptionComplete = jest.fn(); renderHook( - () => useSubscription(subscription, { - onComplete, - onSubscriptionComplete, - }), + () => + useSubscription(subscription, { + onComplete, + onSubscriptionComplete, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); link.simulateResult(results[0]); setTimeout(() => link.simulateComplete()); - await waitFor(() => { - expect(onComplete).toHaveBeenCalledTimes(1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onComplete).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); expect(onSubscriptionComplete).toHaveBeenCalledTimes(0); warningSpy.mockRestore(); }); test("uses 'onSubscriptionComplete' when 'onComplete' is absent", async () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -862,9 +917,11 @@ describe('useSubscription Hook', () => { } `; - const results = [{ - result: { data: { car: { make: 'Audi' } } } - }]; + const results = [ + { + result: { data: { car: { make: "Audi" } } }, + }, + ]; const link = new MockSubscriptionLink(); const client = new ApolloClient({ @@ -875,29 +932,31 @@ describe('useSubscription Hook', () => { const onSubscriptionComplete = jest.fn(); renderHook( - () => useSubscription(subscription, { - onSubscriptionComplete, - }), + () => + useSubscription(subscription, { + onSubscriptionComplete, + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + 
<ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); link.simulateResult(results[0]); setTimeout(() => link.simulateComplete()); - await waitFor(() => { - expect(onSubscriptionComplete).toHaveBeenCalledTimes(1); - }, { interval: 1 }); + await waitFor( + () => { + expect(onSubscriptionComplete).toHaveBeenCalledTimes(1); + }, + { interval: 1 } + ); warningSpy.mockRestore(); }); test("only warns once using `onSubscriptionComplete`", () => { - const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const warningSpy = jest.spyOn(console, "warn").mockImplementation(() => {}); const subscription = gql` subscription { car { @@ -913,16 +972,15 @@ describe('useSubscription Hook', () => { }); const { rerender } = renderHook( - () => useSubscription(subscription, { - onSubscriptionComplete: jest.fn(), - }), + () => + useSubscription(subscription, { + onSubscriptionComplete: jest.fn(), + }), { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); rerender(); @@ -931,8 +989,8 @@ describe('useSubscription Hook', () => { warningSpy.mockRestore(); }); - describe('multipart subscriptions', () => { - it('should handle a simple subscription properly', async () => { + describe("multipart subscriptions", () => { + it("should handle a simple subscription properly", async () => { const subscription = gql` subscription ANewDieWasCreated { aNewDieWasCreated { @@ -951,54 +1009,55 @@ describe('useSubscription Hook', () => { extensions: { [PROTOCOL_ERRORS_SYMBOL]: [ { - message: 'cannot read message from websocket', + message: "cannot read message from websocket", extensions: [ { - code: "WEBSOCKET_MESSAGE_ERROR" - } + code: "WEBSOCKET_MESSAGE_ERROR", + }, ], }, ], - } + }, }, }, - ] + ]; const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new Cache({ addTypename: false }), }); let renderCount = 0; const { result } = renderHook( () => { renderCount++; - return useSubscription(subscription) + return useSubscription(subscription); }, { wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> + <ApolloProvider client={client}>{children}</ApolloProvider> ), - }, + } ); expect(result.current.loading).toBe(true); expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(undefined); link.simulateResult(results[0]); expect(renderCount).toBe(1); - await waitFor(() => { - expect(result.current.error).toBeInstanceOf(ApolloError); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.error).toBeInstanceOf(ApolloError); + }, + { interval: 1 } + ); expect(result.current.error!.protocolErrors[0].message).toBe( "cannot read message from websocket" ); }); }); - it('should handle simple subscription after old in-flight teardown immediately \ -followed by new in-flight setup', async () => { + it("should handle simple subscription after old in-flight teardown immediately \ +followed by new in-flight setup", async () => { const subscription = gql` subscription { car { @@ -1007,62 +1066,82 @@ followed by new in-flight setup', async () => { } `; - const results = ['Audi', 'BMW'].map(make => ({ + const results = ["Audi", "BMW"].map((make) => ({ result: { data: { car: { make } } }, })); const link = new MockSubscriptionLink(); const client = new ApolloClient({ link, - cache: new Cache({ addTypename: false }) + cache: new 
Cache({ addTypename: false }), }); const { result, unmount, rerender } = renderHook( ({ coin }) => { const heads = useSubscription(subscription, { variables: {}, - skip: coin === 'tails', - context: { coin: 'heads' } + skip: coin === "tails", + context: { coin: "heads" }, }); const tails = useSubscription(subscription, { variables: {}, - skip: coin === 'heads', - context: { coin: 'tails' } + skip: coin === "heads", + context: { coin: "tails" }, }); return { heads, tails }; }, { initialProps: { - coin: 'heads' + coin: "heads", }, wrapper: ({ children }) => ( - <ApolloProvider client={client}> - {children} - </ApolloProvider> - ) - }, + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + } ); - rerender({ coin: 'tails' }); + rerender({ coin: "tails" }); - await new Promise(resolve => setTimeout(() => resolve('wait'), 20)); + await new Promise((resolve) => setTimeout(() => resolve("wait"), 20)); link.simulateResult(results[0]); - await waitFor(() => { - expect(result.current.tails.data).toEqual(results[0].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.tails.data).toEqual(results[0].result.data); + }, + { interval: 1 } + ); expect(result.current.heads.data).toBeUndefined(); - rerender({ coin: 'heads' }); + rerender({ coin: "heads" }); link.simulateResult(results[1]); - await waitFor(() => { - expect(result.current.heads.data).toEqual(results[1].result.data); - }, { interval: 1 }); + await waitFor( + () => { + expect(result.current.heads.data).toEqual(results[1].result.data); + }, + { interval: 1 } + ); expect(result.current.tails.data).toBeUndefined(); unmount(); }); }); + +describe.skip("Type Tests", () => { + test("NoInfer prevents adding arbitrary additional variables", () => { + const typedNode = {} as TypedDocumentNode<{ foo: string }, { bar: number }>; + const { variables } = useSubscription(typedNode, { + variables: { + bar: 4, + // @ts-expect-error + nonExistingVariable: "string", + }, + }); + variables?.bar; + // @ts-expect-error + variables?.nonExistingVariable; + }); +}); diff --git a/src/react/hooks/__tests__/useSuspenseQuery.test.tsx b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx new file mode 100644 --- /dev/null +++ b/src/react/hooks/__tests__/useSuspenseQuery.test.tsx @@ -0,0 +1,10324 @@ +import React, { Fragment, StrictMode, Suspense } from "react"; +import { + act, + screen, + render, + renderHook, + waitFor, + RenderHookOptions, +} from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { ErrorBoundary } from "react-error-boundary"; +import { GraphQLError } from "graphql"; +import { InvariantError } from "ts-invariant"; +import { equal } from "@wry/equality"; +import { expectTypeOf } from "expect-type"; + +import { + gql, + ApolloCache, + ApolloClient, + ApolloError, + ApolloLink, + DocumentNode, + InMemoryCache, + Observable, + OperationVariables, + SubscribeToMoreOptions, + TypedDocumentNode, + split, + NetworkStatus, + ApolloQueryResult, + ErrorPolicy, +} from "../../../core"; +import { + DeepPartial, + compact, + concatPagination, + getMainDefinition, + offsetLimitPagination, +} from "../../../utilities"; +import { + MockedProvider, + MockedResponse, + MockSubscriptionLink, + MockLink, +} from "../../../testing"; +import { ApolloProvider } from "../../context"; +import { SuspenseQueryHookFetchPolicy, skipToken } from "../../../react"; +import { useSuspenseQuery } from "../useSuspenseQuery"; +import { + RefetchWritePolicy, + WatchQueryFetchPolicy, +} from 
"../../../core/watchQueryOptions"; + +type RenderSuspenseHookOptions<Props, TSerializedCache = {}> = Omit< + RenderHookOptions<Props>, + "wrapper" +> & { + client?: ApolloClient<TSerializedCache>; + link?: ApolloLink; + cache?: ApolloCache<TSerializedCache>; + mocks?: MockedResponse[]; + strictMode?: boolean; +}; + +interface Renders<Result> { + errors: Error[]; + errorCount: number; + suspenseCount: number; + count: number; + frames: Result[]; +} + +interface SimpleQueryData { + greeting: string; +} + +function renderSuspenseHook<Result, Props>( + render: (initialProps: Props) => Result, + options: RenderSuspenseHookOptions<Props> = Object.create(null) +) { + function SuspenseFallback() { + renders.suspenseCount++; + + return <div>loading</div>; + } + + const renders: Renders<Result> = { + errors: [], + errorCount: 0, + suspenseCount: 0, + count: 0, + frames: [], + }; + + const { mocks = [], strictMode, ...renderHookOptions } = options; + + const client = + options.client || + new ApolloClient({ + cache: options.cache || new InMemoryCache(), + link: options.link || new MockLink(mocks), + }); + + const view = renderHook( + (props) => { + renders.count++; + + const view = render(props); + + renders.frames.push(view); + + return view; + }, + { + ...renderHookOptions, + wrapper: ({ children }) => { + const Wrapper = strictMode ? StrictMode : Fragment; + + return ( + <Wrapper> + <Suspense fallback={<SuspenseFallback />}> + <ErrorBoundary + fallback={<div>Error</div>} + onError={(error) => { + renders.errorCount++; + renders.errors.push(error); + }} + > + <ApolloProvider client={client}>{children}</ApolloProvider> + </ErrorBoundary> + </Suspense> + </Wrapper> + ); + }, + } + ); + + return { ...view, renders }; +} + +function useSimpleQueryCase() { + const query: TypedDocumentNode<SimpleQueryData> = gql` + query UserQuery { + greeting + } + `; + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello" } }, + }, + ]; + + return { query, mocks }; +} + +function usePaginatedCase() { + interface QueryData { + letters: { + letter: string; + position: number; + }[]; + } + + interface Variables { + limit?: number; + offset?: number; + } + + const query: TypedDocumentNode<QueryData, Variables> = gql` + query letters($limit: Int, $offset: Int) { + letters(limit: $limit) { + letter + position + } + } + `; + + const data = "ABCDEFG".split("").map((letter, index) => ({ + __typename: "Letter", + letter, + position: index + 1, + })); + + const link = new ApolloLink((operation) => { + const { offset = 0, limit = 2 } = operation.variables; + const letters = data.slice(offset, offset + limit); + + return new Observable((observer) => { + setTimeout(() => { + observer.next({ data: { letters } }); + observer.complete(); + }, 10); + }); + }); + + return { query, link, data }; +} + +interface ErrorCaseData { + currentUser: { + id: string; + name: string | null; + }; +} + +function useErrorCase<TData extends ErrorCaseData>( + { + data, + networkError, + graphQLErrors, + }: { + data?: TData; + networkError?: Error; + graphQLErrors?: GraphQLError[]; + } = Object.create(null) +) { + const query: TypedDocumentNode<TData, never> = gql` + query MyQuery { + currentUser { + id + name + } + } + `; + + const mock: MockedResponse<TData> = compact({ + request: { query }, + result: (data || graphQLErrors) && compact({ data, errors: graphQLErrors }), + error: networkError, + }); + + return { query, mocks: [mock] }; +} + +interface VariablesCaseData { + character: { + id: string; + name: string; + }; +} + 
+interface VariablesCaseVariables { + id: string; +} + +function useVariablesQueryCase() { + const CHARACTERS = ["Spider-Man", "Black Widow", "Iron Man", "Hulk"]; + + const query: TypedDocumentNode< + VariablesCaseData, + VariablesCaseVariables + > = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + + const mocks = CHARACTERS.map((name, index) => ({ + request: { query, variables: { id: String(index + 1) } }, + result: { + data: { + character: { __typename: "Character", id: String(index + 1), name }, + }, + }, + })); + + return { query, mocks }; +} + +function wait(delay: number) { + return new Promise((resolve) => setTimeout(resolve, delay)); +} + +describe("useSuspenseQuery", () => { + it("validates the GraphQL query as a query", () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + mutation ShouldThrow { + createException + } + `; + + expect(() => { + renderHook(() => useSuspenseQuery(query), { + wrapper: ({ children }) => <MockedProvider>{children}</MockedProvider>, + }); + }).toThrowError( + new InvariantError( + "Running a Query requires a graphql Query, but a Mutation was used instead." + ) + ); + + consoleSpy.mockRestore(); + }); + + it("ensures a valid fetch policy is used", () => { + const INVALID_FETCH_POLICIES = ["cache-only", "standby"]; + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + const { query } = useSimpleQueryCase(); + + INVALID_FETCH_POLICIES.forEach((fetchPolicy: any) => { + expect(() => { + renderHook(() => useSuspenseQuery(query, { fetchPolicy }), { + wrapper: ({ children }) => ( + <MockedProvider>{children}</MockedProvider> + ), + }); + }).toThrowError( + new InvariantError( + `The fetch policy \`${fetchPolicy}\` is not supported with suspense.` + ) + ); + }); + + consoleSpy.mockRestore(); + }); + + it("ensures a valid fetch policy is used when defined via global options", () => { + const INVALID_FETCH_POLICIES: WatchQueryFetchPolicy[] = [ + "cache-only", + "standby", + ]; + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + const { query } = useSimpleQueryCase(); + + INVALID_FETCH_POLICIES.forEach((fetchPolicy) => { + expect(() => { + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([]), + defaultOptions: { + watchQuery: { + fetchPolicy, + }, + }, + }); + + renderHook(() => useSuspenseQuery(query), { + wrapper: ({ children }) => ( + <ApolloProvider client={client}>{children}</ApolloProvider> + ), + }); + }).toThrowError( + new InvariantError( + `The fetch policy \`${fetchPolicy}\` is not supported with suspense.` + ) + ); + }); + + consoleSpy.mockRestore(); + }); + + it("suspends a query and returns results", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { mocks } + ); + + // ensure the hook suspends immediately + expect(renders.suspenseCount).toBe(1); + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + expect(renders.suspenseCount).toBe(1); + expect(renders.count).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends a query with variables and returns results", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { 
variables: { id: "1" } }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + expect(renders.suspenseCount).toBe(1); + expect(renders.count).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("returns the same results for the same variables", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { variables: { id } }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + const previousResult = result.current; + + rerender({ id: "1" }); + + expect(result.current).toBe(previousResult); + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("ensures result is referentially stable", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const { result, rerender } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { variables: { id } }), + { mocks, initialProps: { id: "1" } } + ); + + expect(screen.getByText("loading")).toBeInTheDocument(); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + const previousResult = result.current; + + rerender({ id: "1" }); + + expect(result.current).toBe(previousResult); + }); + + it("ensures refetch, fetchMore, and subscribeToMore are referentially stable even after result data has changed", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + const { result } = renderSuspenseHook(() => useSuspenseQuery(query), { + client, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + const previousResult = result.current; + + client.writeQuery({ + query, + data: { greeting: "Updated cache greeting" }, + }); + + await waitFor(() => { + expect(result.current.data).toEqual({ + greeting: "Updated cache greeting", + }); + }); + + expect(result.current.fetchMore).toBe(previousResult.fetchMore); + expect(result.current.refetch).toBe(previousResult.refetch); + expect(result.current.subscribeToMore).toBe(previousResult.subscribeToMore); + }); + + it('enables canonical results when canonizeResults is "true"', async () => { + interface Result { + __typename: string; + value: number; + } + + const cache = new InMemoryCache({ + typePolicies: { + Result: { + keyFields: false, + }, + }, + }); + + const query: TypedDocumentNode<{ results: Result[] }> = gql` + query { + results { + value + } + } + `; + + const results: Result[] = [ + { __typename: "Result", value: 0 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 2 }, + { __typename: "Result", value: 3 }, + { __typename: "Result", value: 5 }, + ]; + + cache.writeQuery({ + query, + data: { results }, + }); + + const { result } = renderSuspenseHook( + () => useSuspenseQuery(query, { canonizeResults: true }), + { cache 
} + ); + + const { data } = result.current; + const resultSet = new Set(data.results); + const values = Array.from(resultSet).map((item) => item.value); + + expect(data).toEqual({ results }); + expect(data.results.length).toBe(6); + expect(resultSet.size).toBe(5); + expect(values).toEqual([0, 1, 2, 3, 5]); + }); + + it("can disable canonical results when the cache's canonizeResults setting is true", async () => { + interface Result { + __typename: string; + value: number; + } + + const cache = new InMemoryCache({ + canonizeResults: true, + typePolicies: { + Result: { + keyFields: false, + }, + }, + }); + + const query: TypedDocumentNode<{ results: Result[] }> = gql` + query { + results { + value + } + } + `; + + const results: Result[] = [ + { __typename: "Result", value: 0 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 2 }, + { __typename: "Result", value: 3 }, + { __typename: "Result", value: 5 }, + ]; + + cache.writeQuery({ + query, + data: { results }, + }); + + const { result } = renderSuspenseHook( + () => useSuspenseQuery(query, { canonizeResults: false }), + { cache } + ); + + const { data } = result.current; + const resultSet = new Set(data.results); + const values = Array.from(resultSet).map((item) => item.value); + + expect(data).toEqual({ results }); + expect(data.results.length).toBe(6); + expect(resultSet.size).toBe(6); + expect(values).toEqual([0, 1, 1, 2, 3, 5]); + }); + + it("tears down the query on unmount", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const client = new ApolloClient({ + link: new ApolloLink(() => Observable.of(mocks[0].result)), + cache: new InMemoryCache(), + }); + + const { result, unmount } = renderSuspenseHook( + () => useSuspenseQuery(query), + { client } + ); + + await waitFor(() => + expect(result.current.data).toEqual(mocks[0].result.data) + ); + + expect(client.getObservableQueries().size).toBe(1); + expect(client).toHaveSuspenseCacheEntryUsing(query); + + unmount(); + + // We need to wait a tick since the cleanup is run in a setTimeout to + // prevent strict mode bugs. + await wait(0); + + expect(client.getObservableQueries().size).toBe(0); + expect(client).not.toHaveSuspenseCacheEntryUsing(query); + }); + + it("tears down all queries when rendering with multiple variable sets", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + const { rerender, result, unmount } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { variables: { id } }), + { client, initialProps: { id: "1" } } + ); + + await waitFor(() => + expect(result.current.data).toEqual(mocks[0].result.data) + ); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[1].result.data); + }); + + unmount(); + + // We need to wait a tick since the cleanup is run in a setTimeout to + // prevent strict mode bugs. 
+ await wait(0); + + expect(client.getObservableQueries().size).toBe(0); + + expect(client).not.toHaveSuspenseCacheEntryUsing(query, { + variables: { id: "1" }, + }); + expect(client).not.toHaveSuspenseCacheEntryUsing(query, { + variables: { id: "2" }, + }); + }); + + it("tears down all queries when multiple clients are used", async () => { + const { query } = useVariablesQueryCase(); + + const client1 = new ApolloClient({ + link: new MockLink([ + { + request: { query, variables: { id: "1" } }, + result: { data: { character: { id: "1", name: "Client 1" } } }, + }, + ]), + cache: new InMemoryCache(), + }); + + const client2 = new ApolloClient({ + link: new MockLink([ + { + request: { query, variables: { id: "1" } }, + result: { data: { character: { id: "1", name: "Client 2" } } }, + }, + ]), + cache: new InMemoryCache(), + }); + + const { rerender, result, unmount } = renderSuspenseHook( + ({ client }) => + useSuspenseQuery(query, { client, variables: { id: "1" } }), + { initialProps: { client: client1 } } + ); + + await waitFor(() => + expect(result.current.data).toEqual({ + character: { id: "1", name: "Client 1" }, + }) + ); + + rerender({ client: client2 }); + + await waitFor(() => { + expect(result.current.data).toEqual({ + character: { id: "1", name: "Client 2" }, + }); + }); + + const variables = { id: "1" }; + + unmount(); + + // We need to wait a tick since the cleanup is run in a setTimeout to + // prevent strict mode bugs. + await wait(0); + + expect(client1.getObservableQueries().size).toBe(0); + expect(client2.getObservableQueries().size).toBe(0); + expect(client1).not.toHaveSuspenseCacheEntryUsing(query, { + variables, + }); + expect(client2).not.toHaveSuspenseCacheEntryUsing(query, { + variables, + }); + }); + + it("tears down the query if the component never renders again after suspending", async () => { + jest.useFakeTimers(); + const { query } = useSimpleQueryCase(); + const user = userEvent.setup({ advanceTimers: jest.advanceTimersByTime }); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function App() { + const [showGreeting, setShowGreeting] = React.useState(true); + + return ( + <ApolloProvider client={client}> + <button onClick={() => setShowGreeting(false)}>Hide greeting</button> + {showGreeting && ( + <Suspense fallback="Loading greeting..."> + <Greeting /> + </Suspense> + )} + </ApolloProvider> + ); + } + + function Greeting() { + const { data } = useSuspenseQuery(query); + + return <span>{data.greeting}</span>; + } + + render(<App />); + + // Ensure <Greeting /> suspends immediately + expect(screen.getByText("Loading greeting...")).toBeInTheDocument(); + + // Hide the greeting before it finishes loading data + await act(() => user.click(screen.getByText("Hide greeting"))); + + expect(screen.queryByText("Loading greeting...")).not.toBeInTheDocument(); + + link.simulateResult({ result: { data: { greeting: "Hello" } } }); + link.simulateComplete(); + + expect(client.getObservableQueries().size).toBe(1); + expect(client).toHaveSuspenseCacheEntryUsing(query); + + jest.advanceTimersByTime(30_000); + + expect(client.getObservableQueries().size).toBe(0); + expect(client).not.toHaveSuspenseCacheEntryUsing(query); + + jest.useRealTimers(); + + // Avoid act warnings for a suspended resource + // eslint-disable-next-line testing-library/no-unnecessary-act + await act(() => wait(0)); + }); + + it("has configurable auto dispose timer if the component never renders again after suspending", async () => 
{ + jest.useFakeTimers(); + const { query } = useSimpleQueryCase(); + const user = userEvent.setup({ advanceTimers: jest.advanceTimersByTime }); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + defaultOptions: { + react: { + suspense: { + autoDisposeTimeoutMs: 5000, + }, + }, + }, + }); + + function App() { + const [showGreeting, setShowGreeting] = React.useState(true); + + return ( + <ApolloProvider client={client}> + <button onClick={() => setShowGreeting(false)}>Hide greeting</button> + {showGreeting && ( + <Suspense fallback="Loading greeting..."> + <Greeting /> + </Suspense> + )} + </ApolloProvider> + ); + } + + function Greeting() { + const { data } = useSuspenseQuery(query); + + return <span>{data.greeting}</span>; + } + + render(<App />); + + // Ensure <Greeting /> suspends immediately + expect(screen.getByText("Loading greeting...")).toBeInTheDocument(); + + // Hide the greeting before it finishes loading data + await act(() => user.click(screen.getByText("Hide greeting"))); + + expect(screen.queryByText("Loading greeting...")).not.toBeInTheDocument(); + + link.simulateResult({ result: { data: { greeting: "Hello" } } }); + link.simulateComplete(); + + expect(client.getObservableQueries().size).toBe(1); + expect(client).toHaveSuspenseCacheEntryUsing(query); + + jest.advanceTimersByTime(5_000); + + expect(client.getObservableQueries().size).toBe(0); + expect(client).not.toHaveSuspenseCacheEntryUsing(query); + + jest.useRealTimers(); + + // Avoid act warnings for a suspended resource + // eslint-disable-next-line testing-library/no-unnecessary-act + await act(() => wait(0)); + }); + + it("cancels auto dispose if the component renders before timer finishes", async () => { + jest.useFakeTimers(); + const { query } = useSimpleQueryCase(); + const link = new ApolloLink(() => { + return new Observable((observer) => { + setTimeout(() => { + observer.next({ data: { greeting: "Hello" } }); + observer.complete(); + }, 10); + }); + }); + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback="Loading greeting..."> + <Greeting /> + </Suspense> + </ApolloProvider> + ); + } + + function Greeting() { + const { data } = useSuspenseQuery(query); + + return <span>{data.greeting}</span>; + } + + render(<App />); + + // Ensure <Greeting /> suspends immediately + expect(screen.getByText("Loading greeting...")).toBeInTheDocument(); + + jest.advanceTimersByTime(10); + + await waitFor(() => { + expect(screen.getByText("Hello")).toBeInTheDocument(); + }); + + jest.advanceTimersByTime(30_000); + + expect(client.getObservableQueries().size).toBe(1); + expect(client).toHaveSuspenseCacheEntryUsing(query); + + jest.useRealTimers(); + }); + + it("allows the client to be overridden", async () => { + const { query } = useSimpleQueryCase(); + + const globalClient = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "global hello" } }) + ), + cache: new InMemoryCache(), + }); + + const localClient = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "local hello" } }) + ), + cache: new InMemoryCache(), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { client: localClient }), + { client: globalClient } + ); + + await waitFor(() => + expect(result.current.data).toEqual({ greeting: "local hello" }) + ); + + 
expect(renders.frames).toMatchObject([ + { + data: { greeting: "local hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("allows the client to be overridden in strict mode", async () => { + const { query } = useSimpleQueryCase(); + + const globalClient = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "global hello" } }) + ), + cache: new InMemoryCache(), + }); + + const localClient = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "local hello" } }) + ), + cache: new InMemoryCache(), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { client: localClient }), + { strictMode: true, client: globalClient } + ); + + await waitFor(() => + expect(result.current.data).toEqual({ greeting: "local hello" }) + ); + + // React double invokes the render function in strict mode so we expect + // to render 2 frames after the initial suspense. + expect(renders.frames).toMatchObject([ + { + data: { greeting: "local hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "local hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("returns the client used in the result", async () => { + const { query } = useSimpleQueryCase(); + + const client = new ApolloClient({ + link: new ApolloLink(() => + Observable.of({ data: { greeting: "hello" } }) + ), + cache: new InMemoryCache(), + }); + + const { result } = renderSuspenseHook(() => useSuspenseQuery(query), { + client, + }); + + // wait for query to finish suspending to avoid warnings + await waitFor(() => { + expect(result.current.data).toEqual({ greeting: "hello" }); + }); + + expect(result.current.client).toBe(client); + }); + + it("suspends when changing variables", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { variables: { id } }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends and fetches data from new client when changing clients", async () => { + const { query } = useSimpleQueryCase(); + + const client1 = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([ + { + request: { query }, + result: { data: { greeting: "Hello client 1" } }, + }, + ]), + }); + + const client2 = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink([ + { + request: { query }, + result: { data: { greeting: "Hello client 2" } }, + }, + ]), + }); + + const { result, rerender, renders } = renderSuspenseHook( + ({ client }) => useSuspenseQuery(query, { client }), + { initialProps: { client: client1 } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello client 1" }, + networkStatus: NetworkStatus.ready, + 
error: undefined, + }); + }); + + rerender({ client: client2 }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello client 2" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello client 1" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello client 2" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("allows custom query key so two components that share same query and variables do not interfere with each other", async () => { + interface Data { + todo: { + id: number; + name: string; + completed: boolean; + }; + } + + interface Variables { + id: number; + } + + const query: TypedDocumentNode<Data, Variables> = gql` + query GetTodo($id: ID!) { + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: 1 } }, + result: { + data: { todo: { id: 1, name: "Take out trash", completed: false } }, + }, + delay: 20, + }, + // refetch + { + request: { query, variables: { id: 1 } }, + result: { + data: { todo: { id: 1, name: "Take out trash", completed: true } }, + }, + delay: 20, + }, + ]; + + const user = userEvent.setup(); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function Spinner({ name }: { name: string }) { + return <span>Loading {name}</span>; + } + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<Spinner name="first" />}> + <Todo name="first" /> + </Suspense> + <Suspense fallback={<Spinner name="second" />}> + <Todo name="second" /> + </Suspense> + </ApolloProvider> + ); + } + + function Todo({ name }: { name: string }) { + const { data, refetch } = useSuspenseQuery(query, { + // intentionally use no-cache to allow us to verify each suspense + // component is independent of each other + fetchPolicy: "no-cache", + variables: { id: 1 }, + queryKey: [name], + }); + + return ( + <div> + <button onClick={() => refetch()}>Refetch {name}</button> + <span data-testid={[name, "data"].join(".")}> + {data.todo.name} {data.todo.completed && "(completed)"} + </span> + </div> + ); + } + + render(<App />); + + expect(screen.getByText("Loading first")).toBeInTheDocument(); + expect(screen.getByText("Loading second")).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("first.data")).toHaveTextContent( + "Take out trash" + ); + }); + + expect(screen.getByTestId("second.data")).toHaveTextContent( + "Take out trash" + ); + + await act(() => user.click(screen.getByText("Refetch first"))); + + // Ensure that refetching the first todo does not update the second todo + // as well + expect(screen.getByText("Loading first")).toBeInTheDocument(); + expect(screen.queryByText("Loading second")).not.toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByTestId("first.data")).toHaveTextContent( + "Take out trash (completed)" + ); + }); + + // Ensure that refetching the first todo did not affect the second + expect(screen.getByTestId("second.data")).toHaveTextContent( + "Take out trash" + ); + }); + + it("suspends and refetches data when changing query keys", async () => { + const { query } = useSimpleQueryCase(); + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello first fetch" } }, + delay: 20, + }, + { + request: 
{ query }, + result: { data: { greeting: "Hello second fetch" } }, + delay: 20, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ queryKey }) => + // intentionally use a fetch policy that will execute a network request + useSuspenseQuery(query, { queryKey, fetchPolicy: "network-only" }), + { mocks, initialProps: { queryKey: ["first"] } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ queryKey: ["second"] }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends and refetches data when part of the query key changes", async () => { + const { query } = useSimpleQueryCase(); + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello first fetch" } }, + delay: 20, + }, + { + request: { query }, + result: { data: { greeting: "Hello second fetch" } }, + delay: 20, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ queryKey }) => + // intentionally use a fetch policy that will execute a network request + useSuspenseQuery(query, { queryKey, fetchPolicy: "network-only" }), + { mocks, initialProps: { queryKey: ["greeting", 1] } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ queryKey: ["greeting", 2] }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends and refetches when using plain string query keys", async () => { + const { query } = useSimpleQueryCase(); + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello first fetch" } }, + delay: 20, + }, + { + request: { query }, + result: { data: { greeting: "Hello second fetch" } }, + delay: 20, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ queryKey }) => + // intentionally use a fetch policy that will execute a network request + useSuspenseQuery(query, { queryKey, fetchPolicy: "network-only" }), + { mocks, initialProps: { queryKey: "first" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ queryKey: "second" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + 
expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends and refetches when using numeric query keys", async () => { + const { query } = useSimpleQueryCase(); + + const mocks = [ + { + request: { query }, + result: { data: { greeting: "Hello first fetch" } }, + delay: 20, + }, + { + request: { query }, + result: { data: { greeting: "Hello second fetch" } }, + delay: 20, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ queryKey }) => + // intentionally use a fetch policy that will execute a network request + useSuspenseQuery(query, { queryKey, fetchPolicy: "network-only" }), + { mocks, initialProps: { queryKey: 1 } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ queryKey: 2 }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello first fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello second fetch" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("responds to cache updates after changing variables", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { variables: { id } }), + { client, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + client.writeQuery({ + query, + variables: { id: "2" }, + data: { character: { id: "2", name: "Cached hero" } }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { character: { id: "2", name: "Cached hero" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.suspenseCount).toBe(2); + expect(renders.count).toBe(5); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { character: { id: "2", name: "Cached hero" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("uses cached result and does not suspend when switching back to already used variables while using `cache-first` fetch policy", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + variables: { id }, + 
}), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "1" }); + + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(5); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("uses cached result with network request and does not suspend when switching back to already used variables while using `cache-and-network` fetch policy", async () => { + const query: TypedDocumentNode< + VariablesCaseData, + VariablesCaseVariables + > = gql` + query CharacterQuery($id: ID!) { + character(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + }, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { + character: { + __typename: "Character", + id: "2", + name: "Black Widow", + }, + }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Spider-Man (refetch)", + }, + }, + }, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(query, { + fetchPolicy: "cache-and-network", + variables: { id }, + }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "1" }); + + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(6); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("refetches and suspends when switching back to already used variables while using `network-only` fetch policy", async () => { + const query: TypedDocumentNode< + VariablesCaseData, + VariablesCaseVariables + > = gql` + query CharacterQuery($id: ID!) 
{ + character(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + }, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { + character: { + __typename: "Character", + id: "2", + name: "Black Widow", + }, + }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Spider-Man (refetch)", + }, + }, + }, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(query, { + fetchPolicy: "network-only", + variables: { id }, + }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "1" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(6); + expect(renders.suspenseCount).toBe(3); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("refetches and suspends when switching back to already used variables while using `no-cache` fetch policy", async () => { + const query: TypedDocumentNode< + VariablesCaseData, + VariablesCaseVariables + > = gql` + query CharacterQuery($id: ID!) 
{ + character(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { __typename: "Character", id: "1", name: "Spider-Man" }, + }, + }, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { + character: { + __typename: "Character", + id: "2", + name: "Black Widow", + }, + }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Spider-Man (refetch)", + }, + }, + }, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(query, { + fetchPolicy: "no-cache", + variables: { id }, + }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "1" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(6); + expect(renders.suspenseCount).toBe(3); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("responds to cache updates after changing back to already fetched variables", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { variables: { id } }), + { client, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "1" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + client.writeQuery({ + query, + variables: { id: "1" }, + data: { character: { id: "1", name: "Cached hero" } }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { character: { id: "1", name: "Cached hero" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.suspenseCount).toBe(2); + expect(renders.count).toBe(6); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { character: { id: "1", name: "Cached hero" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not suspend when data is in the cache and 
using a "cache-first" fetch policy', async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { greeting: "hello from cache" }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { cache, mocks } + ); + + expect(result.current).toMatchObject({ + data: { greeting: "hello from cache" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(1); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "hello from cache" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not initiate a network request when data is in the cache and using a "cache-first" fetch policy', async () => { + let fetchCount = 0; + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const link = new ApolloLink(() => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks[0]; + + observer.next(mock.result); + observer.complete(); + }); + }); + + cache.writeQuery({ + query, + data: { greeting: "hello from cache" }, + }); + + renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { cache, link, initialProps: { id: "1" } } + ); + + expect(fetchCount).toBe(0); + }); + + it('suspends when partial data is in the cache and using a "cache-first" fetch policy', async () => { + const fullQuery = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(fullQuery, { fetchPolicy: "cache-first" }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not suspend when partial data is in the cache and using a "cache-first" fetch policy with returnPartialData', async () => { + const fullQuery = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(fullQuery, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { cache, mocks } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + 
expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends and does not use partial data when changing variables and using a "cache-first" fetch policy with returnPartialData', async () => { + const { query: fullQuery, mocks } = useVariablesQueryCase(); + + const partialQuery = gql` + query ($id: ID!) { + character(id: $id) { + id + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + variables: { id: "1" }, + }); + + const { result, renders, rerender } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(fullQuery, { + fetchPolicy: "cache-first", + returnPartialData: true, + variables: { id }, + }), + { cache, mocks, initialProps: { id: "1" } } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends when data is in the cache and using a "network-only" fetch policy', async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { greeting: "hello from cache" }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "network-only" }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends when partial data is in the cache and using a "network-only" fetch policy with returnPartialData', async () => { + const fullQuery = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(fullQuery, { + fetchPolicy: "network-only", + returnPartialData: true, + }), + { cache, mocks } + ); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + 
expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends and does not overwrite cache when data is in the cache and using a "no-cache" fetch policy', async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { greeting: "hello from cache" }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "no-cache" }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + const cachedData = cache.readQuery({ query }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + expect(cachedData).toEqual({ greeting: "hello from cache" }); + }); + + it('maintains results when rerendering a query using a "no-cache" fetch policy', async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const { result, rerender, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "no-cache" }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + + rerender(); + + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends when partial data is in the cache and using a "no-cache" fetch policy with returnPartialData', async () => { + const consoleSpy = jest.spyOn(console, "warn").mockImplementation(); + + const fullQuery = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(fullQuery, { + fetchPolicy: "no-cache", + returnPartialData: true, + }), + { cache, mocks } + ); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + 
expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + + consoleSpy.mockRestore(); + }); + + it('warns when using returnPartialData with a "no-cache" fetch policy', async () => { + const consoleSpy = jest.spyOn(console, "warn").mockImplementation(); + + const { query, mocks } = useSimpleQueryCase(); + + renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "no-cache", + returnPartialData: true, + }), + { mocks } + ); + + expect(console.warn).toHaveBeenCalledTimes(1); + expect(console.warn).toHaveBeenCalledWith( + "Using `returnPartialData` with a `no-cache` fetch policy has no effect. To read partial data from the cache, consider using an alternate fetch policy." + ); + + consoleSpy.mockRestore(); + }); + + it('does not suspend when data is in the cache and using a "cache-and-network" fetch policy', async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { greeting: "hello from cache" }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { cache, mocks } + ); + + expect(result.current).toMatchObject({ + data: { greeting: "hello from cache" }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "hello from cache" }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not suspend when partial data is in the cache and using a "cache-and-network" fetch policy with returnPartialData', async () => { + const fullQuery = gql` + query { + character { + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { data: { character: { id: "1", name: "Doctor Strange" } } }, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + }); + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(fullQuery, { + fetchPolicy: "cache-and-network", + returnPartialData: true, + }), + { cache, mocks } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('suspends and does not use partial data when changing variables and using a "cache-and-network" fetch policy with returnPartialData', async () => { + const { query: fullQuery, mocks } = useVariablesQueryCase(); + + const partialQuery = gql` + query 
($id: ID!) { + character(id: $id) { + id + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { id: "1" } }, + variables: { id: "1" }, + }); + + const { result, renders, rerender } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(fullQuery, { + fetchPolicy: "cache-and-network", + returnPartialData: true, + variables: { id }, + }), + { cache, mocks, initialProps: { id: "1" } } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { character: { id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "cache-and-network", + ])( + 'writes to the cache when using a "%s" fetch policy', + async (fetchPolicy) => { + const { query, mocks } = useVariablesQueryCase(); + + const cache = new InMemoryCache(); + + const { result } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { fetchPolicy, variables: { id } }), + { cache, mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + const cachedData = cache.readQuery({ query, variables: { id: "1" } }); + + expect(cachedData).toEqual(mocks[0].result.data); + } + ); + + it('does not write to the cache when using a "no-cache" fetch policy', async () => { + const { query, mocks } = useVariablesQueryCase(); + + const cache = new InMemoryCache(); + + const { result } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(query, { fetchPolicy: "no-cache", variables: { id } }), + { cache, mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + const cachedData = cache.readQuery({ query, variables: { id: "1" } }); + + expect(cachedData).toBeNull(); + }); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "cache-and-network", + ])( + 'responds to cache updates when using a "%s" fetch policy', + async (fetchPolicy) => { + const { query, mocks } = useSimpleQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { client } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + client.writeQuery({ + query, + data: { greeting: "Updated hello" }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Updated hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + expect(renders.suspenseCount).toBe(1); + expect(renders.count).toBe(3); + 
expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Updated hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + } + ); + + it('does not respond to cache updates when using a "no-cache" fetch policy', async () => { + const { query, mocks } = useSimpleQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "no-cache" }), + { client } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + client.writeQuery({ + query, + data: { greeting: "Updated hello" }, + }); + + // Wait for a while to ensure no updates happen asynchronously + await wait(100); + + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + expect(renders.suspenseCount).toBe(1); + expect(renders.count).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", + ])( + 're-suspends the component when changing variables and using a "%s" fetch policy', + async (fetchPolicy) => { + const { query, mocks } = useVariablesQueryCase(); + + const { result, rerender, renders } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { fetchPolicy, variables: { id } }), + { mocks, initialProps: { id: "1" } } + ); + + expect(renders.suspenseCount).toBe(1); + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + // Renders: + // 1. Initiate fetch and suspend + // 2. Unsuspend and return results from initial fetch + // 3. Change variables and suspend + // 5. 
Unsuspend and return results from refetch + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + } + ); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", + ])( + 're-suspends the component when changing queries and using a "%s" fetch policy', + async (fetchPolicy) => { + const query1: TypedDocumentNode<{ hello: string }> = gql` + query Query1 { + hello + } + `; + + const query2: TypedDocumentNode<{ world: string }> = gql` + query Query2 { + world + } + `; + + const mocks = [ + { + request: { query: query1 }, + result: { data: { hello: "query1" } }, + }, + { + request: { query: query2 }, + result: { data: { world: "query2" } }, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ query }) => useSuspenseQuery(query, { fetchPolicy }), + { mocks, initialProps: { query: query1 as DocumentNode } } + ); + + expect(renders.suspenseCount).toBe(1); + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ query: query2 }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + // Renders: + // 1. Initiate fetch and suspend + // 2. Unsuspend and return results from initial fetch + // 3. Change queries and suspend + // 5. Unsuspend and return results from refetch + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + } + ); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", + ])( + 'ensures data is fetched the correct amount of times when changing variables and using a "%s" fetch policy', + async (fetchPolicy) => { + const { query, mocks } = useVariablesQueryCase(); + + let fetchCount = 0; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks.find(({ request }) => + equal(request.variables, operation.variables) + ); + + if (!mock) { + throw new Error("Could not find mock for operation"); + } + + observer.next(mock.result); + observer.complete(); + }); + }); + + const { result, rerender } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { fetchPolicy, variables: { id } }), + { link, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + expect(fetchCount).toBe(1); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[1].result.data); + }); + + expect(fetchCount).toBe(2); + } + ); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", + ])( + 'ensures data is fetched and suspended the correct amount of times in strict mode while using a "%s" fetch policy', + async (fetchPolicy) => { + const { query, mocks } = useVariablesQueryCase(); + + let fetchCount = 0; + + const link = new 
ApolloLink((operation) => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks.find(({ request }) => + equal(request.variables, operation.variables) + ); + + if (!mock) { + throw new Error("Could not find mock for operation"); + } + + observer.next(mock.result); + observer.complete(); + }); + }); + + const { result, renders } = renderSuspenseHook( + ({ id }) => useSuspenseQuery(query, { fetchPolicy, variables: { id } }), + { strictMode: true, link, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + expect(fetchCount).toBe(1); + + // React double invokes the render function in strict mode so the suspense + // fallback is rendered twice before the promise is resolved + // https://reactjs.org/docs/strict-mode.html#detecting-unexpected-side-effects + expect(renders.suspenseCount).toBe(2); + } + ); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "cache-and-network", + ])( + 'responds to cache updates in strict mode while using a "%s" fetch policy', + async (fetchPolicy) => { + const { query, mocks } = useSimpleQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { strictMode: true, client } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + client.writeQuery({ + query, + data: { greeting: "Updated hello" }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Updated hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + } + ); + + // https://github.com/apollographql/apollo-client/issues/10478 + it("responds to cache updates when data is already in the cache while using a cache-first fetch policy", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + client.writeQuery({ + query, + data: { greeting: "Hello from cache" }, + }); + + const { result } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { client } + ); + + expect(result.current.data).toEqual({ greeting: "Hello from cache" }); + + // Allow time for the subscription in the hook to set itself up since it is + // wrapped in a setTimeout (to handle Strict mode bugs). Without this + // `wait`, `subscribe` isn't called until after our test updates the cache + // via `writeQuery`, which then emits the most recent result, which is the + // updated value. 
+ await wait(0); + + client.writeQuery({ + query, + data: { greeting: "Updated hello" }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Updated hello" }, + error: undefined, + }); + }); + }); + + it("uses the default fetch policy from the client when none provided in options", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const client = new ApolloClient({ + cache, + link: new MockLink(mocks), + defaultOptions: { + watchQuery: { + fetchPolicy: "network-only", + }, + }, + }); + + client.writeQuery({ query, data: { greeting: "hello from cache" } }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { client } + ); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("uses default variables from the client when none provided in options", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + defaultOptions: { + watchQuery: { + variables: { id: "2" }, + }, + }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("uses default variables from the client when none provided in options in strict mode", async () => { + const { query, mocks } = useVariablesQueryCase(); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + defaultOptions: { + watchQuery: { + variables: { id: "2" }, + }, + }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { strictMode: true, client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + // React double invokes the render function in strict mode so we expect 2 + // frames to be rendered here. 
+ expect(renders.frames).toMatchObject([ + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("merges global default variables with local variables", async () => { + const query = gql` + query MergedVariablesQuery { + vars + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + return new Observable((observer) => { + observer.next({ data: { vars: operation.variables } }); + observer.complete(); + }); + }), + defaultOptions: { + watchQuery: { + variables: { source: "global", globalOnlyVar: true }, + }, + }, + }); + + const { result, rerender, renders } = renderSuspenseHook( + ({ source }) => + useSuspenseQuery(query, { + fetchPolicy: "network-only", + variables: { source, localOnlyVar: true }, + }), + { client, initialProps: { source: "local" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + vars: { source: "local", globalOnlyVar: true, localOnlyVar: true }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ source: "rerender" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + vars: { source: "rerender", globalOnlyVar: true, localOnlyVar: true }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { + data: { + vars: { source: "local", globalOnlyVar: true, localOnlyVar: true }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + vars: { source: "rerender", globalOnlyVar: true, localOnlyVar: true }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("can unset a globally defined variable", async () => { + const query: TypedDocumentNode<{ vars: Record<string, any> }> = gql` + query MergedVariablesQuery { + vars + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + return new Observable((observer) => { + observer.next({ data: { vars: operation.variables } }); + observer.complete(); + }); + }), + defaultOptions: { + watchQuery: { + variables: { source: "global", globalOnlyVar: true }, + }, + }, + }); + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + variables: { source: "local", globalOnlyVar: undefined }, + }), + { client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { vars: { source: "local" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + // Check to make sure the property itself is not defined, not just set to + // undefined. Unfortunately this is not caught by toMatchObject as + // toMatchObject only checks a if the subset of options are equal, not if + // they have strictly the same keys and values. 
+ expect(result.current.data.vars).not.toHaveProperty("globalOnlyVar"); + + expect(renders.frames).toMatchObject([ + { + data: { vars: { source: "local" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("passes context to the link", async () => { + const query = gql` + query ContextQuery { + context + } + `; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new ApolloLink((operation) => { + return new Observable((observer) => { + const { valueA, valueB } = operation.getContext(); + + observer.next({ data: { context: { valueA, valueB } } }); + observer.complete(); + }); + }), + }); + + const { result } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + context: { valueA: "A", valueB: "B" }, + }), + { client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { context: { valueA: "A", valueB: "B" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + }); + + it("throws network errors by default", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const { query, mocks } = useErrorCase({ + networkError: new Error("Could not fetch"), + }); + + const { renders } = renderSuspenseHook(() => useSuspenseQuery(query), { + mocks, + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toEqual(new Error("Could not fetch")); + expect(error.graphQLErrors).toEqual([]); + + consoleSpy.mockRestore(); + }); + + it("throws graphql errors by default", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const { query, mocks } = useErrorCase({ + graphQLErrors: [new GraphQLError("`id` should not be null")], + }); + + const { renders } = renderSuspenseHook(() => useSuspenseQuery(query), { + mocks, + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toBeNull(); + expect(error.graphQLErrors).toEqual([ + new GraphQLError("`id` should not be null"), + ]); + + consoleSpy.mockRestore(); + }); + + it("tears down subscription when throwing an error", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const { query, mocks } = useErrorCase({ + networkError: new Error("Could not fetch"), + }); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { renders } = renderSuspenseHook(() => useSuspenseQuery(query), { + client, + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(client.getObservableQueries().size).toBe(0); + + consoleSpy.mockRestore(); + }); + + it("tears down subscription when throwing an error on refetch", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + query UserQuery($id: String!) 
{ + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { id: "1" } }), + { client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(client.getObservableQueries().size).toBe(0); + + consoleSpy.mockRestore(); + }); + + it('throws network errors when errorPolicy is set to "none"', async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const { query, mocks } = useErrorCase({ + networkError: new Error("Could not fetch"), + }); + + const { renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "none" }), + { mocks } + ); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toEqual(new Error("Could not fetch")); + expect(error.graphQLErrors).toEqual([]); + + consoleSpy.mockRestore(); + }); + + it('throws graphql errors when errorPolicy is set to "none"', async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const { query, mocks } = useErrorCase({ + graphQLErrors: [new GraphQLError("`id` should not be null")], + }); + + const { renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "none" }), + { mocks } + ); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toBeNull(); + expect(error.graphQLErrors).toEqual([ + new GraphQLError("`id` should not be null"), + ]); + + consoleSpy.mockRestore(); + }); + + it('handles multiple graphql errors when errorPolicy is set to "none"', async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const graphQLErrors = [ + new GraphQLError("Fool me once"), + new GraphQLError("Fool me twice"), + ]; + + const { query, mocks } = useErrorCase({ graphQLErrors }); + + const { renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "none" }), + { mocks } + ); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error!.networkError).toBeNull(); + expect(error!.graphQLErrors).toEqual(graphQLErrors); + + consoleSpy.mockRestore(); + }); + + it('throws network errors when errorPolicy is set to "ignore"', async () => { + const consoleSpy = jest.spyOn(console, 
"error").mockImplementation(); + const networkError = new Error("Could not fetch"); + + const { query, mocks } = useErrorCase({ networkError }); + + const { renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { mocks } + ); + + await waitFor(() => { + expect(renders.errorCount).toBe(1); + }); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error!.networkError).toEqual(networkError); + expect(error!.graphQLErrors).toEqual([]); + + consoleSpy.mockRestore(); + }); + + it('does not throw or return graphql errors when errorPolicy is set to "ignore"', async () => { + const { query, mocks } = useErrorCase({ + graphQLErrors: [new GraphQLError("`id` should not be null")], + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { data: undefined, networkStatus: NetworkStatus.ready, error: undefined }, + ]); + }); + + it('returns partial data results and discards GraphQL errors when errorPolicy is set to "ignore"', async () => { + const { query, mocks } = useErrorCase({ + data: { currentUser: { id: "1", name: null } }, + graphQLErrors: [new GraphQLError("`name` could not be found")], + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { currentUser: { id: "1", name: null } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { + data: { currentUser: { id: "1", name: null } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('discards multiple graphql errors when errorPolicy is set to "ignore"', async () => { + const { query, mocks } = useErrorCase({ + graphQLErrors: [ + new GraphQLError("Fool me once"), + new GraphQLError("Fool me twice"), + ], + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { data: undefined, networkStatus: NetworkStatus.ready, error: undefined }, + ]); + }); + + it('responds to cache updates and clears errors after an error returns when errorPolicy is set to "ignore"', async () => { + const graphQLError = new GraphQLError("`id` should not be null"); + + const { query, mocks } = useErrorCase({ graphQLErrors: [graphQLError] }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + 
}); + + client.writeQuery({ + query, + data: { + currentUser: { + id: "1", + name: "Cache User", + }, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + currentUser: { + id: "1", + name: "Cache User", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.frames).toMatchObject([ + { + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { currentUser: { id: "1", name: "Cache User" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('throws network errors when errorPolicy is set to "all"', async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const networkError = new Error("Could not fetch"); + + const { query, mocks } = useErrorCase({ networkError }); + + const { renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { mocks } + ); + + await waitFor(() => { + expect(renders.errorCount).toBe(1); + }); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error!.networkError).toEqual(networkError); + expect(error!.graphQLErrors).toEqual([]); + + consoleSpy.mockRestore(); + }); + + it('does not throw and returns graphql errors when errorPolicy is set to "all"', async () => { + const graphQLError = new GraphQLError("`id` should not be null"); + + const { query, mocks } = useErrorCase({ graphQLErrors: [graphQLError] }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + error: new ApolloError({ graphQLErrors: [graphQLError] }), + }); + }); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: undefined, + networkStatus: NetworkStatus.error, + error: new ApolloError({ graphQLErrors: [graphQLError] }), + }, + ]); + + const { error } = result.current; + + expect(error).toBeInstanceOf(ApolloError); + expect(error!.networkError).toBeNull(); + expect(error!.graphQLErrors).toEqual([graphQLError]); + }); + + it('responds to cache updates and clears errors after an error returns when errorPolicy is set to "all"', async () => { + const graphQLError = new GraphQLError("`id` should not be null"); + + const { query, mocks } = useErrorCase({ graphQLErrors: [graphQLError] }); + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { client } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.error, + error: new ApolloError({ graphQLErrors: [graphQLError] }), + }); + }); + + client.writeQuery({ + query, + data: { + currentUser: { + id: "1", + name: "Cache User", + }, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + currentUser: { + id: "1", + name: "Cache User", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + 
expect(renders.frames).toMatchObject([ + { + data: undefined, + networkStatus: NetworkStatus.error, + error: new ApolloError({ graphQLErrors: [graphQLError] }), + }, + { + data: { currentUser: { id: "1", name: "Cache User" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('handles multiple graphql errors when errorPolicy is set to "all"', async () => { + const graphQLErrors = [ + new GraphQLError("Fool me once"), + new GraphQLError("Fool me twice"), + ]; + + const { query, mocks } = useErrorCase({ graphQLErrors }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { mocks } + ); + + const expectedError = new ApolloError({ graphQLErrors }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + error: expectedError, + }); + }); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.count).toBe(2); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: undefined, + networkStatus: NetworkStatus.error, + error: expectedError, + }, + ]); + + const { error } = result.current; + + expect(error).toBeInstanceOf(ApolloError); + expect(error!.networkError).toBeNull(); + expect(error!.graphQLErrors).toEqual(graphQLErrors); + }); + + it('returns partial data and keeps errors when errorPolicy is set to "all"', async () => { + const graphQLError = new GraphQLError("`name` could not be found"); + + const { query, mocks } = useErrorCase({ + data: { currentUser: { id: "1", name: null } }, + graphQLErrors: [graphQLError], + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { mocks } + ); + + const expectedError = new ApolloError({ graphQLErrors: [graphQLError] }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { currentUser: { id: "1", name: null } }, + networkStatus: NetworkStatus.error, + error: expectedError, + }); + }); + + expect(renders.frames).toMatchObject([ + { + data: { currentUser: { id: "1", name: null } }, + networkStatus: NetworkStatus.error, + error: expectedError, + }, + ]); + }); + + it('persists errors between rerenders when errorPolicy is set to "all"', async () => { + const graphQLError = new GraphQLError("`name` could not be found"); + + const { query, mocks } = useErrorCase({ + graphQLErrors: [graphQLError], + }); + + const { result, rerender } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { mocks } + ); + + const expectedError = new ApolloError({ graphQLErrors: [graphQLError] }); + + await waitFor(() => { + expect(result.current.error).toEqual(expectedError); + }); + + rerender(); + + expect(result.current.error).toEqual(expectedError); + }); + + it('clears errors when changing variables and errorPolicy is set to "all"', async () => { + const query = gql` + query UserQuery($id: String!) 
{ + user(id: $id) { + id + name + } + } + `; + + const graphQLErrors = [new GraphQLError("Could not fetch user 1")]; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + errors: graphQLErrors, + }, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { user: { id: "2", name: "Captain Marvel" } }, + }, + }, + ]; + + const { result, renders, rerender } = renderSuspenseHook( + ({ id }) => + useSuspenseQuery(query, { errorPolicy: "all", variables: { id } }), + { mocks, initialProps: { id: "1" } } + ); + + const expectedError = new ApolloError({ graphQLErrors }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.error, + error: expectedError, + }); + }); + + rerender({ id: "2" }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: undefined, + networkStatus: NetworkStatus.error, + error: expectedError, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("re-suspends when calling `refetch`", async () => { + const query = gql` + query UserQuery($id: String!) { + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel (updated)" } }, + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { id: "1" } }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("properly resolves `refetch` when returning a result that is deeply equal to data in the cache", async () => { + type Variables = { + id: string; + }; + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Todo id="1" /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ id }: { id: string }) { + const { data, refetch } = useSuspenseQuery(query, { + variables: { id }, + }); + + const { todo } = data; + + return ( + <div> + <button onClick={() => refetch()}>Refetch</button> + <div data-testid="todo"> + {todo.name} + {todo.completed && " (completed)"} + </div> + </div> + ); + } + + render(<App />); + + expect(await screen.findByText("Loading")).toBeInTheDocument(); + + const todo = await screen.findByTestId("todo"); + + expect(todo).toHaveTextContent("Clean room"); + + await act(() => user.click(screen.getByText("Refetch"))); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + await waitFor(() => { + // Suspense will hide the component until the suspense boundary has + // finished loading so it is still in the DOM. + expect(todo).toBeVisible(); + }); + + expect(todo).toHaveTextContent("Clean room"); + }); + + it("re-suspends when calling `refetch` with new variables", async () => { + const query = gql` + query UserQuery($id: String!) { + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { user: { id: "2", name: "Captain America" } }, + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { id: "1" } }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch({ id: "2" }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("re-suspends multiple times when calling `refetch` multiple times", async () => { + const query = gql` + query UserQuery($id: String!) 
{ + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel (updated)" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel (updated again)" } }, + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { id: "1" } }), + { mocks, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(6); + expect(renders.suspenseCount).toBe(3); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("throws errors when errors are returned after calling `refetch`", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + query UserQuery($id: String!) { + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { id: "1" } }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(renders.errorCount).toBe(1); + }); + + expect(renders.errors).toEqual([ + new ApolloError({ + graphQLErrors: [new GraphQLError("Something went wrong")], + }), + ]); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + + consoleSpy.mockRestore(); + }); + + it('ignores errors returned after calling `refetch` when errorPolicy is set to "ignore"', async () => { + const query = gql` + query UserQuery($id: String!) 
{ + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + errorPolicy: "ignore", + variables: { id: "1" }, + }), + { mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + await act(async () => { + await result.current.refetch(); + }); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('returns errors after calling `refetch` when errorPolicy is set to "all"', async () => { + const query = gql` + query UserQuery($id: String!) { + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + errorPolicy: "all", + variables: { id: "1" }, + }), + { mocks } + ); + + const expectedError = new ApolloError({ + graphQLErrors: [new GraphQLError("Something went wrong")], + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.error, + error: expectedError, + }); + }); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.error, + error: expectedError, + }, + ]); + }); + + it('handles partial data results after calling `refetch` when errorPolicy is set to "all"', async () => { + const query = gql` + query UserQuery($id: String!) 
{ + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: null } }, + errors: [new GraphQLError("Something went wrong")], + }, + }, + ]; + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + errorPolicy: "all", + variables: { id: "1" }, + }), + { mocks } + ); + + const expectedError = new ApolloError({ + graphQLErrors: [new GraphQLError("Something went wrong")], + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: mocks[1].result.data, + networkStatus: NetworkStatus.error, + error: expectedError, + }); + }); + + expect(renders.errorCount).toBe(0); + expect(renders.errors).toEqual([]); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: mocks[1].result.data, + networkStatus: NetworkStatus.error, + error: expectedError, + }, + ]); + }); + + it("re-suspends when calling `fetchMore` with different variables", async () => { + const { data, query, link } = usePaginatedCase(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { limit: 2 } }), + { link } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { letters: data.slice(0, 2) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.fetchMore({ variables: { offset: 2 } }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { letters: data.slice(2, 4) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { letters: data.slice(0, 2) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { letters: data.slice(2, 4) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("properly resolves `fetchMore` when returning a result that is deeply equal to data in the cache", async () => { + const { query, link } = usePaginatedCase(); + + const user = userEvent.setup(); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Letters offset={0} /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Letters({ offset }: { offset: number }) { + const { data, fetchMore } = useSuspenseQuery(query, { + variables: { offset }, + }); + + return ( + <div> + <button onClick={() => fetchMore({ variables: { offset } })}> + Fetch more + </button> + <div data-testid="letters"> + {data.letters.map(({ letter }) => letter).join("")} + </div> + </div> + ); + } + + render(<App />); + + expect(await screen.findByText("Loading")).toBeInTheDocument(); + + const letters = await screen.findByTestId("letters"); + + expect(letters).toHaveTextContent("AB"); + + await act(() => user.click(screen.getByText("Fetch more"))); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + 
await waitFor(() => { + // Suspense will hide the component until the suspense boundary has + // finished loading so it is still in the DOM. + expect(letters).toBeVisible(); + }); + + expect(letters).toHaveTextContent("AB"); + }); + + it("suspends when refetching after returning cached data for the initial fetch", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { greeting: "hello from cache" }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { cache, mocks } + ); + + expect(result.current).toMatchObject({ + data: { greeting: "hello from cache" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { greeting: "hello from cache" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { greeting: "Hello" }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("properly uses `updateQuery` when calling `fetchMore`", async () => { + const { data, query, link } = usePaginatedCase(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { limit: 2 } }), + { link } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { letters: data.slice(0, 2) }, + error: undefined, + }); + }); + + act(() => { + result.current.fetchMore({ + variables: { offset: 2 }, + updateQuery: (prev, { fetchMoreResult }) => ({ + letters: prev.letters.concat(fetchMoreResult.letters), + }), + }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { letters: data.slice(0, 4) }, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { + data: { letters: data.slice(0, 2) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { letters: data.slice(0, 4) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("properly uses cache field policies when calling `fetchMore` without `updateQuery`", async () => { + const { data, query, link } = usePaginatedCase(); + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + letters: concatPagination(), + }, + }, + }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { limit: 2 } }), + { cache, link } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { letters: data.slice(0, 2) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.fetchMore({ variables: { offset: 2 } }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { letters: data.slice(0, 4) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { + data: { letters: data.slice(0, 2) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { letters: data.slice(0, 4) }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('honors refetchWritePolicy set to "overwrite"', async () => { + const query: TypedDocumentNode< + { primes: 
number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + const { result } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + variables: { min: 0, max: 12 }, + refetchWritePolicy: "overwrite", + }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + act(() => { + result.current.refetch({ min: 12, max: 30 }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [undefined, [13, 17, 19, 23, 29]], + ]); + }); + + it('honors refetchWritePolicy set to "merge"', async () => { + const query: TypedDocumentNode< + { primes: number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? 
existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + const { result } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + variables: { min: 0, max: 12 }, + refetchWritePolicy: "merge", + }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + act(() => { + result.current.refetch({ min: 12, max: 30 }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { primes: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + ]); + }); + + it('defaults refetchWritePolicy to "overwrite"', async () => { + const query: TypedDocumentNode< + { primes: number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + const { result } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { min: 0, max: 12 } }), + { cache, mocks } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + act(() => { + result.current.refetch({ min: 12, max: 30 }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [undefined, [13, 17, 19, 23, 29]], + ]); + }); + + it("does not suspend when `skip` is true", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { skip: true }), + { cache, mocks } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + it("does not suspend when using `skipToken` token as options", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, skipToken), + { cache, mocks } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + it("suspends when `skip` becomes `false` after it was `true`", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const { 
result, renders, rerender } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { cache, mocks, initialProps: { skip: true } } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + rerender({ skip: false }); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { data: undefined, networkStatus: NetworkStatus.ready, error: undefined }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends when switching away from `skipToken` in options", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const { result, renders, rerender } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, skip ? skipToken : void 0), + { mocks, initialProps: { skip: true } } + ); + + expect(renders.suspenseCount).toBe(0); + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + rerender({ skip: false }); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { data: undefined, networkStatus: NetworkStatus.ready, error: undefined }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("renders skip result, does not suspend, and maintains `data` when `skip` becomes `true` after it was `false`", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const { result, renders, rerender } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { cache, mocks, initialProps: { skip: false } } + ); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ skip: true }); + + expect(renders.suspenseCount).toBe(1); + + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("renders skip result, does not suspend, and maintains `data` when skipping a query with `skipToken` as options after it was enabled", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const cache = new InMemoryCache(); + + const { result, renders, rerender } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, skip ? 
skipToken : void 0), + { cache, mocks, initialProps: { skip: false } } + ); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ skip: true }); + + expect(renders.suspenseCount).toBe(1); + + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("does not make network requests when `skip` is `true`", async () => { + const { query, mocks } = useVariablesQueryCase(); + + let fetchCount = 0; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks.find(({ request }) => + equal(request.variables, operation.variables) + ); + + if (!mock) { + throw new Error("Could not find mock for operation"); + } + + observer.next(mock.result); + observer.complete(); + }); + }); + + const { result, rerender } = renderSuspenseHook( + ({ skip, id }) => useSuspenseQuery(query, { skip, variables: { id } }), + { mocks, link, initialProps: { skip: true, id: "1" } } + ); + + expect(fetchCount).toBe(0); + + rerender({ skip: false, id: "1" }); + + expect(fetchCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ skip: true, id: "2" }); + + expect(fetchCount).toBe(1); + }); + + it("does not make network requests when using `skipToken` for options", async () => { + const { query, mocks } = useVariablesQueryCase(); + + let fetchCount = 0; + + const link = new ApolloLink((operation) => { + return new Observable((observer) => { + fetchCount++; + + const mock = mocks.find(({ request }) => + equal(request.variables, operation.variables) + ); + + if (!mock) { + throw new Error("Could not find mock for operation"); + } + + observer.next(mock.result); + observer.complete(); + }); + }); + + const { result, rerender } = renderSuspenseHook( + ({ skip, id }) => + useSuspenseQuery(query, skip ? 
skipToken : { variables: { id } }), + { mocks, link, initialProps: { skip: true, id: "1" } } + ); + + expect(fetchCount).toBe(0); + + rerender({ skip: false, id: "1" }); + + expect(fetchCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ skip: true, id: "2" }); + + expect(fetchCount).toBe(1); + }); + + it("`skip` result is referentially stable", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const { result, rerender } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { mocks, initialProps: { skip: true } } + ); + + const skipResult = result.current; + + rerender({ skip: true }); + + expect(result.current).toBe(skipResult); + + rerender({ skip: false }); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + const fetchedSkipResult = result.current; + + rerender({ skip: false }); + + expect(result.current).toBe(fetchedSkipResult); + }); + + it("`skip` result is referentially stable when using `skipToken` as options", async () => { + const { query, mocks } = useSimpleQueryCase(); + + const { result, rerender } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, skip ? skipToken : void 0), + { mocks, initialProps: { skip: true } } + ); + + const skipResult = result.current; + + rerender({ skip: true }); + + expect(result.current).toBe(skipResult); + + rerender({ skip: false }); + + await waitFor(() => { + expect(result.current.data).toEqual(mocks[0].result.data); + }); + + const fetchedSkipResult = result.current; + + rerender({ skip: false }); + + expect(result.current).toBe(fetchedSkipResult); + }); + + it("properly resolves when `skip` becomes false when returning a result that is deeply equal to data in the cache", async () => { + type Variables = { + id: string; + }; + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Todo id="1" /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ id }: { id: string }) { + const [skip, setSkip] = React.useState(false); + const { data } = useSuspenseQuery(query, { + // Force a network request that returns the same data from the cache + fetchPolicy: "network-only", + skip, + variables: { id }, + }); + + const todo = data?.todo; + + return ( + <> + <button onClick={() => setSkip((skip) => !skip)}>Toggle skip</button> + {todo && ( + <div data-testid="todo"> + {todo.name} + {todo.completed && " (completed)"} + </div> + )} + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + const todo = await screen.findByTestId("todo"); + expect(todo).toHaveTextContent("Clean room"); + + // skip false -> true + await act(() => user.click(screen.getByText("Toggle skip"))); + expect(todo).toHaveTextContent("Clean room"); + + // skip true -> false + await act(() => user.click(screen.getByText("Toggle skip"))); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + await waitFor(() => { + expect(todo).toBeVisible(); + }); + + expect(todo).toHaveTextContent("Clean room"); + }); + + it("`skip` option works with `startTransition`", async () => { + type Variables = { + id: string; + }; + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) { + todo(id: $id) { + id + name + completed + } + } + `; + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + const [id, setId] = React.useState<string | null>(null); + const [isPending, startTransition] = React.useTransition(); + + return ( + <ApolloProvider client={client}> + <button + disabled={isPending} + onClick={() => { + startTransition(() => { + setId("1"); + }); + }} + > + Fetch to-do 1 + </button> + <Suspense fallback={<SuspenseFallback />}> + <Todo id={id} /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ id }: { id: string | null }) { + const { data } = useSuspenseQuery(query, { + skip: !id, + variables: { id: id ?? "0" }, + }); + + const todo = data?.todo; + + return todo ? 
( + <div data-testid="todo"> + {todo.name} + {todo.completed && " (completed)"} + </div> + ) : null; + } + + render(<App />); + + expect(screen.queryByTestId("todo")).not.toBeInTheDocument(); + + const button = screen.getByText("Fetch to-do 1"); + await act(() => user.click(button)); + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + // We can ensure this works with isPending from useTransition in the process + expect(button).toBeDisabled(); + // Eventually we should see the updated todo content once its done + // suspending. + expect(await screen.findByTestId("todo")).toHaveTextContent("Clean room"); + }); + + it("`skipToken` works with `startTransition` when used for options", async () => { + type Variables = { + id: string; + }; + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) { + todo(id: $id) { + id + name + completed + } + } + `; + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + const [id, setId] = React.useState<string | null>(null); + const [isPending, startTransition] = React.useTransition(); + + return ( + <ApolloProvider client={client}> + <button + disabled={isPending} + onClick={() => { + startTransition(() => { + setId("1"); + }); + }} + > + Fetch to-do 1 + </button> + <Suspense fallback={<SuspenseFallback />}> + <Todo id={id} /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ id }: { id: string | null }) { + const { data } = useSuspenseQuery( + query, + id ? { variables: { id } } : skipToken + ); + + const todo = data?.todo; + + return todo ? ( + <div data-testid="todo"> + {todo.name} + {todo.completed && " (completed)"} + </div> + ) : null; + } + + render(<App />); + + expect(screen.queryByTestId("todo")).not.toBeInTheDocument(); + + const button = screen.getByText("Fetch to-do 1"); + await act(() => user.click(button)); + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + // We can ensure this works with isPending from useTransition in the process + expect(button).toBeDisabled(); + // Eventually we should see the updated todo content once its done + // suspending. 
+ expect(await screen.findByTestId("todo")).toHaveTextContent("Clean room"); + }); + + it("applies `errorPolicy` on next fetch when it changes between renders", async () => { + const { query, mocks: simpleMocks } = useSimpleQueryCase(); + + const successMock = simpleMocks[0]; + + const mocks = [ + successMock, + { + request: { query }, + result: { + errors: [new GraphQLError("oops")], + }, + }, + ]; + + const { result, rerender, renders } = renderSuspenseHook( + ({ errorPolicy }) => useSuspenseQuery(query, { errorPolicy }), + { mocks, initialProps: { errorPolicy: "none" as ErrorPolicy } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...successMock.result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ errorPolicy: "all" }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...successMock.result, + networkStatus: NetworkStatus.error, + error: new ApolloError({ graphQLErrors: [new GraphQLError("oops")] }), + }); + }); + + expect(renders.errorCount).toBe(0); + expect( + renders.frames.map((f) => ({ + data: f.data, + error: f.error, + networkStatus: f.networkStatus, + })) + ).toMatchObject([ + { + ...successMock.result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...successMock.result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...successMock.result, + networkStatus: NetworkStatus.error, + error: new ApolloError({ graphQLErrors: [new GraphQLError("oops")] }), + }, + ]); + }); + + it("applies `context` on next fetch when it changes between renders", async () => { + const query = gql` + query { + context + } + `; + + const link = new ApolloLink((operation) => { + return Observable.of({ + data: { + context: operation.getContext(), + }, + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + const { result, rerender, renders } = renderSuspenseHook( + ({ context }) => useSuspenseQuery(query, { context }), + { client, initialProps: { context: { phase: "initialValue" } } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { context: { phase: "initialValue" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ context: { phase: "rerender" } }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + context: { phase: "rerender" }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.frames).toMatchObject([ + { + data: { context: { phase: "initialValue" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { context: { phase: "initialValue" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { context: { phase: "rerender" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + // NOTE: We only test the `false` -> `true` path here. If the option changes + // from `true` -> `false`, the data has already been canonized, so it has no + // effect on the output. 
+ it("returns canonical results immediately when `canonizeResults` changes from `false` to `true` between renders", async () => { + interface Result { + __typename: string; + value: number; + } + + interface Data { + results: Result[]; + } + + const cache = new InMemoryCache({ + typePolicies: { + Result: { + keyFields: false, + }, + }, + }); + + const query: TypedDocumentNode<Data> = gql` + query { + results { + value + } + } + `; + + const results: Result[] = [ + { __typename: "Result", value: 0 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 1 }, + { __typename: "Result", value: 2 }, + { __typename: "Result", value: 3 }, + { __typename: "Result", value: 5 }, + ]; + + cache.writeQuery({ + query, + data: { results }, + }); + + function verifyCanonicalResults(data: Data, canonized: boolean) { + const resultSet = new Set(data.results); + const values = Array.from(resultSet).map((item) => item.value); + + expect(data).toEqual({ results }); + + if (canonized) { + expect(data.results.length).toBe(6); + expect(resultSet.size).toBe(5); + expect(values).toEqual([0, 1, 2, 3, 5]); + } else { + expect(data.results.length).toBe(6); + expect(resultSet.size).toBe(6); + expect(values).toEqual([0, 1, 1, 2, 3, 5]); + } + } + + const { result, rerender, renders } = renderSuspenseHook( + ({ canonizeResults }) => useSuspenseQuery(query, { canonizeResults }), + { cache, initialProps: { canonizeResults: false } } + ); + + verifyCanonicalResults(result.current.data, false); + + rerender({ canonizeResults: true }); + + verifyCanonicalResults(result.current.data, true); + expect(renders.count).toBe(2); + }); + + it("applies changed `refetchWritePolicy` to next fetch when changing between renders", async () => { + const query: TypedDocumentNode< + { primes: number[] }, + { min: number; max: number } + > = gql` + query GetPrimes($min: number, $max: number) { + primes(min: $min, max: $max) + } + `; + + const mocks = [ + { + request: { query, variables: { min: 0, max: 12 } }, + result: { data: { primes: [2, 3, 5, 7, 11] } }, + }, + { + request: { query, variables: { min: 12, max: 30 } }, + result: { data: { primes: [13, 17, 19, 23, 29] } }, + delay: 10, + }, + { + request: { query, variables: { min: 30, max: 50 } }, + result: { data: { primes: [31, 37, 41, 43, 47] } }, + delay: 10, + }, + ]; + + const mergeParams: [number[] | undefined, number[]][] = []; + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + primes: { + keyArgs: false, + merge(existing: number[] | undefined, incoming: number[]) { + mergeParams.push([existing, incoming]); + return existing ? 
existing.concat(incoming) : incoming; + }, + }, + }, + }, + }, + }); + + const { result, rerender } = renderSuspenseHook( + ({ refetchWritePolicy }) => + useSuspenseQuery(query, { + variables: { min: 0, max: 12 }, + refetchWritePolicy, + }), + { + cache, + mocks, + initialProps: { refetchWritePolicy: "merge" as RefetchWritePolicy }, + } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([[undefined, [2, 3, 5, 7, 11]]]); + + act(() => { + result.current.refetch({ min: 12, max: 30 }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { primes: [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + ]); + + rerender({ refetchWritePolicy: "overwrite" }); + + act(() => { + result.current.refetch({ min: 30, max: 50 }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(mergeParams).toEqual([ + [undefined, [2, 3, 5, 7, 11]], + [ + [2, 3, 5, 7, 11], + [13, 17, 19, 23, 29], + ], + [undefined, [31, 37, 41, 43, 47]], + ]); + }); + + it("applies `returnPartialData` on next fetch when it changes between renders", async () => { + const fullQuery = gql` + query { + character { + __typename + id + name + } + } + `; + + const partialQuery = gql` + query { + character { + __typename + id + } + } + `; + + const mocks = [ + { + request: { query: fullQuery }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange", + }, + }, + }, + }, + { + request: { query: fullQuery }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange (refetched)", + }, + }, + }, + delay: 100, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query: partialQuery, + data: { character: { __typename: "Character", id: "1" } }, + }); + + const { result, renders, rerender } = renderSuspenseHook( + ({ returnPartialData }) => + useSuspenseQuery(fullQuery, { returnPartialData }), + { cache, mocks, initialProps: { returnPartialData: false } } + ); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ returnPartialData: true }); + + cache.modify({ + id: cache.identify({ __typename: "Character", id: "1" }), + fields: { + name: (_, { DELETE }) => DELETE, + }, + }); + + await waitFor(() => { + expect(result.current.data).toEqual({ + character: { __typename: "Character", id: "1" }, + }); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(5); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { character: { __typename: "Character", id: "1" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + 
error: undefined, + }, + { + data: { character: { __typename: "Character", id: "1" } }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("applies updated `fetchPolicy` on next fetch when it changes between renders", async () => { + const query = gql` + query { + character { + __typename + id + name + } + } + `; + + const mocks = [ + { + request: { query }, + result: { + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange", + }, + }, + }, + delay: 10, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + }); + + const { result, /* renders, */ rerender } = renderSuspenseHook( + ({ fetchPolicy }) => useSuspenseQuery(query, { fetchPolicy }), + { + cache, + mocks, + initialProps: { + fetchPolicy: "cache-first" as SuspenseQueryHookFetchPolicy, + }, + } + ); + + expect(result.current).toMatchObject({ + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + rerender({ fetchPolicy: "no-cache" }); + + const cacheKey = cache.identify({ __typename: "Character", id: "1" })!; + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current.data).toEqual({ + character: { + __typename: "Character", + id: "1", + name: "Doctor Strange", + }, + }); + }); + + // Because we switched to a `no-cache` fetch policy, we should not see the + // newly fetched data in the cache after the fetch occured. + expect(cache.extract()[cacheKey]).toEqual({ + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }); + + // TODO: Determine why there is an extra render. Unfortunately this is hard + // to track down because the test passes if I run only this test or add a + // `console.log` statement to the `handleNext` function in `QueryReference`. + // expect(renders.count).toBe(4); + // expect(renders.suspenseCount).toBe(1); + // expect(renders.frames).toMatchObject([ + // { + // data: { + // character: { + // __typename: 'Character', + // id: '1', + // name: 'Doctor Strangecache', + // }, + // }, + // networkStatus: NetworkStatus.ready, + // error: undefined, + // }, + // { + // data: { + // character: { + // __typename: 'Character', + // id: '1', + // name: 'Doctor Strangecache', + // }, + // }, + // networkStatus: NetworkStatus.ready, + // error: undefined, + // }, + // { + // ...mocks[0].result, + // networkStatus: NetworkStatus.ready, + // error: undefined, + // }, + // ]); + }); + + it("properly handles changing options along with changing `variables`", async () => { + const query = gql` + query ($id: ID!) 
{ + character(id: $id) { + __typename + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + errors: [new GraphQLError("oops")], + }, + delay: 10, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { + character: { + __typename: "Character", + id: "2", + name: "Hulk", + }, + }, + }, + delay: 10, + }, + ]; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + variables: { + id: "1", + }, + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + }); + + const { result, renders, rerender } = renderSuspenseHook( + ({ errorPolicy, variables }) => + useSuspenseQuery(query, { errorPolicy, variables }), + { + cache, + mocks, + initialProps: { + errorPolicy: "none" as ErrorPolicy, + variables: { id: "1" }, + }, + } + ); + + expect(result.current).toMatchObject({ + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + rerender({ errorPolicy: "none", variables: { id: "2" } }); + + expect(renders.suspenseCount).toBe(1); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + character: { + __typename: "Character", + id: "2", + name: "Hulk", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + rerender({ errorPolicy: "all", variables: { id: "1" } }); + + act(() => { + result.current.refetch(); + }); + + const expectedError = new ApolloError({ + graphQLErrors: [new GraphQLError("oops")], + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + character: { + __typename: "Character", + id: "1", + name: "Doctor Strangecache", + }, + }, + networkStatus: NetworkStatus.error, + error: expectedError, + }); + }); + + expect(renders.errorCount).toBe(0); + }); + + it("does not oversubscribe when suspending multiple times", async () => { + const query = gql` + query UserQuery($id: String!) 
{ + user(id: $id) { + id + name + } + } + `; + + const mocks = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel (updated)" } }, + }, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { user: { id: "1", name: "Captain Marvel (updated again)" } }, + }, + }, + ]; + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link: new MockLink(mocks), + }); + + const { result } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { id: "1" } }), + { client, initialProps: { id: "1" } } + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[0].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[1].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + act(() => { + result.current.refetch(); + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + ...mocks[2].result, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(client.getObservableQueries().size).toBe(1); + }); + + it("suspends deferred queries until initial chunk loads then streams in data as it loads", async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { link } + ); + + expect(renders.suspenseCount).toBe(1); + + link.simulateResult({ + result: { + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it.each<SuspenseQueryHookFetchPolicy>([ + "cache-first", + "network-only", + "no-cache", + "cache-and-network", + ])( + 'suspends deferred queries until initial chunk loads then streams in data as it loads when using a "%s" fetch policy', + async (fetchPolicy) => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy }), + { link } + ); + + expect(renders.suspenseCount).toBe(1); + + link.simulateResult({ + result: { + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { + greeting: { message: "Hello world", __typename: "Greeting" }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + } + ); + + it('does not suspend deferred queries with data in the cache and using a "cache-first" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-first" }), + { cache } + ); + + expect(result.current).toMatchObject({ + data: { + greeting: { + message: "Hello world", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not suspend deferred queries with partial data in the cache and using a "cache-first" fetch policy with `returnPartialData`', async () => { + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + + // We are intentionally writing partial data to the cache. Supress console + // warnings to avoid unnecessary noise in the test. 
+ const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + consoleSpy.mockRestore(); + + const { result, renders } = renderSuspenseHook( + () => + useSuspenseQuery(query, { + fetchPolicy: "cache-first", + returnPartialData: true, + }), + { cache, link } + ); + + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + link.simulateResult({ + result: { + data: { greeting: { message: "Hello world", __typename: "Greeting" } }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { + greeting: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it('does not suspend deferred queries with data in the cache and using a "cache-and-network" fetch policy', async () => { + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + const cache = new InMemoryCache(); + const client = new ApolloClient({ cache, link }); + + cache.writeQuery({ + query, + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { fetchPolicy: "cache-and-network" }), + { client } + ); + + expect(result.current).toMatchObject({ + data: { + greeting: { + message: "Hello cached", + __typename: "Greeting", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.loading, + error: undefined, + }); + + link.simulateResult({ + result: { + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + __typename: "Greeting", + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(0); + expect(renders.frames).toMatchObject([ + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello cached", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.loading, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Cached Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends deferred queries with lists and properly patches results", async () => { + const query = gql` + query { + greetings { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { link } + ); + + expect(renders.suspenseCount).toBe(1); + + link.simulateResult({ + result: { + data: { + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Alice" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + data: { + __typename: "Greeting", + recipient: { __typename: "Person", name: "Bob" }, + }, + path: ["greetings", 1], + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + recipient: { __typename: "Person", name: "Bob" }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { + greetings: [ + { __typename: "Greeting", message: "Hello world" }, + { __typename: "Greeting", message: "Hello again" }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { __typename: "Person", name: "Alice" }, + }, + { + __typename: "Greeting", + message: "Hello again", + recipient: { __typename: "Person", name: "Bob" }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("suspends queries with deferred fragments in lists and properly merges arrays", async () => { + const query = gql` + query DeferVariation { + allProducts { + delivery { + ...MyFragment @defer + } + sku + id + } + } + + fragment MyFragment on DeliveryEstimates { + estimatedDelivery + fastestDelivery + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { link } + ); + + expect(renders.suspenseCount).toBe(1); + + link.simulateResult({ + result: { + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + 
sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + hasNext: true, + incremental: [ + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 0, "delivery"], + }, + { + data: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + path: ["allProducts", 1, "delivery"], + }, + ], + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + allProducts: [ + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-federation", + sku: "federation", + }, + { + __typename: "Product", + delivery: { + __typename: "DeliveryEstimates", + estimatedDelivery: "6/25/2021", + fastestDelivery: "6/24/2021", + }, + id: "apollo-studio", + sku: "studio", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + }); + + it("incrementally rerenders data returned by a `refetch` for a deferred query", async () => { + const query = gql` + query { + greeting { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ link, cache }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { client } + ); + + link.simulateResult({ + result: { + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + let refetchPromise: Promise<ApolloQueryResult<unknown>>; + act(() => { + refetchPromise = result.current.refetch(); + }); + + link.simulateResult({ + result: { + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + await expect(refetchPromise!).resolves.toEqual({ + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(6); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("incrementally renders data returned after skipping a deferred query", async () => { + const query = gql` + query { + greeting { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache(); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ link, cache }); + + const { result, rerender, renders } = renderSuspenseHook( + ({ skip }) => useSuspenseQuery(query, { skip }), + { client, initialProps: { skip: true } } + ); + + expect(result.current).toMatchObject({ + data: undefined, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + rerender({ skip: false }); + + expect(renders.suspenseCount).toBe(1); + + link.simulateResult({ + result: { + data: { greeting: { __typename: "Greeting", message: "Hello world" } }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greeting"], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(4); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { data: undefined, networkStatus: NetworkStatus.ready, error: undefined }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greeting: { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + // TODO: This test is a bit of a lie. `fetchMore` should incrementally + // rerender when using `@defer` but there is currently a bug in the core + // implementation that prevents updates until the final result is returned. + // This test reflects the behavior as it exists today, but will need + // to be updated once the core bug is fixed. + // + // NOTE: A duplicate it.failng test has been added right below this one with + // the expected behavior added in (i.e. the commented code in this test). Once + // the core bug is fixed, this test can be removed in favor of the other test. + // + // https://github.com/apollographql/apollo-client/issues/11034 + it("rerenders data returned by `fetchMore` for a deferred query", async () => { + const query = gql` + query ($offset: Int) { + greetings(offset: $offset) { + message + ... 
@defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ link, cache }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { client } + ); + + link.simulateResult({ + result: { + data: { + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + let fetchMorePromise: Promise<ApolloQueryResult<unknown>>; + act(() => { + fetchMorePromise = result.current.fetchMore({ variables: { offset: 1 } }); + }); + + link.simulateResult({ + result: { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + hasNext: true, + }, + }); + + // TODO: Re-enable once the core bug is fixed + // await waitFor(() => { + // expect(result.current).toMatchObject({ + // data: { + // greetings: [ + // { + // __typename: 'Greeting', + // message: 'Hello world', + // recipient: { + // __typename: 'Person', + // name: 'Alice', + // }, + // }, + // { + // __typename: 'Greeting', + // message: 'Goodbye', + // }, + // ], + // }, + // networkStatus: NetworkStatus.ready, + // error: undefined, + // }); + // }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + await expect(fetchMorePromise!).resolves.toEqual({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + loading: false, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(5); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + // TODO: Re-enable when the core `fetchMore` bug is fixed + // 
{ + // data: { + // greetings: [ + // { + // __typename: 'Greeting', + // message: 'Hello world', + // recipient: { + // __typename: 'Person', + // name: 'Alice', + // }, + // }, + // { + // __typename: 'Greeting', + // message: 'Goodbye', + // }, + // ], + // }, + // networkStatus: NetworkStatus.ready, + // error: undefined, + // }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + // TODO: This is a duplicate of the test above, but with the expected behavior + // added (hence the `it.failing`). Remove the previous test once issue #11034 + // is fixed. + // + // https://github.com/apollographql/apollo-client/issues/11034 + it.failing( + "incrementally rerenders data returned by a `fetchMore` for a deferred query", + async () => { + const query = gql` + query ($offset: Int) { + greetings(offset: $offset) { + message + ... @defer { + recipient { + name + } + } + } + } + `; + + const cache = new InMemoryCache({ + typePolicies: { + Query: { + fields: { + greetings: offsetLimitPagination(), + }, + }, + }, + }); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ link, cache }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { variables: { offset: 0 } }), + { client } + ); + + link.simulateResult({ + result: { + data: { + greetings: [{ __typename: "Greeting", message: "Hello world" }], + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Alice", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + let fetchMorePromise: Promise<ApolloQueryResult<unknown>>; + act(() => { + fetchMorePromise = result.current.fetchMore({ + variables: { offset: 1 }, + }); + }); + + link.simulateResult({ + result: { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + data: { + recipient: { name: "Bob", __typename: "Person" }, + }, + path: ["greetings", 0], + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: 
"Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + await expect(fetchMorePromise!).resolves.toEqual({ + data: { + greetings: [ + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + loading: false, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + expect(renders.count).toBe(5); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + greetings: [ + { + __typename: "Greeting", + message: "Hello world", + recipient: { + __typename: "Person", + name: "Alice", + }, + }, + { + __typename: "Greeting", + message: "Goodbye", + recipient: { + __typename: "Person", + name: "Bob", + }, + }, + ], + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + } + ); + + it("throws network errors returned by deferred queries", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { renders } = renderSuspenseHook(() => useSuspenseQuery(query), { + link, + }); + + link.simulateResult({ + error: new Error("Could not fetch"), + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toEqual(new Error("Could not fetch")); + expect(error.graphQLErrors).toEqual([]); + + consoleSpy.mockRestore(); + }); + + it("throws graphql errors returned by deferred queries", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + query { + greeting { + message + ... 
on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { renders } = renderSuspenseHook(() => useSuspenseQuery(query), { + link, + }); + + link.simulateResult({ + result: { + errors: [new GraphQLError("Could not fetch greeting")], + }, + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toBeNull(); + expect(error.graphQLErrors).toEqual([ + new GraphQLError("Could not fetch greeting"), + ]); + + consoleSpy.mockRestore(); + }); + + it("throws errors returned by deferred queries that include partial data", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + query { + greeting { + message + ... on Greeting @defer { + recipient { + name + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { renders } = renderSuspenseHook(() => useSuspenseQuery(query), { + link, + }); + + link.simulateResult({ + result: { + data: { greeting: null }, + errors: [new GraphQLError("Could not fetch greeting")], + }, + }); + + await waitFor(() => expect(renders.errorCount).toBe(1)); + + expect(renders.errors.length).toBe(1); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toEqual([]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toBeNull(); + expect(error.graphQLErrors).toEqual([ + new GraphQLError("Could not fetch greeting"), + ]); + + consoleSpy.mockRestore(); + }); + + it("discards partial data and throws errors returned in incremental chunks", async () => { + const consoleSpy = jest.spyOn(console, "error").mockImplementation(); + + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query), + { link } + ); + + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + data: { + homeWorld: null, + }, + }, + // This chunk is ignored since errorPolicy `none` throws away partial + // data + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(renders.errorCount).toBe(1); + }); + + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + + const [error] = renders.errors as ApolloError[]; + + expect(error).toBeInstanceOf(ApolloError); + expect(error.networkError).toBeNull(); + expect(error.graphQLErrors).toEqual([ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ]); + + consoleSpy.mockRestore(); + }); + + it("adds partial data and does not throw errors returned in incremental chunks but returns them in `error` property with errorPolicy set to `all`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { link } + ); + + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + data: { + homeWorld: null, + }, + }, + // Unlike the default (errorPolicy = `none`), this data will be + // added to the final result + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.error, + error: new ApolloError({ + graphQLErrors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + }), + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.error, + error: new ApolloError({ + graphQLErrors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + }), + }, + ]); + }); + + it("adds partial data and discards errors returned in incremental chunks with errorPolicy set to `ignore`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const link = new MockSubscriptionLink(); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { link } + ); + + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult({ + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + data: { + homeWorld: null, + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(3); + expect(renders.suspenseCount).toBe(1); + expect(renders.frames).toMatchObject([ + { + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + }, + { + id: "1003", + name: "Leia Organa", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + hero: { + heroFriends: [ + { + id: "1000", + name: "Luke Skywalker", + homeWorld: null, + }, + { + id: "1003", + name: "Leia Organa", + homeWorld: "Alderaan", + }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("can refetch and respond to cache updates after encountering an error in an incremental chunk for a deferred query when `errorPolicy` is `all`", async () => { + const query = gql` + query { + hero { + name + heroFriends { + id + name + ... 
@defer { + homeWorld + } + } + } + } + `; + + const cache = new InMemoryCache(); + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ link, cache }); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "all" }), + { client } + ); + + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + errors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + data: { + homeWorld: null, + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.error, + error: new ApolloError({ + graphQLErrors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + }), + }); + }); + + let refetchPromise: Promise<ApolloQueryResult<unknown>>; + act(() => { + refetchPromise = result.current.refetch(); + }); + + link.simulateResult({ + result: { + data: { + hero: { + name: "R2-D2", + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + }, + }, + hasNext: true, + }, + }); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + link.simulateResult( + { + result: { + incremental: [ + { + path: ["hero", "heroFriends", 0], + data: { + homeWorld: "Alderaan", + }, + }, + { + path: ["hero", "heroFriends", 1], + data: { + homeWorld: "Alderaan", + }, + }, + ], + hasNext: false, + }, + }, + true + ); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + await expect(refetchPromise!).resolves.toEqual({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + loading: false, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + + cache.updateQuery({ query }, (data) => ({ + hero: { + ...data.hero, + name: "C3PO", + }, + })); + + await waitFor(() => { + expect(result.current).toMatchObject({ + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: 
"Leia Organa", homeWorld: "Alderaan" }, + ], + name: "C3PO", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }); + }); + + expect(renders.count).toBe(7); + expect(renders.suspenseCount).toBe(2); + expect(renders.frames).toMatchObject([ + { + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.error, + error: new ApolloError({ + graphQLErrors: [ + new GraphQLError( + "homeWorld for character with ID 1000 could not be fetched.", + { path: ["hero", "heroFriends", 0, "homeWorld"] } + ), + ], + }), + }, + { + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker" }, + { id: "1003", name: "Leia Organa" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "R2-D2", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + { + data: { + hero: { + heroFriends: [ + { id: "1000", name: "Luke Skywalker", homeWorld: "Alderaan" }, + { id: "1003", name: "Leia Organa", homeWorld: "Alderaan" }, + ], + name: "C3PO", + }, + }, + networkStatus: NetworkStatus.ready, + error: undefined, + }, + ]); + }); + + it("can subscribe to subscriptions and react to cache updates via `subscribeToMore`", async () => { + interface SubscriptionData { + greetingUpdated: string; + } + + interface QueryData { + greeting: string; + } + + type UpdateQueryFn = NonNullable< + SubscribeToMoreOptions< + QueryData | undefined, + OperationVariables, + SubscriptionData + >["updateQuery"] + >; + + const { mocks, query } = useSimpleQueryCase(); + + const wsLink = new MockSubscriptionLink(); + const mockLink = new MockLink(mocks); + + const link = split( + ({ query }) => { + const definition = getMainDefinition(query); + + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ); + }, + wsLink, + mockLink + ); + + const { result, renders } = renderSuspenseHook( + () => useSuspenseQuery(query, { errorPolicy: "ignore" }), + { link } + ); + + await waitFor(() => { + expect(result.current.data).toEqual({ greeting: "Hello" }); + }); + + const updateQuery = jest.fn< + ReturnType<UpdateQueryFn>, + Parameters<UpdateQueryFn> + >((_, { subscriptionData: { data } }) => { + return { greeting: data.greetingUpdated }; + }); + + result.current.subscribeToMore<SubscriptionData>({ + document: gql` + subscription { + greetingUpdated + } + `, + updateQuery, + }); + + wsLink.simulateResult({ + result: { + data: { + greetingUpdated: "Subscription hello", + }, + }, + }); + + await waitFor(() => { + expect(result.current.data).toEqual({ + greeting: "Subscription hello", + }); + }); + + expect(updateQuery).toHaveBeenCalledTimes(1); + expect(updateQuery).toHaveBeenCalledWith( + { greeting: "Hello" }, + { + subscriptionData: { + data: { greetingUpdated: "Subscription hello" }, + }, + variables: {}, + } + ); + + expect(renders.count).toBe(3); + expect(renders.frames).toMatchObject([ + { data: { greeting: "Hello" }, networkStatus: NetworkStatus.ready }, + { + data: { greeting: "Subscription hello" }, + 
networkStatus: NetworkStatus.ready, + }, + ]); + }); + + it("works with useDeferredValue", async () => { + const user = userEvent.setup(); + + interface Variables { + query: string; + } + + interface Data { + search: { query: string }; + } + + const QUERY: TypedDocumentNode<Data, Variables> = gql` + query SearchQuery($query: String!) { + search(query: $query) { + query + } + } + `; + + const link = new ApolloLink(({ variables }) => { + return new Observable((observer) => { + setTimeout(() => { + observer.next({ + data: { search: { query: variables.query } }, + }); + observer.complete(); + }, 10); + }); + }); + + const client = new ApolloClient({ + link, + cache: new InMemoryCache(), + }); + + function App() { + const [query, setValue] = React.useState(""); + const deferredQuery = React.useDeferredValue(query); + + return ( + <ApolloProvider client={client}> + <label htmlFor="searchInput">Search</label> + <input + id="searchInput" + type="text" + value={query} + onChange={(e) => setValue(e.target.value)} + /> + <Suspense fallback={<SuspenseFallback />}> + <Results query={deferredQuery} /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Results({ query }: { query: string }) { + const { data } = useSuspenseQuery(QUERY, { variables: { query } }); + + return <div data-testid="result">{data.search.query}</div>; + } + + render(<App />); + + const input = screen.getByLabelText("Search"); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("result")).toBeInTheDocument(); + + await act(() => user.type(input, "ab")); + + await waitFor(() => { + expect(screen.getByTestId("result")).toHaveTextContent("ab"); + }); + + await act(() => user.type(input, "c")); + + // useDeferredValue will try rerendering the component with the new value + // in the background. If it suspends with the new value, React will show the + // stale UI until the component is done suspending. + // + // Here we should not see the suspense fallback while the component suspends + // until the search finishes loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + expect(screen.getByTestId("result")).toHaveTextContent("ab"); + + // Eventually we should see the updated text content once its done + // suspending. + await waitFor(() => { + expect(screen.getByTestId("result")).toHaveTextContent("abc"); + }); + }); + + it("works with startTransition to change variables", async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: "2" } }, + result: { + data: { todo: { id: "2", name: "Take out trash", completed: true } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + const [id, setId] = React.useState("1"); + + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Todo id={id} onChange={setId} /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ + id, + onChange, + }: { + id: string; + onChange: (id: string) => void; + }) { + const { data } = useSuspenseQuery(query, { variables: { id } }); + const [isPending, startTransition] = React.useTransition(); + const { todo } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + onChange("2"); + }); + }} + > + Refresh + </button> + <div data-testid="todo" aria-busy={isPending}> + {todo.name} + {todo.completed && " (completed)"} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("todo")).toBeInTheDocument(); + + const todo = screen.getByTestId("todo"); + const button = screen.getByText("Refresh"); + + expect(todo).toHaveTextContent("Clean room"); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todo).toHaveAttribute("aria-busy", "true"); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo).toHaveTextContent("Clean room"); + + // Eventually we should see the updated todo content once its done + // suspending. + await waitFor(() => { + expect(todo).toHaveTextContent("Take out trash (completed)"); + }); + }); + + it("`refetch` works with startTransition to allow React to show stale UI until finished suspending", async () => { + type Variables = { + id: string; + }; + + interface Data { + todo: { + id: string; + name: string; + completed: boolean; + }; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodoItemQuery($id: ID!) 
{ + todo(id: $id) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: false } }, + }, + delay: 10, + }, + { + request: { query, variables: { id: "1" } }, + result: { + data: { todo: { id: "1", name: "Clean room", completed: true } }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache(), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Todo id="1" /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todo({ id }: { id: string }) { + const { data, refetch } = useSuspenseQuery(query, { variables: { id } }); + const [isPending, startTransition] = React.useTransition(); + const { todo } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + refetch(); + }); + }} + > + Refresh + </button> + <div data-testid="todo" aria-busy={isPending}> + {todo.name} + {todo.completed && " (completed)"} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("todo")).toBeInTheDocument(); + + const todo = screen.getByTestId("todo"); + const button = screen.getByText("Refresh"); + + expect(todo).toHaveTextContent("Clean room"); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todo).toHaveAttribute("aria-busy", "true"); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo).toHaveTextContent("Clean room"); + + // Eventually we should see the updated todo content once its done + // suspending. + await waitFor(() => { + expect(todo).toHaveTextContent("Clean room (completed)"); + }); + }); + + it("`fetchMore` works with startTransition to allow React to show stale UI until finished suspending", async () => { + type Variables = { + offset: number; + }; + + interface Todo { + __typename: "Todo"; + id: string; + name: string; + completed: boolean; + } + + interface Data { + todos: Todo[]; + } + const user = userEvent.setup(); + + const query: TypedDocumentNode<Data, Variables> = gql` + query TodosQuery($offset: Int!) 
{ + todos(offset: $offset) { + id + name + completed + } + } + `; + + const mocks: MockedResponse<Data, Variables>[] = [ + { + request: { query, variables: { offset: 0 } }, + result: { + data: { + todos: [ + { + __typename: "Todo", + id: "1", + name: "Clean room", + completed: false, + }, + ], + }, + }, + delay: 10, + }, + { + request: { query, variables: { offset: 1 } }, + result: { + data: { + todos: [ + { + __typename: "Todo", + id: "2", + name: "Take out trash", + completed: true, + }, + ], + }, + }, + delay: 10, + }, + ]; + + const client = new ApolloClient({ + link: new MockLink(mocks), + cache: new InMemoryCache({ + typePolicies: { + Query: { + fields: { + todos: offsetLimitPagination(), + }, + }, + }, + }), + }); + + function App() { + return ( + <ApolloProvider client={client}> + <Suspense fallback={<SuspenseFallback />}> + <Todos /> + </Suspense> + </ApolloProvider> + ); + } + + function SuspenseFallback() { + return <p>Loading</p>; + } + + function Todos() { + const { data, fetchMore } = useSuspenseQuery(query, { + variables: { offset: 0 }, + }); + const [isPending, startTransition] = React.useTransition(); + const { todos } = data; + + return ( + <> + <button + onClick={() => { + startTransition(() => { + fetchMore({ variables: { offset: 1 } }); + }); + }} + > + Load more + </button> + <div data-testid="todos" aria-busy={isPending}> + {todos.map((todo) => ( + <div data-testid={`todo:${todo.id}`} key={todo.id}> + {todo.name} + {todo.completed && " (completed)"} + </div> + ))} + </div> + </> + ); + } + + render(<App />); + + expect(screen.getByText("Loading")).toBeInTheDocument(); + + expect(await screen.findByTestId("todos")).toBeInTheDocument(); + + const todos = screen.getByTestId("todos"); + const todo1 = screen.getByTestId("todo:1"); + const button = screen.getByText("Load more"); + + expect(todo1).toBeInTheDocument(); + + await act(() => user.click(button)); + + // startTransition will avoid rendering the suspense fallback for already + // revealed content if the state update inside the transition causes the + // component to suspend. + // + // Here we should not see the suspense fallback while the component suspends + // until the todo is finished loading. Seeing the suspense fallback is an + // indication that we are suspending the component too late in the process. + expect(screen.queryByText("Loading")).not.toBeInTheDocument(); + + // We can ensure this works with isPending from useTransition in the process + expect(todos).toHaveAttribute("aria-busy", "true"); + + // Ensure we are showing the stale UI until the new todo has loaded + expect(todo1).toHaveTextContent("Clean room"); + + // Eventually we should see the updated todos content once its done + // suspending. 
+ await waitFor(() => { + expect(screen.getByTestId("todo:2")).toHaveTextContent( + "Take out trash (completed)" + ); + expect(todo1).toHaveTextContent("Clean room"); + }); + }); + + describe.skip("type tests", () => { + it("returns unknown when TData cannot be inferred", () => { + const query = gql` + query { + hello + } + `; + + const { data } = useSuspenseQuery(query); + + expectTypeOf(data).toEqualTypeOf<unknown>(); + }); + + it("disallows wider variables type than specified", () => { + const { query } = useVariablesQueryCase(); + + // @ts-expect-error should not allow wider TVariables type + useSuspenseQuery(query, { variables: { id: "1", foo: "bar" } }); + }); + + it("returns TData in default case", () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData | undefined>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData | undefined>(); + }); + + it('returns TData | undefined with errorPolicy: "ignore"', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + errorPolicy: "ignore", + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + errorPolicy: "ignore", + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData | undefined with errorPolicy: "all"', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + errorPolicy: "all", + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + errorPolicy: "all", + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it('returns TData with errorPolicy: "none"', () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + errorPolicy: "none", + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData | undefined>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + errorPolicy: "none", + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData | undefined>(); + }); + + it("returns DeepPartial<TData> with returnPartialData: true", () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + returnPartialData: true, + }); + + expectTypeOf(inferred).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + 
returnPartialData: true, + }); + + expectTypeOf(explicit).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns TData with returnPartialData: false", () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + returnPartialData: false, + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: false, + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + }); + + it("returns TData | undefined when skip is present", () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + skip: true, + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + skip: true, + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + + // TypeScript is too smart and using a `const` or `let` boolean variable + // for the `skip` option results in a false positive. Using an options + // object allows us to properly check for a dynamic case. + const options = { + skip: true, + }; + + const { data: dynamic } = useSuspenseQuery(query, { + skip: options.skip, + }); + + expectTypeOf(dynamic).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(dynamic).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns TData | undefined when using `skipToken` as options", () => { + const { query } = useVariablesQueryCase(); + const options = { + skip: true, + }; + + const { data: inferred } = useSuspenseQuery( + query, + options.skip ? skipToken : { variables: { id: "1" } } + ); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, options.skip ? skipToken : { variables: { id: "1" } }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns TData | undefined when using `skipToken` with undefined options", () => { + const { query } = useVariablesQueryCase(); + const options = { + skip: true, + }; + + const { data: inferred } = useSuspenseQuery( + query, + options.skip ? skipToken : undefined + ); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, options.skip ? 
skipToken : undefined); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData | undefined>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns DeepPartial<TData> | undefined when using `skipToken` as options with `returnPartialData`", () => { + const { query } = useVariablesQueryCase(); + const options = { + skip: true, + }; + + const { data: inferred } = useSuspenseQuery( + query, + options.skip ? skipToken : { returnPartialData: true } + ); + + expectTypeOf(inferred).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, options.skip ? skipToken : { returnPartialData: true }); + + expectTypeOf(explicit).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns TData when passing an option that does not affect TData", () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + fetchPolicy: "no-cache", + }); + + expectTypeOf(inferred).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(inferred).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + fetchPolicy: "no-cache", + }); + + expectTypeOf(explicit).toEqualTypeOf<VariablesCaseData>(); + expectTypeOf(explicit).not.toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + }); + + it("handles combinations of options", () => { + // TypeScript is too smart and using a `const` or `let` boolean variable + // for the `skip` option results in a false positive. Using an options + // object allows us to properly check for a dynamic case which is the + // typical usage of this option. 
+ const options = { + skip: true, + }; + + const { query } = useVariablesQueryCase(); + + const { data: inferredPartialDataIgnore } = useSuspenseQuery(query, { + returnPartialData: true, + errorPolicy: "ignore", + }); + + expectTypeOf(inferredPartialDataIgnore).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + inferredPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitPartialDataIgnore } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + errorPolicy: "ignore", + }); + + expectTypeOf(explicitPartialDataIgnore).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + explicitPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: inferredPartialDataNone } = useSuspenseQuery(query, { + returnPartialData: true, + errorPolicy: "none", + }); + + expectTypeOf(inferredPartialDataNone).toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + expectTypeOf( + inferredPartialDataNone + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitPartialDataNone } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + returnPartialData: true, + errorPolicy: "none", + }); + + expectTypeOf(explicitPartialDataNone).toEqualTypeOf< + DeepPartial<VariablesCaseData> + >(); + expectTypeOf( + explicitPartialDataNone + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: inferredSkipIgnore } = useSuspenseQuery(query, { + skip: options.skip, + errorPolicy: "ignore", + }); + + expectTypeOf(inferredSkipIgnore).toEqualTypeOf< + VariablesCaseData | undefined + >(); + expectTypeOf( + inferredPartialDataIgnore + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitSkipIgnore } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + skip: options.skip, + errorPolicy: "ignore", + }); + + expectTypeOf(explicitSkipIgnore).toEqualTypeOf< + VariablesCaseData | undefined + >(); + expectTypeOf(explicitSkipIgnore).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: inferredSkipNone } = useSuspenseQuery(query, { + skip: options.skip, + errorPolicy: "none", + }); + + expectTypeOf(inferredSkipNone).toEqualTypeOf< + VariablesCaseData | undefined + >(); + expectTypeOf(inferredSkipNone).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitSkipNone } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + skip: options.skip, + errorPolicy: "none", + }); + + expectTypeOf(explicitSkipNone).toEqualTypeOf< + VariablesCaseData | undefined + >(); + expectTypeOf(explicitSkipNone).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: inferredPartialDataNoneSkip } = useSuspenseQuery(query, { + skip: options.skip, + returnPartialData: true, + errorPolicy: "none", + }); + + expectTypeOf(inferredPartialDataNoneSkip).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + inferredPartialDataNoneSkip + ).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicitPartialDataNoneSkip } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + skip: options.skip, + returnPartialData: true, + errorPolicy: "none", + }); + + expectTypeOf(explicitPartialDataNoneSkip).toEqualTypeOf< + DeepPartial<VariablesCaseData> | undefined + >(); + expectTypeOf( + explicitPartialDataNoneSkip + ).not.toEqualTypeOf<VariablesCaseData>(); + }); + + it("returns correct TData type when combined options that do not 
affect TData", () => { + const { query } = useVariablesQueryCase(); + + const { data: inferred } = useSuspenseQuery(query, { + fetchPolicy: "no-cache", + returnPartialData: true, + errorPolicy: "none", + }); + + expectTypeOf(inferred).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(inferred).not.toEqualTypeOf<VariablesCaseData>(); + + const { data: explicit } = useSuspenseQuery< + VariablesCaseData, + VariablesCaseVariables + >(query, { + fetchPolicy: "no-cache", + returnPartialData: true, + errorPolicy: "none", + }); + + expectTypeOf(explicit).toEqualTypeOf<DeepPartial<VariablesCaseData>>(); + expectTypeOf(explicit).not.toEqualTypeOf<VariablesCaseData>(); + }); + }); +}); diff --git a/src/react/hooks/internal/__tests__/useDeepMemo.test.ts b/src/react/hooks/internal/__tests__/useDeepMemo.test.ts new file mode 100644 --- /dev/null +++ b/src/react/hooks/internal/__tests__/useDeepMemo.test.ts @@ -0,0 +1,47 @@ +import { renderHook } from "@testing-library/react"; +import { useDeepMemo } from "../useDeepMemo"; + +describe("useDeepMemo", () => { + it("ensures the value is initialized", () => { + const { result } = renderHook(() => + useDeepMemo(() => ({ test: true }), []) + ); + + expect(result.current).toEqual({ test: true }); + }); + + it("returns memoized value when its dependencies are deeply equal", () => { + const { result, rerender } = renderHook( + ({ active, items, user }) => { + useDeepMemo(() => ({ active, items, user }), [items, name, active]); + }, + { + initialProps: { + active: true, + items: [1, 2], + user: { name: "John Doe" }, + }, + } + ); + + const previousResult = result.current; + + rerender({ active: true, items: [1, 2], user: { name: "John Doe" } }); + + expect(result.current).toBe(previousResult); + }); + + it("returns updated value if a dependency changes", () => { + const { result, rerender } = renderHook( + ({ items }) => useDeepMemo(() => ({ items }), [items]), + { initialProps: { items: [1] } } + ); + + const previousResult = result.current; + + rerender({ items: [1, 2] }); + + expect(result.current).not.toBe(previousResult); + expect(result.current).toEqual({ items: [1, 2] }); + }); +}); diff --git a/src/react/parser/__tests__/parser.test.ts b/src/react/parser/__tests__/parser.test.ts --- a/src/react/parser/__tests__/parser.test.ts +++ b/src/react/parser/__tests__/parser.test.ts @@ -1,11 +1,11 @@ -import gql from 'graphql-tag'; +import gql from "graphql-tag"; -import { parser, DocumentType } from '..'; +import { parser, DocumentType } from ".."; type OperationDefinition = any; -describe('parser', () => { - it('should error if both a query and a mutation is present', () => { +describe("parser", () => { + it("should error if both a query and a mutation is present", () => { const query = gql` query { user { @@ -13,7 +13,7 @@ describe('parser', () => { } } - mutation($t: String) { + mutation ($t: String) { addT(t: $t) { user { name @@ -25,7 +25,7 @@ describe('parser', () => { expect(parser.bind(null, query)).toThrowError(/react-apollo only supports/); }); - it('should error if multiple operations are present', () => { + it("should error if multiple operations are present", () => { const query = gql` query One { user { @@ -43,7 +43,7 @@ describe('parser', () => { expect(parser.bind(null, query)).toThrowError(/react-apollo only supports/); }); - it('should error if not a DocumentNode', () => { + it("should error if not a DocumentNode", () => { const query = ` query One { user { name } } `; @@ -52,7 +52,7 @@ describe('parser', () => { ); }); - it('should 
return the name of the operation', () => { + it("should return the name of the operation", () => { const query = gql` query One { user { @@ -60,7 +60,7 @@ describe('parser', () => { } } `; - expect(parser(query).name).toBe('One'); + expect(parser(query).name).toBe("One"); const mutation = gql` mutation One { @@ -69,7 +69,7 @@ describe('parser', () => { } } `; - expect(parser(mutation).name).toBe('One'); + expect(parser(mutation).name).toBe("One"); const subscription = gql` subscription One { @@ -78,10 +78,10 @@ describe('parser', () => { } } `; - expect(parser(subscription).name).toBe('One'); + expect(parser(subscription).name).toBe("One"); }); - it('should return data as the name of the operation if not named', () => { + it("should return data as the name of the operation if not named", () => { const query = gql` query { user { @@ -89,7 +89,7 @@ describe('parser', () => { } } `; - expect(parser(query).name).toBe('data'); + expect(parser(query).name).toBe("data"); const unnamedQuery = gql` { @@ -98,7 +98,7 @@ describe('parser', () => { } } `; - expect(parser(unnamedQuery).name).toBe('data'); + expect(parser(unnamedQuery).name).toBe("data"); const mutation = gql` mutation { @@ -107,7 +107,7 @@ describe('parser', () => { } } `; - expect(parser(mutation).name).toBe('data'); + expect(parser(mutation).name).toBe("data"); const subscription = gql` subscription { @@ -116,10 +116,10 @@ describe('parser', () => { } } `; - expect(parser(subscription).name).toBe('data'); + expect(parser(subscription).name).toBe("data"); }); - it('should return the type of operation', () => { + it("should return the type of operation", () => { const query = gql` query One { user { @@ -157,7 +157,7 @@ describe('parser', () => { expect(parser(subscription).type).toBe(DocumentType.Subscription); }); - it('should return the variable definitions of the operation', () => { + it("should return the variable definitions of the operation", () => { const query = gql` query One($t: String!) 
{ user(t: $t) { @@ -191,7 +191,7 @@ describe('parser', () => { ); }); - it('should not error if the operation has no variables', () => { + it("should not error if the operation has no variables", () => { const query = gql` query { user(t: $t) { diff --git a/src/react/ssr/__tests__/useLazyQuery.test.tsx b/src/react/ssr/__tests__/useLazyQuery.test.tsx --- a/src/react/ssr/__tests__/useLazyQuery.test.tsx +++ b/src/react/ssr/__tests__/useLazyQuery.test.tsx @@ -1,15 +1,15 @@ /** @jest-environment node */ -import React from 'react'; -import { DocumentNode } from 'graphql'; -import gql from 'graphql-tag'; -import { mockSingleLink } from '../../../testing'; -import { ApolloClient } from '../../../core'; -import { InMemoryCache } from '../../../cache'; -import { ApolloProvider } from '../../context'; -import { useLazyQuery } from '../../hooks'; -import { renderToStringWithData } from '../../ssr'; +import React from "react"; +import { DocumentNode } from "graphql"; +import gql from "graphql-tag"; +import { mockSingleLink } from "../../../testing"; +import { ApolloClient } from "../../../core"; +import { InMemoryCache } from "../../../cache"; +import { ApolloProvider } from "../../context"; +import { useLazyQuery } from "../../hooks"; +import { renderToStringWithData } from "../../ssr"; -describe('useLazyQuery Hook SSR', () => { +describe("useLazyQuery Hook SSR", () => { const CAR_QUERY: DocumentNode = gql` query { cars { @@ -23,24 +23,24 @@ describe('useLazyQuery Hook SSR', () => { const CAR_RESULT_DATA = { cars: [ { - make: 'Audi', - model: 'RS8', - vin: 'DOLLADOLLABILL', - __typename: 'Car' - } - ] + make: "Audi", + model: "RS8", + vin: "DOLLADOLLABILL", + __typename: "Car", + }, + ], }; - it('should run query only after calling the lazy mode execute function', () => { + it("should run query only after calling the lazy mode execute function", () => { const link = mockSingleLink({ request: { query: CAR_QUERY }, - result: { data: CAR_RESULT_DATA } + result: { data: CAR_RESULT_DATA }, }); const client = new ApolloClient({ cache: new InMemoryCache(), link, - ssrMode: true + ssrMode: true, }); const Component = () => { @@ -66,7 +66,7 @@ describe('useLazyQuery Hook SSR', () => { </ApolloProvider> ); - return renderToStringWithData(app).then(markup => { + return renderToStringWithData(app).then((markup) => { expect(markup).toMatch(/Audi/); }); }); diff --git a/src/react/ssr/__tests__/useQuery.test.tsx b/src/react/ssr/__tests__/useQuery.test.tsx --- a/src/react/ssr/__tests__/useQuery.test.tsx +++ b/src/react/ssr/__tests__/useQuery.test.tsx @@ -1,15 +1,15 @@ /** @jest-environment node */ -import React from 'react'; -import { DocumentNode } from 'graphql'; -import gql from 'graphql-tag'; -import { MockedProvider, mockSingleLink } from '../../../testing'; -import { ApolloClient } from '../../../core'; -import { InMemoryCache } from '../../../cache'; -import { ApolloProvider } from '../../context'; -import { useApolloClient, useQuery } from '../../hooks'; -import { renderToStringWithData } from '..'; - -describe('useQuery Hook SSR', () => { +import React from "react"; +import { DocumentNode } from "graphql"; +import gql from "graphql-tag"; +import { MockedProvider, mockSingleLink } from "../../../testing"; +import { ApolloClient } from "../../../core"; +import { InMemoryCache } from "../../../cache"; +import { ApolloProvider } from "../../context"; +import { useApolloClient, useQuery } from "../../hooks"; +import { renderToStringWithData } from ".."; + +describe("useQuery Hook SSR", () => { const CAR_QUERY: 
DocumentNode = gql` query { cars { @@ -23,24 +23,24 @@ describe('useQuery Hook SSR', () => { const CAR_RESULT_DATA = { cars: [ { - make: 'Audi', - model: 'RS8', - vin: 'DOLLADOLLABILL', - __typename: 'Car' - } - ] + make: "Audi", + model: "RS8", + vin: "DOLLADOLLABILL", + __typename: "Car", + }, + ], }; const CAR_MOCKS = [ { request: { - query: CAR_QUERY + query: CAR_QUERY, }, - result: { data: CAR_RESULT_DATA } - } + result: { data: CAR_RESULT_DATA }, + }, ]; - it('should support SSR', () => { + it("should support SSR", () => { const Component = () => { const { loading, data } = useQuery(CAR_QUERY); if (!loading) { @@ -61,12 +61,12 @@ describe('useQuery Hook SSR', () => { </MockedProvider> ); - return renderToStringWithData(app).then(markup => { + return renderToStringWithData(app).then((markup) => { expect(markup).toMatch(/Audi/); }); }); - it('should initialize data as `undefined` when loading', () => { + it("should initialize data as `undefined` when loading", () => { const Component = () => { const { data, loading } = useQuery(CAR_QUERY); if (loading) { @@ -84,7 +84,7 @@ describe('useQuery Hook SSR', () => { return renderToStringWithData(app); }); - it('should skip SSR tree rendering and return a loading state if `ssr` option is `false`', async () => { + it("should skip SSR tree rendering and return a loading state if `ssr` option is `false`", async () => { let renderCount = 0; const Component = () => { const { data, loading } = useQuery(CAR_QUERY, { ssr: false }); @@ -105,13 +105,13 @@ describe('useQuery Hook SSR', () => { </MockedProvider> ); - return renderToStringWithData(app).then(result => { + return renderToStringWithData(app).then((result) => { expect(renderCount).toBe(1); - expect(result).toEqual(''); + expect(result).toEqual(""); }); }); - it('should skip SSR tree rendering and not return a loading state loading if `ssr` option is `false` and `skip` is `true`', async () => { + it("should skip SSR tree rendering and not return a loading state loading if `ssr` option is `false` and `skip` is `true`", async () => { let renderCount = 0; const Component = () => { const { data, loading } = useQuery(CAR_QUERY, { ssr: false, skip: true }); @@ -129,67 +129,63 @@ describe('useQuery Hook SSR', () => { </MockedProvider> ); - return renderToStringWithData(app).then(result => { + return renderToStringWithData(app).then((result) => { expect(renderCount).toBe(1); - expect(result).toEqual(''); + expect(result).toEqual(""); }); }); - it('should skip both SSR tree rendering and SSR component rendering if `ssr` option is `false` and `ssrMode` is `true`', - async () => { - const link = mockSingleLink({ - request: { query: CAR_QUERY }, - result: { data: CAR_RESULT_DATA } - }); + it("should skip both SSR tree rendering and SSR component rendering if `ssr` option is `false` and `ssrMode` is `true`", async () => { + const link = mockSingleLink({ + request: { query: CAR_QUERY }, + result: { data: CAR_RESULT_DATA }, + }); - const client = new ApolloClient({ - cache: new InMemoryCache(), - link, - ssrMode: true - }); + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + ssrMode: true, + }); - let renderCount = 0; - const Component = () => { - const { data, loading } = useQuery(CAR_QUERY, { ssr: false }); - - let content = null; - switch (renderCount) { - case 0: - expect(loading).toBeTruthy(); - expect(data).toBeUndefined(); - break; - case 1: // FAIL; should not render a second time - default: - throw new Error("Duplicate render"); - } + let renderCount = 0; + const Component = 
() => { + const { data, loading } = useQuery(CAR_QUERY, { ssr: false }); - renderCount += 1; - return content; - }; + let content = null; + switch (renderCount) { + case 0: + expect(loading).toBeTruthy(); + expect(data).toBeUndefined(); + break; + case 1: // FAIL; should not render a second time + default: + throw new Error("Duplicate render"); + } - const app = ( - <ApolloProvider client={client}> - <Component /> - </ApolloProvider> - ); + renderCount += 1; + return content; + }; - const view = await renderToStringWithData(app); - expect(renderCount).toBe(1); - expect(view).toEqual(''); - await new Promise((resolve) => setTimeout(resolve, 20)); - expect(renderCount).toBe(1); - expect(view).toEqual(''); - } - ); + const app = ( + <ApolloProvider client={client}> + <Component /> + </ApolloProvider> + ); - it('should skip SSR tree rendering if `skip` option is `true`', async () => { + const view = await renderToStringWithData(app); + expect(renderCount).toBe(1); + expect(view).toEqual(""); + await new Promise((resolve) => setTimeout(resolve, 20)); + expect(renderCount).toBe(1); + expect(view).toEqual(""); + }); + + it("should skip SSR tree rendering if `skip` option is `true`", async () => { let renderCount = 0; const Component = () => { - const { - loading, - networkStatus, - data, - } = useQuery(CAR_QUERY, { skip: true }); + const { loading, networkStatus, data } = useQuery(CAR_QUERY, { + skip: true, + }); renderCount += 1; expect(loading).toBeFalsy(); @@ -205,20 +201,17 @@ describe('useQuery Hook SSR', () => { </MockedProvider> ); - return renderToStringWithData(app).then(result => { + return renderToStringWithData(app).then((result) => { expect(renderCount).toBe(1); - expect(result).toBe(''); + expect(result).toBe(""); }); }); - it('should render SSR tree rendering if `skip` option is `true` for only one instance of the query', async () => { + it("should render SSR tree rendering if `skip` option is `true` for only one instance of the query", async () => { let renderCount = 0; const AnotherComponent = () => { - const { - loading, - data, - } = useQuery(CAR_QUERY, { skip: false }); + const { loading, data } = useQuery(CAR_QUERY, { skip: false }); renderCount += 1; @@ -236,10 +229,7 @@ describe('useQuery Hook SSR', () => { }; const Component = () => { - const { - loading, - data, - } = useQuery(CAR_QUERY, { skip: true }); + const { loading, data } = useQuery(CAR_QUERY, { skip: true }); renderCount += 1; expect(loading).toBeFalsy(); @@ -254,14 +244,14 @@ describe('useQuery Hook SSR', () => { </MockedProvider> ); - return renderToStringWithData(app).then(result => { + return renderToStringWithData(app).then((result) => { expect(renderCount).toBe(4); expect(result).toMatch(/Audi/); expect(result).toMatch(/RS8/); }); }); - it('should return data written previously to cache during SSR pass if using cache-only fetchPolicy', async () => { + it("should return data written previously to cache during SSR pass if using cache-only fetchPolicy", async () => { const cache = new InMemoryCache({ typePolicies: { Order: { @@ -290,11 +280,11 @@ describe('useQuery Hook SSR', () => { const initialData = { getSearchResults: { - __typename: 'SearchResults', - locale: 'en-US', + __typename: "SearchResults", + locale: "en-US", order: { - __typename: 'Order', - selection: 'RELEVANCE', + __typename: "Order", + selection: "RELEVANCE", }, pagination: { pageLimit: 3, @@ -310,10 +300,10 @@ describe('useQuery Hook SSR', () => { const spy = jest.fn(); const Component = () => { - useApolloClient().writeQuery({ query, data: 
initialData });; + useApolloClient().writeQuery({ query, data: initialData }); const { loading, data } = useQuery(query, { - fetchPolicy: 'cache-only', + fetchPolicy: "cache-only", }); spy(loading); @@ -323,30 +313,21 @@ describe('useQuery Hook SSR', () => { const { getSearchResults: { - pagination: { - pageLimit, - }, + pagination: { pageLimit }, }, } = data; - return ( - <div> - {pageLimit} - </div> - ); + return <div>{pageLimit}</div>; } return null; }; const app = ( - <MockedProvider - addTypename - cache={cache} - > + <MockedProvider addTypename cache={cache}> <Component /> </MockedProvider> ); - return renderToStringWithData(app).then(markup => { + return renderToStringWithData(app).then((markup) => { expect(spy).toHaveBeenNthCalledWith(1, false); expect(markup).toMatch(/<div.*>3<\/div>/); expect(cache.extract()).toMatchSnapshot(); diff --git a/src/react/ssr/__tests__/useReactiveVar.test.tsx b/src/react/ssr/__tests__/useReactiveVar.test.tsx --- a/src/react/ssr/__tests__/useReactiveVar.test.tsx +++ b/src/react/ssr/__tests__/useReactiveVar.test.tsx @@ -1,13 +1,13 @@ /** @jest-environment node */ -import React from 'react'; -import { makeVar } from '../../../core'; -import { useReactiveVar } from '../../hooks'; +import React from "react"; +import { makeVar } from "../../../core"; +import { useReactiveVar } from "../../hooks"; import { itAsync } from "../../../testing"; -import { renderToStringWithData } from '../'; +import { renderToStringWithData } from "../"; -describe('useReactiveVar Hook SSR', () => { +describe("useReactiveVar Hook SSR", () => { itAsync("does not cause warnings", (resolve, reject) => { - const mock = jest.spyOn(console, 'error'); + const mock = jest.spyOn(console, "error"); const counterVar = makeVar(0); function Component() { const count = useReactiveVar(counterVar); @@ -16,11 +16,14 @@ describe('useReactiveVar Hook SSR', () => { return <div>{count}</div>; } - renderToStringWithData(<Component />).then((value) => { - expect(value).toEqual('<div>0</div>'); - expect(mock).toHaveBeenCalledTimes(0); - }).finally(() => { - mock.mockRestore(); - }).then(resolve, reject); + renderToStringWithData(<Component />) + .then((value) => { + expect(value).toEqual("<div>0</div>"); + expect(mock).toHaveBeenCalledTimes(0); + }) + .finally(() => { + mock.mockRestore(); + }) + .then(resolve, reject); }); }); diff --git a/src/testing/core/index.ts b/src/testing/core/index.ts --- a/src/testing/core/index.ts +++ b/src/testing/core/index.ts @@ -1,16 +1,15 @@ -export { - MockLink, - mockSingleLink, +export type { MockedResponse, MockLinkOptions, - ResultFunction -} from './mocking/mockLink'; + ResultFunction, +} from "./mocking/mockLink.js"; +export { MockLink, mockSingleLink } from "./mocking/mockLink.js"; export { MockSubscriptionLink, - mockObservableLink -} from './mocking/mockSubscriptionLink'; -export { createMockClient } from './mocking/mockClient'; -export { default as subscribeAndCount } from './subscribeAndCount'; -export { itAsync } from './itAsync'; -export { wait, tick } from './wait' -export * from './withConsoleSpy'; + mockObservableLink, +} from "./mocking/mockSubscriptionLink.js"; +export { createMockClient } from "./mocking/mockClient.js"; +export { default as subscribeAndCount } from "./subscribeAndCount.js"; +export { itAsync } from "./itAsync.js"; +export { wait, tick } from "./wait.js"; +export * from "./withConsoleSpy.js"; diff --git a/src/testing/core/mocking/mockClient.ts b/src/testing/core/mocking/mockClient.ts --- a/src/testing/core/mocking/mockClient.ts +++ 
b/src/testing/core/mocking/mockClient.ts @@ -1,8 +1,9 @@ -import { DocumentNode } from 'graphql'; +import type { DocumentNode } from 'graphql'; -import { ApolloClient } from '../../../core'; -import { InMemoryCache, NormalizedCacheObject } from '../../../cache'; -import { mockSingleLink } from './mockLink'; +import { ApolloClient } from '../../../core/index.js'; +import type { NormalizedCacheObject } from '../../../cache/index.js'; +import { InMemoryCache } from '../../../cache/index.js'; +import { mockSingleLink } from './mockLink.js'; export function createMockClient<TData>( data: TData, diff --git a/src/testing/core/mocking/mockLink.ts b/src/testing/core/mocking/mockLink.ts --- a/src/testing/core/mocking/mockLink.ts +++ b/src/testing/core/mocking/mockLink.ts @@ -1,14 +1,14 @@ -import { invariant } from '../../../utilities/globals'; +import { invariant } from '../../../utilities/globals/index.js'; -import { print } from 'graphql'; import { equal } from '@wry/equality'; -import { - ApolloLink, +import type { Operation, GraphQLRequest, - FetchResult, -} from '../../../link/core'; + FetchResult} from '../../../link/core/index.js'; +import { + ApolloLink +} from '../../../link/core/index.js'; import { Observable, @@ -17,7 +17,8 @@ import { removeConnectionDirectiveFromDocument, cloneDeep, stringifyForDisplay, -} from '../../../utilities'; + print +} from '../../../utilities/index.js'; export type ResultFunction<T> = () => T; @@ -116,7 +117,7 @@ ${unmatchedVars.map(d => ` ${stringifyForDisplay(d)}`).join('\n')} if (this.showWarnings) { console.warn( - configError.message + + configError.message + '\nThis typically indicates a configuration error in your mocks ' + 'setup, usually due to a typo or mismatched variable.' ); diff --git a/src/testing/core/mocking/mockQueryManager.ts b/src/testing/core/mocking/mockQueryManager.ts --- a/src/testing/core/mocking/mockQueryManager.ts +++ b/src/testing/core/mocking/mockQueryManager.ts @@ -1,6 +1,7 @@ -import { QueryManager } from '../../../core/QueryManager'; -import { mockSingleLink, MockedResponse } from './mockLink'; -import { InMemoryCache } from '../../../cache'; +import { QueryManager } from '../../../core/QueryManager.js'; +import type { MockedResponse } from './mockLink.js'; +import { mockSingleLink } from './mockLink.js'; +import { InMemoryCache } from '../../../cache/index.js'; // Helper method for the tests that construct a query manager out of a // a list of mocked responses for a mocked network interface. 
diff --git a/src/testing/core/mocking/mockSubscriptionLink.ts b/src/testing/core/mocking/mockSubscriptionLink.ts --- a/src/testing/core/mocking/mockSubscriptionLink.ts +++ b/src/testing/core/mocking/mockSubscriptionLink.ts @@ -1,5 +1,6 @@ -import { Observable } from '../../../utilities'; -import { ApolloLink, FetchResult, Operation } from '../../../link/core'; +import { Observable } from '../../../utilities/index.js'; +import type { FetchResult, Operation } from '../../../link/core/index.js'; +import { ApolloLink } from '../../../link/core/index.js'; export interface MockedSubscription { request: Operation; diff --git a/src/testing/core/mocking/mockWatchQuery.ts b/src/testing/core/mocking/mockWatchQuery.ts --- a/src/testing/core/mocking/mockWatchQuery.ts +++ b/src/testing/core/mocking/mockWatchQuery.ts @@ -1,6 +1,6 @@ -import { MockedResponse } from './mockLink'; -import mockQueryManager from './mockQueryManager'; -import { ObservableQuery } from '../../../core'; +import type { MockedResponse } from './mockLink.js'; +import mockQueryManager from './mockQueryManager.js'; +import type { ObservableQuery } from '../../../core/index.js'; export default (...mockedResponses: MockedResponse[]): ObservableQuery<any> => { const queryManager = mockQueryManager(...mockedResponses); diff --git a/src/testing/core/observableToPromise.ts b/src/testing/core/observableToPromise.ts --- a/src/testing/core/observableToPromise.ts +++ b/src/testing/core/observableToPromise.ts @@ -1,5 +1,5 @@ -import { ObservableQuery, ApolloQueryResult } from '../../core'; -import { ObservableSubscription } from '../../utilities'; +import type { ObservableQuery, ApolloQueryResult } from '../../core/index.js'; +import type { ObservableSubscription } from '../../utilities/index.js'; /** * diff --git a/src/testing/core/subscribeAndCount.ts b/src/testing/core/subscribeAndCount.ts --- a/src/testing/core/subscribeAndCount.ts +++ b/src/testing/core/subscribeAndCount.ts @@ -1,4 +1,5 @@ -import { ObservableSubscription, asyncMap, Observable } from '../../utilities'; +import type { ObservableSubscription, Observable } from '../../utilities/index.js'; +import { asyncMap } from '../../utilities/index.js'; export default function subscribeAndCount<TResult>( reject: (reason: any) => any, diff --git a/src/testing/index.ts b/src/testing/index.ts --- a/src/testing/index.ts +++ b/src/testing/index.ts @@ -1,3 +1,4 @@ -import '../utilities/globals'; -export { MockedProvider, MockedProviderProps } from './react/MockedProvider'; -export * from './core'; +import '../utilities/globals/index.js'; +export type { MockedProviderProps } from './react/MockedProvider.js'; +export { MockedProvider } from './react/MockedProvider.js'; +export * from './core/index.js'; diff --git a/src/testing/matchers/index.d.ts b/src/testing/matchers/index.d.ts new file mode 100644 --- /dev/null +++ b/src/testing/matchers/index.d.ts @@ -0,0 +1,32 @@ +import type { + ApolloClient, + DocumentNode, + OperationVariables, +} from "../../core/index.js"; + +interface ApolloCustomMatchers<R = void, T = {}> { + /** + * Used to determine if two GraphQL query documents are equal to each other by + * comparing their printed values. The document must be parsed by `gql`. + */ + toMatchDocument(document: DocumentNode): R; + + /** + * Used to determine if the Suspense cache has a cache entry. + */ + toHaveSuspenseCacheEntryUsing: T extends ApolloClient<any> + ? 
( + query: DocumentNode, + options?: { + variables?: OperationVariables; + queryKey?: string | number | any[]; + } + ) => R + : { error: "matcher needs to be called on an ApolloClient instance" }; +} + +declare global { + namespace jest { + interface Matchers<R = void, T = {}> extends ApolloCustomMatchers<R, T> {} + } +} diff --git a/src/testing/matchers/index.ts b/src/testing/matchers/index.ts new file mode 100644 --- /dev/null +++ b/src/testing/matchers/index.ts @@ -0,0 +1,8 @@ +import { expect } from "@jest/globals"; +import { toMatchDocument } from "./toMatchDocument.js"; +import { toHaveSuspenseCacheEntryUsing } from "./toHaveSuspenseCacheEntryUsing.js"; + +expect.extend({ + toHaveSuspenseCacheEntryUsing, + toMatchDocument, +}); diff --git a/src/testing/matchers/toHaveSuspenseCacheEntryUsing.ts b/src/testing/matchers/toHaveSuspenseCacheEntryUsing.ts new file mode 100644 --- /dev/null +++ b/src/testing/matchers/toHaveSuspenseCacheEntryUsing.ts @@ -0,0 +1,43 @@ +import type { MatcherFunction } from "expect"; +import type { DocumentNode } from "graphql"; +import type { OperationVariables } from "../../core/index.js"; +import { ApolloClient } from "../../core/index.js"; +import { canonicalStringify } from "../../cache/index.js"; +import { getSuspenseCache } from "../../react/cache/index.js"; +import type { CacheKey } from "../../react/cache/types.js"; + +export const toHaveSuspenseCacheEntryUsing: MatcherFunction< + [ + query: DocumentNode, + options: { + variables?: OperationVariables; + queryKey?: string | number | any[]; + }, + ] +> = function ( + client, + query, + { variables, queryKey = [] } = Object.create(null) +) { + if (!(client instanceof ApolloClient)) { + throw new Error("Actual must be an instance of `ApolloClient`"); + } + + const suspenseCache = getSuspenseCache(client); + + const cacheKey: CacheKey = [ + query, + canonicalStringify(variables), + ...([] as any[]).concat(queryKey), + ]; + const queryRef = suspenseCache["queryRefs"].lookupArray(cacheKey)?.current; + + return { + pass: !!queryRef, + message: () => { + return `Expected suspense cache ${ + queryRef ? 
"not " : "" + }to have cache entry using key`; + }, + }; +}; diff --git a/src/testing/matchers/toMatchDocument.ts b/src/testing/matchers/toMatchDocument.ts new file mode 100644 --- /dev/null +++ b/src/testing/matchers/toMatchDocument.ts @@ -0,0 +1,63 @@ +import { checkDocument, print } from "../../utilities/index.js"; +import type { DocumentNode } from "../../core/index.js"; +import type { MatcherFunction } from "expect"; + +export const toMatchDocument: MatcherFunction<[document: DocumentNode]> = + function (actual, document) { + const hint = this.utils.matcherHint("toMatchDocument"); + const actualDocument = print( + validateDocument( + actual, + hint + + `\n\n${this.utils.RECEIVED_COLOR( + "received" + )} document must be a parsed document.` + ) + ); + const expectedDocument = print( + validateDocument( + document, + hint + + `\n\n${this.utils.EXPECTED_COLOR( + "expected" + )} document must be a parsed document.` + ) + ); + + const pass = actualDocument === expectedDocument; + + return { + pass, + message: () => { + const hint = this.utils.matcherHint( + "toMatchDocument", + undefined, + undefined, + { isNot: this.isNot } + ); + + if (pass) { + return ( + hint + + "\n\n" + + "Received:\n\n" + + this.utils.RECEIVED_COLOR(actualDocument) + ); + } + + return ( + hint + "\n\n" + this.utils.diff(expectedDocument, actualDocument) + ); + }, + }; + }; + +function validateDocument(document: unknown, message: string) { + try { + checkDocument(document as DocumentNode); + } catch (e) { + throw new Error(message); + } + + return document as DocumentNode; +} diff --git a/src/testing/react/MockedProvider.tsx b/src/testing/react/MockedProvider.tsx --- a/src/testing/react/MockedProvider.tsx +++ b/src/testing/react/MockedProvider.tsx @@ -1,12 +1,14 @@ import * as React from 'react'; -import { ApolloClient, DefaultOptions } from '../../core'; -import { InMemoryCache as Cache } from '../../cache'; -import { ApolloProvider } from '../../react/context'; -import { MockLink, MockedResponse } from '../core'; -import { ApolloLink } from '../../link/core'; -import { Resolvers } from '../../core'; -import { ApolloCache } from '../../cache'; +import type { DefaultOptions } from '../../core/index.js'; +import { ApolloClient } from '../../core/index.js'; +import { InMemoryCache as Cache } from '../../cache/index.js'; +import { ApolloProvider } from '../../react/context/index.js'; +import type { MockedResponse } from '../core/index.js'; +import { MockLink } from '../core/index.js'; +import type { ApolloLink } from '../../link/core/index.js'; +import type { Resolvers } from '../../core/index.js'; +import type { ApolloCache } from '../../cache/index.js'; export interface MockedProviderProps<TSerializedCache = {}> { mocks?: ReadonlyArray<MockedResponse>; @@ -55,13 +57,17 @@ export class MockedProvider extends React.Component< resolvers, }); - this.state = { client }; + this.state = { + client, + }; } public render() { const { children, childProps } = this.props; + const { client } = this.state; + return React.isValidElement(children) ? 
( - <ApolloProvider client={this.state.client}> + <ApolloProvider client={client}> {React.cloneElement(React.Children.only(children), { ...childProps })} </ApolloProvider> ) : null; diff --git a/src/utilities/common/__tests__/omitDeep.ts b/src/utilities/common/__tests__/omitDeep.ts new file mode 100644 --- /dev/null +++ b/src/utilities/common/__tests__/omitDeep.ts @@ -0,0 +1,137 @@ +import { omitDeep } from "../omitDeep"; + +test("omits the key from a shallow object", () => { + expect(omitDeep({ omit: "a", keep: "b", other: "c" }, "omit")).toEqual({ + keep: "b", + other: "c", + }); +}); + +test("omits the key from arbitrarily nested object", () => { + expect( + omitDeep( + { + omit: "a", + keep: { + omit: "a", + keep: "b", + other: { omit: "a", keep: "b", other: "c" }, + }, + }, + "omit" + ) + ).toEqual({ + keep: { + keep: "b", + other: { keep: "b", other: "c" }, + }, + }); +}); + +test("omits the key from arrays", () => { + expect( + omitDeep( + [ + { omit: "a", keep: "b", other: "c" }, + { omit: "a", keep: "b", other: "c" }, + ], + "omit" + ) + ).toEqual([ + { keep: "b", other: "c" }, + { keep: "b", other: "c" }, + ]); +}); + +test("omits the key from arbitrarily nested arrays", () => { + expect( + omitDeep( + [ + [{ omit: "a", keep: "b", other: "c" }], + [ + { omit: "a", keep: "b", other: "c" }, + [{ omit: "a", keep: "b", other: "c" }], + ], + ], + "omit" + ) + ).toEqual([ + [{ keep: "b", other: "c" }], + [{ keep: "b", other: "c" }, [{ keep: "b", other: "c" }]], + ]); +}); + +test("returns primitives unchanged", () => { + expect(omitDeep("a", "ignored")).toBe("a"); + expect(omitDeep(1, "ignored")).toBe(1); + expect(omitDeep(true, "ignored")).toBe(true); + expect(omitDeep(null, "ignored")).toBe(null); + expect(omitDeep(undefined, "ignored")).toBe(undefined); + expect(omitDeep(Symbol.for("foo"), "ignored")).toBe(Symbol.for("foo")); +}); + +test("handles circular references", () => { + let b: any; + const a = { omit: "foo", b }; + b = { a, omit: "foo" }; + a.b = b; + + const variables = { a, b, omit: "foo" }; + + const result = omitDeep(variables, "omit"); + + expect(result).not.toHaveProperty("omit"); + expect(result.a).not.toHaveProperty("omit"); + expect(result.b).not.toHaveProperty("omit"); +}); + +test("returns same object unmodified if key is not found", () => { + const obj = { + a: "a", + b: "b", + c: { d: "d", e: "e" }, + }; + + const arr = [{ a: "a", b: "b", c: "c" }, { foo: "bar" }]; + + expect(omitDeep(obj, "omit")).toBe(obj); + expect(omitDeep(arr, "omit")).toBe(arr); +}); + +test("returns unmodified subtrees for subtrees that do not contain the key", () => { + const original = { + a: "a", + foo: { bar: "true" }, + baz: [{ foo: "bar" }], + omitOne: [{ foo: "bar", omit: true }, { foo: "bar" }], + }; + + const result = omitDeep(original, "omit"); + + expect(result).not.toBe(original); + expect(result.foo).toBe(original.foo); + expect(result.baz).toBe(original.baz); + expect(result.omitOne).not.toBe(original.omitOne); + expect(result.omitOne[0]).not.toBe(original.omitOne[0]); + expect(result.omitOne[1]).toBe(original.omitOne[1]); +}); + +test("only considers plain objects and ignores class instances when omitting properties", () => { + class Thing { + foo = "bar"; + omit = false; + } + + const thing = new Thing(); + const original = { thing }; + + const result = omitDeep(original, "omit"); + + expect(result.thing).toBe(thing); + expect(result.thing).toHaveProperty("omit", false); + + const modifiedThing = omitDeep(thing, "omit"); + + expect(modifiedThing).toBe(thing); + 
expect(modifiedThing).toHaveProperty("omit", false); +}); diff --git a/src/utilities/common/__tests__/stripTypename.ts b/src/utilities/common/__tests__/stripTypename.ts new file mode 100644 --- /dev/null +++ b/src/utilities/common/__tests__/stripTypename.ts @@ -0,0 +1,60 @@ +import { stripTypename } from "../stripTypename"; + +test("omits __typename from a shallow object", () => { + expect( + stripTypename({ __typename: "Person", firstName: "Foo", lastName: "Bar" }) + ).toEqual({ firstName: "Foo", lastName: "Bar" }); +}); + +test("omits __typename from arbitrarily nested object", () => { + expect( + stripTypename({ + __typename: "Profile", + user: { + __typename: "User", + firstName: "Foo", + lastName: "Bar", + location: { + __typename: "Location", + city: "Denver", + country: "USA", + }, + }, + }) + ).toEqual({ + user: { + firstName: "Foo", + lastName: "Bar", + location: { + city: "Denver", + country: "USA", + }, + }, + }); +}); + +test("omits the __typename from arrays", () => { + expect( + stripTypename([ + { __typename: "Todo", name: "Take out trash" }, + { __typename: "Todo", name: "Clean room" }, + ]) + ).toEqual([{ name: "Take out trash" }, { name: "Clean room" }]); +}); + +test("omits __typename from arbitrarily nested arrays", () => { + expect( + stripTypename([ + [{ __typename: "Foo", foo: "foo" }], + [{ __typename: "Bar", bar: "bar" }, [{ __typename: "Baz", baz: "baz" }]], + ]) + ).toEqual([[{ foo: "foo" }], [{ bar: "bar" }, [{ baz: "baz" }]]]); +}); + +test("returns primitives unchanged", () => { + expect(stripTypename("a")).toBe("a"); + expect(stripTypename(1)).toBe(1); + expect(stripTypename(true)).toBe(true); + expect(stripTypename(null)).toBe(null); + expect(stripTypename(undefined)).toBe(undefined); +}); diff --git a/src/utilities/graphql/__tests__/DocumentTransform.ts b/src/utilities/graphql/__tests__/DocumentTransform.ts new file mode 100644 --- /dev/null +++ b/src/utilities/graphql/__tests__/DocumentTransform.ts @@ -0,0 +1,676 @@ +import { DocumentTransform } from "../DocumentTransform"; +import { isMutationOperation, isQueryOperation } from "../operations"; +import { removeDirectivesFromDocument } from "../transform"; +import { gql } from "graphql-tag"; +import { DocumentNode, visit, Kind } from "graphql"; + +function stripDirective(directive: string) { + return (document: DocumentNode) => { + return removeDirectivesFromDocument([{ name: directive }], document)!; + }; +} + +function renameDirective(target: string, replacement: string) { + return (document: DocumentNode) => { + return visit(document, { + Directive(node) { + if (node.name.value === target) { + return { + ...node, + name: { kind: Kind.NAME, value: replacement }, + }; + } + }, + }); + }; +} + +function addClientDirectiveToField(fieldName: string) { + return (document: DocumentNode) => { + return visit(document, { + Field: { + leave: (node) => { + if (node.name.value === fieldName) { + return { + ...node, + directives: [ + { + kind: Kind.DIRECTIVE, + name: { kind: Kind.NAME, value: "client" }, + }, + ], + }; + } + }, + }, + }); + }; +} + +test("can transform a document", () => { + const query = gql` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const transform = new DocumentTransform(stripDirective("client")); + + const result = transform.transformDocument(query); + + expect(result).toMatchDocument(gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `); +}); + +test("returns unmodified document if trying to transform an already computed result", () => { + const query 
= gql` + query TestQuery { + user { + name @custom + isLoggedIn @client + } + } + `; + + const cachedTransform = new DocumentTransform(stripDirective("client")); + const uncachedTransform = new DocumentTransform(stripDirective("custom"), { + cache: false, + }); + + const withoutClient = cachedTransform.transformDocument(query); + const withoutCustom = uncachedTransform.transformDocument(query); + + expect(cachedTransform.transformDocument(withoutClient)).toBe(withoutClient); + + expect(uncachedTransform.transformDocument(withoutCustom)).toBe( + withoutCustom + ); +}); + +test("caches the result of the transform by default", () => { + const query = gql` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `; + + const transform = jest.fn(stripDirective("client")); + const documentTransform = new DocumentTransform(transform); + + const result1 = documentTransform.transformDocument(query); + + expect(result1).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(1); + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(1); +}); + +test("allows custom cache keys to be defined", () => { + const query = gql` + query TestQuery { + user @network { + name + } + } + `; + + const onlineQuery = gql` + query TestQuery { + user { + name + } + } + `; + + const offlineQuery = gql` + query TestQuery { + user @client { + name + } + } + `; + + let online = true; + + const onlineTransform = new DocumentTransform(stripDirective("network")); + const offlineTransform = new DocumentTransform( + renameDirective("network", "client") + ); + + const transform = jest.fn((document: DocumentNode) => { + return online + ? 
onlineTransform.transformDocument(document) + : offlineTransform.transformDocument(document); + }); + + const documentTransform = new DocumentTransform(transform, { + getCacheKey: (document) => [document, online], + }); + + const result1 = documentTransform.transformDocument(query); + + expect(result1).toMatchDocument(onlineQuery); + expect(transform).toHaveBeenCalledTimes(1); + + online = false; + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(offlineQuery); + expect(transform).toHaveBeenCalledTimes(2); + + online = true; + + const result3 = documentTransform.transformDocument(query); + + expect(result3).toMatchDocument(onlineQuery); + expect(transform).toHaveBeenCalledTimes(2); + + online = false; + + const result4 = documentTransform.transformDocument(query); + + expect(result4).toMatchDocument(offlineQuery); + expect(transform).toHaveBeenCalledTimes(2); +}); + +test("can disable caching the result output", () => { + const query = gql` + query { + user { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query { + user { + name + isLoggedIn + } + } + `; + + const transform = jest.fn(stripDirective("client")); + const documentTransform = new DocumentTransform(transform, { cache: false }); + + const result1 = documentTransform.transformDocument(query); + + expect(result1).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(1); + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(2); +}); + +test("can combine 2 transforms with `concat`", async () => { + const query = gql` + query TestQuery { + user @nonreactive { + name + isLoggedIn @client + } + } + `; + + const stripClient = new DocumentTransform(stripDirective("client")); + const stripNonReactive = new DocumentTransform(stripDirective("nonreactive")); + const documentTransform = stripClient.concat(stripNonReactive); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `); +}); + +test("runs concatenated transform after original transform", () => { + const query = gql` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `; + + const addClientDirectiveToName = new DocumentTransform( + addClientDirectiveToField("name") + ); + + expect(addClientDirectiveToName.transformDocument(query)).toMatchDocument(gql` + query TestQuery { + user { + name @client + isLoggedIn @client + } + } + `); + + const stripClient = new DocumentTransform(stripDirective("client")); + const documentTransform = addClientDirectiveToName.concat(stripClient); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(expected); +}); + +test("can combine multiple transforms with `concat`", async () => { + const query = gql` + query TestQuery { + user @nonreactive { + name @connection + isLoggedIn @client + } + } + `; + + const stripClient = new DocumentTransform(stripDirective("client")); + const stripNonReactive = new DocumentTransform(stripDirective("nonreactive")); + const stripConnection = new DocumentTransform(stripDirective("connection")); + const documentTransform = stripClient + .concat(stripNonReactive) + .concat(stripConnection); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(gql` + 
query TestQuery { + user { + name + isLoggedIn + } + } + `); +}); + +test("caches the result output from a combined transform when both transforms are cached", async () => { + const query = gql` + query TestQuery { + user @nonreactive { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `; + + const stripClient = jest.fn(stripDirective("client")); + const stripNonReactive = jest.fn(stripDirective("nonreactive")); + + const stripClientTransform = new DocumentTransform(stripClient); + const stripNonReactiveTransform = new DocumentTransform(stripNonReactive); + const documentTransform = stripClientTransform.concat( + stripNonReactiveTransform + ); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(expected); + expect(stripClient).toHaveBeenCalledTimes(1); + expect(stripNonReactive).toHaveBeenCalledTimes(1); + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(expected); + expect(stripClient).toHaveBeenCalledTimes(1); + expect(stripNonReactive).toHaveBeenCalledTimes(1); +}); + +test("allows non cached transforms to be run when concatenated", async () => { + const query = gql` + query TestQuery { + user @nonreactive { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `; + + const stripClient = jest.fn(stripDirective("client")); + const stripNonReactive = jest.fn(stripDirective("nonreactive")); + + const stripClientTransform = new DocumentTransform(stripClient, { + cache: true, + }); + const stripNonReactiveTransform = new DocumentTransform(stripNonReactive, { + cache: false, + }); + + // Try ordering the transforms both ways to ensure the cached transform has + // no effect on whether the non-cached transform runs + const documentTransform = + stripNonReactiveTransform.concat(stripClientTransform); + + const reversedTransform = stripClientTransform.concat( + stripNonReactiveTransform + ); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(expected); + expect(stripClient).toHaveBeenCalledTimes(1); + expect(stripNonReactive).toHaveBeenCalledTimes(1); + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(expected); + // Even though stripClient is cached, it is called a second time because + // stripNonReactive returns a new document instance each time it runs. + expect(stripClient).toHaveBeenCalledTimes(2); + expect(stripNonReactive).toHaveBeenCalledTimes(2); + + stripClient.mockClear(); + stripNonReactive.mockClear(); + + const reversed = reversedTransform.transformDocument(query); + + expect(reversed).toMatchDocument(expected); + expect(stripClient).toHaveBeenCalledTimes(1); + expect(stripNonReactive).toHaveBeenCalledTimes(1); + + const reversed2 = reversedTransform.transformDocument(query); + + expect(reversed2).toMatchDocument(expected); + // Now that the cached transform is first, we can make sure it doesn't run + // again. We verify the non-cached that is run after the cached transform does + // get a chance to execute. 
+ expect(stripClient).toHaveBeenCalledTimes(1); + expect(stripNonReactive).toHaveBeenCalledTimes(2); +}); + +test("can conditionally run transforms using `DocumentTransform.split`", () => { + const mutation = gql` + mutation TestMutation { + incrementCounter @client { + count + } + } + `; + + const query = gql` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const documentTransform = DocumentTransform.split( + isQueryOperation, + new DocumentTransform(stripDirective("client")) + ); + + const queryResult = documentTransform.transformDocument(query); + const mutationResult = documentTransform.transformDocument(mutation); + + expect(queryResult).toMatchDocument(gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `); + + expect(mutationResult).toMatchDocument(mutation); +}); + +test("properly caches the result of `filter` when the original transform is cached", () => { + const query = gql` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `; + + const transform = jest.fn(stripDirective("client")); + const documentTransform = DocumentTransform.split( + isQueryOperation, + new DocumentTransform(transform, { cache: true }) + ); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(1); + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(1); +}); + +test("reruns transform returned from `DocumentTransform.split` when the original transform is not cached", () => { + const query = gql` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const expected = gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `; + + const transform = jest.fn(stripDirective("client")); + const documentTransform = DocumentTransform.split( + isQueryOperation, + new DocumentTransform(transform, { cache: false }) + ); + + const result = documentTransform.transformDocument(query); + + expect(result).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(1); + + const result2 = documentTransform.transformDocument(query); + + expect(result2).toMatchDocument(expected); + expect(transform).toHaveBeenCalledTimes(2); +}); + +test("properly handles combinations of `DocumentTransform.split` and `filter`", () => { + const mutation = gql` + mutation TestMutation { + incrementCounter @client { + count @nonreactive + } + } + `; + + const query = gql` + query TestQuery { + user { + name @nonreactive + isLoggedIn @client + } + } + `; + + const stripClient = new DocumentTransform(stripDirective("client")); + const stripNonReactive = new DocumentTransform(stripDirective("nonreactive")); + + // Strip both @client and @nonreactive but only on query types + const queryOnlyTransform = DocumentTransform.split( + isQueryOperation, + stripClient.concat(stripNonReactive) + ); + + // Only strip @client from mutations but remove @nonreactive from all + const conditionalStrip = DocumentTransform.split( + isMutationOperation, + stripClient + ).concat(stripNonReactive); + + expect(queryOnlyTransform.transformDocument(query)).toMatchDocument(gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `); + + expect(queryOnlyTransform.transformDocument(mutation)).toMatchDocument( + mutation + ); + + expect(conditionalStrip.transformDocument(query)).toMatchDocument(gql` + 
query TestQuery { + user { + name + isLoggedIn @client + } + } + `); + + expect(conditionalStrip.transformDocument(mutation)).toMatchDocument(gql` + mutation TestMutation { + incrementCounter { + count + } + } + `); +}); + +test("executes other transform when using `DocumentTransform.split` when condition is false", () => { + const mutation = gql` + mutation TestMutation { + incrementCounter @client { + count @nonreactive + } + } + `; + + const query = gql` + query TestQuery { + user { + name @nonreactive + isLoggedIn @client + } + } + `; + + const stripClient = new DocumentTransform(stripDirective("client")); + const stripNonReactive = new DocumentTransform(stripDirective("nonreactive")); + + // strip both directives for queries, but only @nonreactive for mutations + const documentTransform = DocumentTransform.split( + isQueryOperation, + stripClient.concat(stripNonReactive), + stripNonReactive + ); + + expect(documentTransform.transformDocument(query)).toMatchDocument(gql` + query TestQuery { + user { + name + isLoggedIn + } + } + `); + + expect(documentTransform.transformDocument(mutation)).toMatchDocument(gql` + mutation TestMutation { + incrementCounter @client { + count + } + } + `); +}); + +test("errors when passing a document that has not been parsed with `gql`", () => { + const query = ` + query TestQuery { + user { + name + isLoggedIn @client + } + } + `; + + const documentTransform = new DocumentTransform((document) => document); + + expect(() => { + documentTransform.transformDocument(query as unknown as DocumentNode); + }).toThrowError(/wrap the query string in a "gql" tag/); +});
ApolloProvider causing rerenders in consumers if ApolloProvider is rerendered even with the same client argument

**Intended outcome:**

Whenever `ApolloProvider` is rerendered (e.g. if the parent rerenders) but receives the same `client`, this should not cause the context (used by `useQuery`, `useMutation`, but also the consumers) to force a rerender.

I tried to use `ApolloProvider` to provide the Apollo Client inside a separate React environment (React-Konva). I tried to utilize the new type policies to prevent passing down props by using queries that fetch data from the cache directly by id. All this again to optimize the rather slow React-Konva scene. However, even without the cache changing, simply moving the scene around (which rerenders the parent component of ApolloProvider and thus ApolloProvider itself) caused all objects in the scene with queries to rerender.

**Actual outcome:**

Instead of objects not being rerendered as long as no data changes in the Apollo cache, all objects rerender whenever something changes upstream, regardless of whether `client` remains the same.

**How to reproduce the issue:**

Create an `ApolloProvider` in a component that will rerender, and notice that all memoized components deeper down in the tree still rerender even when the stored cache value doesn't change.

For a working example (with an alternative that doesn't have this issue) see: https://codesandbox.io/s/apollo-context-rerenders-umlrx

From what I gather, the following happens:

- ApolloProvider is a functional component
- If this is used inside React-Konva, it will not find the ApolloContext.Provider value from above, which causes the embedded `if` to execute (`value === undefined/null`).
- This embedded `if` uses `Object.assign({}, context, { client })` to create a context.
- However, given that it's a functional component, it will do this for every instantiation.
- So each time, `context` is different and React will rerender all context-dependent components, causing this issue.

Locally, I fixed this by simply using my own provider:

```js
export const SimpleApolloProvider = ({ client, children }) => {
  const ApolloContext = getApolloContext();
  // Use useMemo to prevent context from updating during
  // rerenders with the same client. Generally the client
  // will remain the same throughout the lifetime of the
  // application, but when {client} is used, (or as official
  // implementation context = Object.assign({}, context, { client }))
  // each rerender will create a new object, causing *all*
  // useQuery/useMutation hooks to rerender due to changed context
  // value.
  const context = useMemo(() => ({ client }), [client]);

  return (
    <ApolloContext.Provider value={context}>
      {children}
    </ApolloContext.Provider>
  );
};
```

I don't need the reuse feature from ApolloProvider here so this is sufficient for my use case. But I believe it's useful to cache the `context` object value so the (potentially many) consumers don't needlessly rerender.
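For illustration, a minimal, hypothetical usage sketch of the workaround above (the query, component names, and import path are made up; it assumes `SimpleApolloProvider` is exported from a local module as defined above). The point is only that the memoized context value keeps cache-only consumers from rerendering when the parent rerenders:

```tsx
import React, { useState } from "react";
import { gql, useQuery, ApolloClient } from "@apollo/client";
// Hypothetical path to the workaround component shown above.
import { SimpleApolloProvider } from "./SimpleApolloProvider";

// Hypothetical query, for illustration only.
const SCENE_QUERY = gql`
  query Scene {
    items {
      id
      label
    }
  }
`;

const Item = React.memo(function Item() {
  // Reads from the cache only; it should not rerender just because
  // an ancestor rerendered with the same client.
  const { data } = useQuery(SCENE_QUERY, { fetchPolicy: "cache-only" });
  return <div>{data?.items?.length ?? 0} items</div>;
});

export function Parent({ client }: { client: ApolloClient<object> }) {
  const [, bump] = useState(0);
  return (
    <div onClick={() => bump((n) => n + 1)}>
      {/* Parent rerenders on every click, but the memoized context value
          inside SimpleApolloProvider means <Item /> does not. */}
      <SimpleApolloProvider client={client}>
        <Item />
      </SimpleApolloProvider>
    </div>
  );
}
```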
**Versions**

```
System:
  OS: Linux 5.10 Arch Linux
Binaries:
  Node: 6.14.4 - ~/.nvm/versions/node/v6.14.4/bin/node
  Yarn: 1.21.1 - ~/.nvm/versions/node/v6.14.4/bin/yarn
  npm: 3.10.10 - ~/.nvm/versions/node/v6.14.4/bin/npm
Browsers:
  Firefox: 84.0.2
npmPackages:
  @apollo/client: ^3.3.7 => 3.3.7
  apollo-link-queue: ^3.0.0 => 3.0.0
  apollo-link-serialize: ^3.1.1 => 3.1.1
  apollo-utilities: ^1.3.3 => 1.3.3
```

Duplicate `@wry/trie` in dependency tree

The `@wry/trie` dependency is currently duplicated in Apollo's dependency tree. An update to `optimism@0.17.5` would be required to get rid of this duplication, see https://github.com/benjamn/optimism/pull/551.
Running into this issue as well on latest npm apollo 3.4.16
2022-12-08T19:10:19Z
3.7
apollographql/apollo-client
10,143
apollographql__apollo-client-10143
[ "10105" ]
03c8fa48f07e8f0f2563eaac195e25f228740c59
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -303,8 +303,15 @@ export class ObservableQuery< // Compares newResult to the snapshot we took of this.lastResult when it was // first received. - public isDifferentFromLastResult(newResult: ApolloQueryResult<TData>) { - return !this.last || !equal(this.last.result, newResult); + public isDifferentFromLastResult( + newResult: ApolloQueryResult<TData>, + variables?: TVariables + ) { + return ( + !this.last || + !equal(this.last.result, newResult) || + (variables && !equal(this.last.variables, variables)) + ); } private getLast<K extends keyof Last<TData, TVariables>>( @@ -872,7 +879,7 @@ Did you mean to call refetch(variables) instead of refetch({ variables })?`); variables: TVariables | undefined, ) { const lastError = this.getLastError(); - if (lastError || this.isDifferentFromLastResult(result)) { + if (lastError || this.isDifferentFromLastResult(result, variables)) { if (lastError || !result.partial || this.options.returnPartialData) { this.updateLastResult(result, variables); }
diff --git a/src/__tests__/local-state/general.ts b/src/__tests__/local-state/general.ts --- a/src/__tests__/local-state/general.ts +++ b/src/__tests__/local-state/general.ts @@ -1,5 +1,16 @@ import gql from 'graphql-tag'; -import { DocumentNode, GraphQLError, getIntrospectionQuery } from 'graphql'; +import { + graphql, + GraphQLInt, + print, + DocumentNode, + GraphQLError, + getIntrospectionQuery, + GraphQLSchema, + GraphQLObjectType, + GraphQLID, + GraphQLString +} from 'graphql'; import { Observable } from '../../utilities'; import { ApolloLink } from '../../link/core'; @@ -819,6 +830,102 @@ describe('Combining client and server state/operations', () => { }, 10); }); + itAsync('query resolves with loading: false if subsequent responses contain the same data', (resolve, reject) => { + const request = { + query: gql` + query people($id: Int) { + people(id: $id) { + id + name + } + } + `, + variables: { + id: 1, + }, + notifyOnNetworkStatusChange: true + }; + + const PersonType = new GraphQLObjectType({ + name: "Person", + fields: { + id: { type: GraphQLID }, + name: { type: GraphQLString } + } + }); + + const peopleData = [ + { id: 1, name: "John Smith" }, + { id: 2, name: "Sara Smith" }, + { id: 3, name: "Budd Deey" } + ]; + + const QueryType = new GraphQLObjectType({ + name: "Query", + fields: { + people: { + type: PersonType, + args: { + id: { + type: GraphQLInt + } + }, + resolve: (_, { id }) => { + return peopleData; + } + } + } + }); + + const schema = new GraphQLSchema({ query: QueryType }); + + const link = new ApolloLink(operation => { + // @ts-ignore + return new Observable(async observer => { + const { query, operationName, variables } = operation; + try { + const result = await graphql({ + schema, + source: print(query), + variableValues: variables, + operationName, + }); + observer.next(result); + observer.complete(); + } catch (err) { + observer.error(err); + } + }); + }); + + const client = new ApolloClient({ + cache: new InMemoryCache(), + link, + }); + + const observer = client.watchQuery(request); + + let count = 0; + observer.subscribe({ + next: ({ loading, data }) => { + if (count === 0) expect(loading).toBe(false); + if (count === 1) expect(loading).toBe(true); + if (count === 2) { + expect(loading).toBe(false) + resolve(); + }; + count++; + }, + error: reject, + }); + + setTimeout(() => { + observer.refetch({ + id: 2 + }); + }, 1); + }); + itAsync('should correctly propagate an error from a client resolver', async (resolve, reject) => { const data = { list: { diff --git a/src/core/__tests__/fetchPolicies.ts b/src/core/__tests__/fetchPolicies.ts --- a/src/core/__tests__/fetchPolicies.ts +++ b/src/core/__tests__/fetchPolicies.ts @@ -1163,7 +1163,17 @@ describe("nextFetchPolicy", () => { // resets the fetchPolicy to context.initialPolicy), so cache-first is // still what we see here. expect(observable.options.fetchPolicy).toBe("cache-first"); + } else if (count === 3) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + expect(observable.options.fetchPolicy).toBe("cache-first"); setTimeout(resolve, 20); } else { reject(`Too many results (${count})`);
"Loading" stays true when the same result is returned twice in Apollo Client 3.6.9 This has been a long-standing issue that I've seen numerous people bring up, but I may have found the cause. Here is the problem: ```js const request = { query: gql` query test($foo: String) { test(foo: $foo) { value } } `, variables: { foo: "bar" }, notifyOnNetworkStatusChange: true }; const observer = client.watchQuery(request); observer.subscribe({ next: ({ loading }) => { console.log(loading); } }); ``` When the GraphQL query returns the same result successfully and subsequently, even if the variables have changed, the `loading` flag remains `true` even after the second request returns successfully. It does not return back to `false` until the result returns something different than it did the last time. The expected outcome is that `loading` flips to `true` when the request is in flight and then flips back to `false` after the request completes, even if the result is the same between the two requests. I believe there's a good chance this bug is due to [this bit of code](https://github.com/apollographql/apollo-client/blob/d0da9d396a749e90b008305ecfd37b17d39e3d38/src/core/ObservableQuery.ts#L874-L880): ```js private reportResult( result: ApolloQueryResult<TData>, variables: TVariables | undefined, ) { const lastError = this.getLastError(); if (lastError || this.isDifferentFromLastResult(result)) { if (lastError || !result.partial || this.options.returnPartialData) { this.updateLastResult(result, variables); } iterateObserversSafely(this.observers, 'next', result); } } ``` I believe that maybe it was _supposed_ to be: ```js private reportResult( result: ApolloQueryResult<TData>, variables: TVariables | undefined, ) { const lastError = this.getLastError(); if (lastError || !result.partial || this.options.returnPartialData || this.isDifferentFromLastResult(result)) { this.updateLastResult(result, variables); iterateObserversSafely(this.observers, 'next', result); } } ``` In the former, the observer will never iterate if the request is successful and the next result is the same as the previous result. Maybe this was just a minor coding oversight. But it would be nice to get feedback from the Apollo Client team to make sure this aligns with their thinking. The way I was able to confirm this was the issue is that, when I added a timestamp of the request to the GraphQL API result, the problem was completely gone. So when the result is different with each request, it works as intended. But I shouldn't have to do this in a production setting. Note I am running `@apollo/client` 3.6.9 and `graphql` 16.6.0, both the newest versions. I am also _not_ using React. So if a fix for this was previously introduced for React, I'm afraid it wouldn't have fixed the root of the problem in Apollo Client itself. Many tickets about this have been opened by others, all with suggestions of different workarounds, like changing `networkPolicy` to `network-only`, `no-cache`, or `cache-and-network`. Or even setting `pollInterval` to `0`. But none of these worked for me, presumably because the root problem seems to be related to the conditions under which the observer will iterate. But it is interesting that the cache is still checked even when using a `networkPolicy` of `no-cache`. 
Here are some of the related issues I found on this topic: https://github.com/apollographql/apollo-client/issues/6334 https://github.com/apollographql/apollo-client/issues/9845 https://github.com/apollographql/apollo-client/pull/9844 (another attempted fix, but I wonder if this one was overkill) https://github.com/apollographql/apollo-client/pull/6417 (a merged PR from two years ago attempting to fix this, but not successfully) https://github.com/apollographql/apollo-client/issues/9689 (maybe related) https://github.com/apollographql/apollo-client/issues/9668 (also maybe related) Would greatly appreciate any insight on this. I currently don't have any other workarounds other than to add a timestamp to our API, but since we have a public-facing API, this would not be ideal. Thank you!
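For orientation, a simplified, standalone sketch of the variables-aware check that the patch shown earlier in this entry adds to `ObservableQuery` (an approximation only, with the surrounding class omitted): a result that is deep-equal to the previous one is still reported when the variables it was produced for have changed, which lets `loading` flip back to `false` after a refetch that returns identical data.

```ts
import { equal } from "@wry/equality";

interface Last<TData, TVariables> {
  result: TData;
  variables?: TVariables;
}

// Approximates ObservableQuery.isDifferentFromLastResult after the fix:
// a result counts as "different" if either the data or the variables it
// was produced for changed since the last reported result.
function isDifferentFromLastResult<TData, TVariables>(
  last: Last<TData, TVariables> | undefined,
  newResult: TData,
  variables?: TVariables
): boolean {
  return (
    !last ||
    !equal(last.result, newResult) ||
    (Boolean(variables) && !equal(last.variables, variables))
  );
}
```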
Sorry to hear this is a problem @michaelcbrook - any chance you can provide a small runnable reproduction (repro [template](https://github.com/apollographql/react-apollo-error-template), repro [codesandbox](https://codesandbox.io/s/github/apollographql/react-apollo-error-template)) that demonstrates the problem? This will help expedite a fix.

Hey @hwillson,

This one gave me a run for my money to cut down to a reproduction, but I finally got it: https://codesandbox.io/s/competent-worker-7qu3p1?file=/src/index.jsx

Turns out there are some more specific criteria that trigger this bug. In working on the reproduction, I found this error only happens under these conditions:

1) Variables must contain non-primitive values (I confirmed an array of objects creates the issue)
2) The non-primitive value must change between requests
3) The next result must be the same as the previous result, even though the variables have changed

Of note, the array of objects is being passed as a JSON GraphQL type via the scalars provided by this package: https://www.npmjs.com/package/graphql-scalars

...but that GraphQLJSON definition is set on the server, not the client...

I have not yet tested whether or not this issue still happens with a natively provided data type instead of a custom scalar, but seeing as though Apollo Client _should_ be naive to that type (I think), that may not have any bearing on the problem, at least in my mind. But I wanted to get your feedback and see whether that's consistent with your thinking or not.

As for my proposed solution, I'm less certain now in light of what I found after creating this reproduction. But that's beyond my knowledge.

Thanks!

In case this can help, in our team we were able to replicate this issue by making two parallel async calls with the same params, as follows:

```
LOG filter: NEARBY
LOG input: {"coordinates": {"userLat": 25, "userLng": -80.1}, "first": 20}
LOG filter: FAVORITE
LOG input: {"coordinates": {"userLat": 25, "userLng": -80.1}, "first": 20}
```

The GraphQL query looks something like:

```
gql`
  query GetPlaces($input: PlacesInput) {
    places(input: $input) {
      pageInfo {
        hasNextPage
        endCursor
      }
      totalCount
      nodes {
        ...PlaceNodeFragment
      }
    }
  }
  ${PlaceNodeFragmentDoc}
`;
```
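For reference, a minimal, hypothetical reproduction sketch following the three criteria above (the endpoint, the `JSON` scalar, and the field names are placeholders, not taken from the reproduction sandbox):

```ts
import { ApolloClient, InMemoryCache, gql } from "@apollo/client";

const client = new ApolloClient({
  uri: "http://localhost:4000/graphql", // placeholder endpoint
  cache: new InMemoryCache(),
});

// Placeholder query: `filter` is a non-primitive (object-valued) variable.
const TEST_QUERY = gql`
  query Test($filter: JSON) {
    test(filter: $filter) {
      value
    }
  }
`;

const observable = client.watchQuery({
  query: TEST_QUERY,
  variables: { filter: { foo: "bar" } },
  notifyOnNetworkStatusChange: true,
});

observable.subscribe({
  next: ({ loading }) => console.log("loading:", loading),
});

// Change the object-valued variable while the server keeps returning the
// same payload; before the fix, `loading` never flipped back to false here.
observable.refetch({ filter: { foo: "baz" } });
```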
2022-09-28T22:10:20Z
3.7
apollographql/apollo-client
10,134
apollographql__apollo-client-10134
[ "10116" ]
14423c2a5a2c32426f235ccfd283f558e47a3b7b
diff --git a/src/react/hooks/useSubscription.ts b/src/react/hooks/useSubscription.ts --- a/src/react/hooks/useSubscription.ts +++ b/src/react/hooks/useSubscription.ts @@ -2,6 +2,7 @@ import '../../utilities/globals'; import { useState, useRef, useEffect } from 'react'; import { DocumentNode } from 'graphql'; import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import { invariant } from '../../utilities/globals' import { equal } from '@wry/equality'; import { DocumentType, verifyDocumentType } from '../parser'; @@ -16,6 +17,7 @@ export function useSubscription<TData = any, TVariables = OperationVariables>( subscription: DocumentNode | TypedDocumentNode<TData, TVariables>, options?: SubscriptionHookOptions<TData, TVariables>, ) { + const hasIssuedDeprecationWarningRef = useRef(false); const client = useApolloClient(options?.client); verifyDocumentType(subscription, DocumentType.Subscription); const [result, setResult] = useState<SubscriptionResult<TData>>({ @@ -25,6 +27,26 @@ export function useSubscription<TData = any, TVariables = OperationVariables>( variables: options?.variables, }); + if (!hasIssuedDeprecationWarningRef.current) { + hasIssuedDeprecationWarningRef.current = true; + + if (options?.onSubscriptionData) { + invariant.warn( + options.onData + ? "'useSubscription' supports only the 'onSubscriptionData' or 'onData' option, but not both. Only the 'onData' option will be used." + : "'onSubscriptionData' is deprecated and will be removed in a future major version. Please use the 'onData' option instead." + ); + } + + if (options?.onSubscriptionComplete) { + invariant.warn( + options.onComplete + ? "'useSubscription' supports only the 'onSubscriptionComplete' or 'onComplete' option, but not both. Only the 'onComplete' option will be used." + : "'onSubscriptionComplete' is deprecated and will be removed in a future major version. Please use the 'onComplete' option instead." 
+ ); + } + } + const [observable, setObservable] = useState(() => { if (options?.skip) { return null; @@ -107,10 +129,17 @@ export function useSubscription<TData = any, TVariables = OperationVariables>( }; setResult(result); - ref.current.options?.onSubscriptionData?.({ - client, - subscriptionData: result - }); + if (ref.current.options?.onData) { + ref.current.options.onData({ + client, + data: result + }); + } else if (ref.current.options?.onSubscriptionData) { + ref.current.options.onSubscriptionData({ + client, + subscriptionData: result + }); + } }, error(error) { setResult({ @@ -122,7 +151,11 @@ export function useSubscription<TData = any, TVariables = OperationVariables>( ref.current.options?.onError?.(error); }, complete() { - ref.current.options?.onSubscriptionComplete?.(); + if (ref.current.options?.onComplete) { + ref.current.options.onComplete(); + } else if (ref.current.options?.onSubscriptionComplete) { + ref.current.options.onSubscriptionComplete(); + } }, }); diff --git a/src/react/types/types.ts b/src/react/types/types.ts --- a/src/react/types/types.ts +++ b/src/react/types/types.ts @@ -202,6 +202,11 @@ export type MutationTuple< /* Subscription types */ +export interface OnDataOptions<TData = any> { + client: ApolloClient<object>; + data: SubscriptionResult<TData>; +} + export interface OnSubscriptionDataOptions<TData = any> { client: ApolloClient<object>; subscriptionData: SubscriptionResult<TData>; @@ -219,8 +224,16 @@ export interface BaseSubscriptionOptions< client?: ApolloClient<object>; skip?: boolean; context?: DefaultContext; + onComplete?: () => void; + onData?: (options: OnDataOptions<TData>) => any; + /** + * @deprecated Use onData instead + */ onSubscriptionData?: (options: OnSubscriptionDataOptions<TData>) => any; onError?: (error: ApolloError) => void; + /** + * @deprecated Use onComplete instead + */ onSubscriptionComplete?: () => void; }
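For context, a small hypothetical usage sketch of the `onData` / `onComplete` options this patch introduces (the subscription document and component are made up for illustration; per the patch, the new callbacks take precedence over the deprecated `onSubscriptionData` / `onSubscriptionComplete` options, which now log a deprecation warning):

```tsx
import * as React from "react";
import { gql, useSubscription } from "@apollo/client";

// Hypothetical subscription document, for illustration only.
const ON_CAR_UPDATED = gql`
  subscription OnCarUpdated {
    car {
      make
    }
  }
`;

function LatestCar() {
  const { data, loading } = useSubscription(ON_CAR_UPDATED, {
    // New callbacks from this patch; used instead of the deprecated
    // onSubscriptionData / onSubscriptionComplete options.
    onData: ({ data }) => {
      // `data` here is the SubscriptionResult; the payload is on `data.data`.
      console.log("received", data.data);
    },
    onComplete: () => {
      console.log("subscription completed");
    },
  });

  return <div>{loading ? "waiting..." : data?.car?.make}</div>;
}
```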
diff --git a/src/react/components/__tests__/client/Subscription.test.tsx b/src/react/components/__tests__/client/Subscription.test.tsx --- a/src/react/components/__tests__/client/Subscription.test.tsx +++ b/src/react/components/__tests__/client/Subscription.test.tsx @@ -88,7 +88,37 @@ itAsync('executes the subscription', (resolve, reject) => { waitFor(() => expect(renderCount).toBe(5)).then(resolve, reject); }); -itAsync('calls onSubscriptionData if given', (resolve, reject) => { +it('calls onData if given', async () => { + let count = 0; + + const Component = () => ( + <Subscription + subscription={subscription} + onData={(opts: any) => { + expect(opts.client).toBeInstanceOf(ApolloClient); + const { data } = opts.data; + expect(data).toEqual(results[count].result.data); + count++; + }} + /> + ); + + render( + <ApolloProvider client={client}> + <Component /> + </ApolloProvider> + ); + + const interval = setInterval(() => { + link.simulateResult(results[count]); + if (count >= 3) clearInterval(interval); + }, 10); + + await waitFor(() => expect(count).toBe(4)); +}); + +it('calls onSubscriptionData with deprecation warning if given', async () => { + const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); let count = 0; const Component = () => ( @@ -109,22 +139,60 @@ itAsync('calls onSubscriptionData if given', (resolve, reject) => { </ApolloProvider> ); + expect(consoleWarnSpy).toHaveBeenCalledTimes(1); + expect(consoleWarnSpy).toHaveBeenCalledWith( + expect.stringContaining("'onSubscriptionData' is deprecated") + ); + const interval = setInterval(() => { link.simulateResult(results[count]); if (count >= 3) clearInterval(interval); }, 10); - waitFor(() => expect(count).toBe(4)).then(resolve, reject); + await waitFor(() => expect(count).toBe(4)) + + consoleWarnSpy.mockRestore(); }); -itAsync('should call onSubscriptionComplete if specified', (resolve, reject) => { +it('should call onComplete if specified', async () => { let count = 0; let done = false; const Component = () => ( <Subscription subscription={subscription} - onSubscriptionData={() => { + onData={() => { + count++; + }} + onComplete={() => { + done = true; + }} + /> + ); + + render( + <ApolloProvider client={client}> + <Component /> + </ApolloProvider> + ); + + const interval = setInterval(() => { + link.simulateResult(results[count], count === 3); + if (count >= 3) clearInterval(interval); + }, 10); + + await waitFor(() => expect(done).toBeTruthy()); +}); + +it('should call onSubscriptionComplete with deprecation warning if specified', async () => { + const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + let count = 0; + + let done = false; + const Component = () => ( + <Subscription + subscription={subscription} + onData={() => { count++; }} onSubscriptionComplete={() => { @@ -139,12 +207,19 @@ itAsync('should call onSubscriptionComplete if specified', (resolve, reject) => </ApolloProvider> ); + expect(consoleWarnSpy).toHaveBeenCalledTimes(1); + expect(consoleWarnSpy).toHaveBeenCalledWith( + expect.stringContaining("'onSubscriptionComplete' is deprecated") + ); + const interval = setInterval(() => { link.simulateResult(results[count], count === 3); if (count >= 3) clearInterval(interval); }, 10); - waitFor(() => expect(done).toBeTruthy()).then(resolve, reject); + await waitFor(() => expect(done).toBeTruthy()); + + consoleWarnSpy.mockRestore(); }); itAsync('executes subscription for the variables passed in the props', (resolve, reject) => { diff --git 
a/src/react/hooks/__tests__/useSubscription.test.tsx b/src/react/hooks/__tests__/useSubscription.test.tsx --- a/src/react/hooks/__tests__/useSubscription.test.tsx +++ b/src/react/hooks/__tests__/useSubscription.test.tsx @@ -9,6 +9,10 @@ import { MockSubscriptionLink } from '../../../testing'; import { useSubscription } from '../useSubscription'; describe('useSubscription Hook', () => { + afterEach(() => { + jest.restoreAllMocks(); + }); + it('should handle a simple subscription properly', async () => { const subscription = gql` subscription { @@ -112,6 +116,45 @@ describe('useSubscription Hook', () => { }); }); + it('should call onComplete after subscription is complete', async () => { + const subscription = gql` + subscription { + car { + make + } + } + `; + + const results = [{ + result: { data: { car: { make: 'Audi' } } } + }]; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }) + }); + + const onComplete = jest.fn(); + const { waitForNextUpdate } = renderHook( + () => useSubscription(subscription, { onComplete }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + link.simulateResult(results[0]); + + setTimeout(() => link.simulateComplete()); + await waitForNextUpdate(); + + expect(onComplete).toHaveBeenCalledTimes(1); + }); + it('should cleanup after the subscription component has been unmounted', async () => { const subscription = gql` subscription { @@ -133,10 +176,10 @@ describe('useSubscription Hook', () => { cache: new Cache({ addTypename: false }) }); - const onSubscriptionData = jest.fn(); + const onData = jest.fn(); const { result, unmount, waitForNextUpdate } = renderHook( () => useSubscription(subscription, { - onSubscriptionData, + onData, }), { wrapper: ({ children }) => ( @@ -156,17 +199,17 @@ describe('useSubscription Hook', () => { expect(result.current.error).toBe(undefined); expect(result.current.data).toBe(results[0].result.data); setTimeout(() => { - expect(onSubscriptionData).toHaveBeenCalledTimes(1); + expect(onData).toHaveBeenCalledTimes(1); // After the component has been unmounted, the internal // ObservableQuery should be stopped, meaning it shouldn't - // receive any new data (so the onSubscriptionDataCount should + // receive any new data (so the onDataCount should // stay at 1). 
unmount(); link.simulateResult(results[0]); }); await new Promise((resolve) => setTimeout(resolve, 100)); - expect(onSubscriptionData).toHaveBeenCalledTimes(1); + expect(onData).toHaveBeenCalledTimes(1); }); it('should never execute a subscription with the skip option', async () => { @@ -186,7 +229,7 @@ describe('useSubscription Hook', () => { cache: new Cache({ addTypename: false }) }); - const onSubscriptionData = jest.fn(); + const onData = jest.fn(); const wrapper: React.FC<PropsWithChildren<{ variables: { foo: string } }>> = ({ children }) => ( <ApolloProvider client={client}> {children} @@ -197,7 +240,7 @@ describe('useSubscription Hook', () => { ({ variables }) => useSubscription(subscription, { variables, skip: true, - onSubscriptionData, + onData, }), { initialProps: { @@ -218,7 +261,7 @@ describe('useSubscription Hook', () => { .rejects.toThrow('Timed out'); expect(onSetup).toHaveBeenCalledTimes(0); - expect(onSubscriptionData).toHaveBeenCalledTimes(0); + expect(onData).toHaveBeenCalledTimes(0); unmount(); }); @@ -557,4 +600,322 @@ describe('useSubscription Hook', () => { ); errorSpy.mockRestore(); }); + + test("should warn when using 'onSubscriptionData' and 'onData' together", () => { + const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + renderHook( + () => useSubscription(subscription, { + onData: jest.fn(), + onSubscriptionData: jest.fn(), + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + expect(warningSpy).toHaveBeenCalledTimes(1); + expect(warningSpy).toHaveBeenCalledWith(expect.stringContaining("supports only the 'onSubscriptionData' or 'onData' option")); + }); + + test("prefers 'onData' when using 'onSubscriptionData' and 'onData' together", async () => { + jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const results = [ + { + result: { data: { car: { make: 'Pagani' } } } + } + ]; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onData = jest.fn(); + const onSubscriptionData = jest.fn(); + + const { waitForNextUpdate } = renderHook( + () => useSubscription(subscription, { + onData, + onSubscriptionData, + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + setTimeout(() => link.simulateResult(results[0])); + await waitForNextUpdate(); + + setTimeout(() => { + expect(onData).toHaveBeenCalledTimes(1); + expect(onSubscriptionData).toHaveBeenCalledTimes(0); + }); + }); + + test("uses 'onSubscriptionData' when 'onData' is absent", async () => { + jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const results = [ + { + result: { data: { car: { make: 'Pagani' } } } + } + ]; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onSubscriptionData = jest.fn(); + + const { waitForNextUpdate } = renderHook( + () => useSubscription(subscription, { + onSubscriptionData, + }), + { + wrapper: ({ children }) => ( + <ApolloProvider 
client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + setTimeout(() => link.simulateResult(results[0])); + await waitForNextUpdate(); + + setTimeout(() => { + expect(onSubscriptionData).toHaveBeenCalledTimes(1); + }); + }); + + test("only warns once using `onSubscriptionData`", () => { + const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const { rerender } = renderHook( + () => useSubscription(subscription, { + onSubscriptionData: jest.fn(), + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + rerender(); + + expect(warningSpy).toHaveBeenCalledTimes(1); + }); + + test("should warn when using 'onComplete' and 'onSubscriptionComplete' together", () => { + const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + renderHook( + () => useSubscription(subscription, { + onComplete: jest.fn(), + onSubscriptionComplete: jest.fn(), + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + expect(warningSpy).toHaveBeenCalledTimes(1); + expect(warningSpy).toHaveBeenCalledWith(expect.stringContaining("supports only the 'onSubscriptionComplete' or 'onComplete' option")); + }); + + test("prefers 'onComplete' when using 'onComplete' and 'onSubscriptionComplete' together", async () => { + jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const results = [{ + result: { data: { car: { make: 'Audi' } } } + }]; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onComplete = jest.fn(); + const onSubscriptionComplete = jest.fn(); + + const { waitForNextUpdate } = renderHook( + () => useSubscription(subscription, { + onComplete, + onSubscriptionComplete, + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + link.simulateResult(results[0]); + + setTimeout(() => link.simulateComplete()); + await waitForNextUpdate(); + + expect(onComplete).toHaveBeenCalledTimes(1); + expect(onSubscriptionComplete).toHaveBeenCalledTimes(0); + }); + + test("uses 'onSubscriptionComplete' when 'onComplete' is absent", async () => { + jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const results = [{ + result: { data: { car: { make: 'Audi' } } } + }]; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const onSubscriptionComplete = jest.fn(); + + const { waitForNextUpdate } = renderHook( + () => useSubscription(subscription, { + onSubscriptionComplete, + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + link.simulateResult(results[0]); + + setTimeout(() => link.simulateComplete()); + await waitForNextUpdate(); 
+ + expect(onSubscriptionComplete).toHaveBeenCalledTimes(1); + }); + + test("only warns once using `onSubscriptionComplete`", () => { + const warningSpy = jest.spyOn(console, 'warn').mockImplementation(() => {}); + const subscription = gql` + subscription { + car { + make + } + } + `; + + const link = new MockSubscriptionLink(); + const client = new ApolloClient({ + link, + cache: new Cache({ addTypename: false }), + }); + + const { rerender } = renderHook( + () => useSubscription(subscription, { + onSubscriptionComplete: jest.fn(), + }), + { + wrapper: ({ children }) => ( + <ApolloProvider client={client}> + {children} + </ApolloProvider> + ), + }, + ); + + rerender(); + + expect(warningSpy).toHaveBeenCalledTimes(1); + }); });
change `onSubscriptionData` and `onSubscriptionComplete` to `onData` and `onComplete`

- relates to #9495
- see [comment](https://github.com/apollographql/apollo-client/pull/9495/files#r866194974)

As a follow-up to #9495 we want to change `onSubscriptionData` and `onSubscriptionComplete` to `onData` and `onComplete`, so that these options are consistent with our naming conventions.
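For illustration, a minimal sketch of what the renamed options could look like from a consumer's perspective. The option names follow this issue; the example subscription and the callback argument shapes are assumptions, not confirmed API:

```ts
import { gql, useSubscription } from "@apollo/client";

const COMMENTS_SUBSCRIPTION = gql`
  subscription OnCommentAdded {
    commentAdded {
      id
      content
    }
  }
`;

// Hypothetical wrapper hook using the proposed option names.
function useLatestComment() {
  return useSubscription(COMMENTS_SUBSCRIPTION, {
    // previously: onSubscriptionData
    onData: (result) => console.log("received", result),
    // previously: onSubscriptionComplete
    onComplete: () => console.log("subscription complete"),
  });
}
```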
@jpvajda I'd like to consider taking this on. Since `onSubscriptionData` and `onSubscriptionComplete` are currently in the stable API, I assume we'd want to deprecate these existing APIs (by showing a deprecation message) in addition to adding the new `onData` and `onComplete`. Does this sound correct? Should we allow users to use both together? My initial thinking is no, since one will replace the other. Perhaps we error in this situation?
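A rough sketch of the deprecation behavior suggested here and exercised by the tests above: warn once when both the old and new callback are provided, and prefer the new one. The helper name and exact wiring are illustrative, not the actual implementation; only the "supports only the 'onSubscriptionData' or 'onData' option" wording is taken from the tests.

```ts
import { useRef } from "react";

interface SubscriptionCallbackOptions {
  onData?: (result: unknown) => void;
  onSubscriptionData?: (result: unknown) => void; // deprecated
  onComplete?: () => void;
  onSubscriptionComplete?: () => void; // deprecated
}

// Hypothetical helper: reconcile old/new callbacks and warn only once per
// hook instance, even across rerenders.
function useReconciledCallbacks(options: SubscriptionCallbackOptions) {
  const didWarn = useRef(false);
  if (!didWarn.current && options.onData && options.onSubscriptionData) {
    didWarn.current = true;
    console.warn(
      "useSubscription supports only the 'onSubscriptionData' or 'onData' option, but not both."
    );
  }
  return {
    // Prefer the new names when both are given.
    onData: options.onData ?? options.onSubscriptionData,
    onComplete: options.onComplete ?? options.onSubscriptionComplete,
  };
}
```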
2022-09-27T15:59:45Z
3.7
apollographql/apollo-client
9,808
apollographql__apollo-client-9808
[ "9804" ]
83935e8e1ea2c3eb4a0f10fffbbfb4d51cfc02d2
diff --git a/src/cache/inmemory/policies.ts b/src/cache/inmemory/policies.ts --- a/src/cache/inmemory/policies.ts +++ b/src/cache/inmemory/policies.ts @@ -912,7 +912,7 @@ function makeFieldFunctionOptions( canRead, readField<T>() { return policies.readField<T>( - normalizeReadFieldOptions(arguments, objectOrReference, context), + normalizeReadFieldOptions(arguments, objectOrReference, variables), context, ); },
diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -1650,6 +1650,80 @@ describe("type policies", function () { expect(cache.extract(true)).toEqual(expectedExtraction); }); + it("should return correct variables in read function", function () { + const cache = new InMemoryCache({ + typePolicies: { + Country: { + fields: { + isCanada: { + read(_, { readField }) { + return readField("name") === "CA"; + } + }, + name: { + read(_, { variables }) { + return variables?.code; + } + } + } + } + } + }); + + cache.writeQuery({ + query: gql` + query Countries($code: ID!) { + country(code: $code) { + name + } + } + `, + data: { + country: { + __typename: "Country", + name: "CA", + }, + }, + variables: { + code: "CA", + }, + }); + + const expectedExtraction = { + ROOT_QUERY: { + __typename: "Query", + "country({\"code\":\"CA\"})": { + __typename: "Country", + name: "CA", + }, + }, + }; + + expect(cache.extract(true)).toEqual(expectedExtraction); + + const expectedResult = { + country: { + __typename: "Country", + name: "CA", + isCanada: true, + }, + }; + + expect(cache.readQuery({ + query: gql` + query Countries($code: ID!) { + country(code: $code) { + name + isCanada @client + } + } + `, + variables: { + code: "CA", + }, + })).toEqual(expectedResult); + }); + it("read and merge can cooperate through options.storage", function () { const cache = new InMemoryCache({ typePolicies: {
FieldFunctionOptions returns incorrect variables

**Intended outcome:**

Prior to 3.5.0, and according to the docs, we can add a local-only field and use the type policies to populate its data with the `read` and `readField` functions if necessary. The code may look something like this:

```
fields: {
  isCanada: {
    read(_, { readField }) {
      return readField("name") === "Canada";
    }
  },
  name: {
    read(name, { variables }) {
      return name;
    }
  }
}
```

The `variables` argument in the `read` function should always be the actual variables passed to this particular query, so something like:

```
{ code: "CA" }
```

**Actual outcome:**

Instead of returning the actual variables, it returns other metadata like:

<img width="221" alt="image" src="https://user-images.githubusercontent.com/7050316/172903626-434f3813-554b-4767-a552-6b026cf20817.png">

The actual variables are now nested under `variables.variables`.

**How to reproduce the issue:**

Set up a local field and a read function for that field. In that read function, access another field using the `readField` function. Set up another read function for the field being accessed, then console log the variables.

https://codesandbox.io/s/quizzical-panka-x9v60j?file=/src/index.js

Sorry, I know I'm probably not supposed to share speculation, but this is what I've found: in 3.5.0, the `context` object is passed to the `normalizeReadFieldOptions` function, and `options.variables` later ends up being that `context` object: https://github.com/apollographql/apollo-client/blob/538b79594d02189f511622d8fed85b679d69dfd3/src/cache/inmemory/policies.ts#L915

In 3.4.13, `options.variables` was the `variables` object: https://github.com/apollographql/apollo-client/blob/92141607ff6c650145350c222a2434cd7008e4b5/src/cache/inmemory/policies.ts#L884

There was probably a reason I wasn't aware of, but I just wanted to share what I've found, thank you.

**Versions**

```
System:
  OS: macOS 12.3.1
Binaries:
  Node: 14.19.3 - /usr/local/opt/node@14/bin/node
  npm: 6.14.17 - /usr/local/opt/node@14/bin/npm
Browsers:
  Chrome: 102.0.5005.61
  Edge: 102.0.1245.33
  Firefox: 101.0
  Safari: 15.4
npmPackages:
  @apollo/client: ^3.5.0 => 3.5.0
```
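For reference, a self-contained version of the type policy from this report, showing the intended behavior. The `Country`/`code` schema follows the reproduction; the `console.log` is only there to inspect `variables`:

```ts
import { InMemoryCache } from "@apollo/client";

const cache = new InMemoryCache({
  typePolicies: {
    Country: {
      fields: {
        isCanada: {
          // readField("name") re-enters the `name` read function below.
          read(_, { readField }) {
            return readField<string>("name") === "Canada";
          },
        },
        name: {
          read(name, { variables }) {
            // Expected: variables is the query's variables object,
            // e.g. { code: "CA" } -- not the internal context object.
            console.log(variables);
            return name;
          },
        },
      },
    },
  },
});
```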
2022-06-10T19:25:30Z
3.6
apollographql/apollo-client
9,369
apollographql__apollo-client-9369
[ "8345" ]
915f34d8056278c2ac6121e5e63f65193d01b45c
diff --git a/config/entryPoints.js b/config/entryPoints.js --- a/config/entryPoints.js +++ b/config/entryPoints.js @@ -12,6 +12,7 @@ const entryPoints = [ { dirs: ['link', 'persisted-queries'] }, { dirs: ['link', 'retry'] }, { dirs: ['link', 'schema'] }, + { dirs: ['link', 'subscriptions'] }, { dirs: ['link', 'utils'] }, { dirs: ['link', 'ws'] }, { dirs: ['react'] }, diff --git a/docs/gatsby-config.js b/docs/gatsby-config.js --- a/docs/gatsby-config.js +++ b/docs/gatsby-config.js @@ -109,6 +109,7 @@ module.exports = { 'api/link/apollo-link-rest', 'api/link/apollo-link-retry', 'api/link/apollo-link-schema', + 'api/link/apollo-link-subscriptions', 'api/link/apollo-link-ws', 'api/link/community-links' ], diff --git a/src/link/subscriptions/index.ts b/src/link/subscriptions/index.ts new file mode 100644 --- /dev/null +++ b/src/link/subscriptions/index.ts @@ -0,0 +1,86 @@ +// This file is adapted from the graphql-ws npm package: +// https://github.com/enisdenjo/graphql-ws +// +// Most of the file comes from that package's README; some other parts (such as +// isLikeCloseEvent) come from its source. +// +// Here's the license of the original code: +// +// The MIT License (MIT) +// +// Copyright (c) 2020-2021 Denis Badurina +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +import { print } from "graphql"; +import type { Client } from "graphql-ws"; + +import { ApolloLink, Operation, FetchResult } from "../core"; +import { isNonNullObject, Observable } from "../../utilities"; +import { ApolloError } from "../../errors"; + +interface LikeCloseEvent { + /** Returns the WebSocket connection close code provided by the server. */ + readonly code: number; + /** Returns the WebSocket connection close reason provided by the server. 
*/ + readonly reason: string; +} + +function isLikeCloseEvent(val: unknown): val is LikeCloseEvent { + return isNonNullObject(val) && 'code' in val && 'reason' in val; +} + + +export class GraphQLWsLink extends ApolloLink { + constructor(public readonly client: Client) { + super(); + } + + public request(operation: Operation): Observable<FetchResult> { + return new Observable((observer) => { + return this.client.subscribe<FetchResult>( + { ...operation, query: print(operation.query) }, + { + next: observer.next.bind(observer), + complete: observer.complete.bind(observer), + error: (err) => { + if (err instanceof Error) { + return observer.error(err); + } + + if (isLikeCloseEvent(err)) { + return observer.error( + // reason will be available on clean closes + new Error( + `Socket closed with event ${err.code} ${err.reason || ""}` + ) + ); + } + + return observer.error( + new ApolloError({ + graphQLErrors: Array.isArray(err) ? err : [err], + }) + ); + }, + } + ); + }); + } +}
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -210,6 +210,12 @@ Array [ ] `; +exports[`exports of public entry points @apollo/client/link/subscriptions 1`] = ` +Array [ + "GraphQLWsLink", +] +`; + exports[`exports of public entry points @apollo/client/link/utils 1`] = ` Array [ "createOperation", diff --git a/src/__tests__/exports.ts b/src/__tests__/exports.ts --- a/src/__tests__/exports.ts +++ b/src/__tests__/exports.ts @@ -11,6 +11,7 @@ import * as linkHTTP from "../link/http"; import * as linkPersistedQueries from "../link/persisted-queries"; import * as linkRetry from "../link/retry"; import * as linkSchema from "../link/schema"; +import * as linkSubscriptions from "../link/subscriptions"; import * as linkUtils from "../link/utils"; import * as linkWS from "../link/ws"; import * as react from "../react"; @@ -52,6 +53,7 @@ describe('exports of public entry points', () => { check("@apollo/client/link/persisted-queries", linkPersistedQueries); check("@apollo/client/link/retry", linkRetry); check("@apollo/client/link/schema", linkSchema); + check("@apollo/client/link/subscriptions", linkSubscriptions); check("@apollo/client/link/utils", linkUtils); check("@apollo/client/link/ws", linkWS); check("@apollo/client/react", react); diff --git a/src/link/subscriptions/__tests__/graphqlWsLink.ts b/src/link/subscriptions/__tests__/graphqlWsLink.ts new file mode 100644 --- /dev/null +++ b/src/link/subscriptions/__tests__/graphqlWsLink.ts @@ -0,0 +1,106 @@ +import { Client } from "graphql-ws"; +import { ExecutionResult } from "graphql"; +import gql from "graphql-tag"; + +import { Observable } from "../../../utilities"; +import { execute } from "../../core"; +import { GraphQLWsLink } from ".."; + +const query = gql` + query SampleQuery { + stub { + id + } + } +`; + +const mutation = gql` + mutation SampleMutation { + stub { + id + } + } +`; + +const subscription = gql` + subscription SampleSubscription { + stub { + id + } + } +`; + +function mockClient(subscribe: Client["subscribe"]): Client { + return { + subscribe, + // GraphQLWsLink doesn't use these methods + on: () => () => {}, + dispose: () => {}, + }; +} + +async function observableToArray<T>(o: Observable<T>): Promise<T[]> { + const out: T[] = []; + await o.forEach((v) => out.push(v)); + return out; +} + +describe("GraphQLWSlink", () => { + it("constructs", () => { + const client = mockClient(() => () => {}); + expect(() => new GraphQLWsLink(client)).not.toThrow(); + }); + + // TODO some sort of dependency injection + + // it('should pass the correct initialization parameters to the Subscription Client', () => { + // }); + + it("should call subscribe on the client for a query", async () => { + const result = { data: { data: "result" } } as ExecutionResult<any, any>; + const subscribe: Client["subscribe"] = (_, sink) => { + sink.next(result); + sink.complete(); + return () => {}; + }; + const client = mockClient(subscribe); + const link = new GraphQLWsLink(client); + + const obs = execute(link, { query }); + await expect(observableToArray(obs)).resolves.toEqual([result]); + }); + + it("should call subscribe on the client for a mutation", async () => { + const result = { data: { data: "result" } } as ExecutionResult<any, any>; + const subscribe: Client["subscribe"] = (_, sink) => { + sink.next(result); + sink.complete(); + return () => {}; + }; + const client = mockClient(subscribe); + 
const link = new GraphQLWsLink(client); + + const obs = execute(link, { query: mutation }); + await expect(observableToArray(obs)).resolves.toEqual([result]); + }); + + it("should call next with multiple results for subscription", async () => { + const results = [ + { data: { data: "result1" } }, + { data: { data: "result2" } }, + ] as ExecutionResult<any, any>[]; + const subscribe: Client["subscribe"] = (_, sink) => { + const copy = [...results]; + for (const r of copy) { + sink.next(r); + } + sink.complete(); + return () => {}; + }; + const client = mockClient(subscribe); + const link = new GraphQLWsLink(client); + + const obs = execute(link, { query: subscription }); + await expect(observableToArray(obs)).resolves.toEqual(results); + }); +}); diff --git a/src/link/ws/__tests__/webSocketLink.ts b/src/link/ws/__tests__/webSocketLink.ts --- a/src/link/ws/__tests__/webSocketLink.ts +++ b/src/link/ws/__tests__/webSocketLink.ts @@ -88,7 +88,7 @@ describe('WebSocketLink', () => { client.request.mockReturnValueOnce(observable); const link = new WebSocketLink(client); - const obs = execute(link, { query: mutation }); + const obs = execute(link, { query: subscription }); expect(obs).toEqual(observable); obs.subscribe(data => { expect(data).toEqual(result);
Why use WebSocketLink from '@apollo/client/link/ws'?

1) Sorry for my English. 2) Thanks for your time.

In your documentation (https://www.apollographql.com/docs/react/api/link/apollo-link-ws/) you encourage using `WebSocketLink`, which depends on the subscriptions-transport-ws npm package, but the apollographql/subscriptions-transport-ws repo says:

> "The subscriptions-transport-ws library is not being actively maintained. It is recommended that you use the graphql-ws library instead."

The same applies here: https://www.apollographql.com/docs/graphql-subscriptions/authentication/

On the other hand, the following links implement different approaches to the same problem. It would be great to either unify them, or explain the pros/cons and when to use which:

https://www.apollographql.com/docs/graphql-subscriptions/express/
https://www.apollographql.com/docs/apollo-server/data/subscriptions/#example-authentication-with-onconnect

Finally, I would like to ask for an example of how to implement graphql-ws with Apollo Server.
+1, I think it would be better to link to the new `graphql-ws` library, which is being actively maintained. To the maintainers: was there a particular reason for using `subscriptions-transport-ws` in the documentation example? If not, I'm happy to work on getting the documentation updated for the replacement library.

I tried to implement graphql-ws with Apollo (following the example in the graphql-ws documentation) but had no success. It would be great to have official documentation on how to implement it.

+1

Example is here: https://github.com/enisdenjo/graphql-ws#apollo-client
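For what it's worth, here is a minimal client-side setup using the `GraphQLWsLink` entry point added by this change, split so that only subscriptions go over the WebSocket. Endpoint URLs are placeholders, and server-side setup is not shown:

```ts
import { ApolloClient, InMemoryCache, HttpLink, split } from "@apollo/client";
import { GraphQLWsLink } from "@apollo/client/link/subscriptions";
import { getMainDefinition } from "@apollo/client/utilities";
import { createClient } from "graphql-ws";

const wsLink = new GraphQLWsLink(
  createClient({ url: "ws://localhost:4000/graphql" })
);

const httpLink = new HttpLink({ uri: "http://localhost:4000/graphql" });

// Route subscription operations over WebSocket, everything else over HTTP.
const splitLink = split(
  ({ query }) => {
    const definition = getMainDefinition(query);
    return (
      definition.kind === "OperationDefinition" &&
      definition.operation === "subscription"
    );
  },
  wsLink,
  httpLink
);

const client = new ApolloClient({
  link: splitLink,
  cache: new InMemoryCache(),
});
```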
2022-01-28T20:35:32Z
3.5
apollographql/apollo-client
9,328
apollographql__apollo-client-9328
[ "9142" ]
a3e8ae0ccbc2fdd0cc087a8cd89221ca9eed1bca
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -326,7 +326,7 @@ export class ObservableQuery< // (no-cache, network-only, or cache-and-network), override it with // network-only to force the refetch for this fetchQuery call. const { fetchPolicy } = this.options; - if (fetchPolicy === 'standby' || fetchPolicy === 'cache-and-network') { + if (fetchPolicy === 'cache-and-network') { reobserveOptions.fetchPolicy = fetchPolicy; } else if (fetchPolicy === 'no-cache') { reobserveOptions.fetchPolicy = 'no-cache'; diff --git a/src/react/hooks/useLazyQuery.ts b/src/react/hooks/useLazyQuery.ts --- a/src/react/hooks/useLazyQuery.ts +++ b/src/react/hooks/useLazyQuery.ts @@ -1,10 +1,9 @@ import { DocumentNode } from 'graphql'; import { TypedDocumentNode } from '@graphql-typed-document-node/core'; -import { useCallback, useEffect, useState } from 'react'; +import { useCallback, useMemo, useState } from 'react'; import { LazyQueryHookOptions, - LazyQueryResult, QueryLazyOptions, QueryTuple, } from '../types/types'; @@ -25,54 +24,21 @@ export function useLazyQuery<TData = any, TVariables = OperationVariables>( query: DocumentNode | TypedDocumentNode<TData, TVariables>, options?: LazyQueryHookOptions<TData, TVariables> ): QueryTuple<TData, TVariables> { - const [execution, setExecution] = useState< - { - called: boolean, - options?: QueryLazyOptions<TVariables>, - resolves: Array<(result: LazyQueryResult<TData, TVariables>) => void>, - } - >({ + const [execution, setExecution] = useState<{ + called: boolean, + options?: QueryLazyOptions<TVariables>, + }>({ called: false, - resolves: [], }); - const execute = useCallback< - QueryTuple<TData, TVariables>[0] - >((executeOptions?: QueryLazyOptions<TVariables>) => { - let resolve!: (result: LazyQueryResult<TData, TVariables>) => void; - const promise = new Promise<LazyQueryResult<TData, TVariables>>( - (resolve1) => (resolve = resolve1), - ); - setExecution((execution) => { - if (execution.called) { - result && result.refetch(executeOptions?.variables); - } - - return { - called: true, - resolves: [...execution.resolves, resolve], - options: executeOptions, - }; - }); - - return promise; - }, []); - let result = useQuery<TData, TVariables>(query, { ...options, ...execution.options, - // We don’t set skip to execution.called, because we need useQuery to call - // addQueryPromise, so that ssr calls waits for execute to be called. + // We don’t set skip to execution.called, because some useQuery SSR code + // checks skip for some reason. fetchPolicy: execution.called ? options?.fetchPolicy : 'standby', skip: undefined, }); - useEffect(() => { - const { resolves } = execution; - if (!result.loading && resolves.length) { - setExecution((execution) => ({ ...execution, resolves: [] })); - resolves.forEach((resolve) => resolve(result)); - } - }, [result, execution]); if (!execution.called) { result = { @@ -82,16 +48,42 @@ export function useLazyQuery<TData = any, TVariables = OperationVariables>( error: void 0, called: false, }; + } - + // We use useMemo here to make sure the eager methods have a stable identity. 
+ const eagerMethods = useMemo(() => { + const eagerMethods: Record<string, any> = {}; for (const key of EAGER_METHODS) { const method = result[key]; - result[key] = (...args: any) => { + eagerMethods[key] = (...args: any) => { setExecution((execution) => ({ ...execution, called: true })); return (method as any)(...args); }; } - } + + return eagerMethods; + }, []); + + result.error = result.error || void 0; + Object.assign(result, eagerMethods); + + const execute = useCallback< + QueryTuple<TData, TVariables>[0] + >((executeOptions?: QueryLazyOptions<TVariables>) => { + setExecution({ called: true, options: executeOptions }); + return result.refetch(executeOptions?.variables).then((result1) => { + const result2 = { + ...result, + data: result1.data, + error: result1.error, + called: true, + loading: false, + }; + + Object.assign(result2, eagerMethods); + return result2; + }); + }, []); return [execute, result]; }
diff --git a/src/react/hooks/__tests__/useLazyQuery.test.tsx b/src/react/hooks/__tests__/useLazyQuery.test.tsx --- a/src/react/hooks/__tests__/useLazyQuery.test.tsx +++ b/src/react/hooks/__tests__/useLazyQuery.test.tsx @@ -1,4 +1,5 @@ import React from 'react'; +import { GraphQLError } from 'graphql'; import gql from 'graphql-tag'; import { renderHook } from '@testing-library/react-hooks'; @@ -450,6 +451,7 @@ describe('useLazyQuery Hook', () => { expect(result.current[1].previousData).toBe(undefined); setTimeout(() => execute({ variables: { id: 2 }})); + await waitForNextUpdate(); expect(result.current[1].loading).toBe(true); expect(result.current[1].data).toBe(undefined); @@ -530,8 +532,10 @@ describe('useLazyQuery Hook', () => { expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toBe(undefined); const execute = result.current[0]; - const mock = jest.fn(); - setTimeout(() => mock(execute())); + let executeResult: any; + setTimeout(() => { + executeResult = execute(); + }); await waitForNextUpdate(); expect(result.current[1].loading).toBe(true); @@ -539,9 +543,186 @@ describe('useLazyQuery Hook', () => { await waitForNextUpdate(); expect(result.current[1].loading).toBe(false); expect(result.current[1].data).toEqual({ hello: 'world' }); + await expect(executeResult).resolves.toEqual(result.current[1]); + }); + + it('should have matching results from execution function and hook', async () => { + const query = gql` + query GetCountries($filter: String) { + countries(filter: $filter) { + code + name + } + } + `; + + const mocks = [ + { + request: { + query, + variables: { + filter: "PA", + }, + }, + result: { + data: { + countries: { + code: "PA", + name: "Panama", + }, + }, + }, + delay: 20, + }, + { + request: { + query, + variables: { + filter: "BA", + }, + }, + result: { + data: { + countries: { + code: "BA", + name: "Bahamas", + }, + }, + }, + delay: 20, + }, + ]; + + const { result, waitForNextUpdate } = renderHook( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}> + {children} + </MockedProvider> + ), + }, + ); + + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toBe(undefined); + const execute = result.current[0]; + let executeResult: any; + setTimeout(() => { + executeResult = execute({ variables: { filter: "PA" } }); + }); + + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(true); + + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toEqual({ + countries: { + code: "PA", + name: "Panama", + }, + }); + + expect(executeResult).toBeInstanceOf(Promise); + expect((await executeResult).data).toEqual({ + countries: { + code: "PA", + name: "Panama", + }, + }); + + setTimeout(() => { + executeResult = execute({ variables: { filter: "BA" } }); + }); + + await waitForNextUpdate(); + // TODO: Get rid of this render. 
+ + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toEqual({ + countries: { + code: "BA", + name: "Bahamas", + }, + }); + + expect(executeResult).toBeInstanceOf(Promise); + expect((await executeResult).data).toEqual({ + countries: { + code: "BA", + name: "Bahamas", + }, + }); + }); + + it('the promise should reject with errors the “way useMutation does”', async () => { + const query = gql`{ hello }`; + const mocks = [ + { + request: { query }, + result: { + errors: [new GraphQLError('error 1')], + }, + delay: 20, + }, + { + request: { query }, + result: { + errors: [new GraphQLError('error 2')], + }, + delay: 20, + }, + ]; + + const { result, waitForNextUpdate } = renderHook( + () => useLazyQuery(query), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}> + {children} + </MockedProvider> + ), + }, + ); + + const execute = result.current[0]; + let executeResult: any; + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toBe(undefined); + setTimeout(() => { + executeResult = execute(); + executeResult.catch(() => {}); + }); + + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(true); + expect(result.current[1].data).toBe(undefined); + expect(result.current[1].error).toBe(undefined); + + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toBe(undefined); + expect(result.current[1].error).toEqual(new Error('error 1')); + + await expect(executeResult).rejects.toEqual(new Error('error 1')); + + setTimeout(() => { + executeResult = execute(); + executeResult.catch(() => {}); + }); + + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toBe(undefined); + expect(result.current[1].error).toEqual(new Error('error 1')); + + await waitForNextUpdate(); + expect(result.current[1].loading).toBe(false); + expect(result.current[1].data).toBe(undefined); + expect(result.current[1].error).toEqual(new Error('error 2')); - expect(mock).toHaveBeenCalledTimes(1); - expect(mock.mock.calls[0][0]).toBeInstanceOf(Promise); - expect(await mock.mock.calls[0][0]).toEqual(result.current[1]); + await expect(executeResult).rejects.toEqual(new Error('error 2')); }); }); diff --git a/src/react/hooks/__tests__/useMutation.test.tsx b/src/react/hooks/__tests__/useMutation.test.tsx --- a/src/react/hooks/__tests__/useMutation.test.tsx +++ b/src/react/hooks/__tests__/useMutation.test.tsx @@ -272,6 +272,49 @@ describe('useMutation Hook', () => { expect(onError.mock.calls[0][0].message).toBe(CREATE_TODO_ERROR); }); + it('should reject when there’s only an error and no error policy is set', async () => { + const variables = { + description: 'Get milk!' + }; + + const mocks = [ + { + request: { + query: CREATE_TODO_MUTATION, + variables, + }, + result: { + errors: [new GraphQLError(CREATE_TODO_ERROR)], + }, + } + ]; + + const { result } = renderHook( + () => useMutation(CREATE_TODO_MUTATION), + { wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}> + {children} + </MockedProvider> + )}, + ); + + const createTodo = result.current[0]; + let fetchError: any; + await act(async () => { + // need to call createTodo this way to get “act” warnings to go away. 
+ try { + await createTodo({ variables }); + } catch (err) { + fetchError = err; + return; + } + + throw new Error("function did not error"); + }); + + expect(fetchError).toEqual(new GraphQLError(CREATE_TODO_ERROR)); + }); + it(`should reject when errorPolicy is 'none'`, async () => { const variables = { description: 'Get milk!' @@ -341,7 +384,47 @@ describe('useMutation Hook', () => { expect(fetchResult.data).toEqual(CREATE_TODO_RESULT); expect(fetchResult.errors[0].message).toEqual(CREATE_TODO_ERROR); - }) + }); + + it(`should ignore errors when errorPolicy is 'ignore'`, async () => { + const errorMock = jest.spyOn(console, "error") + .mockImplementation(() => {}); + const variables = { + description: 'Get milk!' + }; + + const mocks = [ + { + request: { + query: CREATE_TODO_MUTATION, + variables, + }, + result: { + errors: [new GraphQLError(CREATE_TODO_ERROR)], + }, + } + ]; + + const { result } = renderHook( + () => useMutation(CREATE_TODO_MUTATION, { errorPolicy: "ignore" }), + { wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}> + {children} + </MockedProvider> + )}, + ); + + const createTodo = result.current[0]; + let fetchResult: any; + await act(async () => { + fetchResult = await createTodo({ variables }); + }); + + expect(fetchResult).toEqual({}); + expect(errorMock).toHaveBeenCalledTimes(1); + expect(errorMock.mock.calls[0][0]).toMatch("Missing field"); + errorMock.mockRestore(); + }); }); it('should return the current client instance in the result object', async () => { diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -3071,7 +3071,8 @@ describe('useQuery Hook', () => { expect(result.current.data).toEqual({ hello: 'world' }); }); - it('should not refetch when skip is true', async () => { + // Amusingly, #8270 thinks this is a bug, but #9101 thinks this is not. + it('should refetch when skip is true', async () => { const query = gql`{ hello }`; const link = new ApolloLink(() => Observable.of({ data: { hello: 'world' }, @@ -3098,13 +3099,18 @@ describe('useQuery Hook', () => { expect(result.current.data).toBe(undefined); await expect(waitForNextUpdate({ timeout: 20 })) .rejects.toThrow('Timed out'); - result.current.refetch(); - await expect(waitForNextUpdate({ timeout: 20 })) - .rejects.toThrow('Timed out'); + const promise = result.current.refetch(); + // TODO: Not really sure about who is causing this render. + await waitForNextUpdate(); expect(result.current.loading).toBe(false); expect(result.current.data).toBe(undefined); - expect(requestSpy).toHaveBeenCalledTimes(0); + expect(requestSpy).toHaveBeenCalledTimes(1); requestSpy.mockRestore(); + expect(promise).resolves.toEqual({ + data: {hello: "world"}, + loading: false, + networkStatus: 7, + }); }); });
useLazyQuery promise .catch not catching errors, when useMutation does I should be able to add a `.catch` to the `useLazyQuery` promise, which should be called when the response is a graphql _or_ network error. Right now, the `.catch` does not fire in either case, and the error is present in the result variable inside the `.then`. This is inconsistent with the `useMutation` which does fire the `.catch` on errors. ---------------- **Intended outcome:** ```js someQuery() .then((r) => console.log('query done!', r)) .catch((e) => console.log('query error!', e)) ``` I should get `query error!` when there is an error. **Actual outcome:** ```js someQuery() .then((r) => console.log('query done!', r)) .catch((e) => console.log('query error!', e)) ``` I get `query done!` when there is an error. ---------------- **How to reproduce the issue:** <details><summary>Click to open code example</summary> The following example uses the `MockedProvider` but has the exact same result when using the normal `ApolloProvider` ```js import React from 'react'; import ReactDOM from 'react-dom'; import './index.css'; import {MockedProvider} from "@apollo/client/testing"; import {gql, InMemoryCache, useLazyQuery, useMutation} from "@apollo/client"; const cache = new InMemoryCache(); const SOME_MUTATION = gql` mutation SomeMutation { test } `; const SOME_QUERY = gql` query SomeQuery { test } `; const mocks = [ { request: { query: SOME_MUTATION, }, result: { errors: [{message: 'ERROR'}] } }, { request: { query: SOME_QUERY, }, result: { errors: [{message: 'ERROR'}] } } ] function App() { const [someMutation] = useMutation(SOME_MUTATION); const [someQuery] = useLazyQuery(SOME_QUERY); const startMutation = () => { someMutation() .then((r) => console.log('mutation done!', r)) .catch((e) => console.log('mutation error!', e)) } const startUseLazyQuery = () => { someQuery() .then((r) => console.log('query done!', r)) .catch((e) => console.log('query error!', e)) } return ( <div> <button onClick={startMutation}>Click me for mutation</button> <br/> <button onClick={startUseLazyQuery}>Click me for lazy query</button> </div> ) } ReactDOM.render( <React.StrictMode> <div className="App"> <MockedProvider cache={cache} mocks={mocks}> <App /> </MockedProvider> </div> </React.StrictMode>, document.getElementById('root') ); ``` </details> ---------------- **Versions** ``` System: OS: Linux 4.15 Ubuntu 18.04.6 LTS (Bionic Beaver) Binaries: Node: 12.22.7 - /usr/bin/node Yarn: 1.21.1 - /usr/bin/yarn npm: 6.14.15 - /usr/bin/npm Browsers: Chrome: 95.0.4638.54 Firefox: 94.0 npmPackages: @apollo/client: ^3.5.5 => 3.5.5 ```
@MattBred Thanks for reporting this! Seems like [this `Promise`](https://github.com/apollographql/apollo-client/blob/5d0c881a4c20c5c48842d9a8d246d26a917eccf9/src/react/hooks/useLazyQuery.ts#L44) needs to be rejected on errors, but we're currently only using its `resolve` function. Can you take a look @brainkim?

I object to the bug label because this is new 3.5 functionality, but yeah, this seems like the right thing to do. Though I don't understand why people don't just call `ApolloClient.query()` directly at some point.
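To illustrate that last point, a small sketch of the imperative alternative: calling the client directly returns a Promise that rejects on errors with the default error policy. The query reuses the reproduction above; `useFetchTest` is a hypothetical wrapper, not part of the library:

```ts
import { gql, useApolloClient } from "@apollo/client";

const SOME_QUERY = gql`
  query SomeQuery {
    test
  }
`;

// Hypothetical hook returning an imperative fetch function.
function useFetchTest() {
  const client = useApolloClient();
  return () =>
    client
      .query({ query: SOME_QUERY, fetchPolicy: "network-only" })
      .then((result) => console.log("query done!", result))
      // Rejects on GraphQL and network errors with the default errorPolicy.
      .catch((error) => console.log("query error!", error));
}
```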
2022-01-19T20:51:08Z
3.5
apollographql/apollo-client
9,222
apollographql__apollo-client-9222
[ "8426" ]
2e138aa88e7eda59fffd7424023ec633ef19814e
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -24,6 +24,7 @@ import { FetchMoreQueryOptions, SubscribeToMoreOptions, WatchQueryFetchPolicy, + NextFetchPolicyContext, } from './watchQueryOptions'; import { QueryInfo } from './QueryInfo'; import { MissingFieldError } from '../cache'; @@ -583,6 +584,45 @@ once, rather than every time you call fetchMore.`); this.updatePolling(); } + // Update options.fetchPolicy according to options.nextFetchPolicy. + private applyNextFetchPolicy( + reason: NextFetchPolicyContext<TData, TVariables>["reason"], + // It's possible to use this method to apply options.nextFetchPolicy to + // options.fetchPolicy even if options !== this.options, though that happens + // most often when the options are temporary, used for only one request and + // then thrown away, so nextFetchPolicy may not end up mattering. + options: WatchQueryOptions<TVariables, TData> = this.options, + ) { + if (options.nextFetchPolicy) { + const { fetchPolicy = "cache-first" } = options; + + // When someone chooses "cache-and-network" or "network-only" as their + // initial FetchPolicy, they often do not want future cache updates to + // trigger unconditional network requests, which is what repeatedly + // applying the "cache-and-network" or "network-only" policies would seem + // to imply. Instead, when the cache reports an update after the initial + // network request, it may be desirable for subsequent network requests to + // be triggered only if the cache result is incomplete. To that end, the + // options.nextFetchPolicy option provides an easy way to update + // options.fetchPolicy after the initial network request, without having to + // call observableQuery.setOptions. + if (typeof options.nextFetchPolicy === "function") { + options.fetchPolicy = options.nextFetchPolicy(fetchPolicy, { + reason, + options, + observable: this, + initialPolicy: this.initialFetchPolicy, + }); + } else if (reason === "variables-changed") { + options.fetchPolicy = this.initialFetchPolicy; + } else { + options.fetchPolicy = options.nextFetchPolicy; + } + } + + return options.fetchPolicy; + } + private fetch( options: WatchQueryOptions<TVariables, TData>, newNetworkStatus?: NetworkStatus, @@ -709,7 +749,7 @@ once, rather than every time you call fetchMore.`); !newOptions.fetchPolicy && !equal(newOptions.variables, oldVariables) ) { - options.fetchPolicy = this.initialFetchPolicy; + this.applyNextFetchPolicy("variables-changed"); if (newNetworkStatus === void 0) { newNetworkStatus = NetworkStatus.setVariables; } @@ -831,36 +871,3 @@ export function logMissingFieldErrors( }`, missing); } } - -// Adopt options.nextFetchPolicy (if defined) as a replacement for -// options.fetchPolicy. Since this method also removes options.nextFetchPolicy -// from options, the adoption tends to be idempotent, unless nextFetchPolicy -// is a function that keeps setting options.nextFetchPolicy (uncommon). 
-export function applyNextFetchPolicy<TData, TVars>( - options: Pick< - WatchQueryOptions<TVars, TData>, - | "fetchPolicy" - | "nextFetchPolicy" - >, -) { - const { - fetchPolicy = "cache-first", - nextFetchPolicy, - } = options; - - if (nextFetchPolicy) { - // When someone chooses "cache-and-network" or "network-only" as their - // initial FetchPolicy, they often do not want future cache updates to - // trigger unconditional network requests, which is what repeatedly - // applying the "cache-and-network" or "network-only" policies would seem - // to imply. Instead, when the cache reports an update after the initial - // network request, it may be desirable for subsequent network requests to - // be triggered only if the cache result is incomplete. To that end, the - // options.nextFetchPolicy option provides an easy way to update - // options.fetchPolicy after the initial network request, without having to - // call observableQuery.setOptions. - options.fetchPolicy = typeof nextFetchPolicy === "function" - ? nextFetchPolicy.call(options, fetchPolicy) - : nextFetchPolicy; - } -} diff --git a/src/core/QueryManager.ts b/src/core/QueryManager.ts --- a/src/core/QueryManager.ts +++ b/src/core/QueryManager.ts @@ -35,7 +35,7 @@ import { ErrorPolicy, MutationFetchPolicy, } from './watchQueryOptions'; -import { ObservableQuery, applyNextFetchPolicy, logMissingFieldErrors } from './ObservableQuery'; +import { ObservableQuery, logMissingFieldErrors } from './ObservableQuery'; import { NetworkStatus, isNetworkRequestInFlight } from './networkStatus'; import { ApolloQueryResult, @@ -1155,7 +1155,10 @@ export class QueryManager<TStore> { concast.cleanup(() => { this.fetchCancelFns.delete(queryId); - applyNextFetchPolicy(options); + + if (queryInfo.observableQuery) { + queryInfo.observableQuery["applyNextFetchPolicy"]("after-fetch", options); + } }); return concast; diff --git a/src/core/index.ts b/src/core/index.ts --- a/src/core/index.ts +++ b/src/core/index.ts @@ -12,7 +12,6 @@ export { ObservableQuery, FetchMoreOptions, UpdateQueryOptions, - applyNextFetchPolicy, } from './ObservableQuery'; export { QueryOptions, diff --git a/src/core/watchQueryOptions.ts b/src/core/watchQueryOptions.ts --- a/src/core/watchQueryOptions.ts +++ b/src/core/watchQueryOptions.ts @@ -11,6 +11,7 @@ import { InternalRefetchQueriesInclude, } from './types'; import { ApolloCache } from '../cache'; +import { ObservableQuery } from './ObservableQuery'; /** * fetchPolicy determines where the client may return a result from. The options are: @@ -125,7 +126,8 @@ export interface WatchQueryOptions<TVariables = OperationVariables, TData = any> */ nextFetchPolicy?: WatchQueryFetchPolicy | (( this: WatchQueryOptions<TVariables, TData>, - lastFetchPolicy: WatchQueryFetchPolicy, + currentFetchPolicy: WatchQueryFetchPolicy, + context: NextFetchPolicyContext<TData, TVariables>, ) => WatchQueryFetchPolicy); /** * Specifies whether a {@link NetworkStatus.refetch} operation should merge @@ -136,6 +138,15 @@ export interface WatchQueryOptions<TVariables = OperationVariables, TData = any> refetchWritePolicy?: RefetchWritePolicy; } +export interface NextFetchPolicyContext<TData, TVariables> { + reason: + | "after-fetch" + | "variables-changed"; + observable: ObservableQuery<TData, TVariables>; + options: WatchQueryOptions<TVariables, TData>; + initialPolicy: WatchQueryFetchPolicy; +} + export interface FetchMoreQueryOptions<TVariables, TData = any> { query?: DocumentNode | TypedDocumentNode<TData, TVariables>; variables?: Partial<TVariables>;
diff --git a/src/__tests__/__snapshots__/exports.ts.snap b/src/__tests__/__snapshots__/exports.ts.snap --- a/src/__tests__/__snapshots__/exports.ts.snap +++ b/src/__tests__/__snapshots__/exports.ts.snap @@ -16,7 +16,6 @@ Array [ "NetworkStatus", "Observable", "ObservableQuery", - "applyNextFetchPolicy", "checkFetcher", "concat", "createHttpLink", @@ -93,7 +92,6 @@ Array [ "NetworkStatus", "Observable", "ObservableQuery", - "applyNextFetchPolicy", "checkFetcher", "concat", "createHttpLink", diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -3312,10 +3312,16 @@ describe('@connection', () => { defaultOptions: { watchQuery: { - nextFetchPolicy(fetchPolicy) { + nextFetchPolicy(fetchPolicy, context) { expect(++nextFetchPolicyCallCount).toBe(1); expect(this.query).toBe(query); expect(fetchPolicy).toBe("cache-first"); + + expect(context.reason).toBe("after-fetch"); + expect(context.observable).toBe(obs); + expect(context.options).toBe(obs.options); + expect(context.initialPolicy).toBe("cache-first"); + // Usually options.nextFetchPolicy applies only once, but a // nextFetchPolicy function can set this.nextFetchPolicy // again to perform an additional transition. @@ -3323,6 +3329,7 @@ describe('@connection', () => { ++nextFetchPolicyCallCount; return "cache-first"; }; + return "cache-and-network"; }, }, diff --git a/src/core/__tests__/fetchPolicies.ts b/src/core/__tests__/fetchPolicies.ts --- a/src/core/__tests__/fetchPolicies.ts +++ b/src/core/__tests__/fetchPolicies.ts @@ -9,6 +9,10 @@ import { itAsync, mockSingleLink, } from '../../testing'; +import { TypedDocumentNode } from '@graphql-typed-document-node/core'; +import { WatchQueryFetchPolicy, WatchQueryOptions } from '../watchQueryOptions'; +import { ApolloQueryResult } from '../types'; +import { ObservableQuery } from '../ObservableQuery'; const query = gql` query { @@ -745,3 +749,436 @@ describe('cache-and-network', function() { }); }); }); + +describe("nextFetchPolicy", () => { + type TData = { + linkCounter: number; + opName: string; + opVars: Record<string, any>; + } + + const EchoQuery: TypedDocumentNode<TData> = gql` + query EchoQuery { + linkCounter + opName + opVars + } + `; + + function makeLink() { + let linkCounter = 0; + return new ApolloLink(request => new Observable(observer => { + setTimeout(() => { + observer.next({ + data: { + linkCounter: ++linkCounter, + opName: request.operationName, + opVars: request.variables, + }, + }); + observer.complete(); + }, 10); + })); + } + + const checkNextFetchPolicy = <TData, TVars>(args: { + fetchPolicy: WatchQueryFetchPolicy; + nextFetchPolicy: WatchQueryOptions<TVars, TData>["nextFetchPolicy"]; + useDefaultOptions: boolean; + onResult(info: { + count: number; + result: ApolloQueryResult<TData>; + observable: ObservableQuery<TData, TVars>; + resolve(result?: any): void; + reject(reason?: any): void; + }): void; + }) => itAsync(`transitions ${args.fetchPolicy} to ${ + typeof args.nextFetchPolicy === "function" + ? args.nextFetchPolicy.name + : args.nextFetchPolicy + } (${ + args.useDefaultOptions ? "" : "not " + }using defaults)`, (resolve, reject) => { + const client = new ApolloClient({ + link: makeLink(), + cache: new InMemoryCache(), + defaultOptions: { + watchQuery: args.useDefaultOptions ? 
{ + nextFetchPolicy: args.nextFetchPolicy, + } : {}, + }, + }); + + const watchQueryOptions: WatchQueryOptions<TVars, TData> = { + query: EchoQuery, + fetchPolicy: args.fetchPolicy, + }; + + if (!args.useDefaultOptions) { + watchQueryOptions.nextFetchPolicy = args.nextFetchPolicy; + } + + const observable = client.watchQuery(watchQueryOptions); + + expect(observable.options.fetchPolicy).toBe(args.fetchPolicy); + + subscribeAndCount(reject, observable, (count, result) => { + return args.onResult({ + observable, + count, + result, + resolve, + reject, + }); + }); + }); + + type CheckOptions = Parameters<typeof checkNextFetchPolicy>[0]; + type NextFetchPolicy = CheckOptions["nextFetchPolicy"]; + type OnResultCallback = CheckOptions["onResult"]; + + // We'll use this same OnResultCallback for multiple tests, to make it easier + // to tell that the behavior of the tests is the same. + const onResultNetworkOnlyToCacheFirst: OnResultCallback = ({ + observable, + count, + result, + resolve, + reject, + }) => { + if (count === 1) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 1, + opName: "EchoQuery", + opVars: {}, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + observable.refetch({ + refetching: true, + }).then(result => { + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + }).catch(reject); + + } else if (count === 2) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + observable.reobserve({ + variables: { + refetching: false, + }, + }).then(result => { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 3, + opName: "EchoQuery", + opVars: { + refetching: false, + }, + }); + }).catch(reject); + + // Changing variables resets the fetchPolicy to its initial value. + expect(observable.options.fetchPolicy).toBe("network-only"); + + } else if (count === 3) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 3, + opName: "EchoQuery", + opVars: { + refetching: false, + }, + }); + + // But nextFetchPolicy is applied again after the first request. 
+ expect(observable.options.fetchPolicy).toBe("cache-first"); + + setTimeout(resolve, 20); + } else { + reject(`Too many results (${count})`); + } + }; + + checkNextFetchPolicy({ + useDefaultOptions: false, + fetchPolicy: "network-only", + nextFetchPolicy: "cache-first", + onResult: onResultNetworkOnlyToCacheFirst, + }); + + checkNextFetchPolicy({ + useDefaultOptions: true, + fetchPolicy: "network-only", + nextFetchPolicy: "cache-first", + onResult: onResultNetworkOnlyToCacheFirst, + }); + + const nextFetchPolicyNetworkOnlyToCacheFirst: NextFetchPolicy = function ( + currentFetchPolicy, + context, + ): WatchQueryFetchPolicy { + expect(currentFetchPolicy).toBe(context.options.fetchPolicy); + switch (context.reason) { + case "variables-changed": + return context.initialPolicy; + default: + case "after-fetch": + return "cache-first"; + } + }; + + checkNextFetchPolicy({ + useDefaultOptions: false, + fetchPolicy: "network-only", + nextFetchPolicy: nextFetchPolicyNetworkOnlyToCacheFirst, + onResult: onResultNetworkOnlyToCacheFirst, + }); + + checkNextFetchPolicy({ + useDefaultOptions: true, + fetchPolicy: "network-only", + nextFetchPolicy: nextFetchPolicyNetworkOnlyToCacheFirst, + onResult: onResultNetworkOnlyToCacheFirst, + }); + + const onResultCacheAndNetworkToCacheFirst: OnResultCallback = ({ + observable, + count, + result, + resolve, + reject, + }) => { + if (count === 1) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 1, + opName: "EchoQuery", + opVars: {}, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + observable.refetch({ + refetching: true, + }).then(result => { + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + }).catch(reject); + + } else if (count === 2) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + observable.reobserve({ + variables: { + refetching: false, + }, + }).then(result => { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 3, + opName: "EchoQuery", + opVars: { + refetching: false, + }, + }); + }).catch(reject); + + // Changing variables resets the fetchPolicy to its initial value. + // expect(observable.options.fetchPolicy).toBe("cache-and-network"); + + } else if (count === 3) { + expect(result.loading).toBe(true); + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + + // But nextFetchPolicy is applied again after the first request. 
+ expect(observable.options.fetchPolicy).toBe("cache-first"); + + } else if (count === 4) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 3, + opName: "EchoQuery", + opVars: { + refetching: false, + }, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + setTimeout(resolve, 20); + } else { + reject(`Too many results (${count})`); + } + }; + + checkNextFetchPolicy({ + useDefaultOptions: false, + fetchPolicy: "cache-and-network", + nextFetchPolicy: "cache-first", + onResult: onResultCacheAndNetworkToCacheFirst, + }); + + checkNextFetchPolicy({ + useDefaultOptions: true, + fetchPolicy: "cache-and-network", + nextFetchPolicy: "cache-first", + onResult: onResultCacheAndNetworkToCacheFirst, + }); + + const nextFetchPolicyCacheAndNetworkToCacheFirst: NextFetchPolicy = function ( + currentFetchPolicy, + context, + ): WatchQueryFetchPolicy { + expect(currentFetchPolicy).toBe(context.options.fetchPolicy); + switch (context.reason) { + case "variables-changed": + return context.initialPolicy; + default: + case "after-fetch": + return "cache-first"; + } + }; + + checkNextFetchPolicy({ + useDefaultOptions: false, + fetchPolicy: "cache-and-network", + nextFetchPolicy: nextFetchPolicyCacheAndNetworkToCacheFirst, + onResult: onResultCacheAndNetworkToCacheFirst, + }); + + checkNextFetchPolicy({ + useDefaultOptions: true, + fetchPolicy: "cache-and-network", + nextFetchPolicy: nextFetchPolicyCacheAndNetworkToCacheFirst, + onResult: onResultCacheAndNetworkToCacheFirst, + }); + + const nextFetchPolicyAlwaysCacheFirst: NextFetchPolicy = function ( + currentFetchPolicy, + context, + ): WatchQueryFetchPolicy { + expect(currentFetchPolicy).toBe(context.options.fetchPolicy); + // Return cache-first no matter what context.reason was. + return "cache-first"; + }; + + const onResultCacheAndNetworkAlwaysCacheFirst: OnResultCallback = ({ + observable, + count, + result, + resolve, + reject, + }) => { + if (count === 1) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 1, + opName: "EchoQuery", + opVars: {}, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + observable.refetch({ + refetching: true, + }).then(result => { + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + }).catch(reject); + + } else if (count === 2) { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + + expect(observable.options.fetchPolicy).toBe("cache-first"); + + observable.reobserve({ + variables: { + refetching: false, + }, + }).then(result => { + expect(result.loading).toBe(false); + expect(result.data).toEqual({ + linkCounter: 2, + opName: "EchoQuery", + opVars: { + refetching: true, + }, + }); + }).catch(reject); + + // The nextFetchPolicy function we provided always returnes cache-first, + // even when context.reason is variables-changed (which by default + // resets the fetchPolicy to context.initialPolicy), so cache-first is + // still what we see here. 
+ expect(observable.options.fetchPolicy).toBe("cache-first"); + + setTimeout(resolve, 20); + } else { + reject(`Too many results (${count})`); + } + }; + + checkNextFetchPolicy({ + useDefaultOptions: false, + fetchPolicy: "cache-and-network", + nextFetchPolicy: nextFetchPolicyAlwaysCacheFirst, + onResult: onResultCacheAndNetworkAlwaysCacheFirst, + }); + + checkNextFetchPolicy({ + useDefaultOptions: true, + fetchPolicy: "cache-and-network", + nextFetchPolicy: nextFetchPolicyAlwaysCacheFirst, + onResult: onResultCacheAndNetworkAlwaysCacheFirst, + }); +});
[v3.4 Regression] Changing variables uses nextFetchPolicy instead of fetchPolicy

**Intended outcome:**

I have a query with `fetchPolicy: 'network-only'` and `nextFetchPolicy: 'cache-first'`. When I change the variables, it should fetch new results from the server regardless of what's in the cache.

**Actual outcome:**

Changing the variables appears to use the `nextFetchPolicy` and does not fetch new data from the server.

**How to reproduce the issue:**

https://github.com/dylanwulf/react-apollo-error-template/tree/changing-variables-nextFetchPolicy

Please use branch `changing-variables-nextFetchPolicy`. Reproduction instructions: Open dev tools and watch the console. Click any of the unselected radio buttons, see that a request to the server is made (this is printed to the console instead of the network tab due to the apollo link). Click "All", see that a request to the server is NOT made.

It looks like this problem first showed up in `v3.4.0-rc.5` and still exists in `v3.4.0-rc.14`.
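For reference, a condensed sketch of the configuration described in this report and the behavior it expects. The `People`/`gender` schema is assumed from the reproduction template; it is not part of this repo:

```ts
import { gql, useQuery } from "@apollo/client";

const PEOPLE = gql`
  query People($gender: String!) {
    people(gender: $gender) {
      id
      name
    }
  }
`;

function usePeople(gender: string) {
  return useQuery(PEOPLE, {
    variables: { gender },
    // Expected: every variables change triggers a network request...
    fetchPolicy: "network-only",
    // ...while later cache-driven updates read from the cache.
    nextFetchPolicy: "cache-first",
  });
}
```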
Related:
- https://github.com/apollographql/apollo-client/issues/7437
- https://github.com/apollographql/apollo-client/issues/7311
- https://github.com/apollographql/apollo-client/issues/6839

@brainkim I thought it was interesting that #7437 is happening in v3.3 whereas I only saw my issue in v3.4. I played around a little and I was able to get the issue to happen in v3.3 if `fetchPolicy` and `nextFetchPolicy` are set in `defaultOptions` instead of directly in `useQuery` options. Possibly related to #6839? (which is another issue I would love to see fixed)

@dylanwulf The operative change in `3.4.0-rc.5` was the addition of this line in PR #8346: https://github.com/apollographql/apollo-client/blob/f11a163a4e507441bd51c2e0f9571aad853d5856/src/react/data/QueryData.ts#L203

Calling `applyNextFetchPolicy` here advances immediately to `nextFetchPolicy` without ever using `fetchPolicy`, which seems incorrect to me. Removing this line fixes the reproduction you provided, though you might want to remove `notifyOnNetworkStatusChange: true` from your `useQuery` options to avoid receiving intermediate `loading: true` results with cache data.

On a more general note, since you mentioned #6839, I have to admit I'm not comfortable with the role `useQuery` plays in resetting `fetchPolicy` and `nextFetchPolicy`, which happens simply because `useQuery` is called again with (very likely) the same initial options object, every time the component rerenders. I happen to think `fetchPolicy` should be reset when variables change, but I don't think `useQuery` is the right/best mechanism to accomplish that.

@benjamn thanks for looking into this! Awesome to hear you found the root cause.

> though you might want to remove `notifyOnNetworkStatusChange: true` from your `useQuery` options to avoid receiving intermediate `loading: true` results with cache data.

When I change the variables, I seem to be receiving intermediate `loading: true` results with cache data whether `notifyOnNetworkStatusChange` is `true` or `false` (both in v3.3, and v3.4 with that change to `QueryData.ts` you described). But I can always add in some simple logic to hide results whenever `loading` is `true`.

> which happens simply because `useQuery` is called again with (very likely) the same initial options object, every time the component rerenders. I happen to think `fetchPolicy` should be reset when variables change, but I don't think useQuery is the right/best mechanism to accomplish that.

I agree, I think that when the variables change it should use the `fetchPolicy` that's being passed into the options (whether that's in the `useQuery` options or the `defaultOptions`). Between #6839 and https://github.com/apollographql/apollo-client/issues/8426#issuecomment-869912317, `defaultOptions` seem pretty much unusable since I have to explicitly specify `fetchPolicy` and `nextFetchPolicy` in every single `useQuery` anyway. But at least that's a problem with a workaround.

> When I change the variables, I seem to be receiving intermediate `loading: true` results with cache data whether `notifyOnNetworkStatusChange` is `true` or `false` (both in v3.3, and v3.4 with that change to `QueryData.ts` you described).

@dylanwulf I looked into this, and it seems to be a consequence of using React `useState` to store/modify the `gender` variable. Changing the `gender` using `setGender` forces React to rerender the component immediately, even though we might want the update to take place in the background for `network-only` fetches.
Since `useQuery` can't return the final data yet, it returns a `loading: true` result, regardless of `notifyOnNetworkStatusChange`. When using `client.watchQuery` and the `ObservableQuery` API directly, changing the variables with `notifyOnNetworkStatusChange: false` should skip the `loading: true` result, so you only see the final update. I'm not sure there's much we can do about the React situation right now, other than reporting the truth about the `loading: true` state of the results `useQuery` is forced to render. For what it's worth, we are actively rethinking these issues with `useQuery` in the context of a new `useFragment` hook (see #8236).

> forces React to rerender the component immediately

Oooh ok that makes sense, thank you for the explanation

Alright, except for the weirdness about `notifyOnNetworkStatusChange: false` not working with React and `useQuery`, I believe this is fixed in `@apollo/client@3.4.0-rc.18`. Thanks for pushing this issue (and #6839) forward @dylanwulf!
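To make the `defaultOptions` discussion above concrete, here is a hedged sketch of the two places these policies can be declared; the endpoint and query name are placeholders, and whether the client-wide form is actually honored was the subject of #6839:

```ts
import { ApolloClient, InMemoryCache } from "@apollo/client";

// Client-wide defaults: the combination reported as unreliable in this thread.
const client = new ApolloClient({
  uri: "https://example.com/graphql", // placeholder endpoint
  cache: new InMemoryCache(),
  defaultOptions: {
    watchQuery: {
      fetchPolicy: "network-only",
      nextFetchPolicy: "cache-first",
    },
  },
});

// Per-query workaround: repeat both policies in every hook call.
// const { data } = useQuery(SOME_QUERY, {
//   fetchPolicy: "network-only",
//   nextFetchPolicy: "cache-first",
// });
```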
2021-12-20T20:41:54Z
3.6
apollographql/apollo-client
8,718
apollographql__apollo-client-8718
[ "8697" ]
f7bda84dbd3a80ef2b73c3ec3716ad48c41bbad6
diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -207,21 +207,35 @@ export class ObservableQuery< networkStatus, } as ApolloQueryResult<TData>; - // If this.options.query has @client(always: true) fields, we cannot trust - // diff.result, since it was read from the cache without running local - // resolvers (and it's too late to run resolvers now, since we must return a - // result synchronously). - if (!this.queryManager.transform(this.options.query).hasForcedResolvers) { + const { fetchPolicy = "cache-first" } = this.options; + // The presence of lastResult means a result has been received and + // this.options.variables haven't changed since then, so its absence means + // either there hasn't been a result yet (so these policies definitely + // should skip the cache) or there's been a result but it was for different + // variables (again, skipping the cache seems right). + const shouldReturnCachedData = lastResult || ( + fetchPolicy !== 'network-only' && + fetchPolicy !== 'no-cache' && + fetchPolicy !== 'standby' + ); + if ( + shouldReturnCachedData && + // If this.options.query has @client(always: true) fields, we cannot + // trust diff.result, since it was read from the cache without running + // local resolvers (and it's too late to run resolvers now, since we must + // return a result synchronously). + !this.queryManager.transform(this.options.query).hasForcedResolvers + ) { const diff = this.queryInfo.getDiff(); if (diff.complete || this.options.returnPartialData) { result.data = diff.result; } + if (equal(result.data, {})) { result.data = void 0 as any; } - const { fetchPolicy = "cache-first" } = this.options; if (diff.complete) { // If the diff is complete, and we're using a FetchPolicy that // terminates after a complete cache read, we can assume the next
diff --git a/src/core/__tests__/ObservableQuery.ts b/src/core/__tests__/ObservableQuery.ts --- a/src/core/__tests__/ObservableQuery.ts +++ b/src/core/__tests__/ObservableQuery.ts @@ -2027,7 +2027,7 @@ describe('ObservableQuery', () => { }); expect(observable.getCurrentResult()).toEqual({ - data: dataOne, + data: undefined, loading: true, networkStatus: NetworkStatus.loading, }); @@ -2036,7 +2036,7 @@ describe('ObservableQuery', () => { if (handleCount === 1) { expect(subResult).toEqual({ loading: true, - data: dataOne, + data: undefined, networkStatus: NetworkStatus.loading, }); } else if (handleCount === 2) { diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -523,6 +523,40 @@ describe('useQuery Hook', () => { expect(result.current.loading).toBe(false); expect(result.current.data).toEqual(mocks[1].result.data); }); + + it('should not use the cache when using `network-only`', async () => { + const query = gql`{ hello }`; + const mocks = [ + { + request: { query }, + result: { data: { hello: 'from link' } }, + }, + ]; + + const cache = new InMemoryCache(); + cache.writeQuery({ + query, + data: { hello: 'from cache' }, + }); + + const { result, waitForNextUpdate } = renderHook( + () => useQuery(query, { fetchPolicy: 'network-only' }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks} cache={cache}> + {children} + </MockedProvider> + ), + }, + ); + + expect(result.current.loading).toBe(true); + expect(result.current.data).toBe(undefined); + + await waitForNextUpdate(); + expect(result.current.loading).toBe(false); + expect(result.current.data).toEqual({ hello: 'from link' }); + }); }); describe('polling', () => {
useQuery with fetchPolicy: network-only returns cache data on first render when remounting a component

I'm trying to use `@apollo/client@3.4` and found a behavior change in `useQuery` with `fetchPolicy: 'network-only'` between AC3.4 and AC3.3; I'm not sure if it's a bug or intended behavior.

**Intended outcome:**

For example, we render an `<A />` component and query with `fetchPolicy: 'network-only'`:

```jsx
function A() {
  const { data, loading } = useQuery(query, { fetchPolicy: 'network-only' });
  return <Blablabla data={data} />;
}
```

Then we unmount `<A />` and remount `<A />` again, e.g. on a route or state change. I expect that when we remount `<A />`, the initial `data` returned by `useQuery` should be `undefined`, and after data fetching the data will be renewed, which is the behavior of AC3.3.

**Actual outcome:**

When I try the above use case on AC3.4, the initial `data` returned in `<A />` on remount is the cache data, not `undefined`.

**How to reproduce the issue:**

reproduction: https://codesandbox.io/s/fervent-tesla-sb8oe?file=/src/App.js

Reproduction steps:
1. At first the App will render `<A />`. There's a console.log in a useEffect of `<A />` to record the `data` value on mount, so you can see the console printing `data on mount: undefined`.
2. Click link B to switch to `<B />`.
3. Click link A to remount `<A />`. In AC3.4 the console will print the full data, but in AC3.3 (or AC2) it will print `undefined`.

**Versions**

```
System:
  OS: macOS 10.15.3
Binaries:
  Node: 12.16.3 - ~/.nvm/versions/node/v12.16.3/bin/node
  Yarn: 1.22.4 - /usr/local/bin/yarn
  npm: 6.14.4 - ~/.nvm/versions/node/v12.16.3/bin/npm
Browsers:
  Chrome: 92.0.4515.159
  Firefox: 89.0
  Safari: 13.0.5
npmPackages:
  @apollo/client: ^3.4.9 => 3.4.9
```
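A minimal sketch of the mount/unmount toggle the reproduction steps describe; `<A />` is the component from the snippet above, and `<B />` is just a stand-in for any other screen:

```tsx
import * as React from "react";

// <B /> is a placeholder; <A /> is assumed to be the useQuery component
// shown in the issue's snippet above.
function B() {
  return <p>Some other screen</p>;
}

function App() {
  const [route, setRoute] = React.useState<"A" | "B">("A");
  return (
    <>
      <button onClick={() => setRoute("A")}>A</button>
      <button onClick={() => setRoute("B")}>B</button>
      {/* Switching to B unmounts <A />; switching back remounts it. On
          remount, AC3.3 starts with data === undefined, while AC3.4
          immediately returns the previously cached data. */}
      {route === "A" ? <A /> : <B />}
    </>
  );
}
```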
Same here. After upgrading from 3.3 to 3.4 this started happening. I checked that the last 3.3.x version (3.3.21) worked as expected. The behaviour change started in 3.4.0 and still occurs in the 3.4.9 release.
2021-08-27T15:50:42Z
3.4
apollographql/apollo-client
8,574
apollographql__apollo-client-8574
[ "8571" ]
b8b831f8cd575afab1440b5afda9f40971893791
diff --git a/src/react/data/QueryData.ts b/src/react/data/QueryData.ts --- a/src/react/data/QueryData.ts +++ b/src/react/data/QueryData.ts @@ -101,11 +101,10 @@ export class QueryData<TData, TVariables> extends OperationData< public afterExecute({ lazy = false }: { lazy?: boolean } = {}) { this.isMounted = true; const options = this.getOptions(); - const ssrDisabled = options.ssr === false; if ( this.currentObservable && - !ssrDisabled && - !this.ssrInitiated() + !this.ssrInitiated() && + !this.client.disableNetworkFetches ) { this.startQuerySubscription(); }
diff --git a/src/react/hooks/__tests__/useQuery.test.tsx b/src/react/hooks/__tests__/useQuery.test.tsx --- a/src/react/hooks/__tests__/useQuery.test.tsx +++ b/src/react/hooks/__tests__/useQuery.test.tsx @@ -376,6 +376,31 @@ describe('useQuery Hook', () => { unmount(); expect(client.getObservableQueries().size).toBe(0); }); + + it('should work with ssr: false', async () => { + const query = gql`{ hello }`; + const mocks = [ + { + request: { query }, + result: { data: { hello: "world" } }, + }, + ]; + + const { result, waitForNextUpdate } = renderHook( + () => useQuery(query, { ssr: false }), + { + wrapper: ({ children }) => ( + <MockedProvider mocks={mocks}>{children}</MockedProvider> + ), + }, + ); + + expect(result.current.loading).toBe(true); + expect(result.current.data).toBe(undefined); + await waitForNextUpdate(); + expect(result.current.loading).toBe(false); + expect(result.current.data).toEqual({ hello: "world" }); + }); }); describe('polling', () => {
useQuery with {ssr: false} is not fetched on the client side

**Intended outcome:**

Invoking `useQuery` with `{ssr: false}` skips the query during SSR but fetches it on the client side.

**Actual outcome:**

Invoking `useQuery` with `{ssr: false}` does not fetch the query on the client side either. It returns `{data: undefined, loading: true}` indefinitely.

**How to reproduce the issue:**

- clone this fork https://github.com/hevele-moda/react-apollo-error-template
- run `npm install && npm run`
- Notice that the page shows 'Loading...' and the query never gets resolved.

**Versions**

```
System:
  OS: Linux 5.4 Ubuntu 20.04.2 LTS (Focal Fossa)
Binaries:
  Node: 14.17.1 - ~/.nvm/versions/node/v14.17.1/bin/node
  Yarn: 1.22.10 - /src/discovery/node_modules/.bin/yarn
  npm: 7.20.3 - /src/discovery/node_modules/.bin/npm
Browsers:
  Chrome: 92.0.4515.107
  Firefox: 90.0
npmPackages:
  @apollo/client: ^3.4.1 => 3.4.1
  apollo: ^2.33.4 => 2.33.4
  apollo-link-rest: ^0.8.0-beta.0 => 0.8.0-beta.0
  apollo-storybook-react: ^0.2.4 => 0.2.4
```
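For reference, a minimal sketch of the call pattern being reported (query and component names are placeholders, not taken from the fork):

```tsx
import { gql, useQuery } from "@apollo/client";

const GREETING = gql`
  query Greeting {
    hello
  }
`;

function Greeting() {
  // Expected: the query is skipped during server-side rendering but runs
  // normally in the browser. Reported: it stays
  // { data: undefined, loading: true } forever on the client.
  const { data, loading } = useQuery(GREETING, { ssr: false });
  return loading ? <p>Loading...</p> : <p>{data.hello}</p>;
}
```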
@hevele-moda Thanks for the reproduction! This appears to be a regression introduced in `@apollo/client@3.4.0-rc.13`, possibly from commit 43d64356e656c42bd28523394e07c1c0584aa4b2. Does that sound plausible @brainkim?
2021-08-02T20:16:41Z
3.4
apollographql/apollo-client
8,438
apollographql__apollo-client-8438
[ "8267" ]
f4bc1e0f114c4119fb0dfedfa8f9cd3922a8441b
diff --git a/src/utilities/policies/pagination.ts b/src/utilities/policies/pagination.ts --- a/src/utilities/policies/pagination.ts +++ b/src/utilities/policies/pagination.ts @@ -98,20 +98,25 @@ export function relayStylePagination<TNode = Reference>( if (!existing) return; const edges: TRelayEdge<TNode>[] = []; - let startCursor = ""; - let endCursor = ""; + let firstEdgeCursor = ""; + let lastEdgeCursor = ""; existing.edges.forEach(edge => { // Edges themselves could be Reference objects, so it's important // to use readField to access the edge.edge.node property. if (canRead(readField("node", edge))) { edges.push(edge); if (edge.cursor) { - startCursor = startCursor || edge.cursor; - endCursor = edge.cursor; + firstEdgeCursor = firstEdgeCursor || edge.cursor || ""; + lastEdgeCursor = edge.cursor || lastEdgeCursor; } } }); + const { + startCursor, + endCursor, + } = existing.pageInfo || {}; + return { // Some implementations return additional Connection fields, such // as existing.totalCount. These fields are saved by the merge @@ -120,8 +125,10 @@ export function relayStylePagination<TNode = Reference>( edges, pageInfo: { ...existing.pageInfo, - startCursor, - endCursor, + // If existing.pageInfo.{start,end}Cursor are undefined or "", default + // to firstEdgeCursor and/or lastEdgeCursor. + startCursor: startCursor || firstEdgeCursor, + endCursor: endCursor || lastEdgeCursor, }, }; },
diff --git a/src/cache/inmemory/__tests__/policies.ts b/src/cache/inmemory/__tests__/policies.ts --- a/src/cache/inmemory/__tests__/policies.ts +++ b/src/cache/inmemory/__tests__/policies.ts @@ -3461,7 +3461,7 @@ describe("type policies", function () { edges, pageInfo: { __typename: "PageInfo", - startCursor: thirdPageInfo.startCursor, + startCursor: fourthPageInfo.startCursor, endCursor: fifthPageInfo.endCursor, hasPreviousPage: false, hasNextPage: true, diff --git a/src/utilities/policies/__tests__/relayStylePagination.test.ts b/src/utilities/policies/__tests__/relayStylePagination.test.ts --- a/src/utilities/policies/__tests__/relayStylePagination.test.ts +++ b/src/utilities/policies/__tests__/relayStylePagination.test.ts @@ -1,9 +1,116 @@ -import { FieldFunctionOptions, InMemoryCache, isReference, makeReference } from '../../../core'; +import { FieldFunctionOptions, InMemoryCache, isReference, makeReference, StoreObject } from '../../../cache'; import { relayStylePagination, TRelayPageInfo } from '../pagination'; describe('relayStylePagination', () => { const policy = relayStylePagination(); + describe('read', () => { + const fakeEdges = [ + { node: { __ref: "A" }, cursor: "cursorA" }, + { node: { __ref: "B" }, cursor: "cursorB" }, + { node: { __ref: "C" }, cursor: "cursorC" }, + ]; + + const fakeReadOptions = { + canRead() { return true }, + readField(key: string, obj: StoreObject) { + return obj && obj[key]; + }, + } as any as FieldFunctionOptions; + + it("should prefer existing.pageInfo.startCursor", () => { + const resultWithStartCursor = policy.read!({ + edges: fakeEdges, + pageInfo: { + startCursor: "preferredStartCursor", + hasPreviousPage: false, + hasNextPage: true, + } as TRelayPageInfo, + }, fakeReadOptions); + + expect( + resultWithStartCursor && + resultWithStartCursor.pageInfo + ).toEqual({ + startCursor: "preferredStartCursor", + endCursor: "cursorC", + hasPreviousPage: false, + hasNextPage: true, + }); + }); + + it("should prefer existing.pageInfo.endCursor", () => { + const resultWithEndCursor = policy.read!({ + edges: fakeEdges, + pageInfo: { + endCursor: "preferredEndCursor", + hasPreviousPage: false, + hasNextPage: true, + } as TRelayPageInfo, + }, fakeReadOptions); + + expect( + resultWithEndCursor && + resultWithEndCursor.pageInfo + ).toEqual({ + startCursor: "cursorA", + endCursor: "preferredEndCursor", + hasPreviousPage: false, + hasNextPage: true, + }); + }); + + it("should prefer existing.pageInfo.{start,end}Cursor", () => { + const resultWithEndCursor = policy.read!({ + edges: fakeEdges, + pageInfo: { + startCursor: "preferredStartCursor", + endCursor: "preferredEndCursor", + hasPreviousPage: false, + hasNextPage: true, + }, + }, fakeReadOptions); + + expect( + resultWithEndCursor && + resultWithEndCursor.pageInfo + ).toEqual({ + startCursor: "preferredStartCursor", + endCursor: "preferredEndCursor", + hasPreviousPage: false, + hasNextPage: true, + }); + }); + + it("should override pageInfo.{start,end}Cursor if empty strings", () => { + const resultWithEndCursor = policy.read!({ + edges: [ + { node: { __ref: "A" }, cursor: "" }, + { node: { __ref: "B" }, cursor: "cursorB" }, + { node: { __ref: "C" }, cursor: "" }, + { node: { __ref: "D" }, cursor: "cursorD" }, + { node: { __ref: "E" } }, + ], + pageInfo: { + startCursor: "", + endCursor: "", + hasPreviousPage: false, + hasNextPage: true, + }, + }, fakeReadOptions); + + expect( + resultWithEndCursor && + resultWithEndCursor.pageInfo + ).toEqual({ + startCursor: "cursorB", + endCursor: "cursorD", + 
hasPreviousPage: false, + hasNextPage: true, + }); + }); + }); + describe('merge', () => { const merge = policy.merge; // The merge function should exist, make TS aware
`relayStylePagination` `read` method clobbers `pageInfo.startCursor` and `pageInfo.endCursor`

**Intended outcome:**

With a query as follows:

```graphql
query {
  someConnectionField {
    edges {
      node {
        id
      }
    }
    pageInfo {
      hasNextPage
      endCursor
    }
  }
}
```

I would expect `endCursor` to be the value returned from the API, not `""`.

**Actual outcome:**

Even though the API returns a valid cursor in `endCursor`, the `relayStylePagination` type policy attempts to read it from `edges[0].cursor` (or the last edge's cursor) and completely replaces the one in `pageInfo`. These should default to `pageInfo.{startCursor,endCursor}` and not `""`.

https://github.com/apollographql/apollo-client/blob/cefb3c32621de6973191c7abb6edd365d53cbf41/src/utilities/policies/pagination.ts#L101-L102

**How to reproduce the issue:**

Perform a query as described above while using `relayStylePagination`.
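For context, a sketch of how this field policy is typically registered, assuming the `someConnectionField` from the query above lives on the root `Query` type:

```ts
import { InMemoryCache } from "@apollo/client";
import { relayStylePagination } from "@apollo/client/utilities";

const cache = new InMemoryCache({
  typePolicies: {
    Query: {
      fields: {
        // With this policy installed, its read function rebuilds pageInfo
        // from the edge cursors, which is where the reported clobbering of
        // startCursor/endCursor happens.
        someConnectionField: relayStylePagination(),
      },
    },
  },
});
```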
Having the same issue, can't use `relayStylePagination()` at all: cursors are always empty. So I just copied the whole implementation and replaced https://github.com/apollographql/apollo-client/blob/54fe88a4dd7d3f0850f37a5dcf93c8404bf6e566/src/utilities/policies/pagination.ts#L101-L102 with:

```javascript
let { startCursor } = existing.pageInfo;
let { endCursor } = existing.pageInfo;
```

This is a serious issue. Relay-style pagination is one of the core features offered by many GraphQL implementations, and is included in the official GraphQL docs. Can this please receive further attention?
2021-06-28T23:44:58Z
3.3
apollographql/apollo-client
8,394
apollographql__apollo-client-8394
[ "6886" ]
bfe0e75662d3fb7cbedb5dc17518c741eeeaafe3
diff --git a/config/helpers.ts b/config/helpers.ts --- a/config/helpers.ts +++ b/config/helpers.ts @@ -11,7 +11,7 @@ export function eachFile(dir: string, callback: ( ) => any) { const promises: Promise<any>[] = []; - return new Promise((resolve, reject) => { + return new Promise<void>((resolve, reject) => { glob(`${dir}/**/*.js`, (error, files) => { if (error) return reject(error); diff --git a/src/core/ObservableQuery.ts b/src/core/ObservableQuery.ts --- a/src/core/ObservableQuery.ts +++ b/src/core/ObservableQuery.ts @@ -61,9 +61,9 @@ export class ObservableQuery< private observers = new Set<Observer<ApolloQueryResult<TData>>>(); private subscriptions = new Set<ObservableSubscription>(); - private lastResult: ApolloQueryResult<TData>; - private lastResultSnapshot: ApolloQueryResult<TData>; - private lastError: ApolloError; + private lastResult: ApolloQueryResult<TData> | undefined; + private lastResultSnapshot: ApolloQueryResult<TData> | undefined; + private lastError: ApolloError | undefined; private queryInfo: QueryInfo; constructor({ @@ -134,11 +134,11 @@ export class ObservableQuery< (lastResult && lastResult.networkStatus) || NetworkStatus.ready; - const result: ApolloQueryResult<TData> = { + const result = { ...lastResult, loading: isNetworkRequestInFlight(networkStatus), networkStatus, - }; + } as ApolloQueryResult<TData>; if (this.isTornDown) { return result; @@ -208,11 +208,11 @@ export class ObservableQuery< // Returns the last result that observer.next was called with. This is not the same as // getCurrentResult! If you're not sure which you need, then you probably need getCurrentResult. - public getLastResult(): ApolloQueryResult<TData> { + public getLastResult(): ApolloQueryResult<TData> | undefined { return this.lastResult; } - public getLastError(): ApolloError { + public getLastError(): ApolloError | undefined { return this.lastError; } @@ -623,7 +623,7 @@ once, rather than every time you call fetchMore.`); errors: error.graphQLErrors, networkStatus: NetworkStatus.error, loading: false, - }); + } as ApolloQueryResult<TData>); iterateObserversSafely(this.observers, 'error', this.lastError = error); },
diff --git a/src/__tests__/client.ts b/src/__tests__/client.ts --- a/src/__tests__/client.ts +++ b/src/__tests__/client.ts @@ -2582,8 +2582,9 @@ describe('client', () => { const lastError = observable.getLastError(); const lastResult = observable.getLastResult(); - expect(lastResult.loading).toBeFalsy(); - expect(lastResult.networkStatus).toBe(8); + expect(lastResult).toBeTruthy(); + expect(lastResult!.loading).toBe(false); + expect(lastResult!.networkStatus).toBe(8); observable.resetLastResults(); subscription = observable.subscribe(observerOptions); diff --git a/src/__tests__/graphqlSubscriptions.ts b/src/__tests__/graphqlSubscriptions.ts --- a/src/__tests__/graphqlSubscriptions.ts +++ b/src/__tests__/graphqlSubscriptions.ts @@ -190,7 +190,7 @@ describe('GraphQL Subscriptions', () => { const promises = []; for (let i = 0; i < 2; i += 1) { promises.push( - new Promise((resolve, reject) => { + new Promise<void>((resolve, reject) => { obs.subscribe({ next(result) { fail('Should have hit the error block'); @@ -234,7 +234,7 @@ describe('GraphQL Subscriptions', () => { cache: new InMemoryCache({ addTypename: false }), }); - return new Promise(resolve => { + return new Promise<void>(resolve => { client.subscribe(defaultOptions).subscribe({ complete() { resolve(); diff --git a/src/__tests__/local-state/resolvers.ts b/src/__tests__/local-state/resolvers.ts --- a/src/__tests__/local-state/resolvers.ts +++ b/src/__tests__/local-state/resolvers.ts @@ -447,7 +447,7 @@ describe('Basic resolver capabilities', () => { }); function check(result: ApolloQueryResult<any>) { - return new Promise(resolve => { + return new Promise<void>(resolve => { expect(result.data.developer.id).toBe(developerId); expect(result.data.developer.handle).toBe('@benjamn'); expect(result.data.developer.tickets.length).toBe(ticketsPerDev); diff --git a/src/cache/inmemory/__tests__/readFromStore.ts b/src/cache/inmemory/__tests__/readFromStore.ts --- a/src/cache/inmemory/__tests__/readFromStore.ts +++ b/src/cache/inmemory/__tests__/readFromStore.ts @@ -1710,7 +1710,7 @@ describe('reading from the store', () => { ...snapshotAfterGC, __META: zeusMeta, }; - delete snapshotWithoutAres["Deity:{\"name\":\"Ares\"}"]; + delete (snapshotWithoutAres as any)["Deity:{\"name\":\"Ares\"}"]; expect(cache.extract()).toEqual(snapshotWithoutAres); // Ares already removed, so no new garbage to collect. expect(cache.gc()).toEqual([]); diff --git a/src/core/__tests__/QueryManager/index.ts b/src/core/__tests__/QueryManager/index.ts --- a/src/core/__tests__/QueryManager/index.ts +++ b/src/core/__tests__/QueryManager/index.ts @@ -5440,7 +5440,7 @@ describe('QueryManager', () => { describe('awaitRefetchQueries', () => { const awaitRefetchTest = ({ awaitRefetchQueries, testQueryError = false }: MutationBaseOptions<any, any, any> & { testQueryError?: boolean }) => - new Promise((resolve, reject) => { + new Promise<void>((resolve, reject) => { const query = gql` query getAuthors($id: ID!) 
{ author(id: $id) { @@ -5540,7 +5540,7 @@ describe('QueryManager', () => { expect(stripSymbols(result.data)).toEqual(secondReqData); }, ) - .then(resolve) + .then(() => resolve()) .catch(error => { const isRefetchError = awaitRefetchQueries && testQueryError && error.message.includes(refetchError?.message); diff --git a/src/link/http/__tests__/HttpLink.ts b/src/link/http/__tests__/HttpLink.ts --- a/src/link/http/__tests__/HttpLink.ts +++ b/src/link/http/__tests__/HttpLink.ts @@ -11,6 +11,7 @@ import { ClientParseError } from '../serializeFetchParameter'; import { ServerParseError } from '../parseAndCheckHttpResponse'; import { ServerError } from '../../..'; import DoneCallback = jest.DoneCallback; +import { voidFetchDuringEachTest } from './helpers'; const sampleQuery = gql` query SampleQuery { @@ -907,15 +908,7 @@ describe('HttpLink', () => { }); describe('Dev warnings', () => { - let oldFetch: WindowOrWorkerGlobalScope['fetch'];; - beforeEach(() => { - oldFetch = window.fetch; - delete window.fetch; - }); - - afterEach(() => { - window.fetch = oldFetch; - }); + voidFetchDuringEachTest(); it('warns if fetch is undeclared', done => { try { diff --git a/src/link/http/__tests__/checkFetcher.ts b/src/link/http/__tests__/checkFetcher.ts --- a/src/link/http/__tests__/checkFetcher.ts +++ b/src/link/http/__tests__/checkFetcher.ts @@ -1,15 +1,8 @@ import { checkFetcher } from '../checkFetcher'; +import { voidFetchDuringEachTest } from './helpers'; describe('checkFetcher', () => { - let oldFetch: WindowOrWorkerGlobalScope['fetch']; - beforeEach(() => { - oldFetch = window.fetch; - delete window.fetch; - }); - - afterEach(() => { - window.fetch = oldFetch; - }); + voidFetchDuringEachTest(); it('throws if no fetch is present', () => { expect(() => checkFetcher(undefined)).toThrow( diff --git a/src/link/http/__tests__/helpers.ts b/src/link/http/__tests__/helpers.ts new file mode 100644 --- /dev/null +++ b/src/link/http/__tests__/helpers.ts @@ -0,0 +1,29 @@ +export function voidFetchDuringEachTest() { + let fetchDesc = Object.getOwnPropertyDescriptor(window, "fetch"); + + beforeEach(() => { + fetchDesc = fetchDesc || Object.getOwnPropertyDescriptor(window, "fetch"); + if (fetchDesc?.configurable) { + delete (window as any).fetch; + } + }); + + afterEach(() => { + if (fetchDesc?.configurable) { + Object.defineProperty(window, "fetch", fetchDesc); + } + }); +} + +describe("voidFetchDuringEachTest", () => { + voidFetchDuringEachTest(); + + it("hides the global.fetch function", () => { + expect(window.fetch).toBe(void 0); + expect(() => fetch).toThrowError(ReferenceError); + }); + + it("globalThis === window", () => { + expect(globalThis).toBe(window); + }); +}); diff --git a/src/link/retry/__tests__/retryLink.ts b/src/link/retry/__tests__/retryLink.ts --- a/src/link/retry/__tests__/retryLink.ts +++ b/src/link/retry/__tests__/retryLink.ts @@ -67,7 +67,7 @@ describe('RetryLink', () => { const firstTry = fromError(standardError); // Hold the test hostage until we're hit let secondTry; - const untilSecondTry = new Promise(resolve => { + const untilSecondTry = new Promise<void>(resolve => { secondTry = { subscribe(observer: any) { resolve(); // Release hold on test. 
diff --git a/src/react/hoc/__tests__/queries/errors.test.tsx b/src/react/hoc/__tests__/queries/errors.test.tsx --- a/src/react/hoc/__tests__/queries/errors.test.tsx +++ b/src/react/hoc/__tests__/queries/errors.test.tsx @@ -252,7 +252,7 @@ describe('[queries] errors', () => { }); it('will not log a warning when there is an error that is not caught in the render method when using query', () => - new Promise((resolve, reject) => { + new Promise<void>((resolve, reject) => { const query: DocumentNode = gql` query people { allPeople(first: 1) {
chore(deps): update dependency typescript to v4 [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [typescript](https://www.typescriptlang.org/) ([source](https://togithub.com/Microsoft/TypeScript)) | [`3.9.9` -> `4.3.2`](https://renovatebot.com/diffs/npm/typescript/3.9.9/4.3.2) | [![age](https://badges.renovateapi.com/packages/npm/typescript/4.3.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/npm/typescript/4.3.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/npm/typescript/4.3.2/compatibility-slim/3.9.9)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/npm/typescript/4.3.2/confidence-slim/3.9.9)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes <details> <summary>Microsoft/TypeScript</summary> ### [`v4.3.2`](https://togithub.com/Microsoft/TypeScript/releases/v4.3.2) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.2.4...v4.3.2) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-3). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.3.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.3.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.3.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.3.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.3.2](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.3.2%22+is%3Aclosed+). Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-43) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.2.4`](https://togithub.com/Microsoft/TypeScript/releases/v4.2.4) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.2.3...v4.2.4) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-2). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.2.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.2 (Stable)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.2%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.3](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.3%22+is%3Aclosed+). 
- [fixed issues query for TypeScript v4.2.4](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.4%22+is%3Aclosed+). Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-424) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.2.3`](https://togithub.com/Microsoft/TypeScript/releases/v4.2.3) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.2.2...v4.2.3) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-2). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.2.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.2 (Stable)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.2%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.3](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.3%22+is%3Aclosed+). Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-423) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.2.2`](https://togithub.com/Microsoft/TypeScript/releases/v4.2.2) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.1.5...v4.2.2) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-2). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.2.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.2.2](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.2.2%22+is%3Aclosed+). 
Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-42) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.1.5`](https://togithub.com/Microsoft/TypeScript/releases/v4.1.5) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.1.4...v4.1.5) This release contains a fix for [an issue when language service plugins have no specified name](https://togithub.com/microsoft/TypeScript/issues/42718). ### [`v4.1.4`](https://togithub.com/Microsoft/TypeScript/releases/v4.1.4) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.1.3...v4.1.4) This release contains fixes for a [security risk involving language service plugin loading](https://togithub.com/microsoft/TypeScript/issues/42712). More details are available [here](https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-1639). ### [`v4.1.3`](https://togithub.com/Microsoft/TypeScript/releases/v4.1.3) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.1.2...v4.1.3) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-1). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.1.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.1.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.1.2 (Stable)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.2%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.1.3 (Stable)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.3%22+is%3Aclosed+). Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-41) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.1.2`](https://togithub.com/Microsoft/TypeScript/releases/v4.1.2) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.0.7...v4.1.2) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-1). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.1.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.1.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.1.2 (Stable)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.1.2%22+is%3Aclosed+). 
Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-41) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.0.7`](https://togithub.com/Microsoft/TypeScript/releases/v4.0.7) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.0.6...v4.0.7) This release contains a fix for [an issue when language service plugins have no specified name](https://togithub.com/microsoft/TypeScript/issues/42718). ### [`v4.0.6`](https://togithub.com/Microsoft/TypeScript/releases/v4.0.6) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.0.5...v4.0.6) This release contains fixes for a [security risk involving language service plugin loading](https://togithub.com/microsoft/TypeScript/issues/42712). More details are available [here](https://msrc.microsoft.com/update-guide/vulnerability/CVE-2021-1639). ### [`v4.0.5`](https://togithub.com/Microsoft/TypeScript/releases/v4.0.5) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.0.3...v4.0.5) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-0/). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.0.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.2](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.2%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.3](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=milestone%3A%22TypeScript+4.0.3%22+is%3Aclosed+). - TypeScript 4.0.4 NOT FOUND - [fixed issues query for TypeScript v4.0.5](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=milestone%3A%22TypeScript+4.0.5%22+is%3Aclosed+). Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) ### [`v4.0.3`](https://togithub.com/Microsoft/TypeScript/releases/v4.0.3) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v4.0.2...v4.0.3) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-0/). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.0.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.2](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.2%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.3](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=milestone%3A%22TypeScript+4.0.3%22+is%3Aclosed+). 
Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-40) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) ### [`v4.0.2`](https://togithub.com/Microsoft/TypeScript/releases/v4.0.2) [Compare Source](https://togithub.com/Microsoft/TypeScript/compare/v3.9.9...v4.0.2) For release notes, check out the [release announcement](https://devblogs.microsoft.com/typescript/announcing-typescript-4-0/). For the complete list of fixed issues, check out the - [fixed issues query for TypeScript v4.0.0 (Beta)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.0%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.1 (RC)](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.1%22+is%3Aclosed+). - [fixed issues query for TypeScript v4.0.2](https://togithub.com/Microsoft/TypeScript/issues?utf8=%E2%9C%93\&q=is%3Aissue+milestone%3A%22TypeScript+4.0.2%22+is%3Aclosed+). Downloads are available on: - [npm](https://www.npmjs.com/package/typescript) - [Visual Studio 2017/2019](https://marketplace.visualstudio.com/items?itemName=TypeScriptTeam.TypeScript-40) ([Select new version in project options](https://togithub.com/Microsoft/TypeScript/wiki/Updating-TypeScript-in-Visual-Studio-2017)) - [NuGet package](https://www.nuget.org/packages/Microsoft.TypeScript.MSBuild) </details> --- ### Configuration 📅 **Schedule**: "every weekend" in timezone America/Los_Angeles. 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/apollographql/apollo-client).
2021-06-17T17:01:05Z
3.4