From 335b81140bbd2db5eeca0a80a3a02d1d3305e0c1 Mon Sep 17 00:00:00 2001 From: James Brown <64858662+james-d-brown@users.noreply.github.com> Date: Fri, 16 Aug 2024 15:52:24 +0100 Subject: [PATCH 1/6] Support feature-specific offsets to allow for manual datum adjustments or similar constant adjustments, #36. --- src/wres/pipeline/pooling/PoolFactory.java | 283 ++++++++++++++---- .../pipeline/EvaluationUtilitiesTest.java | 5 +- wres-config/nonsrc/schema.yml | 16 +- .../wres/config/yaml/DeclarationFactory.java | 4 +- .../config/yaml/DeclarationInterpolator.java | 8 +- .../wres/config/yaml/DeclarationMigrator.java | 10 +- .../config/yaml/DeclarationUtilities.java | 8 +- .../config/yaml/DeclarationValidator.java | 1 - .../config/yaml/components/FeatureGroups.java | 19 +- .../wres/config/yaml/components/Features.java | 18 +- .../wres/config/yaml/components/Offset.java | 11 + .../FeatureGroupsDeserializer.java | 16 +- .../deserializers/FeaturesDeserializer.java | 116 +++++-- .../config/yaml/DeclarationFactoryTest.java | 52 +++- .../yaml/DeclarationInterpolatorTest.java | 31 +- .../config/yaml/DeclarationUtilitiesTest.java | 26 +- .../config/yaml/DeclarationValidatorTest.java | 29 +- .../thresholds/ThresholdSlicerTest.java | 9 +- .../src/wres/io/project/DatabaseProject.java | 7 + .../src/wres/io/project/InMemoryProject.java | 7 + wres-io/src/wres/io/project/Project.java | 13 + .../src/wres/io/project/ProjectUtilities.java | 105 ++++++- .../wres/io/project/DatabaseProjectTest.java | 5 +- .../wres/io/project/ProjectUtilitiesTest.java | 13 +- .../database/AnalysisRetrieverTest.java | 5 +- .../EnsembleForecastRetrieverTest.java | 5 +- .../EnsembleRetrieverFactoryTest.java | 5 +- .../database/ObservationRetrieverTest.java | 5 +- .../SingleValuedForecastRetrieverTest.java | 5 +- .../SingleValuedRetrieverFactoryTest.java | 5 +- .../reading/wrds/geography/FeatureFiller.java | 13 +- .../wres/reading/ReaderUtilitiesTest.java | 19 +- .../reading/wrds/ahps/WrdsAhpsReaderTest.java | 9 +- .../wrds/geography/FeatureFillerTest.java | 12 +- 34 files changed, 741 insertions(+), 154 deletions(-) create mode 100644 wres-config/src/wres/config/yaml/components/Offset.java diff --git a/src/wres/pipeline/pooling/PoolFactory.java b/src/wres/pipeline/pooling/PoolFactory.java index 8091183eb9..d8aa768c94 100644 --- a/src/wres/pipeline/pooling/PoolFactory.java +++ b/src/wres/pipeline/pooling/PoolFactory.java @@ -44,6 +44,7 @@ import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.GeneratedBaseline; import wres.config.yaml.components.GeneratedBaselines; +import wres.config.yaml.components.Offset; import wres.config.yaml.components.Season; import wres.config.yaml.components.Source; import wres.config.yaml.components.Values; @@ -81,7 +82,9 @@ import wres.io.retrieving.CachingSupplier; import wres.io.retrieving.RetrieverFactory; import wres.statistics.generated.Evaluation; +import wres.statistics.generated.Geometry; import wres.statistics.generated.GeometryGroup; +import wres.statistics.generated.GeometryTuple; import wres.statistics.generated.SummaryStatistic; import wres.statistics.generated.TimeScale; @@ -560,10 +563,18 @@ else if ( method == GeneratedBaselines.CLIMATOLOGY ) } } - // Create any required transformers for value constraints and units + // Left value transfer, which is a composition of several transformations + Map offsets = project.getOffsets(); + Map leftOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getLeft(), + Map.Entry::getValue 
) ); + Function leftOffsetGenerator = + this.getOffsetTransformer( leftOffsets, DatasetOrientation.LEFT ); DoubleUnaryOperator valueTransformer = this.getValueTransformer( declaration.values() ); - UnaryOperator> valueAndUnitTransformer = - this.getValueTransformer( valueTransformer ); + UnaryOperator> leftValueTransformer = + this.getValueTransformer( leftOffsetGenerator, valueTransformer ); // Apply any valid time season transformer based on the right-ish data type UnaryOperator> validTimeSeasonTransformer = @@ -571,8 +582,34 @@ else if ( method == GeneratedBaselines.CLIMATOLOGY ) declaration.right() .type() ); - UnaryOperator> composedTransformer = - t -> valueAndUnitTransformer.apply( validTimeSeasonTransformer.apply( t ) ); + UnaryOperator> composedLeftTransformer = + t -> leftValueTransformer.apply( validTimeSeasonTransformer.apply( t ) ); + + // Right transformer, which is a composition of several transformations + Map rightOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getRight(), + Map.Entry::getValue ) ); + Function rightOffsetGenerator = + this.getOffsetTransformer( rightOffsets, DatasetOrientation.RIGHT ); + UnaryOperator> rightValueTransformer = + this.getValueTransformer( rightOffsetGenerator, valueTransformer ); + UnaryOperator> composedRightTransformer = + t -> rightValueTransformer.apply( validTimeSeasonTransformer.apply( t ) ); + + // Baseline transformer, which is a composition of several transformations + Map baselineOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getBaseline(), + Map.Entry::getValue ) ); + Function baselineOffsetGenerator = + this.getOffsetTransformer( baselineOffsets, DatasetOrientation.BASELINE ); + UnaryOperator> baselineValueTransformer = + this.getValueTransformer( baselineOffsetGenerator, valueTransformer ); + UnaryOperator> composedBaselineTransformer = + t -> baselineValueTransformer.apply( validTimeSeasonTransformer.apply( t ) ); // Currently only a seasonal filter for reference times, which applies equally to all sides Predicate> filter = this.getReferenceTimeSeasonFilter( declaration.season() ); @@ -604,9 +641,9 @@ else if ( method == GeneratedBaselines.CLIMATOLOGY ) .setRetrieverFactory( retrieverFactory ) .setPoolRequests( poolRequests ) .setBaselineGenerator( baselineGenerator ) - .setLeftTransformer( composedTransformer ) - .setRightTransformer( composedTransformer ) - .setBaselineTransformer( composedTransformer ) + .setLeftTransformer( composedLeftTransformer ) + .setRightTransformer( composedRightTransformer ) + .setBaselineTransformer( composedBaselineTransformer ) .setLeftFilter( filter ) .setRightFilter( filter ) .setBaselineFilter( filter ) @@ -700,10 +737,20 @@ private List>>>> leftUpscaler, this.getProject() .getDesiredTimeScale() ); - // Left transformer + // Left transformer, which is a composition of several transformations + Map offsets = project.getOffsets(); + Map leftOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getLeft(), + Map.Entry::getValue ) ); + Function leftOffsetGenerator = + this.getOffsetTransformer( leftOffsets, DatasetOrientation.LEFT ); + + DoubleUnaryOperator leftValueTransformer = this.getValueTransformer( declaration.values() ); UnaryOperator> leftValueAndUnitTransformer = - this.getValueTransformer( leftValueTransformer ); + this.getValueTransformer( leftOffsetGenerator, leftValueTransformer ); // Apply any valid time season transformer based on the right-ish data type 
UnaryOperator> leftValidTimeSeasonTransformer = @@ -719,13 +766,20 @@ private List>>>> // the ensemble members themselves are not resampled boolean cacheSorted = Objects.nonNull( declaration.sampleUncertainty() ); - // Right transformer + // Right transformer, which is a composition of several transformations + Map rightOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getRight(), + Map.Entry::getValue ) ); + Function rightOffsetGenerator = + this.getOffsetTransformer( rightOffsets, DatasetOrientation.RIGHT ); Dataset right = DeclarationUtilities.getDeclaredDataset( project.getDeclaration(), DatasetOrientation.RIGHT ); UnaryOperator> rightValueTransformer = this.getValueTransformer( leftValueTransformer, right, cacheSorted ); UnaryOperator> rightValueAndUnitTransformer = - this.getValueTransformer( rightValueTransformer ); + this.getValueTransformer( rightOffsetGenerator, rightValueTransformer ); UnaryOperator> rightValidTimeSeasonTransformer = this.getValidTimeSeasonTransformer( declaration.season(), @@ -735,8 +789,16 @@ private List>>>> UnaryOperator> composedRightTransformer = t -> rightValueAndUnitTransformer.apply( rightValidTimeSeasonTransformer.apply( t ) ); - // Baseline transformer + // Baseline transformer, which is a composition of several transformations Dataset baseline = null; + Map baselineOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getBaseline(), + Map.Entry::getValue ) ); + Function baselineOffsetGenerator = + this.getOffsetTransformer( baselineOffsets, DatasetOrientation.BASELINE ); + if ( project.hasBaseline() ) { baseline = DeclarationUtilities.getDeclaredDataset( project.getDeclaration(), @@ -748,7 +810,7 @@ private List>>>> cacheSorted ); UnaryOperator> baselineValueAndUnitTransformer = - this.getValueTransformer( baselineValueTransformer ); + this.getValueTransformer( baselineOffsetGenerator, baselineValueTransformer ); // Re-use the right season transformer UnaryOperator> composedBaselineTransformer = @@ -871,38 +933,62 @@ private List>>>> leftUpscaler, this.getProject() .getDesiredTimeScale() ); - // Left transformer + // Left transformer, which is a composition of several transformations + Map offsets = project.getOffsets(); + Map leftOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getLeft(), + Map.Entry::getValue ) ); + Function leftOffsetGenerator = + this.getOffsetTransformer( leftOffsets, DatasetOrientation.LEFT ); + DoubleUnaryOperator leftValueTransformer = this.getValueTransformer( declaration.values() ); UnaryOperator> leftValueAndUnitTransformer = - this.getValueTransformer( leftValueTransformer ); + this.getValueTransformer( leftOffsetGenerator, leftValueTransformer ); // Whether to cache the sorted ensemble members, trading cpu for memory. 
Do this when performing sampling // uncertainty estimation, because repeated sorting for each realization is otherwise extremely expensive and // the ensemble members themselves are not resampled boolean cacheSorted = Objects.nonNull( declaration.sampleUncertainty() ); - // Right transformer + // Right transformer, which is a composition of several transformations + Map rightOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getRight(), + Map.Entry::getValue ) ); + Function rightOffsetGenerator = + this.getOffsetTransformer( rightOffsets, DatasetOrientation.RIGHT ); + Dataset right = DeclarationUtilities.getDeclaredDataset( project.getDeclaration(), DatasetOrientation.RIGHT ); UnaryOperator> rightValueTransformer = this.getValueTransformer( leftValueTransformer, right, cacheSorted ); UnaryOperator> rightValueAndUnitTransformer = - this.getValueTransformer( rightValueTransformer ); + this.getValueTransformer( rightOffsetGenerator, rightValueTransformer ); - // Baseline transformer + // Baseline transformer, which is a composition of several transformations Dataset baseline = null; if ( project.hasBaseline() ) { baseline = DeclarationUtilities.getDeclaredDataset( project.getDeclaration(), DatasetOrientation.BASELINE ); } + Map baselineOffsets = offsets.entrySet() + .stream() + .collect( Collectors.toMap( f -> f.getKey() + .getBaseline(), + Map.Entry::getValue ) ); + Function baselineOffsetGenerator = + this.getOffsetTransformer( baselineOffsets, DatasetOrientation.BASELINE ); UnaryOperator> baselineValueTransformer = this.getValueTransformer( leftValueTransformer, baseline, cacheSorted ); UnaryOperator> baselineValueAndUnitTransformer = - this.getValueTransformer( baselineValueTransformer ); + this.getValueTransformer( baselineOffsetGenerator, baselineValueTransformer ); // Currently only a seasonal filter, which applies equally to all sides Predicate> singleValuedFilter = this.getReferenceTimeSeasonFilter( declaration.season() ); @@ -1188,14 +1274,16 @@ private long getNextPoolId() } /** - * Returns a transformer that applies a unit conversion, followed by the input transformation to a single-valued - * time-series. + * Returns a transformer that applies a sequence of value transformations, including a unit conversion, offset and + * a prescribed single-valued transformation. 
* + * @param offsetGenerator the offset generator * @param basicTransformer the transformer to apply after a unit conversion * @return a transformer that applies a unit conversion followed by the input transformer */ - private UnaryOperator> getValueTransformer( DoubleUnaryOperator basicTransformer ) + private UnaryOperator> getValueTransformer( Function offsetGenerator, + DoubleUnaryOperator basicTransformer ) { return toTransform -> { @@ -1206,26 +1294,34 @@ private UnaryOperator> getValueTransformer( DoubleUnaryOperat .getDesiredMeasurementUnitName(); Map aliases = this.getUnitMapper() .getUnitAliases(); - DoubleUnaryOperator innerUnitMapper = this.getUnitMapper( existingUnitString, - desiredUnitString, - aliases, - toTransform.getTimeScale(), - this.getProject() - .getDesiredTimeScale() ); + DoubleUnaryOperator innerUnitMapper = this.getUnitTransformer( existingUnitString, + desiredUnitString, + aliases, + toTransform.getTimeScale(), + this.getProject() + .getDesiredTimeScale() ); UnaryOperator metaMapper = metadata -> toTransform.getMetadata() .toBuilder() .setUnit( desiredUnitString ) .build(); - DoubleUnaryOperator transformer = basicTransformer.compose( innerUnitMapper ); + Function, DoubleUnaryOperator> offsetTransformerGenerator = + this.getOffsetTransformer( offsetGenerator ); + DoubleUnaryOperator offsetTransformer = offsetTransformerGenerator.apply( toTransform ); + + // Order of composition is critical because composed function applies "before this" function and unit + // conversion must happen first, as offsets are declared in desired units, hence the counterintuitive order. + DoubleUnaryOperator transformer = basicTransformer.compose( offsetTransformer ) + .compose( innerUnitMapper ); + TimeSeries transformed = TimeSeriesSlicer.transform( toTransform, transformer::applyAsDouble, metaMapper ); if ( LOGGER.isTraceEnabled() ) { - LOGGER.trace( "Tranformed the values associated with time-series {}, producing a new time-series {}.", + LOGGER.trace( "Transformed the values associated with time-series {}, producing a new time-series {}.", toTransform.hashCode(), transformed.hashCode() ); } @@ -1235,14 +1331,16 @@ private UnaryOperator> getValueTransformer( DoubleUnaryOperat } /** - * Returns a transformer that applies a unit conversion, followed by the input transformation to an ensemble - * time-series. + * Returns a transformer that applies a sequence of value transformations, including a unit conversion, offset and + * a prescribed ensemble transformation. 
* + * @param offsetGenerator the offset generator * @param basicTransformer the transformer to apply after a unit conversion * @return a transformer that applies a unit conversion followed by the input transformer */ - private UnaryOperator> getValueTransformer( UnaryOperator> basicTransformer ) + private UnaryOperator> getValueTransformer( Function offsetGenerator, + UnaryOperator> basicTransformer ) { return toTransform -> { // Apply the unit mapping first, then the basic transformer @@ -1253,20 +1351,30 @@ private UnaryOperator> getValueTransformer( UnaryOperator aliases = this.getUnitMapper() .getUnitAliases(); - DoubleUnaryOperator innerUnitMapper = this.getUnitMapper( existingUnitString, - desiredUnitString, - aliases, - toTransform.getTimeScale(), - this.getProject() - .getDesiredTimeScale() ); - UnaryOperator> ensembleUnitMapper = this.getEnsembleUnitMapper( innerUnitMapper ); - UnaryOperator> transformer = event -> basicTransformer.compose( ensembleUnitMapper ) + DoubleUnaryOperator innerUnitMapper = this.getUnitTransformer( existingUnitString, + desiredUnitString, + aliases, + toTransform.getTimeScale(), + this.getProject() + .getDesiredTimeScale() ); + UnaryOperator> ensembleUnitMapper = this.getEnsembleValueTransformer( innerUnitMapper ); + + Function, DoubleUnaryOperator> offsetTransformerGenerator = + this.getOffsetTransformer( offsetGenerator ); + DoubleUnaryOperator offsetTransformer = offsetTransformerGenerator.apply( toTransform ); + UnaryOperator> ensembleOffsetTransformer = + this.getEnsembleValueTransformer( offsetTransformer ); + + // Order of composition is critical because composed function applies "before this" function and unit + // conversion must happen first, as offsets are declared in desired units, hence the counterintuitive order. + UnaryOperator> transformer = event -> basicTransformer.compose( ensembleOffsetTransformer ) + .compose( ensembleUnitMapper ) .apply( event ); TimeSeries transformed = TimeSeriesSlicer.transformByEvent( toTransform, transformer ); if ( LOGGER.isTraceEnabled() ) { - LOGGER.trace( "Tranformed the values associated with time-series {}, producing a new time-series {}.", + LOGGER.trace( "Transformed the values associated with time-series {}, producing a new time-series {}.", toTransform.hashCode(), transformed.hashCode() ); } @@ -1282,11 +1390,11 @@ private UnaryOperator> getValueTransformer( UnaryOperator aliases, - TimeScaleOuter existingTimeScale, - TimeScaleOuter desiredTimeScale ) + private DoubleUnaryOperator getUnitTransformer( String existingUnitString, + String desiredUnitString, + Map aliases, + TimeScaleOuter existingTimeScale, + TimeScaleOuter desiredTimeScale ) { DoubleUnaryOperator converter = this.converterCache.getIfPresent( existingUnitString ); @@ -1355,18 +1463,89 @@ private DoubleUnaryOperator getUnitMapper( String existingUnitString, } /** - * @param unitMapper the unit mapper - * @return a function that maps the units of an ensemble event + * Creates a geometry-specific time-series value transformer using the supplied offsets. Specifically, when the + * time-series geometry contains a corresponding offset in the supplied input, the offset value is added to the + * time-series event value, otherwise the original value is returned. 
+ * + * @param offsets the geometry-specific offsets + * @param orientation the dataset orientation + * @return a function that returns an offset transformer for a specific geometry + */ + + private Function getOffsetTransformer( Map offsets, + DatasetOrientation orientation ) + { + Objects.requireNonNull( offsets ); + + return geometry -> + { + // Short circuit + if ( offsets.isEmpty() + || !offsets.containsKey( geometry ) ) + { + return a -> a; // Identity transform + } + + Offset add = offsets.get( geometry ); + switch ( orientation ) + { + case LEFT -> + { + return a -> a + add.left(); + } + case RIGHT -> + { + return a -> a + add.right(); + } + case BASELINE -> + { + return a -> a + add.baseline(); + } + // Identity transform + default -> + { + return a -> a; + } + } + }; + } + + /** + * Creates a geometry-specific time-series value transformer using the supplied generator. + * + * @param the time-series event value type + * @param offsetGenerator the offset generator + * @return a function that returns an offset transformer for a specific time-series + */ + + private Function, DoubleUnaryOperator> getOffsetTransformer( Function offsetGenerator ) + { + Objects.requireNonNull( offsetGenerator ); + + return series -> + { + Geometry geometry = series.getMetadata() + .getFeature() + .getGeometry(); + + return offsetGenerator.apply( geometry ); + }; + } + + /** + * Creates an ensemble value transformer from a single-valued transformer. + * @param valueMapper the single-valued value transformer + * @return a function that transforms the values of an ensemble event */ - private UnaryOperator> getEnsembleUnitMapper( DoubleUnaryOperator unitMapper ) + private UnaryOperator> getEnsembleValueTransformer( DoubleUnaryOperator valueMapper ) { return ensembleEvent -> { Ensemble ensemble = ensembleEvent.getValue(); double[] values = ensemble.getMembers(); double[] mappedValues = Arrays.stream( values ) - .map( unitMapper ) + .map( valueMapper ) .toArray(); Ensemble convertedEnsemble = Ensemble.of( mappedValues, diff --git a/test/wres/pipeline/EvaluationUtilitiesTest.java b/test/wres/pipeline/EvaluationUtilitiesTest.java index d84c7eb5aa..b0620d074a 100644 --- a/test/wres/pipeline/EvaluationUtilitiesTest.java +++ b/test/wres/pipeline/EvaluationUtilitiesTest.java @@ -15,6 +15,7 @@ import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.LeadTimeInterval; import wres.config.yaml.components.ThresholdBuilder; import wres.config.yaml.components.ThresholdType; @@ -261,7 +262,9 @@ void testGetSummaryStatisticsCalculatorsWithTwoTimeWindowsAcrossFeatureGroups() Set geometryGroups = Set.of( firstGroup, secondGroup ); - FeatureGroups featureGroups = new FeatureGroups( geometryGroups ); + FeatureGroups featureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( geometryGroups ) + .build(); EvaluationDeclaration evaluation = EvaluationDeclarationBuilder.builder() .leadTimes( leadTimeInterval ) diff --git a/wres-config/nonsrc/schema.yml b/wres-config/nonsrc/schema.yml index 3df710bf1c..8232558304 100644 --- a/wres-config/nonsrc/schema.yml +++ b/wres-config/nonsrc/schema.yml @@ -566,9 +566,9 @@ definitions: cross-pairing chosen." 
anyOf: - "$ref": "#/definitions/CrossPairEnum" - - "$ref": "#/definitions/CrossPairFull" + - "$ref": "#/definitions/CrossPairWithParameters" - CrossPairFull: + CrossPairWithParameters: type: object additionalProperties: false properties: @@ -665,25 +665,25 @@ definitions: properties: observed: anyOf: - - "$ref": "#/definitions/FeatureTupleFull" + - "$ref": "#/definitions/FeatureWithParameters" - type: string minLength: 1 maxLength: 32 predicted: anyOf: - - "$ref": "#/definitions/FeatureTupleFull" + - "$ref": "#/definitions/FeatureWithParameters" - type: string minLength: 1 maxLength: 32 baseline: anyOf: - - "$ref": "#/definitions/FeatureTupleFull" + - "$ref": "#/definitions/FeatureWithParameters" - type: string minLength: 1 maxLength: 32 - FeatureTupleFull: - title: A feature tuple with additional properties + FeatureWithParameters: + title: A feature with additional properties type: object additionalProperties: false properties: @@ -695,6 +695,8 @@ definitions: type: string minLength: 1 maxLength: 2083 + offset: + type: number required: - name diff --git a/wres-config/src/wres/config/yaml/DeclarationFactory.java b/wres-config/src/wres/config/yaml/DeclarationFactory.java index af6326e592..40813fd64a 100644 --- a/wres-config/src/wres/config/yaml/DeclarationFactory.java +++ b/wres-config/src/wres/config/yaml/DeclarationFactory.java @@ -165,11 +165,11 @@ public class DeclarationFactory .enable( DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY ) .enable( DeserializationFeature.FAIL_ON_READING_DUP_TREE_KEY ) .enable( JsonParser.Feature.STRICT_DUPLICATE_DETECTION ) + .disable( DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES ) .build() .registerModule( new ProtobufModule() ) .registerModule( new JavaTimeModule() ) - .configure( DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, - true ); + .enable( DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES ); /** Mapper for serialization. 
*/ private static final ObjectMapper SERIALIZER = diff --git a/wres-config/src/wres/config/yaml/DeclarationInterpolator.java b/wres-config/src/wres/config/yaml/DeclarationInterpolator.java index 616230d763..098fab2d25 100644 --- a/wres-config/src/wres/config/yaml/DeclarationInterpolator.java +++ b/wres-config/src/wres/config/yaml/DeclarationInterpolator.java @@ -34,6 +34,7 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.Features; import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Formats; @@ -486,7 +487,8 @@ private static void interpolateSparseFeatures( EvaluationDeclarationBuilder buil .geometries(); Set denseFeatures = DeclarationInterpolator.interpolateSparseFeatures( features, hasBaseline ); - Features adjustedFeatures = new Features( denseFeatures ); + Features adjustedFeatures = new Features( denseFeatures, builder.features() + .offsets() ); builder.features( adjustedFeatures ); } @@ -512,7 +514,9 @@ private static void interpolateSparseFeatures( EvaluationDeclarationBuilder buil .build(); adjustedGeoGroups.add( nextAdjustedGroup ); } - FeatureGroups adjustedFeatureGroups = new FeatureGroups( adjustedGeoGroups ); + FeatureGroups adjustedFeatureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( adjustedGeoGroups ) + .build(); builder.featureGroups( adjustedFeatureGroups ); } } diff --git a/wres-config/src/wres/config/yaml/DeclarationMigrator.java b/wres-config/src/wres/config/yaml/DeclarationMigrator.java index b4dc50b04b..8960cacf3e 100644 --- a/wres-config/src/wres/config/yaml/DeclarationMigrator.java +++ b/wres-config/src/wres/config/yaml/DeclarationMigrator.java @@ -82,10 +82,12 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.FeatureService; import wres.config.yaml.components.FeatureServiceBuilder; import wres.config.yaml.components.FeatureServiceGroup; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Format; import wres.config.yaml.components.Formats; import wres.config.yaml.components.GeneratedBaseline; @@ -293,7 +295,9 @@ private static void migrateFeatures( List features, EvaluationDecl if ( !features.isEmpty() ) { Set geometries = DeclarationMigrator.migrateFeatures( features ); - Features wrappedFeatures = new Features( geometries ); + Features wrappedFeatures = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); builder.features( wrappedFeatures ); } } @@ -312,7 +316,9 @@ private static void migrateFeatureGroups( List featureGroups, Evalu featureGroups.stream() .map( DeclarationMigrator::migrateFeatureGroup ) .collect( Collectors.toSet() ); - FeatureGroups wrappedGroups = new FeatureGroups( geometryGroups ); + FeatureGroups wrappedGroups = FeatureGroupsBuilder.builder() + .geometryGroups( geometryGroups ) + .build(); builder.featureGroups( wrappedGroups ); } } diff --git a/wres-config/src/wres/config/yaml/DeclarationUtilities.java b/wres-config/src/wres/config/yaml/DeclarationUtilities.java index b1b79bc6c7..66995c18f1 100644 --- a/wres-config/src/wres/config/yaml/DeclarationUtilities.java +++ 
b/wres-config/src/wres/config/yaml/DeclarationUtilities.java @@ -49,6 +49,7 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.FeatureServiceGroup; import wres.config.yaml.components.Features; import wres.config.yaml.components.LeadTimeInterval; @@ -2243,7 +2244,8 @@ private static EvaluationDeclaration removeFeaturesWithoutThresholds( Evaluation .collect( Collectors.toSet() ); // Set the new features - Features filteredFeatures = new Features( filtered ); + Features filteredFeatures = new Features( filtered, declaration.features() + .offsets() ); builder.features( filteredFeatures ); if ( LOGGER.isWarnEnabled() @@ -2305,7 +2307,9 @@ private static EvaluationDeclaration removeFeaturesWithoutThresholds( Evaluation .toList() ); } - FeatureGroups finalFeatureGroups = new FeatureGroups( adjustedGroups ); + FeatureGroups finalFeatureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( adjustedGroups ) + .build(); builder.featureGroups( finalFeatureGroups ); } diff --git a/wres-config/src/wres/config/yaml/DeclarationValidator.java b/wres-config/src/wres/config/yaml/DeclarationValidator.java index db4cc38d7b..e3cf325432 100644 --- a/wres-config/src/wres/config/yaml/DeclarationValidator.java +++ b/wres-config/src/wres/config/yaml/DeclarationValidator.java @@ -510,7 +510,6 @@ private static List validateDatasets( EvaluationDeclarati // Data types are valid List typesValid = DeclarationValidator.typesAreValid( declaration ); events.addAll( typesValid ); - // Ensembles cannot be present on both left and right sides List ensembles = DeclarationValidator.ensembleOnOneSideOnly( declaration ); events.addAll( ensembles ); diff --git a/wres-config/src/wres/config/yaml/components/FeatureGroups.java b/wres-config/src/wres/config/yaml/components/FeatureGroups.java index e62d54f7a3..8e03d663c6 100644 --- a/wres-config/src/wres/config/yaml/components/FeatureGroups.java +++ b/wres-config/src/wres/config/yaml/components/FeatureGroups.java @@ -1,7 +1,9 @@ package wres.config.yaml.components; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.StringJoiner; @@ -14,15 +16,18 @@ import wres.config.yaml.deserializers.FeatureGroupsDeserializer; import wres.config.yaml.serializers.FeatureGroupsSerializer; import wres.statistics.generated.GeometryGroup; +import wres.statistics.generated.GeometryTuple; /** - * Geographic feature groups. + * Geographic feature groups and associated offset values (e.g., datum offsets). Absence of an offset for a given + * feature with a group implies no/zero offset. * @param geometryGroups the feature groups + * @param offsets the offset values associated with features in the group, such as a datum offset, if any */ @RecordBuilder @JsonSerialize( using = FeatureGroupsSerializer.class ) @JsonDeserialize( using = FeatureGroupsDeserializer.class ) -public record FeatureGroups( Set geometryGroups ) +public record FeatureGroups( Set geometryGroups, Map offsets ) { /** * Sets the default values. 
@@ -40,6 +45,16 @@ public record FeatureGroups( Set geometryGroups ) // Immutable copy, preserving insertion order geometryGroups = Collections.unmodifiableSet( new LinkedHashSet<>( geometryGroups ) ); } + + if ( Objects.isNull( offsets ) ) + { + offsets = Collections.emptyMap(); + } + else + { + // Immutable copy, preserving insertion order + offsets = Collections.unmodifiableMap( new LinkedHashMap<>( offsets ) ); + } } @Override diff --git a/wres-config/src/wres/config/yaml/components/Features.java b/wres-config/src/wres/config/yaml/components/Features.java index 898892f080..aab319103d 100644 --- a/wres-config/src/wres/config/yaml/components/Features.java +++ b/wres-config/src/wres/config/yaml/components/Features.java @@ -1,7 +1,9 @@ package wres.config.yaml.components; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.StringJoiner; @@ -16,13 +18,15 @@ import wres.statistics.generated.GeometryTuple; /** - * Geographic features. + * Geographic features and associated offset values (e.g., datum offsets). Absence of an offset for a given feature + * implies no/zero offset. * @param geometries the features + * @param offsets the offset values, such as a datum offset, if any */ @RecordBuilder @JsonSerialize( using = FeaturesSerializer.class ) @JsonDeserialize( using = FeaturesDeserializer.class ) -public record Features( Set geometries ) +public record Features( Set geometries, Map offsets ) { /** * Sets the default values. @@ -39,6 +43,16 @@ public record Features( Set geometries ) // Immutable copy, preserving insertion order geometries = Collections.unmodifiableSet( new LinkedHashSet<>( geometries ) ); } + + if ( Objects.isNull( offsets ) ) + { + offsets = Collections.emptyMap(); + } + else + { + // Immutable copy, preserving insertion order + offsets = Collections.unmodifiableMap( new LinkedHashMap<>( offsets ) ); + } } @Override diff --git a/wres-config/src/wres/config/yaml/components/Offset.java b/wres-config/src/wres/config/yaml/components/Offset.java new file mode 100644 index 0000000000..6eabc80a13 --- /dev/null +++ b/wres-config/src/wres/config/yaml/components/Offset.java @@ -0,0 +1,11 @@ +package wres.config.yaml.components; + +/** + * The offset value to apply to each dataset orientation. For example, a datum offset. 
+ * @param left the left-oriented offset + * @param right the right-oriented offset + * @param baseline the baseline-oriented offset + */ +public record Offset( double left, double right, double baseline ) +{ +} diff --git a/wres-config/src/wres/config/yaml/deserializers/FeatureGroupsDeserializer.java b/wres-config/src/wres/config/yaml/deserializers/FeatureGroupsDeserializer.java index 109a930e2e..5a53a901fa 100644 --- a/wres-config/src/wres/config/yaml/deserializers/FeatureGroupsDeserializer.java +++ b/wres-config/src/wres/config/yaml/deserializers/FeatureGroupsDeserializer.java @@ -1,8 +1,9 @@ package wres.config.yaml.deserializers; import java.io.IOException; -import java.util.Collections; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; import java.util.Set; @@ -16,7 +17,9 @@ import org.slf4j.LoggerFactory; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.Features; +import wres.config.yaml.components.Offset; import wres.statistics.generated.GeometryGroup; import wres.statistics.generated.GeometryTuple; @@ -71,6 +74,8 @@ private FeatureGroups getFeatureGroupsFromArray( ObjectReader reader, { // Preserve insertion order Set featureGroups = new LinkedHashSet<>(); + Map featureOffsets = new LinkedHashMap<>(); + int nodeCount = featureGroupsNode.size(); for ( int i = 0; i < nodeCount; i++ ) @@ -78,7 +83,6 @@ private FeatureGroups getFeatureGroupsFromArray( ObjectReader reader, JsonNode nextNode = featureGroupsNode.get( i ); String groupName = ""; Set geometries = null; - // Group name if ( nextNode.has( "name" ) ) { @@ -95,8 +99,9 @@ private FeatureGroups getFeatureGroupsFromArray( ObjectReader reader, parser.setCodec( reader ); Features features = FEATURES_DESERIALIZER.deserialize( parser, context ); geometries = features.geometries(); + featureOffsets.putAll( features.offsets() ); LOGGER.debug( "Discovered the following collection of geometries associated with a feature group " - + "named '{}': {}.", groupName, new Features( geometries ) ); + + "named '{}': {}.", groupName, features ); } // Create the group @@ -110,6 +115,9 @@ private FeatureGroups getFeatureGroupsFromArray( ObjectReader reader, } } - return new FeatureGroups( Collections.unmodifiableSet( featureGroups ) ); + return FeatureGroupsBuilder.builder() + .geometryGroups( featureGroups ) + .offsets( featureOffsets ) + .build(); } } diff --git a/wres-config/src/wres/config/yaml/deserializers/FeaturesDeserializer.java b/wres-config/src/wres/config/yaml/deserializers/FeaturesDeserializer.java index aa48650a40..7d4262f18e 100644 --- a/wres-config/src/wres/config/yaml/deserializers/FeaturesDeserializer.java +++ b/wres-config/src/wres/config/yaml/deserializers/FeaturesDeserializer.java @@ -1,8 +1,9 @@ package wres.config.yaml.deserializers; import java.io.IOException; -import java.util.Collections; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.Map; import java.util.Objects; import java.util.Set; @@ -14,10 +15,13 @@ import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; +import wres.config.yaml.components.Offset; import 
wres.statistics.generated.Geometry; import wres.statistics.generated.GeometryTuple; @@ -31,8 +35,13 @@ public class FeaturesDeserializer extends JsonDeserializer /** Logger. */ private static final Logger LOGGER = LoggerFactory.getLogger( FeaturesDeserializer.class ); + /** Re-used string. */ private static final String OBSERVED = "observed"; + + /** Re-used string. */ private static final String PREDICTED = "predicted"; + + /** Re-used string. */ private static final String BASELINE = "baseline"; @Override @@ -78,26 +87,33 @@ private Features getFeaturesFromArray( ObjectReader reader, { // Preserve insertion order Set features = new LinkedHashSet<>(); + Map offsets = new LinkedHashMap<>(); + int nodeCount = featuresNode.size(); for ( int i = 0; i < nodeCount; i++ ) { JsonNode nextNode = featuresNode.get( i ); + // Explicit feature declaration if ( nextNode.has( OBSERVED ) || nextNode.has( PREDICTED ) || nextNode.has( BASELINE ) ) { - GeometryTuple nextFeature = this.getGeometryTuple( reader, nextNode ); - features.add( nextFeature ); + Pair nextFeature = this.getGeometryTuple( reader, nextNode ); + features.add( nextFeature.getKey() ); + if ( Objects.nonNull( nextFeature.getValue() ) ) + { + offsets.put( nextFeature.getKey(), nextFeature.getValue() ); + } } else { // Apply to the left side only and fill out later because this depends on other declaration, such as // whether a baseline is declared - Geometry leftGeometry = this.getGeometry( reader, nextNode ); + Pair leftGeometry = this.getGeometry( reader, nextNode ); GeometryTuple tuple = GeometryTuple.newBuilder() - .setLeft( leftGeometry ) + .setLeft( leftGeometry.getKey() ) .build(); features.add( tuple ); @@ -109,7 +125,10 @@ private Features getFeaturesFromArray( ObjectReader reader, } } - return new Features( Collections.unmodifiableSet( features ) ); + return FeaturesBuilder.builder() + .geometries( features ) + .offsets( offsets ) + .build(); } /** @@ -120,33 +139,42 @@ private Features getFeaturesFromArray( ObjectReader reader, * @throws IOException if the geometries could not be mapped */ - private GeometryTuple getGeometryTuple( ObjectReader reader, JsonNode node ) throws IOException + private Pair getGeometryTuple( ObjectReader reader, JsonNode node ) throws IOException { GeometryTuple.Builder builder = GeometryTuple.newBuilder(); + Double leftOffset = null; + Double rightOffset = null; + Double baselineOffset = null; + if ( node.has( OBSERVED ) ) { // Full feature description JsonNode leftNode = node.get( OBSERVED ); - Geometry leftGeometry = this.getGeometry( reader, leftNode ); - builder.setLeft( leftGeometry ); + Pair leftGeometry = this.getGeometry( reader, leftNode ); + builder.setLeft( leftGeometry.getKey() ); + leftOffset = leftGeometry.getValue(); } if ( node.has( PREDICTED ) ) { JsonNode rightNode = node.get( PREDICTED ); - Geometry rightGeometry = this.getGeometry( reader, rightNode ); - builder.setRight( rightGeometry ); + Pair rightGeometry = this.getGeometry( reader, rightNode ); + builder.setRight( rightGeometry.getKey() ); + rightOffset = rightGeometry.getValue(); } if ( node.has( BASELINE ) ) { JsonNode baselineNode = node.get( BASELINE ); - Geometry baselineGeometry = this.getGeometry( reader, baselineNode ); - builder.setBaseline( baselineGeometry ); + Pair baselineGeometry = this.getGeometry( reader, baselineNode ); + builder.setBaseline( baselineGeometry.getKey() ); + baselineOffset = baselineGeometry.getValue(); } - return builder.build(); + Offset offset = this.getOffset( leftOffset, rightOffset, 
baselineOffset ); + + return Pair.of( builder.build(), offset ); } /** @@ -157,18 +185,68 @@ private GeometryTuple getGeometryTuple( ObjectReader reader, JsonNode node ) thr * @throws IOException if the node could not be read */ - private Geometry getGeometry( ObjectReader reader, JsonNode geometryNode ) throws IOException + private Pair getGeometry( ObjectReader reader, JsonNode geometryNode ) throws IOException { if ( geometryNode.has( "name" ) ) { - return reader.readValue( geometryNode, Geometry.class ); + Double offset = null; + + if ( geometryNode.has( "offset" ) ) + { + offset = geometryNode.get( "offset" ) + .asDouble(); + } + + Geometry geometry = reader.readValue( geometryNode, Geometry.class ); + return Pair.of( geometry, offset ); } else { String name = geometryNode.asText(); - return Geometry.newBuilder() - .setName( name ) - .build(); + Geometry geometry = Geometry.newBuilder() + .setName( name ) + .build(); + return Pair.of( geometry, null ); + } + } + + /** + * Returns an {@link Offset} from the supplied numerical offset values. + * @param leftOffset the left offset value + * @param rightOffset the right offset value + * @param baselineOffset the baseline offset value + * @return the offset + */ + private Offset getOffset( Double leftOffset, Double rightOffset, Double baselineOffset ) + { + Offset offset = null; + + if ( Objects.nonNull( leftOffset ) + || Objects.nonNull( rightOffset ) + || Objects.nonNull( baselineOffset ) ) + { + double left = 0.0; + double right = 0.0; + double baseline = 0.0; + + if ( Objects.nonNull( leftOffset ) ) + { + left = leftOffset; + } + + if ( Objects.nonNull( rightOffset ) ) + { + right = rightOffset; + } + + if ( Objects.nonNull( baselineOffset ) ) + { + baseline = baselineOffset; + } + + offset = new Offset( left, right, baseline ); } + + return offset; } } diff --git a/wres-config/test/wres/config/yaml/DeclarationFactoryTest.java b/wres-config/test/wres/config/yaml/DeclarationFactoryTest.java index 8c76803a38..cdc2b69fdf 100644 --- a/wres-config/test/wres/config/yaml/DeclarationFactoryTest.java +++ b/wres-config/test/wres/config/yaml/DeclarationFactoryTest.java @@ -57,6 +57,7 @@ import wres.config.yaml.components.LeadTimeInterval; import wres.config.yaml.components.Metric; import wres.config.yaml.components.MetricParameters; +import wres.config.yaml.components.Offset; import wres.config.yaml.components.SamplingUncertainty; import wres.config.yaml.components.Season; import wres.config.yaml.components.Source; @@ -695,11 +696,13 @@ void testDeserializeWithFeaturesAndPredictedNameOnly() throws IOException EvaluationDeclaration actual = DeclarationFactory.from( yaml ); GeometryTuple first = GeometryTuple.newBuilder() - .setRight( Geometry.newBuilder().setName( DRRC2 ) ) + .setRight( Geometry.newBuilder() + .setName( DRRC2 ) ) .build(); GeometryTuple second = GeometryTuple.newBuilder() - .setRight( Geometry.newBuilder().setName( DOLC2 ) ) + .setRight( Geometry.newBuilder() + .setName( DOLC2 ) ) .build(); Set geometries = Set.of( first, second ); @@ -716,6 +719,51 @@ void testDeserializeWithFeaturesAndPredictedNameOnly() throws IOException assertEquals( expected, actual ); } + @Test + void testDeserializeWithParameterizedFeatures() throws IOException + { + String yaml = """ + observed: + - some_file.csv + predicted: + - another_file.csv + features: + - observed: + name: DRRC2 + offset: 0.25 + wkt: POINT (-76.825 39.225, -76.825 39.275 ) + predicted: + name: DRRC2 + offset: 0.35 + """; + + EvaluationDeclaration actual = DeclarationFactory.from( 
yaml ); + + String expectedWkt = "POINT (-76.825 39.225, -76.825 39.275 )"; + GeometryTuple feature = GeometryTuple.newBuilder() + .setLeft( Geometry.newBuilder() + .setName( DRRC2 ) + .setWkt( expectedWkt ) ) + .setRight( Geometry.newBuilder() + .setName( DRRC2 ) ) + .build(); + + Map offsets = Map.of( feature, new Offset( 0.25, 0.35, 0 ) ); + Set geometries = Set.of( feature ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .offsets( offsets ) + .build(); + + EvaluationDeclaration expected = EvaluationDeclarationBuilder.builder() + .left( this.observedDataset ) + .right( this.predictedDataset ) + .features( features ) + .build(); + + assertEquals( expected, actual ); + } + @Test void testDeserializeWithFeatureServiceAndNoGroups() throws IOException { diff --git a/wres-config/test/wres/config/yaml/DeclarationInterpolatorTest.java b/wres-config/test/wres/config/yaml/DeclarationInterpolatorTest.java index 651588c738..a27bf8f885 100644 --- a/wres-config/test/wres/config/yaml/DeclarationInterpolatorTest.java +++ b/wres-config/test/wres/config/yaml/DeclarationInterpolatorTest.java @@ -5,6 +5,7 @@ import java.time.Duration; import java.time.ZoneOffset; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -36,7 +37,9 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Formats; import wres.config.yaml.components.LeadTimeIntervalBuilder; import wres.config.yaml.components.Metric; @@ -862,8 +865,12 @@ void testGetSparseFeaturesToInterpolate() .setRegionName( "A group!" 
).build(); // Create the declaration - Features features = new Features( geometries ); - FeatureGroups featureGroups = new FeatureGroups( Set.of( featureGroup ) ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); + FeatureGroups featureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( Collections.singleton( featureGroup ) ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .features( features ) .featureGroups( featureGroups ) @@ -1868,7 +1875,9 @@ void testDeserializeAndInterpolateWithShortAndLongFeatures() throws IOException geometries.add( first ); geometries.add( second ); geometries.add( third ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); assertEquals( features, actualInterpolated.features() ); } @@ -2075,12 +2084,16 @@ private static EvaluationDeclaration getBoilerplateEvaluationWith( Set geometryTuples = Set.of( one, two, three ); - Features features = new Features( geometryTuples ); + Features features = FeaturesBuilder.builder() + .geometries( geometryTuples ) + .build(); GeometryGroup group = GeometryGroup.newBuilder() .addAllGeometryTuples( geometryTuples ) .setRegionName( "foorbarbaz" ) @@ -2537,7 +2545,9 @@ void testRemoveFeaturesWithoutThresholds() EvaluationDeclaration actual = DeclarationUtilities.removeFeaturesWithoutThresholds( declaration ); - Features expectedFeatures = new Features( Set.of( one ) ); + Features expectedFeatures = FeaturesBuilder.builder() + .geometries( Collections.singleton( one ) ) + .build(); GeometryGroup expectedGroup = GeometryGroup.newBuilder() .addGeometryTuples( one ) .setRegionName( "foorbarbaz" ) @@ -2587,7 +2597,9 @@ void testRemoveFeaturesWithoutThresholdsWhenThresholdsContainSingleDataOrientati .build(); Set geometryTuples = Set.of( one ); - Features features = new Features( geometryTuples ); + Features features = FeaturesBuilder.builder() + .geometries( geometryTuples ) + .build(); GeometryGroup group = GeometryGroup.newBuilder() .addAllGeometryTuples( geometryTuples ) .setRegionName( "foorbarbaz" ) @@ -2604,7 +2616,9 @@ void testRemoveFeaturesWithoutThresholdsWhenThresholdsContainSingleDataOrientati EvaluationDeclaration actual = DeclarationUtilities.removeFeaturesWithoutThresholds( declaration ); - Features expectedFeatures = new Features( Set.of( one ) ); + Features expectedFeatures = FeaturesBuilder.builder() + .geometries( Collections.singleton( one ) ) + .build(); GeometryGroup expectedGroup = GeometryGroup.newBuilder() .addGeometryTuples( one ) .setRegionName( "foorbarbaz" ) diff --git a/wres-config/test/wres/config/yaml/DeclarationValidatorTest.java b/wres-config/test/wres/config/yaml/DeclarationValidatorTest.java index 6c101d6e36..a0ffb25c08 100644 --- a/wres-config/test/wres/config/yaml/DeclarationValidatorTest.java +++ b/wres-config/test/wres/config/yaml/DeclarationValidatorTest.java @@ -38,9 +38,11 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.FeatureService; import wres.config.yaml.components.FeatureServiceGroup; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Formats; import wres.config.yaml.components.GeneratedBaseline; import 
wres.config.yaml.components.GeneratedBaselineBuilder; @@ -1039,7 +1041,9 @@ void testFeaturesIncludeBaselineAndMissingBaselineDatasetResultsInErrors() .setName( "foo" ) ) .build() ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .left( this.defaultDataset ) @@ -1074,7 +1078,9 @@ void testEvaluationIncludesSparseFeaturesFromDifferentFeatureAuthoritiesAndNoFea .setName( "bar" ) ) .build() ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .left( left ) @@ -1113,7 +1119,9 @@ void testCovariatesWithInconsistentFeatureAuthoritiesProducesError() .setName( "bar" ) ) .build() ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); CovariateDataset covariateDataset = new CovariateDataset( covariate, null, null, null, null ); List covariates = List.of( covariateDataset ); @@ -1282,7 +1290,8 @@ void testInvalidNetcdfDeclarationResultsInErrorsAndWarnings() .setNetcdf2( Outputs.Netcdf2Format.getDefaultInstance() ) .setNetcdf( Outputs.NetcdfFormat.getDefaultInstance() ) .build(); - FeatureGroups featureGroups = new FeatureGroups( Set.of() ); + FeatureGroups featureGroups = FeatureGroupsBuilder.builder() + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .left( this.defaultDataset ) .right( this.defaultDataset ) @@ -1327,7 +1336,9 @@ void testInvalidThresholdServiceDeclarationResultsInErrors() .setLeft( Geometry.newBuilder() .setName( "foo" ) ) .build() ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .left( this.defaultDataset ) .right( this.defaultDataset ) @@ -1583,7 +1594,9 @@ void testFeaturefulThresholdsNotCorrelatedWithFeaturesToEvaluateProducesErrors() EvaluationDeclarationBuilder.builder() .left( this.defaultDataset ) .right( this.defaultDataset ) - .features( new Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .thresholds( Set.of( wrappedOne, wrappedTwo, wrappedThree, @@ -2116,7 +2129,9 @@ void testForecastDatasetWithWebSourceDoesNotRequireValidDates() .setName( "foo" ) ) .build() ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .left( this.defaultDataset ) diff --git a/wres-datamodel/test/wres/datamodel/thresholds/ThresholdSlicerTest.java b/wres-datamodel/test/wres/datamodel/thresholds/ThresholdSlicerTest.java index d6ed9776d6..0199a135e5 100644 --- a/wres-datamodel/test/wres/datamodel/thresholds/ThresholdSlicerTest.java +++ b/wres-datamodel/test/wres/datamodel/thresholds/ThresholdSlicerTest.java @@ -22,6 +22,7 @@ import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Metric; import wres.config.yaml.components.MetricBuilder; import 
wres.config.yaml.components.MetricParametersBuilder; @@ -584,7 +585,9 @@ void testGetMetricsAndThresholdsForProcessing() .setRight( twoRight ) .build(); - Features features = new Features( Set.of( singletonOne, singletonTwo ) ); + Features features = FeaturesBuilder.builder() + .geometries( Set.of( singletonOne, singletonTwo ) ) + .build(); EvaluationDeclaration evaluation = EvaluationDeclarationBuilder.builder() .left( dataset ) .right( dataset ) @@ -687,7 +690,9 @@ void testGetMetricsAndThresholdsForProcessingWithMultipleFeaturesPerLeftFeature( .setRight( twoRight ) .build(); - Features features = new Features( Set.of( singletonOne, singletonTwo ) ); + Features features = FeaturesBuilder.builder() + .geometries( Set.of( singletonOne, singletonTwo ) ) + .build(); EvaluationDeclaration evaluation = EvaluationDeclarationBuilder.builder() .left( dataset ) .right( dataset ) diff --git a/wres-io/src/wres/io/project/DatabaseProject.java b/wres-io/src/wres/io/project/DatabaseProject.java index bc3792fb5e..002ddafbb7 100644 --- a/wres-io/src/wres/io/project/DatabaseProject.java +++ b/wres-io/src/wres/io/project/DatabaseProject.java @@ -36,6 +36,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EnsembleFilter; import wres.config.yaml.components.EvaluationDeclaration; +import wres.config.yaml.components.Offset; import wres.config.yaml.components.TimeScale; import wres.config.yaml.components.Variable; import wres.datamodel.space.FeatureTuple; @@ -245,6 +246,12 @@ public Set getFeatures() return this.features; } + @Override + public Map getOffsets() + { + return ProjectUtilities.getOffsets( this.getDeclaration() ); + } + @Override public Set getCovariateFeatures( String variableName ) { diff --git a/wres-io/src/wres/io/project/InMemoryProject.java b/wres-io/src/wres/io/project/InMemoryProject.java index 31c1413165..3907b386f3 100644 --- a/wres-io/src/wres/io/project/InMemoryProject.java +++ b/wres-io/src/wres/io/project/InMemoryProject.java @@ -31,6 +31,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EnsembleFilter; import wres.config.yaml.components.EvaluationDeclaration; +import wres.config.yaml.components.Offset; import wres.config.yaml.components.TimeScale; import wres.config.yaml.components.Variable; import wres.datamodel.space.FeatureTuple; @@ -389,6 +390,12 @@ public boolean isUpscalingLenient( DatasetOrientation orientation ) .rescaleLenience() ); } + @Override + public Map getOffsets() + { + return ProjectUtilities.getOffsets( this.getDeclaration() ); + } + @Override public String toString() { diff --git a/wres-io/src/wres/io/project/Project.java b/wres-io/src/wres/io/project/Project.java index 58b39e59fd..dbc1823746 100755 --- a/wres-io/src/wres/io/project/Project.java +++ b/wres-io/src/wres/io/project/Project.java @@ -2,12 +2,14 @@ import java.time.Duration; import java.time.MonthDay; +import java.util.Map; import java.util.Set; import java.util.SortedSet; import wres.config.yaml.components.Dataset; import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; +import wres.config.yaml.components.Offset; import wres.config.yaml.components.Variable; import wres.datamodel.scale.TimeScaleOuter; import wres.datamodel.space.Feature; @@ -15,6 +17,7 @@ import wres.datamodel.space.FeatureTuple; import wres.io.retrieving.DataAccessException; import wres.datamodel.time.TimeWindowOuter; +import wres.statistics.generated.GeometryTuple; /** *

Wraps an {@link EvaluationDeclaration} and updates it to reflect the information ingested from time-series data @@ -136,6 +139,16 @@ public interface Project SortedSet getEnsembleLabels( DatasetOrientation orientation ); + /** + * Returns the feature-specific offsets that should be applied to the time-series event values. For example, an + * offset may represent a datum adjustment. The offsets are gathered from all contexts, including singleton features + * and feature groups. + * + * @return the offsets + */ + + Map getOffsets(); + /** * @param orientation the orientation of the data source * @return true if the data source uses gridded data, false otherwise diff --git a/wres-io/src/wres/io/project/ProjectUtilities.java b/wres-io/src/wres/io/project/ProjectUtilities.java index 0a0e3fb0e9..2d61d30c6d 100644 --- a/wres-io/src/wres/io/project/ProjectUtilities.java +++ b/wres-io/src/wres/io/project/ProjectUtilities.java @@ -7,6 +7,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -35,7 +36,9 @@ import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.Features; +import wres.config.yaml.components.Offset; import wres.config.yaml.components.SpatialMask; import wres.config.yaml.components.TimeScale; import wres.config.yaml.components.TimeScaleLenience; @@ -948,14 +951,25 @@ static EvaluationDeclaration interpolate( EvaluationDeclaration declaration, .map( FeatureTuple::getGeometryTuple ) .collect( Collectors.toUnmodifiableSet() ); - Features dataFeatures = new Features( unwrappedFeatures ); - FeatureGroups dataFeatureGroups = new FeatureGroups( featureGroups.stream() - .map( FeatureGroup::getGeometryGroup ) - // Non-singletons only - .filter( g -> g.getGeometryTuplesList() - .size() - > 1 ) - .collect( Collectors.toSet() ) ); + // Match and gather any offsets for the declared features using the ingest-augmented features + boolean hasBaseline = DeclarationUtilities.hasBaseline( declaration ); + Map declaredOffsets = ProjectUtilities.getOffsets( declaration ); + Map offsets = + ProjectUtilities.getOffsetsForMatchingFeatures( unwrappedFeatures, + declaredOffsets, + hasBaseline ); + + Features dataFeatures = new Features( unwrappedFeatures, offsets ); + FeatureGroups dataFeatureGroups + = FeatureGroupsBuilder.builder() + .geometryGroups( featureGroups.stream() + .map( FeatureGroup::getGeometryGroup ) + // Non-singletons only + .filter( g -> g.getGeometryTuplesList() + .size() + > 1 ) + .collect( Collectors.toSet() ) ) + .build(); declaration = EvaluationDeclarationBuilder.builder( declaration ) .features( dataFeatures ) .featureGroups( dataFeatureGroups ) @@ -1097,6 +1111,80 @@ static Set covariateFeaturesSelectSomeData( CovariateDataset covariate, return matchingFeatures; } + /** + * Inspects the declaration for feature-specific offsets in all contexts and returns them. 
+ * @param declaration the declaration, not null + * @return the offsets + * @throws NullPointerException if the declaration is null + */ + static Map getOffsets( EvaluationDeclaration declaration ) + { + Objects.requireNonNull( declaration ); + + Map offsets = new HashMap<>(); + + + if ( Objects.nonNull( declaration.features() ) ) + { + offsets.putAll( declaration.features() + .offsets() ); + } + + if ( Objects.nonNull( declaration.featureGroups() ) ) + { + offsets.putAll( declaration.featureGroups() + .offsets() ); + } + + return Collections.unmodifiableMap( offsets ); + } + + /** + * Finds the offsets for features with matching feature names. + * + * @param toMatch the feature names for which offsets should be matched + * @param toSearch the offsets to search + * @param hasBaseline whether the evaluation contains a baseline dataset + * @return the offsets for matching features + */ + private static Map getOffsetsForMatchingFeatures( Set toMatch, + Map toSearch, + boolean hasBaseline ) + { + Map offsets = new HashMap<>(); + + if ( Objects.nonNull( toSearch ) ) + { + for ( GeometryTuple next : toMatch ) + { + Optional> matched = + toSearch.entrySet() + .stream() + .filter( f -> Objects.equals( next.getLeft() + .getName(), f.getKey() + .getLeft() + .getName() ) + && Objects.equals( next.getRight() + .getName(), + f.getKey() + .getRight() + .getName() ) + && ( !hasBaseline + || Objects.equals( next.getBaseline() + .getName(), + f.getKey() + .getBaseline() + .getName() ) ) ) + .findFirst(); + + matched.ifPresent( geometryTupleOffsetEntry + -> offsets.put( next, geometryTupleOffsetEntry.getValue() ) ); + } + } + + return Collections.unmodifiableMap( offsets ); + } + /** * Retrieves the matching features using the inputs. * @param ingestedCovariateFeatures the ingested covariate features to filter @@ -1651,6 +1739,7 @@ private static String getFeatureGroupNameFrom( GeometryGroup declaredGroup, * Searches for a matching feature tuple and throws an exception if more than one is found. 
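A rough sketch of the name-based matching used here, showing why a plain map lookup is not enough once ingest has augmented the declared geometries. It mirrors the case without a baseline dataset. The sketch class, the sample feature names, the baseline geometry added to stand in for ingest augmentation, and the plain Double in place of the Offset component are assumptions for illustration only.

    import java.util.Map;
    import java.util.Optional;

    import wres.statistics.generated.Geometry;
    import wres.statistics.generated.GeometryTuple;

    class OffsetMatchSketch
    {
        public static void main( String[] args )
        {
            // Declared feature: names only
            GeometryTuple declared = GeometryTuple.newBuilder()
                                                  .setLeft( Geometry.newBuilder()
                                                                    .setName( "09165000" ) )
                                                  .setRight( Geometry.newBuilder()
                                                                     .setName( "DRRC2" ) )
                                                  .build();

            // Ingest-augmented feature: same left/right names, but an extra baseline
            // geometry added here purely to make the tuples unequal by value
            GeometryTuple ingested = declared.toBuilder()
                                             .setBaseline( Geometry.newBuilder()
                                                                   .setName( "DRRC2" ) )
                                             .build();

            // Double stands in for the Offset component in this sketch
            Map<GeometryTuple, Double> declaredOffsets = Map.of( declared, 0.5 );

            // A straight key lookup fails because the tuples are no longer equal
            System.out.println( declaredOffsets.get( ingested ) ); // null

            // Matching on the feature names succeeds
            Optional<Double> matched =
                    declaredOffsets.entrySet()
                                   .stream()
                                   .filter( e -> e.getKey().getLeft().getName()
                                                  .equals( ingested.getLeft().getName() )
                                                 && e.getKey().getRight().getName()
                                                     .equals( ingested.getRight().getName() ) )
                                   .map( Map.Entry::getValue )
                                   .findFirst();

            System.out.println( matched.orElse( null ) ); // 0.5
        }
    }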
* @param featureToFind the declared feature to find * @param featuresToSearch the fully elaborated feature tuples to search + * @param nextGroup the next feature group * @return a matching tuple or null if no tuple was found * @throws DeclarationException if more than one matching tuple was found */ diff --git a/wres-io/test/wres/io/project/DatabaseProjectTest.java b/wres-io/test/wres/io/project/DatabaseProjectTest.java index 33aa908ee2..82d50db992 100644 --- a/wres-io/test/wres/io/project/DatabaseProjectTest.java +++ b/wres-io/test/wres/io/project/DatabaseProjectTest.java @@ -44,6 +44,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.datamodel.messages.MessageFactory; import wres.datamodel.space.FeatureGroup; import wres.datamodel.space.Feature; @@ -258,7 +259,9 @@ private DatabaseProject getProject() throws SQLException EvaluationDeclarationBuilder.builder() .left( left ) .right( right ) - .features( new wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); TimeSeries timeSeriesOne = TestData.generateTimeSeriesEnsembleOne(); diff --git a/wres-io/test/wres/io/project/ProjectUtilitiesTest.java b/wres-io/test/wres/io/project/ProjectUtilitiesTest.java index 0c0b276111..8cb5a5ced3 100644 --- a/wres-io/test/wres/io/project/ProjectUtilitiesTest.java +++ b/wres-io/test/wres/io/project/ProjectUtilitiesTest.java @@ -1,5 +1,6 @@ package wres.io.project; +import java.util.Collections; import java.util.Set; import org.junit.jupiter.api.Test; @@ -18,7 +19,8 @@ import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureGroups; -import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeatureGroupsBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.SpatialMask; import wres.config.yaml.components.SpatialMaskBuilder; import wres.config.yaml.components.VariableBuilder; @@ -67,9 +69,14 @@ void testGetFeatureGroups() GeometryGroup geoGroup = GeometryGroup.newBuilder() .addAllGeometryTuples( geometries ) .build(); - FeatureGroups featureGroups = new FeatureGroups( Set.of( geoGroup ) ); + FeatureGroups featureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( Collections.singleton( geoGroup ) ) + .build(); + EvaluationDeclaration evaluation = EvaluationDeclarationBuilder.builder() - .features( new Features( geometries ) ) + .features( FeaturesBuilder.builder() + .geometries( geometries ) + .build() ) .featureGroups( featureGroups ) .build(); diff --git a/wres-io/test/wres/io/retrieving/database/AnalysisRetrieverTest.java b/wres-io/test/wres/io/retrieving/database/AnalysisRetrieverTest.java index 3e303a36d0..c6ecff5d56 100644 --- a/wres-io/test/wres/io/retrieving/database/AnalysisRetrieverTest.java +++ b/wres-io/test/wres/io/retrieving/database/AnalysisRetrieverTest.java @@ -40,6 +40,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.datamodel.time.TimeSeriesMetadata; import wres.datamodel.scale.TimeScaleOuter; import wres.datamodel.time.Event; @@ -521,7 +522,9 
@@ private void addThreeAnalysisTimeSeriesToTheDatabase() throws SQLException EvaluationDeclarationBuilder.builder() .left( left ) .right( right ) - .features( new wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); TimeSeries timeSeriesOne = TestData.generateTimeSeriesDoubleOne( ANALYSIS_START_TIME ); diff --git a/wres-io/test/wres/io/retrieving/database/EnsembleForecastRetrieverTest.java b/wres-io/test/wres/io/retrieving/database/EnsembleForecastRetrieverTest.java index 9ecc6c6a2e..0b608bfe46 100644 --- a/wres-io/test/wres/io/retrieving/database/EnsembleForecastRetrieverTest.java +++ b/wres-io/test/wres/io/retrieving/database/EnsembleForecastRetrieverTest.java @@ -39,6 +39,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.datamodel.time.TimeSeriesMetadata; import wres.datamodel.types.Ensemble; import wres.datamodel.types.Ensemble.Labels; @@ -350,7 +351,9 @@ private void addOneForecastTimeSeriesWithFiveEventsAndThreeMembersToTheDatabase( EvaluationDeclarationBuilder.builder() .left( left ) .right( right ) - .features( new wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); TimeSeries timeSeriesOne = TestData.generateTimeSeriesEnsembleOne(); diff --git a/wres-io/test/wres/io/retrieving/database/EnsembleRetrieverFactoryTest.java b/wres-io/test/wres/io/retrieving/database/EnsembleRetrieverFactoryTest.java index 920ed31ad8..91b707615e 100644 --- a/wres-io/test/wres/io/retrieving/database/EnsembleRetrieverFactoryTest.java +++ b/wres-io/test/wres/io/retrieving/database/EnsembleRetrieverFactoryTest.java @@ -40,6 +40,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Variable; import wres.config.yaml.components.VariableBuilder; import wres.datamodel.time.TimeSeriesMetadata; @@ -479,7 +480,9 @@ private void addTimeSeriesToDatabase() throws SQLException .left( left ) .right( right ) .baseline( baseline ) - .features( new wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); TimeSeries timeSeriesOne = TestData.generateTimeSeriesEnsembleOne(); diff --git a/wres-io/test/wres/io/retrieving/database/ObservationRetrieverTest.java b/wres-io/test/wres/io/retrieving/database/ObservationRetrieverTest.java index 258e010977..6cb5475f37 100644 --- a/wres-io/test/wres/io/retrieving/database/ObservationRetrieverTest.java +++ b/wres-io/test/wres/io/retrieving/database/ObservationRetrieverTest.java @@ -39,6 +39,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.datamodel.time.TimeSeriesMetadata; import wres.datamodel.scale.TimeScaleOuter; import wres.datamodel.time.Event; @@ -406,7 +407,9 @@ private void addAnObservedTimeSeriesWithTenEventsToTheDatabase() throws SQLExcep EvaluationDeclarationBuilder.builder() .left( left ) .right( right ) - .features( new 
wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); TimeSeries timeSeriesOne = TestData.generateTimeSeriesDoubleWithNoReferenceTimes(); diff --git a/wres-io/test/wres/io/retrieving/database/SingleValuedForecastRetrieverTest.java b/wres-io/test/wres/io/retrieving/database/SingleValuedForecastRetrieverTest.java index 4af19f8b78..3d2505c02c 100644 --- a/wres-io/test/wres/io/retrieving/database/SingleValuedForecastRetrieverTest.java +++ b/wres-io/test/wres/io/retrieving/database/SingleValuedForecastRetrieverTest.java @@ -40,6 +40,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.datamodel.time.TimeSeriesMetadata; import wres.datamodel.scale.TimeScaleOuter; import wres.datamodel.time.Event; @@ -403,7 +404,9 @@ private void addTwoForecastTimeSeriesEachWithFiveEventsToTheDatabase() throws SQ EvaluationDeclarationBuilder.builder() .left( left ) .right( right ) - .features( new wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); TimeSeries timeSeriesOne = TestData.generateTimeSeriesDoubleOne( T0 ); diff --git a/wres-io/test/wres/io/retrieving/database/SingleValuedRetrieverFactoryTest.java b/wres-io/test/wres/io/retrieving/database/SingleValuedRetrieverFactoryTest.java index eb31231c22..ace7a35067 100644 --- a/wres-io/test/wres/io/retrieving/database/SingleValuedRetrieverFactoryTest.java +++ b/wres-io/test/wres/io/retrieving/database/SingleValuedRetrieverFactoryTest.java @@ -41,6 +41,7 @@ import wres.config.yaml.components.DatasetOrientation; import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Variable; import wres.config.yaml.components.VariableBuilder; import wres.datamodel.time.TimeSeriesMetadata; @@ -510,7 +511,9 @@ private void addTwoForecastTimeSeriesEachWithFiveEventsToTheDatabase() throws SQ .right( right ) .baseline( baseline ) .covariates( List.of( covariateDataset ) ) - .features( new wres.config.yaml.components.Features( features ) ) + .features( FeaturesBuilder.builder() + .geometries( features ) + .build() ) .build(); LOGGER.debug( "leftData: {}", leftData ); diff --git a/wres-reading/src/wres/reading/wrds/geography/FeatureFiller.java b/wres-reading/src/wres/reading/wrds/geography/FeatureFiller.java index 8b121594fe..f2ec5d1944 100644 --- a/wres-reading/src/wres/reading/wrds/geography/FeatureFiller.java +++ b/wres-reading/src/wres/reading/wrds/geography/FeatureFiller.java @@ -25,8 +25,10 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.FeatureServiceGroup; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.datamodel.space.FeatureTuple; import wres.reading.PreReadException; import wres.reading.ReaderUtilities; @@ -194,14 +196,19 @@ private static EvaluationDeclaration fillFeatures( EvaluationDeclaration evaluat } // No features? 
- if ( filledSingletonFeatures.isEmpty() && filledGroupedFeatures.isEmpty() ) + if ( filledSingletonFeatures.isEmpty() + && filledGroupedFeatures.isEmpty() ) { throw new PreReadException( "No geographic features found to evaluate." ); } // Set the features and feature groups - Features features = new Features( filledSingletonFeatures ); - FeatureGroups featureGroups = new FeatureGroups( filledGroupedFeatures ); + Features features = FeaturesBuilder.builder() + .geometries( filledSingletonFeatures ) + .build(); + FeatureGroups featureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( filledGroupedFeatures ) + .build(); return EvaluationDeclarationBuilder.builder( evaluation ) .features( features ) .featureGroups( featureGroups ) diff --git a/wres-reading/test/wres/reading/ReaderUtilitiesTest.java b/wres-reading/test/wres/reading/ReaderUtilitiesTest.java index 94435ae6ac..09c1cfb5a3 100644 --- a/wres-reading/test/wres/reading/ReaderUtilitiesTest.java +++ b/wres-reading/test/wres/reading/ReaderUtilitiesTest.java @@ -6,6 +6,7 @@ import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Collections; import java.util.HashSet; import java.util.Set; @@ -30,7 +31,9 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureGroups; +import wres.config.yaml.components.FeatureGroupsBuilder; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Metric; import wres.config.yaml.components.MetricBuilder; import wres.config.yaml.components.MetricParametersBuilder; @@ -706,7 +709,9 @@ void testReadThresholdsFromFileSystemAndFillDeclaration() throws IOException .build(); Set geometries = Set.of( first, second, third, fourth, fifth ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); Set metrics = Set.of( new Metric( MetricConstants.MEAN_ABSOLUTE_ERROR, null ) ); @@ -1190,12 +1195,16 @@ void testFillThresholdsRemovesFeaturesWithoutThresholds() throws IOException .build(); Set geometries = Set.of( first, second, third, fourth, fifth, sixth ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); GeometryGroup group = GeometryGroup.newBuilder() .addAllGeometryTuples( geometries ) .setRegionName( "FOO REGION" ) .build(); - FeatureGroups featureGroups = new FeatureGroups( Set.of( group ) ); + FeatureGroups featureGroups = FeatureGroupsBuilder.builder() + .geometryGroups( Collections.singleton( group ) ) + .build(); Set metrics = Set.of( new Metric( MetricConstants.MEAN_ABSOLUTE_ERROR, null ) ); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() @@ -1280,7 +1289,9 @@ void testFillThresholdsUsesHandbook5IdentifiersForNwsFeatureAuthority() .setName( "SMAF1FOO" ) ) .build(); Set geometries = Set.of( first, second, third, fourth, fifth ); - Features features = new Features( geometries ); + Features features = FeaturesBuilder.builder() + .geometries( geometries ) + .build(); Set metrics = Set.of( new Metric( MetricConstants.MEAN_ABSOLUTE_ERROR, null ) ); diff --git a/wres-reading/test/wres/reading/wrds/ahps/WrdsAhpsReaderTest.java b/wres-reading/test/wres/reading/wrds/ahps/WrdsAhpsReaderTest.java index f6e42962ca..a4f63adf4b 100644 --- 
a/wres-reading/test/wres/reading/wrds/ahps/WrdsAhpsReaderTest.java +++ b/wres-reading/test/wres/reading/wrds/ahps/WrdsAhpsReaderTest.java @@ -30,6 +30,7 @@ import wres.config.yaml.components.EvaluationDeclaration; import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.Source; import wres.config.yaml.components.SourceBuilder; import wres.config.yaml.components.SourceInterface; @@ -968,7 +969,9 @@ void testReadReturnsThreeForecastTimeSeriesInOneChunk() GeometryTuple geometryTuple = GeometryTuple.newBuilder() .setRight( geometry ) .build(); - Features features = new Features( Set.of( geometryTuple ) ); + Features features = FeaturesBuilder.builder() + .geometries( Set.of( geometryTuple ) ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .validDates( interval ) @@ -1069,7 +1072,9 @@ void testReadReturnsThreeChunkedObservedTimeSeries() GeometryTuple geometryTuple = GeometryTuple.newBuilder() .setLeft( geometry ) .build(); - Features features = new Features( Set.of( geometryTuple ) ); + Features features = FeaturesBuilder.builder() + .geometries( Set.of( geometryTuple ) ) + .build(); EvaluationDeclaration declaration = EvaluationDeclarationBuilder.builder() .validDates( interval ) diff --git a/wres-reading/test/wres/reading/wrds/geography/FeatureFillerTest.java b/wres-reading/test/wres/reading/wrds/geography/FeatureFillerTest.java index 0d83f78d8d..01a008b6be 100644 --- a/wres-reading/test/wres/reading/wrds/geography/FeatureFillerTest.java +++ b/wres-reading/test/wres/reading/wrds/geography/FeatureFillerTest.java @@ -27,7 +27,7 @@ import wres.config.yaml.components.EvaluationDeclarationBuilder; import wres.config.yaml.components.FeatureAuthority; import wres.config.yaml.components.FeatureServiceGroup; -import wres.config.yaml.components.Features; +import wres.config.yaml.components.FeaturesBuilder; import wres.config.yaml.components.UnitAlias; import wres.statistics.generated.Geometry; import wres.statistics.generated.GeometryGroup; @@ -190,7 +190,7 @@ void testFillOutImplicitFeatureGroupUsingMockedFeatureService() throws URISyntax } @Test - void testFillOutFeaturesUsingResponseFromFileSystem() throws URISyntaxException, IOException + void testFillOutFeaturesUsingResponseFromFileSystem() throws IOException { try ( FileSystem fileSystem = Jimfs.newFileSystem( Configuration.unix() ) ) { @@ -256,7 +256,7 @@ void testFillOutFeaturesUsingResponseFromFileSystem() throws URISyntaxException, .setLeft( Geometry.newBuilder() .setName( "bar" ) ) .setRight( Geometry.newBuilder() - .setName( "baz" ) ) + .setName( "baz" ) ) .build(); Set expected = Set.of( expectedFeature ); @@ -443,10 +443,12 @@ private static EvaluationDeclaration getBoilerplateEvaluationWith( Set Date: Fri, 16 Aug 2024 16:45:54 +0100 Subject: [PATCH 2/6] Fix a missing transitive dependency for a markdown-to-html plugin, #36. 
--- build.gradle | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/build.gradle b/build.gradle index 184a95f387..afaad9713b 100755 --- a/build.gradle +++ b/build.gradle @@ -13,6 +13,14 @@ import java.time.ZoneId /* Used by gradle plugins outside the official gradle plugins repository */ buildscript { + + // Work around for missing transitive dependency for plugin org.kordamp.gradle.markdown + // https://github.com/kordamp/markdown-gradle-plugin/issues/36 + configurations.all { + resolutionStrategy.dependencySubstitution { + substitute module("com.overzealous:remark:1.1.0") using module( 'com.wavefront:remark:2023-07.07' ) because "not available on maven central anymore" + } + } repositories { mavenCentral() } From d869a1b0752db205babb99d86b7ba8d3dfca4276 Mon Sep 17 00:00:00 2001 From: James Brown <64858662+james-d-brown@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:39:56 +0100 Subject: [PATCH 3/6] Remove a redundant directory, #36. --- build.gradle | 4 ++-- .../config/project/verificationProjectTest/.gitignore | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) delete mode 100644 testoutput/wres/configcontrol/config/project/verificationProjectTest/.gitignore diff --git a/build.gradle b/build.gradle index afaad9713b..ee7a8e4f82 100755 --- a/build.gradle +++ b/build.gradle @@ -14,11 +14,11 @@ import java.time.ZoneId /* Used by gradle plugins outside the official gradle plugins repository */ buildscript { - // Work around for missing transitive dependency for plugin org.kordamp.gradle.markdown + // Workaround for missing transitive dependency for plugin org.kordamp.gradle.markdown // https://github.com/kordamp/markdown-gradle-plugin/issues/36 configurations.all { resolutionStrategy.dependencySubstitution { - substitute module("com.overzealous:remark:1.1.0") using module( 'com.wavefront:remark:2023-07.07' ) because "not available on maven central anymore" + substitute module( 'com.overzealous:remark:1.1.0' ) using module( 'com.wavefront:remark:2023-07.07' ) because "not available on maven central anymore" } } repositories { diff --git a/testoutput/wres/configcontrol/config/project/verificationProjectTest/.gitignore b/testoutput/wres/configcontrol/config/project/verificationProjectTest/.gitignore deleted file mode 100644 index 72e8ffc0db..0000000000 --- a/testoutput/wres/configcontrol/config/project/verificationProjectTest/.gitignore +++ /dev/null @@ -1 +0,0 @@ -* From 1243b25e7fef47f7efc42302128301bec4ba7312 Mon Sep 17 00:00:00 2001 From: James Brown <64858662+james-d-brown@users.noreply.github.com> Date: Tue, 20 Aug 2024 11:50:01 +0100 Subject: [PATCH 4/6] Fix some documentation, #36. --- src/wres/pipeline/EvaluationUtilities.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/wres/pipeline/EvaluationUtilities.java b/src/wres/pipeline/EvaluationUtilities.java index 06ccbed2f1..ada9c12953 100644 --- a/src/wres/pipeline/EvaluationUtilities.java +++ b/src/wres/pipeline/EvaluationUtilities.java @@ -1732,7 +1732,7 @@ private static BinaryOperator getMetadataAdapterForThresholds() wres.statistics.generated.Pool latestPool = isBaselinePool ? 
latest.getBaselinePool() : latest.getPool(); - // Clear the threshold values unless they are equal across statistics + // Remove the threshold values unless they are equal across statistics if ( existingPool.hasEventThreshold() && !Objects.equals( existingPool.getEventThreshold() .getLeftThresholdValue(), From af45e81e8734fa0911b97a5a9afd1955bc936204 Mon Sep 17 00:00:00 2001 From: Arvin Esmailzadeh Date: Wed, 28 Aug 2024 14:43:37 +0000 Subject: [PATCH 5/6] Deployment 6.25 yml files --- compose-entry.yml | 14 +++++++------- compose-workers.yml | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/compose-entry.yml b/compose-entry.yml index fc4f4c6f6c..5fc1791528 100644 --- a/compose-entry.yml +++ b/compose-entry.yml @@ -16,7 +16,7 @@ volumes: device: "${NFS_HOME_DIR_DEVICE}" services: persister: - image: "${DOCKER_REGISTRY}/wres/wres-redis:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-redis:20240821-1d5b305" restart: always volumes: # For the job data @@ -33,7 +33,7 @@ services: tasker: ports: - "443:8443" - image: "${DOCKER_REGISTRY}/wres/wres-tasker:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-tasker:20240821-1d5b305" restart: always depends_on: broker: @@ -68,7 +68,7 @@ services: ports: - "5671:5671" - "15671:15671" - image: "${DOCKER_REGISTRY}/wres/wres-broker:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-broker:20240821-1d5b305" restart: always volumes: # For certificates and keys that correspond to them: @@ -87,7 +87,7 @@ services: healthcheck: start_period: 5m worker: - image: "${DOCKER_REGISTRY}/wres/wres-worker:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-worker:20240821-1d5b305" restart: always depends_on: broker: @@ -137,7 +137,7 @@ services: - 5673 ports: - 15673:15673 - image: "${DOCKER_REGISTRY}/wres/wres-eventsbroker:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-eventsbroker:20240821-1d5b305" restart: always volumes: # For heap-dump files @@ -159,7 +159,7 @@ services: depends_on: eventsbroker: condition: service_started - image: "${DOCKER_REGISTRY}/wres/wres-graphics:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-graphics:20240821-1d5b305" restart: always volumes: # To write graphics outputs @@ -181,7 +181,7 @@ services: depends_on: eventsbroker: condition: service_started - image: "${DOCKER_REGISTRY}/wres/wres-writing:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-writing:20240821-1d5b305" restart: always volumes: # To write numeric outputs diff --git a/compose-workers.yml b/compose-workers.yml index 8183a0eba2..c34a297065 100644 --- a/compose-workers.yml +++ b/compose-workers.yml @@ -16,7 +16,7 @@ volumes: device: "${NFS_HOME_DIR_DEVICE}" services: worker: - image: "${DOCKER_REGISTRY}/wres/wres-worker:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-worker:20240821-1d5b305" restart: always depends_on: eventsbroker: @@ -64,7 +64,7 @@ services: - 5673 ports: - 15673:15673 - image: "${DOCKER_REGISTRY}/wres/wres-eventsbroker:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-eventsbroker:20240821-1d5b305" restart: always volumes: # For heap-dump files @@ -86,7 +86,7 @@ services: depends_on: eventsbroker: condition: service_started - image: "${DOCKER_REGISTRY}/wres/wres-graphics:20240701-5b07a6e" + image: "${DOCKER_REGISTRY}/wres/wres-graphics:20240821-1d5b305" restart: always volumes: # To write graphics outputs @@ -108,7 +108,7 @@ services: depends_on: eventsbroker: condition: service_started - image: "${DOCKER_REGISTRY}/wres/wres-writing:20240701-5b07a6e" + image: 
"${DOCKER_REGISTRY}/wres/wres-writing:20240821-1d5b305" restart: always volumes: # To write numeric outputs From 27f236de389cda6f963c2d24296d1081866fd81e Mon Sep 17 00:00:00 2001 From: James Brown <64858662+james-d-brown@users.noreply.github.com> Date: Thu, 29 Aug 2024 12:38:53 +0100 Subject: [PATCH 6/6] Simplify cross-pairing and improve documentation, #36. --- wres-config/nonsrc/schema.yml | 17 +++--- .../datamodel/time/TimeSeriesCrossPairer.java | 54 +++++++++---------- .../time/TimeSeriesCrossPairerTest.java | 44 +++++++++++---- 3 files changed, 70 insertions(+), 45 deletions(-) diff --git a/wres-config/nonsrc/schema.yml b/wres-config/nonsrc/schema.yml index 8232558304..6f7da2758b 100644 --- a/wres-config/nonsrc/schema.yml +++ b/wres-config/nonsrc/schema.yml @@ -1489,13 +1489,16 @@ definitions: is present, evaluates common events by reference time and valid time. If absent, retains all events of each type. With fuzzy matching, each time- series is matched with its nearest, corresponding, time-series according to - the total duration between all reference times of a corresponding type. In - other words, if there is an exact match, that will be used, else the time- - series whose reference times are nearest overall. Once a time-series has - been matched, it cannot be re-used. Always uses exact matching for valid - times. The scope of the cross-pairing always includes the predicted and - baseline datasets, where defined, and may be further controlled using the - 'scope' parameter." + the total duration between all reference times across the candidate time- + series. In other words, if there is an exact match, that will be used, else + the time-series whose reference times are nearest overall. With exact + matching, both the type of reference time and the time itself must + correspond. With fuzzy matching, all types of reference time are considered + equal and hence used to calculate the total duration between all reference + times of the candidate series. Once a time-series has been matched, it + cannot be re-used. Always uses exact matching for valid times. The scope of + the cross-pairing always includes the predicted and baseline datasets, where + defined, and may be further controlled using the 'scope' parameter." type: string enum: - exact diff --git a/wres-datamodel/src/wres/datamodel/time/TimeSeriesCrossPairer.java b/wres-datamodel/src/wres/datamodel/time/TimeSeriesCrossPairer.java index 5091cddbbd..6ee5a37910 100644 --- a/wres-datamodel/src/wres/datamodel/time/TimeSeriesCrossPairer.java +++ b/wres-datamodel/src/wres/datamodel/time/TimeSeriesCrossPairer.java @@ -359,9 +359,31 @@ private Duration getTotalDurationBetweenCommonTimeTypes( TimeSeries

fi Set common = new HashSet<>( firstTimes.keySet() ); common.retainAll( secondTimes.keySet() ); - // Filter non-matching reference time types - if ( method != CrossPairMethod.FUZZY ) + // For exact matching, the reference time types must match + if ( method == CrossPairMethod.EXACT ) { + if ( common.isEmpty() ) + { + throw new PairingException( "Encountered an error while inspecting time-series to cross-pair. " + + "Attempted to calculate the total duration between the commonly typed " + + "reference times of two time-series, but no commonly typed reference " + + "times were discovered, which is not allowed. For lenient cross-pairing " + + "that considers all types of reference time equivalent, declare the " + + "'fuzzy' cross-pairing method instead of 'exact'. The first time-series " + + "was: " + + first.getMetadata() + + ". The second time-series was: " + + second.getMetadata() + + ". The first time-series had reference time types of: " + + first.getReferenceTimes() + .keySet() + + ". The second time-series had reference time types of: " + + second.getReferenceTimes() + .keySet() + + "." ); + } + + // Filter non-matching reference time types firstTimes = firstTimes.entrySet() .stream() .filter( e -> common.contains( e.getKey() ) ) @@ -372,36 +394,10 @@ private Duration getTotalDurationBetweenCommonTimeTypes( TimeSeries

fi .collect( Collectors.toMap( Map.Entry::getKey, Map.Entry::getValue ) ); } - if ( firstTimes.isEmpty() || secondTimes.isEmpty() ) - { - String append = ""; - if ( method != CrossPairMethod.FUZZY ) - { - append = "For lenient cross-pairing that considers all types of reference time equivalent, declare the " - + "'fuzzy' cross-pairing method. "; - } - - throw new PairingException( "Encountered an error while inspecting time-series to cross-pair. Attempted to " - + "calculate the total duration between the commonly typed " - + "reference times of two time-series, but no commonly typed reference times " - + "were discovered, which is not allowed. " - + append - + "The first time-series was: " - + first.getMetadata() - + ". The second time-series was: " - + second.getMetadata() - + ". The first time-series had reference time types of: " - + first.getReferenceTimes() - .keySet() - + ". The second time-series had reference time types of: " - + second.getReferenceTimes() - .keySet() - + "." ); - } - // The neutral difference Duration returnMe = Duration.ZERO; + // Iterate through the differences and sum them for ( Instant firstInstant : firstTimes.values() ) { for ( Instant secondInstant : secondTimes.values() ) diff --git a/wres-datamodel/test/wres/datamodel/time/TimeSeriesCrossPairerTest.java b/wres-datamodel/test/wres/datamodel/time/TimeSeriesCrossPairerTest.java index 23b4dfa9ea..2c6c420f4c 100644 --- a/wres-datamodel/test/wres/datamodel/time/TimeSeriesCrossPairerTest.java +++ b/wres-datamodel/test/wres/datamodel/time/TimeSeriesCrossPairerTest.java @@ -9,6 +9,7 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -57,7 +58,7 @@ void runBeforeEachTest() } @Test - void testCrossPairTwoTimeSeriesWithEqualReferenceTimesThatEachAppearTwice() + void testCrossPairTwoTimeSeriesWithEqualReferenceTimesThatEachAppearTwiceAndFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); Event> second = Event.of( SECOND, Pair.of( 2, 2 ) ); @@ -120,7 +121,7 @@ void testCrossPairTwoTimeSeriesWithEqualReferenceTimesThatEachAppearTwice() } @Test - void testCrossPairTimeSeriesWithSomeEqualReferenceTimes() + void testCrossPairTimeSeriesWithSomeEqualReferenceTimesOfDifferentTypesAndFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -139,7 +140,7 @@ void testCrossPairTimeSeriesWithSomeEqualReferenceTimes() Event> second = Event.of( SECOND, Pair.of( 2, 2 ) ); TimeSeriesMetadata secondMetadata = - TimeSeriesMetadata.of( Collections.singletonMap( ReferenceTimeType.T0, + TimeSeriesMetadata.of( Collections.singletonMap( ReferenceTimeType.ISSUED_TIME, FIRST ), TimeScaleOuter.of(), CHICKENS, @@ -181,7 +182,7 @@ void testCrossPairTimeSeriesWithSomeEqualReferenceTimes() } @Test - void testCrossPairTimeSeriesWithNoEqualReferenceTimesOrValidTimes() + void testCrossPairTimeSeriesWithNoEqualReferenceTimesOrValidTimesWhenFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -222,7 +223,7 @@ void testCrossPairTimeSeriesWithNoEqualReferenceTimesOrValidTimes() } @Test - void testCrossPairTwoTimeSeriesWithEqualReferenceTimesAndNoEqualValidTimes() + void testCrossPairTwoTimeSeriesWithEqualReferenceTimesAndNoEqualValidTimesWhenFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -263,7 +264,7 @@ void 
testCrossPairTwoTimeSeriesWithEqualReferenceTimesAndNoEqualValidTimes() } @Test - void testCrossPairTimeSeriesWithNoEqualReferenceTimesAndSomeEqualValidTimes() + void testCrossPairTimeSeriesWithNoEqualReferenceTimesAndSomeEqualValidTimesWhenFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -353,7 +354,7 @@ void testCrossPairTimeSeriesWithNoEqualReferenceTimesAndSomeEqualValidTimesWhenE } @Test - void testCrossPairTwoTimeSeriesWithNoReferenceTimes() + void testCrossPairTwoTimeSeriesWithNoReferenceTimesAndFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -391,7 +392,7 @@ void testCrossPairTwoTimeSeriesWithNoReferenceTimes() } @Test - void testCrossPairTimeSeriesWithSomeNearbyReferenceTimes() + void testCrossPairTimeSeriesWithSomeNearbyReferenceTimesAndFuzzyMatching() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -489,7 +490,7 @@ void testCrossPairTimeSeriesWithSomeNearbyReferenceTimes() } @Test - void testCrossPairTimeSeriesWithNoEqualReferenceTimeTypes() + void testCrossPairTimeSeriesWithNoEqualReferenceTimeTypesThrowsPairingException() { Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); @@ -535,6 +536,31 @@ void testCrossPairTimeSeriesWithNoEqualReferenceTimeTypes() .contains( "no commonly typed reference times" ) ); } + @Test + void testCrossPairWithEmptyBaselineProducesEmptyCrossPairsWhenFuzzyMatching() + { + Event> first = Event.of( FIRST, Pair.of( 1, 1 ) ); + + TimeSeriesMetadata firstMetadata = + TimeSeriesMetadata.of( Collections.singletonMap( ReferenceTimeType.T0, + ZEROTH ), + TimeScaleOuter.of(), + CHICKENS, + GEORGIA, + KG_H ); + + TimeSeries> firstSeries = + new Builder>().setMetadata( firstMetadata ) + .addEvent( first ) + .build(); + + CrossPairs, Pair> cp = this.instance.apply( List.of( firstSeries ), + List.of() ); + + assertAll( () -> assertTrue( cp.getFirstPairs().isEmpty() ), + () -> assertTrue( cp.getSecondPairs().isEmpty() ) ); + } + @Test void testCrossPairProducesSymmetricallyShapedPairs() {