From cfbb297c1253c3859148ba908310dc0b0034454a Mon Sep 17 00:00:00 2001 From: Gabriele Bozzola Date: Tue, 26 Mar 2024 08:32:45 -0700 Subject: [PATCH] Use ClimaUtilities --- NEWS.md | 3 + Project.toml | 8 +- docs/Manifest.toml | 47 +- docs/Project.toml | 3 +- docs/src/APIs/shared_utilities.md | 11 - .../standalone/Bucket/bucket_tutorial.jl | 3 +- .../tutorials/standalone/Soil/layered_soil.jl | 4 +- experiments/Manifest.toml | 126 ++- experiments/Project.toml | 2 + .../Bucket/global_bucket_function.jl | 4 +- .../Bucket/global_bucket_staticmap.jl | 6 +- .../Bucket/global_bucket_temporalmap.jl | 10 +- .../standalone/Soil/richards_runoff.jl | 6 +- lib/ClimaLandSimulations/Project.toml | 1 + src/ClimaLand.jl | 17 +- src/shared_utilities/FileReader.jl | 549 -------------- src/shared_utilities/Regridder.jl | 353 --------- src/shared_utilities/SpaceVaryingInputs.jl | 165 ---- src/shared_utilities/TimeVaryingInputs.jl | 99 --- .../analytic_time_varying_input.jl | 15 - src/shared_utilities/drivers.jl | 2 + src/shared_utilities/general_utils.jl | 45 -- .../interpolating_time_varying_input0d.jl | 160 ---- .../interpolating_time_varying_inputs.jl | 25 - src/standalone/Bucket/Bucket.jl | 100 ++- src/standalone/Vegetation/PlantHydraulics.jl | 5 +- test/Project.toml | 5 +- test/runtests.jl | 9 - test/shared_utilities/drivers.jl | 7 +- test/shared_utilities/file_reader.jl | 716 ------------------ test/shared_utilities/space_varying_inputs.jl | 79 -- test/shared_utilities/time_varying_inputs.jl | 104 --- test/shared_utilities/utilities.jl | 10 - test/standalone/Bucket/albedo_types.jl | 147 ++-- test/standalone/Soil/runoff.jl | 8 +- test/standalone/Vegetation/canopy_model.jl | 2 +- 36 files changed, 231 insertions(+), 2625 deletions(-) delete mode 100644 src/shared_utilities/FileReader.jl delete mode 100644 src/shared_utilities/Regridder.jl delete mode 100644 src/shared_utilities/SpaceVaryingInputs.jl delete mode 100644 src/shared_utilities/TimeVaryingInputs.jl delete mode 100644 
src/shared_utilities/analytic_time_varying_input.jl delete mode 100644 src/shared_utilities/general_utils.jl delete mode 100644 src/shared_utilities/interpolating_time_varying_input0d.jl delete mode 100644 src/shared_utilities/interpolating_time_varying_inputs.jl delete mode 100644 test/shared_utilities/file_reader.jl delete mode 100644 test/shared_utilities/space_varying_inputs.jl delete mode 100644 test/shared_utilities/time_varying_inputs.jl diff --git a/NEWS.md b/NEWS.md index ac175e3459..f5173054cf 100644 --- a/NEWS.md +++ b/NEWS.md @@ -4,6 +4,9 @@ ClimaLand.jl Release Notes main -------- +- ![][badge-✨feature] Use [ClimaUtilities](https://github.com/CliMA/ClimaUtilities.jl) for `Space` + and `Time` `VaryingInputs`. This adds support to non-conservative MPI/GPU compatible regridding of NetCDF input. PR [#560](https://github.com/CliMA/ClimaLand.jl/pull/560) + v0.11.1 ------- - ![][badge-✨feature] Add option to profile albedo job. PR diff --git a/Project.toml b/Project.toml index cba5b934a4..355492d09a 100644 --- a/Project.toml +++ b/Project.toml @@ -6,18 +6,19 @@ version = "0.11.1" [deps] Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" ArtifactWrappers = "a14bc488-3040-4b00-9dc1-f6467924858a" -CFTime = "179af706-886a-5703-950a-314cd64e0468" CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ClimaComms = "3a4d1b5c-c61d-41fd-a00a-5873ba7a1b0d" ClimaCore = "d414da3d-4745-48bb-8d80-42e94e092884" ClimaCoreTempestRemap = "d934ef94-cdd4-4710-83d6-720549644b70" +ClimaUtilities = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3" Insolation = "e98cc03f-d57e-4e3c-b70c-8d51efe9e0d8" +Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" IntervalSets = "8197267c-284f-5f27-9208-e0e47529a953" 
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab" @@ -37,19 +38,20 @@ CreateParametersExt = "ClimaParams" [compat] Adapt = "3, 4" ArtifactWrappers = "0.2" -CFTime = "0.1" -ClimaParams = "0.10.2" CSV = "0.10" CUDA = "5" ClimaComms = "0.5.6" ClimaCore = "0.13.2" ClimaCoreTempestRemap = "0.3" +ClimaParams = "0.10.2" +ClimaUtilities = "0.1.2" DataFrames = "1" Dates = "1" DocStringExtensions = "0.8, 0.9" Flux = "0.14" HTTP = "1.10" Insolation = "0.9.2" +Interpolations = "0.15" IntervalSets = "0.5, 0.6, 0.7" LinearAlgebra = "1" NCDatasets = "0.11, 0.12, 0.13, 0.14" diff --git a/docs/Manifest.toml b/docs/Manifest.toml index c406de75a9..bdc5d2cd90 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -2,7 +2,7 @@ julia_version = "1.10.2" manifest_format = "2.0" -project_hash = "5f713b750235cf07465e21e92a6bd2f152ad3358" +project_hash = "36bd85e1aba82f32f7f1a0894fd2191ef63bed32" [[deps.ADTypes]] git-tree-sha1 = "016833eb52ba2d6bea9fcb50ca295980e728ee24" @@ -267,9 +267,9 @@ weakdeps = ["SparseArrays"] [[deps.ClimaComms]] deps = ["CUDA", "MPI"] -git-tree-sha1 = "f0350e34c91c8f3b5a11b5e39990439303d727b1" +git-tree-sha1 = "ef5d206be51fdf62cd0cbd63058e237128652cf7" uuid = "3a4d1b5c-c61d-41fd-a00a-5873ba7a1b0d" -version = "0.5.7" +version = "0.5.8" [[deps.ClimaCore]] deps = ["Adapt", "BandedMatrices", "BlockArrays", "CUDA", "ClimaComms", "CubedSphere", "DataStructures", "DocStringExtensions", "ForwardDiff", "GaussQuadrature", "GilbertCurves", "HDF5", "InteractiveUtils", "IntervalSets", "KrylovKit", "LinearAlgebra", "PkgVersion", "RecursiveArrayTools", "RootSolvers", "SparseArrays", "Static", "StaticArrays", "Statistics", "Unrolled"] @@ -288,7 +288,7 @@ uuid = "d934ef94-cdd4-4710-83d6-720549644b70" version = "0.3.14" [[deps.ClimaLand]] -deps = ["Adapt", "ArtifactWrappers", "CFTime", "CSV", "CUDA", "ClimaComms", "ClimaCore", "ClimaCoreTempestRemap", "DataFrames", "Dates", "DocStringExtensions", "Flux", "HTTP", 
"Insolation", "IntervalSets", "LinearAlgebra", "NCDatasets", "SciMLBase", "StaticArrays", "StatsBase", "SurfaceFluxes", "Thermodynamics", "cuDNN"] +deps = ["Adapt", "ArtifactWrappers", "CSV", "CUDA", "ClimaComms", "ClimaCore", "ClimaCoreTempestRemap", "ClimaUtilities", "DataFrames", "Dates", "DocStringExtensions", "Flux", "HTTP", "Insolation", "Interpolations", "IntervalSets", "LinearAlgebra", "NCDatasets", "SciMLBase", "StaticArrays", "StatsBase", "SurfaceFluxes", "Thermodynamics", "cuDNN"] path = ".." uuid = "08f4d4ce-cf43-44bb-ad95-9d2d5f413532" version = "0.11.1" @@ -309,6 +309,23 @@ git-tree-sha1 = "9c203f39784c968700c55f555754a7771b3410df" uuid = "595c0a79-7f3d-439a-bc5a-b232dc3bde79" version = "0.7.19" +[[deps.ClimaUtilities]] +deps = ["Artifacts", "CFTime", "Dates"] +git-tree-sha1 = "2b42b44a95245e2920bbb3ec4b5ca643a5f13b9a" +uuid = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" +version = "0.1.2" +weakdeps = ["Adapt", "ClimaComms", "ClimaCore", "ClimaCoreTempestRemap", "Interpolations", "NCDatasets"] + + [deps.ClimaUtilities.extensions] + ClimaArtifactsExt = ["ClimaComms"] + DataHandlingExt = ["ClimaCore", "NCDatasets"] + InterpolationsRegridderExt = ["Interpolations", "ClimaCore"] + NCFileReaderExt = "NCDatasets" + SpaceVaryingInputsExt = ["ClimaCore", "NCDatasets"] + TempestRegridderExt = "ClimaCoreTempestRemap" + TimeVaryingInputs0DExt = "ClimaCore" + TimeVaryingInputsExt = ["ClimaCore", "NCDatasets"] + [[deps.CloseOpenIntervals]] deps = ["Static", "StaticArrayInterface"] git-tree-sha1 = "70232f82ffaab9dc52585e0dd043b5e0c6b714f1" @@ -725,9 +742,9 @@ uuid = "a3f928ae-7b40-5064-980b-68af3947d34b" version = "2.13.93+0" [[deps.Format]] -git-tree-sha1 = "f3cf88025f6d03c194d73f5d13fee9004a108329" +git-tree-sha1 = "9c68794ef81b08086aeb32eeaf33531668d5f5fc" uuid = "1fa38f19-a742-5d3f-a2b9-30dd87b9d5f8" -version = "1.3.6" +version = "1.3.7" [[deps.Formatting]] deps = ["Logging", "Printf"] @@ -889,9 +906,9 @@ version = "1.14.3+1" [[deps.HTTP]] deps = ["Base64", 
"CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"] -git-tree-sha1 = "995f762e0182ebc50548c434c171a5bb6635f8e4" +git-tree-sha1 = "8e59b47b9dc525b70550ca082ce85bcd7f5477cd" uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" -version = "1.10.4" +version = "1.10.5" [[deps.HarfBuzz_jll]] deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Pkg"] @@ -1745,9 +1762,9 @@ version = "0.6.12" [[deps.RecursiveArrayTools]] deps = ["Adapt", "ArrayInterface", "DocStringExtensions", "GPUArraysCore", "IteratorInterfaceExtensions", "LinearAlgebra", "RecipesBase", "SparseArrays", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface", "Tables"] -git-tree-sha1 = "a94d22ca9ad49a7a169ecbc5419c59b9793937cc" +git-tree-sha1 = "d8f131090f2e44b145084928856a561c83f43b27" uuid = "731186ca-8d62-57ce-b412-fbd966d074cd" -version = "3.12.0" +version = "3.13.0" [deps.RecursiveArrayTools.extensions] RecursiveArrayToolsFastBroadcastExt = "FastBroadcast" @@ -1813,9 +1830,9 @@ version = "0.1.0" [[deps.SciMLBase]] deps = ["ADTypes", "ArrayInterface", "CommonSolve", "ConstructionBase", "Distributed", "DocStringExtensions", "EnumX", "FunctionWrappersWrappers", "IteratorInterfaceExtensions", "LinearAlgebra", "Logging", "Markdown", "PrecompileTools", "Preferences", "Printf", "RecipesBase", "RecursiveArrayTools", "Reexport", "RuntimeGeneratedFunctions", "SciMLOperators", "SciMLStructures", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface", "Tables"] -git-tree-sha1 = "3daaea955c0905200943175637f184a968574a2d" +git-tree-sha1 = "d15c65e25615272e1b1c5edb1d307484c7942824" uuid = "0bca4576-84f4-4d90-8ffe-ffa030f20462" -version = "2.30.3" +version = "2.31.0" [deps.SciMLBase.extensions] SciMLBaseChainRulesCoreExt = "ChainRulesCore" @@ -2025,10 +2042,10 @@ weakdeps = 
["ClimaParams"] CreateParametersExt = "ClimaParams" [[deps.SymbolicIndexingInterface]] -deps = ["MacroTools", "RuntimeGeneratedFunctions"] -git-tree-sha1 = "f7b1fc9fc2bc938436b7684c243be7d317919056" +deps = ["ArrayInterface", "MacroTools", "RuntimeGeneratedFunctions", "StaticArraysCore"] +git-tree-sha1 = "dd94edee1542e9da422cb2f12494ef09ea823e48" uuid = "2efcf032-c050-4f8e-a9bb-153293bab1f5" -version = "0.3.11" +version = "0.3.13" [[deps.TOML]] deps = ["Dates"] diff --git a/docs/Project.toml b/docs/Project.toml index 3b654c5b74..57653da130 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,10 +1,11 @@ [deps] ArtifactWrappers = "a14bc488-3040-4b00-9dc1-f6467924858a" -ClimaParams = "5c42b081-d73a-476f-9059-fd94b934656c" CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" ClimaCore = "d414da3d-4745-48bb-8d80-42e94e092884" ClimaLand = "08f4d4ce-cf43-44bb-ad95-9d2d5f413532" +ClimaParams = "5c42b081-d73a-476f-9059-fd94b934656c" ClimaTimeSteppers = "595c0a79-7f3d-439a-bc5a-b232dc3bde79" +ClimaUtilities = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" DiffEqBase = "2b5f629d-d688-5b77-993f-72d75c75574e" diff --git a/docs/src/APIs/shared_utilities.md b/docs/src/APIs/shared_utilities.md index 5b763c9dbb..eb748711b0 100644 --- a/docs/src/APIs/shared_utilities.md +++ b/docs/src/APIs/shared_utilities.md @@ -85,14 +85,3 @@ ClimaLand.surface_resistance ClimaLand.surface_specific_humidity ClimaLand.make_update_drivers ``` - -## TimeVaryingInput -```@docs -ClimaLand.TimeVaryingInput -ClimaLand.evaluate! 
-``` - -## SpaceVaryingInput -```@docs -ClimaLand.SpaceVaryingInput -``` diff --git a/docs/tutorials/standalone/Bucket/bucket_tutorial.jl b/docs/tutorials/standalone/Bucket/bucket_tutorial.jl index eb1845d6e2..38df1b97c5 100644 --- a/docs/tutorials/standalone/Bucket/bucket_tutorial.jl +++ b/docs/tutorials/standalone/Bucket/bucket_tutorial.jl @@ -151,7 +151,8 @@ using ClimaLand: make_set_initial_cache, PrescribedAtmosphere, PrescribedRadiativeFluxes -using ClimaLand.TimeVaryingInputs +using ClimaUtilities.TimeVaryingInputs: TimeVaryingInput + # We also want to plot the solution using Plots diff --git a/docs/tutorials/standalone/Soil/layered_soil.jl b/docs/tutorials/standalone/Soil/layered_soil.jl index 282083f4a2..40945f9169 100644 --- a/docs/tutorials/standalone/Soil/layered_soil.jl +++ b/docs/tutorials/standalone/Soil/layered_soil.jl @@ -1,4 +1,4 @@ -# This shows how to run single colum soil model, in standalone mode +# This shows how to run single column soil model, in standalone mode # with spatially varying properties. We are mimicking the experiment # carried out in Huang et. al. # Can. J. Soil Sci. (2011) 91: 169183 doi:10.4141/CJSS09118, @@ -9,6 +9,8 @@ # publication. 
using Plots +import ClimaUtilities.SpaceVaryingInputs: SpaceVaryingInput +import NCDatasets import SciMLBase import ClimaTimeSteppers as CTS using ClimaCore diff --git a/experiments/Manifest.toml b/experiments/Manifest.toml index 808418c1b0..0ec79b836d 100644 --- a/experiments/Manifest.toml +++ b/experiments/Manifest.toml @@ -2,7 +2,7 @@ julia_version = "1.10.2" manifest_format = "2.0" -project_hash = "960bdc0a1ed4b3f2aee66ed2e0c02065769614a9" +project_hash = "4616031e611fded1c2ac1e5271a5018186edc260" [[deps.ADTypes]] git-tree-sha1 = "016833eb52ba2d6bea9fcb50ca295980e728ee24" @@ -191,12 +191,6 @@ git-tree-sha1 = "3c62e3006c23082d2ae524b468298d27788a8502" uuid = "8e7c35d0-a365-5155-bbbb-fb81a777f24e" version = "0.16.40" -[[deps.Blosc_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Lz4_jll", "Zlib_jll", "Zstd_jll"] -git-tree-sha1 = "19b98ee7e3db3b4eff74c5c9c72bf32144e24f10" -uuid = "0b7ba130-8d10-5ba8-a3d6-c5182647fed9" -version = "1.21.5+0" - [[deps.Bzip2_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] git-tree-sha1 = "9e2a6b69137e6969bab0152632dcb3bc108c8bdd" @@ -318,9 +312,9 @@ weakdeps = ["SparseArrays"] [[deps.ClimaComms]] deps = ["CUDA", "MPI"] -git-tree-sha1 = "f0350e34c91c8f3b5a11b5e39990439303d727b1" +git-tree-sha1 = "ef5d206be51fdf62cd0cbd63058e237128652cf7" uuid = "3a4d1b5c-c61d-41fd-a00a-5873ba7a1b0d" -version = "0.5.7" +version = "0.5.8" [[deps.ClimaCore]] deps = ["Adapt", "BandedMatrices", "BlockArrays", "CUDA", "ClimaComms", "CubedSphere", "DataStructures", "DocStringExtensions", "ForwardDiff", "GaussQuadrature", "GilbertCurves", "HDF5", "InteractiveUtils", "IntervalSets", "KrylovKit", "LinearAlgebra", "PkgVersion", "RecursiveArrayTools", "RootSolvers", "SparseArrays", "Static", "StaticArrays", "Statistics", "Unrolled"] @@ -339,7 +333,7 @@ uuid = "d934ef94-cdd4-4710-83d6-720549644b70" version = "0.3.14" [[deps.ClimaLand]] -deps = ["Adapt", "ArtifactWrappers", "CFTime", "CSV", "CUDA", "ClimaComms", "ClimaCore", "ClimaCoreTempestRemap", 
"DataFrames", "Dates", "DocStringExtensions", "Flux", "HTTP", "Insolation", "IntervalSets", "LinearAlgebra", "NCDatasets", "SciMLBase", "StaticArrays", "StatsBase", "SurfaceFluxes", "Thermodynamics", "cuDNN"] +deps = ["Adapt", "ArtifactWrappers", "CSV", "CUDA", "ClimaComms", "ClimaCore", "ClimaCoreTempestRemap", "ClimaUtilities", "DataFrames", "Dates", "DocStringExtensions", "Flux", "HTTP", "Insolation", "Interpolations", "IntervalSets", "LinearAlgebra", "NCDatasets", "SciMLBase", "StaticArrays", "StatsBase", "SurfaceFluxes", "Thermodynamics", "cuDNN"] path = ".." uuid = "08f4d4ce-cf43-44bb-ad95-9d2d5f413532" version = "0.11.1" @@ -360,6 +354,23 @@ git-tree-sha1 = "9c203f39784c968700c55f555754a7771b3410df" uuid = "595c0a79-7f3d-439a-bc5a-b232dc3bde79" version = "0.7.19" +[[deps.ClimaUtilities]] +deps = ["Artifacts", "CFTime", "Dates"] +git-tree-sha1 = "2b42b44a95245e2920bbb3ec4b5ca643a5f13b9a" +uuid = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" +version = "0.1.2" +weakdeps = ["Adapt", "ClimaComms", "ClimaCore", "ClimaCoreTempestRemap", "Interpolations", "NCDatasets"] + + [deps.ClimaUtilities.extensions] + ClimaArtifactsExt = ["ClimaComms"] + DataHandlingExt = ["ClimaCore", "NCDatasets"] + InterpolationsRegridderExt = ["Interpolations", "ClimaCore"] + NCFileReaderExt = "NCDatasets" + SpaceVaryingInputsExt = ["ClimaCore", "NCDatasets"] + TempestRegridderExt = "ClimaCoreTempestRemap" + TimeVaryingInputs0DExt = "ClimaCore" + TimeVaryingInputsExt = ["ClimaCore", "NCDatasets"] + [[deps.CloseOpenIntervals]] deps = ["Static", "StaticArrayInterface"] git-tree-sha1 = "70232f82ffaab9dc52585e0dd043b5e0c6b714f1" @@ -719,10 +730,10 @@ uuid = "c87230d0-a227-11e9-1b43-d7ebe4e7570a" version = "0.4.1" [[deps.FFMPEG_jll]] -deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"] -git-tree-sha1 = 
"466d45dc38e15794ec7d5d63ec03d776a9aff36e" +deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Pkg", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"] +git-tree-sha1 = "74faea50c1d007c85837327f6775bea60b5492dd" uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5" -version = "4.4.4+1" +version = "4.4.2+2" [[deps.FFTW]] deps = ["AbstractFFTs", "FFTW_jll", "LinearAlgebra", "MKL_jll", "Preferences", "Reexport"] @@ -845,9 +856,9 @@ uuid = "a3f928ae-7b40-5064-980b-68af3947d34b" version = "2.13.93+0" [[deps.Format]] -git-tree-sha1 = "f3cf88025f6d03c194d73f5d13fee9004a108329" +git-tree-sha1 = "9c68794ef81b08086aeb32eeaf33531668d5f5fc" uuid = "1fa38f19-a742-5d3f-a2b9-30dd87b9d5f8" -version = "1.3.6" +version = "1.3.7" [[deps.Formatting]] deps = ["Logging", "Printf"] @@ -916,11 +927,6 @@ git-tree-sha1 = "ff38ba61beff76b8f4acad8ab0c97ef73bb670cb" uuid = "0656b61e-2033-5cc2-a64a-77c0f6c09b89" version = "3.3.9+0" -[[deps.GMP_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d" -version = "6.2.1+6" - [[deps.GPUArrays]] deps = ["Adapt", "GPUArraysCore", "LLVM", "LinearAlgebra", "Printf", "Random", "Reexport", "Serialization", "Statistics"] git-tree-sha1 = "47e4686ec18a9620850bad110b79966132f14283" @@ -986,12 +992,6 @@ git-tree-sha1 = "359a1ba2e320790ddbe4ee8b4d54a305c0ea2aff" uuid = "7746bdde-850d-59dc-9ae8-88ece973131d" version = "2.80.0+0" -[[deps.GnuTLS_jll]] -deps = ["Artifacts", "GMP_jll", "JLLWrappers", "Libdl", "Nettle_jll", "P11Kit_jll", "Zlib_jll"] -git-tree-sha1 = "f3c0936dd685d57fa0b1eee7dbebf382b969ea63" -uuid = "0951126a-58fd-58f1-b5b3-b08c7c4a876d" -version = "3.8.3+0" - [[deps.Graphics]] deps = ["Colors", "LinearAlgebra", "NaNMath"] git-tree-sha1 = "d61890399bc535850c4bf08e4e0d3a7ad0f21cbd" @@ -1026,16 +1026,16 @@ weakdeps = ["MPI"] MPIExt = "MPI" [[deps.HDF5_jll]] -deps = ["Artifacts", 
"CompilerSupportLibraries_jll", "JLLWrappers", "LazyArtifacts", "LibCURL_jll", "Libdl", "MPICH_jll", "MPIPreferences", "MPItrampoline_jll", "MicrosoftMPI_jll", "OpenMPI_jll", "OpenSSL_jll", "TOML", "Zlib_jll", "libaec_jll"] -git-tree-sha1 = "e4591176488495bf44d7456bd73179d87d5e6eab" +deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenSSL_jll", "Pkg", "Zlib_jll"] +git-tree-sha1 = "4cc2bb72df6ff40b055295fdef6d92955f9dede8" uuid = "0234f1f7-429e-5d53-9886-15a909be8d59" -version = "1.14.3+1" +version = "1.12.2+2" [[deps.HTTP]] deps = ["Base64", "CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"] -git-tree-sha1 = "995f762e0182ebc50548c434c171a5bb6635f8e4" +git-tree-sha1 = "8e59b47b9dc525b70550ca082ce85bcd7f5477cd" uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" -version = "1.10.4" +version = "1.10.5" [[deps.HarfBuzz_jll]] deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Pkg"] @@ -1497,12 +1497,6 @@ git-tree-sha1 = "c1dd6d7978c12545b4179fb6153b9250c96b0075" uuid = "e6f89c97-d47a-5376-807f-9c37f3926c36" version = "1.0.3" -[[deps.Lz4_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "6c26c5e8a4203d43b5497be3ec5d4e0c3cde240a" -uuid = "5ced341a-0733-55b8-9ab6-a4889d929147" -version = "1.9.4+0" - [[deps.MKL_jll]] deps = ["Artifacts", "IntelOpenMP_jll", "JLLWrappers", "LazyArtifacts", "Libdl"] git-tree-sha1 = "72dc3cf284559eb8f53aa593fe62cb33f83ed0c0" @@ -1713,10 +1707,10 @@ uuid = "71a1bf82-56d0-4bbc-8a3c-48b961074391" version = "0.1.5" [[deps.NetCDF_jll]] -deps = ["Artifacts", "Blosc_jll", "Bzip2_jll", "HDF5_jll", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenMPI_jll", "XML2_jll", "Zlib_jll", "Zstd_jll", "libzip_jll"] -git-tree-sha1 = "a8af1798e4eb9ff768ce7fdefc0e957097793f15" +deps = ["Artifacts", "HDF5_jll", 
"JLLWrappers", "LibCURL_jll", "Libdl", "Pkg", "XML2_jll", "Zlib_jll"] +git-tree-sha1 = "072f8371f74c3b9e1b26679de7fbf059d45ea221" uuid = "7243133f-43d8-5620-bbf4-c2c921802cf3" -version = "400.902.209+0" +version = "400.902.5+1" [[deps.Netpbm]] deps = ["FileIO", "ImageCore", "ImageMetadata"] @@ -1724,12 +1718,6 @@ git-tree-sha1 = "d92b107dbb887293622df7697a2223f9f8176fcd" uuid = "f09324ee-3d7c-5217-9330-fc30815ba969" version = "1.1.1" -[[deps.Nettle_jll]] -deps = ["Artifacts", "GMP_jll", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "eca63e3847dad608cfa6a3329b95ef674c7160b4" -uuid = "4c82536e-c426-54e4-b420-14f461c4ed8b" -version = "3.7.2+0" - [[deps.NetworkOptions]] uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" version = "1.2.0" @@ -1802,9 +1790,9 @@ version = "1.4.2" [[deps.OpenSSL_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "60e3045590bd104a16fefb12836c00c0ef8c7f8c" +git-tree-sha1 = "a12e56c72edee3ce6b96667745e6cbbe5498f200" uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" -version = "3.0.13+0" +version = "1.1.23+0" [[deps.OpenSpecFun_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] @@ -1841,12 +1829,6 @@ git-tree-sha1 = "dfdf5519f235516220579f949664f1bf44e741c5" uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" version = "1.6.3" -[[deps.P11Kit_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "2cd396108e178f3ae8dedbd8e938a18726ab2fbf" -uuid = "c2071276-7c44-58a7-b746-946036e04d0a" -version = "0.24.1+0" - [[deps.PCRE2_jll]] deps = ["Artifacts", "Libdl"] uuid = "efcefdf7-47ab-520b-bdef-62a2eaa19f15" @@ -2084,9 +2066,9 @@ version = "1.0.0" [[deps.Qt6Base_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Fontconfig_jll", "Glib_jll", "JLLWrappers", "Libdl", "Libglvnd_jll", "OpenSSL_jll", "Vulkan_Loader_jll", "Xorg_libSM_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Xorg_libxcb_jll", "Xorg_xcb_util_cursor_jll", "Xorg_xcb_util_image_jll", "Xorg_xcb_util_keysyms_jll", 
"Xorg_xcb_util_renderutil_jll", "Xorg_xcb_util_wm_jll", "Zlib_jll", "libinput_jll", "xkbcommon_jll"] -git-tree-sha1 = "37b7bb7aabf9a085e0044307e1717436117f2b3b" +git-tree-sha1 = "7c29f0e8c575428bd84dc3c72ece5178caa67336" uuid = "c0090381-4147-56d7-9ebc-da0b1113ec56" -version = "6.5.3+1" +version = "6.5.2+2" [[deps.QuadGK]] deps = ["DataStructures", "LinearAlgebra"] @@ -2149,9 +2131,9 @@ version = "0.6.12" [[deps.RecursiveArrayTools]] deps = ["Adapt", "ArrayInterface", "DocStringExtensions", "GPUArraysCore", "IteratorInterfaceExtensions", "LinearAlgebra", "RecipesBase", "SparseArrays", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface", "Tables"] -git-tree-sha1 = "a94d22ca9ad49a7a169ecbc5419c59b9793937cc" +git-tree-sha1 = "d8f131090f2e44b145084928856a561c83f43b27" uuid = "731186ca-8d62-57ce-b412-fbd966d074cd" -version = "3.12.0" +version = "3.13.0" [deps.RecursiveArrayTools.extensions] RecursiveArrayToolsFastBroadcastExt = "FastBroadcast" @@ -2234,9 +2216,9 @@ version = "0.1.0" [[deps.SciMLBase]] deps = ["ADTypes", "ArrayInterface", "CommonSolve", "ConstructionBase", "Distributed", "DocStringExtensions", "EnumX", "FunctionWrappersWrappers", "IteratorInterfaceExtensions", "LinearAlgebra", "Logging", "Markdown", "PrecompileTools", "Preferences", "Printf", "RecipesBase", "RecursiveArrayTools", "Reexport", "RuntimeGeneratedFunctions", "SciMLOperators", "SciMLStructures", "StaticArraysCore", "Statistics", "SymbolicIndexingInterface", "Tables"] -git-tree-sha1 = "3daaea955c0905200943175637f184a968574a2d" +git-tree-sha1 = "d15c65e25615272e1b1c5edb1d307484c7942824" uuid = "0bca4576-84f4-4d90-8ffe-ffa030f20462" -version = "2.30.3" +version = "2.31.0" [deps.SciMLBase.extensions] SciMLBaseChainRulesCoreExt = "ChainRulesCore" @@ -2519,10 +2501,10 @@ weakdeps = ["ClimaParams"] CreateParametersExt = "ClimaParams" [[deps.SymbolicIndexingInterface]] -deps = ["MacroTools", "RuntimeGeneratedFunctions"] -git-tree-sha1 = "f7b1fc9fc2bc938436b7684c243be7d317919056" +deps = 
["ArrayInterface", "MacroTools", "RuntimeGeneratedFunctions", "StaticArraysCore"] +git-tree-sha1 = "dd94edee1542e9da422cb2f12494ef09ea823e48" uuid = "2efcf032-c050-4f8e-a9bb-153293bab1f5" -version = "0.3.11" +version = "0.3.13" [[deps.TOML]] deps = ["Dates"] @@ -2557,10 +2539,10 @@ weakdeps = ["IntervalArithmetic"] TaylorSeriesIAExt = "IntervalArithmetic" [[deps.TempestRemap_jll]] -deps = ["Artifacts", "HDF5_jll", "JLLWrappers", "Libdl", "NetCDF_jll", "OpenBLAS32_jll"] -git-tree-sha1 = "723112218783928a20e0d865932694acfb7a7571" +deps = ["Artifacts", "HDF5_jll", "JLLWrappers", "Libdl", "NetCDF_jll", "OpenBLAS32_jll", "Pkg"] +git-tree-sha1 = "88c3818a492ad1a94b1aa440b01eab5d133209ff" uuid = "8573a8c5-1df0-515e-a024-abad257ee284" -version = "2.2.0+0" +version = "2.1.6+1" [[deps.TensorCore]] deps = ["LinearAlgebra"] @@ -2975,12 +2957,6 @@ git-tree-sha1 = "51b5eeb3f98367157a7a12a1fb0aa5328946c03c" uuid = "9a68df92-36a6-505f-a73e-abb412b6bfb4" version = "0.2.3+0" -[[deps.libaec_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl"] -git-tree-sha1 = "46bf7be2917b59b761247be3f317ddf75e50e997" -uuid = "477f73a3-ac25-53e9-8cc3-50b2fa2566f0" -version = "1.1.2+0" - [[deps.libaom_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] git-tree-sha1 = "3a2ea60308f0996d26f1e5354e10c24e9ef905d4" @@ -3034,12 +3010,6 @@ git-tree-sha1 = "b910cb81ef3fe6e78bf6acee440bda86fd6ae00c" uuid = "f27f6e37-5d2b-51aa-960f-b287f2bc3b7a" version = "1.3.7+1" -[[deps.libzip_jll]] -deps = ["Artifacts", "Bzip2_jll", "GnuTLS_jll", "JLLWrappers", "Libdl", "XZ_jll", "Zlib_jll", "Zstd_jll"] -git-tree-sha1 = "3282b7d16ae7ac3e57ec2f3fa8fafb564d8f9f7f" -uuid = "337d8026-41b4-5cde-a456-74a10e5b31d1" -version = "1.10.1+0" - [[deps.mtdev_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] git-tree-sha1 = "814e154bdb7be91d78b6802843f76b6ece642f11" diff --git a/experiments/Project.toml b/experiments/Project.toml index 997fb0cf7f..0ed619d556 100644 --- a/experiments/Project.toml +++ b/experiments/Project.toml 
@@ -6,12 +6,14 @@ ClimaCore = "d414da3d-4745-48bb-8d80-42e94e092884" ClimaLand = "08f4d4ce-cf43-44bb-ad95-9d2d5f413532" ClimaParams = "5c42b081-d73a-476f-9059-fd94b934656c" ClimaTimeSteppers = "595c0a79-7f3d-439a-bc5a-b232dc3bde79" +ClimaUtilities = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" Formatting = "59287772-0a20-5a39-b81b-1366585eb4c0" HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3" Insolation = "e98cc03f-d57e-4e3c-b70c-8d51efe9e0d8" Interpolations = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab" NLsolve = "2774e3e8-f4cf-5e23-947b-6d7e65073b56" Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" RootSolvers = "7181ea78-2dcb-4de3-ab41-2b8ab5a31e74" diff --git a/experiments/standalone/Bucket/global_bucket_function.jl b/experiments/standalone/Bucket/global_bucket_function.jl index 62bc6e89e5..dfc86cfac9 100644 --- a/experiments/standalone/Bucket/global_bucket_function.jl +++ b/experiments/standalone/Bucket/global_bucket_function.jl @@ -18,7 +18,10 @@ using Dates using DelimitedFiles using Statistics +import ClimaUtilities.TimeVaryingInputs: TimeVaryingInput + import ClimaTimeSteppers as CTS +import NCDatasets using ClimaCore using ClimaCore: Remapping, Geometry import ClimaComms @@ -35,7 +38,6 @@ using ClimaLand: make_set_initial_cache, PrescribedAtmosphere, PrescribedRadiativeFluxes -using ClimaLand.TimeVaryingInputs """ compute_extrema(v) diff --git a/experiments/standalone/Bucket/global_bucket_staticmap.jl b/experiments/standalone/Bucket/global_bucket_staticmap.jl index 39e9ff15f6..09a926719b 100644 --- a/experiments/standalone/Bucket/global_bucket_staticmap.jl +++ b/experiments/standalone/Bucket/global_bucket_staticmap.jl @@ -21,7 +21,10 @@ using Dates using DelimitedFiles using Statistics +import ClimaUtilities.TimeVaryingInputs: TimeVaryingInput + import ClimaTimeSteppers as CTS +import NCDatasets using ClimaCore using ClimaCore: 
Remapping, Geometry import ClimaParams as CP @@ -38,7 +41,6 @@ using ClimaLand: make_set_initial_cache, PrescribedAtmosphere, PrescribedRadiativeFluxes -using ClimaLand.TimeVaryingInputs """ compute_extrema(v) @@ -98,7 +100,7 @@ regrid_dir = joinpath( !ispath(regrid_dir) && mkpath(regrid_dir) surface_space = bucket_domain.space.surface α_snow = FT(0.8) -albedo = PrescribedBaregroundAlbedo{FT}(α_snow, regrid_dir, surface_space); +albedo = PrescribedBaregroundAlbedo{FT}(α_snow, surface_space); bucket_parameters = BucketModelParameters(FT; albedo, z_0m, z_0b, τc); diff --git a/experiments/standalone/Bucket/global_bucket_temporalmap.jl b/experiments/standalone/Bucket/global_bucket_temporalmap.jl index 724436352d..493b6fa4cb 100644 --- a/experiments/standalone/Bucket/global_bucket_temporalmap.jl +++ b/experiments/standalone/Bucket/global_bucket_temporalmap.jl @@ -19,7 +19,10 @@ using Dates using DelimitedFiles using Statistics +import ClimaUtilities.TimeVaryingInputs: TimeVaryingInput + import ClimaTimeSteppers as CTS +import NCDatasets using ClimaCore using ClimaCore: Remapping, Geometry import ClimaParams as CP @@ -36,7 +39,6 @@ using ClimaLand: make_set_initial_cache, PrescribedAtmosphere, PrescribedRadiativeFluxes -using ClimaLand.TimeVaryingInputs PROFILING = false try @@ -67,6 +69,9 @@ outdir = joinpath( pkgdir(ClimaLand), "experiments/standalone/Bucket/artifacts_temporalmap", ) +device_suffix = + typeof(ClimaComms.context().device) <: ClimaComms.CPUSingleThreaded ? 
+ "cpu" : "gpu" !ispath(outdir) && mkpath(outdir) # Use separate output directory for CPU and GPU runs to avoid race condition device_suffix = @@ -108,8 +113,7 @@ function setup_prob(t0, tf, Δt) surface_space = bucket_domain.space.surface # Construct albedo parameter object using temporal map - albedo = - PrescribedSurfaceAlbedo{FT}(regrid_dir, ref_time, t0, surface_space) + albedo = PrescribedSurfaceAlbedo{FT}(ref_time, t0, surface_space) bucket_parameters = BucketModelParameters(FT; albedo, z_0m, z_0b, τc) diff --git a/experiments/standalone/Soil/richards_runoff.jl b/experiments/standalone/Soil/richards_runoff.jl index 49674eb858..446f07efdb 100644 --- a/experiments/standalone/Soil/richards_runoff.jl +++ b/experiments/standalone/Soil/richards_runoff.jl @@ -4,6 +4,8 @@ using ArtifactWrappers import SciMLBase import ClimaTimeSteppers as CTS using ClimaCore +import ClimaUtilities.SpaceVaryingInputs: SpaceVaryingInput +import NCDatasets import ClimaParams as CP using ClimaComms @@ -11,7 +13,7 @@ using ClimaLand using ClimaLand.Soil import ClimaLand import ClimaLand.Parameters as LP -import ClimaLand.SpaceVaryingInputs: SpaceVaryingInput + context = ClimaComms.context() outdir = joinpath(pkgdir(ClimaLand), "experiments/standalone/Soil/artifacts") @@ -73,7 +75,7 @@ soil_params = ClimaLand.Soil.RichardsParameters(; function precip_function(t) return -1e-6 end -precip = ClimaLand.TimeVaryingInput(precip_function) +precip = TimeVaryingInput(precip_function) atmos = ClimaLand.PrescribedPrecipitation{FT, typeof(precip)}(precip) bottom_bc = ClimaLand.Soil.WaterFluxBC((p, t) -> 0.0) bc = (; diff --git a/lib/ClimaLandSimulations/Project.toml b/lib/ClimaLandSimulations/Project.toml index adbb03e753..812b3fe89b 100644 --- a/lib/ClimaLandSimulations/Project.toml +++ b/lib/ClimaLandSimulations/Project.toml @@ -12,6 +12,7 @@ ClimaCore = "d414da3d-4745-48bb-8d80-42e94e092884" ClimaLand = "08f4d4ce-cf43-44bb-ad95-9d2d5f413532" ClimaParams = "5c42b081-d73a-476f-9059-fd94b934656c"
ClimaTimeSteppers = "595c0a79-7f3d-439a-bc5a-b232dc3bde79" +ClimaUtilities = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab" diff --git a/src/ClimaLand.jl b/src/ClimaLand.jl index f1101846dd..623a457fca 100644 --- a/src/ClimaLand.jl +++ b/src/ClimaLand.jl @@ -7,16 +7,15 @@ import ClimaCore: Fields, Spaces include("shared_utilities/Parameters.jl") import .Parameters as LP -include("shared_utilities/general_utils.jl") -include("shared_utilities/TimeVaryingInputs.jl") -using .TimeVaryingInputs -export TimeVaryingInput, evaluate! -include("shared_utilities/Regridder.jl") include("shared_utilities/Domains.jl") -include("shared_utilities/FileReader.jl") -include("shared_utilities/SpaceVaryingInputs.jl") -using .SpaceVaryingInputs -export SpaceVaryingInput +import ClimaUtilities.TimeVaryingInputs +import ClimaUtilities.TimeVaryingInputs: + TimeVaryingInput, AbstractTimeVaryingInput, evaluate! +export TimeVaryingInput, evaluate! +import ClimaUtilities.SpaceVaryingInputs +import ClimaUtilities.SpaceVaryingInputs: SpaceVaryingInput + +import NCDatasets # Needed to load the ClimaUtilities.*VaryingInput using .Domains include("shared_utilities/utils.jl") include("shared_utilities/models.jl") diff --git a/src/shared_utilities/FileReader.jl b/src/shared_utilities/FileReader.jl deleted file mode 100644 index e9d8e9b4b6..0000000000 --- a/src/shared_utilities/FileReader.jl +++ /dev/null @@ -1,549 +0,0 @@ -""" - FileReader - -This module coordinates reading, regridding, and interpolating -data from NetCDF files that is required for global land model -simulations, including globally varying parameters which may -or may not change in time. It also includes regridding and -temporal interpolations of this data. - -This is based on ClimaCoupler.jl's BCReader and TimeManager modules. 
-""" -module FileReader - -using ClimaComms -using ClimaCore: Fields, Spaces -using Dates -using CFTime -using NCDatasets - -using ClimaLand.Regridder - -export AbstractPrescribedData, - PrescribedDataTemporal, - PrescribedDataStatic, - FileInfo, - FileState, - SimInfo, - read_data_fields!, - next_date_in_file, - get_data_at_date, - to_datetime - - -""" - abstract type AbstractPrescribedData - -An abstract type for storing prescribed data info. Subtypes -include temporally-varying prescribed data and static prescribed data. -""" -abstract type AbstractPrescribedData end - -""" - PrescribedDataStatic <: AbstractPrescribedData - -Stores information to read in a prescribed variable from a file. -The data is read in once and stored without changing for the duration of a -simulation. This type is meant to be used with input data that does not have -a time dimension. -Each of the fields of this struct is itself a struct. - -# Inputs: -- file_info::FI # unchanging info about the input data file -""" -struct PrescribedDataStatic{FI} <: AbstractPrescribedData - file_info::FI -end - -""" - PrescribedDataTemporal <: AbstractPrescribedData - -Stores information to read in prescribed data from a file. -Contains sufficient information to read in the variables at various -timesteps, and to coordinate this reading between data coming from -different files. This type is meant to be used with input data that has -a time dimension. -The `file_states` field is a dictionary mapping variable names to `FileState` -structs, which contain information about the current data for that variable. 
- -# Inputs: -- file_info::FI # unchanging info about the input data file -- file_states::Dict{S, FS} # info about the data currently being read from file for each variable -- sim_info::SI # unchanging info about the start date/time of the simulation -""" -struct PrescribedDataTemporal{FI, S, FS, SI} <: AbstractPrescribedData - file_info::FI - file_states::Dict{S, FS} - sim_info::SI -end - -""" - FileInfo - -Stores information about the current data being read in from a file. - -# Inputs: -- infile_path::String # path to the input NetCDF data file -- regrid_dirpath::String # directory for storing files used in regridding -- varnames::Vector{String} # names of the variables we're reading from the input file -- outfile_root::String # root for regridded data files generated when writing data at each time from input file -- all_dates::Vector # vector containing all dates of the input file, which we assume are `DateTime`s or `DateTimeNoLeap`s -- date_idx0::Vector{Int} # index of the first data in the file being used for this simulation -""" -struct FileInfo - infile_path::String - regrid_dirpath::String - varnames::Vector{String} - outfile_root::String - all_dates::Vector - date_idx0::Vector{Int} -end - -""" - FileState - -Stores information about the current data being read in from a file for one variable. - -# Inputs: -- data_fields::F # tuple of two fields at consecutive dates, that will be used for interpolation -- date_idx::Vector{Int} # index in the input file of the first data field currently being used -- segment_length::Vector{Int} # length of the time interval between the two data field entries; used in temporal interpolation -""" -struct FileState{F} - data_fields::F - date_idx::Vector{Int} - segment_length::Vector{Int} -end - -""" - SimInfo - -Stores information about the simulation being run. 
We may want to store -multiple copies of an instance of this struct in multiple PrescribedDataTemporal -objects if we're reading in data over time for multiple variables. - -# Inputs: -- date_ref::D # a reference date before or at the start of the simulation -- t_start # time in seconds since `date_ref` -""" -struct SimInfo{D} - date_ref::D - t_start::Any -end - -""" - PrescribedDataStatic{FT}( - get_infile::Function, - regrid_dirpath::String, - varnames::Vector{String}, - surface_space::Spaces.AbstractSpace, - mono::Bool = true, - ) where {FT <: AbstractFloat} - -Constructor for the `PrescribedDataStatic`` type. -Regrids from the input lat-lon grid to the simulation cgll grid, saving -the regridded output in new files found at `regrid_dirpath`. The `mono` flag -here is used to determine whether or not the remapping is monotone. - -Creates a `FileInfo` object containing all the information needed to read in -the data stored in the input file, which will later be regridded to our -simulation grid. Date-related args (last 3 passed to FileInfo) are unused for -static data maps. 
-""" -function PrescribedDataStatic{FT}( - get_infile::Function, - regrid_dirpath::String, - varnames::Vector{String}, - surface_space::Spaces.AbstractSpace; - mono::Bool = true, -) where {FT <: AbstractFloat} - comms_ctx = ClimaComms.context(surface_space) - outfile_root = "static_data_cgll" - - # Download `infile_path` artifact on root process first to avoid race condition - if ClimaComms.iamroot(comms_ctx) - infile_path = get_infile() - Regridder.hdwrite_regridfile_rll_to_cgll( - FT, - regrid_dirpath, - infile_path, - varnames, - surface_space, - outfile_root; - mono = mono, - ) - end - ClimaComms.barrier(comms_ctx) - infile_path = get_infile() - - file_info = - FileInfo(infile_path, regrid_dirpath, varnames, outfile_root, [], []) - return PrescribedDataStatic{typeof(file_info)}(file_info) -end - - -""" - PrescribedDataTemporal{FT}( - regrid_dirpath, - get_infile, - varnames, - date_ref, - t_start, - surface_space; - mono = true, - ) where {FT <: AbstractFloat} - -Constructor for the `PrescribedDataTemporal` type. -Regrids from the input lat-lon grid to the simulation cgll grid, saving -the regridded output in a new file found at `regrid_dirpath`, and -returns the info required to run the simulation using this prescribed -data packaged into a single `PrescribedDataTemporal` struct. - -# Arguments -- `regrid_dirpath` # directory the data file is stored in. -- `get_infile` # function returning path to NCDataset file containing data to regrid. -- `varnames` # names of the variables to be regridded. -- `date_ref` # reference date to coordinate start of the simulation -- `t_start` # start time of the simulation relative to `date_ref` (date_start = date_ref + t_start) -- `surface_space` # the space to which we are mapping. -- `mono` # flag for monotone remapping of `infile_path`. 
- -# Returns -- `PrescribedDataTemporal` object -""" -function PrescribedDataTemporal{FT}( - regrid_dirpath::String, - get_infile::Function, - varnames::Vector{String}, - date_ref::Union{DateTime, DateTimeNoLeap}, - t_start, - surface_space::Spaces.AbstractSpace; - mono::Bool = true, -) where {FT <: AbstractFloat} - comms_ctx = ClimaComms.context(surface_space) - outfile_root = "temporal_data_cgll" - - # Initialize dummy date to be overwritten by actual dates during file read - all_dates = [DateTime(0)] - - # Regrid data at all times from lat/lon (RLL) to simulation grid (CGLL) - # Download `infile_path` artifact on root process first to avoid race condition - if ClimaComms.iamroot(comms_ctx) - infile_path = get_infile() - all_dates = Regridder.hdwrite_regridfile_rll_to_cgll( - FT, - regrid_dirpath, - infile_path, - varnames, - surface_space, - outfile_root; - mono = mono, - ) - - NCDataset(infile_path, "r") do ds - if !("time" in keys(ds)) - error( - "Using a temporal albedo map requires data with time dimension.", - ) - end - end - end - all_dates = ClimaComms.bcast(comms_ctx, all_dates) - ClimaComms.barrier(comms_ctx) - infile_path = get_infile() - - # Init time tracking info - data_fields = - Fields.zeros(FT, surface_space), Fields.zeros(FT, surface_space) - # Store `segment_length` as an array so we can modify it as a field of a struct - segment_length = Int[0] - - date_start = date_ref + Dates.Second(t_start) - if date_start < all_dates[1] - @warn "Simulation start date is before first file data" - end - - # Find the index of the start file closest to this simulation's start date - # Like `segment_length`, store in an array so we can modify in struct - date_idx0 = - [argmin(abs.(Dates.value(date_start) .- Dates.value.(all_dates[:])))] - - # Construct component structs of PrescribedDataTemporal object - file_info = FileInfo( - infile_path, - regrid_dirpath, - varnames, - outfile_root, - all_dates, - date_idx0, - ) - file_states = Dict{String, 
FileState{typeof(data_fields)}}() - for varname in varnames - file_states[varname] = - FileState(deepcopy(data_fields), copy(date_idx0), segment_length) - end - sim_info = SimInfo(date_ref, t_start) - - args = (file_info, file_states, sim_info) - - # Get types of `file_info`, the first `file_states` Dict pair, and `sim_info` - type_args = ( - typeof(file_info), - typeof(first(file_states)[1]), - typeof(first(file_states)[2]), - typeof(sim_info), - ) - return PrescribedDataTemporal{type_args...}(args...) -end - -""" - read_data_fields!( - prescribed_data::PrescribedDataTemporal, - date::DateTime, - space::Spaces.AbstractSpace - ) - -Extracts data from regridded (to model grid) NetCDF files. -The times for which data is extracted depends on the specifications in the -`prescribed_data` struct). -Data at one point in time is stored in `prescribed_data.file_state.data_fields[1]`, and -data at the next time is stored in `prescribed_data.file_state.data_fields[2]`. With these -two data fields saved, we can interpolate between them for any dates -in this range of time. - -# Arguments -- `prescribed_data` # containing data and file information. -- `date` # current date to read in data for. -- `space` # space we're remapping the data onto. 
-""" -function read_data_fields!( - prescribed_data::PrescribedDataTemporal, - date::DateTime, - space::Spaces.AbstractSpace, -) - comms_ctx = ClimaComms.context(space) - pd_file_info = prescribed_data.file_info - pd_file_states = prescribed_data.file_states - - (; regrid_dirpath, outfile_root, all_dates, varnames) = pd_file_info - - date_idx0 = pd_file_info.date_idx0[1] - # Assumes that all variables in `prescribed_data` have the same dates - date_idx = pd_file_states[varnames[1]].date_idx[1] - - # Case 1: Current date is before or at first date in data file - # Load in data at first date for both `data_fields[1]` and `data_fields[2]` - if (date_idx == date_idx0) && (date <= all_dates[date_idx]) - if date !== all_dates[date_idx] - @warn "this time period is before input data - using file from $(all_dates[date_idx0])" - end - - # Loop over all variables we need to read in - for (varname, file_state) in pd_file_states - file_state.data_fields[1] .= Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - all_dates[Int(date_idx0)], - varname, - space, - ) - file_state.data_fields[2] .= file_state.data_fields[1] - file_state.segment_length .= 0 - end - - # Case 2: current date is at or after last date in input file - # Load in data at last date for both `data_fields[1]` and `data_fields[2]` - elseif date >= all_dates[end - 1] - @warn "this time period is after input data - using file from $(all_dates[end - 1])" - - # Loop over all variables we need to read in - for (varname, file_state) in pd_file_states - file_state.data_fields[1] .= Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - all_dates[end], - varname, - space, - ) - file_state.data_fields[2] .= file_state.data_fields[1] - file_state.segment_length .= 0 - end - - # Case 3: current date is later than date of data being read in - # Load in data at most recent past date in `data_fields[1]` and - # next date in `data_fields[2]` - # elseif Dates.days(date - all_dates[Int(date_idx)]) > 0 - elseif 
date > all_dates[Int(date_idx)] - # Loop over all variables we need to read in - for (varname, file_state) in pd_file_states - file_state = pd_file_states[varname] - - # Increment `date_idx` to use next date - date_idx = file_state.date_idx[1] += Int(1) - # Time between consecutive dates being stored gives `segment_length` - file_state.segment_length .= - (all_dates[Int(date_idx + 1)] - all_dates[Int(date_idx)]).value - - # Read in data fields at both dates - file_state.data_fields[1] .= Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - all_dates[Int(date_idx)], - varname, - space, - ) - file_state.data_fields[2] .= Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - all_dates[Int(date_idx + 1)], - varname, - space, - ) - end - # Case 4: Everything else - else - throw(ErrorException("Check input file specification")) - end -end - -""" - next_date_in_file(prescribed_data::PrescribedDataTemporal) - -Returns the next date stored in the file `prescribed_data` struct after the -current date index given by `date_idx`. -Note: this function does not update `date_idx`, so repeated calls will -return the same value unless `date_idx` is modified elsewhere in between. -Assumes that all variables in `prescribed_data` have the same dates. - -# Arguments -- `prescribed_data` # contains all input file information needed for the simulation. - -# Returns -- DateTime or DateTimeNoLeap -""" -next_date_in_file(prescribed_data::PrescribedDataTemporal) = - prescribed_data.file_info.all_dates[first( - prescribed_data.file_states, - )[2].date_idx[1] + Int(1)] - -""" - get_data_at_date( - prescribed_data::PrescribedDataStatic, - space::Spaces.AbstractSpace, - varname::String, - ) - -Returns the data at a given date, interpolated if necessary. - -# Arguments -- `prescribed_data` # contains fields to be interpolated. -- `space` # the space of our simulation. -- `varname` # the name of the variable we want to read in. 
- -# Returns -- Fields.field -""" -function get_data_at_date( - prescribed_data::PrescribedDataStatic, - space::Spaces.AbstractSpace, - varname::String, -) - (; regrid_dirpath, outfile_root) = prescribed_data.file_info - - comms_ctx = ClimaComms.context(space) - field = Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - Dates.DateTime(0), # dummy date - varname, - space, - ) - return field -end - -""" - get_data_at_date( - prescribed_data::PrescribedDataTemporal, - space::Spaces.AbstractSpace, - varname::String, - date::Union{DateTime, DateTimeNoLeap}, - ) - -Returns the data for a specific variable at a given date, -interpolated if necessary. - -# Arguments -- `prescribed_data` # contains fields to be interpolated. -- `space` # the space of our simulation. -- `varname` # the name of the variable we want to read in. -- `date` # start date for data. - -# Returns -- Fields.field -""" -function get_data_at_date( - prescribed_data::PrescribedDataTemporal, - space::Spaces.AbstractSpace, - varname::String, - date::Union{DateTime, DateTimeNoLeap}, -) - FT = Spaces.undertype(space) - (; all_dates) = prescribed_data.file_info - - # Use the file state of the variable we want - file_state = prescribed_data.file_states[varname] - (; segment_length, date_idx, data_fields) = file_state - # Interpolate if the time period between dates is nonzero - if segment_length[1] > FT(0) && date != all_dates[Int(date_idx[1])] - Δt_tt1 = FT((date - all_dates[Int(date_idx[1])]).value) - interp_fraction = Δt_tt1 / FT(segment_length[1]) - @assert abs(interp_fraction) <= FT(1) "time interpolation weights must be <= 1, but `interp_fraction` = $interp_fraction" - return interpol.( - data_fields[1], - data_fields[2], - Δt_tt1, - FT(segment_length[1]), - ) - # Otherwise use the data at the first date - else - return data_fields[1] - end -end - - -""" - interpol(f1::FT, f2::FT, Δt_tt1::FT, Δt_t2t1::FT) - -Performs linear interpolation of `f` at time `t` within -a segment `Δt_t2t1 = (t2 - 
t1)`, of fields `f1` and `f2`, with `t2 > t1`. - -# Arguments -- `f1`::FT # first value to be interpolated (`f(t1) = f1`). -- `f2`::FT # second value to be interpolated. -- `Δt_tt1`::FT # time between `t1` and some `t` (`Δt_tt1 = (t - t1)`). -- `Δt_t2t1`::FT # time between `t1` and `t2`. - -# Returns -- FT -""" -function interpol(f1::FT, f2::FT, Δt_tt1::FT, Δt_t2t1::FT) where {FT} - interp_fraction = Δt_tt1 / Δt_t2t1 - return f1 * (FT(1) - interp_fraction) + f2 * (interp_fraction) -end - -""" - to_datetime(date) - -Convert a DateTime-like object (e.g. DateTimeNoLeap) to a DateTime, -using CFTime.jl. We need this since the CESM2 albedo file contains -DateTimeNoLeap objects for dates, which can't be used for math with DateTimes. - -Note that this conversion may fail if the date to convert doesn't -exist in the DateTime calendar. - -# Arguments -- `date`: object to be converted to DateTime -""" -to_datetime(date) = CFTime.reinterpret(DateTime, date) - -end diff --git a/src/shared_utilities/Regridder.jl b/src/shared_utilities/Regridder.jl deleted file mode 100644 index a06ae4f802..0000000000 --- a/src/shared_utilities/Regridder.jl +++ /dev/null @@ -1,353 +0,0 @@ -# Code from ClimaCoupler Regridder.jl -module Regridder -using ClimaCore -using ClimaComms -using NCDatasets -using ClimaCoreTempestRemap -using Dates -using DocStringExtensions - -export hdwrite_regridfile_rll_to_cgll - -""" - reshape_cgll_sparse_to_field!(field::Fields.Field, in_array::Array, R) - -Reshapes a sparse vector array `in_array` (CGLL, raw output of the TempestRemap), -and uses its data to populate the input Field object `field`. -Redundant nodes are populated using `dss` operations. - -Code taken from ClimaCoupler.Regridder. - -# Arguments -- `field`: [Fields.Field] object populated with the input array. -- `in_array`: [Array] input used to fill `field`. -- `R`: [NamedTuple] containing `target_idxs` and `row_indices` used for indexing. 
-""" -function reshape_cgll_sparse_to_field!( - field::ClimaCore.Fields.Field, - in_array::Array, - R, -) - field_array = parent(field) - - fill!(field_array, zero(eltype(field_array))) - Nf = size(field_array, 3) - - for (n, row) in enumerate(R.row_indices) - it, jt, et = ( - view(R.target_idxs[1], n), - view(R.target_idxs[2], n), - view(R.target_idxs[3], n), - ) - for f in 1:Nf - field_array[it, jt, f, et] .= in_array[row] - end - end - - # broadcast to the redundant nodes using unweighted dss - space = axes(field) - topology = ClimaCore.Spaces.topology(space) - hspace = ClimaCore.Spaces.horizontal_space(space) - target = ClimaCore.Fields.field_values(field) - - ClimaCore.Topologies.dss!(target, topology) -end - -""" - swap_space(field, new_space) - -Update the space of a ClimaCore.Fields.Field object. Returns a new Field -object with the same values as the original field, but on the new space. -This is needed to correctly read in regridded files that may be reused -between simulations. - -# Arguments -- `field`: The ClimaCore.Fields.Field object to swap the space of. -- `new_space`: The new ClimaCore.Spaces.AbstractSpace to assign to the field. -""" -function swap_space(field, new_space) - return ClimaCore.Fields.Field( - ClimaCore.Fields.field_values(field), - new_space, - ) -end - -""" - read_from_hdf5(REGIRD_DIR, hd_outfile_root, time, varname, - space) - -Read in a variable `varname` from an HDF5 file onto the provided space. -If a CommsContext other than SingletonCommsContext is used in the `space`, -the input HDF5 file must be readable by multiple MPI processes. - -Code taken from ClimaCoupler.Regridder. - -# Arguments -- `REGRID_DIR`: [String] directory to save output files in. -- `hd_outfile_root`: [String] root of the output file name. -- `time`: [Dates.DateTime] the timestamp of the data being written. -- `varname`: [String] variable name of data. -- `space`: [ClimaCore.Spaces.AbstractSpace] to read the HDF5 file onto. 
-# Returns -- Field or FieldVector -""" -function read_from_hdf5(REGRID_DIR, hd_outfile_root, time, varname, space) - comms_ctx = ClimaComms.context(space) - # Include time component in HDF5 reader name if it's a valid date - if !(time == Dates.DateTime(0)) - hdfreader_path = joinpath( - REGRID_DIR, - hd_outfile_root * "_" * varname * "_" * string(time) * ".hdf5", - ) - else - hdfreader_path = - joinpath(REGRID_DIR, hd_outfile_root * "_" * varname * ".hdf5") - end - hdfreader = ClimaCore.InputOutput.HDF5Reader(hdfreader_path, comms_ctx) - - field = ClimaCore.InputOutput.read_field(hdfreader, varname) - Base.close(hdfreader) - - # Ensure the field is on the correct space when reusing regridded files - # between simulations - return swap_space(field, space) -end - - -""" - write_to_hdf5(REGRID_DIR, hd_outfile_root, time, field, varname, - comms_ctx = ClimaComms.SingletonCommsContext()) -Function to save individual HDF5 files after remapping. -If a CommsContext other than SingletonCommsContext is used for `comms_ctx`, -the HDF5 output is readable by multiple MPI processes. - -Code taken from ClimaCoupler.Regridder. - - -# Arguments -- `REGRID_DIR`: [String] directory to save output files in. -- `hd_outfile_root`: [String] root of the output file name. -- `time`: [Dates.DateTime] the timestamp of the data being written. -- `field`: [Fields.Field] object to be written. -- `varname`: [String] variable name of data. -- `comms_ctx`: [ClimaComms.AbstractCommsContext] context used for this operation. 
-""" -function write_to_hdf5( - REGRID_DIR, - hd_outfile_root, - time, - field, - varname, - comms_ctx = ClimaComms.SingletonCommsContext(), -) - # Include time component in HDF5 writer name, and write time to file if it's a valid date - if !(time == Dates.DateTime(0)) - hdfwriter = ClimaCore.InputOutput.HDF5Writer( - joinpath( - REGRID_DIR, - hd_outfile_root * "_" * varname * "_" * string(time) * ".hdf5", - ), - comms_ctx, - ) - - t = Dates.datetime2unix.(time) - ClimaCore.InputOutput.HDF5.write_attribute( - hdfwriter.file, - "unix time", - t, - ) # TODO: a better way to write metadata, CMIP convention - else - hdfwriter = ClimaCore.InputOutput.HDF5Writer( - joinpath(REGRID_DIR, hd_outfile_root * "_" * varname * ".hdf5"), - comms_ctx, - ) - end - ClimaCore.InputOutput.write!(hdfwriter, field, string(varname)) - Base.close(hdfwriter) -end - - -""" - function hdwrite_regridfile_rll_to_cgll( - FT, - REGRID_DIR, - datafile_rll, - varnames, - space, - outfile_root; - mono = false, - ) -Reads and regrids data of all `varnames` variables from an input NetCDF file and -saves it as another NetCDF file using Tempest Remap. -The input NetCDF fileneeds to be `Exodus` formatted, and can contain -time-dependent data. The output NetCDF file is then read back, the output -arrays converted into Fields and saved as HDF5 files (one per time slice). -This function should be called by the root process. -The saved regridded HDF5 output is readable by multiple MPI processes. -Assumes that all variables specified by `varnames` have the same dates -and grid. - -Code taken from ClimaCoupler.Regridder. - - -# Arguments -- `FT`: [DataType] Float type. -- `REGRID_DIR`: [String] directory to save output files in. -- `datafile_rll`: [String] filename of RLL dataset to be mapped to CGLL. -- `varnames`: [Vector{String}] the name of the variable to be remapped. -- `space`: [ClimaCore.Spaces.AbstractSpace] the space to which we are mapping. -- `outfile_root`: [String] root of the output file name. 
-- `mono`: [Bool] flag to specify monotone remapping. -""" -function hdwrite_regridfile_rll_to_cgll( - FT, - REGRID_DIR, - datafile_rll, - varnames::Vector{String}, - space, - outfile_root; - mono = false, -) - out_type = "cgll" - - datafile_cgll = joinpath(REGRID_DIR, outfile_root * ".g") - meshfile_rll = joinpath(REGRID_DIR, outfile_root * "_mesh_rll.g") - meshfile_cgll = joinpath(REGRID_DIR, outfile_root * "_mesh_cgll.g") - meshfile_overlap = joinpath(REGRID_DIR, outfile_root * "_mesh_overlap.g") - weightfile = joinpath(REGRID_DIR, outfile_root * "_remap_weights.nc") - - # If doesn't make sense to regrid with GPUs/MPI processes - cpu_context = - ClimaComms.SingletonCommsContext(ClimaComms.CPUSingleThreaded()) - - # Note: this topology gives us `space == space_undistributed` in the undistributed - # case (as desired), which wouldn't hold if we used `spacefillingcurve` here. - topology = ClimaCore.Topologies.Topology2D( - cpu_context, - ClimaCore.Spaces.topology(space).mesh, - ) - Nq = - ClimaCore.Spaces.Quadratures.polynomial_degree( - ClimaCore.Spaces.quadrature_style(space), - ) + 1 - space_undistributed = ClimaCore.Spaces.SpectralElementSpace2D( - topology, - ClimaCore.Spaces.Quadratures.GLL{Nq}(), - ) - - if isfile(datafile_cgll) == false - isdir(REGRID_DIR) ? nothing : mkpath(REGRID_DIR) - - nlat, nlon = NCDataset(datafile_rll) do ds - (ds.dim["lat"], ds.dim["lon"]) - end - # write lat-lon mesh - rll_mesh(meshfile_rll; nlat = nlat, nlon = nlon) - - # write cgll mesh, overlap mesh and weight file - write_exodus(meshfile_cgll, topology) - overlap_mesh(meshfile_overlap, meshfile_rll, meshfile_cgll) - - # 'in_np = 1' and 'mono = true' arguments ensure mapping is conservative and monotone - # Note: for a kwarg not followed by a value, set it to true here (i.e. pass 'mono = true' to produce '--mono') - # Note: out_np = degrees of freedom = polynomial degree + 1 - kwargs = (; out_type = out_type, out_np = Nq) - kwargs = mono ? 
(; (kwargs)..., in_np = 1, mono = mono) : kwargs - remap_weights( - weightfile, - meshfile_rll, - meshfile_cgll, - meshfile_overlap; - kwargs..., - ) - - apply_remap(datafile_cgll, datafile_rll, weightfile, varnames) - else - @warn "Using the existing $datafile_cgll : check topology is consistent" - end - - function get_time(ds) - if "time" in keys(ds.dim) - data_dates = - Dates.DateTime.( - reinterpret.( - Ref(NCDatasets.DateTimeStandard), - Array(ds["time"]), - ) - ) - elseif "date" in keys(ds.dim) - data_dates = strdate_to_datetime.(string.(Array(ds["date"]))) - else - @warn "No dates available in file $datafile_rll" - data_dates = [Dates.DateTime(0)] - end - end - - # weightfile info needed to populate all nodes and save into fields with - # sparse matrices - _, _, row_indices = NCDataset(weightfile, "r") do ds_wt - (Array(ds_wt["S"]), Array(ds_wt["col"]), Array(ds_wt["row"])) - end - - target_unique_idxs = - out_type == "cgll" ? - collect(ClimaCore.Spaces.unique_nodes(space_undistributed)) : - collect(ClimaCore.Spaces.all_nodes(space_undistributed)) - target_unique_idxs_i = - map(row -> target_unique_idxs[row][1][1], row_indices) - target_unique_idxs_j = - map(row -> target_unique_idxs[row][1][2], row_indices) - target_unique_idxs_e = map(row -> target_unique_idxs[row][2], row_indices) - target_unique_idxs = - (target_unique_idxs_i, target_unique_idxs_j, target_unique_idxs_e) - - R = (; target_idxs = target_unique_idxs, row_indices = row_indices) - - offline_field = ClimaCore.Fields.zeros(FT, space_undistributed) - - times = [DateTime(0)] - # Save regridded HDF5 file for each variable in `varnames` - for varname in varnames - # read the remapped file with sparse matrices - offline_outvector, times = NCDataset(datafile_cgll, "r") do ds_wt - ( - # read the data in, and remove missing type (will error if missing data is present) - offline_outvector = nomissing(Array(ds_wt[varname])[:, :]), # ncol, times - times = get_time(ds_wt), - ) - end - - # Convert input 
data float type if needed - if eltype(offline_outvector) <: AbstractFloat && - eltype(offline_outvector) != FT - @warn "Converting $varname data in $datafile_cgll from $(eltype(offline_outvector)) to $FT" - offline_outvector = Array{FT}(offline_outvector) - end - @assert length(times) == size(offline_outvector, 2) "Inconsistent time dimension in $datafile_cgll for $varname" - - offline_fields = ntuple(x -> similar(offline_field), length(times)) - ntuple( - x -> reshape_cgll_sparse_to_field!( - offline_fields[x], - offline_outvector[:, x], - R, - ), - length(times), - ) - - map( - x -> write_to_hdf5( - REGRID_DIR, - outfile_root, - times[x], - offline_fields[x], - varname, - cpu_context, - ), - 1:length(times), - ) - end - return times -end - -end diff --git a/src/shared_utilities/SpaceVaryingInputs.jl b/src/shared_utilities/SpaceVaryingInputs.jl deleted file mode 100644 index 843012df5e..0000000000 --- a/src/shared_utilities/SpaceVaryingInputs.jl +++ /dev/null @@ -1,165 +0,0 @@ -# SpaceVaryingInputs.jl -# -# This module contains methods to process external data, regrid it onto the -# model grid, and return the corresponding fields for use in the simulation. -# This module only concerns with external data which varies in space, -# and not time. For temporally varying input, we refer you to `TimeVaryingInputs.jl`. - -# All spatially varying parameter fields are assumed to fit into memory, -# and on GPU runs, they have underlying CuArrays on the GPU. 
- -# The planned parameter underlying arrays are: -# - one-dimensional (values prescribed as a function of depth at a site), -# - two-dimensional (values prescribed globally at each lat/lon), -# - three-dimensional (values prescribed as a function of depth globally) -# - analytic (functions of the coordinates of the space) - -module SpaceVaryingInputs -using ClimaCore -using ClimaComms -using DocStringExtensions -import ..searchsortednearest -import ..linear_interpolation -using ClimaLand.FileReader -export SpaceVaryingInput - -# Analytic case -""" - SpaceVaryingInput(data_function::Function, space::ClimaCore.Spaces.AbstractSpace) - -Returns the parameter field to be used in the model; appropriate when -a parameter is defined using a function of the coordinates of the space. - -Pass the ``data" as a function `data_function` which takes coordinates as arguments, -and the ClimaCore space of the model simulation. - -This returns a scalar field. -Note that data_function is broadcasted over the coordinate field. Internally, inside -your function, this must be unpacked (coords.lat, coords.lon, e.g.) for -use of the coordinate values directly. -""" -function SpaceVaryingInput( - data_function::Function, - space::ClimaCore.Spaces.AbstractSpace, -) - model_value = ClimaCore.Fields.zeros(space) - coords = ClimaCore.Fields.coordinate_field(space) - return model_value .= data_function.(coords) -end - -# 1-D Case -""" - function SpaceVaryingInput( - data_z::AbstractArray, - data_values::AbstractArray, - space::S, - ) where {S <: ClimaCore.Spaces.CenterFiniteDifferenceSpace} - -Given a set of depths `data_z` and the observed values `data_values` -at those depths, create an interpolated field of values at each value -of z in the model grid - defined implicitly by `space`. - -Returns a ClimaCore.Fields.Field of scalars. 
-""" -function SpaceVaryingInput( - data_z::AbstractArray, - data_values::AbstractArray, - space::S, -) where {S <: ClimaCore.Spaces.CenterFiniteDifferenceSpace} - model_value = ClimaCore.Fields.zeros(space) - # convert the passed arrays to the appropriate type for the device - device = ClimaComms.device(space) - AT = ClimaComms.array_type(device) - data_values = AT(data_values) - data_z = AT(data_z) - zvalues = ClimaCore.Fields.coordinate_field(space).z - #now create the parameter field - model_value .= map(zvalues) do z - linear_interpolation(data_z, data_values, z) - end - return model_value -end - - -""" - SpaceVaryingInputs.SpaceVaryingInput( - data_z::AbstractArray, - data_values::NamedTuple, - space::S, - dest_type::Type{DT}, - ) where { - S <: ClimaCore.Spaces.CenterFiniteDifferenceSpace, - DT, - } - -Returns a field of parameter structs to be used in the model; -appropriate when the parameter struct values vary in depth; -the `dest_type` argument is the struct type - we assumed that -your struct as a constructor which accepts the values of its arguments -by kwarg, -- `data_z` is where the measured values were obtained, -- `data_values` is a NamedTuple with keys equal to the argument names -of the struct, and with values equal to an array of measured values, -- `space` defines the model grid. - -As an example, we can create a field of vanGenuchten structs as follows. This struct -requires two parameters, `α` and `n`. Let's assume that we have measurements of these -as a function of depth at the locations given by `data_z`, called `data_α` and `data_n`. -Then we can write -`vG_field = SpaceVaryingInput(data_z, (;α = data_α, n = data_n), space, vanGenuchten{Float32})`. -Under the hood, at each point in the model grid, we will create -`vanGenuchten{Float32}(;α = interp_α, n = interp_n)`, where `interp` indicates -the interpolated value at the model depth. - -Returns a ClimaCore.Fields.Field of type DT. 
- -""" -function SpaceVaryingInput( - data_z::AbstractArray, - data_values::NamedTuple, - space::S, - dest_type::Type{DT}, -) where {S <: ClimaCore.Spaces.CenterFiniteDifferenceSpace, DT} - zvalues = ClimaCore.Fields.coordinate_field(space).z - # convert the passed arrays to the appropriate type for the device - device = ClimaComms.device(space) - AT = ClimaComms.array_type(device) - data_z = AT(data_z) - data_values_AT = map(AT, data_values) - # now create the field of structs - model_value = map(zvalues) do z - args = map(data_values_AT) do value - return linear_interpolation(data_z, value, z) - end - DT(; args...) - end - return model_value -end - -""" - SpaceVaryingInput(data::PDS, varname::N; space::S) - where {PDS <: FileReader.PrescribedDataStatic, N <: String, S <: ClimaCore.Spaces.SpectralElement2D} - -Returns the parameter field to be used in the model; appropriate when -a parameter is defined on the surface of the Earth. - -Pass the data as a `FileReader.PrescribedDataStatic` object -as well as the variable name of the variable in the data file, and the ClimaCore space -of the model simulation. - -Returns a ClimaCore.Fields.Field of scalars; analogous to the 1D case which also -returns a ClimaCore.Fields.Field of scalars. -""" -function SpaceVaryingInput( - data::PDS, - varname::N, - space::S, -) where { - PDS <: FileReader.PrescribedDataStatic, - N <: String, - S <: ClimaCore.Spaces.SpectralElementSpace2D, -} - return FileReader.get_data_at_date(data, space, varname) -end - -end diff --git a/src/shared_utilities/TimeVaryingInputs.jl b/src/shared_utilities/TimeVaryingInputs.jl deleted file mode 100644 index 528787f91f..0000000000 --- a/src/shared_utilities/TimeVaryingInputs.jl +++ /dev/null @@ -1,99 +0,0 @@ -# TimeVaryingInputs.jl -# -# This module contains structs and methods to process external data and evaluate it on the -# model. This module only concerns with evaluations in time, not in space. - -# There are three possible sources of data: -# 1. 
Analytic functions that prescribe how a variable has to be set at a given time -# 2. 0D, single-site data, which is assumed small enough to be saved to memory -# 3. 2D, global data, which cannot be saved to memory in its entirety. -# -# The TimeVaryingInputs module introduces a shared interface for the three cases so that -# uses and developers do not have to worry about the details of what type of data will be -# provided. Behind the scenes, we introduce a new type AbstractTimeVaryingInput, that has -# three concrete implementation, corresponding to the three use cases described above. -# Constructors will automatically identify which of the three implementations to use based -# on the input data, and the existence of three concrete structs should be considered an -# implementation detail. -# -# The three TimeVaryingInputs are: -# - AnalyticTimeVaryingInput, -# - InterpolatingTimeVaryingInput0D, -# - InterpolatingTimeVaryingInput3D. -# -# Along side these TimeVaryingInputs, we also define InterpolationMethods that implement -# specific interpolation strategies (e.g., linear interpolation). -# -# In all cases, the TimeVaryingInputs work with simulation time (ie, seconds from the -# beginning of the reference date). It is up to the various TimeVaryingInputs to convert this -# information to an actual date (if needed). - -module TimeVaryingInputs - -import ..searchsortednearest -import ..linear_interpolation - -import Adapt -import CUDA -import ClimaComms -import ClimaCore: DeviceSideDevice, DeviceSideContext - -export AbstractTimeVaryingInput, - AbstractInterpolationMethod, TimeVaryingInput, evaluate! - -""" - AbstractTimeVaryingInput - -Note -===== - -`TimeVaryingInput`s should be considered implementation details. 
The exposed public interface -should only be considered -- `TimeVaryingInput(input; method, context)` for construction, -- `evaluate!(dest, input, time)` for evaluation -""" -abstract type AbstractTimeVaryingInput end - -""" - AbstractInterpolationMethod - -Defines how to perform interpolation. - -Not all the TimeVaryingInputs support all the interpolation methods (e.g., no interpolation -methods are supported when the given function is analytic). -""" -abstract type AbstractInterpolationMethod end - -""" - TimeVaryingInput(func) - TimeVaryingInput(times, vals; method, context) - -Construct on object that knows how to evaluate the given function/data on the model times. - -When passing a function -======================= - -When a function `func` is passed, the function has to be GPU-compatible (e.g., no splines). - -When passing single-site data -============================= - -When a `times` and `vals` are passed, `times` have to be sorted and the two arrays have to -have the same length. - -""" -function TimeVaryingInput end - -""" - evaluate!(dest, input, time) - -Evaluate the `input` at the given `time`, writing the output in-place to `dest`. - -Depending on the details of `input`, this function might do I/O and communication. -""" -function evaluate! 
end - -include("analytic_time_varying_input.jl") -include("interpolating_time_varying_input0d.jl") - -end diff --git a/src/shared_utilities/analytic_time_varying_input.jl b/src/shared_utilities/analytic_time_varying_input.jl deleted file mode 100644 index acd7458d6c..0000000000 --- a/src/shared_utilities/analytic_time_varying_input.jl +++ /dev/null @@ -1,15 +0,0 @@ -struct AnalyticTimeVaryingInput{F <: Function} <: AbstractTimeVaryingInput - # func here as to be GPU-compatible (e.g., splines are not) - func::F -end - -function TimeVaryingInput(input::Function; method = nothing, device = nothing) - isnothing(method) || - @warn "Interpolation method is ignored for analytical functions" - return AnalyticTimeVaryingInput(input) -end - -function evaluate!(dest, input::AnalyticTimeVaryingInput, time) - dest .= input.func(time) - return nothing -end diff --git a/src/shared_utilities/drivers.jl b/src/shared_utilities/drivers.jl index 623288db13..5ec71d3ce8 100644 --- a/src/shared_utilities/drivers.jl +++ b/src/shared_utilities/drivers.jl @@ -1,3 +1,5 @@ +import ClimaUtilities.TimeVaryingInputs: + TimeVaryingInput, AbstractTimeVaryingInput using Thermodynamics using ClimaCore using DocStringExtensions diff --git a/src/shared_utilities/general_utils.jl b/src/shared_utilities/general_utils.jl deleted file mode 100644 index 6759288f33..0000000000 --- a/src/shared_utilities/general_utils.jl +++ /dev/null @@ -1,45 +0,0 @@ -export searchsortednearest -""" - searchsortednearest(a, x) - -Find the index corresponding to the nearest value to `x` in `a`. -""" -function searchsortednearest(a, x) - i = searchsortedfirst(a, x) - if i == 1 # x <= a[1] - return i - elseif i > length(a) # x > a[end] - return length(a) - elseif a[i] == x # x is one of the elements - return i - else # general case - return abs(a[i] - x) < abs(a[i - 1] - x) ? 
i : i - 1 - end -end - - -""" - linear_interpolation(indep_vars, dep_vars, indep_value) - -Carries out linear interpolation to obtain a value at -location `indep_value`, using a independent variable -1-d vector `indep_vars` and a dependent variable -1-d vector `dep_vars`. - -If the `indep_value` is outside the range of `indep_vars`, this -returns the endpoint value closest. -""" -function linear_interpolation(indep_vars, dep_vars, indep_value) - N = length(indep_vars) - id = searchsortedfirst(indep_vars, indep_value) - if id == 1 - dep_vars[begin] - elseif id == N + 1 - dep_vars[end] - else - id_prev = id - 1 - x0, x1 = indep_vars[id_prev], indep_vars[id] - y0, y1 = dep_vars[id_prev], dep_vars[id] - y0 + (y1 - y0) / (x1 - x0) * (indep_value - x0) - end -end diff --git a/src/shared_utilities/interpolating_time_varying_input0d.jl b/src/shared_utilities/interpolating_time_varying_input0d.jl deleted file mode 100644 index 9dde4b3910..0000000000 --- a/src/shared_utilities/interpolating_time_varying_input0d.jl +++ /dev/null @@ -1,160 +0,0 @@ -import CUDA - -""" - NearestNeighbor - -Return the value corresponding to the point closest to the input time. -""" -struct NearestNeighbor <: AbstractInterpolationMethod end - -""" - LinearInterpolation - -Perform linear interpolation between the two neighboring points. -""" -struct LinearInterpolation <: AbstractInterpolationMethod end - -""" - InterpolatingTimeVaryingInput0D - -The constructor for InterpolatingTimeVaryingInput0D is not supposed to be used directly, unless you -know what you are doing. The constructor does not perform any check and does not take care of -GPU compatibility. It is responsibility of the user-facing constructor TimeVaryingInput() to do so. - -`times` and `vales` may have different float types, but they must be the same length, and we -assume that they have been sorted to be monotonically increasing in time, without repeated -values for the same timestamp. 
-""" -struct InterpolatingTimeVaryingInput0D{ - AA1 <: AbstractArray, - AA2 <: AbstractArray, - M <: AbstractInterpolationMethod, - CC <: ClimaComms.AbstractCommsContext, - R <: Tuple, -} <: AbstractTimeVaryingInput - # AA1 and AA2 could be different because of different FTs - - """Independent coordinate""" - times::AA1 - - """Variable""" - vals::AA2 - - """Interpolation method""" - method::M - - """ClimaComms context""" - context::CC - - """Range of times over which the interpolator is defined. range is always defined on the - CPU. Used by the in() function.""" - range::R -end - -# GPU compatibility -function Adapt.adapt_structure(to, itp::InterpolatingTimeVaryingInput0D) - times = Adapt.adapt_structure(to, itp.times) - vals = Adapt.adapt_structure(to, itp.vals) - method = Adapt.adapt_structure(to, itp.method) - range = Adapt.adapt_structure(to, itp.range) - # On a GPU, we have a "ClimaCore.DeviceSideContext" - InterpolatingTimeVaryingInput0D( - times, - vals, - method, - DeviceSideContext(), - range, - ) -end - -function evaluate!(destination, itp::InterpolatingTimeVaryingInput0D, time) - time in itp || error("TimeVaryingInput does not cover time $time") - if ClimaComms.device(itp.context) isa ClimaComms.CUDADevice - CUDA.@cuda evaluate!(parent(destination), itp, time, itp.method) - else - evaluate!(parent(destination), itp, time, itp.method) - end - return nothing -end - -function TimeVaryingInput( - times::AbstractArray, - vals::AbstractArray; - method = LinearInterpolation(), - context = ClimaComms.context(), -) - issorted(times) || error("Can only interpolate with sorted times") - length(times) == length(vals) || - error("times and vals have different lengths") - - # When device is CUDADevice, ArrayType will be a CUDADevice, so that times and vals get - # copied to the GPU. 
- ArrayType = ClimaComms.array_type(ClimaComms.device(context)) - - range = (times[begin], times[end]) - return InterpolatingTimeVaryingInput0D( - ArrayType(times), - ArrayType(vals), - method, - context, - range, - ) -end - -""" - in(time, itp::InterpolatingTimeVaryingInput0D) - -Check if the given `time` is in the range of definition for `itp`. -""" -function Base.in(time, itp::InterpolatingTimeVaryingInput0D) - return itp.range[1] <= time <= itp.range[2] -end - - -function evaluate!( - dest, - itp::InterpolatingTimeVaryingInput0D, - time, - ::NearestNeighbor, -) - # Nearest neighbor interpolation: just pick the values corresponding to the entry in - # itp.times that is closest to the given time. - - index = searchsortednearest(itp.times, time) - - dest .= itp.vals[index] - - return nothing -end - -""" - evaluate!( - dest, - itp::InterpolatingTimeVaryingInput0D, - time, - ::LinearInterpolation, - ) - -Write to `dest` the result of a linear interpolation of `itp` on the given `time`. -""" -function evaluate!( - dest, - itp::InterpolatingTimeVaryingInput0D, - time, - ::LinearInterpolation, -) - # We perform linear interpolation with the two values that bracket the given time. - # itp.times is sorted, so we find the first index that is after `time`, the previous - # index is the other side of the bracket. searchsortedfirst returns the index of the - # first element that is >= than the given. Also, given that we check that range[1] <= - # time <= range[2], index will always be 1 <= index <= length(itp.times), so we have to - # worry about the edge case where time == itp.times (because it returns 1). In that - # case, we just return the value of vals[1] (we are on a node, no need for - # interpolation). 
- - indep_vars = itp.times - indep_value = time - dep_vars = itp.vals - dest .= linear_interpolation(indep_vars, dep_vars, indep_value) - return nothing -end diff --git a/src/shared_utilities/interpolating_time_varying_inputs.jl b/src/shared_utilities/interpolating_time_varying_inputs.jl deleted file mode 100644 index f220071d28..0000000000 --- a/src/shared_utilities/interpolating_time_varying_inputs.jl +++ /dev/null @@ -1,25 +0,0 @@ -InterpolatingTimeVaryingInput = - Union{InterpolatingTimeVaryingInput0D, InterpolatingTimeVaryingInput2D} - -""" - NearestNeighbor - -Return the value corresponding to the point closest to the input time. -""" -struct NearestNeighbor <: AbstractInterpolationMethod end - -""" - LinearInterpolation - -Perform linear interpolation between the two neighboring points. -""" -struct LinearInterpolation <: AbstractInterpolationMethod end - -""" - in(time, itp::InterpolatingTimeVaryingInput0D) - -Check if the given `time` is in the range of definition for `itp`. -""" -function Base.in(time, itp::InterpolatingTimeVaryingInput0D) - return itp.range[1] <= time <= itp.range[2] -end diff --git a/src/standalone/Bucket/Bucket.jl b/src/standalone/Bucket/Bucket.jl index ce3cc77e6a..fba7252074 100644 --- a/src/standalone/Bucket/Bucket.jl +++ b/src/standalone/Bucket/Bucket.jl @@ -3,6 +3,13 @@ using DocStringExtensions using Thermodynamics using Dates using NCDatasets +import ClimaUtilities.TimeVaryingInputs +import ClimaUtilities.DataHandling +import ClimaUtilities.SpaceVaryingInputs +import ClimaUtilities.SpaceVaryingInputs: SpaceVaryingInput +import ClimaUtilities.TimeVaryingInputs: + TimeVaryingInput, AbstractTimeVaryingInput +import ClimaCoreTempestRemap using ClimaCore using ClimaCore.Fields: coordinate_field, level, FieldVector using ClimaCore.Operators: InterpolateC2F, DivergenceF2C, GradientC2F, SetValue @@ -11,7 +18,6 @@ using ClimaComms using ClimaLand -using ClimaLand.FileReader import ..Parameters as LP import ClimaLand.Domains: coordinates, 
SphericalShell using ClimaLand: @@ -103,10 +109,9 @@ end """ PrescribedBaregroundAlbedo{FT}(α_snow::FT, - regrid_dirpath::String, surface_space::ClimaCore.Spaces.AbstractSpace; varnames = ["sw_alb"], - get_infile::Function = Bucket.bareground_albedo_dataset_path, + albedo_file_path::AbstractString = Bucket.bareground_albedo_dataset_path(), ) where{FT} An outer constructor for the PrescribedBaregroundAlbedo model which uses data @@ -116,24 +121,16 @@ This particular method can only be used with global runs. """ function PrescribedBaregroundAlbedo{FT}( α_snow::FT, - regrid_dirpath::String, surface_space::ClimaCore.Spaces.AbstractSpace; varnames = ["sw_alb"], - get_infile::Function = Bucket.bareground_albedo_dataset_path, + albedo_file_path::AbstractString = Bucket.bareground_albedo_dataset_path(), ) where {FT} if surface_space isa ClimaCore.Spaces.PointSpace error("Using an albedo map requires a global run.") end - α_bareground_data = PrescribedDataStatic{FT}( - get_infile, - regrid_dirpath, - varnames, - surface_space, - ) - # Albedo file only has one variable, so access first `varname` - varname = varnames[1] - α_bareground = SpaceVaryingInput(α_bareground_data, varname, surface_space) + α_bareground = + SpaceVaryingInput(albedo_file_path, varnames[begin], surface_space) return PrescribedBaregroundAlbedo{FT, typeof(α_bareground)}( α_snow, α_bareground, @@ -141,7 +138,7 @@ function PrescribedBaregroundAlbedo{FT}( end """ - PrescribedSurfaceAlbedo{FT, FR <: FileReader.PrescribedDataTemporal} + PrescribedSurfaceAlbedo{FT, TV <: AbstractTimeVaryingInput} <: AbstractBucketAlbedoModel An albedo model where the albedo of different surface types @@ -152,14 +149,13 @@ This albedo type changes over time according to the input file. Note that this option should only be used with global simulations, i.e. 
with a `ClimaLand.LSMSphericalShellDomain.` """ -struct PrescribedSurfaceAlbedo{FT, FR <: FileReader.PrescribedDataTemporal} <: +struct PrescribedSurfaceAlbedo{FT, TV <: AbstractTimeVaryingInput} <: AbstractBucketAlbedoModel{FT} - albedo_info::FR + albedo::TV end """ PrescribedSurfaceAlbedo{FT}( - regrid_dirpath::String, date_ref::Union{DateTime, DateTimeNoLeap}, t_start, Space::ClimaCore.Spaces.AbstractSpace; @@ -175,11 +171,10 @@ and download the data if it doesn't already exist on the machine. The input data file must have a time component. """ function PrescribedSurfaceAlbedo{FT}( - regrid_dirpath::String, date_ref::Union{DateTime, DateTimeNoLeap}, t_start, space::ClimaCore.Spaces.AbstractSpace; - get_infile = Bucket.cesm2_albedo_dataset_path, + albedo_file_path = Bucket.cesm2_albedo_dataset_path(), varname = "sw_alb", ) where {FT} # Verify inputs @@ -187,16 +182,17 @@ function PrescribedSurfaceAlbedo{FT}( error("Using an albedo map requires a global run.") end - # Construct object containing info to read in surface albedo over time - data_info = PrescribedDataTemporal{FT}( - regrid_dirpath, - get_infile, - [varname], - date_ref, + data_handler = DataHandling.DataHandler( + albedo_file_path, + varname, + space; + reference_date = date_ref, t_start, - space, ) - return PrescribedSurfaceAlbedo{FT, typeof(data_info)}(data_info) + + # Construct object containing info to read in surface albedo over time + albedo = TimeVaryingInput(data_handler) + return PrescribedSurfaceAlbedo{FT, typeof(albedo)}(albedo) end @@ -460,16 +456,24 @@ function make_update_aux(model::BucketModel{FT}) where {FT} p.bucket.R_n .= net_radiation(model.radiation, model, Y, p, t) # Surface albedo - p.bucket.α_sfc .= - next_albedo(model.parameters.albedo, model.parameters, Y, p, t) + next_albedo!( + p.bucket.α_sfc, + model.parameters.albedo, + model.parameters, + Y, + p, + t, + ) end return update_aux! 
end """ - next_albedo(model_albedo::PrescribedBaregroundAlbedo{FT}, parameters, Y, p, t) where {FT} + next_albedo!(next_α_sfc, + model_albedo::PrescribedBaregroundAlbedo{FT}, + parameters, Y, p, t) -Update the surface albedo for time `t`: the albedo is calculated by +Update the surface albedo for time `t`: the albedo is calculated by linearly interpolating between the albedo of snow and of the bareground surface, based on the snow water equivalent `S` relative to the parameter `S_c`. The linear interpolation is taken from Lague et al 2019. @@ -480,7 +484,8 @@ fractions that are not a heaviside function, we have a small inconsistency for 0 < σS < eps(FT) where the snow cover fraction is zero, but there is a small contribution of snow to the albedo. """ -function next_albedo( +function next_albedo!( + next_α_sfc, model_albedo::PrescribedBaregroundAlbedo{FT}, parameters, Y, @@ -490,42 +495,25 @@ function next_albedo( (; α_snow, α_bareground) = model_albedo (; σS_c) = parameters σS = max.(Y.bucket.σS, FT(0))# if σS is small and negative, clip to zero for albedo estimation - return @. ( - (1 - σS / (σS + σS_c)) * α_bareground + σS / (σS + σS_c) * α_snow - ) + @. next_α_sfc = + ((1 - σS / (σS + σS_c)) * α_bareground + σS / (σS + σS_c) * α_snow) end """ - next_albedo(model_albedo::PrescribedSurfaceAlbedo{FT}, parameters, Y, p, t) where {FT} + next_albedo!(next_α_sfc, model_albedo::PrescribedSurfaceAlbedo{FT}, parameters, Y, p, t) Update the surface albedo for time `t`: for a file containing surface albedo information over time, this reads in the value for time t. 
""" -function next_albedo( +function next_albedo!( + next_α_sfc, model_albedo::PrescribedSurfaceAlbedo{FT}, parameters, Y, p, t, ) where {FT} - # Get the current date from `t` - sim_info = model_albedo.albedo_info.sim_info - sim_date = to_datetime( - sim_info.date_ref + Second(round(sim_info.t_start)) + Second(round(t)), - ) - # Read next data fields if initializing or next date is closest to current time - # This maintains `all_dates[date_idx]` <= `sim_date` < `all_dates[date_idx + 1]` - if t == sim_info.t_start || - sim_date >= to_datetime(next_date_in_file(model_albedo.albedo_info)) - read_data_fields!(model_albedo.albedo_info, sim_date, axes(Y.bucket.W)) - end - # Interpolate data value to current time - return get_data_at_date( - model_albedo.albedo_info, - axes(Y.bucket.W), - "sw_alb", - sim_date, - ) + TimeVaryingInputs.evaluate!(next_α_sfc, model_albedo.albedo, t) end function ClimaLand.get_drivers(model::BucketModel) diff --git a/src/standalone/Vegetation/PlantHydraulics.jl b/src/standalone/Vegetation/PlantHydraulics.jl index ebfbc4598c..7ea8281bb8 100644 --- a/src/standalone/Vegetation/PlantHydraulics.jl +++ b/src/standalone/Vegetation/PlantHydraulics.jl @@ -1,6 +1,9 @@ module PlantHydraulics using ClimaLand -using ClimaLand.TimeVaryingInputs +using ClimaUtilities.TimeVaryingInputs +import ClimaUtilities.TimeVaryingInputs: + TimeVaryingInput, AbstractTimeVaryingInput +import NCDatasets, ClimaCore, Interpolations # Needed to load TimeVaryingInputs using ..ClimaLand.Canopy: AbstractCanopyComponent, update_canopy_prescribed_field!, diff --git a/test/Project.toml b/test/Project.toml index 438b9b3013..e03ba42d14 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -2,11 +2,14 @@ Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595" ArtifactWrappers = "a14bc488-3040-4b00-9dc1-f6467924858a" BSON = "fbb218c0-5317-5bc6-957e-2ee96dd4b1f0" -ClimaParams = "5c42b081-d73a-476f-9059-fd94b934656c" +CFTime = "179af706-886a-5703-950a-314cd64e0468" CSV = 
"336ed68f-0bac-5ca0-87d4-7b16caf5d00b" ClimaComms = "3a4d1b5c-c61d-41fd-a00a-5873ba7a1b0d" ClimaCore = "d414da3d-4745-48bb-8d80-42e94e092884" +ClimaCoreTempestRemap = "d934ef94-cdd4-4710-83d6-720549644b70" ClimaLand = "08f4d4ce-cf43-44bb-ad95-9d2d5f413532" +ClimaParams = "5c42b081-d73a-476f-9059-fd94b934656c" +ClimaUtilities = "b3f4f4ca-9299-4f7f-bd9b-81e1242a7513" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab" diff --git a/test/runtests.jl b/test/runtests.jl index 06f322e97e..e5a1d86781 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -14,15 +14,6 @@ end @safetestset "Domains module tests" begin include("shared_utilities/domains.jl") end -@safetestset "FileReader module tests" begin - include("shared_utilities/file_reader.jl") -end -@safetestset "SpaceVaryingInput module tests" begin - include("shared_utilities/space_varying_inputs.jl") -end -@safetestset "TimeVaryingInput module tests" begin - include("shared_utilities/time_varying_inputs.jl") -end @safetestset "General utilities tests" begin include("shared_utilities/utilities.jl") end diff --git a/test/shared_utilities/drivers.jl b/test/shared_utilities/drivers.jl index be2b635787..3ba415b874 100644 --- a/test/shared_utilities/drivers.jl +++ b/test/shared_utilities/drivers.jl @@ -1,6 +1,7 @@ using ClimaCore using Test using StaticArrays +using ClimaUtilities.TimeVaryingInputs: TimeVaryingInput using ClimaLand FT = Float32 @@ -16,8 +17,7 @@ FT = Float32 FT(1); ) pr = ClimaLand.PrescribedRadiativeFluxes(FT, nothing, nothing, nothing) - liquid_precip = - ClimaLand.TimeVaryingInputs.AnalyticTimeVaryingInput((t) -> -1.0) + liquid_precip = TimeVaryingInput((t) -> -1.0) pp = ClimaLand.PrescribedPrecipitation{FT}(liquid_precip) coords = (; surface = [1, 2, 3]) @test ClimaLand.initialize_drivers(pp, nothing, coords) == @@ -122,8 +122,7 @@ end @test p.drivers.LW_d == [FT(10)] @test p.drivers.θs == [FT(0)] - 
liquid_precip = - ClimaLand.TimeVaryingInputs.AnalyticTimeVaryingInput((t) -> -1.0) + liquid_precip = TimeVaryingInput((t) -> -1.0) pp = ClimaLand.PrescribedPrecipitation{FT}(liquid_precip) precip_update! = ClimaLand.make_update_drivers(pp, nothing) p = (; drivers = ClimaLand.initialize_drivers(pp, nothing, coords)) diff --git a/test/shared_utilities/file_reader.jl b/test/shared_utilities/file_reader.jl deleted file mode 100644 index 0960f9fdab..0000000000 --- a/test/shared_utilities/file_reader.jl +++ /dev/null @@ -1,716 +0,0 @@ -#= - Unit tests for ClimaLand FileReader module -=# - -using ClimaLand -using ClimaLand: Bucket, Regridder, FileReader -using ClimaCore -using ClimaCore: Fields, Meshes, Domains, Topologies, Spaces -using ClimaComms -using Test -using Dates -using NCDatasets - -# Include testing artifacts from Bucket -albedo_temporal_data = Bucket.cesm2_albedo_dataset_path -albedo_bareground_data = Bucket.bareground_albedo_dataset_path - -# Include testing artifacts from test directory -include(joinpath(pkgdir(ClimaLand), "test", "artifacts", "artifacts.jl")) -era5_t2m_sp_u10n_data = era5_t2m_sp_u10n_dataset_path -era5_t2m_sp_u10n_static_data = era5_t2m_sp_u10n_static_dataset_path - -comms_ctx = ClimaComms.SingletonCommsContext() - -# Use two separate regrid dirs to avoid duplicate filenames since files have same varname -regrid_dir_static = joinpath(pkgdir(ClimaLand), "test", "regridding") -regrid_dir_temporal = - joinpath(pkgdir(ClimaLand), "test", "regridding", "temporal") - -""" -Set NaN and Missing to zero (GPU compatible) -""" -function replace_nan_missing!(field::Fields.Field) - # For GPU runs, we perform the substitution on the CPU and move back to GPU - parent_without_NaN_missing = - replace(Array(parent(field)), NaN => 0, missing => 0) - ArrayType = ClimaComms.array_type(ClimaComms.device()) - parent(field) .= ArrayType(parent_without_NaN_missing) -end - -FT = Float32 -@testset "test interpol, FT = $FT" begin - # Setup - x1 = FT(0) - x2 = FT(10) 
- - f1 = FT(-1) - f2 = FT(1) - - # Case 1: x1 < x < x2 - x = FT(5) - @test FileReader.interpol(f1, f2, x - x1, x2 - x1) == 0 - - # Case 2: x1 = x < x2 - x = FT(0) - @test FileReader.interpol(f1, f2, x - x1, x2 - x1) == f1 - - # Case 3: x1 < x = x2 - x = FT(10) - @test FileReader.interpol(f1, f2, x - x1, x2 - x1) == f2 -end - -@testset "test to_datetime, FT = $FT" begin - year = 2000 - dt_noleap = DateTimeNoLeap(year) - @test FileReader.to_datetime(dt_noleap) == DateTime(year) -end - -# Add tests which use TempestRemap here - -# TempestRemap is not built on Windows because of NetCDF support limitations -# `PrescribedData` constructors use TR via a call to `hdwrite_regridfile_rll_to_cgll` -if !Sys.iswindows() - @testset "test PrescribedDataStatic construction, FT = $FT" begin - # setup for test - comms_ctx = ClimaComms.SingletonCommsContext() - radius = FT(6731e3) - Nq = 4 - domain = ClimaCore.Domains.SphereDomain(radius) - mesh = Meshes.EquiangularCubedSphere(domain, 4) - topology = Topologies.Topology2D(comms_ctx, mesh) - quad = Spaces.Quadratures.GLL{Nq}() - surface_space_t = Spaces.SpectralElementSpace2D(topology, quad) - - # Loop over files with one and multiple variables - for (get_infile, varnames) in [ - (albedo_bareground_data, ["sw_alb"]), - (era5_t2m_sp_u10n_static_data, ["t2m", "sp", "u10n"]), - ] - # reset regrid directory between files - isdir(regrid_dir_static) ? 
nothing : mkpath(regrid_dir_static) - - ps_data_spatial = FileReader.PrescribedDataStatic{FT}( - get_infile, - regrid_dir_static, - varnames, - surface_space_t, - ) - - # test that created object exists - @test @isdefined(ps_data_spatial) - @test ps_data_spatial isa FileReader.AbstractPrescribedData - - # test fields that we've passed into constructor as args - @test ps_data_spatial.file_info.infile_path == get_infile() - @test ps_data_spatial.file_info.regrid_dirpath == regrid_dir_static - @test ps_data_spatial.file_info.varnames == varnames - - # test fields which are set internally by constructor - @test ps_data_spatial.file_info.outfile_root == "static_data_cgll" - @test ps_data_spatial.file_info.all_dates == [] - @test ps_data_spatial.file_info.date_idx0 == [] - rm(regrid_dir_static; recursive = true, force = true) - end - end - - @testset "test get_data_at_date for PrescribedDataStatic, FT = $FT" begin - isdir(regrid_dir_static) ? nothing : mkpath(regrid_dir_static) - # test `get_data_at_date` with interpolation (default) - dummy_dates = [DateTime(1999, 1, 1)] - date_idx0 = Int[1] - - # construct space and dummy field - comms_ctx = ClimaComms.SingletonCommsContext() - radius = FT(6731e3) - Nq = 4 - domain_sphere = ClimaCore.Domains.SphereDomain(radius) - mesh = Meshes.EquiangularCubedSphere(domain_sphere, 4) - topology = Topologies.Topology2D(comms_ctx, mesh) - quad = Spaces.Quadratures.GLL{Nq}() - surface_space_t = Spaces.SpectralElementSpace2D(topology, quad) - data_field = ones(surface_space_t) .* FT(0.5) - - # Add 2 variables to `FileInfo`, but only read/write `var1` - varnames = ["var1", "var2"] - outfile_root = "static_data_cgll" - - # write data to dummy HDF5 file, which gets read in by `get_data_at_date` - field = Regridder.write_to_hdf5( - regrid_dir_static, - outfile_root, - Dates.DateTime(0), # dummy date - data_field, - varnames[1], - comms_ctx, - ) - - # create structs manually since we aren't reading from a data file - file_info = 
FileReader.FileInfo( - "", # infile_path - regrid_dir_static, # regrid_dirpath - varnames, # varnames - outfile_root, # outfile_root - dummy_dates, # all_dates - date_idx0, # date_idx0 - ) - - prescribed_data_static = - FileReader.PrescribedDataStatic{typeof(file_info)}(file_info) - - # Read in dummy data that has been written by `write_to_hdf5` - field_out = FileReader.get_data_at_date( - prescribed_data_static, - surface_space_t, - varnames[1], - ) - @test field_out == data_field - rm(regrid_dir_static; recursive = true, force = true) - end - - @testset "test PrescribedDataTemporal construction, FT = $FT" begin - # setup for test - comms_ctx = ClimaComms.SingletonCommsContext() - radius = FT(6731e3) - Nq = 4 - domain = ClimaCore.Domains.SphereDomain(radius) - mesh = Meshes.EquiangularCubedSphere(domain, 4) - topology = Topologies.Topology2D(comms_ctx, mesh) - quad = Spaces.Quadratures.GLL{Nq}() - surface_space_t = Spaces.SpectralElementSpace2D(topology, quad) - - date_idx0 = Int[1] - date_ref = DateTime(1800, 1, 1) - t_start = Float64(0) - - # Loop over files with one and multiple variables - for (get_infile, varnames) in [ - (albedo_temporal_data, ["sw_alb"]), - (era5_t2m_sp_u10n_data, ["t2m", "sp", "u10n"]), - ] - # reset regrid directory between files - isdir(regrid_dir_temporal) ? 
nothing : mkpath(regrid_dir_temporal) - - ps_data_temp = FileReader.PrescribedDataTemporal{FT}( - regrid_dir_temporal, - get_infile, - varnames, - date_ref, - t_start, - surface_space_t, - ) - - # test that created object exists - @test @isdefined(ps_data_temp) - @test ps_data_temp isa FileReader.AbstractPrescribedData - - # test fields that we've passed into constructor as args - @test ps_data_temp.file_info.regrid_dirpath == regrid_dir_temporal - @test ps_data_temp.file_info.varnames == varnames - @test ps_data_temp.file_info.date_idx0 == date_idx0 - @test ps_data_temp.sim_info.date_ref == date_ref - @test ps_data_temp.sim_info.t_start == t_start - - # test fields which are set internally by constructor - @test ps_data_temp.file_info.outfile_root == "temporal_data_cgll" - @test !isnothing(ps_data_temp.file_info.all_dates) - @test sort(collect(keys(ps_data_temp.file_states))) == - sort(varnames) - - # check varnames individually - for varname in varnames - @test ps_data_temp.file_states[varname].date_idx == date_idx0 - @test ps_data_temp.file_states[varname].date_idx == date_idx0 - @test !isnothing(ps_data_temp.file_states[varname].data_fields) - @test !isnothing( - ps_data_temp.file_states[varname].segment_length, - ) - end - rm(regrid_dir_temporal; recursive = true, force = true) - end - end - - @testset "test get_data_at_date for PrescribedDataTemporal, FT = $FT" begin - # test `get_data_at_date` with interpolation (default) - dummy_dates = - Vector(range(DateTime(1999, 1, 1); step = Day(1), length = 100)) - date_idx0 = Int[1] - date_ref = dummy_dates[Int(date_idx0[1]) + 1] - t_start = Float64(0) - date0 = date_ref + Dates.Second(t_start) - - # these values give an `interp_fraction` of 0.5 in `interpol` for ease of testing - segment_length = - [Int(2) * ((date_ref - dummy_dates[Int(date_idx0[1])]).value)] - - # construct space and dummy field - comms_ctx = ClimaComms.SingletonCommsContext() - radius = FT(6731e3) - Nq = 4 - domain_sphere = 
ClimaCore.Domains.SphereDomain(radius) - mesh = Meshes.EquiangularCubedSphere(domain_sphere, 4) - topology = Topologies.Topology2D(comms_ctx, mesh) - quad = Spaces.Quadratures.GLL{Nq}() - surface_space_t = Spaces.SpectralElementSpace2D(topology, quad) - data_fields = (zeros(surface_space_t), ones(surface_space_t)) - - # Add 2 variables to `FileInfo`, but only read/write `var1` - varnames = ["var1", "var2"] - - # create structs manually since we aren't reading from a data file - file_info = FileReader.FileInfo( - "", # infile_path - "", # regrid_dirpath - varnames, # varnames - "", # outfile_root - dummy_dates, # all_dates - date_idx0, # date_idx0 - ) - file_state1 = FileReader.FileState( - data_fields, # data_fields - copy(date_idx0), # date_idx - segment_length, # segment_length - ) - file_state2 = FileReader.FileState( - data_fields, # data_fields - copy(date_idx0), # date_idx - segment_length, # segment_length - ) - file_states = - Dict(varnames[1] => file_state1, varnames[2] => file_state2) - sim_info = FileReader.SimInfo( - date_ref, # date_ref - t_start, # t_start - ) - - pd_args = (file_info, file_states, sim_info) - pd_type_args = ( - typeof(file_info), - typeof(varnames[1]), - typeof(file_states[varnames[1]]), - typeof(sim_info), - ) - prescribed_data = - FileReader.PrescribedDataTemporal{pd_type_args...}(pd_args...) - - # Note: we expect this to give a warning "No dates available in file..." 
- @test FileReader.get_data_at_date( - prescribed_data, - surface_space_t, - varnames[1], - date0, - ) == ones(surface_space_t) .* FT(0.5) - end - - @testset "test next_date_in_file with PrescribedDataTemporal, FT = $FT" begin - dummy_dates = - Vector(range(DateTime(1999, 1, 1); step = Day(1), length = 10)) - date_ref = dummy_dates[1] - t_start = Float64(0) - date0 = date_ref + Dates.Second(t_start) - date_idx0 = - [argmin(abs.(Dates.value(date0) .- Dates.value.(dummy_dates[:])))] - varname = "var" - - # manually create structs to avoid creating space for outer constructor - file_info = FileReader.FileInfo( - "", # infile_path - "", # regrid_dirpath - [varname], # varnames - "", # outfile_root - dummy_dates, # all_dates - date_idx0, # date_idx0 - ) - file_state = FileReader.FileState( - nothing, # data_fields - copy(date_idx0), # date_idx - Int[], # segment_length - ) - file_states = Dict(varname => file_state) - sim_info = nothing - - pd_args = (file_info, file_states, sim_info) - pd_type_args = ( - typeof(file_info), - typeof(varname), - typeof(file_states[varname]), - typeof(sim_info), - ) - prescribed_data = - FileReader.PrescribedDataTemporal{pd_type_args...}(pd_args...) - - # read in next dates, manually compare to `dummy_dates` array - idx = date_idx0[1] - next_date = date0 - for i in 1:(length(dummy_dates) - 1) - current_date = next_date - next_date = FileReader.next_date_in_file(prescribed_data) - - @test next_date == dummy_dates[idx + 1] - - prescribed_data.file_states[varname].date_idx[1] += 1 - idx = date_idx0[1] + i - end - end - - @testset "test read_data_fields! for single variable, FT = $FT" begin - # Create regridding directory - isdir(regrid_dir_temporal) ? 
nothing : mkpath(regrid_dir_temporal) - - get_infile = albedo_temporal_data - infile_path = get_infile() - varname = "sw_alb" - varnames = [varname] - - # Start with first date in data file - date0 = NCDataset(infile_path) do ds - ds["time"][1] - end - date0 = FileReader.to_datetime(date0) - dates = collect(date0:Day(10):(date0 + Day(100))) # includes both endpoints - date_ref = date0 - t_start = Float64(0) - - comms_ctx = ClimaComms.SingletonCommsContext() - radius = FT(6731e3) - Nq = 4 - domain = ClimaCore.Domains.SphereDomain(radius) - mesh = Meshes.EquiangularCubedSphere(domain, 4) - topology = Topologies.Topology2D(comms_ctx, mesh) - quad = Spaces.Quadratures.GLL{Nq}() - surface_space_t = Spaces.SpectralElementSpace2D(topology, quad) - - prescribed_data = FileReader.PrescribedDataTemporal{FT}( - regrid_dir_temporal, - get_infile, - varnames, - date_ref, - t_start, - surface_space_t, - ) - - # Test each case (1-4) - current_fields = - Fields.zeros(FT, surface_space_t), Fields.zeros(FT, surface_space_t) - - # Use this function to reset values between test cases - function reset_ps_data(ps_data) - ps_data.file_states[varname].data_fields[1] .= current_fields[1] - ps_data.file_states[varname].data_fields[2] .= current_fields[2] - ps_data.file_states[varname].segment_length[1] = - ps_data.file_states[varname].date_idx[1] = - ps_data.file_info.date_idx0[1] = Int(1) - end - - # Case 1: test date before first date of file, and date aligned with first date of file - for date in [date0 - Day(1), date0] - reset_ps_data(prescribed_data) - prescribed_data_copy = prescribed_data - FileReader.read_data_fields!(prescribed_data, date, surface_space_t) - - # Test that both data fields store the same data - # Remove NaNs and missings before comparison - (; data_fields) = prescribed_data.file_states[varname] - - foreach(replace_nan_missing!, data_fields) - - @test prescribed_data.file_states[varname].data_fields[1] == - prescribed_data.file_states[varname].data_fields[2] - # 
date_idx and date_idx0 should be unchanged - @test prescribed_data.file_states[varname].segment_length == Int[0] - @test prescribed_data.file_states[varname].date_idx[1] == - prescribed_data_copy.file_states[varname].date_idx[1] - @test prescribed_data.file_info.date_idx0[1] == - prescribed_data_copy.file_info.date_idx0[1] - end - - # Case 2: test date after dates in file - # This case should print 1 warning "this time period is after input data..." - reset_ps_data(prescribed_data) - date_after = prescribed_data.file_info.all_dates[end] + Dates.Day(1) - prescribed_data_copy = prescribed_data - FileReader.read_data_fields!( - prescribed_data, - date_after, - surface_space_t, - ) - - # Test that both data fields store the same data - @test prescribed_data.file_states[varname].segment_length == Int[0] - - # Remove NaNs and missings before comparison - (; data_fields) = prescribed_data.file_states[varname] - foreach(replace_nan_missing!, data_fields) - - @test prescribed_data.file_states[varname].data_fields[1] == - prescribed_data.file_states[varname].data_fields[2] - - # Case 3: loop over simulation dates (general case) - # This case should print 3 warnings "updating data files: ..." - data_saved = [] - updating_dates = [] - - # Read in data fields over dummy times - # With the current test setup, read_data_fields! 
should get called 3 times - for date in dates - callback_date = FileReader.next_date_in_file(prescribed_data) - - if (date >= callback_date) - FileReader.read_data_fields!( - prescribed_data, - date, - surface_space_t, - ) - push!( - data_saved, - deepcopy( - prescribed_data.file_states[varname].data_fields[1], - ), - ) - push!(updating_dates, deepcopy(date)) - end - end - - # Replace NaNs and missings for testing - (; data_fields) = prescribed_data.file_states[varname] - foreach(replace_nan_missing!, data_saved) - - # Manually read in data from HDF5 - f = prescribed_data.file_states[varname].data_fields[1] - data_manual = [similar(f), similar(f), similar(f)] - (; regrid_dirpath, outfile_root, varnames, all_dates) = - prescribed_data.file_info - for i in eachindex(data_saved) - data_manual[i] = Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - all_dates[i + 1], - varnames[1], - surface_space_t, - ) - # Replace NaNs and missings for testing comparison - replace_nan_missing!(data_manual[i]) - - @test parent(data_saved[i]) == parent(data_manual[i]) - end - - # Test that the data_saved array was modified - @test length(data_saved) > 0 - # Check that the final saved date is as expected (midmonth day of last month) - midmonth_end = - DateTime(year(dates[end]), month(dates[end]), 15, hour(dates[end])) - @test updating_dates[end] == midmonth_end - - # Case 4: everything else - reset_ps_data(prescribed_data) - prescribed_data.file_states[varname].date_idx[1] = - prescribed_data.file_info.date_idx0[1] + Int(1) - date = - prescribed_data.file_info.all_dates[prescribed_data.file_states[varname].date_idx[1]] - - Dates.Day(1) - - @test_throws ErrorException FileReader.read_data_fields!( - prescribed_data, - date, - surface_space_t, - ) - # Delete regridding directory and files - rm(regrid_dir_temporal; recursive = true, force = true) - end - - @testset "test read_data_fields! 
for multiple variables, FT = $FT" begin - # Create regridding directory - isdir(regrid_dir_temporal) ? nothing : mkpath(regrid_dir_temporal) - - get_infile = era5_t2m_sp_u10n_data - infile_path = get_infile() - varnames = ["t2m", "sp", "u10n"] - - # Start with first date in data file - date0 = NCDataset(infile_path) do ds - ds["time"][1] - end - date0 = FileReader.to_datetime(date0) - # Gives dates [00:45, 01:30, 02:15, 03:00, 03:45, 04:30, 05:15, 06:00, 06:45, 07:30] - dates = collect(date0:Minute(45):(date0 + Minute(450))) # includes both endpoints - date_ref = date0 - t_start = Float64(0) - - comms_ctx = ClimaComms.SingletonCommsContext() - radius = FT(6731e3) - Nq = 4 - domain = ClimaCore.Domains.SphereDomain(radius) - mesh = Meshes.EquiangularCubedSphere(domain, 4) - topology = Topologies.Topology2D(comms_ctx, mesh) - quad = Spaces.Quadratures.GLL{Nq}() - surface_space_t = Spaces.SpectralElementSpace2D(topology, quad) - - prescribed_data = FileReader.PrescribedDataTemporal{FT}( - regrid_dir_temporal, - get_infile, - varnames, - date_ref, - t_start, - surface_space_t, - ) - - # Test each case (1-4) - current_fields = - Fields.zeros(FT, surface_space_t), Fields.zeros(FT, surface_space_t) - - # Use this function to reset values between test cases - function reset_ps_data(ps_data) - for varname in varnames - ps_data.file_states[varname].data_fields[1] .= current_fields[1] - ps_data.file_states[varname].data_fields[2] .= current_fields[2] - ps_data.file_states[varname].segment_length[1] = - ps_data.file_states[varname].date_idx[1] = - ps_data.file_info.date_idx0[1] = Int(1) - end - end - - # Case 1: test date before first date of file, and date aligned with first date of file - for date in [date0 - Day(1), date0] - reset_ps_data(prescribed_data) - prescribed_data_copy = prescribed_data - FileReader.read_data_fields!(prescribed_data, date, surface_space_t) - - # Test that both data fields store the same data - # Remove NaNs and missings before comparison - for 
varname in varnames - (; data_fields) = prescribed_data.file_states[varname] - - foreach(replace_nan_missing!, data_fields) - - @test prescribed_data.file_states[varname].data_fields[1] == - prescribed_data.file_states[varname].data_fields[2] - # date_idx and date_idx0 should be unchanged - @test prescribed_data.file_states[varname].segment_length == - Int[0] - @test prescribed_data.file_states[varname].date_idx[1] == - prescribed_data_copy.file_states[varname].date_idx[1] - @test prescribed_data.file_info.date_idx0[1] == - prescribed_data_copy.file_info.date_idx0[1] - end - end - - # Case 2: test date after dates in file - # This case should print 1 warning "this time period is after input data..." - reset_ps_data(prescribed_data) - date_after = prescribed_data.file_info.all_dates[end] + Dates.Day(1) - prescribed_data_copy = prescribed_data - FileReader.read_data_fields!( - prescribed_data, - date_after, - surface_space_t, - ) - - for varname in varnames - # Test that both data fields store the same data - @test prescribed_data.file_states[varname].segment_length == Int[0] - @test prescribed_data.file_states[varname].data_fields[1] == - prescribed_data.file_states[varname].data_fields[2] - end - - # Case 3: loop over simulation dates (general case) - # This case should print 3 warnings "updating data files: ..." - reset_ps_data(prescribed_data) - - # Read in data fields over dummy times - # With the current test setup, read_data_fields! 
should get called 3 times - data_saved = Dict{String, Vector{Fields.Field}}( - varnames[1] => [], - varnames[2] => [], - varnames[3] => [], - ) - updating_dates = [] - for date in dates - callback_date = FileReader.next_date_in_file(prescribed_data) - - if (date >= callback_date) - FileReader.read_data_fields!( - prescribed_data, - date, - surface_space_t, - ) - for varname in varnames - push!( - data_saved[varname], - deepcopy( - prescribed_data.file_states[varname].data_fields[1], - ), - ) - # Replace NaNs and missings for testing - replace_nan_missing!(data_saved[varname][end]) - end - - push!(updating_dates, callback_date) - end - end - - # Manually read in data from HDF5 - data_manual = Dict{String, Vector{Fields.Field}}( - varnames[1] => [], - varnames[2] => [], - varnames[3] => [], - ) - (; regrid_dirpath, outfile_root, varnames, all_dates) = - prescribed_data.file_info - - for i in eachindex(updating_dates) - for varname in varnames - push!( - data_manual[varname], - Regridder.read_from_hdf5( - regrid_dirpath, - outfile_root, - updating_dates[i], - varname, - surface_space_t, - ), - ) - # Replace NaNs and missings for testing comparison - replace_nan_missing!(data_manual[varname][end]) - end - end - - # Test read_data_fields and manually read data are the same - for varname in varnames - @test all( - parent(data_saved[varname]) .== parent(data_manual[varname]), - ) - end - - # Test that the data_saved array was modified - @test length(data_saved) > 0 - # Check that the final saved date is as expected (midmonth day of last month) - midmonth_end = DateTime( - year(dates[end]), - month(dates[end]), - day(dates[end]), - hour(dates[end]), - ) - @test updating_dates[end] == midmonth_end - - # Case 4: everything else - reset_ps_data(prescribed_data) - for varname in varnames - prescribed_data.file_states[varname].date_idx[1] = - prescribed_data.file_info.date_idx0[1] + Int(1) - date = - 
prescribed_data.file_info.all_dates[prescribed_data.file_states[varname].date_idx[1]] - - Dates.Day(1) - - @test_throws ErrorException FileReader.read_data_fields!( - prescribed_data, - date, - surface_space_t, - ) - end - # Delete regridding directory and files - rm(regrid_dir_temporal; recursive = true, force = true) - end -end - -# Delete testing directory and files -rm(regrid_dir_static; recursive = true, force = true) -rm(regrid_dir_temporal; recursive = true, force = true) diff --git a/test/shared_utilities/space_varying_inputs.jl b/test/shared_utilities/space_varying_inputs.jl deleted file mode 100644 index f8cefc732f..0000000000 --- a/test/shared_utilities/space_varying_inputs.jl +++ /dev/null @@ -1,79 +0,0 @@ -using Test -import ClimaLand -using ClimaLand.SpaceVaryingInputs: SpaceVaryingInput -using ClimaLand: Domains - -using ClimaCore: Fields -using ClimaComms - -device = ClimaComms.device() -context = ClimaComms.SingletonCommsContext(device) -AT = ClimaComms.array_type(device) -# This tests the analytic and 1d cases of the SpaceVaryingInput function -@testset "SpaceVaryingInput" begin - FT = Float32 - zmin = FT(-1.0) - zmax = FT(0.0) - xlim = FT.((0.0, 10.0)) - ylim = FT.((0.0, 1.0)) - zlim = FT.((zmin, zmax)) - nelements = (1, 1, 10) - radius = FT(100) - depth = FT(30) - n_elements_sphere = (6, 20) - npoly_sphere = 3 - shell = Domains.SphericalShell(; - radius = radius, - depth = depth, - nelements = n_elements_sphere, - npolynomial = npoly_sphere, - ) - - box = Domains.HybridBox(; - xlim = xlim, - ylim = ylim, - zlim = zlim, - nelements = nelements, - npolynomial = 0, - ) - - column = Domains.Column(; zlim = zlim, nelements = nelements[3]) - - domains = [shell, box, column] - analytic_func = (coords) -> 2.0 - for domain in domains - for space in domain.space - coords = Fields.coordinate_field(space) - @test SpaceVaryingInput(analytic_func, space) == - FT.(analytic_func.(coords)) - end - end - - # 1D cases - data_z = collect(range(FT(-1.0), FT(0.0), 
11)) - data_value = data_z .* 2 - space = column.space.subsurface - field = SpaceVaryingInput(data_z, data_value, space) - @test parent(field)[:] ≈ AT(collect(range(FT(-1.9), FT(-0.1), 10))) - - struct Tmp{FT} - a::FT - b::FT - c::FT - function Tmp{FT}(; a::FT, c::FT) where {FT} - b = a * 2 - new{FT}(a, b, c) - end - end - data_values = (; a = data_z .* 2, c = data_z .* 3) - field_of_structs = SpaceVaryingInput(data_z, data_values, space, Tmp{FT}) - @test eltype(field_of_structs) == Tmp{FT} - @test field_of_structs.a == field - @test field_of_structs.b == 2 .* field_of_structs.a - @test parent(field_of_structs.c)[:] ≈ - AT(collect(range(FT(-2.85), FT(-0.15), 10))) - - # 2D SpaceVaryingInput - # the 2d from netcdf + regridding is tested now implicitly in the albedo_types test, since the BulkAlbedoStatic - # uses SpaceVaryingInput now. -end diff --git a/test/shared_utilities/time_varying_inputs.jl b/test/shared_utilities/time_varying_inputs.jl deleted file mode 100644 index 973cc51fd3..0000000000 --- a/test/shared_utilities/time_varying_inputs.jl +++ /dev/null @@ -1,104 +0,0 @@ -using Test -import ClimaLand -import ClimaLand: TimeVaryingInputs - -import ClimaCore: Domains, Geometry, Fields, Meshes, Topologies, Spaces -import ClimaComms - -device = ClimaComms.device() -context = ClimaComms.SingletonCommsContext(device) - -@testset "Analytic TimeVaryingInput" begin - fun = (x) -> 2x - input = TimeVaryingInputs.TimeVaryingInput(fun) - - FT = Float32 - - # Prepare a field - domain = Domains.IntervalDomain( - Geometry.ZPoint{FT}(0), - Geometry.ZPoint{FT}(5), - boundary_names = (:bottom, :top), - ) - mesh = Meshes.IntervalMesh(domain; nelems = 10) - topology = Topologies.IntervalTopology(context, mesh) - - column_space = Spaces.CenterFiniteDifferenceSpace(topology) - column_field = Fields.zeros(column_space) - - TimeVaryingInputs.evaluate!(column_field, input, 10.0) - @test Array(parent(column_field))[1] == fun(10.0) -end - -@testset "Temporal TimeVaryingInput 1D" begin - 
# Check times not sorted - xs = [1.0, 0.0] - ys = [1.0, 2.0] - - @test_throws ErrorException TimeVaryingInputs.TimeVaryingInput(xs, ys) - - for FT in (Float32, Float64) - # Prepare spaces/fields - - domain = Domains.IntervalDomain( - Geometry.ZPoint{FT}(0), - Geometry.ZPoint{FT}(5), - boundary_names = (:bottom, :top), - ) - mesh = Meshes.IntervalMesh(domain; nelems = 10) - topology = Topologies.IntervalTopology(context, mesh) - - column_space = Spaces.CenterFiniteDifferenceSpace(topology) - point_space = Spaces.level(column_space, 1) - column_field = Fields.zeros(column_space) - point_field = Fields.zeros(point_space) - - times = collect(range(FT(0), FT(8π), 100)) - vals = sin.(times) - - # Nearest neighbor interpolation - input = TimeVaryingInputs.TimeVaryingInput( - times, - vals; - context, - method = TimeVaryingInputs.NearestNeighbor(), - ) - - # Test in - @test FT(3.0) in input - @test !(FT(-3.0) in input) - - # Test with different types of spaces - for dest in (point_field, column_field) - # Time outside of range - @test_throws ErrorException TimeVaryingInputs.evaluate!( - dest, - input, - FT(-4), - ) - - TimeVaryingInputs.evaluate!(dest, input, times[10]) - - @test Array(parent(dest))[1] == vals[10] - - # Linear interpolation - input = TimeVaryingInputs.TimeVaryingInput(times, vals; context) - - TimeVaryingInputs.evaluate!(dest, input, 0.1) - - index = searchsortedfirst(times, 0.1) - @test times[index - 1] <= 0.1 <= times[index] - expected = - vals[index - 1] + - (vals[index] - vals[index - 1]) / - (times[index] - times[index - 1]) * (0.1 - times[index - 1]) - - @test Array(parent(dest))[1] ≈ expected - - # Check edge case - TimeVaryingInputs.evaluate!(dest, input, 0.0) - - @test Array(parent(dest))[1] ≈ 0.0 - end - end -end diff --git a/test/shared_utilities/utilities.jl b/test/shared_utilities/utilities.jl index 035f0bf51c..cf66addd3e 100644 --- a/test/shared_utilities/utilities.jl +++ b/test/shared_utilities/utilities.jl @@ -252,13 +252,3 @@ end @test 
Y.subfields.subfield2 != Y_copy.subfields.subfield2 end end - -@testset "searchsortednearest" begin - A = 10 * collect(range(1, 10)) - - @test searchsortednearest(A, 0) == 1 - @test searchsortednearest(A, 1000) == 10 - @test searchsortednearest(A, 20) == 2 - @test searchsortednearest(A, 21) == 2 - @test searchsortednearest(A, 29) == 3 -end diff --git a/test/standalone/Bucket/albedo_types.jl b/test/standalone/Bucket/albedo_types.jl index 93a54d18a1..c3857b26fa 100644 --- a/test/standalone/Bucket/albedo_types.jl +++ b/test/standalone/Bucket/albedo_types.jl @@ -1,14 +1,17 @@ using Test +using ClimaUtilities.TimeManager +using ClimaUtilities.DataHandling +using ClimaUtilities.SpaceVaryingInputs: SpaceVaryingInput +using ClimaUtilities.TimeVaryingInputs: TimeVaryingInput using ClimaCore using ClimaCore: Geometry, Meshes, Domains, Topologies, Spaces, Fields using ClimaComms +import NCDatasets, ClimaCoreTempestRemap import ClimaParams as CP using Dates using NCDatasets -using ClimaLand.Regridder: read_from_hdf5 -using ClimaLand.FileReader: next_date_in_file, to_datetime, get_data_at_date using ClimaLand.Bucket: BucketModel, BucketModelParameters, @@ -16,15 +19,15 @@ using ClimaLand.Bucket: PrescribedSurfaceAlbedo, bareground_albedo_dataset_path, cesm2_albedo_dataset_path, - next_albedo + next_albedo! using ClimaLand.Domains: coordinates, Column, SphericalShell using ClimaLand: initialize, make_update_aux, make_set_initial_cache, PrescribedAtmosphere, - PrescribedRadiativeFluxes, - TimeVaryingInput + PrescribedRadiativeFluxes + # Bucket model parameters import ClimaLand @@ -55,11 +58,6 @@ function replace_nan_missing!(field::Fields.Field) end FT = Float32 -# Use two separate regrid dirs to avoid duplicate filenames since files have same varname -regrid_dir_static = joinpath(pkgdir(ClimaLand), "test", "static") -regrid_dir_temporal = joinpath(pkgdir(ClimaLand), "test", "temporal") -isdir(regrid_dir_static) ? nothing : mkpath(regrid_dir_static) -isdir(regrid_dir_temporal) ? 
nothing : mkpath(regrid_dir_temporal) @testset "Test next_albedo for PrescribedBaregroundAlbedo, from a function, FT = $FT" begin # set up each argument for function call @@ -88,7 +86,9 @@ isdir(regrid_dir_temporal) ? nothing : mkpath(regrid_dir_temporal) Y_σS / (Y_σS + param_σS_c) * a_α_snow ) - @test next_albedo(albedo, parameters, Y, p, FT(0)) == next_alb_manual + next_albedo!(p.bucket.α_sfc, albedo, parameters, Y, p, FT(0)) + + @test p.bucket.α_sfc == next_alb_manual end # Add tests which use TempestRemap here - @@ -104,8 +104,7 @@ if !Sys.iswindows() # set up each argument for function call α_snow = FT(0.8) - albedo = - PrescribedBaregroundAlbedo{FT}(α_snow, regrid_dir_static, space) + albedo = PrescribedBaregroundAlbedo{FT}(α_snow, space) σS_c = FT(0.2) parameters = (; σS_c = σS_c) @@ -126,7 +125,8 @@ if !Sys.iswindows() Y_σS / (Y_σS + param_σS_c) * a_α_snow ) - @test next_albedo(albedo, parameters, Y, p, FT(0)) == next_alb_manual + next_albedo!(p.bucket.α_sfc, albedo, parameters, Y, p, FT(0)) + @test p.bucket.α_sfc == next_alb_manual end @testset "Test next_albedo for PrescribedSurfaceAlbedo, FT = $FT" begin @@ -136,24 +136,20 @@ if !Sys.iswindows() surface_coords = Fields.coordinate_field(space) infile_path = cesm2_albedo_dataset_path() - date_ref = to_datetime(NCDataset(infile_path, "r") do ds + date_ref_noleap = NCDataset(infile_path, "r") do ds ds["time"][1] - end) + end + date_ref = CFTime.reinterpret(DateTime, date_ref_noleap) t_start = Float64(0) - albedo = PrescribedSurfaceAlbedo{FT}( - regrid_dir_temporal, - date_ref, - t_start, - space, - ) + albedo = PrescribedSurfaceAlbedo{FT}(date_ref, t_start, space) Y = (; bucket = (; W = Fields.zeros(space))) p = (; bucket = (; α_sfc = Fields.zeros(space))) # set up for manual data reading varname = "sw_alb" - file_dates = albedo.albedo_info.file_info.all_dates + file_dates = DataHandling.available_dates(albedo.albedo.data_handler) new_date = date_ref + Second(t_start) t_curr = t_start @@ -161,12 +157,13 @@ 
if !Sys.iswindows() @assert new_date == file_dates[i] # manually read in data at this date (not testing interpolation) - field = - get_data_at_date(albedo.albedo_info, space, varname, new_date) - albedo_next = next_albedo(albedo, (;), Y, p, t_curr) - replace_nan_missing!(albedo_next) - replace_nan_missing!(field) - @test albedo_next == field + field = DataHandling.regridded_snapshot( + albedo.albedo.data_handler, + new_date, + ) + + next_albedo!(p.bucket.α_sfc, albedo, (;), Y, p, t_curr) + @test p.bucket.α_sfc == field # Update manual date to match next date in file dt = Second(file_dates[i + 1] - file_dates[i]) @@ -179,8 +176,6 @@ if !Sys.iswindows() earth_param_set = LP.LandParameters(FT) varname = "sw_alb" path = bareground_albedo_dataset_path() - regrid_dirpath = joinpath(pkgdir(ClimaLand), "test/albedo_tmpfiles/") - mkpath(regrid_dirpath) α_snow = FT(0.8) σS_c = FT(0.2) W_f = FT(0.15) @@ -203,14 +198,10 @@ if !Sys.iswindows() for bucket_domain in bucket_domains if bucket_domain isa SphericalShell surface_space = bucket_domain.space.surface - albedo = PrescribedBaregroundAlbedo{FT}( - α_snow, - regrid_dirpath, - surface_space, - ) + albedo = PrescribedBaregroundAlbedo{FT}(α_snow, surface_space) # Radiation - ref_time = DateTime(2005) + ref_time = DateTime(2005, 1, 15, 12) SW_d = (t) -> 0.0 LW_d = (t) -> 5.67e-8 * 280.0^4.0 bucket_rad = PrescribedRadiativeFluxes( @@ -256,59 +247,25 @@ if !Sys.iswindows() set_initial_cache!(p, Y, FT(0.0)) # Read in data manually - outfile_root = "static_data_cgll" comms_ctx = ClimaComms.context(surface_space) - field = read_from_hdf5( - regrid_dirpath, - outfile_root, - Dates.DateTime(0), # dummy date - varname, - surface_space, - ) - replace_nan_missing!(field) - data_manual = field - - @test p.bucket.α_sfc == data_manual + α_bareground = SpaceVaryingInput(path, varname, surface_space) + @test p.bucket.α_sfc == α_bareground else surface_space = bucket_domain.space.surface @test_throws "Using an albedo map requires a global run." 
PrescribedBaregroundAlbedo{ FT, }( α_snow, - regrid_dirpath, surface_space, ) end end - rm(regrid_dirpath, recursive = true) - end - - @testset "Test PrescribedSurfaceAlbedo error with static map, FT = $FT" begin - get_infile = bareground_albedo_dataset_path - date_ref = Dates.DateTime(1900, 1, 1) - t_start = Float64(0) - - # Test error for non-time varying data - domain = create_domain_2d(FT) - space = domain.space.surface - - @test_throws "Using a temporal albedo map requires data with time dimension." PrescribedSurfaceAlbedo{ - FT, - }( - regrid_dir_temporal, - date_ref, - t_start, - space, - get_infile = get_infile, - ) end @testset "Test PrescribedSurfaceAlbedo - albedo from map over time, FT = $FT" begin earth_param_set = LP.LandParameters(FT) varname = "sw_alb" infile_path = cesm2_albedo_dataset_path() - regrid_dirpath = joinpath(pkgdir(ClimaLand), "test/albedo_tmpfiles/") - mkpath(regrid_dirpath) σS_c = FT(0.2) W_f = FT(0.15) @@ -319,9 +276,10 @@ if !Sys.iswindows() init_temp(z::FT, value::FT) where {FT} = FT(value) t_start = Float64(0) - file_dates = to_datetime(NCDataset(infile_path, "r") do ds + file_dates_noleap = NCDataset(infile_path, "r") do ds ds["time"][:] - end) + end + file_dates = CFTime.reinterpret.(Ref(DateTime), file_dates_noleap) date_ref = file_dates[1] bucket_domains = [ @@ -337,14 +295,10 @@ if !Sys.iswindows() for bucket_domain in bucket_domains space = bucket_domain.space.surface if bucket_domain isa SphericalShell - albedo_model = PrescribedSurfaceAlbedo{FT}( - regrid_dirpath, - date_ref, - t_start, - space, - ) + albedo_model = + PrescribedSurfaceAlbedo{FT}(date_ref, t_start, space) # Radiation - ref_time = DateTime(2005) + ref_time = DateTime(2005, 1, 15, 12) SW_d = (t) -> 0 LW_d = (t) -> 5.67e-8 * 280.0^4.0 bucket_rad = PrescribedRadiativeFluxes( @@ -360,7 +314,7 @@ if !Sys.iswindows() q_atmos = (t) -> 0.0 # no atmos water h_atmos = FT(1e-8) P_atmos = (t) -> 101325 - ref_time = DateTime(2005) + ref_time = DateTime(2005, 1, 15, 12) 
bucket_atmos = PrescribedAtmosphere( TimeVaryingInput(precip), TimeVaryingInput(precip), @@ -394,15 +348,11 @@ if !Sys.iswindows() Y.bucket.σS .= 0.0 set_initial_cache! = make_set_initial_cache(model) set_initial_cache!(p, Y, FT(0.0)) - data_manual = get_data_at_date( - albedo_model.albedo_info, - model.domain.space.surface, - varname, + data_manual = DataHandling.regridded_snapshot( + albedo_model.albedo.data_handler, date_ref, ) - # If there are any NaNs in the input data, replace them so we can compare results - replace_nan_missing!(p.bucket.α_sfc) - replace_nan_missing!(data_manual) + @test p.bucket.α_sfc == data_manual update_aux! = make_update_aux(model) @@ -412,14 +362,11 @@ if !Sys.iswindows() @assert new_date == file_dates[i] update_aux!(p, Y, t_curr) - data_manual = get_data_at_date( - albedo_model.albedo_info, - model.domain.space.surface, - varname, - file_dates[i], + data_manual = DataHandling.regridded_snapshot( + albedo_model.albedo.data_handler, + new_date, ) - replace_nan_missing!(p.bucket.α_sfc) - replace_nan_missing!(data_manual) + @test p.bucket.α_sfc == data_manual # Update manual date to match next date in file @@ -431,17 +378,11 @@ if !Sys.iswindows() @test_throws "Using an albedo map requires a global run." 
PrescribedSurfaceAlbedo{ FT, }( - regrid_dirpath, date_ref, t_start, space, ) end end - rm(regrid_dirpath, recursive = true) end end - -# Delete testing directory and files -rm(regrid_dir_static; recursive = true, force = true) -rm(regrid_dir_temporal; recursive = true, force = true) diff --git a/test/standalone/Soil/runoff.jl b/test/standalone/Soil/runoff.jl index 1f5e273de2..62d451cd78 100644 --- a/test/standalone/Soil/runoff.jl +++ b/test/standalone/Soil/runoff.jl @@ -1,6 +1,8 @@ +import ClimaUtilities +import ClimaUtilities.TimeVaryingInputs: TimeVaryingInput using ClimaLand using Test -using ClimaCore +using ClimaCore, NCDatasets FT = Float32 @testset "Base runoff functionality, FT = $FT" begin runoff = ClimaLand.Soil.Runoff.NoRunoff() @@ -44,7 +46,7 @@ end return fieldvals end - precip = ClimaLand.TimeVaryingInput(precip_function) + precip = TimeVaryingInput(precip_function) atmos = ClimaLand.PrescribedPrecipitation{FT, typeof(precip)}(precip) noflux = ClimaLand.Soil.WaterFluxBC((p, t) -> 0.0) @@ -115,7 +117,7 @@ end return fieldvals end - precip = ClimaLand.TimeVaryingInput(precip_function) + precip = TimeVaryingInput(precip_function) atmos = ClimaLand.PrescribedPrecipitation{FT, typeof(precip)}(precip) noflux = ClimaLand.Soil.WaterFluxBC((p, t) -> 0.0) diff --git a/test/standalone/Vegetation/canopy_model.jl b/test/standalone/Vegetation/canopy_model.jl index e89f286e0d..9562f34b94 100644 --- a/test/standalone/Vegetation/canopy_model.jl +++ b/test/standalone/Vegetation/canopy_model.jl @@ -6,7 +6,7 @@ using Dates using StaticArrays using ClimaLand using ClimaLand: PrescribedAtmosphere, PrescribedRadiativeFluxes -using ClimaLand: TimeVaryingInput +using ClimaUtilities.TimeVaryingInputs: TimeVaryingInput, evaluate! using ClimaLand.Canopy using ClimaLand.Canopy.PlantHydraulics using ClimaLand.Domains: Point