diff --git a/doc/sample_config_MCOnly.yaml b/doc/sample_config_MCOnly.yaml new file mode 100644 index 000000000..7f901751d --- /dev/null +++ b/doc/sample_config_MCOnly.yaml @@ -0,0 +1,45 @@ +# $ python -m nwm_routing -f -V4 sample_MCOnly.yaml + +# Usage: Parameters that have "#REQUIRED" must be updated by user. All other parameters +# should not be altered. + +# This example serves as the simplest configuration file one can use to perform a +# Muskingum-Cunge (MC) only routing simulation. This means there is no reservoir routing +# (underlying flowpaths within waterbodies are routed using MC), no data assimilation, and +# no diffusive routing. + +# This defaults to computing routing in serial. If you would like to enable parallel routing, +# you can adjust the 'compute_parameters -> parallel_compute_method:' and +# 'compute_parameters -> cpu_pool:' (see documentation). + +# This defaults to creating 1 netcdf output file for every hour of simulation time, each file +# containing 12 time steps (1 every 5 minutes, t-route's default time step). Output files +# are configurable, see documentation. 
+ +#-------------------------------------------------------------------------------- +log_parameters: + #---------- + showtiming: True + log_level : DEBUG +#-------------------------------------------------------------------------------- +network_topology_parameters: + #---------- + supernetwork_parameters: + #---------- + geo_file_path: #REQUIRED +#-------------------------------------------------------------------------------- +compute_parameters: + #---------- + restart_parameters: + #---------- + start_datetime: #REQUIRED + forcing_parameters: + #---------- + nts : #REQUIRED + qlat_input_folder : #REQUIRED + qlat_file_pattern_filter: #REQUIRED +#-------------------------------------------------------------------------------- +output_parameters: + #---------- + stream_output : + stream_output_directory: #REQUIRED diff --git a/doc/sample_config_MC_with_waterbodies.yaml b/doc/sample_config_MC_with_waterbodies.yaml new file mode 100644 index 000000000..77b0e484f --- /dev/null +++ b/doc/sample_config_MC_with_waterbodies.yaml @@ -0,0 +1,47 @@ +# $ python -m nwm_routing -f -V4 sample_MC_with_waterbodies.yaml + +# Usage: Parameters that have "#REQUIRED" must be updated by user. All other parameters +# should not be altered. + +# This example serves as a simple configuration file one can use to perform a +# Muskingum-Cunge (MC) with reservoir routing simulation. This means there is no data assimilation +# and no diffusive routing. + +# This defaults to computing routing in serial. If you would like to enable parallel routing, +# you can adjust the 'compute_parameters -> parallel_compute_method:' and +# 'compute_parameters -> cpu_pool:' (see documentation). + +# This defaults to creating 1 netcdf output file for every hour of simulation time, each file +# containing 12 time steps (1 every 5 minutes, t-route's default time step). Output files +# are configurable, see documentation. 
+ +#-------------------------------------------------------------------------------- +log_parameters: + #---------- + showtiming: True + log_level : DEBUG +#-------------------------------------------------------------------------------- +network_topology_parameters: + #---------- + supernetwork_parameters: + #---------- + geo_file_path: #REQUIRED + waterbody_parameters: + #---------- + break_network_at_waterbodies: True +#-------------------------------------------------------------------------------- +compute_parameters: + #---------- + restart_parameters: + #---------- + start_datetime: #REQUIRED + forcing_parameters: + #---------- + nts : #REQUIRED + qlat_input_folder : #REQUIRED + qlat_file_pattern_filter: #REQUIRED +#-------------------------------------------------------------------------------- +output_parameters: + #---------- + stream_output : + stream_output_directory: #REQUIRED diff --git a/doc/v4_config_outline.yaml b/doc/v4_config_outline.yaml deleted file mode 100644 index c8bb915eb..000000000 --- a/doc/v4_config_outline.yaml +++ /dev/null @@ -1,189 +0,0 @@ -# Organization outline for t-route V4 configuration files: - -#-------------------------------------------------------------------------------- -log_parameters: - #---------- - showtiming: - log_level : -#-------------------------------------------------------------------------------- -bmi_parameters: - #---------- - flowpath_columns: - attributes_columns: - waterbody_columns: - network_columns: -#-------------------------------------------------------------------------------- -network_topology_parameters: - #---------- - supernetwork_parameters: - #---------- - title_string: - geo_file_path: - network_type: - mask_file_path: - mask_layer_string: - mask_driver_string: - mask_key: - columns: - #---------- - key: - downstream: - dx: - n: - ncc: - s0: - bw: - waterbody: - tw: - twcc: - alt: - musk: - musx: - cs: - gages: - synthetic_wb_segments: - synthetic_wb_id_offset: - duplicate_wb_segments: - 
terminal_code: - driver_string: - layer_string: - waterbody_parameters: - #---------- - break_network_at_waterbodies: - level_pool: - #---------- - level_pool_waterbody_parameter_file_path: - level_pool_waterbody_id: - waterbody_null_code: - preprocessing_parameters: - #---------- - preprocess_only: - preprocess_output_folder: - preprocess_output_filename: - use_preprocessed_data: - preprocess_source_file: -#-------------------------------------------------------------------------------- -compute_parameters: - #---------- - parallel_compute_method: - compute_kernel: - assume_short_ts: - subnetwork_target_size: - cpu_pool: - return_courant: - restart_parameters: - #---------- - start_datetime: - lite_channel_restart_file: - lite_waterbody_restart_file: - wrf_hydro_channel_restart_file: - wrf_hydro_channel_ID_crosswalk_file: - wrf_hydro_channel_ID_crosswalk_file_field_name: - wrf_hydro_channel_restart_upstream_flow_field_name: - wrf_hydro_channel_restart_downstream_flow_field_name: - wrf_hydro_channel_restart_depth_flow_field_name: - wrf_hydro_waterbody_restart_file: - wrf_hydro_waterbody_ID_crosswalk_file: - wrf_hydro_waterbody_ID_crosswalk_file_field_name: - wrf_hydro_waterbody_crosswalk_filter_file: - wrf_hydro_waterbody_crosswalk_filter_file_field_name: - hyfeature_channel_restart_file: - hybrid_parameters: - #---------- - run_hybrid_routing: - diffusive_domain: - use_natl_xsections: - topobathy_domain: - run_refactored_network: - refactored_domain: - refactored_topobathy_domain: - coastal_boundary_domain: - forcing_parameters: - #---------- - qts_subdivisions: - dt: - qlat_input_folder: - nts: - max_loop_size: - qlat_file_index_col: - qlat_file_value_col: - qlat_file_gw_bucket_flux_col: - qlat_file_terrain_runoff_col: - qlat_file_pattern_filter: - qlat_forcing_sets: - #---------- - nts: - #---------- - qlat_files: - binary_nexus_file_folder: - coastal_boundary_input_file: - data_assimilation_parameters: - #---------- - usgs_timeslices_folder: - 
usace_timeslices_folder: - timeslice_lookback_hours: - interpolation_limit_min: - wrf_hydro_lastobs_lead_time_relative_to_simulation_start_time: - wrf_lastobs_type: - streamflow_da: - #---------- - treamflow_nudging: - gage_segID_crosswalk_file: - crosswalk_gage_field: - crosswalk_segID_field: - lastobs_file: - lastobs_output_folder: - diffusive_streamflow_nudging: - reservoir_da: - #---------- - reservoir_persistence_da: - #---------- - reservoir_persistence_usgs: - reservoir_persistence_usace: - crosswalk_usgs_gage_field: - crosswalk_usace_gage_field: - crosswalk_usgs_lakeID_field: - crosswalk_usace_lakeID_field: - reservoir_rfc_da: - #---------- - reservoir_rfc_forecasts: - reservoir_rfc_forecasts_time_series_path: - reservoir_rfc_forecasts_lookback_hours: - reservoir_rfc_forecasts_offset_hours: - reservoir_rfc_forecast_persist_days: - reservoir_parameter_file: - qc_threshold: -#-------------------------------------------------------------------------------- -output_parameters: - #---------- - chanobs_output: - #---------- - chanobs_output_directory: - chanobs_filepath: - csv_output: - #---------- - csv_output_folder: - csv_output_segments: - chrtout_output: - #---------- - wrf_hydro_channel_output_source_folder: - lite_restart: - #---------- - lite_restart_output_directory: - hydro_rst_output: - #---------- - wrf_hydro_restart_dir: - wrf_hydro_channel_restart_pattern_filter: - wrf_hydro_channel_restart_source_directory: - wrf_hydro_channel_output_source_folder: - wrf_hydro_parity_check: - #---------- - parity_check_input_folder: - parity_check_file_index_col: - parity_check_file_value_col: - parity_check_compare_node: - parity_check_compare_file_sets: - #---------- - validation_files: - lakeout_output: - test_output: \ No newline at end of file diff --git a/src/troute-config/troute/config/compute_parameters.py b/src/troute-config/troute/config/compute_parameters.py index 26b21aa76..686ee4125 100644 --- a/src/troute-config/troute/config/compute_parameters.py +++ 
b/src/troute-config/troute/config/compute_parameters.py @@ -23,53 +23,130 @@ class ComputeParameters(BaseModel): + """ + Parameters specific to the routing simulation. + """ parallel_compute_method: ParallelComputeMethod = "by-network" + """ + parallel computing scheme used during simulation, options below + - "serial": no parallelization + - "by-network": parallelization across independent drainage basins + - "by-subnetwork-jit": parallelization across subnetworks + - "by-subnetwork-jit-clustered": parallelization across subnetworks, with clustering to optimize scaling + """ compute_kernel: ComputeKernel = "V02-structured" + """ + routing engine used for simulation + - "V02-structured" - Muskingum Cunge + NOTE: There are two other options that were previously being developed for use with the diffusive kernel, + but they are now depricated: + - "diffusive" - Diffusive with adaptive timestepping + - "diffusice_cnt" - Diffusive with CNT numerical solution + TODO: Remove these additional options? And this parameter altogether as there is only one option? + """ assume_short_ts: bool = False + """ + If True the short timestep assumption used in WRF hyro is used. if False, the assumption is dropped. + """ subnetwork_target_size: int = 10000 + """ + The target number of segments per subnetwork, only needed for "by-subnetwork..." parallel schemes. + The magnitude of this parameter affects parallel scaling. This is to improve efficiency. Default value has + been tested as the fastest for CONUS simultions. For smaller domains this can be reduced. + """ cpu_pool: Optional[int] = 1 + """ + Number of CPUs used for parallel computations + If parallel_compute_method is anything but 'serial', this determines how many cpus to use for parallel processing. + """ return_courant: bool = False + """ + If True, Courant metrics are returnd with simulations. This only works for MC simulations + """ - # TODO: default appears to be {}. 
see nhd_io.read_config_file ~:138 restart_parameters: "RestartParameters" = Field(default_factory=dict) - # TODO: default appears to be {}. see nhd_io.read_config_file ~:138 hybrid_parameters: "HybridParameters" = Field(default_factory=dict) - # TODO: default appears to be {}. see nhd_io.read_config_file ~:138 forcing_parameters: "ForcingParameters" = Field(default_factory=dict) - # TODO: default appears to be {}. see nhd_io.read_config_file ~:138 - data_assimilation_parameters: "DataAssimilationParameters" = Field( - default_factory=dict - ) + data_assimilation_parameters: "DataAssimilationParameters" = Field(default_factory=dict) # TODO: determine how to handle context specific required fields # TODO: consider other ways to handle wrf hydro fields (i.e. subclass) class RestartParameters(BaseModel): - # NOTE: this is technically optional as it can be derived from the - # `wrf_hydro_channel_restart_file` if the `start_datetime` is not provided. + """ + Parameters specifying warm-state simulation conditions. + """ start_datetime: Optional[datetime] = None + """ + Time of model initialization (timestep zero). Datetime format should be %Y-%m-%d_%H:%M, e.g., 2023-04-25_00:00 + This start time will control which forcing files and TimeSlice files are required for the simulation. + If the start time is erroneously enertered, such that there are no available forcing files, then the simulation will fail. + Likewise, if there are no TimeSlice files available, then data assimilation will not occur. + NOTE: The default is 'None' because the start date can be determined from restart files + such as 'lite_channel_restart_file' or 'wrf_hydro_channel_restart_file'. But if no restart + file is provided, this parameter is required. + """ lite_channel_restart_file: Optional[FilePath] = None + """ + Filepath to a 'lite' channel restart file create by a previous t-route simulation. 
If a file is specified, then it will be + given preference over WRF restart files for a simulation restart. + """ lite_waterbody_restart_file: Optional[FilePath] = None + """ + Filepath to a 'lite' waterbody restart file create by a previous t-route simulation. If a file is specified, then it will be + given preference over WRF restart files for a simulation restart. + """ wrf_hydro_channel_restart_file: Optional[FilePath] = None - # NOTE: if `wrf_hydro_channel_restart_file` is given, `wrf_hydro_channel_ID_crosswalk_file` is required + """ + Filepath to WRF Hydro HYDRO_RST file. This file does not need to be timed with start_datetime, which allows initial states + from one datetime to initialize a simulation with forcings starting at a different datetime. However, if the start_datetime + parameter is not specified, then the time attribute in the channel restart file will be used as the starting time of the simulation. + """ wrf_hydro_channel_ID_crosswalk_file: Optional[FilePath] = None - + """ + Filepath to channel geometry file. + NOTE: if `wrf_hydro_channel_restart_file` is given, `wrf_hydro_channel_ID_crosswalk_file` is required + """ wrf_hydro_channel_ID_crosswalk_file_field_name: Optional[str] = None + """ + Field name of segment IDs in restart file. + """ wrf_hydro_channel_restart_upstream_flow_field_name: Optional[str] = None + """ + Field name of upstream flow in restart file. + """ wrf_hydro_channel_restart_downstream_flow_field_name: Optional[str] = None + """ + Field name of downstream flow in restart file. + """ wrf_hydro_channel_restart_depth_flow_field_name: Optional[str] = None + """ + Field name of depth in restart file. + """ wrf_hydro_waterbody_restart_file: Optional[FilePath] = None - # NOTE: required if `wrf_hydro_waterbody_restart_file` + """ + Filepath to waterbody restart file. This is often the same as wrf_hydro_channel_restart_file. 
+ """ wrf_hydro_waterbody_ID_crosswalk_file: Optional[FilePath] = None + """ + Filepath to lake parameter file. + NOTE: required if `wrf_hydro_waterbody_restart_file` + """ wrf_hydro_waterbody_ID_crosswalk_file_field_name: Optional[str] = None + """ + Field name of waterbody ID. + """ wrf_hydro_waterbody_crosswalk_filter_file: Optional[FilePath] = None + """ + Filepath to channel geometry file. + """ wrf_hydro_waterbody_crosswalk_filter_file_field_name: Optional[str] = None - - # TODO: missing from `v3_doc.yaml` - # TODO: shorvath: I think we can remove this... - hyfeature_channel_restart_file: Optional[FilePath] = None + """ + Fieldname of waterbody IDs in channel geometry file. + """ + _coerce_datetime = validator("start_datetime", pre=True, allow_reuse=True)( coerce_datetime @@ -78,74 +155,181 @@ class RestartParameters(BaseModel): # TODO: determine how to handle context specific required fields class HybridParameters(BaseModel): - # NOTE: required for hybrid simulations + """ + Parameters controlling the use of MC/diffusive hybrid simulations. Only include/populate these parameters if an + MC/diffusive hybrid simulations is desired. + """ run_hybrid_routing: bool = False - # NOTE: required for hybrid simulations + """ + Boolean parameter whether or not hybrid routing is actived. If it is set to True, the hybrid routing is activated. + If false, MC is solely used for channel flow routing. + NOTE: required for hybrid simulations + """ diffusive_domain: Optional[FilePath] = None - + """ + Filepath to diffusive domain dictionary file. This file can be either JSON or yaml and contain a dictionary + of diffusive network segments, organized by tailwater ID (keys). This is a file such as: + https://github.com/NOAA-OWP/t-route/blob/master/test/LowerColorado_TX_v4/domain/coastal_domain_tw.yaml + This file defines tailwater and head water flowpath IDs for the diffusive domain. See file for more info. 
+ NOTE: required for hybrid simulations + """ use_natl_xsections: bool = False - # NOTE: required for diffusive routing for natural cross sections + """ + Boolean parameter whether or not natural cross section data is used. If it is set to True, diffusive model + uses natural cross section data. If False, diffusive model uses synthetic cross section defined by RouteLink.nc + """ topobathy_domain: Optional[FilePath] = None - - # TODO: missing from `v3_doc.yaml` + """ + Filepath to topobathy data for channel cross sections. Currently (June 25, 2024), 3D cross section data + is contained in a separate file, which this parameter should point to. In the future this data may simply be + included in the hydrofabric. + Topobathy data of a channel cross section is defined by comid. + NOTE: Required for diffusive routing for natural cross sections. + """ run_refactored_network: bool = False - # TODO: missing from `v3_doc.yaml` + """ + Boolean parameter whether or not to run the diffusive module on a refactored network. This was necessary on + the NHD network due to short segments causing computational issues. Not needed for HYFeatures. + """ refactored_domain: Optional[FilePath] = None - # TODO: missing from `v3_doc.yaml` + """ + A file with refactored flowpaths to eliminate short segments. + NOTE: Only needed for NHD network. + """ refactored_topobathy_domain: Optional[FilePath] = None - # TODO: missing from `v3_doc.yaml` + """ + A file with refactored topobathy data. + NOTE: Only needed for NHD network. + """ coastal_boundary_domain: Optional[FilePath] = None + """ + File containing crosswalk between diffusive tailwater segment IDs and coastal model output node IDs. + This is needed if t-route will use outputs from a coastal model as the downstream boundary condition for + the diffusive module. 
See example: + https://github.com/NOAA-OWP/t-route/blob/master/test/LowerColorado_TX_v4/domain/coastal_domain_crosswalk.yaml + NOTE: This is related to the ForcingParameters -> coastal_boundary_input_file parameter. + """ class QLateralForcingSet(BaseModel): + """ + Forcing files and number of timesteps associated with each simulation loop. This is optional, only include if + explicitly listing the forcing files in each set. If this variable is not present, make sure nts, + qlat_file_pattern_filter, and max_loop_size variables are listed. + NOTE: Using nts, qlat_input_folder, qlat_file_pattern_filter, and max_loop_size is the preferred method. + """ nts: "QLateralFiles" + """ + Number of timesteps in loop iteration 1. This corresponds to the number of files listed in qlat_files. + This parameter is repeated for as many iterations as are desired. + """ class QLateralFiles(BaseModel): qlat_files: List[FilePath] + """ + List of forcing file names to be used in a single iteration. + """ class StreamflowDA(BaseModel): - # NOTE: mandatory for streamflow DA, defaults to False + """ + Parameters controlling streamflow nudging DA + """ streamflow_nudging: bool = False - # NOTE: mandatory for streamflow DA on NHDNetwork. + """ + Boolean, determines whether or not streamflow nudging is performed. + NOTE: Mandatory for streamflow DA + """ gage_segID_crosswalk_file: Optional[FilePath] = None - - # TODO: not sure if these are dependent on anything + """ + File relating stream gage IDs to segment links in the model domain. This is typically the RouteLink file. + NOTE: Mandatory for streamflow DA on NHDNetwork. Not necessary on HYFeatures as this information is included + in the hydrofabric. + """ crosswalk_gage_field: Optional[str] = 'gages' + """ + Column name for gages in gage_segID_crosswalk_file. + NOTE: Not necessary on HYFeatures. 
+ """ crosswalk_segID_field: Optional[str] = 'link' - - # NOTE: required for analysis and - # TODO: changed the name of this parameter from "wrf_hydro_lastobs_file" to "lastobs_file" - # Need to update this in t-route as well. + """ + Column name for flowpaths/links in gage_segID_crosswalk_file. + NOTE: Not necessary on HYFeatures. + """ lastobs_file: Optional[FilePath] = None - - # TODO: missing from `v3_doc.yaml` - # see troute/DataAssimilation.py :57 - # see troute/nhd_network_utilities_v02.py :765 + """ + File containing information on the last streamflow observations that were assimilated from a previous t-route run. + This is used for a 'warm' restart. Mostly used for operational NWM settings. + """ diffusive_streamflow_nudging: bool = False + """ + If True, enable streamflow data assimilation in diffusive module. + NOTE: Not yet implemented, leave as False. (June 25, 2024) + """ -class ReservoirPersistenceDA(BaseModel, extra='ignore'): - # NOTE: mandatory for USGS reservoir DA, defaults to False +class ReservoirPersistenceDA(BaseModel): + """ + Parameters controlling persistence reservoir DA. This if for USGS/USACE reservoirs. + """ reservoir_persistence_usgs: bool = False - # NOTE: mandatory for USACE reservoir DA, defaults to False + """ + If True, USGS reservoirs will perform data assimilation. + """ reservoir_persistence_usace: bool = False - # NOTE: mandatory for USACE reservoir DA, defaults to False + """ + If True, USACE reservoirs will perform data assimilation. + """ reservoir_persistence_greatLake: bool = False + """ + If True, Great Lakes will perform data assimilation. + """ crosswalk_usgs_gage_field: str = "usgs_gage_id" + """ + Column name designation in files for USGS gages. + """ crosswalk_usace_gage_field: str = "usace_gage_id" + """ + Column name designation in files for USACE gages. + """ crosswalk_usgs_lakeID_field: str = "usgs_lake_id" + """ + Column name designation in files for USGS lake IDs. 
+ """ crosswalk_usace_lakeID_field: str = "usace_lake_id" + """ + Column name designation in files for USACE lake IDs. + """ class ReservoirRfcParameters(BaseModel): + """ + Parameters controlling RFC reservoirs DA. + """ reservoir_rfc_forecasts: Literal[True] = True + """ + If True, RFC reservoirs will perform data assimilation. + """ reservoir_rfc_forecasts_time_series_path: Optional[DirectoryPath] = None + """ + Directory containing RFC timeseries files. + NOTE: Required if reservoir_rfc_forecasts is True. + """ reservoir_rfc_forecasts_lookback_hours: int = 28 + """ + Hours to look back in time from simulation time for RFC timeseries files. + """ reservoir_rfc_forecasts_offset_hours: int = 28 + """ + Offset hours forward in time from simulation time to look for files. + This helps find the most recent RFC timeseries files for operational NWM use. + """ reservoir_rfc_forecast_persist_days: int = 11 + """ + Days to persist an observation when no new, good observations can be found. + """ class ReservoirRfcParametersDisabled(BaseModel): @@ -153,36 +337,70 @@ class ReservoirRfcParametersDisabled(BaseModel): class ReservoirDA(BaseModel): + """ + Parameters controlling reservoir DA. + """ reservoir_persistence_da: Optional[ReservoirPersistenceDA] = None reservoir_rfc_da: Optional[ Union[ReservoirRfcParameters, ReservoirRfcParametersDisabled] ] = Field(None, discriminator="reservoir_rfc_forecasts") reservoir_parameter_file: Optional[FilePath] = None + """ + File conaining reservoir parameters (e.g., reservoir_index_AnA.nc). + NOTE: Needed for NHDNetwork, but not HYFeatures as this information is included in the hydrofabric. + """ class DataAssimilationParameters(BaseModel, extra='ignore'): - # NOTE: required for streamflow nudging and/or USGS reservoir DA + """ + Parameters controlling data assimilation. + """ usgs_timeslices_folder: Optional[DirectoryPath] = None - # NOTE: required for USACE reservoir DA + """ + Directory path to usgs timeslice files. 
+ NOTE: required for streamflow nudging and/or USGS reservoir DA + """ usace_timeslices_folder: Optional[DirectoryPath] = None - # NOTE: required for canada reservoir DA + """ + Directory path to usace timeslice files. + NOTE: required for USACE reservoir DA + """ canada_timeslices_folder: Optional[DirectoryPath] = None - # NOTE: required for LakeOntario reservoir DA + """ + Directory path to canadian timeslice files. + NOTE: required for Lake Erie DA (and streamflow nudging using Canadian gages, though that has not been + implemented as of June 25, 2024). + """ LakeOntario_outflow: Optional[FilePath] = None - # NOTE: required for reservoir DA - suggested value 24 (1 days) + """ + CSV file containing DA values for Lake Ontario. Needs to be obtained and pre-processed from https://ijc.org/en/loslrb/watershed/flows. + NOTE: Required for Lake Ontario DA. + """ timeslice_lookback_hours: int = 24 - + """ + Number of hours to look back in time (from simulation time) for USGS, USACE, and Canadian timeslice data assimilation files. + """ interpolation_limit_min: int = 59 + """ + Limit on how many missing values can be replaced by linear interpolation from timeslice files. + """ wrf_hydro_lastobs_lead_time_relative_to_simulation_start_time: int = 0 + """ + Lead time of lastobs relative to simulation start time (secs). + NOTE: Only relevant if using a WRF-Hydro lastobs restart file. + """ wrf_lastobs_type: str = "obs-based" + streamflow_da: StreamflowDA = None - # NOTE: this appears to be optional. See nwm_routing/input.py ~:439 reservoir_da: Optional[ReservoirDA] = None - # NOTE: not present in v3_doc.yaml - # see troute/nhd_network_utilities_v02.py ~:801 qc_threshold: float = Field(1, ge=0, le=1) + """ + Threshold for determining which observations are deemed acceptable for DA and which are not. If the values is set to 1, + then only the very best observations are retained. 
On the other hand, if the value is set to 0, then all observations will be + used for assimilation, even those markesd as very poor quality. + """ _coerce_none_to_default = validator( "timeslice_lookback_hours", "qc_threshold", pre=True, allow_reuse=True @@ -190,36 +408,66 @@ class DataAssimilationParameters(BaseModel, extra='ignore'): class ForcingParameters(BaseModel): + """ + Parameters controlling model forcing. + """ qts_subdivisions: int = 12 + """ + The number of routing simulation timesteps per qlateral time interval. For example, if dt_qlateral = 3600 secs, + and dt = 300 secs, then qts_subdivisions = 3600/300 = 12 + """ dt: int = 300 - # TODO: see note about potentially throwing in v3_doc.yaml - # aaraney: this is optional if `qlat_forcing_sets` is provided + """ + Time step size (seconds). Default is 5 mintues + """ qlat_input_folder: Optional[DirectoryPath] = None - # TODO: mandatory if loop sets will be automatically created nts: Optional[int] = 288 + """ + Number of timesteps. This value, multiplied by 'dt', gives the total simulation time in seconds. + """ max_loop_size: int = 24 - # NOTE: determine if okay to use this default + """ + Value is in hours. To handle memory issues, t-route can divvy it's simulation time into chunks, reducing the amount + of forcing and data assimilation files it reads into memory at once. This is the size of those time loops. + """ qlat_file_index_col: str = "feature_id" + """ + Name of column containing flowpath/nexus IDs + """ qlat_file_value_col: str = "q_lateral" + """ + Name of column containing q_lateral data. + """ qlat_file_gw_bucket_flux_col: str = "qBucket" + """ + Groundwater bucket flux (to channel) variable name in forcing file. + NOTE: Only needed if using WRF-Hydro output files (CHRTOUT) as forcing files. + """ qlat_file_terrain_runoff_col: str = "qSfcLatRunoff" + """ + Surface terrain runoff (to channel) variable name in forcing file. 
+ NOTE: Only needed if using WRF-Hydro output files (CHRTOUT) as forcing files. + """ qlat_file_pattern_filter: Optional[str] = "*NEXOUT" - # NOTE: - # If this variable is not present, make sure nts, qlat_file_pattern_filter, and - # max_loop_size variables are listed above. - qlat_forcing_sets: Optional[List[QLateralForcingSet]] = None + """ + Globbing file pattern to identify q_lateral forcing files. + """ - # TODO: shorvath: We might be able to remove binary_nexus_file_folder. - # This converts ngen output .csv files into parquet files for t-route. + qlat_forcing_sets: Optional[List[QLateralForcingSet]] = None binary_nexus_file_folder: Optional[DirectoryPath] = None + """ + Directory to save converted forcing files. Only needed if running t-route as part of ngen suite AND if t-route is having memory issues. + NOTE: Exlpanation: Ngen outputs q_lateral files as 1 file per nexus containing all timesteps. t-route requires 1 file per timestep + containing all locations. If this parameter is omitted or left blank, t-route will simply read in all of ngen's output q_lateral files + into memory and will attempt routing. If the simulation is large (temporally and/or spatially), t-route might crash due to memory issues. + By providing a directory to this parameter, t-route will convert ngen's output q_lateral files into parquet files in the format t-route + needs. Then, during routing, t-route will only read the required parquet files as determined by 'max_loop_size', thus reducing memory. + """ coastal_boundary_input_file: Optional[FilePath] = None - # NOTE: aaraney: seen as: - # in code : "*.NEXOUT", "*NEXOUT*", - # in config: "*NEXOUT.parquet" "*NEXUS.csv", "nex-*" - # TODO: shorvath: I belive we no longer use these two arguments... - # need to double check. - #nexus_file_pattern_filter: Optional[str] = None - #nexus_input_folder: Optional[DirectoryPath] = None + """ + File containing coastal model output. + NOTE: Only used if running diffusive routing. 
+ """ ComputeParameters.update_forward_refs() diff --git a/src/troute-config/troute/config/config.py b/src/troute-config/troute/config/config.py index 1d5f8a8f9..f23d13754 100644 --- a/src/troute-config/troute/config/config.py +++ b/src/troute-config/troute/config/config.py @@ -13,10 +13,6 @@ from .bmi_parameters import BMIParameters from ._utils import use_strict -class LoggingParameters(BaseModel): - showtiming: Optional[bool] = None - log_level: Optional[str] = None - log_directory: Optional[str] = None class Config(BaseModel): log_parameters: LoggingParameters = Field(default_factory=LoggingParameters) @@ -51,7 +47,8 @@ def with_strict_mode(cls, **data: Dict[str, Any]) -> Self: ########################################################################################## @root_validator(skip_on_failure=True) - def check_levelpool_filepath(cls, values): + def check_levelpool_filepath(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that the level pool parameter file is provided IF waterbodies are being routed.""" network_type = values['network_topology_parameters'].supernetwork_parameters.network_type waterbody_parameters = values['network_topology_parameters'].waterbody_parameters if waterbody_parameters: @@ -59,14 +56,15 @@ def check_levelpool_filepath(cls, values): levelpool = waterbody_parameters.level_pool if simulate_waterbodies and network_type=='NHDNetwork': - assert levelpool, 'Waterbody simulation is enabled for NHDNetwork, but levelpool parameters are missing.' + assert levelpool, 'Waterbody simulation is enabled for NHDNetwork, but level pool parameters are missing.' levelpool_file = levelpool.level_pool_waterbody_parameter_file_path - assert levelpool_file, 'Waterbody simulation is enabled for NHDNetwork, but no levelpool parameter file is provided.' + assert levelpool_file, 'Waterbody simulation is enabled for NHDNetwork, but no level pool parameter file is provided.' 
return values @root_validator(skip_on_failure=True) - def check_diffusive_domain(cls, values): + def check_diffusive_domain(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that the diffusive domain file is provided IF diffusive routing is enabled.""" hybrid_parameters = values['compute_parameters'].hybrid_parameters if hybrid_parameters: run_hybrid = hybrid_parameters.run_hybrid_routing @@ -76,7 +74,8 @@ def check_diffusive_domain(cls, values): return values @root_validator(skip_on_failure=True) - def check_topobathy_domain(cls, values): + def check_topobathy_domain(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a topobathy domain file is provided IF using natural cross-sections has been enabled.""" hybrid_parameters = values['compute_parameters'].hybrid_parameters if hybrid_parameters: use_natl_xsections = hybrid_parameters.use_natl_xsections @@ -86,7 +85,8 @@ def check_topobathy_domain(cls, values): return values @root_validator(skip_on_failure=True) - def check_refactored(cls, values): + def check_refactored(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that the refactored domain and topobathy files are provided IF refactored network is enabled.""" hybrid_parameters = values['compute_parameters'].hybrid_parameters if hybrid_parameters: run_refactored_network = hybrid_parameters.run_refactored_network @@ -97,7 +97,8 @@ def check_refactored(cls, values): return values @root_validator(skip_on_failure=True) - def check_coastal_domain(cls, values): + def check_coastal_domain(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a coastal boundary domain file is provided IF diffusive routing is enabled and coastal forcing files are provided.""" hybrid_parameters = values['compute_parameters'].hybrid_parameters forcing_parameters = values['compute_parameters'].forcing_parameters if hybrid_parameters: @@ -108,7 +109,8 @@ def check_coastal_domain(cls, values): return values @root_validator(skip_on_failure=True) - def 
check_gage_segID_crosswalk_file(cls, values): + def check_gage_segID_crosswalk_file(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a gage-segment cross-walk file is provided IF streamflow nudging is enabled on a NHD network.""" da_parameters = values['compute_parameters'].data_assimilation_parameters if da_parameters: streamflow_DA = da_parameters.streamflow_da @@ -116,12 +118,13 @@ def check_gage_segID_crosswalk_file(cls, values): streamflow_nudging = streamflow_DA.streamflow_nudging network_type = values['network_topology_parameters'].supernetwork_parameters.network_type if streamflow_nudging and network_type=='NHDNetwork': - assert streamflow_DA.gage_segID_crosswalk_file, 'Streamflow nuding is enabled on NHDNetwork, but gage_segID_crosswalk_file is missing.' + assert streamflow_DA.gage_segID_crosswalk_file, 'Streamflow nudging is enabled on NHDNetwork, but gage_segID_crosswalk_file is missing.' return values @root_validator(skip_on_failure=True) - def check_rfc_parameters(cls, values): + def check_rfc_parameters(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that RFC parameter file (IF on NHD network) and timeseries directory path are provided IF RFC reservoir DA is enabled.""" da_parameters = values['compute_parameters'].data_assimilation_parameters if da_parameters: reservoir_da = da_parameters.reservoir_da @@ -147,7 +150,8 @@ def check_rfc_parameters(cls, values): return values @root_validator(skip_on_failure=True) - def check_usgs_reservoir_da_parameters(cls, values): + def check_usgs_reservoir_da_parameters(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that USGS parameter file (IF on NHD network) and timeslice directory path are provided IF USGS reservoir DA is enabled.""" da_parameters = values['compute_parameters'].data_assimilation_parameters if da_parameters: reservoir_da = da_parameters.reservoir_da @@ -173,7 +177,8 @@ def check_usgs_reservoir_da_parameters(cls, values): return values 
@root_validator(skip_on_failure=True) - def check_usace_reservoir_da_parameters(cls, values): + def check_usace_reservoir_da_parameters(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that USACE parameter file (IF on NHD network) and timeslice directory path are provided IF USACE reservoir DA is enabled.""" da_parameters = values['compute_parameters'].data_assimilation_parameters if da_parameters: reservoir_da = da_parameters.reservoir_da @@ -199,7 +204,8 @@ def check_usace_reservoir_da_parameters(cls, values): return values @root_validator(skip_on_failure=True) - def check_qlat_inputs(cls, values): + def check_qlat_inputs(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a forcing directory is provided. TODO: Add bypass for this check once t-route is connected to BMI.""" forcing_parameters = values['compute_parameters'].forcing_parameters if forcing_parameters: qlat_forcing_sets = forcing_parameters.qlat_forcing_sets @@ -210,7 +216,8 @@ def check_qlat_inputs(cls, values): return values @root_validator(skip_on_failure=True) - def check_wrf_hydro_restart_files(cls, values): + def check_wrf_hydro_restart_files(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that auxiliary WRF-Hydro files exist IF a WRF-Hydro restart file is being used.""" restart_parameters = values['compute_parameters'].restart_parameters if restart_parameters: wrf_hydro_channel_restart_file = restart_parameters.wrf_hydro_channel_restart_file @@ -228,7 +235,8 @@ def check_wrf_hydro_restart_files(cls, values): return values @root_validator(skip_on_failure=True) - def check_start_datetime(cls, values): + def check_start_datetime(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a start datetime is provided.""" restart_parameters = values['compute_parameters'].restart_parameters if restart_parameters: wrf_hydro_channel_restart_file = restart_parameters.wrf_hydro_channel_restart_file @@ -239,7 +247,8 @@ def check_start_datetime(cls, values): return
values @root_validator(skip_on_failure=True) - def check_flowpath_edge_list(cls, values): + def check_flowpath_edge_list(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a flowpath_edge_list file is provided IF using a .json geo-file.""" geo_file_path = values['network_topology_parameters'].supernetwork_parameters.geo_file_path flowpath_edge_list = values['network_topology_parameters'].supernetwork_parameters.flowpath_edge_list if Path(geo_file_path).suffix=='.json': @@ -249,7 +258,8 @@ def check_flowpath_edge_list(cls, values): return values @root_validator(skip_on_failure=True) - def check_lite_restart_directory(cls, values): + def check_lite_restart_directory(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that a lite restart output directory is provided IF lite restart output is enabled.""" if values['output_parameters']: lite_restart = values['output_parameters'].lite_restart if lite_restart is not None: @@ -259,7 +269,8 @@ def check_lite_restart_directory(cls, values): return values @root_validator(skip_on_failure=True) - def check_nts_dt_stream_output_internal_frequency(cls, values): + def check_nts_dt_stream_output_internal_frequency(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Verify that stream output parameters make sense IF writing stream output files is enabled.""" compute_params = values.get('compute_parameters') output_params = values.get('output_parameters') diff --git a/src/troute-config/troute/config/logging_parameters.py b/src/troute-config/troute/config/logging_parameters.py index ec310a3e9..36df9b44e 100644 --- a/src/troute-config/troute/config/logging_parameters.py +++ b/src/troute-config/troute/config/logging_parameters.py @@ -1,5 +1,7 @@ from pydantic import BaseModel +from pathlib import Path +from typing import Optional from typing_extensions import Literal LogLevel = Literal["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"] @@ -21,3 +23,7 @@ class LoggingParameters(BaseModel): major process in
the simulation sequence. optional, defaults to None and no timing summary is reported """ + log_directory: Optional[Path] = None + """ + Path to location where a logging file will be saved. + """ diff --git a/src/troute-config/troute/config/network_topology_parameters.py b/src/troute-config/troute/config/network_topology_parameters.py index bdaac8887..52b786a9a 100644 --- a/src/troute-config/troute/config/network_topology_parameters.py +++ b/src/troute-config/troute/config/network_topology_parameters.py @@ -7,44 +7,81 @@ class NetworkTopologyParameters(BaseModel): - # TODO: default {}. see nhd_io.read_config_file ~:100 + """ + Parameters controlling how the stream network is synthesized. + """ preprocessing_parameters: "PreprocessingParameters" = Field(default_factory=dict) - # TODO: not sure if default {}. see nhd_io.read_config_file ~:100 supernetwork_parameters: "SupernetworkParameters" - # TODO: default {}. see nhd_io.read_config_file ~:100 waterbody_parameters: "WaterbodyParameters" = Field(default_factory=dict) - # TODO: error in v3_doc.yaml; `rfc` is listed as network_topology_parameters parameter. - # should instead be waterbody_parameters -# TODO: This is an old parameter but probably worth keeping moving forward. However, it is -# not implemented in V4 at the moment (Aug 11, 2023). Need to add this functionality to t-route. class PreprocessingParameters(BaseModel): + """ + Parameters controlling the creation and use of preprocessed network graph data. + """ preprocess_only: bool = False - # NOTE: required if preprocess_only = True - # TODO: determine if str type + """ + If True, then network graph objects will be created, saved to disk, and then the execution will stop. + """ preprocess_output_folder: Optional[DirectoryPath] = None + """ + Directory to save preprocessed data to. + NOTE: required if preprocess_only = True + """ preprocess_output_filename: str = "preprocess_output" + """ + Name to save preprocessed file to (do not include file extension). 
+ """ use_preprocessed_data: bool = False + """ + If True, used preprocessed network data istead of reading from geo_file_path. + """ # NOTE: required if use_preprocessed_data = True # TODO: determine if str type preprocess_source_file: Optional[FilePath] = None + """ + Filepath of preprocessed data. + NOTE: required if use_preprocessed_data = True + """ class SupernetworkParameters(BaseModel): + """ + Parameters specific to the stream network. + """ title_string: Optional[str] = None - # TODO: hopefully places in the code can be changed so this is a `Path` instead of a `str` + """ + Used for simulation identification. Appears in csv filename, if csv oupt is used. + Otherwise, this variable is of little use. + """ geo_file_path: FilePath + """ + Path to the hydrofabric. Currently accepts geopackage (assumes HYFeatures), geojson (assumes HYFeatures), + json (assumes HYFeatures), netcdf (assumes NHD). + """ network_type: Literal["HYFeaturesNetwork", "NHDNetwork"] = "HYFeaturesNetwork" + """ + Specify if this is an NHD network or a HYFeatures network. + """ flowpath_edge_list: Optional[str] = None + """ + File containing dictionary of connections between segment IDs and nexus IDs. + NOTE: Only used if using geojson files for hydrofabric. + """ mask_file_path: Optional[FilePath] = None + """ + File containing channel mask file. + NOTE: Not implemented for HYFeatures. + """ mask_layer_string: str = "" - # TODO: determine if this is still used - # TODO: determine what the default for this should be. Not sure if this is right? mask_driver_string: Optional[str] = None mask_key: int = 0 columns: Optional["Columns"] = None + """ + Attribute names in channel geometry file. + Default values depend on newtork type. 
+ """ # NOTE: required for CONUS-scale simulations with NWM 2.1 or 3.0 Route_Link.nc data synthetic_wb_segments: Optional[List[int]] = Field( default_factory=lambda: [ @@ -54,10 +91,20 @@ class SupernetworkParameters(BaseModel): 4800007, ] ) + """ + Synthetic waterbody segment IDs that are used to construct the Great Lakes + NOTE: required for CONUS-scale simulations with NWM 2.1 or 3.0 Route_Link.nc data + """ synthetic_wb_id_offset: float = 9.99e11 + """ + Arbitrary large number appended to synthetic_wb_segments in their handling process + """ terminal_code: int = 0 - # TODO: It would be nice if this were a literal / str + """ + Coding in channel geometry dataset for segments draining to ocean. A '0' ID indicates there is nothing downstream. + """ + driver_string: Union[str, Literal["NetCDF"]] = "NetCDF" layer_string: int = 0 @@ -107,51 +154,99 @@ def get_columns(cls, columns: dict, values: Dict[str, Any]) -> dict: class Columns(BaseModel): - # string, unique segment identifier key: str - # string, unique identifier of downstream segment + """ + unique segment identifier + """ downstream: str - # string, segment length + """ + unique identifier of downstream segment + """ dx: str - # string, manning's roughness of main channel + """ + segment length + """ n: str - # string, mannings roughness of compound channel + """ + manning's roughness of main channel + """ ncc: str - # string, channel slope + """ + mannings roughness of compound channel + """ s0: str - # string, channel bottom width + """ + channel slope + """ bw: str - # string, waterbody identifier + """ + channel bottom width + """ waterbody: Optional[str] - # string, channel top width + """ + waterbody identifier + """ tw: str - # string, compound channel top width + """ + channel top width + """ twcc: str - # string, channel bottom altitude + """ + compound channel top width + """ alt: Optional[str] - # string, muskingum K parameter + """ + channel bottom altitude + """ musk: str - # string, muskingum X 
parameter + """ + muskingum K parameter + """ musx: str - # string, channel sideslope + """ + muskingum X parameter + """ cs: str - # string, gage ID + """ + channel sideslope + """ gages: Optional[str] - # string, mainstem ID + """ + gage ID + """ mainstem: Optional[str] + """ + mainstem ID + """ class WaterbodyParameters(BaseModel): - # NOTE: required, True for simulations with waterbodies. + """ + Parameters specifying how (if) waterbodies are handled. + """ break_network_at_waterbodies: bool = False + """ + If True, waterbodies will be treated as reservoirs. If False, the underlying flowpaths will be used for channel routing. + """ level_pool: Optional["LevelPool"] = None waterbody_null_code: int = -9999 + """ + NULL value to use in flowpath-waterbody crosswalk. + """ class LevelPool(BaseModel): - # string, filepath to waterbody parameter file (LAKEPARM.nc) + """ + Attributes of the lake geometry file for levelpool simulations. + """ level_pool_waterbody_parameter_file_path: Optional[FilePath] = None + """ + Filepath for NetCDF file containing lake parameters (LAKEPARM). Only used for NHD networks. + """ level_pool_waterbody_id: Union[str, Literal["lake_id"]] = "lake_id" + """ + Column name for waterbody ID. + """ NetworkTopologyParameters.update_forward_refs() diff --git a/src/troute-config/troute/config/output_parameters.py b/src/troute-config/troute/config/output_parameters.py index 78128401e..17545b0cf 100644 --- a/src/troute-config/troute/config/output_parameters.py +++ b/src/troute-config/troute/config/output_parameters.py @@ -9,44 +9,51 @@ class OutputParameters(BaseModel): + """ + Parameters controlling model outputs. Output parameters can be left completely blank and no files will be written. + However, if 'output_parameters' exists, one of the following specific output file parameters should also be specified. + Many of these are meant to mimic WRF-Hydro's outputs. 
+ """ chanobs_output: Optional["ChanobsOutput"] = None - # NOTE: this appears to be optional. See nwm_routing/input.py ~:477 csv_output: Optional["CsvOutput"] = None - # NOTE: this appears to be optional. See nwm_routing/input.py ~:496 parquet_output: Optional["ParquetOutput"] = None - # NOTE: this appears to be optional. See nwm_routing/input.py ~:563 chrtout_output: Optional["ChrtoutOutput"] = None lite_restart: Optional["LiteRestart"] = None - # NOTE: this appears to be optional. See nwm_routing/input.py ~:520 hydro_rst_output: Optional["HydroRstOutput"] = None - # TODO: default appears to be {}. see nhd_io.read_config_file ~:141 - # shorvath: parity_parameters defaults to {}, but omitting 'wrf_hydro_parity_check' - # from output_parameters will successfully skip lines~112-115 in __main__.py if this - # parameter is left blank. wrf_hydro_parity_check: Optional["WrfHydroParityCheck"] = None - # NOTE: mandatory if writing results to lakeout. lakeout_output: Optional[DirectoryPath] = None - - # NOTE: assuming this should be removed - # TODO: missing from `v3_doc.yaml` - # see nwm_routing/output.py :114 test_output: Optional[Path] = None stream_output: Optional["StreamOutput"] = None - # NOTE: mandatory if writing results to lastobs lastobs_output: Optional[DirectoryPath] = None + class ChanobsOutput(BaseModel): - # NOTE: required if writing chanobs files + """ + CHANOBS files are outputs from WRF-Hydro containing station observations. This replicates that behavior. + """ chanobs_output_directory: Optional[DirectoryPath] = None - # NOTE: required if writing chanobs files - # NOTE: is `Path` b.c. is output file + """ + Directory to save CHANOBS output files. If this is None, no CHANOBS will be written. + """ chanobs_filepath: Optional[Path] = None + """ + Filename of CHANOBS output file. 
+ """ class CsvOutput(BaseModel): - # NOTE: required if writing results to csv + """ + This is an older alternative to the CSV file writing capabilities of the more recently developed 'stream_output'. + This will simply write the full flowveldepth array to a .csv file. + """ csv_output_folder: Optional[DirectoryPath] = None + """ + Directory to save csv output files. If this is None, no csv will be written. + """ csv_output_segments: Optional[List[str]] = None + """ + Subset of segment IDs to include in the output file. + """ class ParquetOutput(BaseModel): @@ -58,46 +65,106 @@ class ParquetOutput(BaseModel): class ChrtoutOutput(BaseModel): - # NOTE: mandatory if writing results to CHRTOUT. + """ + CHRTOUT files are outputs from WRF-Hydro containing full channel network output. This replicates that behavior. + """ wrf_hydro_channel_output_source_folder: Optional[DirectoryPath] = None + """ + Directory to save CHRTOUT files. No files will be written if this is None. + """ class LiteRestart(BaseModel): - # NOTE: required if writing restart data lite files. + """ + Saves final conditions of channel and reservoir dataframes as pickle files to be used in follow up simulation as initial conditions. + """ lite_restart_output_directory: Optional[DirectoryPath] = None + """ + Directory to save lite_restart files. No files will be written if this is None. + """ class HydroRstOutput(BaseModel): - # NOTE: required if writing restart data to HYDRO_RST + """ + Parameters controlling the writing of restart data to HYDRO_RST netcdf files. Mimics WRF-Hydro. + """ wrf_hydro_restart_dir: Optional[DirectoryPath] = None + """ + Directory to save state files. + """ wrf_hydro_channel_restart_pattern_filter: str = "HYDRO_RST.*" - + """ + File pattern for state files. + """ wrf_hydro_channel_restart_source_directory: Optional[DirectoryPath] = None + """ + DEPRECATED? + """ wrf_hydro_channel_output_source_folder: Optional[DirectoryPath] = None + """ + DEPRECATED? 
+ """ class WrfHydroParityCheck(BaseModel): - # NOTE: required for parity check to occur - # TODO: not sure if this should be optional? - # shorvath: I'm ok with removing parity_checks for t-routeV4... + """ + Paramters controlling a single-segment parity assessment between t-route and WRF-hydro. + """ parity_check_input_folder: Optional[DirectoryPath] = None + """ + """ parity_check_file_index_col: str + """ + """ parity_check_file_value_col: str + """ + """ parity_check_compare_node: str + """ + """ parity_check_compare_file_sets: Optional[List["ParityCheckCompareFileSet"]] = None class ParityCheckCompareFileSet(BaseModel): validation_files: List[FilePath] + """ + """ class StreamOutput(BaseModel): - # NOTE: required if writing StreamOutput files + """ + t-route's most recent output file type. This will output channel network values (flow, velocity, depth, and nudge values). + This has been designed for as much flexibility for user needs as possible, including file type (netcdf, csv, pickle) and how + frequently to create output files relative to simulation time and how many output timesteps to include. Only 'stream_output_directory' + is required, the other default values will create 1 file per hour of simulation time, containing values at every timestep of + simulation. If t-route is run with default dt (300 seconds/5 minutes) for 24 hours, the defaults here would produce 24 output files + (1 per hour of simulation), each containing 12 values for each variable (1 value every 5 minutes in the hour of simulation). + """ stream_output_directory: Optional[Path] = None + """ + Directory to save flowveldepth outputs. If this is not None, this form of output will be written. + """ mask_output: Optional[FilePath] = None + """ + Yaml file specifying flowpath/nexus IDs to include in output files. + """ stream_output_time: int = 1 - stream_output_type:streamOutput_allowedTypes = ".nc" + """ + Value is in simulation time hours. 
This tells t-route how frequently to make output files. '1' would be 1 file per hour + of simulation time. + """ + stream_output_type: streamOutput_allowedTypes = ".nc" + """ + Output file type. + """ stream_output_internal_frequency: Annotated[int, Field(strict=True, ge=5)] = 5 + """ + Value is in minutes. This tells t-route the frequency of t-route's timesteps to include in the output file. For instance, + a value of '5' here would output flow, velocity, and depth values every 5 minutes of simulation time. A value of '30' would + output values every 30 minutes of simulation time. + NOTE: This value should not be smaller than dt, and should be a multiple of dt (keep in mind dt is in seconds, while this value + is in minutes). So if dt=300(sec), this value cannot be smaller than 5(min) and should be a multiple of 5. + """ @validator('stream_output_directory') def validate_stream_output_directory(cls, value):