diff --git a/.appveyor_clear_cache.txt b/.appveyor_clear_cache.txt index 48a46ed8..7258dd4e 100644 --- a/.appveyor_clear_cache.txt +++ b/.appveyor_clear_cache.txt @@ -1 +1,2 @@ 2018-05-02: clear appveyor cache +2020-04-27: invalidate appveyor cache (R v4.0.0) diff --git a/.lintr b/.lintr index eb18283d..8abf48c3 100644 --- a/.lintr +++ b/.lintr @@ -28,10 +28,10 @@ linters: with_defaults( #extraction_operator_linter, #implicit_integer_linter, #todo_comment_linter, + #nonportable_path_linter(lax = TRUE), # see https://github.com/jimhester/lintr/issues/468 #--- Activated non-default linters: absolute_path_linter(lax = TRUE), T_and_F_symbol_linter, - nonportable_path_linter(lax = TRUE), semicolon_terminator_linter(semicolon = c("compound", "trailing")), undesirable_function_linter, undesirable_operator_linter, diff --git a/.travis.yml b/.travis.yml index 1a38bc92..76cc257c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ # R for travis: see documentation at https://docs.travis-ci.com/user/languages/r -language: R +language: r +os: linux # Set environmental variables # - _R_CHECK_FORCE_SUGGESTS_=false: don't fail if suggested packages fail @@ -16,7 +17,6 @@ env: - secure: "rq8iu0qkN0QYa1CF3zx/ub2eXJ1vc6XmOSqWPUSCHQTOuftUnKjbysAo0a8Cj1GqHPltXyIx8ZfLSJpN/zIsW919B3XBPSVeoStdpvJ/omUdGf8g4/0k2jlLQx0JpfFuIjOaFBlL5Kg5iAm3Xg1dxA/+cgx57KLbbF91HEvCE22g5NgO5ZEWsKH6iyVzEkz5o3sxvIxDEniiJSKdxfZLe8jrZNmqG+H2tKn1GbiDfunN8ynYE5Enwamr6y46E8Q2Bm7BR4FX3syxjvhz/JOi44vy88E50wKpMU+y9/lxpqMjQxM8lU0L5ZAxsSe08fM6jvEzOVZgsEVS3tMJHdCy9mQ3YktVl7l4hce4+OI866ZkkfpX2nBoGcjLe9LVbbGLet7UoC8DNv5XF1eJU+9sn5Ynr94YFaNhlFP2H+PoqWgyt7SLpgYjogrd8/X8a/oQeIMhriqNVbddc6oB2rkBsjRA9kGo+8jcHei80Xm/ZaIafMoa9KZV4Aw7Uhd/RP0JiaRQmL8mBf09/LOJD24LIpqYzgHJg6q6pHa/XS+9qcbjoObzbdy1rBtGiS5f1eXrCefTmsvSsOvRWlyZcGLrX2VbTFiLTjFCDWLjYNMUVCxhislsxZCrvRuMaS9IIep8rD+p8EDjUIr/UTq/vl64E1jfd3+3wA4rcW3nbQ6iiYg=" - LINTR_COMMENT_BOT=false -sudo: false cache: packages # sets `R_LIBS_USER` warnings_are_errors: false # turn this 
back on for more severe testing @@ -41,25 +41,23 @@ addons: - netcdf-bin before_install: - # pwd = /, here DrylandEcology/rSFSW2 - # Don't install heavy-weight `devtools`, instead use `remotes` to - # detect/install/update package dependencies - - Rscript -e 'utils::install.packages("remotes", lib = Sys.getenv("R_LIBS_USER"))' - # Install/update dependencies of rSOILWAT2 (because rSOILWAT2 is installed - # 'before install' and thus dependencies are not installed and r_packages - # has not taken effect yet) - - Rscript -e 'remotes::update_packages(c("blob", "DBI", "RSQLite"), upgrade = TRUE, lib = Sys.getenv("R_LIBS_USER"))' - # Obtain source code of rSOILWAT2 from github and save at - # 'DrylandEcology/rSOILWAT2' - - git clone -b master --single-branch --recursive https://github.com/DrylandEcology/rSOILWAT2.git ../rSOILWAT2 - # Install rSOILWAT2 without help pages and without vignettes - - R CMD INSTALL --no-docs --no-help ../rSOILWAT2 - # Use git-lfs to pull reference files for package checking + # Use git-lfs to pull reference files for rSFSW2 package checking - git lfs pull install: + # Use `remotes` to detect/install/update package dependencies + - Rscript -e 'utils::install.packages("remotes", lib = Sys.getenv("R_LIBS_USER"))' + # Custom install/update dependencies of rSW2utils and rSOILWAT2 + - Rscript -e 'remotes::update_packages(c("blob", "DBI", "RSQLite", "circular", "mvtnorm"), upgrade = TRUE, lib = Sys.getenv("R_LIBS_USER"))' + # Install rSW2utils as dependency of rSOILWAT2 and rSFSW2 + - Rscript -e 'remotes::install_github("DrylandEcology/rSW2utils", upgrade = TRUE)' + # Install rSOILWAT2 as dependency of rSFSW2 + # `remotes::install_github` supports submodules since v2.0.0! + - Rscript -e 'remotes::install_github("DrylandEcology/rSOILWAT2", upgrade = TRUE)' + # Install rSW2funs as dependency of rSFSW2 + - Rscript -e 'remotes::install_github("DrylandEcology/rSW2funs", upgrade = TRUE)' # Install rSFSW2 dependencies, but remove `Rmpi` etc. 
- - Rscript -e 'pkgs <- remotes::dev_package_deps(dependencies = TRUE); ids_not <- pkgs[, "package"] %in% c("rSOILWAT2", "Rmpi", "weathergen", "lubridate", "zoo", "dplyr"); update(pkgs[!ids_not, ], upgrade = TRUE, lib = Sys.getenv("R_LIBS_USER"))' + - Rscript -e 'pkgs <- remotes::dev_package_deps(dependencies = TRUE); ids_not <- pkgs[, "package"] %in% c("rSW2utils", "rSOILWAT2", "rSW2funs", "Rmpi", "weathergen", "lubridate", "zoo", "dplyr"); update(pkgs[!ids_not, ], upgrade = TRUE, lib = Sys.getenv("R_LIBS_USER"))' script: - R CMD build --no-build-vignettes --no-manual . diff --git a/DESCRIPTION b/DESCRIPTION index 9d46e30f..dab37e78 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,9 +1,8 @@ Package: rSFSW2 Title: Simulation Framework for SOILWAT2 -Version: 4.1.0 -Date: 2019-10-18 +Version: 4.3.0 Authors@R: c( - person("Daniel", "Schlaepfer", email = "daniel.schlaepfer@yale.edu", + person("Daniel", "Schlaepfer", email = "daniel.schlaepfer@alumni.ethz.ch", comment = c(ORCID = "0000-0001-9973-2065"), role = c("aut", "cre")), person("Caitlin", "Andrews", role = "ctb"), person("Zach", "Kramer", role = "ctb"), @@ -12,12 +11,12 @@ Authors@R: c( Description: Setting up, carrying out, and analyzing ecosystem water balance simulation experiments with SOILWAT2. 
Depends: - R (>= 3.5.0) + R (>= 3.5.0), + rSOILWAT2 (>= 4.0.0), + rSW2utils, + rSW2funs Imports: - rSOILWAT2 (>= 3.1.3), RSQLite (>= 2.1.1), - DBI (>= 1.0), - Rcpp (>= 0.12.12), raster (>= 2.5.8), sp (>= 1.2.3), methods @@ -30,7 +29,6 @@ Suggests: RCurl (>= 1.95.4.8), fastmatch (>= 1.0.4), iotools (>= 0.1-12), - circular (>= 0.4.7), Hmisc (>= 4.0-2), SPEI (>= 1.6), qmap (>= 1.0.4), @@ -46,18 +44,17 @@ Suggests: hunspell (>= 2.9), covr, lintr (>= 2.0.0), - goodpractice + goodpractice, + pkgload Remotes: github::dschlaep/weathergen -LinkingTo: - Rcpp LazyData: true ByteCompile: true NeedsCompilation: yes License: GPL-3 + file LICENSE URL: https://github.com/DrylandEcology/rSFSW2 BugReports: https://github.com/DrylandEcology/rSFSW2/issues -RoxygenNote: 6.1.1 +RoxygenNote: 7.1.0 VignetteBuilder: knitr Language: en-US Encoding: UTF-8 diff --git a/NAMESPACE b/NAMESPACE index e46e7c05..d4bf0221 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -18,18 +18,14 @@ export(calc_RequestedSoilLayers) export(check_aggregated_output) export(check_cltool) export(check_lock_content) -export(check_monotonic_increase) export(check_outputDB_completeness) export(check_rSFSW2_project_input_data) +export(check_rSW2_version) export(check_weatherDB) -export(circ_add) -export(circ_minus) export(climscen_metadata) export(compare_test_output) export(compare_two_dbOutput) export(compile_overall_timer) -export(convert_precipitation) -export(convert_temperature) export(crs_units) export(dbConnect2) export(dbExecute2) @@ -59,7 +55,6 @@ export(determine_simulation_size) export(dir_safe_create) export(doQmapQUANT_drs) export(do_OneSite) -export(do_compare) export(do_prior_TableLookups) export(downscale.delta) export(downscale.deltahybrid) @@ -73,6 +68,7 @@ export(extract_SFSW2_cells_from_shp) export(extract_SFSW2_points_from_shp) export(extract_blocks) export(extract_climate_NCEPCFSR) +export(extract_daily_weather_from_gridMET) export(extract_daily_weather_from_livneh) 
export(extract_from_external_raster_old) export(extract_rSFSW2) @@ -80,7 +76,6 @@ export(find_sites_with_bad_weather) export(gather_objects_for_export) export(generate_OverallAggregation_fields) export(generate_RNG_streams) -export(germination_wait_times) export(get.SeveralOverallVariables) export(get.SeveralOverallVariables_Ensemble) export(get.SeveralOverallVariables_Scenario) @@ -88,9 +83,10 @@ export(get.SeveralOverallVariables_Scenario_old) export(get.Table) export(get.Table_Ensemble) export(get.Table_Scenario) -export(get_KilledBySoilLayers) export(get_fieldnames) export(global_args_do_OneSite) +export(gridMET_download_and_check) +export(gridMET_metadata) export(init_SFSW2_cluster) export(init_rSFSW2_project) export(init_timer) @@ -103,7 +99,6 @@ export(it_sim0) export(it_sim2) export(it_site) export(it_site2) -export(kill_seedling) export(list.dbTables) export(list.dbVariables) export(list.dbVariablesOfAllTables) @@ -118,6 +113,7 @@ export(make_test_output_reference) export(missing_Pids_outputDB) export(move_output_to_dbOutput) export(mpi_work) +export(obtain_CMIP5_MACAv2metdata_USA) export(populate_rSFSW2_project_with_data) export(prepare_climatedata_netCDFs) export(quickprepare_dbOutput_dbWork) @@ -149,9 +145,10 @@ export(upgrade_soilsin_v11_to_v12) export(weighted.agg) import(RSQLite) import(methods) +import(rSOILWAT2) +import(rSW2funs) +import(rSW2utils) importClassesFrom(raster,Raster) importClassesFrom(sp,SpatialPoints) importClassesFrom(sp,SpatialPolygons) -importFrom(Rcpp,evalCpp) -importFrom(Rcpp,sourceCpp) useDynLib(rSFSW2, .registration = TRUE) diff --git a/R/ExtractData_ClimateDownscaling.R b/R/ExtractData_ClimateDownscaling.R index 4a730a1d..be3c8708 100644 --- a/R/ExtractData_ClimateDownscaling.R +++ b/R/ExtractData_ClimateDownscaling.R @@ -5,9 +5,11 @@ climscen_metadata <- function() { #--- Meta information of climate datasets template_bbox <- data.frame(matrix(NA, nrow = 2, ncol = 2, - dimnames = list(NULL, c("lat", "lon")))) + dimnames = 
list(NULL, c("lat", "lon")) + )) template_tbox <- data.frame(matrix(NA, nrow = 2, ncol = 2, - dimnames = list(c("start", "end"), c("first", "second")))) + dimnames = list(c("start", "end"), c("first", "second")) + )) # SOILWAT2 required units are c("cm/day", "C", "C") @@ -15,91 +17,152 @@ climscen_metadata <- function() { climDB_metas <- list( CMIP3_ClimateWizardEnsembles_Global = list( + convention = "ClimateWizardEnsembles", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(-55, 84), x = c(-180, 180))), tbox = fill_bounding_box(template_tbox, list(t1 = c(NA, NA), t2 = c(2070, 2099))), units = c(prcp = "%", tmin = "C", tmax = "C", tmean = "C"), - var_desc = data.frame(tag = NA, fileVarTags = NA, unit_given = NA, + var_desc = data.frame(varname = NA, tag = NA, fileVarTags = NA, unit_given = NA, unit_real = NA)[0, ], - sep_fname = NULL, str_fname = NULL), + sep_fname = NULL, str_fname = NULL + ), CMIP3_ClimateWizardEnsembles_USA = list( + convention = "ClimateWizardEnsembles", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(25.125, 49.375), x = c(-124.75, -67))), tbox = fill_bounding_box(template_tbox, list(t1 = c(NA, NA), t2 = c(2070, 2099))), units = c(prcp = "%", tmin = "C", tmax = "C", tmean = "C"), - var_desc = data.frame(tag = NA, fileVarTags = NA, unit_given = NA, + var_desc = data.frame(varname = NA, tag = NA, fileVarTags = NA, unit_given = NA, unit_real = NA)[0, ], - sep_fname = NULL, str_fname = NULL), + sep_fname = NULL, str_fname = NULL + ), CMIP3_BCSD_GDODCPUCLLNL_Global = list( + convention = "CF", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(-55.25-0.25, 83.25+0.25), x = c(-179.75-0.25, 179.75+0.25))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 1999), t2 = c(2000, 2099))), var_desc = data.frame(tag = temp <- c("Prcp", "Tmin", "Tmax", "Tavg"), fileVarTags = paste("monthly", temp, sep = "."), + varname = temp, unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = 
temp, row.names = var_names_fixed, stringsAsFactors = FALSE), sep_fname = ".", - str_fname = c(id_var = 5, id_gcm = 2, id_scen = 1, id_run = 3, id_time = 6)), + str_fname = c(id_var = 5, id_gcm = 2, id_scen = 1, id_run = 3, id_time = 6) + ), CMIP5_BCSD_GDODCPUCLLNL_Global = list( + convention = "CF", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(-55.25-0.25, 83.25+0.25), x = c(-179.75-0.25, 179.75+0.25))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2099))), var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), fileVarTags = paste0("_", temp, "_"), + varname = temp, unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, row.names = var_names_fixed, stringsAsFactors = FALSE), sep_fname = "_", - str_fname = c(id_var = 3, id_gcm = 5, id_scen = 6, id_run = 7, id_time = 8)), + str_fname = c(id_var = 3, id_gcm = 5, id_scen = 6, id_run = 7, id_time = 8) + ), CMIP3_BCSD_GDODCPUCLLNL_USA = list( + convention = "CF", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(25.125, 52.875), x = c(-124.625, -67))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 1999), t2 = c(2000, 2099))), var_desc = data.frame(tag = temp <- c("Prcp", "Tmin", "Tmax", "Tavg"), fileVarTags = paste("monthly", temp, sep = "."), + varname = temp, unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, row.names = var_names_fixed, stringsAsFactors = FALSE), sep_fname = ".", - str_fname = c(id_var = 5, id_gcm = 2, id_scen = 1, id_run = 3, id_time = 6)), + str_fname = c(id_var = 5, id_gcm = 2, id_scen = 1, id_run = 3, id_time = 6) + ), CMIP5_BCSD_GDODCPUCLLNL_USA = list( + convention = "CF", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(25.125, 52.875), x = c(-124.625, -67))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2099))), var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), fileVarTags = paste0("_", 
temp, "_"), + varname = temp, unit_given = temp <- c("mm/d", "C", "C", "C"), unit_real = temp, row.names = var_names_fixed, stringsAsFactors = FALSE), sep_fname = "_", - str_fname = c(id_var = 3, id_gcm = 5, id_scen = 6, id_run = 7, id_time = 8)), + str_fname = c(id_var = 3, id_gcm = 5, id_scen = 6, id_run = 7, id_time = 8) + ), CMIP5_BCSD_NEX_USA = list( + convention = "NEX", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(24.0625, 49.9375), x = c(-125.02083333, -66.47916667))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2099))), var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), fileVarTags = paste0("_", temp, "_"), + varname = temp, unit_given = temp <- c("kg/m2/s", "K", "K", "K"), unit_real = temp, row.names = var_names_fixed, stringsAsFactors = FALSE), - sep_fname = NULL, str_fname = NULL), # online access, i.e., no file names to parse + sep_fname = NULL, str_fname = NULL + ), # online access, i.e., no file names to parse CMIP5_BCSD_SageSeer_USA = list( + convention = "CF", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(31.75333, 49.00701), x = c(-124.2542, -102.2534))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1980, 1999), t2 = c(2070, 2099))), var_desc = data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), + varname = temp, fileVarTags = paste0("_", temp, "_"), unit_given = c("kg m-2 s-1", "K", "K", "K"), unit_real = c("mm/month", "C", "C", "C"), row.names = var_names_fixed, stringsAsFactors = FALSE), sep_fname = "_", - str_fname = c(id_var = 2, id_gcm = 4, id_scen = 5, id_run = 6, id_time = 7)), + str_fname = c(id_var = 2, id_gcm = 4, id_scen = 5, id_run = 6, id_time = 7) + ), CMIP5_ESGF_Global = list( + convention = "CF", + tres = "monthly", bbox = fill_bounding_box(template_bbox, list(y = c(-90, 90), x = c(-180-0.25, 180+0.25))), tbox = fill_bounding_box(template_tbox, list(t1 = c(1950, 2005), t2 = c(2006, 2100))), var_desc = 
data.frame(tag = temp <- c("pr", "tasmin", "tasmax", "tas"), fileVarTags = paste0(temp, "_"), + varname = temp, unit_given = temp <- c("kg m-2 s-1", "K", "K", "K"), unit_real = temp, row.names = var_names_fixed, stringsAsFactors = FALSE), sep_fname = "_", - str_fname = c(id_var = 1, id_gcm = 3, id_scen = 4, id_run = 5, id_time = 6)) + str_fname = c(id_var = 1, id_gcm = 3, id_scen = 4, id_run = 5, id_time = 6) + ), + + CMIP5_MACAv2metdata_USA = list( + convention = "CF", + tres = "daily", + bbox = fill_bounding_box(template_bbox, + list(y = c(25.063, 49.396), x = -360 + c(235.228, 292.935)) + ), + tbox = fill_bounding_box(template_tbox, + list(t1 = c(1950, 2005), t2 = c(2006, 2099)) + ), + var_desc = data.frame( + varname = c("precipitation", "air_temperature", "air_temperature", NA), + tag = tmp <- c("pr", "tasmin", "tasmax", "tas"), + fileVarTags = paste0("_", tmp, "_"), + unit_given = c("mm", "K", "K", "K"), + unit_real = c("mm/d", "K", "K", "K"), + row.names = var_names_fixed, + stringsAsFactors = FALSE + ), + sep_fname = "_", + str_fname = c( + id_var = 3, + id_gcm = 4, id_scen = 6, id_run = 5, + id_time = 7 + ) + ) + ) climDB_metas @@ -124,10 +187,21 @@ unique_times <- function(timeSlices, slice) { useSlices <- function(getYears, timeSlices, run, slice) { res <- rep(FALSE, length = nrow(getYears[[slice]])) + temp <- timeSlices$Year[timeSlices$Run == run & timeSlices$Slice == slice] + if (!anyNA(temp)) { - istart <- findInterval(temp[1], getYears[[slice]][, 1], rightmost.closed = FALSE, all.inside = FALSE) - iend <- findInterval(temp[2], getYears[[slice]][, 2], rightmost.closed = FALSE, all.inside = FALSE) + istart <- findInterval(temp[1], + getYears[[slice]][, 1], + rightmost.closed = FALSE, + all.inside = FALSE + ) + iend <- findInterval(temp[2], + getYears[[slice]][, 2], + rightmost.closed = FALSE, + all.inside = FALSE + ) + res[istart:iend] <- TRUE } @@ -337,7 +411,7 @@ controlExtremePPTevents <- function(data, dailyPPTceiling, sigmaN, do_checks = F irep <- 
irep + 1 } - if (do_checks) test_sigmaGamma(data = data, sigmaN) + if (do_checks) rSW2utils::test_sigmaGamma(data = data, sigmaN) data } @@ -354,10 +428,10 @@ applyDeltas <- function(obs.hist.daily, obs.hist.monthly, delta_ts, ppt_fun, sig ydelta <- delta_ts[delta_ts[, "Year"] == obs@year, -(1:2)] tmax <- obs@data[, "Tmax_C"] + ydelta[month, "Tmax_C"] - if (do_checks) test_sigmaNormal(data = tmax, sigmaN) + if (do_checks) rSW2utils::test_sigmaNormal(data = tmax, sigmaN) tmin <- obs@data[, "Tmin_C"] + ydelta[month, "Tmin_C"] - if (do_checks) test_sigmaNormal(data = tmin, sigmaN) + if (do_checks) rSW2utils::test_sigmaNormal(data = tmin, sigmaN) ppt_data <- unlist(lapply(1:12, function(m) { im_month <- month == m @@ -565,17 +639,17 @@ applyDelta_oneYear <- function(obs, delta_ts, ppt_fun, daily, monthly, ppt_type <- match.arg(ppt_type, c(NA, "detailed", "simple")) - month <- 1 + as.POSIXlt(rSOILWAT2::days_in_years(obs@year, obs@year))$mon + month <- 1 + as.POSIXlt(rSW2utils::days_in_years(obs@year, obs@year))$mon ydeltas <- delta_ts[delta_ts[, "Year"] == obs@year, -(1:2)] add_days <- ppt_fun[month] == "+" mult_days <- !add_days PPT_to_remove <- 0 tmax <- obs@data[, "Tmax_C"] + ydeltas[month, "Tmax_C"] - if (do_checks) test_sigmaNormal(data = tmax, sigmaN) + if (do_checks) rSW2utils::test_sigmaNormal(data = tmax, sigmaN) tmin <- obs@data[, "Tmin_C"] + ydeltas[month, "Tmin_C"] - if (do_checks) test_sigmaNormal(data = tmin, sigmaN) + if (do_checks) rSW2utils::test_sigmaNormal(data = tmin, sigmaN) if (isTRUE(ppt_type == "simple")) { ppt <- applyPPTdelta_simple(m = month, @@ -1071,9 +1145,9 @@ downscale.deltahybrid <- function(obs.hist.daily, obs.hist.monthly, stopifnot(is.finite(scen.fut.xadj)) if (do_checks) { if (iv <= 2) - test_sigmaNormal(data = scen.fut.xadj, opt_DS[["sigmaN"]]) + rSW2utils::test_sigmaNormal(data = scen.fut.xadj, opt_DS[["sigmaN"]]) if (iv == 3) - test_sigmaGamma(data = scen.fut.xadj, opt_DS[["sigmaN"]]) + rSW2utils::test_sigmaGamma(data = 
scen.fut.xadj, opt_DS[["sigmaN"]]) } # 3. Calculate eCDF of future adjusted scenario @@ -1267,7 +1341,7 @@ doQmapQUANT.default_drs <- function(x, fobj, type = NULL, lin_extrapol = NULL, } #' @rdname doQmapQUANT -#' @inheritParams doQmapQUANT +#' #' @param type_map A character vector. The type of interpolation, extrapolation, and #' spline passed to \code{\link{doQmapQUANT.default_drs}}. Possible values include #' \var{\dQuote{linear_Boe}}, \var{\dQuote{linear_Thermessl2012CC.QMv1b}}, @@ -1369,49 +1443,78 @@ doQmapQUANT_drs <- function(x, fobj, type_map = NULL, monthly_obs_base = NULL, #' #' @export downscale.deltahybrid3mod <- function( - obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - itime, years = NULL, sim_time = NULL, - opt_DS = list( - extrapol_type = "linear_Thermessl2012CC.QMv1b", - ppt_type = "detailed", - sigmaN = 6, - PPTratioCutoff = 10, - fix_spline = "attempt"), - dailyPPTceiling, monthly_extremes, - do_checks = TRUE, ...) { + obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, + itime, + years = NULL, + sim_time = NULL, + opt_DS = list( + extrapol_type = "linear_Thermessl2012CC.QMv1b", + ppt_type = "detailed", + sigmaN = 6, + PPTratioCutoff = 10, + fix_spline = "attempt" + ), + dailyPPTceiling, + monthly_extremes, + do_checks = TRUE, + ... 
+) { stopifnot(requireNamespace("qmap")) qstep <- 0.01 nboot <- 1 # Time periods - tp <- downscale.periods(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, - years, sim_time[["DScur_startyr"]], sim_time[["DScur_endyr"]], + tp <- downscale.periods( + obs.hist.daily, obs.hist.monthly, scen.hist.monthly, scen.fut.monthly, + years, + sim_time[["DScur_startyr"]], + sim_time[["DScur_endyr"]], sim_time[["future_yrs"]][itime, "DSfut_startyr"], - sim_time[["future_yrs"]][itime, "DSfut_endyr"]) + sim_time[["future_yrs"]][itime, "DSfut_endyr"] + ) - if (any(!tp$iuse_obs_hist_d)) + if (any(!tp$iuse_obs_hist_d)) { obs.hist.daily <- obs.hist.daily[tp$iuse_obs_hist_d] - if (any(!tp$iuse_obs_hist_m)) + } + + if (any(!tp$iuse_obs_hist_m)) { obs.hist.monthly <- obs.hist.monthly[tp$iuse_obs_hist_m, ] - if (any(!tp$iuse_scen_hist_m)) + } + + if (any(!tp$iuse_scen_hist_m)) { scen.hist.monthly <- scen.hist.monthly[tp$iuse_scen_hist_m, ] - if (any(!tp$iuse_scen_fut_m)) + } + + if (any(!tp$iuse_scen_fut_m)) { scen.fut.monthly <- scen.fut.monthly[tp$iuse_scen_fut_m, ] + } # Data objects - sbc.hist.monthly <- matrix(NA, nrow = nrow(scen.hist.monthly), ncol = 5, - dimnames = list(NULL, colnames(obs.hist.monthly))) - sbc.hist.monthly[, 1:2] <- scen.hist.monthly[, 1:2] + sbc.hist.monthly <- matrix( + NA, + nrow = nrow(scen.hist.monthly), + ncol = 5, + dimnames = list(NULL, colnames(obs.hist.monthly)) + ) + sbc.hist.monthly[, 1:2] <- as.matrix(scen.hist.monthly[, 1:2]) - sbc.fut.monthly <- matrix(NA, nrow = nrow(scen.fut.monthly), ncol = 5, - dimnames = list(NULL, colnames(obs.hist.monthly))) - sbc.fut.monthly[, 1:2] <- scen.fut.monthly[, 1:2] + sbc.fut.monthly <- matrix( + NA, + nrow = nrow(scen.fut.monthly), + ncol = 5, + dimnames = list(NULL, colnames(obs.hist.monthly)) + ) + sbc.fut.monthly[, 1:2] <- as.matrix(scen.fut.monthly[, 1:2]) # future simulation years = delta + simstartyr:endyr - hd.fut.monthly <- delta_ts <- matrix(NA, nrow = nrow(obs.hist.monthly), ncol = 5, 
- dimnames = list(NULL, colnames(obs.hist.monthly))) - hd.fut.monthly[, 1:2] <- delta_ts[, 1:2] <- obs.hist.monthly[, 1:2] + hd.fut.monthly <- delta_ts <- matrix( + NA, + nrow = nrow(obs.hist.monthly), + ncol = 5, + dimnames = list(NULL, colnames(obs.hist.monthly)) + ) + hd.fut.monthly[, 1:2] <- delta_ts[, 1:2] <- as.matrix(obs.hist.monthly[, 1:2]) hd.fut.monthly[, 1] <- hd.fut.monthly[, 1] + sim_time[["future_yrs"]][itime, "delta"] @@ -1421,34 +1524,51 @@ downscale.deltahybrid3mod <- function( # TODO(drs): implement a more sophisticated imputation scheme; this one biases variation downwards if (anyNA(scen.hist.monthly[, 2 + iv])) { id_nas <- is.na(scen.hist.monthly[, 2 + iv]) - scen.hist.monthly[id_nas, 2 + iv] <- stats::median(scen.hist.monthly[, 2 + iv], na.rm = TRUE) + scen.hist.monthly[id_nas, 2 + iv] <- stats::median( + scen.hist.monthly[, 2 + iv], + na.rm = TRUE + ) } if (anyNA(scen.fut.monthly[, 2 + iv])) { id_nas <- is.na(scen.fut.monthly[, 2 + iv]) - scen.fut.monthly[id_nas, 2 + iv] <- stats::median(scen.fut.monthly[, 2 + iv], na.rm = TRUE) + scen.fut.monthly[id_nas, 2 + iv] <- stats::median( + scen.fut.monthly[, 2 + iv], + na.rm = TRUE + ) } #---STEP 1: Statistical bias correction of GCM data # 1st part of this step is NOT carried out here because our GCM data is already BCSD downscaled: "first aggregating the gridded T and P observations to the GCM grid scale (at the time of this writing typically about 200km resolution)" # fit quantile map based on training data of same historic time period - qm_fit <- qmap::fitQmapQUANT.default(obs = obs.hist.monthly[, 2 + iv], - mod = scen.hist.monthly[, 2 + iv], qstep = qstep, nboot = nboot, wet.day = FALSE) + qm_fit <- qmap::fitQmapQUANT.default( + obs = obs.hist.monthly[, 2 + iv], + mod = scen.hist.monthly[, 2 + iv], + qstep = qstep, + nboot = nboot, + wet.day = FALSE + ) # 2nd part: bias correcting historic data ("then using quantile mapping techniques to remove the systematic bias in the GCM simulations relative 
to the observed probability distributions") - sbc.hist.monthly[, 2 + iv] <- doQmapQUANT_drs(x = scen.hist.monthly[, 2 + iv], - fobj = qm_fit, type_map = opt_DS[["extrapol_type"]], + sbc.hist.monthly[, 2 + iv] <- doQmapQUANT_drs( + x = scen.hist.monthly[, 2 + iv], + fobj = qm_fit, + type_map = opt_DS[["extrapol_type"]], montly_obs_base = obs.hist.monthly[, 2 + iv], monthly_extremes = monthly_extremes[[iv]], - fix_spline = opt_DS[["fix_spline"]]) + fix_spline = opt_DS[["fix_spline"]] + ) # 3rd part: bias correcting future data ("the same quantile map between simulations and observations is used to transform the future simulations from the GCM") - sbc.fut.monthly[, 2 + iv] <- doQmapQUANT_drs(x = scen.fut.monthly[, 2 + iv], fobj = qm_fit, + sbc.fut.monthly[, 2 + iv] <- doQmapQUANT_drs( + x = scen.fut.monthly[, 2 + iv], + fobj = qm_fit, type_map = opt_DS[["extrapol_type"]], montly_obs_base = obs.hist.monthly[, 2 + iv], monthly_extremes = monthly_extremes[[iv]], - fix_spline = opt_DS[["fix_spline"]]) + fix_spline = opt_DS[["fix_spline"]] + ) #---STEP 2: Spatial downscaling @@ -1459,16 +1579,21 @@ downscale.deltahybrid3mod <- function( #---STEP 3: Remapping the Historical Record to Interpolated GCM data id_sim_months <- obs.hist.monthly[, "Month"] == im #identical(obs.hist.monthly[, 2], hd.fut.monthly[, 2]) - qm_fitm <- qmap::fitQmapQUANT.default(obs = sbc.fut.monthly[sbc.fut.monthly[, 2] == im, 2 + iv], - mod = obs.hist.monthly[id_sim_months, 2 + iv], qstep = qstep, nboot = nboot, - wet.day = FALSE) + qm_fitm <- qmap::fitQmapQUANT.default( + obs = sbc.fut.monthly[sbc.fut.monthly[, 2] == im, 2 + iv], + mod = obs.hist.monthly[id_sim_months, 2 + iv], + qstep = qstep, + nboot = nboot, + wet.day = FALSE + ) hd.fut.monthly[id_sim_months, 2 + iv] <- doQmapQUANT_drs( x = obs.hist.monthly[id_sim_months, 2 + iv], fobj = qm_fitm, type_map = opt_DS[["extrapol_type"]], montly_obs_base = obs.hist.monthly[, 2 + iv], monthly_extremes = monthly_extremes[[iv]], - fix_spline = 
opt_DS[["fix_spline"]]) + fix_spline = opt_DS[["fix_spline"]] + ) } } @@ -1493,9 +1618,17 @@ downscale.deltahybrid3mod <- function( # Apply deltas to historic daily weather # Note: PPT differs from call to call to applyDeltas() because of controlExtremePPTevents (if dailyPPTceiling > 0) - applyDeltas2(daily = obs.hist.daily, monthly = obs.hist.monthly, years = tp$years, - delta_ts, ppt_fun, ppt_type = opt_DS[["ppt_type"]], dailyPPTceiling, - sigmaN = opt_DS[["sigmaN"]], do_checks = do_checks) + applyDeltas2( + daily = obs.hist.daily, + monthly = obs.hist.monthly, + years = tp$years, + delta_ts, + ppt_fun, + ppt_type = opt_DS[["ppt_type"]], + dailyPPTceiling, + sigmaN = opt_DS[["sigmaN"]], + do_checks = do_checks + ) } @@ -1633,7 +1766,7 @@ downscale.wgen_package <- function( -#--- NEX climate data source +#------Monthly NEX extractions------ get_request_NEX <- function(service, request, i_tag, variable, scen, gcm, rip, lon, lat, startyear, endyear, dir_out_temp) { @@ -1758,7 +1891,7 @@ extract_variable_NEX <- function(i_tag, variable, scen, gcm, rip, lon, lat, bbox #' and \var{\dQuote{prcp}}. Each row is one day. #' Units are [degree Celsius] for temperature and [cm / day] and [cm / month], #' respectively, for precipitation. -get_GCMdata_NEX <- function(i_tag, ts_mons, dpm, gcm, scen, rip, lon, lat, +get_GCMdata_NEX <- function(i_tag, time, dpm, gcm, scen, rip, lon, lat, startyear, endyear, climDB_meta, ...) { dots <- list(...) 
# dir_out_temp @@ -1767,53 +1900,93 @@ get_GCMdata_NEX <- function(i_tag, ts_mons, dpm, gcm, scen, rip, lon, lat, names(clim) <- row.names(climDB_meta[["var_desc"]])[seq_len(n_var)] for (iv in seq_len(n_var)) { - var_tag <- climDB_meta[["var_desc"]][iv, "tag"] - unit_from <- climDB_meta[["var_desc"]][iv, "unit_real"] + varname <- climDB_meta[["var_desc"]][iv, "varname"] #Extract data - clim[[iv]] <- extract_variable_NEX(i_tag, variable = var_tag, + clim[[iv]] <- extract_variable_NEX(i_tag, variable = varname, scen = scen, gcm = gcm, rip = rip, lon = lon, lat = lat, bbox = climDB_meta[["bbox"]], tbox = climDB_meta[["tbox"]], - startyear = startyear, endyear = endyear, dir_out_temp = dots[["dir_out_temp"]]) + startyear = startyear, endyear = endyear, + dir_out_temp = dots[["dir_out_temp"]] + ) #Adjust units - if (var_tag == "pr") { - clim[[iv]] <- convert_precipitation(clim[[iv]], dpm, unit_from) - - } else if (grepl("tas", var_tag, ignore.case = TRUE)) { - clim[[iv]] <- convert_temperature(clim[[iv]], unit_from) + if (varname == "pr") { + clim[[iv]] <- rSW2utils::convert_precipitation( + x = clim[[iv]], + dpm = dpm, + unit_from = climDB_meta[["var_desc"]][iv, "unit_real"], + unit_to = "cm/month" + ) + + } else if (grepl("tas", varname, ignore.case = TRUE)) { + clim[[iv]] <- rSW2utils::convert_temperature( + x = clim[[iv]], + unit_from = climDB_meta[["var_desc"]][iv, "unit_real"], + unit_to = "C" + ) } } #Monthly weather time-series (var names as in 'var_names_fixed') - list(cbind(year = ts_mons$year + 1900, - month = ts_mons$mon + 1, - tmax = clim[["tmax"]], tmin = clim[["tmin"]], prcp = clim[["prcp"]])) + list(data.frame( + time, + tmax = clim[["tmax"]], + tmin = clim[["tmin"]], + prcp = clim[["prcp"]]) + ) } #--- end NEX -#--- netCDF climate data source +#------netCDF helper functions------ get_SpatialIndices_netCDF <- function(filename, lon, lat) { stopifnot(requireNamespace("ncdf4")) - nc <- ncdf4::nc_open(filename = filename, write = FALSE, readunlim = TRUE, 
verbose = FALSE) + if (inherits(filename, "ncdf4")) { + nc <- filename + } else { + nc <- ncdf4::nc_open( + filename = filename, + write = FALSE, + readunlim = TRUE, + verbose = FALSE + ) + on.exit(ncdf4::nc_close(nc)) + } - #Get latitudes/longitudes from the netCDF files...; they are the same for each CMIP x extent + # Get latitudes/longitudes from the netCDF files...; + # they are the same for each CMIP x extent # - these are used to get the correct indices in the whereNearest function - dim_lat <- grep("(\\lat\\b)|(\\blatitude\\b)", names(nc$dim), value = TRUE, ignore.case = TRUE) - dim_lon <- grep("(\\lon\\b)|(\\blongitude\\b)", names(nc$dim), value = TRUE, ignore.case = TRUE) + tmp <- names(nc$dim) + dim_lat <- grep("(\\lat\\b)|(\\blatitude\\b)", tmp, + value = TRUE, + ignore.case = TRUE + ) + dim_lon <- grep("(\\lon\\b)|(\\blongitude\\b)", tmp, + value = TRUE, + ignore.case = TRUE + ) stopifnot(length(dim_lat) > 0, length(dim_lon) > 0) lats <- nc$dim[[dim_lat]]$vals lons <- nc$dim[[dim_lon]]$vals - #close the netCDF file - ncdf4::nc_close(nc) - if (any(lons > 180)) lons <- ifelse(lons > 180, lons - 360, lons) - #Calculate the spatial indices - ncg <- NULL - ncg$ix <- whereNearest(val = lon, matrix = lons) - ncg$iy <- whereNearest(val = lat, matrix = lats) + if (any(lons > 180)) { + lons <- ifelse(lons > 180, lons - 360, lons) + } + + # Calculate the spatial indices + #TODO: make sure that CRS agree + + ncg <- list() + ncg[["longitude"]] <- lons + ncg[["latitude"]] <- lats + ncg[["ix"]] <- sapply(lon, + function(x) whereNearest(val = x, matrix = lons) + ) + ncg[["iy"]] <- sapply(lat, + function(x) whereNearest(val = x, matrix = lats) + ) ncg } @@ -1837,7 +2010,9 @@ get_time_unit <- function(tunit) { #' Read and interpret time dimension of a \var{netCDF} file with \acronym{CF} 1 or larger #' -#' @param filename A character string. The name of a \var{netCDF} file. 
+#' @param filename A character string, the name of a \var{netCDF} file; or, +#' the result of \code{\link[ncdf4]{nc_open}}. +#' @param tres A character string. The temporal resolution (time step). #' #' @return A list with six elements: #' \describe{ @@ -1854,37 +2029,55 @@ get_time_unit <- function(tunit) { #' } #' #' @export -read_time_netCDF <- function(filename) { +read_time_netCDF <- function(filename, tres = c("monthly", "daily")) { + + tres <- match.arg(tres) stopifnot(requireNamespace("ncdf4")) - nc <- ncdf4::nc_open(filename = filename, write = FALSE, readunlim = TRUE, verbose = FALSE) - ncdf4::nc_close(nc) + if (inherits(filename, "ncdf4")) { + nc <- filename + } else { + nc <- ncdf4::nc_open( + filename = filename, + write = FALSE, + readunlim = TRUE, + verbose = FALSE + ) + ncdf4::nc_close(nc) + } - dim_time <- grep("(\\btime\\b)|(\\bt\\b)", names(nc$dim), value = TRUE, ignore.case = TRUE) + dim_time <- grep("(\\btime\\b)|(\\bt\\b)", names(nc$dim), + value = TRUE, + ignore.case = TRUE + ) stopifnot(length(dim_time) > 0) utemp <- nc$dim[[dim_time]]$units tvals <- nc$dim[[dim_time]]$vals calendar <- tolower(nc$dim[[dim_time]]$calendar) N <- length(tvals) + if (tres == "monthly") { + tvals <- tvals[c(1, N)] + } + utemp <- strsplit(utemp, split = " ", fixed = TRUE)[[1]] tunit <- get_time_unit(utemp[1]) - temp12 <- rep(NA, 2) if ("as" %in% utemp) { - # for instance: "day as %Y%m%d.%f" used by 'pr_Amon_EC-EARTH-DMI_1pctCO2_r1i1p1_185001-198912.nc' + # for instance: "day as %Y%m%d.%f" used + # by 'pr_Amon_EC-EARTH-DMI_1pctCO2_r1i1p1_185001-198912.nc' iformat <- grep("%Y", utemp, value = TRUE)[1] if (is.na(as.Date(as.character(tvals[1]), format = iformat))) { iformat <- sub(".%f", "", iformat) } - temp12 <- lapply(tvals[c(1, N)], function(x) - strptime(as.character(x), format = iformat, tz = "UTC")) - tbase <- temp12[[1]] + time <- strptime(tvals, format = iformat, tz = "UTC") + tbase <- time[[1]] } else if ("since" %in% utemp) { - # for instance: "days since 
1765-12-01 00:00:00" used by 'pr_Amon_HadCM3_1pctCO2_r1i1p1_000101-010012.nc' + # for instance: "days since 1765-12-01 00:00:00" used + # by 'pr_Amon_HadCM3_1pctCO2_r1i1p1_000101-010012.nc' temp <- lapply(utemp, function(x) as.Date(x, format = "%Y-%m-%d")) tbase <- temp[sapply(temp, function(x) !is.na(x))][[1]] stopifnot(length(tbase) == 1) @@ -1892,27 +2085,42 @@ read_time_netCDF <- function(filename) { #--- http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#calendar # days per calendar year cdays <- switch(calendar, - noleap = 365, `365_day` = 365, `all_leap` = 366, `366_day` = 366, `360_day` = 360, - julian = 365.25, gregorian = 365.2425, -1) + noleap = 365, + `365_day` = 365, + `all_leap` = 366, + `366_day` = 366, + `360_day` = 360, + julian = 365.25, + gregorian = 365.2425, + -1 + ) - if (identical(calendar, "proleptic_gregorian") || identical(calendar, "gregorian") || - identical(calendar, "standard") || identical(calendar, "julian") || + if (identical(calendar, "proleptic_gregorian") || + identical(calendar, "gregorian") || + identical(calendar, "standard") || + identical(calendar, "julian") || is.null(calendar)) { - # TODO: this doesn't seem to work perfectly well for Julian calendars, but should - # be ok-ish for a few hundred years around 'origin = tbase' + # TODO: this doesn't seem to work perfectly well for Julian calendars, + # but should be ok-ish for a few hundred years around 'origin = tbase' temp <- if (identical(calendar, "julian")) { 365.2425 / 365.25 # gregorian / julian days per year } else 1 day_scaler <- 86400 * temp / tunit - temp12 <- lapply(tvals[c(1, N)], function(x) - as.POSIXlt(x * day_scaler, origin = tbase, tz = "UTC")) + time <- as.POSIXlt(tvals * day_scaler, origin = tbase, tz = "UTC") } else if (cdays > 0) { + if (identical(tres, "daily")) { + #TODO + stop( + "Calendars with fixed duration of years are not yet implemented ", + "for daily netCDF files.") + } + # all years are of a constant fixed duration tbase_utc <- 
as.POSIXlt(tbase, tz = "UTC") - temp <- tvals[c(1, N)] / tunit + temp <- tvals / tunit to_add_years <- temp %/% cdays to_add_days <- temp %% cdays # base0 @@ -1926,15 +2134,21 @@ read_time_netCDF <- function(filename) { if (cdays > 360) { # calendar is one of 'noleap', '365_day', 'all_leap', and '366_day' # format '%j' is base1: Day of year as decimal number (001-366) - temp12 <- lapply(1:2, function(k) - strptime(paste(tbase_utc$year + 1900 + to_add_years[k], - to_add_days[k], sep = "-"), format = "%Y-%j", tz = "UTC")) + time <- strptime( + paste( + tbase_utc$year + 1900 + to_add_years, + to_add_days, + sep = "-" + ), + format = "%Y-%j", + tz = "UTC" + ) } else if (cdays == 360) { # all years are 360 days divided into 30-day months to_add_months <- floor(to_add_days / 30) - # POSIXlt element 'mon' is base0: 0-11: months after the first of the year. + # POSIXlt element 'mon' is base0: 0-11 months after the first of year temp_yr <- tbase_utc$year + 1900 + to_add_years temp_mon <- tbase_utc$mon + 1 + to_add_months mons_next_yr <- temp_mon - 12 @@ -1944,44 +2158,88 @@ read_time_netCDF <- function(filename) { temp_mon[imon_next_yr] <- mons_next_yr[imon_next_yr] } - temp12 <- lapply(1:2, function(k) c(year = temp_yr[k], month = temp_mon[k])) + time <- lapply(seq_along(tvals), function(k) + c(year = temp_yr[k], month = temp_mon[k]) + ) } } else stop("calendar of netCDF not recognized") } else stop("time unit of netCDF not recognized") - time12 <- lapply(temp12, function(x) { + time12 <- lapply(time, function(x) { if (inherits(x, "POSIXt")) c(year = x$year + 1900, month = x$mon + 1) else x }) - list(calendar = calendar, unit = tunit, N = N, base = tbase, start = time12[[1]], - end = time12[[2]]) + list( + calendar = calendar, + unit = tunit, + N = N, + time = time, # POSIXlt + base = tbase, + start = time12[[1]], + end = time12[[2]] + ) } -get_TimeIndices_netCDF <- function(filename, startyear, endyear) { - nc_time <- read_time_netCDF(filename) +get_TimeIndices_netCDF <- 
function(filename, startyear, endyear, + tres = c("monthly", "daily")) { + + tres <- match.arg(tres) - stopifnot(nc_time[["start"]]["year"] <= startyear || - (nc_time[["start"]]["month"] == 1 && nc_time[["start"]]["year"] == startyear)) #we only extract full years and require data from the start["year"] on - temp <- startyear - nc_time[["start"]]["year"] - timeStartIndex <- temp * 12 + 2 - nc_time[["start"]]["month"] #we extract beginning with January of start["year"] + nct <- read_time_netCDF(filename, tres = tres) + + # we only extract full years and require data from the start["year"] on + stopifnot( + nct[["start"]]["year"] <= startyear || + (nct[["start"]]["month"] == 1 && nct[["start"]]["year"] == startyear) + ) + + # we extract beginning with January (1) of start["year"] + timeStartIndex <- if (identical(tres, "monthly")) { + temp <- startyear - nct[["start"]]["year"] + temp * 12 + 2 - nct[["start"]]["month"] + + } else if (identical(tres, "daily")) { + tmp <- as.POSIXlt(ISOdate(startyear, 1, 1, tz = "UTC")) + which.min(abs(nct[["time"]] - tmp)) + } + + # account for missing months: assume all are at the end; + # e.g., precipitation of 'HadGEM2-ES' has values only until + # Nov 2099 instead Dec 2100 + + # timeCount must include a count at timeStartIndex; + # e.g., to extract two values at 1:2, use timeStartIndex = 1 and timeCount = 2 + timeCount_should <- if (identical(tres, "monthly")) { + (endyear - startyear + 1) * 12 + + } else if (identical(tres, "daily")) { + tmp <- as.POSIXlt(ISOdate(endyear, 12, 31, tz = "UTC")) + which.min(abs(nct[["time"]] - tmp)) - timeStartIndex + 1 + } - #account for missing months: assume all are at the end; e.g., precipitation of 'HadGEM2-ES' has values only until Nov 2099 instead Dec 2100 - timeCount_should <- (endyear - startyear + 1) * 12 #timeCount must include a count at timeStartIndex; to extract two values at 1:2, have timeStartIndex = 1 and timeCount = 2 N_should <- timeStartIndex + timeCount_should - 1 - if 
(nc_time[["N"]] >= N_should) { + + if (nct[["N"]] >= N_should) { timeCount <- timeCount_should - addMissingMonthAtEnd <- 0 + addMissingTimeAtEnd <- 0 } else { - timeCount <- nc_time[["N"]] - timeStartIndex + 1 - addMissingMonthAtEnd <- N_should - nc_time[["N"]] + timeCount <- nct[["N"]] - timeStartIndex + 1 + addMissingTimeAtEnd <- N_should - nct[["N"]] } - list(timeStartIndex = timeStartIndex, timeCount = timeCount, - addMissingMonthAtEnd = addMissingMonthAtEnd) + list( + timeStartIndex = timeStartIndex, + timeCount = timeCount, + time = nct[["time"]], + addMissingTimeAtEnd = addMissingTimeAtEnd + ) } + +#------Monthly netCDF extractions------ + do_ncvar_netCDF <- function(nc, nc_perm, variable, ncg, nct) { stopifnot(requireNamespace("ncdf4")) @@ -1989,162 +2247,261 @@ do_ncvar_netCDF <- function(nc, nc_perm, variable, ncg, nct) { if (index == 3L) { # if file is in order of (lat, lon, time) - ncdf4::ncvar_get(nc, variable, start = c(ncg$ix, ncg$iy, nct$timeStartIndex), - count = c(1, 1, nct$timeCount)) + ncdf4::ncvar_get(nc, + varid = variable, + start = c(ncg$ix, ncg$iy, nct$timeStartIndex), + count = c(1, 1, nct$timeCount) + ) } else if (index == 1L) { - # if file is optimized for time series extraction and permutated to order (time, lat, lon) - ncdf4::ncvar_get(nc, variable, start = c(nct$timeStartIndex, ncg$ix, ncg$iy), - count = c(nct$timeCount, 1, 1)) + # if file is optimized for time series extraction and permutated to order + # (time, lat, lon) + ncdf4::ncvar_get(nc, + varid = variable, + start = c(nct$timeStartIndex, ncg$ix, ncg$iy), + count = c(nct$timeCount, 1, 1) + ) } else { - stop("do_ncvar_netCDF: dimension 'time' must be either in first or third place, but is instead at ", index) + stop("do_ncvar_netCDF: dimension 'time' must be either in ", + "first or third place, but is instead at ", index + ) } } -extract_variable_netCDF <- function(filepath, variable, unit, ncg, nct, lon, lat, startyear, endyear) { + +extract_monthly_variable_netCDF <- 
function(filename, variable, unit, ncg, nct, + lon = NA, lat = NA, startyear = NA, endyear = NA) { + stopifnot(requireNamespace("ncdf4")) - # the 'raster' package (version <= '2.5.2') cannot handle non-equally spaced cells - nc <- ncdf4::nc_open(filename = filepath, write = FALSE, readunlim = TRUE, verbose = FALSE) + # the 'raster' package (version <= '2.5.2') cannot handle non-equally + # spaced cells + nc <- ncdf4::nc_open( + filename = filename, + write = FALSE, + readunlim = TRUE, + verbose = FALSE + ) + on.exit(ncdf4::nc_close(nc)) - nc_var <- grep(paste0("\\b", variable, "\\b"), names(nc$var), value = TRUE, ignore.case = TRUE) + nc_var <- grep(paste0("\\b", variable, "\\b"), names(nc$var), + value = TRUE, + ignore.case = TRUE + ) stopifnot(length(nc_var) > 0) stopifnot(isTRUE(tolower(unit) == tolower(nc$var[[nc_var]]$units))) # getting the values from the netCDF files... nc_perm <- sapply(nc$var[[nc_var]]$dim, function(x) x$name) res <- try(do_ncvar_netCDF(nc, nc_perm, nc_var, ncg, nct)) + if (inherits(res, "try-error")) { - # in case of 'HadGEM2-ES x RCP45' where pr and tasmax/tasmin have different timings - ncg <- get_SpatialIndices_netCDF(filename = filepath, lon, lat) - nct <- get_TimeIndices_netCDF(filename = filepath, startyear, endyear) + stopifnot( + is.finite(lon), is.finite(lat), + is.finite(startyear), is.finite(endyear) + ) + # in case of 'HadGEM2-ES x RCP45' where pr and tasmax/tasmin have + # different timings + ncg <- get_SpatialIndices_netCDF(filename = nc, lon, lat) + nct <- get_TimeIndices_netCDF( + filename = nc, + startyear = startyear, + endyear = endyear, + tres = "monthly" + ) res <- do_ncvar_netCDF(nc, nc_perm, nc_var, ncg, nct) } - ncdf4::nc_close(nc) #close the netCDF file - #adjust for missing months - if (nct$addMissingMonthAtEnd > 0) - res <- c(res, rep(NA, times = nct$addMissingMonthAtEnd)) + # adjust for missing time + if (nct$addMissingTimeAtEnd > 0) + res <- c(res, rep(NA, times = nct$addMissingTimeAtEnd)) - if 
(all(is.na(res)) || inherits(res, "try-error")) - stop("'extract_variable_netCDF' at (", round(lon, 5), ", ", round(lat, 5), "): ", - "extraction failed or no data available. Error message: ", - paste(utils::head(res), collapse = "/")) + if (all(is.na(res)) || inherits(res, "try-error")) { + stop( + "'extract_monthly_variable_netCDF' at (", + round(lon, 5), ", ", round(lat, 5), + "): extraction failed or no data available. Error message: ", + paste(res[1:6], collapse = "/") + ) + } res } -#' Extract \var{GCM} projection from a \var{netCDF} file +#' Extract monthly \var{GCM} projection from a \var{netCDF} file #' #' @return A list of one data.frame object with 5 columns and names of -#' \var{\dQuote{year}}, \var{\dQuote{month}}, \var{\dQuote{tmax}}, \var{\dQuote{tmin}}, -#' and \var{\dQuote{prcp}}. Each row is one day. -#' Units are [degree Celsius] for temperature and [cm / day] and [cm / month], -#' respectively, for precipitation. -get_GCMdata_netCDF <- function(i_tag, ts_mons, dpm, gcm, scen, rip, lon, lat, startyear, - endyear, climDB_meta, ...) { +#' \var{\dQuote{year}}, \var{\dQuote{month}}, +#' \var{\dQuote{tmax}}, \var{\dQuote{tmin}}, and \var{\dQuote{prcp}}. +#' Each row represents one month. +#' Units are [degree Celsius] for temperature and [cm / month] +#' for precipitation. +get_MonthlyGCMdata_netCDF <- function(i_tag, time, dpm, gcm, scen, rip, lon, lat, + startyear, endyear, climDB_meta, ncg, nct, ncFiles) { - dots <- list(...) 
# ncFiles, ncg, nct ctemp <- paste(c(i_tag, gcm, scen, rip), collapse = " * ") - # Extract precipitation data - ftemp1 <- grep(climDB_meta[["var_desc"]]["prcp", "fileVarTags"], dots[["ncFiles"]], - ignore.case = TRUE, value = TRUE) + #--- Extract precipitation data + ftemp1 <- grep(climDB_meta[["var_desc"]]["prcp", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) if (length(ftemp1) == 1) { - prcp <- extract_variable_netCDF(filepath = ftemp1, - variable = climDB_meta[["var_desc"]]["prcp", "tag"], + prcp <- extract_monthly_variable_netCDF( + filename = ftemp1, + variable = climDB_meta[["var_desc"]]["prcp", "varname"], unit = climDB_meta[["var_desc"]]["prcp", "unit_given"], - ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, - startyear = startyear, endyear = endyear) + ncg = ncg, + nct = nct, + lon = lon, + lat = lat, + startyear = startyear, + endyear = endyear + ) } else { if (length(ftemp1) > 1) { - stop("More than one netCDF file with precipitation data found for combination ", - ctemp, " with files = ", paste(shQuote(basename(ftemp1)), collapse = "/")) + stop("More than one netCDF file with precipitation data ", + "available for combination ", ctemp, + " with files = ", paste(shQuote(basename(ftemp1)), collapse = "/") + ) } else { - stop("No suitable netCDF file with precipitation data found for combination ", - ctemp) + stop("No suitable netCDF file with precipitation data ", + "available for combination ", ctemp + ) } } - # Extract temperature data - ftemp3 <- grep(climDB_meta[["var_desc"]]["tmin", "fileVarTags"], dots[["ncFiles"]], - ignore.case = TRUE, value = TRUE) - ftemp4 <- grep(climDB_meta[["var_desc"]]["tmax", "fileVarTags"], dots[["ncFiles"]], - ignore.case = TRUE, value = TRUE) + #--- Extract temperature data + ftemp3 <- grep(climDB_meta[["var_desc"]]["tmin", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) + ftemp4 <- grep(climDB_meta[["var_desc"]]["tmax", "fileVarTags"], ncFiles, + ignore.case = TRUE, 
+ value = TRUE + ) if (length(ftemp3) > 0 && length(ftemp4) > 0) { if (length(ftemp3) == 1 && length(ftemp4) == 1) { - tmin <- extract_variable_netCDF(filepath = ftemp3, - variable = climDB_meta[["var_desc"]]["tmin", "tag"], + tmin <- extract_monthly_variable_netCDF( + filename = ftemp3, + variable = climDB_meta[["var_desc"]]["tmin", "varname"], unit = climDB_meta[["var_desc"]]["tmin", "unit_given"], - ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, - startyear = startyear, endyear = endyear) - - tmax <- extract_variable_netCDF(filepath = ftemp4, - variable = climDB_meta[["var_desc"]]["tmax", "tag"], + ncg = ncg, + nct = nct, + lon = lon, + lat = lat, + startyear = startyear, + endyear = endyear + ) + + tmax <- extract_monthly_variable_netCDF( + filename = ftemp4, + variable = climDB_meta[["var_desc"]]["tmax", "varname"], unit = climDB_meta[["var_desc"]]["tmax", "unit_given"], - ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, - startyear = startyear, endyear = endyear) + ncg = ncg, + nct = nct, + lon = lon, + lat = lat, + startyear = startyear, + endyear = endyear + ) } else { - stop("More than one netCDF file with tmin/tmax data found for combination ", - ctemp, " with files = ", paste(shQuote(basename(ftemp3)), collapse = "/"), " or ", - paste(shQuote(basename(ftemp4)), collapse = "/")) + stop("More than one netCDF file with tmin/tmax data ", + "available for combination ", ctemp, + " with files = ", paste(shQuote(basename(ftemp3)), collapse = "/"), + " or ", + paste(shQuote(basename(ftemp4)), collapse = "/") + ) } } else { - ftemp2 <- grep(climDB_meta[["var_desc"]]["tmean", "fileVarTags"], dots[["ncFiles"]], - ignore.case = TRUE, value = TRUE) + ftemp2 <- grep(climDB_meta[["var_desc"]]["tmean", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) if (length(ftemp2) == 1) { - tmean <- extract_variable_netCDF(filepath = ftemp2, - variable = climDB_meta[["var_desc"]]["tmean", "tag"], + tmean <- 
extract_monthly_variable_netCDF( + filename = ftemp2, + variable = climDB_meta[["var_desc"]]["tmean", "varname"], unit = climDB_meta[["var_desc"]]["tmean", "unit_given"], - ncg = dots[["ncg"]], nct = dots[["nct"]], lon = lon, lat = lat, - startyear = startyear, endyear = endyear) + ncg = ncg, + nct = nct, + lon = lon, + lat = lat, + startyear = startyear, + endyear = endyear + ) tmin <- tmax <- tmean + vars <- c("tmin", "tmax", "tmean") + unit_from <- climDB_meta[["var_desc"]][vars, "unit_real"] + stopifnot(unit_from[1] == unit_from[2], unit_from[1] == unit_from[3]) + + } else { if (length(ftemp2) > 1) { - stop("More than one netCDF file with tmean data found for combination ", - ctemp, " with files = ", paste(shQuote(basename(ftemp2)), collapse = "/")) + stop("More than one netCDF file with tmean data ", + "available for combination ", ctemp, + " with files = ", paste(shQuote(basename(ftemp2)), collapse = "/") + ) } else { - stop("No suitable netCDF file with tmean data found for combination ", - ctemp) + stop("No suitable netCDF file with tmean data ", + "available for combination ", ctemp + ) } } } - # Convert units - unit_from <- climDB_meta[["var_desc"]]["prcp", "unit_real"] - prcp <- convert_precipitation(prcp, dpm, unit_from) - - unit_from <- climDB_meta[["var_desc"]][c("tmin", "tmax", "tmean"), "unit_real"] - stopifnot(unit_from[1] == unit_from[2], unit_from[1] == unit_from[3]) - tmin <- convert_temperature(tmin, unit_from[1]) - tmax <- convert_temperature(tmax, unit_from[1]) - - list(cbind(year = ts_mons$year + 1900, - month = ts_mons$mon + 1, - tmax = tmax, tmin = tmin, prcp = prcp)) -} + #--- Convert units + prcp <- rSW2utils::convert_precipitation( + x = prcp, + dpm = dpm, + unit_from = climDB_meta[["var_desc"]]["prcp", "unit_real"], + unit_to = "cm/month" + ) -#--- end netCDF + tmin <- rSW2utils::convert_temperature( + x = tmin, + unit_from = climDB_meta[["var_desc"]]["tmin", "unit_real"], + unit_to = "C" + ) + tmax <- rSW2utils::convert_temperature( + 
x = tmax, + unit_from = climDB_meta[["var_desc"]]["tmax", "unit_real"], + unit_to = "C" + ) -#----Extraction function + list(data.frame( + time, + tmax = tmax, + tmin = tmin, + prcp = prcp + )) +} -#' Extract climate scenario data and downscale to daily weather data -calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, - use_CF, use_NEX, climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, - reqDownscalingsPerGCM, climate.ambient, locations, compression_type, - getYears, assocYears, sim_time, task_seed, opt_DS, project_paths, dir_failed, resume, - verbose, print.debug) { - on.exit({save(list = ls(), file = file.path(dir_failed, - paste0("ClimScen_failed_", i_tag, "_l2.RData")))}) +#' Extract monthly climate scenario data and downscale to daily weather data +#' @seealso \code{\link{calc_DailyScenarioWeather}} +#' +calc_MonthlyScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, + clim_source, use_CF, use_NEX, climDB_meta, climDB_files, reqGCMs, + reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, locations, + compression_type, getYears, assocYears, sim_time, task_seed, opt_DS, + project_paths, dir_failed, resume, verbose, print.debug) { + + on.exit({save( + list = ls(), + file = file.path( + dir_failed, + paste0("ClimScen_failed_", i_tag, "_l2.RData") + ) + )}) # Set RNG seed for random number use by functions # - fix_PPTdata_length @@ -2158,42 +2515,67 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, Site_id_by_dbW <- locations[il, "Site_id_by_dbW"] if (verbose) { - print(paste0(i_tag, " extraction: ", shQuote(clim_source), " at ", Sys.time(), + print(paste0( + i_tag, " extraction: ", shQuote(clim_source), " at ", Sys.time(), " for ", gcm, " (", paste(reqRCPsPerGCM[[ig]], collapse = ", "), ") at ", - lon, " / ", lat)) + lon, " / ", lat + )) } #--- Output container for downscaled scenario weather data - temp1 <- expand.grid(downscaling = reqDownscalingsPerGCM[[ig]], + temp1 <- expand.grid( + 
downscaling = reqDownscalingsPerGCM[[ig]], futures = rownames(sim_time[["future_yrs"]]), - rcps = reqRCPsPerGCM[[ig]], stringsAsFactors = FALSE)[, 3:1] + rcps = reqRCPsPerGCM[[ig]] + , stringsAsFactors = FALSE + )[, 3:1] n <- dim(temp1)[1] temp1[, "tag"] <- paste0(temp1[, "futures"], ".", temp1[, "rcps"]) - temp1[, "Scenario"] <- paste(temp1[, "downscaling"], temp1[, "tag"], gcm, sep = ".") - temp1[, "Scenario_id"] <- rSOILWAT2::dbW_getScenarioId(temp1[, "Scenario"], - ignore.case = TRUE) + temp1[, "Scenario"] <- paste( + temp1[, "downscaling"], + temp1[, "tag"], + gcm, + sep = "." + ) + temp1[, "Scenario_id"] <- rSOILWAT2::dbW_getScenarioId( + temp1[, "Scenario"], + ignore.case = TRUE + ) if (anyNA(temp1[, "Scenario_id"])) { - stop("Not all requested scenarios available in the weather database scenario table:\n", - paste(shQuote(temp1[temp1[, "Scenario_id"], "Scenario"]), collapse = ", ")) + stop( + "Not all requested scenarios available ", + "in the weather database scenario table:\n", + paste(shQuote(temp1[temp1[, "Scenario_id"], "Scenario"]), collapse = ", ") + ) } if (anyNA(Site_id_by_dbW)) { - stop("Not all requested sites matched up with entries in the weather ", - "database scenario table:\n", paste("*", shQuote(locations[il, ]), collapse = "\n")) + stop( + "Not all requested sites matched up with entries in the weather ", + "database scenario table:\n", + paste("*", shQuote(locations[il, ]), collapse = "\n") + ) } else { temp1[, "Site_id_by_dbW"] <- rep(Site_id_by_dbW, n) } temp <- rep(NA, n) - temp <- list(todo = rep(TRUE, n), StartYear = temp, EndYear = temp, weatherData = temp) + temp <- list( + todo = rep(TRUE, n), + StartYear = temp, + EndYear = temp, + weatherData = temp + ) df_wdataOut <- c(temp, as.list(temp1)) #--- Determine if any are already downscaled and stored in weather database if (resume) { df_wdataOut[["todo"]] <- !rSOILWAT2::dbW_has_weatherData( - Site_ids = Site_id_by_dbW, Scenario_ids = df_wdataOut[["Scenario_id"]])[1, ] + Site_ids = 
Site_id_by_dbW, + Scenario_ids = df_wdataOut[["Scenario_id"]] + )[1, ] } ids_down <- which(df_wdataOut[["todo"]]) @@ -2207,195 +2589,186 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, rip <- "r1i1p1" } else if (use_CF) { - #--- Select netCDF files for this 'gcm' and include only required scenarios and variables - fnc_gcmXscens <- climDB_files - - tag <- paste0(climDB_meta[["sep_fname"]], gcm, climDB_meta[["sep_fname"]]) - fnc_gcmXscens <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, value = TRUE) - - tag <- paste0(climDB_meta[["sep_fname"]], all_scens, climDB_meta[["sep_fname"]]) - tag <- paste0("(", tag, ")", collapse = "|") - fnc_gcmXscens <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, value = TRUE) - - tag <- paste0("(", climDB_meta[["var_desc"]][["fileVarTags"]], ")", collapse = "|") - fnc_gcmXscens <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, value = TRUE) - - fnc_parts <- strsplit(basename(fnc_gcmXscens), split = climDB_meta[["sep_fname"]], - fixed = TRUE) - - #--- Determine most suitable 'ensemble member' rip that is available among fnc_gcmXscens - temp <- climDB_meta[["str_fname"]][c("id_scen", "id_var", "id_run")] - ptemp <- sapply(fnc_parts, function(x) x[temp]) - - # Number of netCDF files per scenario, variable, and rip - # 'pnc_count' should be - # * = 1 if netCDF file is available per scen x var x rip combination - # * > 1 if multiple time periods are available - # * = 0 if files are missing (yet 'tas' is allowed to replace missing 'tasmax'+'tasmin') - pnc_count <- table(ptemp[1, ], ptemp[2, ], ptemp[3, ]) - pnc_avail <- apply(pnc_count, 2:3, function(x) sum(x >= 1) >= n_scens) - temp <- apply(pnc_avail, 2, function(x) { - x[climDB_meta[["var_desc"]]["prcp", "tag"]] && ( - x[climDB_meta[["var_desc"]]["tmean", "tag"]] || ( - x[climDB_meta[["var_desc"]]["tmax", "tag"]] && - x[climDB_meta[["var_desc"]]["tmin", "tag"]])) - }) - rips <- names(temp) - rip <- if (length(rips) > 1) sort(rips)[1] else rips - - if 
(length(rip) == 0) { - stop("'calc.ScenarioWeather': input file(s) for model ", shQuote(gcm), - " and scenario(s) ", paste(shQuote(all_scens), collapse = "/"), - " not available: ", paste0(colnames(pnc_avail), ": ", apply(pnc_avail, 2, - function(x) paste(rownames(pnc_avail), "=", x, collapse = "/")), - collapse = " - ")) - } - - # Double check what time period to choose (the one with the most overlap to requested - # years) if multiple netCDF files for selected combination of scen x var x rip are - # present - pnc_count_rip <- pnc_count[,, rip] - i_count_rip <- which(pnc_count_rip > 1, arr.ind = TRUE) - fnc_parts2 <- fnc_parts # information that is used to index/subset fnc_gcmXscens - req_years <- c(seq.int(getYears[["first"]][1, 1], getYears[["first"]][1, 2]), - unlist(lapply(seq_len(nrow(getYears[["second"]])), function(k) - seq.int(getYears[["second"]][k, 1], getYears[["second"]][k, 2])))) - - for (k in seq_len(nrow(i_count_rip))) { - temp_var <- colnames(pnc_count_rip)[i_count_rip[k, "col"]] - temp_scen <- rownames(pnc_count_rip)[i_count_rip[k, "row"]] - - ids_fnc <- which(sapply(fnc_parts2, function(x) - any(x == rip) && any(x == temp_var) && any(x == temp_scen))) - temp_times <- lapply(fnc_parts2[ids_fnc], function(x) { - temp <- x[climDB_meta[["str_fname"]]["id_time"]] - seq.int(as.integer(substr(temp, 1, 4)), as.integer(substr(temp, 8, 11)))}) - - temp_overlap <- sapply(temp_times, function(x) sum(x %in% req_years)) - imax_overlap <- which.max(temp_overlap) # the one to keep - itemp_remove <- ids_fnc[-imax_overlap] - - fnc_gcmXscens <- fnc_gcmXscens[-itemp_remove] - fnc_parts2 <- fnc_parts2[-itemp_remove] - } - - # Subset files to selected rip - if (length(rip) > 0) { - tag <- paste0(climDB_meta[["sep_fname"]], rip, climDB_meta[["sep_fname"]]) - fnc_gcmXscens <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, value = TRUE) - } - - # Check that selected netCDF-files are available for requested variables: - # 'prcp' and ('tmean' or ('tmax' and 'tmin')) - 
fnc_parts <- strsplit(basename(fnc_gcmXscens), split = climDB_meta[["sep_fname"]], - fixed = TRUE) - ptemp <- sapply(fnc_parts, function(x) x[climDB_meta[["str_fname"]][c("id_scen", "id_var")]]) - pnc_count <- table(ptemp[1, ], ptemp[2, ]) - pnc_temp <- apply(pnc_count, 2, function(x) sum(x == 1) >= n_scens) - - pnc_avail <- pnc_temp[climDB_meta[["var_desc"]]["prcp", "tag"]] && ( - pnc_temp[climDB_meta[["var_desc"]]["tmean", "tag"]] || - all(pnc_temp[climDB_meta[["var_desc"]][c("tmin", "tmax"), "tag"]])) - - if (!pnc_avail) { - stop("'calc.ScenarioWeather': input file(s) for model ", shQuote(gcm), - " and scenario(s) ", paste(shQuote(all_scens), collapse = "/"), - " not available for required variables: ", - paste(shQuote(names(pnc_temp)[!pnc_temp]), collapse = "/")) - } + #--- Select netCDF files for this 'gcm', scenarios, and variables + tmp <- select_suitable_CFs( + climDB_files = climDB_files, + climDB_meta = climDB_meta, + getYears = getYears, + model_name = gcm, + scenario_names = all_scens + ) + + fnc_gcmXscens <- tmp[["files"]] + rip <- tmp[["rip"]] } - #---Scenario monthly weather time-series: Get GCM data for each scenario and time slice + #---Scenario monthly weather time-series: + # Get GCM data for each scenario and time slice temp <- vector("list", (getYears$n_first + getYears$n_second) * n_scens) - scen.monthly <- matrix(temp, ncol = getYears$n_first+getYears$n_second, - dimnames = list(all_scens, c(paste0("first", seq_len(getYears$n_first)), - paste0("second", seq_len(getYears$n_second))))) + scen.monthly <- matrix( + temp, + ncol = getYears$n_first + getYears$n_second, + dimnames = list( + all_scens, + c( + paste0("first", seq_len(getYears$n_first)), + paste0("second", seq_len(getYears$n_second)) + ) + ) + ) if (print.debug) { - print(paste0(i_tag, " extraction: first slice ('historical'): ", - paste(getYears$first, collapse = "-"))) + print(paste0( + i_tag, " extraction: first slice ('historical'): ", + paste(getYears$first, collapse = "-") + )) } 
- args_extract1 <- list(i_tag = i_tag, gcm = gcm, scen = scen_historical, rip = rip, - lon = lon, lat = lat, climDB_meta = climDB_meta) + args_extract1 <- list( + i_tag = i_tag, + gcm = gcm, + scen = scen_historical, + rip = rip, + lon = lon, + lat = lat, + climDB_meta = climDB_meta + ) if (use_CF) { - tag <- paste0(climDB_meta[["sep_fname"]], args_extract1[["scen"]], - climDB_meta[["sep_fname"]]) + tag <- paste0( + climDB_meta[["sep_fname"]], + args_extract1[["scen"]], + climDB_meta[["sep_fname"]] + ) fnc_gcmXscen <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, value = TRUE) - ncg <- get_SpatialIndices_netCDF(filename = fnc_gcmXscen[1], lon = lon, lat = lat) - args_extract1 <- c(args_extract1, ncFiles = list(fnc_gcmXscen), ncg = list(ncg)) + ncg <- get_SpatialIndices_netCDF( + filename = fnc_gcmXscen[1], + lon = lon, + lat = lat + ) + + args_extract1 <- c( + args_extract1, + ncFiles = list(fnc_gcmXscen), + ncg = list(ncg) + ) } if (use_NEX) { - args_extract1 <- c(args_extract1, dir_out_temp = project_paths[["dir_out_temp"]]) + args_extract1 <- c( + args_extract1, + dir_out_temp = project_paths[["dir_out_temp"]] + ) } for (it in seq_len(getYears$n_first)) { - args_first <- c(args_extract1, ts_mons = list(getYears$first_dates[[it]]), - dpm = list(getYears$first_dpm[[it]]), startyear = getYears$first[it, 1], - endyear = getYears$first[it, 2]) + args_first <- c( + args_extract1, + time = list(data.frame( + year = getYears$first_dates[[it]]$year + 1900, + month = getYears$first_dates[[it]]$mon + 1 + )), + dpm = list(getYears$first_dpm[[it]]), + startyear = getYears$first[it, 1], + endyear = getYears$first[it, 2] + ) if (use_CF) { - # Time index: differs among variables from the same GCMxRCP: in only once case: - # HadGEM2-ES x RCP45 - args_first <- c(args_first, nct = list(get_TimeIndices_netCDF( - filename = fnc_gcmXscen[1], startyear = getYears$first[it, 1], - endyear = getYears$first[it, 2]))) + # Time index: differs among variables from the same GCMxRCP: + # in 
only once case: HadGEM2-ES x RCP45 + args_first <- c( + args_first, + nct = list(get_TimeIndices_netCDF( + filename = fnc_gcmXscen[1], + startyear = getYears$first[it, 1], + endyear = getYears$first[it, 2], + tres = climDB_meta[["tres"]] + )) + ) } scen.monthly[1, it] <- if (use_CF) { - do.call(get_GCMdata_netCDF, args = args_first) - } else if (use_NEX) { - do.call(get_GCMdata_NEX, args = args_first) - } else NULL + do.call(get_MonthlyGCMdata_netCDF, args = args_first) + } else if (use_NEX) { + do.call(get_GCMdata_NEX, args = args_first) + } else NULL } if (print.debug) { - print(paste0(i_tag, " extraction: second slice ('future'): ", - paste(getYears$second, collapse = "-"))) + print(paste0( + i_tag, " extraction: second slice ('future'): ", + paste(t(getYears$second), collapse = "-") + )) } for (it in seq_len(getYears$n_second)) { - args_extract2 <- c(args_extract1, ts_mons = list(getYears$second_dates[[it]]), - dpm = list(getYears$second_dpm[[it]]), startyear = getYears$second[it, 1], - endyear = getYears$second[it, 2]) + args_extract2 <- c( + args_extract1, + time = list(data.frame( + year = getYears$second_dates[[it]]$year + 1900, + month = getYears$second_dates[[it]]$mon + 1 + )), + dpm = list(getYears$second_dpm[[it]]), + startyear = getYears$second[it, 1], + endyear = getYears$second[it, 2] + ) if (use_CF) { - # Assume that netCDF file structure is identical among RCPs within a variable + # Assume that netCDF file structure is identical + # among RCPs within a variable # - differs among variables from the same GCMxRCP: HadGEM2-ES x RCP45 - tag <- paste0(climDB_meta[["sep_fname"]], rcps[1], climDB_meta[["sep_fname"]]) + tag <- paste0( + climDB_meta[["sep_fname"]], + rcps[1], + climDB_meta[["sep_fname"]] + ) temp <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, value = TRUE)[1] - args_extract2[["nct"]] <- get_TimeIndices_netCDF(filename = temp, - startyear = getYears$second[it, 1], endyear = getYears$second[it, 2]) + + args_extract2[["nct"]] <- 
get_TimeIndices_netCDF( + filename = temp, + startyear = getYears$second[it, 1], + endyear = getYears$second[it, 2], + tres = climDB_meta[["tres"]] + ) } for (isc in 2:nrow(scen.monthly)) { args_second <- args_extract2 args_second[["scen"]] <- rcps[isc - 1] + if (use_CF) { tag <- paste0(climDB_meta[["sep_fname"]], args_second[["scen"]], climDB_meta[["sep_fname"]]) - args_second[["ncFiles"]] <- grep(tag, fnc_gcmXscens, ignore.case = TRUE, - value = TRUE) + args_second[["ncFiles"]] <- grep( + tag, + fnc_gcmXscens, + ignore.case = TRUE, + value = TRUE + ) } + scen.monthly[isc, getYears$n_first + it] <- if (use_CF) { - do.call(get_GCMdata_netCDF, args = args_second) - } else if (use_NEX) { - do.call(get_GCMdata_NEX, args = args_second) - } else NULL + do.call(get_MonthlyGCMdata_netCDF, args = args_second) + } else if (use_NEX) { + do.call(get_GCMdata_NEX, args = args_second) + } else NULL } } #Observed historic daily weather from weather database if (print.debug) - print(paste0(i_tag, " extraction: observed historic daily weather from weather DB: ", - sim_time[["simstartyr"]], "-", sim_time[["endyr"]])) - - obs.hist.daily <- rSOILWAT2::dbW_getWeatherData(Site_id = Site_id_by_dbW, - startYear = sim_time[["simstartyr"]], endYear = sim_time[["endyr"]], - Scenario_id = 1L) + print(paste0( + i_tag, " extraction: observed historic daily weather from weather DB: ", + sim_time[["simstartyr"]], "-", sim_time[["endyr"]] + )) + + obs.hist.daily <- rSOILWAT2::dbW_getWeatherData( + Site_id = Site_id_by_dbW, + startYear = sim_time[["simstartyr"]], + endYear = sim_time[["endyr"]], + Scenario_id = 1L + ) if (obs.hist.daily[[1]]@year < 1950) { #TODO(drs): I don't know where the hard coded value of 1950 comes from; it doesn't @@ -2416,66 +2789,112 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, #Hamlet et al. 2010: "an arbitrary ceiling of 150% of the observed maximum # precipitation value for each cell is also imposed by ???spreading out??? 
very large # daily precipitation values into one or more adjacent days" - dailyPPTceiling <- opt_DS[["daily_ppt_limit"]] * max(unlist(lapply(obs.hist.daily, - function(obs) max(obs@data[, "PPT_cm"])))) + dailyPPTceiling <- opt_DS[["daily_ppt_limit"]] * + max(unlist(lapply( + obs.hist.daily, + function(obs) max(obs@data[, "PPT_cm"]) + ))) + # Monthly extremes are used to cut the most extreme spline oscillations; these limits # are ad hoc; monthly temperature extremes based on expanded daily extremes - temp <- stretch_values(x = range(sapply(obs.hist.daily, function(obs) - obs@data[, c("Tmax_C", "Tmin_C")])), lambda = opt_DS[["monthly_limit"]]) - monthly_extremes <- list(Tmax = temp, Tmin = temp, PPT = c(0, - opt_DS[["monthly_limit"]] * max(tapply(obs.hist.monthly[, "PPT_cm"], - obs.hist.monthly[, 1], sum)))) + temp <- rSW2utils::stretch_values( + x = range(sapply( + obs.hist.daily, + function(obs) obs@data[, c("Tmax_C", "Tmin_C")]) + ), + lambda = opt_DS[["monthly_limit"]] + ) + + monthly_extremes <- list( + Tmax = temp, + Tmin = temp, + PPT = c( + 0, + opt_DS[["monthly_limit"]] * + max(tapply( + obs.hist.monthly[, "PPT_cm"], + obs.hist.monthly[, 1], + sum + )) + ) + ) # Loop through todos for downscaling for (k in ids_down) { ir <- which(rcps == df_wdataOut[["rcps"]][k]) - it <- which(rownames(sim_time[["future_yrs"]]) == df_wdataOut[["futures"]][k]) + it <- which( + rownames(sim_time[["future_yrs"]]) == df_wdataOut[["futures"]][k] + ) # Put historical data together - #NOTE: both scen.hist.monthly and scen.fut.monthly may have NAs because some GCMs do - # not provide data for the last month of a time slice (e.g. December 2005 may be NA) + #NOTE: both scen.hist.monthly and scen.fut.monthly may have NAs + # because some GCMs do not provide data for the last month of a time slice + # (e.g. 
December 2005 may be NA) scen.hist.monthly <- NULL if (!all(df_wdataOut[["downscaling"]][k] == "raw")) { for (itt in which(assocYears[["historical"]]$first)) { scen.hist.monthly <- rbind_2cols_nonoverlapping( scen.hist.monthly, - scen.monthly[1, itt][[1]]) + scen.monthly[1, itt][[1]] + ) } for (itt in which(assocYears[["historical"]]$second)) { scen.hist.monthly <- rbind_2cols_nonoverlapping( scen.hist.monthly, - scen.monthly[1 + ir, getYears$n_first + itt][[1]]) + scen.monthly[1 + ir, getYears$n_first + itt][[1]] + ) } } if (print.debug && !is.null(scen.hist.monthly)) { - scen.hist.monthly_mean <- stats::aggregate(scen.hist.monthly[, -(1:2)], - list(scen.hist.monthly[, "month"]), mean, na.rm = TRUE) - - temp <- apply(scen.hist.monthly_mean[, -1] - obs.hist.monthly_mean[, -1], 2, mean) - print(paste0(i_tag, " extraction: 'scen hist' - 'obs hist': ", - paste(colnames(obs.hist.monthly[, -(1:2)]), "=", round(temp, 2), collapse = ", "))) + scen.hist.monthly_mean <- stats::aggregate( + scen.hist.monthly[, -(1:2)], + list(scen.hist.monthly[, "month"]), + mean, + na.rm = TRUE + ) + + temp <- apply( + scen.hist.monthly_mean[, -1] - obs.hist.monthly_mean[, -1], + 2, + mean + ) + print(paste0( + i_tag, " extraction: 'scen hist' - 'obs hist': ", + paste( + colnames(obs.hist.monthly[, -(1:2)]), + "=", + round(temp, 2), + collapse = ", " + ) + )) } # Put future data together scen.fut.monthly <- NULL for (itt in which(assocYears[[df_wdataOut[["tag"]][k]]]$first)) { - scen.fut.monthly <- rbind_2cols_nonoverlapping( - scen.fut.monthly, - scen.monthly[1, itt][[1]]) + scen.fut.monthly <- rbind_2cols_nonoverlapping( + scen.fut.monthly, + scen.monthly[1, itt][[1]] + ) } for (itt in which(assocYears[[df_wdataOut[["tag"]][k]]]$second)) { - scen.fut.monthly <- rbind_2cols_nonoverlapping( - scen.fut.monthly, - scen.monthly[1 + ir, getYears$n_first + itt][[1]]) + scen.fut.monthly <- rbind_2cols_nonoverlapping( + scen.fut.monthly, + scen.monthly[1 + ir, getYears$n_first + itt][[1]] + ) } if 
(print.debug) { - scen.fut.monthly_mean <- stats::aggregate(scen.fut.monthly[, -(1:2)], - list(scen.fut.monthly[, "month"]), mean, na.rm = TRUE) + scen.fut.monthly_mean <- stats::aggregate( + scen.fut.monthly[, -(1:2)], + list(scen.fut.monthly[, "month"]), + mean, + na.rm = TRUE + ) } # Comment: The variables are expected to cover the following time periods @@ -2489,19 +2908,27 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, #Apply downscaling if (print.debug) - print(paste0(i_tag, " extraction: ", df_wdataOut[["tag"]][k], " downscaling with method ", - shQuote(df_wdataOut[["downscaling"]][k]))) - - dm_fun <- switch(df_wdataOut[["downscaling"]][k], + print(paste0( + i_tag, " extraction: ", df_wdataOut[["tag"]][k], + " downscaling with method ", + shQuote(df_wdataOut[["downscaling"]][k]) + )) + + dm_fun <- switch( + df_wdataOut[["downscaling"]][k], raw = downscale.raw, delta = downscale.delta, `hybrid-delta` = downscale.deltahybrid, `hybrid-delta-3mod` = downscale.deltahybrid3mod, `wgen-package` = downscale.wgen_package, - stop) + stop + ) # a list of additional parameters for downscaling - dm_add_params <- switch(df_wdataOut[["downscaling"]][k], raw = NULL, delta = NULL, + dm_add_params <- switch( + df_wdataOut[["downscaling"]][k], + raw = NULL, + delta = NULL, `hybrid-delta` = NULL, `hybrid-delta-3mod` = NULL, `wgen-package` = list( @@ -2515,16 +2942,27 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, for (do_checks in c(TRUE, FALSE)) { scen.fut.daily <- try(dm_fun( - obs.hist.daily = obs.hist.daily, obs.hist.monthly = obs.hist.monthly, - scen.hist.monthly = scen.hist.monthly, scen.fut.monthly = scen.fut.monthly, - itime = it, years = sim_years, sim_time = sim_time, opt_DS = opt_DS, - dailyPPTceiling = dailyPPTceiling, monthly_extremes = monthly_extremes, - do_checks = do_checks, add_params = dm_add_params)) + obs.hist.daily = obs.hist.daily, + obs.hist.monthly = obs.hist.monthly, + scen.hist.monthly 
= scen.hist.monthly, + scen.fut.monthly = scen.fut.monthly, + itime = it, + years = sim_years, + sim_time = sim_time, + opt_DS = opt_DS, + dailyPPTceiling = dailyPPTceiling, + monthly_extremes = monthly_extremes, + do_checks = do_checks, + add_params = dm_add_params + )) if (!inherits(scen.fut.daily, "try-error")) { if (!do_checks) - print(paste0(i_tag, " extraction: ", df_wdataOut[["tag"]][k], ": ", - shQuote(df_wdataOut[["downscaling"]][k]), " quality checks turned off")) + print(paste0( + i_tag, " extraction: ", df_wdataOut[["tag"]][k], ": ", + shQuote(df_wdataOut[["downscaling"]][k]), + " quality checks turned off" + )) break } } @@ -2534,18 +2972,49 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, if (print.debug) { temp <- rSOILWAT2::dbW_weatherData_to_monthly(scen.fut.daily) - scen.fut.down_mean <- stats::aggregate(temp[, -(1:2)], list(temp[, "Month"]), mean) - - temp <- apply(scen.fut.down_mean[, -1] - obs.hist.monthly_mean[, -1], 2, mean) - print(paste0(i_tag, " extraction: ", df_wdataOut[["tag"]][k], ": ", - shQuote(df_wdataOut[["downscaling"]][k]), "'downscaled fut' - 'obs hist': ", - paste(colnames(obs.hist.monthly[, -(1:2)]), "=", round(temp, 2), collapse = ", "))) - - if (exists("scen.hist.monthly_mean")) { # this doesn't exist, e.g., for 'raw' DSing - temp <- apply(scen.fut.down_mean[, -1] - scen.hist.monthly_mean[, -1], 2, mean) - print(paste0(i_tag, " extraction: ", df_wdataOut[["tag"]][k], ": ", - shQuote(df_wdataOut[["downscaling"]][k]), ": 'downscaled fut' - 'scen hist': ", - paste(colnames(obs.hist.monthly[, -(1:2)]), "=", round(temp, 2), collapse = ", "))) + scen.fut.down_mean <- stats::aggregate( + temp[, -(1:2)], + list(temp[, "Month"]), + mean + ) + + temp <- apply( + scen.fut.down_mean[, -1] - obs.hist.monthly_mean[, -1], + 2, + mean + ) + + print(paste0( + i_tag, " extraction: ", df_wdataOut[["tag"]][k], ": ", + shQuote(df_wdataOut[["downscaling"]][k]), + "'downscaled fut' - 'obs hist': ", + paste( + 
colnames(obs.hist.monthly[, -(1:2)]), + "=", + round(temp, 2), + collapse = ", " + ) + )) + + if (exists("scen.hist.monthly_mean")) { + # this doesn't exist, e.g., for 'raw' DSing + temp <- apply( + scen.fut.down_mean[, -1] - scen.hist.monthly_mean[, -1], + 2, + mean + ) + + print(paste0( + i_tag, " extraction: ", df_wdataOut[["tag"]][k], ": ", + shQuote(df_wdataOut[["downscaling"]][k]), + ": 'downscaled fut' - 'scen hist': ", + paste( + colnames(obs.hist.monthly[, -(1:2)]), + "=", + round(temp, 2), + collapse = ", " + ) + )) } } @@ -2553,11 +3022,18 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, df_wdataOut[["StartYear"]][k] <- years[1] df_wdataOut[["EndYear"]][k] <- years[length(years)] df_wdataOut[["weatherData"]][k] <- list( - rSOILWAT2::dbW_weatherData_to_blob(scen.fut.daily, compression_type)) + rSOILWAT2::dbW_weatherData_to_blob(scen.fut.daily, compression_type) + ) } - saveRDS(df_wdataOut, file = file.path(project_paths[["dir_out_temp"]], tolower(gcm), - paste0(clim_source, "_", i_tag, ".rds"))) + saveRDS( + df_wdataOut, + file = file.path( + project_paths[["dir_out_temp"]], + tolower(gcm), + paste0(clim_source, "_", i_tag, ".rds") + ) + ) } on.exit() @@ -2567,17 +3043,21 @@ calc.ScenarioWeather <- function(i, ig, il, gcm, site_id, i_tag, clim_source, #' Make daily weather for a scenario #' -#' A wrapper function for \code{calc.ScenarioWeather} with error control. +#' A wrapper function for \code{calc_MonthlyScenarioWeather} with error control. 
#' -#' @inheritParams calc.ScenarioWeather -try.ScenarioWeather <- function(i, clim_source, use_CF, use_NEX, climDB_meta, - climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, - locations, compression_type, getYears, assocYears, sim_time, seeds_DS, opt_DS, - project_paths, dir_failed, fdbWeather, resume, verbose, print.debug) { +#' @inheritParams calc_MonthlyScenarioWeather +try_MonthlyScenarioWeather <- function(i, clim_source, use_CF, use_NEX, + climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, + climate.ambient, locations, compression_type, getYears, assocYears, sim_time, + seeds_DS, opt_DS, project_paths, dir_failed, fdbWeather, resume, verbose, + print.debug) { # Identify index for site and scenario - # - loop over locations then loop over GCMs, i.e., - # site[il] / GCM[ig], then site[il] / GCM[ig + 1], ... + # Let ids be 1 to length(req_GCMs) + # then, i in ids result in ig = ids and il = 1 + # then, i in (1 * length(req_GCMs) + ids) result in ig = ids and il = 2 + # then, i in ((k - 1) * length(req_GCMs) + ids): ig = ids and il = k + # ... ig <- (i - 1) %% length(reqGCMs) + 1 gcm <- reqGCMs[ig] il <- (i - 1) %/% length(reqGCMs) + 1 @@ -2592,35 +3072,38 @@ try.ScenarioWeather <- function(i, clim_source, use_CF, use_NEX, climDB_meta, res <- NULL if (!rSOILWAT2::dbW_IsValid()) { - print(paste("'calc.ScenarioWeather':", shQuote(i_tag), "failed because weather", - "database cannot be accessed.")) + print(paste("'try_MonthlyScenarioWeather':", shQuote(i_tag), + "failed because weather database cannot be accessed." 
+ )) } else { - temp <- try(calc.ScenarioWeather(i = i, - ig = ig, il = il, gcm = gcm, site_id = site_id, i_tag = i_tag, - clim_source = clim_source, use_CF = use_CF, use_NEX = use_NEX, - climDB_meta = climDB_meta, climDB_files = climDB_files, - reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, - reqDownscalingsPerGCM = reqDownscalingsPerGCM, - climate.ambient = climate.ambient, - locations = locations, - compression_type = compression_type, - getYears = getYears, assocYears = assocYears, - sim_time = sim_time, - task_seed = seeds_DS[[i]], - opt_DS = opt_DS, - project_paths = project_paths, dir_failed = dir_failed, - resume = resume, - verbose = verbose, print.debug = print.debug)) + temp <- try(calc_MonthlyScenarioWeather(i = i, + ig = ig, il = il, gcm = gcm, site_id = site_id, i_tag = i_tag, + clim_source = clim_source, use_CF = use_CF, use_NEX = use_NEX, + climDB_meta = climDB_meta, climDB_files = climDB_files, + reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, + reqDownscalingsPerGCM = reqDownscalingsPerGCM, + climate.ambient = climate.ambient, + locations = locations, + compression_type = compression_type, + getYears = getYears, assocYears = assocYears, + sim_time = sim_time, + task_seed = seeds_DS[[i]], + opt_DS = opt_DS, + project_paths = project_paths, dir_failed = dir_failed, + resume = resume, + verbose = verbose, print.debug = print.debug)) if (inherits(temp, "try-error")) { print(paste(Sys.time(), temp)) save(i, ig, il, gcm, site_id, i_tag, temp, clim_source, use_CF, use_NEX, climDB_meta, - climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, - locations, compression_type, getYears, assocYears, sim_time, opt_DS, - project_paths, verbose, - file = file.path(dir_failed, paste0("ClimScen_failed_", i_tag, "_l1.RData"))) + climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, climate.ambient, + locations, compression_type, getYears, assocYears, sim_time, opt_DS, + project_paths, verbose, + file = file.path(dir_failed, + 
paste0("ClimScenMonthly_failed_", i_tag, "_l1.RData")) + ) } else { res <- i } @@ -2629,36 +3112,843 @@ try.ScenarioWeather <- function(i, clim_source, use_CF, use_NEX, climDB_meta, res } -#' Organizes the calls (in parallel) which obtain specified scenario weather for the -#' weather database from one of the available \var{GCM} sources +#------ End of Monthly netCDF extractions + + +#------Daily netCDF extractions------ + +extract_daily_variable_netCDF <- function(filename, variable, unit, + tres = "daily", startyear, endyear, lon, lat) { + stopifnot(requireNamespace("ncdf4")) + + tres <- match.arg(tres) + + nc <- ncdf4::nc_open( + filename = filename, + write = FALSE, + readunlim = TRUE, + verbose = FALSE + ) + on.exit(ncdf4::nc_close(nc)) + + nc_var <- grep(paste0("\\b", variable, "\\b"), names(nc$var), + value = TRUE, + ignore.case = TRUE + ) + + stopifnot(length(nc_var) > 0) + stopifnot(isTRUE(tolower(unit) == tolower(nc$var[[nc_var]]$units))) + + # dimnames + nc_perm <- sapply(nc$var[[nc_var]]$dim, function(x) x$name) + it <- grep("(\\btime\\b)|(\\bt\\b)", nc_perm, ignore.case = TRUE) + ilat <- grep("(\\lat\\b)|(\\blatitude\\b)", nc_perm, ignore.case = TRUE) + ilon <- grep("(\\lon\\b)|(\\blongitude\\b)", nc_perm, ignore.case = TRUE) + dimnames <- rep(NA, length = 3) + dimnames[it] <- "time" + dimnames[ilat] <- "latitude" + dimnames[ilon] <- "longitude" + + list( + nct = get_TimeIndices_netCDF( + filename = nc, + startyear = startyear, + endyear = endyear, + tres = tres + ), + + ncg = get_SpatialIndices_netCDF( + filename = nc, + lon = lon, + lat = lat + ), + + dimnames = dimnames, + values = ncdf4::ncvar_get(nc, varid = variable) + ) +} + +#' Extract all daily \var{GCM} projection values for precipitation, minimum and +#' maximum temperature from each one \var{netCDF} file. #' -#' This function assumes that a whole bunch of global variables exist and contain -#' appropriate values. 
+#' @return A list with three 3-dimensional arrays +#' \var{\dQuote{tmax}}, \var{\dQuote{tmin}}, and \var{\dQuote{prcp}}. +#' Units are [degree Celsius] for temperature and [cm / day] for precipitation. +get_DailyGCMdata_netCDF <- function(i_tag, climDB_meta, ncFiles, + startyear, endyear, lon, lat) { + + ctemp <- i_tag + + #--- Extract precipitation data + ftmp1 <- grep(climDB_meta[["var_desc"]]["prcp", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) + + if (length(ftmp1) == 1) { + prcp <- extract_daily_variable_netCDF( + filename = ftmp1, + variable = climDB_meta[["var_desc"]]["prcp", "varname"], + unit = climDB_meta[["var_desc"]]["prcp", "unit_given"], + tres = climDB_meta[["tres"]], + startyear = startyear, + endyear = endyear, + lon = lon, + lat = lat + ) + + } else { + if (length(ftmp1) > 1) { + stop("More than one netCDF file with precipitation data ", + "available for combination ", ctemp, + " with files = ", paste(shQuote(basename(ftmp1)), collapse = "/") + ) + } else { + stop("No suitable netCDF file with precipitation data ", + "available for combination ", ctemp + ) + } + } + + #--- Extract temperature data + ftmp3 <- grep(climDB_meta[["var_desc"]]["tmin", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) + ftmp4 <- grep(climDB_meta[["var_desc"]]["tmax", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) + + if (length(ftmp3) > 0 && length(ftmp4) > 0) { + if (length(ftmp3) == 1 && length(ftmp4) == 1) { + tmin <- extract_daily_variable_netCDF( + filename = ftmp3, + variable = climDB_meta[["var_desc"]]["tmin", "varname"], + unit = climDB_meta[["var_desc"]]["tmin", "unit_given"], + tres = climDB_meta[["tres"]], + startyear = startyear, + endyear = endyear, + lon = lon, + lat = lat + ) + + tmax <- extract_daily_variable_netCDF( + filename = ftmp4, + variable = climDB_meta[["var_desc"]]["tmax", "varname"], + unit = climDB_meta[["var_desc"]]["tmax", "unit_given"], + tres = climDB_meta[["tres"]], + startyear = 
startyear, + endyear = endyear, + lon = lon, + lat = lat + ) + + } else { + stop("More than one netCDF file with tmin/tmax data ", + "available for combination ", ctemp, + " with files = ", paste(shQuote(basename(ftmp3)), collapse = "/"), + " or ", + paste(shQuote(basename(ftmp4)), collapse = "/") + ) + } + + } else { + ftmp2 <- grep(climDB_meta[["var_desc"]]["tmean", "fileVarTags"], ncFiles, + ignore.case = TRUE, + value = TRUE + ) + + if (length(ftmp2) == 1) { + tmean <- extract_daily_variable_netCDF( + filename = ftmp2, + variable = climDB_meta[["var_desc"]]["tmean", "varname"], + unit = climDB_meta[["var_desc"]]["tmean", "unit_given"], + tres = climDB_meta[["tres"]], + startyear = startyear, + endyear = endyear, + lon = lon, + lat = lat + ) + + tmin <- tmax <- tmean + vars <- c("tmin", "tmax", "tmean") + unit_from <- climDB_meta[["var_desc"]][vars, "unit_real"] + stopifnot(unit_from[1] == unit_from[2], unit_from[1] == unit_from[3]) + + } else { + if (length(ftmp2) > 1) { + stop("More than one netCDF file with tmean data ", + "available for combination ", ctemp, + " with files = ", paste(shQuote(basename(ftmp2)), collapse = "/") + ) + } else { + stop("No suitable netCDF file with temperature data ", + "available for combination ", ctemp + ) + } + } + } + + #--- Convert units + prcp[["values"]] <- rSW2utils::convert_precipitation( + x = prcp[["values"]], + dpm = NA, + unit_from = climDB_meta[["var_desc"]]["prcp", "unit_real"], + unit_to = "cm/day" + ) + + tmin[["values"]] <- rSW2utils::convert_temperature( + x = tmin[["values"]], + unit_from = climDB_meta[["var_desc"]]["tmin", "unit_real"], + unit_to = "C" + ) + + tmax[["values"]] <- rSW2utils::convert_temperature( + x = tmax[["values"]], + unit_from = climDB_meta[["var_desc"]]["tmax", "unit_real"], + unit_to = "C" + ) + + list( + tmax = tmax, + tmin = tmin, + prcp = prcp + ) +} + + + +get_DailyScenarioData_netCDF <- function(id_sim_scen, + sim_scen_ids1, sim_scen_ids1_by_dbW, reqGCMs, reqRCPsPerGCM, + 
clim_source, climDB_meta, climDB_files, locations, getYears, + fdbWeather, compression_type, write_tmp_to_disk, + dir_out_temp, dir_failed, resume, verbose) { + + if (!rSOILWAT2::dbW_IsValid()) { + rSOILWAT2::dbW_setConnection(dbFilePath = fdbWeather) + } + + ids_Done <- NULL + + #--- Determine RCP x GCM + sim_scen <- sim_scen_ids1[id_sim_scen] + id <- strsplit(sim_scen, split = ".", fixed = TRUE)[[1]] + gcm <- id[4] + igcm <- which(tolower(gcm) == tolower(reqGCMs)) + stopifnot(length(igcm) == 1) + scen <- id[3] + isc <- which(tolower(scen) == tolower(reqRCPsPerGCM[[igcm]])) + stopifnot(length(isc) == 1) + slice <- if (tolower(scen) == "historical") "first" else "second" + + + #--- Determine which sites still need data + # `ids_todo_sites` is an index for `locations` + if (resume) { + ids_todo_sites <- which(!as.vector(rSOILWAT2::dbW_has_weatherData( + Site_ids = locations[, "Site_id_by_dbW"], + Scenario_ids = sim_scen_ids1_by_dbW[id_sim_scen] + ))) + + } else { + ids_todo_sites <- seq_len(nrow(locations)) + } + + n_todo_sites <- length(ids_todo_sites) + + if (length(ids_todo_sites) > 0) { + # `ids_seq_todo_sites is indexing objects subset by `ids_todo_sites`, + # e.g., `x` + ids_seq_todo_sites <- seq_along(ids_todo_sites) + + if (verbose) { + print(paste("'get_DailyScenarioData_netCDF':", Sys.time(), + "extracting data from", shQuote(sim_scen), + "for sites n =", n_todo_sites + )) + } + + var_map <- data.frame( + vars_rSOILWAT2 = c("Tmax_C", "Tmin_C", "PPT_cm"), + vars_get_DailyGCMdata_netCDF = c("tmax", "tmin", "prcp"), + stringsAsFactors = FALSE + ) + + + #--- Select netCDF files for this 'gcm', scenarios, and variables + tmp <- select_suitable_CFs( + climDB_files = climDB_files, + climDB_meta = climDB_meta, + getYears = getYears, + model_name = gcm, + scenario_names = scen + ) + + fnc_gcmXscens <- tmp[["files"]] + rip <- tmp[["rip"]] + + + #--- Extract data (for all sites at once) + x <- try( + get_DailyGCMdata_netCDF( + i_tag = paste(c(gcm, scen, rip), 
collapse = " * "), + climDB_meta = climDB_meta, + ncFiles = fnc_gcmXscens, + startyear = getYears[[slice]][1, 1], + endyear = getYears[[slice]][1, 2], + lon = locations[ids_todo_sites, "X_WGS84"], + lat = locations[ids_todo_sites, "Y_WGS84"] + ), + silent = TRUE + ) + + if (!inherits(x, "try-error")) { + df_time <- x[["prcp"]][["nct"]][["time"]] + + df_site_template <- data.frame( + Year = 1900 + df_time$year, + DOY = 1 + df_time$yday, + Tmax_C = NA, + Tmin_C = NA, + PPT_cm = NA + ) + + #--- Convert array into rSOILWAT2 weather objects for each site + tmp_ids <- lapply( + X = ids_seq_todo_sites, + FUN = try_prepare_site_with_daily_scenario_weather, + data = x, + rcp = scen, + scenario = sim_scen, + scenario_id_by_dbW = sim_scen_ids1_by_dbW[id_sim_scen], + site_ids_by_dbW = locations[ids_todo_sites, "Site_id_by_dbW"], + var_map = var_map, + df_time = df_time, + df_site_template = df_site_template, + compression_type = compression_type, + write_tmp_to_disk = write_tmp_to_disk, + path = file.path(dir_out_temp, tolower(gcm)), + filenames = paste0( + clim_source, + "_SiteID", locations[ids_todo_sites, "site_id"], "-", + gcm, "-", scen, ".rds" + ), + dir_failed = dir_failed + ) + + tmp_ids <- as.vector(stats::na.omit(unlist(tmp_ids))) + + if (length(tmp_ids) > 0) { + ids_Done <- (ids_todo_sites[tmp_ids] - 1) * length(reqGCMs) + igcm + } + + } else { + print(paste( + "'get_DailyScenarioData_netCDF': ", + "call to 'get_DailyGCMdata_netCDF' failed with error message:", + shQuote(attr(x, "condition")[["message"]]) + )) + } + } + + ids_Done +} + + +prepare_site_with_daily_scenario_weather <- function(i, + data, rcp, scenario, scenario_id_by_dbW, site_id_by_dbW, var_map, + df_time, df_site_template, compression_type, write_tmp_to_disk, filename) { + + df_site <- df_site_template + + years <- range(df_site[, "Year"], na.rm = TRUE) + + for (iv in seq_len(nrow(var_map))) { + v1 <- var_map[iv, "vars_get_DailyGCMdata_netCDF"] + v2 <- var_map[iv, "vars_rSOILWAT2"] + + ix <- 
data[[v1]][["ncg"]][["ix"]][i] + iy <- data[[v1]][["ncg"]][["iy"]][i] + + ids <- rep(NA, 3) + ilon <- which("longitude" == data[[v1]][["dimnames"]]) + ids[ilon] <- ix + ilat <- which("latitude" == data[[v1]][["dimnames"]]) + ids[ilat] <- iy + + tmp <- eval(expr = str2expression(paste0( + "data[['", v1, "']][['values']][", + paste(ifelse(is.na(ids), "", ids), collapse = ","), + "]" + ))) + + idt <- match(df_time, data[[v1]][["nct"]][["time"]], nomatch = 0) + + df_site[idt > 0, v2] <- tmp[idt] + } + + scen_fut_daily <- rSOILWAT2::dbW_dataframe_to_weatherData( + weatherDF = df_site, + weatherDF_dataColumns = colnames(df_site)[-1] + ) + + blob_scen_fut_daily <- rSOILWAT2::dbW_weatherData_to_blob( + weatherData = scen_fut_daily, + type = compression_type + ) + + if (write_tmp_to_disk) { + # Prepare object for later insertion into weather database + # by function `copy_tempdata_to_dbW` + df_wdataOut <- data.frame( + todo = TRUE, + downscaling = "idem", + futures = "dall", + rcps = rcp, + tag = paste0("dall.", rcp), + Scenario = scenario, + Scenario_id = scenario_id_by_dbW, + Site_id_by_dbW = site_id_by_dbW, + StartYear = years[1], + EndYear = years[2], + weatherData = NA + ) + + df_wdataOut[["weatherData"]] <- list(blob_scen_fut_daily) + + saveRDS(object = df_wdataOut, file = filename) + + } else { + # Insert into weather database directly: + # Faster than writing to disk and then importing into dbWeather by + # `copy_tempdata_to_dbW` -- but only possible if not working in parallel + # mode + rSOILWAT2:::dbW_addWeatherDataNoCheck( + Site_id = site_id_by_dbW, + Scenario_id = scenario_id_by_dbW, + StartYear = years[1], + EndYear = years[2], + weather_blob = blob_scen_fut_daily + ) + } + + i +} + +try_prepare_site_with_daily_scenario_weather <- function(i, + data, rcp, scenario, scenario_id_by_dbW, site_ids_by_dbW, var_map, df_time, + df_site_template, compression_type, write_tmp_to_disk, path, filenames, + dir_failed) { + + tmp <- try( + 
prepare_site_with_daily_scenario_weather( + i = i, + data = data, + rcp = rcp, + scenario = scenario, + scenario_id_by_dbW = scenario_id_by_dbW, + site_id_by_dbW = site_ids_by_dbW[i], + var_map = var_map, + df_time = df_time, + df_site_template = df_site_template, + compression_type = compression_type, + write_tmp_to_disk = write_tmp_to_disk, + filename = file.path(path, filenames[i]) + ) + ) + + if (inherits(tmp, "try-error")) { + print(paste(Sys.time(), tmp)) + + save( + list = ls(), + file = file.path(dir_failed, + paste0("ClimScenDaily_failed_", filenames[i], ".RData") + ) + ) + + res <- NA + + } else { + res <- i + } + + res +} + + +#' Extract daily climate scenario data +#' +#' @section Details: +#' This function parallelize over \code{reqGCMs} x \code{reqRCPsPerGCM} +#' combinations, i.e., data for all \code{locations} are extracted for +#' one value of \code{reqGCMs} x \code{reqRCPsPerGCM} at a time. +#' This is good if file handling is slow and memory is not limiting. +#' However, if data cannot be loaded into memory, then this function +#' cannot work. In this case, this function would need to be re-written to +#' additionally loop over chunks of \code{locations}. +#' +#' @section Notes: This function works only if +#' \itemize{ +#' \item \code{sim_time[["future_yrs"]]} contains \var{"dall"} and +#' \item \code{reqDownscalingsPerGCM} is \var{"idem"}. 
+#' } +#' +#' @seealso \code{\link{calc_MonthlyScenarioWeather}} +#' +calc_DailyScenarioWeather <- function(ids_ToDo, clim_source, climDB_meta, + climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, + locations, compression_type, getYears, sim_scen_ids, + dir_out_temp, dir_failed, fdbWeather, resume, verbose) { + + #--- ids_ToDo based on length(reqGCMs) x nrow(locations) + #TODO: this should also consider reqRCPs + ids_Done <- NULL + + stopifnot(all(unlist(reqDownscalingsPerGCM) == "idem")) + + #--- Check weather database connection + if (!rSOILWAT2::dbW_IsValid()) { + rSOILWAT2::dbW_setConnection(dbFilePath = fdbWeather) + } + + if (rSOILWAT2::dbW_IsValid()) { + # Scenario IDs in dbWeather (without current/ambient/observational) + sim_scen_ids1_by_dbW <- rSOILWAT2::dbW_getScenarioId( + Scenario = sim_scen_ids[-1], + ignore.case = TRUE + ) + + ids_seq_scens <- seq_along(sim_scen_ids1_by_dbW) + + #--- Extract and loop over GCM x RCP combinations (in parallel) + if (SFSW2_glovars[["p_has"]]) { + if (identical(SFSW2_glovars[["p_type"]], "mpi")) { + Rmpi::mpi.bcast.cmd( + cmd = dbW_setConnection_local, + dbFilePath = fdbWeather + ) + on.exit(Rmpi::mpi.bcast.cmd(dbW_disconnectConnection_local), add = TRUE) + + ids_Done <- Rmpi::mpi.applyLB( + ids_seq_scens, + get_DailyScenarioData_netCDF, + locations = locations, + sim_scen_ids1 = sim_scen_ids[-1], + sim_scen_ids1_by_dbW = sim_scen_ids1_by_dbW, + reqGCMs = reqGCMs, + reqRCPsPerGCM = reqRCPsPerGCM, + clim_source = clim_source, + climDB_meta = climDB_meta, + climDB_files = climDB_files, + getYears = getYears, + fdbWeather = fdbWeather, + compression_type = compression_type, + write_tmp_to_disk = SFSW2_glovars[["p_has"]], + dir_out_temp = dir_out_temp, + dir_failed = dir_failed, + resume = resume, + verbose = verbose + ) + + } else if (identical(SFSW2_glovars[["p_type"]], "socket")) { + parallel::clusterCall( + SFSW2_glovars[["p_cl"]], + fun = rSOILWAT2::dbW_setConnection, + dbFilePath = fdbWeather + ) + on.exit( + 
parallel::clusterEvalQ( + SFSW2_glovars[["p_cl"]], + rSOILWAT2::dbW_disconnectConnection() + ), + add = TRUE + ) + + ids_Done <- parallel::clusterApplyLB( + cl = SFSW2_glovars[["p_cl"]], + x = ids_seq_scens, + fun = get_DailyScenarioData_netCDF, + locations = locations, + sim_scen_ids1 = sim_scen_ids[-1], + sim_scen_ids1_by_dbW = sim_scen_ids1_by_dbW, + reqGCMs = reqGCMs, + reqRCPsPerGCM = reqRCPsPerGCM, + clim_source = clim_source, + climDB_meta = climDB_meta, + climDB_files = climDB_files, + getYears = getYears, + fdbWeather = fdbWeather, + compression_type = compression_type, + write_tmp_to_disk = SFSW2_glovars[["p_has"]], + dir_out_temp = dir_out_temp, + dir_failed = dir_failed, + resume = resume, + verbose = verbose + ) + + } else { + ids_Done <- NULL + } + + clean_SFSW2_cluster() + + } else { + ids_Done <- lapply(ids_seq_scens, + FUN = get_DailyScenarioData_netCDF, + locations = locations, + sim_scen_ids1 = sim_scen_ids[-1], + sim_scen_ids1_by_dbW = sim_scen_ids1_by_dbW, + reqGCMs = reqGCMs, + reqRCPsPerGCM = reqRCPsPerGCM, + clim_source = clim_source, + climDB_meta = climDB_meta, + climDB_files = climDB_files, + getYears = getYears, + fdbWeather = fdbWeather, + compression_type = compression_type, + write_tmp_to_disk = SFSW2_glovars[["p_has"]], + dir_out_temp = dir_out_temp, + dir_failed = dir_failed, + resume = resume, + verbose = verbose + ) + } + + } else { + print(paste( + "'calc_DailyScenarioWeather': ", + "failed because weather database cannot be accessed." 
+ )) + } + + unlist(ids_Done) +} + +#------ End of Daily netCDF extractions + + + +#------Extraction functions------ + +#' Subset a list of netCDF CF file names to specific models, scenarios, +#' and variables +select_suitable_CFs <- function(climDB_files, climDB_meta, getYears, + model_name, scenario_names) { + + files <- climDB_files + n_scens <- length(scenario_names) + + tag <- paste0( + climDB_meta[["sep_fname"]], model_name, climDB_meta[["sep_fname"]] + ) + files <- grep(tag, files, ignore.case = TRUE, value = TRUE) + + tag <- paste0( + climDB_meta[["sep_fname"]], scenario_names, climDB_meta[["sep_fname"]] + ) + tag <- paste0("(", tag, ")", collapse = "|") + files <- grep(tag, files, ignore.case = TRUE, value = TRUE) + + tag <- paste0( + "(", climDB_meta[["var_desc"]][["fileVarTags"]], ")", + collapse = "|" + ) + files <- grep(tag, files, ignore.case = TRUE, value = TRUE) + + fnc_parts <- strsplit( + basename(files), + split = climDB_meta[["sep_fname"]], + fixed = TRUE + ) + + #--- Determine most suitable 'ensemble member' rip that is available + temp <- climDB_meta[["str_fname"]][c("id_scen", "id_var", "id_run")] + ptemp <- sapply(fnc_parts, function(x) x[temp]) + + # Number of netCDF files per scenario, variable, and rip + # 'pnc_count' should be + # * = 1 if netCDF file is available per scen x var x rip combination + # * > 1 if multiple time periods are available + # * = 0 if files are missing + # (yet 'tas' is allowed to replace missing 'tasmax'+'tasmin') + pnc_count <- table(ptemp[1, ], ptemp[2, ], ptemp[3, ]) + pnc_avail <- apply(pnc_count, 2:3, function(x) sum(x >= 1) >= n_scens) + temp <- apply(pnc_avail, 2, function(x) { + x[climDB_meta[["var_desc"]]["prcp", "tag"]] && ( + x[climDB_meta[["var_desc"]]["tmean", "tag"]] || ( + x[climDB_meta[["var_desc"]]["tmax", "tag"]] && + x[climDB_meta[["var_desc"]]["tmin", "tag"]])) + }) + rips <- names(temp) + rip <- if (length(rips) > 1) sort(rips)[1] else rips + + if (length(rip) == 0) { + stop("Input file(s) ", 
+ "for model ", shQuote(model_name), + " and scenario(s) ", paste(shQuote(scenario_names), collapse = "/"), + " not available: ", + paste0(colnames(pnc_avail), ": ", + apply(pnc_avail, 2, + function(x) + paste(rownames(pnc_avail), "=", x, collapse = "/") + ), + collapse = " - " + ) + ) + } + + # Double check what time period to choose (the one with the most overlap to + # requested years) if multiple netCDF files for selected combination of + # scen x var x rip are present + pnc_count_rip <- array( + pnc_count[,, rip], + dim = dim(pnc_count)[1:2], + dimnames = dimnames(pnc_count)[1:2] + ) + i_count_rip <- which(pnc_count_rip > 1, arr.ind = TRUE) + fnc_parts2 <- fnc_parts # information that is used to index/subset files + req_years <- c( + seq.int(getYears[["first"]][1, 1], getYears[["first"]][1, 2]), + unlist(lapply(seq_len(nrow(getYears[["second"]])), + function(k) + seq.int(getYears[["second"]][k, 1], getYears[["second"]][k, 2]) + )) + ) + + for (k in seq_len(nrow(i_count_rip))) { + temp_var <- colnames(pnc_count_rip)[i_count_rip[k, "col"]] + temp_scen <- rownames(pnc_count_rip)[i_count_rip[k, "row"]] + + ids_fnc <- which(sapply(fnc_parts2, function(x) + any(x == rip) && any(x == temp_var) && any(x == temp_scen) + )) + temp_times <- lapply(fnc_parts2[ids_fnc], function(x) { + temp <- x[climDB_meta[["str_fname"]]["id_time"]] + seq.int(as.integer(substr(temp, 1, 4)), as.integer(substr(temp, 8, 11))) + }) + + temp_overlap <- sapply(temp_times, function(x) sum(x %in% req_years)) + imax_overlap <- which.max(temp_overlap) # the one to keep + itemp_remove <- ids_fnc[-imax_overlap] + + files <- files[-itemp_remove] + fnc_parts2 <- fnc_parts2[-itemp_remove] + } + + # Subset files to selected rip + if (length(rip) > 0) { + tag <- paste0(climDB_meta[["sep_fname"]], rip, climDB_meta[["sep_fname"]]) + files <- grep(tag, files, ignore.case = TRUE, value = TRUE) + } + + # Check that selected netCDF-files are available for requested variables: + # 'prcp' and ('tmean' or ('tmax' 
and 'tmin')) + fnc_parts <- strsplit(basename(files), split = climDB_meta[["sep_fname"]], + fixed = TRUE + ) + ptemp <- sapply(fnc_parts, + function(x) x[climDB_meta[["str_fname"]][c("id_scen", "id_var")]] + ) + pnc_count <- table(ptemp[1, ], ptemp[2, ]) + pnc_temp <- apply(pnc_count, 2, function(x) sum(x == 1) >= n_scens) + + pnc_avail <- pnc_temp[climDB_meta[["var_desc"]]["prcp", "tag"]] && ( + pnc_temp[climDB_meta[["var_desc"]]["tmean", "tag"]] || + all(pnc_temp[climDB_meta[["var_desc"]][c("tmin", "tmax"), "tag"]])) + + if (!pnc_avail) { + stop("File(s) for model ", shQuote(model_name), + " and scenario(s) ", paste(shQuote(scenario_names), collapse = "/"), + " not available for required variables: ", + paste(shQuote(names(pnc_temp)[!pnc_temp]), collapse = "/") + ) + } + + list(rip = rip, files = files) +} + +#' Organizes the calls (in parallel) which obtain specified scenario weather +#' for the weather database from one of the available \var{GCM} sources +#' +#' This function assumes that a whole bunch of global variables exist and +#' contain appropriate values. +#' +#' @section Details: +#' The daily extractions parallelize over \var{GCM} x \var{scenario} +#' combinations, i.e., data for all \var{locations} are extracted for +#' one value of \var{GCM} x \var{scenario} at a time. This is good if +#' file handling is slow and memory is not limiting. +#' +#' The monthly extractions parallelize over \var{GCM} x \var{locations} +#' combinations, i.e., data for one \var{location} is extracted for +#' all \var{scenarios} of one \var{GCM} at a time. This is good if file +#' handling is fast and memory is limiting. #' #' @param seed A seed set, \code{NULL}, or \code{NA}. \code{NA} will not affect -#' the state of the \acronym{RNG}; \code{NULL} will re-initialize the \acronym{RNG}; -#' and all other values are passed to \code{\link{set.seed}}. 
+#' the state of the \acronym{RNG}; \code{NULL} will re-initialize the +#' \acronym{RNG}; and all other values are passed to \code{\link{set.seed}}. tryToGet_ClimDB <- function(ids_ToDo, clim_source, use_CF, use_NEX, climDB_meta, - climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, locations, getYears, - assocYears, project_paths, dir_failed, fdbWeather, climate.ambient, - dbW_compression_type, sim_time, seeds_DS, opt_DS, resume, verbose, + climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, locations, + getYears, assocYears, project_paths, dir_failed, fdbWeather, climate.ambient, + dbW_compression_type, sim_time, seeds_DS, sim_scens, resume, verbose, print.debug, seed = NA) { #requests ids_ToDo: fastest if nc file is # - DONE: permutated to (lat, lon, time) instead (time, lat, lon) - # - TODO: many sites are extracted from one nc-read instead of one site per nc-read (see benchmarking_GDODCPUCLLNL_extractions.R) - #TODO: create chunks for ids_ToDo of size sites_per_chunk_N that use the same access to a nc file and distribute among workersN + # - TODO: many sites are extracted from one nc-read instead of one site + # per nc-read (see benchmarking_GDODCPUCLLNL_extractions.R) + #TODO: create chunks for ids_ToDo of size sites_per_chunk_N that use the + # same access to a nc file and distribute among workersN + + do_idem <- + "dall" %in% rownames(sim_time[["future_yrs"]]) && + "idem" %in% unlist(reqDownscalingsPerGCM) && + "daily" %in% climDB_meta[["tres"]] && + use_CF + + if (do_idem) { + ids_Done <- calc_DailyScenarioWeather( + ids_ToDo = ids_ToDo, + clim_source = clim_source, + climDB_meta = climDB_meta, + climDB_files = climDB_files, + reqGCMs = reqGCMs, + reqRCPsPerGCM = reqRCPsPerGCM, + reqDownscalingsPerGCM = reqDownscalingsPerGCM, + locations = locations, + compression_type = dbW_compression_type, + getYears = getYears, + sim_scen_ids = sim_scens[["id"]], + dir_out_temp = project_paths[["dir_out_temp"]], + dir_failed = dir_failed, + fdbWeather 
= fdbWeather, + resume = resume, + verbose = verbose + ) + + + } else { + + stopifnot("daily" != climDB_meta[["tres"]]) - if (SFSW2_glovars[["p_has"]]) { - if (!is.na(seed)) set.seed(seed) - ids_ToDo <- sample(x = ids_ToDo, size = length(ids_ToDo)) #attempt to prevent reading from same .nc at the same time + if (SFSW2_glovars[["p_has"]]) { + if (!is.na(seed)) set.seed(seed) - # extract the GCM data depending on parallel backend - if (identical(SFSW2_glovars[["p_type"]], "mpi")) { - Rmpi::mpi.bcast.cmd(cmd = dbW_setConnection_local, dbFilePath = fdbWeather) - on.exit(Rmpi::mpi.bcast.cmd(dbW_disconnectConnection_local), add = TRUE) + # attempt to prevent reading from same .nc at the same time + ids_ToDo <- sample(x = ids_ToDo, size = length(ids_ToDo)) - ids_Done <- Rmpi::mpi.applyLB(ids_ToDo, try.ScenarioWeather, + # extract the GCM data depending on parallel backend + if (identical(SFSW2_glovars[["p_type"]], "mpi")) { + Rmpi::mpi.bcast.cmd( + cmd = dbW_setConnection_local, + dbFilePath = fdbWeather + ) + on.exit(Rmpi::mpi.bcast.cmd(dbW_disconnectConnection_local), add = TRUE) + + ids_Done <- Rmpi::mpi.applyLB(ids_ToDo, + try_MonthlyScenarioWeather, clim_source = clim_source, use_CF = use_CF, use_NEX = use_NEX, climDB_meta = climDB_meta, climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, @@ -2669,18 +3959,22 @@ tryToGet_ClimDB <- function(ids_ToDo, clim_source, use_CF, use_NEX, climDB_meta, getYears = getYears, assocYears = assocYears, sim_time = sim_time, seeds_DS = seeds_DS, - opt_DS = opt_DS, - project_paths = project_paths, dir_failed = dir_failed, fdbWeather = fdbWeather, + opt_DS = sim_scens[["opt_DS"]], + project_paths = project_paths, + dir_failed = dir_failed, + fdbWeather = fdbWeather, resume = resume, - verbose = verbose, print.debug = print.debug) + verbose = verbose, print.debug = print.debug + ) - } else if (identical(SFSW2_glovars[["p_type"]], "socket")) { - parallel::clusterCall(SFSW2_glovars[["p_cl"]], - fun = 
rSOILWAT2::dbW_setConnection, dbFilePath = fdbWeather) - on.exit(parallel::clusterEvalQ(SFSW2_glovars[["p_cl"]], - rSOILWAT2::dbW_disconnectConnection()), add = TRUE) + } else if (identical(SFSW2_glovars[["p_type"]], "socket")) { + parallel::clusterCall(SFSW2_glovars[["p_cl"]], + fun = rSOILWAT2::dbW_setConnection, dbFilePath = fdbWeather) + on.exit(parallel::clusterEvalQ(SFSW2_glovars[["p_cl"]], + rSOILWAT2::dbW_disconnectConnection()), add = TRUE) - ids_Done <- parallel::clusterApplyLB(SFSW2_glovars[["p_cl"]], x = ids_ToDo, fun = try.ScenarioWeather, + ids_Done <- parallel::clusterApplyLB(SFSW2_glovars[["p_cl"]], + x = ids_ToDo, fun = try_MonthlyScenarioWeather, clim_source = clim_source, use_CF = use_CF, use_NEX = use_NEX, climDB_meta = climDB_meta, climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, @@ -2691,44 +3985,56 @@ tryToGet_ClimDB <- function(ids_ToDo, clim_source, use_CF, use_NEX, climDB_meta, getYears = getYears, assocYears = assocYears, sim_time = sim_time, seeds_DS = seeds_DS, - opt_DS = opt_DS, - project_paths = project_paths, dir_failed = dir_failed, fdbWeather = fdbWeather, + opt_DS = sim_scens[["opt_DS"]], + project_paths = project_paths, + dir_failed = dir_failed, + fdbWeather = fdbWeather, resume = resume, - verbose = verbose, print.debug = print.debug) + verbose = verbose, print.debug = print.debug + ) - } else { - ids_Done <- NULL - } + } else { + ids_Done <- NULL + } - clean_SFSW2_cluster() + clean_SFSW2_cluster() - } else { - rSOILWAT2::dbW_setConnection(dbFilePath = fdbWeather) - on.exit(rSOILWAT2::dbW_disconnectConnection(), add = TRUE) - - ids_Done <- lapply(ids_ToDo, FUN = try.ScenarioWeather, - clim_source = clim_source, use_CF = use_CF, use_NEX = use_NEX, - climDB_meta = climDB_meta, climDB_files = climDB_files, - reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, - reqDownscalingsPerGCM = reqDownscalingsPerGCM, - climate.ambient = climate.ambient, - locations = locations, - compression_type = 
dbW_compression_type, - getYears = getYears, assocYears = assocYears, - sim_time = sim_time, - seeds_DS = seeds_DS, - opt_DS = opt_DS, - project_paths = project_paths, dir_failed = dir_failed, fdbWeather = fdbWeather, - resume = resume, - verbose = verbose, print.debug = print.debug) - ids_Done <- do.call(c, ids_Done) + } else { + rSOILWAT2::dbW_setConnection(dbFilePath = fdbWeather) + on.exit(rSOILWAT2::dbW_disconnectConnection(), add = TRUE) + + ids_Done <- lapply(ids_ToDo, + FUN = try_MonthlyScenarioWeather, + clim_source = clim_source, use_CF = use_CF, use_NEX = use_NEX, + climDB_meta = climDB_meta, climDB_files = climDB_files, + reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, + reqDownscalingsPerGCM = reqDownscalingsPerGCM, + climate.ambient = climate.ambient, + locations = locations, + compression_type = dbW_compression_type, + getYears = getYears, assocYears = assocYears, + sim_time = sim_time, + seeds_DS = seeds_DS, + opt_DS = sim_scens[["opt_DS"]], + project_paths = project_paths, + dir_failed = dir_failed, + fdbWeather = fdbWeather, + resume = resume, + verbose = verbose, + print.debug = print.debug + ) + + ids_Done <- do.call(c, ids_Done) + } } sort(unlist(ids_Done)) } -copy_tempdata_to_dbW <- function(fdbWeather, clim_source, dir_out_temp, verbose = FALSE) { +copy_tempdata_to_dbW <- function(fdbWeather, clim_source, dir_out_temp, + verbose = FALSE) { + if (verbose) { t1 <- Sys.time() temp_call <- shQuote(match.call()[1]) @@ -2740,13 +4046,20 @@ copy_tempdata_to_dbW <- function(fdbWeather, clim_source, dir_out_temp, verbose dir_failed <- file.path(dir_out_temp, "failed_copy_tempdata_to_dbW") dir.create2(dir_failed, showWarnings = FALSE) - temp_files <- list.files(path = dir_out_temp, pattern = clim_source, recursive = TRUE, - include.dirs = FALSE, no.. = TRUE) + temp_files <- list.files( + path = dir_out_temp, + pattern = clim_source, + recursive = TRUE, + include.dirs = FALSE, + no.. 
= TRUE + ) if (length(temp_files) > 0) { if (verbose) { - print(paste0("rSFSW2's ", temp_call, ": started at ", t1, " with adding temporary ", - "files (", length(temp_files), ") into database for ", shQuote(clim_source))) + print(paste0("rSFSW2's ", temp_call, ": started at ", t1, + " with adding temporary files (", length(temp_files), + ") into database for ", shQuote(clim_source) + )) on.exit({ print(paste0("rSFSW2's ", temp_call, ": ended after ", @@ -2754,8 +4067,9 @@ copy_tempdata_to_dbW <- function(fdbWeather, clim_source, dir_out_temp, verbose add = TRUE) } - req_wdata_fields <- c("todo", "rcps", "futures", "downscaling", "tag", "Scenario", - "Scenario_id", "Site_id_by_dbW", "StartYear", "EndYear", "weatherData") + req_wdata_fields <- c("todo", "rcps", "futures", "downscaling", "tag", + "Scenario", "Scenario_id", "Site_id_by_dbW", "StartYear", "EndYear", + "weatherData") for (f in temp_files) { ok <- 0 @@ -2773,7 +4087,8 @@ copy_tempdata_to_dbW <- function(fdbWeather, clim_source, dir_out_temp, verbose Scenario_id = df_wdataOut[["Scenario_id"]][k], StartYear = df_wdataOut[["StartYear"]][k], EndYear = df_wdataOut[["EndYear"]][k], - weather_blob = df_wdataOut[["weatherData"]][k][[1]])) + weather_blob = df_wdataOut[["weatherData"]][k][[1]] + )) if (!inherits(res, "try-error")) { ok <- ok + 1 @@ -2785,13 +4100,18 @@ copy_tempdata_to_dbW <- function(fdbWeather, clim_source, dir_out_temp, verbose if (verbose) { print(paste0(Sys.time(), ": temporary scenario file ", shQuote(f), - " successfully added n = ", ok, " out of t = ", sum(df_wdataOut[["todo"]]), - " records to weather database", if (fail) " and some failed to add")) + " successfully added n = ", ok, + " out of t = ", sum(df_wdataOut[["todo"]]), + " records to weather database", + if (fail) " and some failed to add" + )) } } else { - print(paste("Temporary scenario file", shQuote(f), "cannot be read, likely", - " because it is corrupted or contains malformed data.")) + print(paste("Temporary scenario 
file", shQuote(f), + "cannot be read, likely because it is corrupted", + "or contains malformed data." + )) fail <- TRUE } @@ -2854,61 +4174,97 @@ climscen_determine_sources <- function(climDB_metas, SFSW2_prj_meta, SFSW2_prj_i #' @references \url{http://cfconventions.org/} -is_ClimateForecastConvention <- function(x, ignore.case = TRUE) { - grepl("(BCSD_GDODCPUCLLNL)|(SageSeer)|(ESGF)", x, ignore.case = ignore.case) +is_ClimateForecastConvention <- function(climDB_meta) { + grepl("CF", climDB_meta[["convention"]], ignore.case = TRUE) } #' @references \url{https://nex.nasa.gov/nex/projects/1356/} -is_NEX <- function(x, ignore.case = TRUE) { - grepl("NEX", x, ignore.case = ignore.case) +is_NEX <- function(climDB_meta) { + "NEX" %in% climDB_meta[["convention"]] } #' Calculate historical and future simulation time slices #' #' @param sim_time A list with elements \code{future_N}, \code{future_yrs}, #' \code{DScur_startyr}, and \code{DScur_endyr}. -#' @param tbox A data.frame or matrix with two rows \code{start} and \code{end} and two -#' columns \code{first} and \code{second} describing years including in a specific -#' climate data source. +#' @param tbox A data.frame or matrix with two rows \code{start} and +#' \code{end} and two columns \code{first} and \code{second} describing years +#' including in a specific climate data source. #' -#' @return A data.frame with rows for each extraction run-slice and four columns 'Run', -#' 'Slice', 'Time', and 'Year'. +#' @return A data.frame with rows for each extraction run-slice and +#' four columns 'Run', 'Slice', 'Time', and 'Year'. 
calc_timeSlices <- function(sim_time, tbox) { - # timing: time slices: data is organized into 'historical' runs 1950-2005 ( = "first") - # and future 'rcp' runs 2006-2099 ( = "second") - timeSlices <- data.frame(matrix(NA, nrow = 4 + 4 * sim_time[["future_N"]], ncol = 4, - dimnames = list(NULL, c("Run", "Slice", "Time", "Year")))) - timeSlices[, 1:3] <- expand.grid(c("start", "end"), c("first", "second"), - c("historical", rownames(sim_time[["future_yrs"]])))[, 3:1] - - #historic conditions for downscaling - timeSlices[1, 4] <- max(tbox["start", "first"], sim_time[["DScur_startyr"]]) - timeSlices[2, 4] <- min(tbox["end", "first"], sim_time[["DScur_endyr"]]) - - if (sim_time[["DScur_endyr"]] > tbox["end", "first"]) { - timeSlices[3, 4] <- tbox["start", "second"] - timeSlices[4, 4] <- min(tbox["end", "second"], sim_time[["DScur_endyr"]]) - } - - #future conditions for downscaling - for (it in seq_len(sim_time[["future_N"]])) { - it4 <- 4L * it - timeSlices[3 + it4, 4] <- max(tbox["start", "second"], - sim_time[["future_yrs"]][it, "DSfut_startyr"]) - timeSlices[4 + it4, 4] <- min(tbox["end", "second"], - sim_time[["future_yrs"]][it, "DSfut_endyr"]) #limits timeSlices to 2099 - - if (sim_time[["DScur_startyr"]] < 1950) { - # TODO(drs): I don't know where the hard coded value of 1950 comes from; it doesn't - # make sense to me - print("Note: adjustment to 'timeSlices' because 'DScur_startyr < 1950'") - timeSlices[4 + it4, 4] <- min(timeSlices[4 + it4, 4], timeSlices[4 + 3*it, 4] + - (timeSlices[4, 4]-timeSlices[1, 4])) + # timing: time slices: data is organized into + # - 'historical' runs 1950-2005 ( = "first"), and + # - future 'rcp' runs 2006-2099 ( = "second") + + deltas <- rownames(sim_time[["future_yrs"]]) + + if ("dall" %in% deltas) { + future_N <- 0 + runs <- "dall" + } else { + future_N <- sim_time[["future_N"]] + runs <- c("historical", deltas) + } + + timeSlices <- data.frame(matrix(NA, + nrow = 4 + 4 * future_N, + ncol = 4, + dimnames = list(NULL, c("Run", 
"Slice", "Time", "Year")) + )) + + timeSlices[, 1:3] <- expand.grid( + c("start", "end"), + c("first", "second"), + runs + )[, 3:1] + + if ("dall" %in% deltas) { + timeSlices[, "Year"] <- unlist(tbox) + + } else { + + # historic conditions for downscaling + timeSlices[1, 4] <- max(tbox["start", "first"], sim_time[["DScur_startyr"]]) + timeSlices[2, 4] <- min(tbox["end", "first"], sim_time[["DScur_endyr"]]) + + if (sim_time[["DScur_endyr"]] > tbox["end", "first"]) { + timeSlices[3, 4] <- tbox["start", "second"] + timeSlices[4, 4] <- min(tbox["end", "second"], sim_time[["DScur_endyr"]]) } - if (sim_time[["future_yrs"]][it, "DSfut_startyr"] < tbox["start", "second"]) { - timeSlices[1 + it4, 4] <- max(tbox["start", "first"], - sim_time[["future_yrs"]][it, "DSfut_startyr"]) - timeSlices[2 + it4, 4] <- tbox["start", "second"] + + # future conditions for downscaling + for (it in seq_len(sim_time[["future_N"]])) { + it4 <- 4L * it + timeSlices[3 + it4, 4] <- max( + tbox["start", "second"], + sim_time[["future_yrs"]][it, "DSfut_startyr"] + ) + timeSlices[4 + it4, 4] <- min( + tbox["end", "second"], + sim_time[["future_yrs"]][it, "DSfut_endyr"] + ) #limits timeSlices to 2099 + + if (sim_time[["DScur_startyr"]] < 1950) { + # TODO(drs): I don't know where the hard coded value of 1950 comes from; + # it doesn't make sense to me + print("Note: adjustment to 'timeSlices' because 'DScur_startyr < 1950'") + timeSlices[4 + it4, 4] <- min( + timeSlices[4 + it4, 4], + timeSlices[4 + 3*it, 4] + (timeSlices[4, 4]-timeSlices[1, 4]) + ) + } + + if ( + sim_time[["future_yrs"]][it, "DSfut_startyr"] < tbox["start", "second"]) { + + timeSlices[1 + it4, 4] <- max( + tbox["start", "first"], + sim_time[["future_yrs"]][it, "DSfut_startyr"] + ) + timeSlices[2 + it4, 4] <- tbox["start", "second"] + } } } @@ -2922,69 +4278,116 @@ calc_getYears <- function(timeSlices) { x <- list( n_first = nrow(temp1), first = temp1, - n_second = nrow(temp2), second = temp2) - - #Monthly time-series - temp1 <- 
list(ISOdate(x[["first"]][, 1], 1, 1, tz = "UTC"), - ISOdate(x[["first"]][, 2], 12, 31, tz = "UTC")) - temp2 <- list(ISOdate(x[["second"]][, 1], 1, 1, tz = "UTC"), - ISOdate(x[["second"]][, 2], 12, 31, tz = "UTC")) - - x[["first_dates"]] <- lapply(seq_len(x[["n_first"]]), function(it) - as.POSIXlt(seq(from = temp1[[1]][it], to = temp1[[2]][it], by = "1 month"))) - x[["second_dates"]] <- lapply(seq_len(x[["n_second"]]), function(it) - as.POSIXlt(seq(from = temp2[[1]][it], to = temp2[[2]][it], by = "1 month"))) - #Days per month + n_second = nrow(temp2), second = temp2 + ) + + # Monthly time-series + temp1 <- list( + ISOdate(x[["first"]][, 1], 1, 1, tz = "UTC"), + ISOdate(x[["first"]][, 2], 12, 31, tz = "UTC") + ) + temp2 <- list( + ISOdate(x[["second"]][, 1], 1, 1, tz = "UTC"), + ISOdate(x[["second"]][, 2], 12, 31, tz = "UTC") + ) + + x[["first_dates"]] <- lapply(seq_len(x[["n_first"]]), function(it) { + as.POSIXlt(seq(from = temp1[[1]][it], to = temp1[[2]][it], by = "1 month")) + }) + x[["second_dates"]] <- lapply(seq_len(x[["n_second"]]), function(it) { + as.POSIXlt(seq(from = temp2[[1]][it], to = temp2[[2]][it], by = "1 month")) + }) + + # Days per month x[["first_dpm"]] <- lapply(seq_len(x[["n_first"]]), function(it) { - temp <- as.POSIXlt(seq(from = temp1[[1]][it], to = temp1[[2]][it], by = "1 day")) - rle(temp$mon)$lengths - }) + temp <- as.POSIXlt(seq( + from = temp1[[1]][it], + to = temp1[[2]][it], + by = "1 day" + )) + rle(temp$mon)$lengths + }) x[["second_dpm"]] <- lapply(seq_len(x[["n_second"]]), function(it) { - temp <- as.POSIXlt(seq(from = temp2[[1]][it], to = temp2[[2]][it], by = "1 day")) - rle(temp$mon)$lengths - }) + temp <- as.POSIXlt(seq( + from = temp2[[1]][it], + to = temp2[[2]][it], + by = "1 day" + )) + rle(temp$mon)$lengths + }) x } calc_assocYears <- function(sim_time, reqRCPs, getYears, timeSlices) { - x <- vector("list", length = 1 + length(reqRCPs) * sim_time[["future_N"]]) - names_assocYears <- c("historical", 
paste0(rownames(sim_time[["future_yrs"]]), ".", - rep(reqRCPs, each = sim_time[["future_N"]]))) + + deltas <- rownames(sim_time[["future_yrs"]]) + + if ("dall" %in% deltas) { + future_N <- 0 + names_assocYears <- "dall" + + } else { + future_N <- sim_time[["future_N"]] + names_assocYears <- c( + "historical", + paste0(deltas, ".", rep(reqRCPs, each = future_N)) + ) + + } + + x <- vector("list", length = 1 + length(reqRCPs) * future_N) for (it in seq_along(x)) { temp <- strsplit(names_assocYears[it], ".", fixed = TRUE)[[1]][[1]] x[[it]] <- list( first = useSlices(getYears, timeSlices, run = temp, slice = "first"), - second = useSlices(getYears, timeSlices, run = temp, slice = "second")) + second = useSlices(getYears, timeSlices, run = temp, slice = "second") + ) } + names(x) <- names_assocYears x } +calc_ids_ToDo <- function(ids_AllToDo, ids_Done) { + if (length(ids_Done) > 0) { + ids_AllToDo[-ids_Done] + } else { + ids_AllToDo + } +} + #access climate change data -get_climatechange_data <- function(clim_source, SFSW2_prj_inputs, SFSW2_prj_meta, - locations, climDB_meta, dbW_compression_type, resume, verbose = FALSE, - print.debug = FALSE) { +get_climatechange_data <- function(clim_source, SFSW2_prj_inputs, + SFSW2_prj_meta, locations, climDB_meta, dbW_compression_type, resume, + verbose = FALSE, print.debug = FALSE) { - if (verbose) + if (verbose) { print(paste("Started", shQuote(clim_source), "at", Sys.time())) + } #Global flags repeatN_max <- 3 temp <- strsplit(clim_source, split = "_", fixed = TRUE)[[1]] - dir.ex.dat <- file.path(SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], - "ClimateScenarios", temp[1], paste(temp[-1], collapse = "_")) - dir_failed <- file.path(SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], - "failed_get_climatechange_data") + dir_ex_dat <- file.path( + SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], + "ClimateScenarios", + temp[1], + paste(temp[-1], collapse = "_") + ) + dir_failed <- file.path( + 
SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], + "failed_get_climatechange_data" + ) dir.create2(dir_failed, showWarnings = FALSE) - use_CF <- is_ClimateForecastConvention(clim_source) - use_NEX <- is_NEX(clim_source) + use_CF <- is_ClimateForecastConvention(climDB_meta) + use_NEX <- is_NEX(climDB_meta) #Specific flags if (use_CF) { @@ -3005,12 +4408,12 @@ get_climatechange_data <- function(clim_source, SFSW2_prj_inputs, SFSW2_prj_meta # - all same spatial coordinates # get netCDF files - temp <- list.files(dir.ex.dat, full.names = TRUE, recursive = TRUE) + temp <- list.files(dir_ex_dat, full.names = TRUE, recursive = TRUE) ext <- sapply(strsplit(basename(temp), split = ".", fixed = TRUE), function(x) x[length(x)]) climDB_files <- temp[tolower(ext) %in% c("nc", "nc4", "ncdf", "netcdf")] if (length(climDB_files) == 0) - stop("Could find no files for ", shQuote(clim_source), " in ", dir.ex.dat) + stop("Could find no files for ", shQuote(clim_source), " in ", dir_ex_dat) climDB_fname_meta <- strsplit(basename(climDB_files), split = climDB_meta[["sep_fname"]], fixed = TRUE) @@ -3050,28 +4453,45 @@ get_climatechange_data <- function(clim_source, SFSW2_prj_inputs, SFSW2_prj_meta climDB_files <- NULL } - # Force dataset specific lower/uper case for GCMs and RCPs, i.e., use values from - # 'climbDB_struct' and not reqGCMs and reqRCPs - temp <- match(tolower(SFSW2_prj_meta[["sim_scens"]][["reqMs"]]), - tolower(climDB_struct[["id_gcm"]]), nomatch = 0) + # Force dataset specific lower/uper case for GCMs and RCPs, + # i.e., use values from 'climbDB_struct' and not reqGCMs and reqRCPs + temp <- match( + tolower(SFSW2_prj_meta[["sim_scens"]][["reqMs"]]), + tolower(climDB_struct[["id_gcm"]]), + nomatch = 0 + ) reqGCMs <- as.character(climDB_struct[["id_gcm"]][temp]) - temp <- match(tolower(SFSW2_prj_meta[["sim_scens"]][["reqCSs"]]), - tolower(climDB_struct[["id_scen"]]), nomatch = 0) + temp <- match( + tolower(SFSW2_prj_meta[["sim_scens"]][["reqCSs"]]), + 
tolower(climDB_struct[["id_scen"]]), + nomatch = 0 + ) reqRCPs <- as.character(climDB_struct[["id_scen"]][temp]) - reqRCPsPerGCM <- lapply(SFSW2_prj_meta[["sim_scens"]][["reqCSsPerM"]], function(r) { - temp <- match(tolower(r), tolower(climDB_struct[["id_scen"]]), nomatch = 0) + reqRCPsPerGCM <- lapply(SFSW2_prj_meta[["sim_scens"]][["reqCSsPerM"]], + function(r) { + temp <- match( + tolower(r), + tolower(climDB_struct[["id_scen"]]), + nomatch = 0 + ) as.character(climDB_struct[["id_scen"]][temp]) }) #Tests that all requested conditions will be extracted stopifnot(length(reqGCMs) > 0, all(!is.na(reqGCMs))) - stopifnot(length(reqRCPs) > 0, all(!is.na(reqRCPs)), - any(grepl("historic", climDB_struct[["id_scen"]], ignore.case = TRUE))) + stopifnot( + length(reqRCPs) > 0, + all(!is.na(reqRCPs)), + any(grepl("historic", climDB_struct[["id_scen"]], ignore.case = TRUE)) + ) #--- put requests together + #TODO: problably better to include scenarios as well, e.g., + # consider requestN <- length(reqRCPs) * length(reqGCMs) * nrow(locations) requestN <- length(reqGCMs) * nrow(locations) - if (verbose) + if (verbose) { print(paste(shQuote(clim_source), "will run", requestN, "times")) + } if (any("wgen-package" %in% unlist(SFSW2_prj_meta[["sim_scens"]][["reqDSsPerM"]]))) { icols <- c("wgen_dry_spell_changes", "wgen_wet_spell_changes", "wgen_prcp_cv_changes") @@ -3079,94 +4499,137 @@ get_climatechange_data <- function(clim_source, SFSW2_prj_inputs, SFSW2_prj_meta } # calculate time slices - timeSlices <- calc_timeSlices(sim_time = SFSW2_prj_meta[["sim_time"]], - tbox = climDB_meta[["tbox"]]) + timeSlices <- calc_timeSlices( + sim_time = SFSW2_prj_meta[["sim_time"]], + tbox = climDB_meta[["tbox"]] + ) # calculate 'getYears' object getYears <- calc_getYears(timeSlices) #Logical on how to select from getYears - assocYears <- calc_assocYears(sim_time = SFSW2_prj_meta[["sim_time"]], reqRCPs, - getYears, timeSlices) + assocYears <- calc_assocYears( + sim_time = 
SFSW2_prj_meta[["sim_time"]], + reqRCPs = reqRCPs, + getYears = getYears, + timeSlices = timeSlices + ) - print(paste("Future scenario data will be extracted for a time period spanning", - timeSlices[7, 4], "through", max(stats::na.omit(timeSlices[, 4])))) + print(paste( + "Scenario data will be extracted for a time period spanning", + paste(range(timeSlices[, "Year"], na.rm = TRUE), collapse = " through ") + )) - #Repeat call to get climate data for all requests until complete + #--- Repeat call to get climate data for all requests until complete repeatN <- 0 - # Indices 'ids_AllToDo' and 'ids_Done' are counters in 1:requestN and are thus dependent - # on locations[, "site_id"] and thus on which sites still need (additional) climate scenario - # data that wasn't extracted in a previous attempt to extract and downscale all data. - # That is they shouldn't be used to identify work across runs, i.e., they should be used - # locally, but not for files, logs, etc. across repeated calls -- for those, use instead - # `locations[i, "site_id"]` and GCM. + + # Indices 'ids_AllToDo' and 'ids_Done' are counters in 1:requestN and are + # thus dependent on nrow(locations) and thus on which sites still + # need (additional) climate scenario data that wasn't extracted in a + # previous attempt to extract and downscale all data. + # That is they shouldn't be used to identify work across runs (where + # the object `locations` may change), i.e., they should be used locally, + # but not for files, logs, etc. across repeated calls -- + # for those, use instead `locations[i, "site_id"]` and GCM. 
ids_AllToDo <- seq_len(requestN) ids_Done <- NULL # Loop - while (repeatN_max > repeatN && - length(ids_ToDo <- if (length(ids_Done) > 0) ids_AllToDo[-ids_Done] else ids_AllToDo) > 0) { + while ( + repeatN_max > repeatN && + length(ids_ToDo <- calc_ids_ToDo(ids_AllToDo, ids_Done)) > 0 + ) { repeatN <- repeatN + 1 if (verbose) { - print(paste(shQuote(clim_source), "will run", repeatN, "out of", repeatN_max, - "repeats to extract n =", length(ids_ToDo), "requests")) + print(paste( + shQuote(clim_source), "will run", repeatN, "out of", repeatN_max, + "repeats to extract n =", length(ids_ToDo), "requests" + )) } - ids_seeds <- as.vector(outer(seq_along(reqGCMs), - (locations[, "site_id"] - 1) * length(reqGCMs), FUN = "+")) - - out <- tryToGet_ClimDB(ids_ToDo = ids_ToDo, clim_source = clim_source, - use_CF = use_CF, use_NEX = use_NEX, climDB_meta = climDB_meta, - climDB_files = climDB_files, reqGCMs = reqGCMs, reqRCPsPerGCM = reqRCPsPerGCM, + ids_seeds <- as.vector(outer( + seq_along(reqGCMs), + (locations[, "site_id"] - 1) * length(reqGCMs), FUN = "+" + )) + + out <- tryToGet_ClimDB( + ids_ToDo = ids_ToDo, + clim_source = clim_source, + use_CF = use_CF, + use_NEX = use_NEX, + climDB_meta = climDB_meta, + climDB_files = climDB_files, + reqGCMs = reqGCMs, + reqRCPsPerGCM = reqRCPsPerGCM, reqDownscalingsPerGCM = SFSW2_prj_meta[["sim_scens"]][["reqDSsPerM"]], - locations = locations, getYears = getYears, assocYears = assocYears, - project_paths = SFSW2_prj_meta[["project_paths"]], dir_failed = dir_failed, + locations = locations, + getYears = getYears, + assocYears = assocYears, + project_paths = SFSW2_prj_meta[["project_paths"]], + dir_failed = dir_failed, fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], climate.ambient = SFSW2_prj_meta[["sim_scens"]][["ambient"]], dbW_compression_type = dbW_compression_type, sim_time = SFSW2_prj_meta[["sim_time"]], seeds_DS = SFSW2_prj_meta[["rng_specs"]][["seeds_DS"]][ids_seeds], - opt_DS = 
SFSW2_prj_meta[["sim_scens"]][["opt_DS"]], + sim_scens = SFSW2_prj_meta[["sim_scens"]], resume = resume, - verbose = verbose, print.debug = print.debug) + verbose = verbose, + print.debug = print.debug + ) ids_Done <- sort(unique(c(ids_Done, out))) } # Process any temporary datafile from a current run - copy_tempdata_to_dbW(fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], - clim_source, dir_out_temp = SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], - verbose) + copy_tempdata_to_dbW( + fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], + clim_source, + dir_out_temp = SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], + verbose + ) # Determine progress if (length(ids_Done) > 0) { if (verbose) - print(paste(clim_source, "was extracted for n =", length(ids_Done), "out of", - length(ids_AllToDo), "downscaling requests")) + print(paste( + clim_source, "was extracted for n =", length(ids_Done), "out of", + length(ids_AllToDo), "downscaling requests" + )) ils_done <- unique((ids_Done - 1) %/% length(reqGCMs) + 1) ids_ToDo <- ids_AllToDo[-ids_Done] + } else { ids_ToDo <- ids_AllToDo } #Clean up: report unfinished locations, etc. 
if (length(ids_ToDo) > 0) { - print(paste(length(ids_ToDo), "sites didn't extract climate scenario information by '", - clim_source, "'")) + print(paste( + length(ids_ToDo), + "sites didn't extract climate scenario information by '", + clim_source, "'" + )) + ils_notdone <- unique((ids_ToDo - 1) %/% length(reqGCMs) + 1) failedLocations_DB <- locations[ils_notdone, ] - save(failedLocations_DB, ids_ToDo, ils_notdone, reqGCMs, locations, ids_AllToDo, + save( + failedLocations_DB, ids_ToDo, ils_notdone, reqGCMs, locations, + ids_AllToDo, file = file.path(SFSW2_prj_meta[["project_paths"]][["dir_out"]], - paste0("ClimDB_failedLocations_", clim_source, ".RData"))) + paste0("ClimDB_failedLocations_", clim_source, ".RData") + ) + ) } - if (verbose) + if (verbose) { print(paste("Finished '", clim_source, "' at", Sys.time())) + } invisible(TRUE) } @@ -3179,8 +4642,9 @@ get_climatechange_data <- function(clim_source, SFSW2_prj_inputs, SFSW2_prj_meta #' should be a subset of the \code{TRUE}s of \code{SFSW2_prj_inputs[["include_YN"]]}. 
#' #' @export -ExtractClimateChangeScenarios <- function(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, - todos, opt_parallel, opt_chunks, resume, verbose = FALSE, print.debug = FALSE) { +ExtractClimateChangeScenarios <- function(climDB_metas, SFSW2_prj_meta, + SFSW2_prj_inputs, todos, opt_parallel, opt_chunks, resume, + verbose = FALSE, print.debug = FALSE) { if (verbose) { t1 <- Sys.time() @@ -3206,14 +4670,23 @@ ExtractClimateChangeScenarios <- function(climDB_metas, SFSW2_prj_meta, SFSW2_pr normal.kind = SFSW2_prj_meta[["rng_specs"]][["RNGkind_prev"]][2]), add = TRUE) - rSOILWAT2::dbW_setConnection(dbFilePath = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]]) + rSOILWAT2::dbW_setConnection( + dbFilePath = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]] + ) on.exit(rSOILWAT2::dbW_disconnectConnection(), add = TRUE) dbW_compression_type <- rSOILWAT2::dbW_compression() for (m in SFSW2_prj_meta[["sim_scens"]][["reqMs"]]) { - dir.create2(file.path(SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], - tolower(m)), showWarnings = opt_verbosity[["print.debug"]], recursive = TRUE) + tmp <- file.path( + SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], + tolower(m) + ) + dir.create2( + path = tmp, + showWarnings = opt_verbosity[["print.debug"]], + recursive = TRUE + ) } # Generate seeds for climate change downscaling @@ -3221,7 +4694,8 @@ ExtractClimateChangeScenarios <- function(climDB_metas, SFSW2_prj_meta, SFSW2_pr N = length(SFSW2_prj_meta[["sim_scens"]][["reqMs"]]) * SFSW2_prj_meta[["sim_size"]][["runsN_master"]], seed = SFSW2_prj_meta[["rng_specs"]][["global_seed"]], - reproducible = SFSW2_prj_meta[["opt_sim"]][["reproducible"]]) + reproducible = SFSW2_prj_meta[["opt_sim"]][["reproducible"]] + ) # keep track of successful/unsuccessful climate scenarios todos_siteIDs <- which(todos) @@ -3247,11 +4721,17 @@ ExtractClimateChangeScenarios <- function(climDB_metas, SFSW2_prj_meta, SFSW2_pr } # obtain climate data for these locations requiring data from this climate 
source - get_climatechange_data(clim_source = clim_source, - SFSW2_prj_inputs = SFSW2_prj_inputs, SFSW2_prj_meta = SFSW2_prj_meta, - locations = locations, climDB_meta = climDB_metas[[clim_source]], - dbW_compression_type = dbW_compression_type, resume = resume, verbose = verbose, - print.debug = print.debug) + get_climatechange_data( + clim_source = clim_source, + SFSW2_prj_inputs = SFSW2_prj_inputs, + SFSW2_prj_meta = SFSW2_prj_meta, + locations = locations, + climDB_meta = climDB_metas[[clim_source]], + dbW_compression_type = dbW_compression_type, + resume = resume, + verbose = verbose, + print.debug = print.debug + ) } } @@ -3301,15 +4781,15 @@ ExtractClimateWizard <- function(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, if (any("CMIP3_ClimateWizardEnsembles_Global" %in% SFSW2_prj_meta[["sim_scens"]][["sources"]])) { #Maurer EP, Adam JC, Wood AW (2009) Climate model based consensus on the hydrologic impacts of climate change to the Rio Lempa basin of Central America. Hydrology and Earth System Sciences, 13, 183-194. #accessed via climatewizard.org on July 10, 2012 - dir.ex.dat <- file.path(SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], "ClimateScenarios", "ClimateWizardEnsembles_Global") + dir_ex_dat <- file.path(SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], "ClimateScenarios", "ClimateWizardEnsembles_Global") } if (any("CMIP3_ClimateWizardEnsembles_USA" %in% SFSW2_prj_meta[["sim_scens"]][["sources"]])) { #Maurer, E. P., L. Brekke, T. Pruitt, and P. B. Duffy. 2007. Fine-resolution climate projections enhance regional climate change impact studies. Eos Transactions AGU 88:504. 
#accessed via climatewizard.org - dir.ex.dat <- file.path(SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], "ClimateScenarios", "ClimateWizardEnsembles_USA") + dir_ex_dat <- file.path(SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], "ClimateScenarios", "ClimateWizardEnsembles_USA") } - list.scenarios.external <- basename(list.dirs2(path = dir.ex.dat, full.names = FALSE, + list.scenarios.external <- basename(list.dirs2(path = dir_ex_dat, full.names = FALSE, recursive = FALSE)) if (all(SFSW2_prj_meta[["sim_scens"]][["id"]][-1] %in% list.scenarios.external)) { @@ -3321,24 +4801,24 @@ ExtractClimateWizard <- function(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, include_YN_climscen <- rep(FALSE, SFSW2_prj_meta[["sim_size"]][["runsN_master"]]) for (sc in seq_len(SFSW2_prj_meta[["sim_scens"]][["N"]] - 1)) { - dir.ex.dat.sc <- file.path(dir.ex.dat, SFSW2_prj_meta[["sim_scens"]][["id"]][1 + sc]) - temp <- basename(list.dirs2(path = dir.ex.dat.sc, full.names = FALSE, + dir_ex_dat.sc <- file.path(dir_ex_dat, SFSW2_prj_meta[["sim_scens"]][["id"]][1 + sc]) + temp <- basename(list.dirs2(path = dir_ex_dat.sc, full.names = FALSE, recursive = FALSE)) if ("CMIP3_ClimateWizardEnsembles_Global" %in% SFSW2_prj_meta[["sim_scens"]][["sources"]]) { - dir.ex.dat.sc.ppt <- file.path(dir.ex.dat.sc, grep("Precipitation_Value", temp, + dir_ex_dat.sc.ppt <- file.path(dir_ex_dat.sc, grep("Precipitation_Value", temp, value = TRUE)) - dir.ex.dat.sc.temp <- file.path(dir.ex.dat.sc, grep("Tmean_Value", temp, + dir_ex_dat.sc.temp <- file.path(dir_ex_dat.sc, grep("Tmean_Value", temp, value = TRUE)) } if ("CMIP3_ClimateWizardEnsembles_USA" %in% SFSW2_prj_meta[["sim_scens"]][["sources"]]) { - dir.ex.dat.sc.ppt <- file.path(dir.ex.dat.sc, grep("Precipitation_Change", temp, + dir_ex_dat.sc.ppt <- file.path(dir_ex_dat.sc, grep("Precipitation_Change", temp, value = TRUE)) - dir.ex.dat.sc.temp <- file.path(dir.ex.dat.sc, grep("Tmean_Change", temp, + dir_ex_dat.sc.temp <- file.path(dir_ex_dat.sc, 
grep("Tmean_Change", temp, value = TRUE)) } - list.temp.asc <- list.files(dir.ex.dat.sc.temp, pattern = ".asc") - list.ppt.asc <- list.files(dir.ex.dat.sc.ppt, pattern = ".asc") + list.temp.asc <- list.files(dir_ex_dat.sc.temp, pattern = ".asc") + list.ppt.asc <- list.files(dir_ex_dat.sc.ppt, pattern = ".asc") #extract data get.month <- function(path, grid, locations) { @@ -3348,11 +4828,11 @@ ExtractClimateWizard <- function(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, } sc.temp <- sapply(SFSW2_glovars[["st_mo"]], function(m) { temp <- grep(paste0("_", m, "_"), list.temp.asc, value = TRUE) - get.month(path = dir.ex.dat.sc.temp, grid = temp, locations) + get.month(path = dir_ex_dat.sc.temp, grid = temp, locations) }) sc.ppt <- sapply(SFSW2_glovars[["st_mo"]], function(m) { temp <- grep(paste0("_", m, "_"), list.ppt.asc, value = TRUE) - get.month(path = dir.ex.dat.sc.ppt, grid = temp, locations) + get.month(path = dir_ex_dat.sc.ppt, grid = temp, locations) }) if ("CMIP3_ClimateWizardEnsembles_Global" %in% SFSW2_prj_meta[["sim_scens"]][["sources"]]) { @@ -3418,8 +4898,8 @@ ExtractClimateWizard <- function(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, #' Extracts climate change scenarios and downscales monthly to daily time series #' @export -PrepareClimateScenarios <- function(SFSW2_prj_meta, SFSW2_prj_inputs, opt_parallel, - resume, opt_verbosity, opt_chunks) { +PrepareClimateScenarios <- function(SFSW2_prj_meta, SFSW2_prj_inputs, + opt_parallel, resume, opt_verbosity, opt_chunks) { if (opt_verbosity[["verbose"]]) { t1 <- Sys.time() @@ -3433,13 +4913,17 @@ PrepareClimateScenarios <- function(SFSW2_prj_meta, SFSW2_prj_inputs, opt_parall climDB_metas <- climscen_metadata() - SFSW2_prj_inputs[["SWRunInformation"]] <- climscen_determine_sources(climDB_metas, - SFSW2_prj_meta, SFSW2_prj_inputs) + SFSW2_prj_inputs[["SWRunInformation"]] <- climscen_determine_sources( + climDB_metas = climDB_metas, + SFSW2_prj_meta = SFSW2_prj_meta, + SFSW2_prj_inputs = 
SFSW2_prj_inputs + ) - which_NEX <- is_NEX(SFSW2_prj_meta[["sim_scens"]][["sources"]]) - which_CF <- is_ClimateForecastConvention(SFSW2_prj_meta[["sim_scens"]][["sources"]]) - which_ClimateWizard <- grepl("ClimateWizardEnsembles", - SFSW2_prj_meta[["sim_scens"]][["sources"]]) + conventions <- sapply(SFSW2_prj_meta[["sim_scens"]][["sources"]], + function(x) { + climDB_metas[[x]][["convention"]] + } + ) if (resume) { # Process any temporary datafile from a potential previous run @@ -3447,10 +4931,12 @@ PrepareClimateScenarios <- function(SFSW2_prj_meta, SFSW2_prj_inputs, opt_parall clim_sources <- stats::na.exclude(clim_sources) for (k in seq_along(clim_sources)) { - copy_tempdata_to_dbW(fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], + copy_tempdata_to_dbW( + fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], clim_source = clim_sources[k], dir_out_temp = SFSW2_prj_meta[["project_paths"]][["dir_out_temp"]], - verbose = opt_verbosity[["verbose"]]) + verbose = opt_verbosity[["verbose"]] + ) } # Determine which climate scenario extractions and downscalings remain to be done @@ -3472,16 +4958,24 @@ PrepareClimateScenarios <- function(SFSW2_prj_meta, SFSW2_prj_inputs, opt_parall names(todos) <- NULL if (any(todos)) { - if (any(which_NEX) || any(which_CF)) { - temp <- ExtractClimateChangeScenarios(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, - todos, opt_parallel, opt_chunks, resume = resume, - verbose = opt_verbosity[["verbose"]], print.debug = opt_verbosity[["print.debug"]]) - - SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] - SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] + if (any("NEX" %in% conventions) || any("CF" %in% conventions)) { + temp <- ExtractClimateChangeScenarios( + climDB_metas = climDB_metas, + SFSW2_prj_meta = SFSW2_prj_meta, + SFSW2_prj_inputs = SFSW2_prj_inputs, + todos = todos, + opt_parallel = opt_parallel, + opt_chunks = opt_chunks, + resume = resume, + verbose = opt_verbosity[["verbose"]], + print.debug = 
opt_verbosity[["print.debug"]] + ) + + SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] + SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] } - if (any(which_ClimateWizard)) { + if (any("ClimateWizardEnsembles" %in% conventions)) { SFSW2_prj_inputs <- ExtractClimateWizard(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, todos, verbose = opt_verbosity[["verbose"]]) } @@ -3491,4 +4985,146 @@ PrepareClimateScenarios <- function(SFSW2_prj_meta, SFSW2_prj_inputs, opt_parall list(SFSW2_prj_inputs = SFSW2_prj_inputs, SFSW2_prj_meta = SFSW2_prj_meta) } + + +#-----Obtain climate projection data------ + +#' Check and prepare local copy of \var{CMIP5_MACAv2metdata} dataset +#' +#' @param locations A data frame. Two columns \code{X_WGS84} and +#' \code{Y_WGS84} of locations describe rectangle +#' for which data will be downloaded. +#' @param dir_ex_fut A character string. The path name to future climate +#' projections. +#' +#' @return If all files are available, then a message is printed to the +#' R console with that information. Otherwise, the message points to a +#' \var{.sh} script that was created at the +#' \code{MACAv2metdata_USA} sub-folder. This script must be run +#' separately to download the missing files. +#' +#' @section Notes: The download scripts use \var{wget}, i.e., it must be +#' available on your system to work. The scripts are based on the dataset +#' repository setup at +#' \url{https://climate.northwestknowledge.net/MACA/index.php} as of +#' Dec 2019. This dataset has been bias corrected against \var{gridMET}. +#' +#' @references Abatzoglou, J. T. (2013) Development of gridded surface +#' meteorological data for ecological applications and modelling. +#' \var{Int. J. Climatol.}, 33: 121–131. 
+#' +#' @examples +#' if (exists("SFSW2_prj_meta") && exists("SFSW2_prj_inputs")) { +#' obtain_CMIP5_MACAv2metdata_USA( +#' locations = +#' SFSW2_prj_inputs[["SWRunInformation"]][, c("X_WGS84", "Y_WGS84")], +#' dir_ex_fut = SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]], +#' ) +#' } +#' +#' @export +obtain_CMIP5_MACAv2metdata_USA <- function(locations, dir_ex_fut) { + climDB_meta <- climscen_metadata()[["CMIP5_MACAv2metdata_USA"]] + + dir_ex_dat <- file.path( + dir_ex_fut, + "ClimateScenarios", + "CMIP5", + "MACAv2metdata_USA" + ) + + bbox <- apply(locations, 2, range) + + stopifnot( + min(bbox[, "X_WGS84"]) >= min(climDB_meta[["bbox"]][, "lon"]), + max(bbox[, "X_WGS84"]) <= max(climDB_meta[["bbox"]][, "lon"]), + min(bbox[, "Y_WGS84"]) >= min(climDB_meta[["bbox"]][, "lat"]), + max(bbox[, "Y_WGS84"]) <= max(climDB_meta[["bbox"]][, "lat"]) + ) + + gcms <- c( + "bcc-csm1-1", "bcc-csm1-1-m", "BNU-ESM", "CanESM2", "CCSM4", + "CNRM-CM5", "CSIRO-Mk3-6-0", "GFDL-ESM2G", "GFDL-ESM2M", "HadGEM2-CC365", + "HadGEM2-ES365", "inmcm4", "IPSL-CM5A-LR", "IPSL-CM5A-MR", "IPSL-CM5B-LR", + "MIROC-ESM", "MIROC-ESM-CHEM", "MIROC5", "MRI-CGCM3", "NorESM1-M" + ) + + rcps <- c("historical", "rcp45", "rcp85") + + ids <- !is.na(climDB_meta[["var_desc"]][, "varname"]) + vars <- climDB_meta[["var_desc"]][ids, c("varname", "tag")] + + + #--- Should have these netCDF files + ttmp <- apply(climDB_meta[["tbox"]], 2, paste0, collapse = "_") + + tmp <- expand.grid( + Var = vars[, "tag"], + Model = gcms, + rip = NA, + Scen = rcps + ) + + tmp[, "Period"] <- ifelse( + tmp[, "Scen"] == "historical", + ttmp[1], + ttmp[2] + ) + + tmp[, "rip"] <- ifelse( + tmp[, "Model"] == "CCSM4", + "r6i1p1", + "r1i1p1" + ) + + need_files <- paste0( + "agg_macav2metdata_", + apply(tmp, 1, paste0, collapse = "_"), + "_CONUS_daily.nc" + ) + + + #--- Check which one of these files are available + doesnt_have_files <- !file.exists(file.path(dir_ex_dat, need_files)) + + if (any(doesnt_have_files)) { + ids_get_files <- 
which(doesnt_have_files) + + wget_bash <- c( + "#!/bin/bash", + paste0( + 'wget -nc -c -nd ', + '"http://thredds.northwestknowledge.net:8080/thredds/ncss/grid/', + need_files[ids_get_files], + '?&var=', + vars[match(tmp[ids_get_files, "Var"], vars[, "tag"]), "varname"], + '&north=', max(bbox[, "Y_WGS84"]), + '&south=', min(bbox[, "Y_WGS84"]), + '&west=', min(bbox[, "X_WGS84"]), + '&east=', max(bbox[, "X_WGS84"]), + '&temporal=all&accept=netcdf&point=false" -O ', + need_files[ids_get_files] + ) + ) + + fname_bash <- file.path(dir_ex_dat, + paste0("macav2metdata_wget_", format(Sys.time(), "%Y%m%d%H%M%S"), ".sh") + ) + + writeLines(wget_bash, con = fname_bash) + + stop("Please execute script ", + shQuote(fname_bash), + " to download missing MACAv2metdata_USA data." + ) + + } else { + print(paste( + "All MACAv2metdata_USA files are available;", + "however, spatial coverage was not checked." + )) + } +} + + #------END CLIMATE CHANGE DATA------ diff --git a/R/ExtractData_Elevation.R b/R/ExtractData_Elevation.R index 715006be..1f428faa 100644 --- a/R/ExtractData_Elevation.R +++ b/R/ExtractData_Elevation.R @@ -3,39 +3,70 @@ prepare_ExtractData_Elevation <- function(SWRunInformation, sim_size, field_sources, field_include, how_determine_sources, scorp, - elev_probs = c(0.025, 0.5, 0.975)) { + elev_probs = c(0.025, 0.5, 0.975) +) { - sites_elevation_source <- get_datasource_masterfield(SWRunInformation, - field_sources, sim_size, how_determine_sources) + sites_elevation_source <- get_datasource_masterfield( + SWRunInformation, + field_sources, + sim_size, + how_determine_sources + ) probs <- if (scorp == "cell") elev_probs else NULL - dtemp <- matrix(NA, nrow = sim_size[["runsN_sites"]], - ncol = 1 + length(probs), dimnames = list(NULL, c("ELEV_m", - if (scorp == "cell") paste0("ELEV_m_q", probs)))) - - do_include <- get_datasource_includefield(SWRunInformation, field_include, - sim_size) - - list(source = sites_elevation_source, data = dtemp, idone = vector(), - probs = 
probs, input = SWRunInformation, do_include = do_include) + dtemp <- matrix( + NA, + nrow = sim_size[["runsN_sites"]], + ncol = 1 + length(probs), + dimnames = list( + NULL, + c("ELEV_m", if (scorp == "cell") paste0("ELEV_m_q", probs)) + ) + ) + + do_include <- get_datasource_includefield( + SWRunInformation, + field_include, + sim_size + ) + + list( + source = sites_elevation_source, + data = dtemp, + idone = vector(), + probs = probs, + input = SWRunInformation, + do_include = do_include + ) } update_elevation_input <- function(MMC, sim_size, digits = 0, fnames_in) { icolnew <- !(colnames(MMC[["data"]]) %in% colnames(MMC[["input"]])) + if (any(icolnew)) { - MMC[["input"]] <- cbind(MMC[["input"]], - matrix(NA, nrow = nrow(MMC[["input"]]), ncol = sum(icolnew), - dimnames = list(NULL, colnames(MMC[["data"]])[icolnew]))) + MMC[["input"]] <- cbind( + MMC[["input"]], + matrix( + NA, + nrow = nrow(MMC[["input"]]), + ncol = sum(icolnew), + dimnames = list(NULL, colnames(MMC[["data"]])[icolnew]) + ) + ) } i_good <- stats::complete.cases(MMC[["data"]]) + MMC[["input"]][sim_size[["runIDs_sites"]][i_good], colnames(MMC[["data"]])] <- round(MMC[["data"]][i_good, ], digits) - utils::write.csv(MMC[["input"]], file = fnames_in[["fmaster"]], - row.names = FALSE) + utils::write.csv( + MMC[["input"]], + file = fnames_in[["fmaster"]], + row.names = FALSE + ) unlink(fnames_in[["fpreprocin"]]) MMC @@ -238,34 +269,50 @@ ExtractData_Elevation <- function(exinfo, SFSW2_prj_meta, SFSW2_prj_inputs, field_sources <- "Elevation_source" field_include <- "Include_YN_ElevationSources" - MMC <- prepare_ExtractData_Elevation(SFSW2_prj_inputs[["SWRunInformation"]], - sim_size = SFSW2_prj_meta[["sim_size"]], field_sources = field_sources, + MMC <- prepare_ExtractData_Elevation( + SFSW2_prj_inputs[["SWRunInformation"]], + sim_size = SFSW2_prj_meta[["sim_size"]], + field_sources = field_sources, field_include = field_include, how_determine_sources = 
SFSW2_prj_meta[["opt_input"]][["how_determine_sources"]], - SFSW2_prj_meta[["sim_space"]][["scorp"]]) + scorp = SFSW2_prj_meta[["sim_space"]][["scorp"]] + ) if (exinfo$ExtractElevation_NED_USA) { - MMC <- do_ExtractElevation_NED_USA(MMC, + MMC <- do_ExtractElevation_NED_USA( + MMC, sim_size = SFSW2_prj_meta[["sim_size"]], sim_space = SFSW2_prj_meta[["sim_space"]], dir_ex_dem = SFSW2_prj_meta[["project_paths"]][["dir_ex_dem"]], fnames_in = SFSW2_prj_meta[["fnames_in"]], - resume, verbose) + resume, + verbose + ) } if (exinfo$ExtractElevation_HWSD_Global) { - MMC <- do_ExtractElevation_HWSD_Global(MMC, + MMC <- do_ExtractElevation_HWSD_Global( + MMC, sim_size = SFSW2_prj_meta[["sim_size"]], sim_space = SFSW2_prj_meta[["sim_space"]], dir_ex_dem = SFSW2_prj_meta[["project_paths"]][["dir_ex_dem"]], - fnames_in = SFSW2_prj_meta[["fnames_in"]], resume, verbose) + fnames_in = SFSW2_prj_meta[["fnames_in"]], + resume, + verbose + ) } - SFSW2_prj_inputs[["SWRunInformation"]] <- update_datasource_masterfield(MMC, + SFSW2_prj_inputs[["SWRunInformation"]] <- MMC[["input"]] + + SFSW2_prj_inputs[["SWRunInformation"]] <- update_datasource_masterfield( + MMC, sim_size = SFSW2_prj_meta[["sim_size"]], - SFSW2_prj_inputs[["SWRunInformation"]], SFSW2_prj_meta[["fnames_in"]], - field_sources, field_include) + SWRunInformation = SFSW2_prj_inputs[["SWRunInformation"]], + fnames_in = SFSW2_prj_meta[["fnames_in"]], + field_sources, + field_include + ) SFSW2_prj_inputs } diff --git a/R/ExtractData_MeanMonthlyClimate.R b/R/ExtractData_MeanMonthlyClimate.R index 8ecf91ab..7eb40c40 100644 --- a/R/ExtractData_MeanMonthlyClimate.R +++ b/R/ExtractData_MeanMonthlyClimate.R @@ -341,13 +341,18 @@ extract_climate_NCEPCFSR <- function(MMC, SWRunInformation, #match weather folder names in case of missing extractions res <- as.matrix(temp[["res_clim"]][, -1]) - irow <- match(locations[, "WeatherFolder"], - table = temp[["res_clim"]][, "WeatherFolder"], nomatch = 0) - irowL <- irow > 0 ctemp <- 
colnames(res) - MMC[["data"]][todos, "RH", ][irowL, ] <- res[irow, grepl("RH", ctemp)] - MMC[["data"]][todos, "cover", ][irowL, ] <- res[irow, grepl("Cloud", ctemp)] - MMC[["data"]][todos, "wind", ][irowL, ] <- res[irow, grepl("Wind", ctemp)] + + irow <- match( + locations[, "WeatherFolder"], + table = temp[["res_clim"]][, "WeatherFolder"], + nomatch = 0 + ) + irowL <- which(todos)[irow > 0] + + MMC[["data"]][irowL, "RH", ] <- res[irow, grepl("RH", ctemp)] + MMC[["data"]][irowL, "cover", ] <- res[irow, grepl("Cloud", ctemp)] + MMC[["data"]][irowL, "wind", ] <- res[irow, grepl("Wind", ctemp)] # Determine successful extractions MMC[["idone"]]["NCEPCFSR1"] <- TRUE diff --git a/R/GISSM.R b/R/GISSM.R deleted file mode 100644 index 3b8a057f..00000000 --- a/R/GISSM.R +++ /dev/null @@ -1,293 +0,0 @@ -######################## -#------ GISSM functions -# Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). Modeling -# regeneration responses of big sagebrush (Artemisia tridentata) to abiotic -# conditions. Ecol Model, 286, 66-77. 
- -#' Function to convert soil depth to soil layer -SoilLayer_at_SoilDepth <- function(depth_cm, layers_depth) { - pmax(1, pmin(length(layers_depth), 1 + findInterval(depth_cm - 0.01, - layers_depth))) -} - - -#' Function to calculate for each day of the year, duration in days of -#' upcoming favorable conditions accounting for consequences.unfavorable = 0 -#' (if conditions become unfavorable, then restart the count), =1 (resume) -calc_DurationFavorableConds <- function(RYyear, consequences.unfavorable, - Germination_WhileFavorable, RYyear_ForEachUsedDay) { - - index.year <- RYyear_ForEachUsedDay == RYyear - conditions <- Germination_WhileFavorable[index.year] - doys <- seq_len(sum(index.year)) - doys[!conditions] <- NA #calculate only for favorable days - out <- rep(NA, times = sum(index.year)) - - if (consequences.unfavorable == 0) { - # if conditions become unfavorable, then restart the count afterwards - temp.rle <- rle(conditions) - if (sum(!temp.rle$values) > 0) { - temp.unfavorable_startdoy <- c((1 + c(0, - # add starts for odd- and even-lengthed rle - cumsum(temp.rle$lengths)))[!temp.rle$values], 1 + sum(index.year)) - - temp.rle$values <- if (temp.rle$values[1]) { - # first rle period is favorable - rep(temp.unfavorable_startdoy, each = 2) - } else { - # first rle period is unfavorable - rep(temp.unfavorable_startdoy[-1], each = 2) - } - temp.rle$values <- temp.rle$values[seq_along(temp.rle$lengths)] - - } else { - # every day is favorable - temp.rle$values <- length(conditions) + 1 - } - - # difference to next following start of a period of unfavorable conditions - out <- inverse.rle(temp.rle) - doys - - } else if (consequences.unfavorable == 1) { - # if conditions become unfavorable, then resume the count afterwards - temp <- sum(conditions) - count <- if (temp > 0) { - temp:1 - } else { - # every day is unfavorable - vector("numeric", length = 0) - } - - # sum of following favorable conditions in this year - out <- 
stats::napredict(stats::na.action(stats::na.exclude(doys)), count) - } - - out -} - -#' Based on the \var{NLR} model (equation 5) in Hardegree (2006) and modified -#' by Schlaepfer et al. (2014) by making time to germinate dependent on -#' mean January temperature and soil water potential -#' -#' @references Hardegree SP (2006) Predicting Germination Response to -#' Temperature. I. Cardinal-temperature Models and Subpopulation-specific -#' Regression. Annals of Botany, 97, 1115-1125. -get_modifiedHardegree2006NLR <- function(RYdoy, Estimate_TimeToGerminate, - TmeanJan, a, b, c, d, k1_meanJanTemp, k2_meanJanTempXIncubationTemp, - k3_IncubationSWP, Tgerm.year, SWPgerm.year, durations, rec.delta = 1, - nrec.max = 10L) { - - for (nrec in seq_len(nrec.max)) { - Estimate_TimeToGerminate <- prev_est_TimeToGerminate <- max(0, - round(Estimate_TimeToGerminate)) - - ids <- RYdoy:(RYdoy + Estimate_TimeToGerminate - 1) - Tgerm <- mean(Tgerm.year[ids], na.rm = TRUE) - SWPgerm <- mean(SWPgerm.year[ids], na.rm = TRUE) - - temp.c.lim <- - (Tgerm - b) * (d ^ 2 - 1) / d - c <- if (c > 0) { - if (c > temp.c.lim) c else { - temp.c.lim + SFSW2_glovars[["tol"]] - } - } else if (c < 0) { - if (c < temp.c.lim) c else { - temp.c.lim - SFSW2_glovars[["tol"]] - } - } - - # NLR model (eq.5) in Hardegree SP (2006) - temp <- a * exp(-0.693147181 / log(d) ^ 2 * log(1 + (Tgerm - b) * - (d ^ 2 - 1) / (c * d)) ^ 2) # 0.693147181 is equal to log(2) - - # drs addition to time to germinate dependent on mean January temperature - # and soil water potential - temp <- 1 / temp + - k1_meanJanTemp * TmeanJan + - k2_meanJanTempXIncubationTemp * TmeanJan * Tgerm + - k3_IncubationSWP * SWPgerm - Estimate_TimeToGerminate <- max(1, round(temp)) - - # break if convergence or not enough time in this year - temp <- abs(Estimate_TimeToGerminate - prev_est_TimeToGerminate) - if (temp <= rec.delta | RYdoy + Estimate_TimeToGerminate - 1 > 365) - break - } - - out <- if (nrec >= nrec.max) { - 
round(mean(c(Estimate_TimeToGerminate, prev_est_TimeToGerminate)), 0) - } else { - Estimate_TimeToGerminate - } - - # test whether enough time to germinate - if (out <= durations[RYdoy] & RYdoy + out <= 365) out else NA -} - -#' Function to estimate time to germinate for each day of a given year and -#' conditions (temperature, top soil \var{SWP}) -#' -#' @param seed A seed set, \code{NULL}, or \code{NA}. \code{NA} will not affect -#' the state of the \var{RNG}; \code{NULL} will re-initialize the \var{RNG}; -#' and all other values are passed to \code{\link{set.seed}}. -calc_TimeToGerminate <- function(RYyear, Germination_WhileFavorable, - LengthDays_FavorableConditions, RYyear_ForEachUsedDay, soilTmeanSnow, - swp.TopMean, TmeanJan, param, seed = NA) { - - if (!is.na(seed)) set.seed(seed) - runifs <- stats::runif(2) - - #values for current year - index.year <- RYyear_ForEachUsedDay == RYyear - conditions <- Germination_WhileFavorable[index.year] - - # determining time to germinate for every day - a <- max(SFSW2_glovars[["tol"]], param$Hardegree_a) - b <- param$Hardegree_b - temp <- if (param$Hardegree_d == 1) { - if (runifs[1] > 0.5) { - 1 + SFSW2_glovars[["tol"]] - } else { - 1 - SFSW2_glovars[["toln"]] - } - } else { - param$Hardegree_d - } - d <- max(SFSW2_glovars[["tol"]], temp) - temp.c <- if (param$Hardegree_c != 0) param$Hardegree_c else { - sign(runifs[2] - 0.5) * SFSW2_glovars[["tol"]] - } - - # consequences of unfavorable conditions coded in here - TimeToGerminate.favorable <- unlist(lapply(which(conditions), - get_modifiedHardegree2006NLR, - Estimate_TimeToGerminate = 1, TmeanJan = TmeanJan, - a = a, b = b, c = temp.c, d = d, - k1_meanJanTemp = param$TimeToGerminate_k1_meanJanTemp, - k2_meanJanTempXIncubationTemp = - param$TimeToGerminate_k2_meanJanTempXIncubationTemp, - k3_IncubationSWP = param$TimeToGerminate_k3_IncubationSWP, - Tgerm.year = soilTmeanSnow[index.year], - SWPgerm.year = swp.TopMean[index.year], - durations = 
LengthDays_FavorableConditions[index.year])) - - res <- rep(NA, length(conditions)) - if (length(TimeToGerminate.favorable) > 0) { - res[conditions] <- TimeToGerminate.favorable - } - - res -} - -do.vector <- function(kill.vector, max.duration.before.kill) { - doys <- seq_along(kill.vector) - doys[!kill.vector] <- NA #calculate only for kill days - temp.rle <- rle(kill.vector) - - if (sum(!temp.rle$values) > 0) { - temp.startdoy <- (1 + c(0, cumsum(temp.rle$lengths)))[!temp.rle$values] - temp.rle$values <- if (temp.rle$values[1]) { - rep(temp.startdoy, each = 2) - } else { - rep(temp.startdoy[-1], each = 2) - } - temp.rle$values <- temp.rle$values[seq_along(temp.rle$lengths)] - - } else { - # every day is kill free - temp.rle$values <- length(kill.vector) + 1 - } - kill.durations <- inverse.rle(temp.rle) - doys - mortality <- rep(FALSE, times = length(kill.vector)) - mortality[kill.durations > max.duration.before.kill] <- TRUE - - mortality -} - -#' Function to calculate mortality under conditions and checks survival limit -calc_SeedlingMortality <- function(kill.conditions, - max.duration.before.kill) { - - if (length(dim(kill.conditions)) > 0) { - # i.e., is.matrix, columns represent soil layers - apply(kill.conditions, 2, do.vector, max.duration.before.kill) - } else { - do.vector(kill.conditions, max.duration.before.kill) - } -} - - -#' Function to calculate favorable conditions for seedling growth for each day -#' of a given year -check_SuitableGrowthThisYear <- function( - favorable.conditions, consequences.unfavorable) { - - out <- rep(NA, times = length(favorable.conditions)) - - if (consequences.unfavorable == 0) { - # if conditions become unfavorable, then stop growth for rest of season - temp.rle <- rle(favorable.conditions) - temp.firstFavorable.index <- which(temp.rle$values)[1] - - if (!is.na(temp.firstFavorable.index) && - temp.firstFavorable.index < length(temp.rle$values)) { - - temp <- (temp.firstFavorable.index + 1):length(temp.rle$values) - 
temp.rle$values[temp] <- FALSE - out <- inverse.rle(temp.rle) - - } else { - # nothing changed, either because all days are either favorable or - # unfavorable or because first favorable period is also the last in the - # season - out <- favorable.conditions - } - - } else if (consequences.unfavorable == 1) { - # if conditions become unfavorable, then resume growth afterwards - out <- favorable.conditions - } - - out -} - - -#' Function to calculate rooting depth at given age -#' Units: [age] = days, [P0, K, r] = mm -#' @return A numeric vector of rooting depth in units of centimeters. -SeedlingRootingDepth <- function(age, P0, K, r) { - depth <- K * P0 * exp(r * age) / (K + P0 * (exp(r * age) - 1)) - - pmax(0, depth) / 10 -} - - -get.DoyAtLevel <- function(x, level) { - which(x == level & x > 0) -} - -get.DoyMostFrequentSuccesses <- function(doys, data) { - # must return one of the values because the quantiles are compared against - # the values in function 'get.DoyAtLevel' - res1.max <- sapply(1:2, function(x) - stats::quantile(doys[doys[, x] > 0, x], probs = c(0.1, 1), type = 3)) - germ.doy <- if (all(!data[, 1])) { - # no successful germination - list(NA, NA) - } else { - lapply(1:2, function(x) get.DoyAtLevel(doys[, 1], res1.max[x, 1])) - } - sling.doy <- if (all(!data[, 2])) { - # no successful seedlings - list(NA, NA) - } else { - lapply(1:2, function(x) get.DoyAtLevel(doys[, 2], res1.max[x, 2])) - } - res1.max <- list(germ.doy, sling.doy) - - unlist(lapply(res1.max, function(x) - c(min(x[[1]]), stats::median(x[[2]]), max(x[[1]])))) -} - -#------ End of GISSM functions -######################## diff --git a/R/IO_datafiles.R b/R/IO_datafiles.R index 84a20378..54da84ff 100644 --- a/R/IO_datafiles.R +++ b/R/IO_datafiles.R @@ -2,8 +2,11 @@ #------ datafile-IO functions req_fields_SWRunInformation <- function() { - c("Label", "site_id", "WeatherFolder", "X_WGS84", "Y_WGS84", "ELEV_m", - "Include_YN") + c( + "Label", "site_id", "Include_YN", + "WeatherFolder", + 
"X_WGS84", "Y_WGS84", "ELEV_m", "Slope", "Aspect" + ) } #' Read a comma-separated value (\var{csv}) file @@ -548,9 +551,12 @@ process_inputs <- function(project_paths, fnames_in, use_preprocin = TRUE, nrowsClasses = nrowsClasses), error = print) sw_input_soillayers <- fix_rowlabels(sw_input_soillayers, SWRunInformation, verbose = verbose) - sw_input_soillayers[, - (1:2)] <- check_monotonic_increase( - data.matrix(sw_input_soillayers[, - (1:2)]), strictly = TRUE, fail = TRUE, - na.rm = TRUE) + sw_input_soillayers[, - (1:2)] <- rSW2utils::check_monotonic_increase( + data.matrix(sw_input_soillayers[, - (1:2)]), + strictly = TRUE, + fail = TRUE, + na.rm = TRUE + ) temp <- tryCatch(SFSW2_read_inputfile(fnames_in[["ftreatDesign"]], nrowsClasses = nrowsClasses), error = print) diff --git a/R/Indices.R b/R/Indices.R index 35c7759a..2866611b 100644 --- a/R/Indices.R +++ b/R/Indices.R @@ -94,13 +94,13 @@ NULL #' \code{include_YN}. #' @export it_exp <- function(isim, runN) { - stopifnot(sapply(list(isim, runN), is.natural)) + stopifnot(sapply(list(isim, runN), rSW2utils::is.natural)) (isim - 1L) %/% runN + 1L } #' @rdname indices #' @export it_exp2 <- function(pid, runN, scN) { - stopifnot(sapply(list(pid, runN, scN), is.natural)) + stopifnot(sapply(list(pid, runN, scN), rSW2utils::is.natural)) it_exp(isim = it_sim2(pid, scN), runN) } @@ -110,13 +110,13 @@ it_exp2 <- function(pid, runN, scN) { #' across loops 1+2b; invariant to \code{include_YN}. #' @export it_site <- function(isim, runN) { - stopifnot(sapply(list(isim, runN), is.natural)) + stopifnot(sapply(list(isim, runN), rSW2utils::is.natural)) (isim - 1L) %% runN + 1L } #' @rdname indices #' @export it_site2 <- function(pid, runN, scN) { - stopifnot(sapply(list(pid, runN, scN), is.natural)) + stopifnot(sapply(list(pid, runN, scN), rSW2utils::is.natural)) it_site(isim = it_sim2(pid, scN), runN) } @@ -127,34 +127,34 @@ it_site2 <- function(pid, runN, scN) { #' the ID for the output database. 
#' @export it_Pid <- function(isim, runN, sc, scN) { - stopifnot(sapply(list(isim, runN, sc, scN), is.natural)) + stopifnot(sapply(list(isim, runN, sc, scN), rSW2utils::is.natural)) (isim - 1L) * scN + sc } #' @rdname indices #' @export it_Pid0 <- function(iexp, isite, runN, sc, scN) { - stopifnot(sapply(list(iexp, isite, runN, sc, scN), is.natural)) + stopifnot(sapply(list(iexp, isite, runN, sc, scN), rSW2utils::is.natural)) it_Pid(isim = it_sim0(iexp, isite, runN), runN, sc, scN) } #' @rdname indices #' @export it_sim0 <- function(iexp, isite, runN) { - stopifnot(sapply(list(iexp, isite, runN), is.natural)) + stopifnot(sapply(list(iexp, isite, runN), rSW2utils::is.natural)) (iexp - 1L) * runN + isite } #' @rdname indices #' @export it_sim2 <- function(pid, scN) { - stopifnot(sapply(list(pid, scN), is.natural)) + stopifnot(sapply(list(pid, scN), rSW2utils::is.natural)) 1L + (pid - 1L) %/% scN } #' @rdname indices #' @export it_scen2 <- function(pid, scN) { - stopifnot(sapply(list(pid, scN), is.natural)) + stopifnot(sapply(list(pid, scN), rSW2utils::is.natural)) 1L + (pid - 1L) %% scN } diff --git a/R/Mathematical_Functions.R b/R/Mathematical_Functions.R index ee5afa3c..e3bc5549 100644 --- a/R/Mathematical_Functions.R +++ b/R/Mathematical_Functions.R @@ -1,22 +1,3 @@ -#' Error function -#' @seealso Code is from examples of \code{\link[stats]{pnorm}}. -#' @param x A numeric vector. -#' @return A numeric vector of the size of \code{x}. -erf <- function(x) 2 * stats::pnorm(x * sqrt(2)) - 1 - -#' Stretch values -#' -#' Values above the mean of \code{x} are made larger and -#' values below the mean are made smaller - each by -#' \code{lambda * dist(x, mean(x))}. -#' -#' @param x A numeric vector. -#' @param lambda A numeric value. The stretching factor applied to \code{x}. -#' -#' @return A numeric vector of the size of \code{x}. 
-stretch_values <- function(x, lambda = 0) { - (1 + lambda) * x - lambda * mean(x) -} in_box <- function(xy, xbounds, ybounds, i_use) { !i_use & @@ -25,424 +6,10 @@ in_box <- function(xy, xbounds, ybounds, i_use) { } -cut0Inf <- rSOILWAT2:::cut0Inf -finite01 <- rSOILWAT2:::finite01 - - - -#' Functions for circular descriptive statistics -#' -#' @param x A numeric vector or a matrix. If a data.frame is supplied, then -#' \code{x} is coerced to a matrix. -#' @param int A numeric value. The number of units of \code{x} in a full circle, -#' e.g., for unit days: \code{int = 365}; for unit months: \code{int = 12}. -#' @param na.rm A logical value indicating whether \code{NA} values should be -#' stripped before the computation proceeds. -#' -#' @return A numeric value or \code{NA}. -#' -#' @seealso \code{\link[circular]{mean.circular}}, -#' \code{\link[circular]{range.circular}}, \code{\link[circular]{sd.circular}} -#' -#' @aliases circ_mean circ_range circ_sd -#' @name circular -NULL - -#' @rdname circular -circ_mean <- function(x, int, na.rm = FALSE) { - if (!all(is.na(x)) && requireNamespace("circular", quietly = TRUE)) { - circ <- 2 * pi / int - x_circ <- circular::circular(x * circ, type = "angles", units = "radians", - rotation = "clock", modulo = "2pi") - x_int <- circular::mean.circular(x_circ, na.rm = na.rm) / circ - - # map 0 -> int; rounding to 13 digits: 13 was empirically derived for - # int = {12, 365} and - # x = c((-1):2, seq(x-5, x+5, by = 1), seq(2*x-5, 2*x+5, by = 1)) assuming - # that this function will never need to calculate for x > t*int with t>2 - round(as.numeric(x_int) - 1, 13) %% int + 1 - } else { - NA - } -} - -#' @rdname circular -circ_range <- function(x, int, na.rm = FALSE) { - if (!all(is.na(x)) && requireNamespace("circular", quietly = TRUE)) { - circ <- 2 * pi / int - x_circ <- circular::circular(x * circ, type = "angles", units = "radians", - rotation = "clock", modulo = "2pi") - x_int <- range(x_circ, na.rm = na.rm) / circ - 
as.numeric(x_int) - - } else { - NA - } -} - -#' @rdname circular -circ_sd <- function(x, int, na.rm = FALSE) { - if (length(x) - sum(is.na(x)) > 1 && requireNamespace("circular", - quietly = TRUE)) { - - if (stats::sd(x, na.rm = TRUE) > 0) { - circ <- 2 * pi / int - x_circ <- circular::circular(x * circ, type = "angles", units = "radians", - rotation = "clock", modulo = "2pi") - x_int <- circular::sd.circular(x_circ, na.rm = na.rm) / circ - as.numeric(x_int) - } else { - 0 - } - } else { - NA - } -} -#' Calculate the circular subtraction \var{x - y} -#' -#' @param x A numeric vector or array. -#' @param y A numeric vector or array. -#' @param int A numeric value. The number of units of \code{x} in a full circle, -#' e.g., for unit days: \code{int = 365}; for unit months: \code{int = 12}. -#' @examples -#' # Days of year -#' circ_minus(260, 240, int = 365) ## expected: +20 -#' circ_minus(240, 260, int = 365) ## expected: -20 -#' circ_minus(10, 360, int = 365) ## expected: +15 -#' circ_minus(360, 10, int = 365) ## expected: -15 -#' circ_minus(0, 360, int = 365) ## expected: +5 -#' circ_minus(360, 0, int = 365) ## expected: -5 -#' -#' # Matrix examples -#' x <- matrix(c(260, 240, 10, 360, 0, 360), nrow = 3, ncol = 2) -#' y <- matrix(c(240, 260, 360, 10, 360, 0), nrow = 3, ncol = 2) -#' circ_minus(x, y, int = 365) -#' y2 <- y -#' y2[1, 1] <- NA -#' circ_minus(y2, x, int = 365) -#' @export -circ_minus <- function(x, y, int) { - stopifnot(all(dim(x) == dim(y))) - - if (requireNamespace("circular", quietly = TRUE)) { - circ <- 2 * pi / int - d_circ <- circular::circular((x - y) * circ, - type = "angles", units = "radians", rotation = "clock", modulo = "asis") - res <- as.numeric(circular::minusPiPlusPi(d_circ) / circ) - - } else { - res <- rep(NA, length(x)) - } - - if (is.array(x)) { - array(res, dim = dim(x), dimnames = dimnames(x)) - } else { - res - } -} - - -#' Calculate the circular addition \var{x + y} -#' -#' @param x A numeric vector or array. 
-#' @param y A numeric vector or array. -#' @param int A numeric value. The number of units of \code{x} in a full circle, -#' e.g., for unit days: \code{int = 365}; for unit months: \code{int = 12}. -#' @examples -#' # Matrix examples: day of year -#' x <- matrix(c(260, 240, 10, 360, 0, 360), nrow = 3, ncol = 2) -#' y <- matrix(c(240, 260, 360, 10, 360, 0), nrow = 3, ncol = 2) -#' circ_add(x, y, int = 365) -#' -#' # Circular addition and subtraction -#' r1 <- circ_add(circ_minus(x, y, int = 365), y, int = 365) -#' r2 <- circ_minus(circ_add(x, y, int = 365), y, int = 365) -#' all.equal(r1, r2) -#' -#' @export -circ_add <- function(x, y, int) { - stopifnot(all(dim(x) == dim(y))) - - if (requireNamespace("circular", quietly = TRUE)) { - circ <- 2 * pi / int - d_circ <- circular::circular((x + y) * circ, - type = "angles", units = "radians", rotation = "clock", modulo = "asis") - res <- as.numeric(circular::minusPiPlusPi(d_circ) / circ) - - } else { - res <- rep(NA, length(x)) - } - - if (is.array(x)) { - array(res, dim = dim(x), dimnames = dimnames(x)) - } else { - res - } -} - - -#' Find the \code{k}-largest/smallest values (and apply a function to these -#' values) -#' -#' @param x A numeric vector -#' @param largest A logical value. See return value. -#' @param fun A function which requires one argument or \code{"index"}. -#' \code{fun} will be applied to the \code{k}-largest/smallest values of -#' \code{x}. -#' @param k An integer value. The \code{k}-largest/smallest value(s) of \code{x} -#' will be used. The largest/smallest value will be used if 0 or negative. -#' @param na.rm A logical value indicating whether \code{NA} values should be -#' stripped before the computation proceeds. 
-#' @param \dots Optional arguments to be passed to \code{fun} -#' -#' @return A vector of length \code{k}, \itemize{ -#' \item if \code{is.null(fun)}, then a vector with the \code{k}-largest -#' (if \code{largest = TRUE}) or \code{k}-smallest -#' (if \code{largest = FALSE}) values of \code{x}; -#' \item if \code{fun = "index"}, then a vector with indices of the -#' \code{k}-largest/smallest values (NOTE: this is truncated to the -#' \code{k}-first indices!). } Otherwise, the result of applying \code{fun} -#' to the \code{k}-largest/smallest values. -fun_kLargest <- function(x, largest = TRUE, fun = NULL, k = 10L, - na.rm = FALSE, ...) { - - res <- if (na.rm) { - stats::na.exclude(x) - } else { - x - } - - # Determine k-largest/smallest values - res <- sort.int(res, decreasing = largest, na.last = !na.rm, - method = if (getRversion() >= "3.3.0") "radix" else "quick") - res <- res[seq_len(max(1L, min(length(res), as.integer(k))))] - - # Calculate return values - if (is.null(fun)) { - res - } else if (identical(fun, "index")) { - which(x %in% res)[seq_len(k)] - } else { - fun(res, ...) - } -} - - -handle_NAs <- function(x, na.index, na.act) { - if (length(na.index) > 0) { - stats::napredict(na.act, x) - } else { - x - } -} - -scale_by_sum <- function(x) { - temp <- sum(x, na.rm = TRUE) - if (temp > 0 && is.finite(temp)) { - x / temp - } else { - x - } -} - - -cor2 <- function(y) { - res <- try(stats::cor(y[, 1], y[, 2]), silent = TRUE) - if (inherits(res, "try-error")) NA else res -} - - -#' Check that data are within range of normal distribution -#' -#' @param data A numeric vector. Daily values of temperature. -#' @param sigmaN An integer value. A multiplier of \code{stats::sd}. 
-test_sigmaNormal <- function(data, sigmaN = 6) { - md <- mean(data) - sdd <- stats::sd(data) * sigmaN - stopifnot(data < md + sdd, data > md - sdd) -} - - -#' Check that data are within range of an approximated gamma distribution -#' -#' @section Note: Approximate shape and scale instead of very slow call: -#' \code{g <- MASS::fitdistr(data, "gamma")} -#' @param data A numeric vector. Daily values of precipitation. -#' @param sigmaN An integer value. A multiplier of \code{stats::sd}. -#' @references Choi, S. C., and R. Wette. 1969. Maximum Likelihood Estimation of -#' the Parameters of the Gamma Distribution and Their Bias. Technometrics -#' 11:683-690. -# nolint start -#' @references -#' \url{http://en.wikipedia.org/wiki/Gamma_distribution#Maximum_likelihood_estimation} -# nolint end -test_sigmaGamma <- function(data, sigmaN = 6) { - tempD <- data[data > 0] - - if (length(tempD) >= 2 && stats::sd(tempD) > 0) { - tempM <- mean(tempD) - temp <- log(tempM) - mean(log(tempD)) - gshape <- (3 - temp + sqrt((temp - 3) ^ 2 + 24 * temp)) / (12 * temp) - gscale <- tempM / gshape - stopifnot(data < stats::qgamma(erf(sigmaN / sqrt(2)), shape = gshape, - scale = gscale)) - } -} #' Index of the closest value in the matrix to the passed in value. whereNearest <- function(val, matrix) { which.min(abs(matrix - val)) } - -#' Test whether input represents a natural number -#' @param x An integer, numeric, or complex vector, matrix, or array. -#' @return A logical value. -is.natural <- function(x) { - typeof(x) %in% c("integer", "double", "complex") && - !is.null(x) && length(x) > 0 && !anyNA(x) && - isTRUE(all.equal(x, round(x))) && all(x > 0) -} - -#' The intersection on any number of vectors -#' -#' @param \dots Any number of vectors or a list of vectors. -#' @return A vector of the same mode as inputs. -#' @seealso \code{\link{intersect}} -intersect2 <- function(...) { - x <- list(...) 
- n <- length(x) - - if (is.list(x[[1]]) && n == 1) { - x <- x[[1]] - n <- length(x) - } - - res <- NULL - if (n > 1) { - if (all(lengths(x)) > 0) { - res <- x[[1]] - for (k in 2:n) { - res <- intersect(res, x[[k]]) - } - } - - } else { - res <- x[[1]] - } - - res -} - - -#' Recursive comparisons which also works for nested lists -#' -#' @param x1 A R object -#' @param x2 A R object -#' -#' @seealso \code{\link{all.equal}} -#' -#' @return \itemize{ -#' \item If both \code{x1} and \code{x2} are lists, then \code{do_compare} -#' is called recursively on mutually shared names if names exists and on -#' each element otherwise, and the output is a list from the return value -#' of each recursive call. -#' \item Otherwise, the function \code{\link{all.equal}} is called. If the -#' result is \code{TRUE}, then \code{NA} is returned. If the result is -#' \code{FALSE}, then a list with three elements is returned with \describe{ -#' \item{eq}{the result of the call to \code{\link{all.equal}}} -#' \item{x1}{The object \code{x1}} -#' \item{x2}{The object \code{x2}} -#' }} -#' -#' @examples -#' ## expected result: NA -#' do_compare(1L, 1L) -#' -#' ## expected result: list(eq = "Mean relative difference: 1", x1 = 1, x2 = 2) -#' do_compare(1, 2) -#' -# ## expected result: first comparison returns NA; second shows a difference -#' do_compare(list(1, 2), list(1, 3)) -#' ## expected result: comparison for elements a and b return NA; comparison -#' ## for element c shows a difference -#' do_compare(list(a = 1, b = 2), list(b = 2, c = 0, a = 1)) -#' @export -do_compare <- function(x1, x2) { - if (is.list(x1) && is.list(x2)) { - dims <- if (!is.null(names(x1)) && !is.null(names(x2))) { - unique(c(names(x1), names(x2))) - } else { - seq_len(min(length(x1), length(x2))) - } - - # as of R v3.4.1 'Recall' doesn't work as argument to apply-type calls - res <- lapply(dims, function(k) do_compare(x1 = x1[[k]], x2 = x2[[k]])) - names(res) <- dims - res - - } else { - eq <- all.equal(x1, x2) - - 
if (isTRUE(eq)) { - NA - } else { - list(eq = eq, x1 = x1, x2 = x2) - } - } -} - - -#' Check that values in matrix-like object are (strictly) monotonically -#' increasing/decreasing -#' -#' @param x A numeric matrix like object. -#' @param MARGIN An integer value giving the subscripts over which the -#' monotonicity will be checked; 1 indicates rows, 2 indicates columns. -#' @param increase A logical value. If \code{TRUE}, check monotonic increase; if -#' \code{FALSE}, check monotonic decrease. -#' @param strictly A logical value. If \code{TRUE}, check for a strict monotonic -#' pattern. -#' @param fail A logical value. If \code{TRUE}, throw error if monotonic check -#' fails. -#' @param replacement A value that replaces non-(strictly) monotonically -#' increasing/decreasing values if \code{fail} is \code{FALSE}. -#' @param na.rm A logical value. If \code{TRUE}, then ignore \code{NA}s; if -#' \code{FALSE}, then fail if \code{strictly} or replace with -#' \code{replacement}. -#' @return The updated \code{x}. -#' @export -check_monotonic_increase <- function(x, MARGIN = 1, increase = TRUE, - strictly = FALSE, fail = FALSE, replacement = NA, na.rm = FALSE) { - - stopifnot(MARGIN %in% c(1, 2), length(dim(x)) == 2) - - x <- as.matrix(x) - if (MARGIN == 2) { - x <- t(x) - } - - mfun <- if (increase) { - if (strictly) ">" else ">=" - } else { - if (strictly) "<" else "=<" - } - - ord <- !match.fun(mfun)(x[, -1, drop = FALSE], x[, -ncol(x), drop = FALSE]) - - if ((!na.rm && strictly && anyNA(x)) || any(ord, na.rm = TRUE)) { - if (fail) { - stop(paste0("'check_monotonic_increase': data are not ", - if (strictly) "strictly ", "monotonically ", - if (increase) "increasing " else "decreasing ", - if (MARGIN == 1) "in rows." 
else "in columns.")) - - } else { - x[, -1][is.na(ord) | ord] <- replacement - x[is.na(x[, 1]), 1] <- replacement - } - } - - if (MARGIN == 1) x else t(x) -} diff --git a/R/Miscellaneous_Functions.R b/R/Miscellaneous_Functions.R index cc9a832a..fc132e1d 100644 --- a/R/Miscellaneous_Functions.R +++ b/R/Miscellaneous_Functions.R @@ -353,8 +353,6 @@ vpd <- function(Tmin, Tmax, RHmean = NULL) { } -max_duration <- rSOILWAT2:::max_duration - startDoyOfDuration <- function(x, duration = 10) { r <- rle(x) res <- NULL @@ -506,10 +504,17 @@ extreme_values_and_doys <- function(x, na.rm = FALSE) { tmin <- min(x, na.rm = na.rm) c(tmax, tmin, - circ_mean(which(abs(x - tmax) < SFSW2_glovars[["tol"]]), int = 365, - na.rm = na.rm), - circ_mean(which(abs(x - tmin) < SFSW2_glovars[["tol"]]), int = 365, - na.rm = na.rm)) + rSW2utils::circ_mean( + which(abs(x - tmax) < SFSW2_glovars[["tol"]]), + int = 365, + na.rm = na.rm + ), + rSW2utils::circ_mean( + which(abs(x - tmin) < SFSW2_glovars[["tol"]]), + int = 365, + na.rm = na.rm + ) + ) } @@ -799,77 +804,6 @@ benchmark_BLAS <- function(platform, seed = NA) { } -#' Converts units of precipitation data -#' -#' @param x A numeric vector. Precipitation data as monthly series in units of -#' \code{unit_from}. -#' @param dpm A numeric vector. Number of days per month in the time series -#' \code{x}. -#' @param unit_from A character string. Units of data in \code{x}. Currently, -#' supported units include "mm/month", "mm month-1", "mm/d", "mm d-1", -#' "kg/m2/s", "kg m-2 s-1", "mm/s", "mm s-1", "cm/month", "cm month-1". -#' @param unit_to A character string. Units to which data are converted. -#' Currently, supported unit is "cm month-1" respectively "cm/month". -#' -#' @return A numeric vector of the same size as \code{x} in units of -#' \code{unit_to}. 
-#' @export -convert_precipitation <- function(x, dpm, unit_from, unit_to = "cm month-1") { - if (!(unit_to %in% c("cm/month", "cm month-1"))) { - stop("'convert_precipitation': only converts to units of 'cm month-1'") - } - - if (unit_from %in% c("mm/month", "mm month-1")) { - x / 10 - - } else if (unit_from %in% c("mm/d", "mm d-1")) { - x * dpm / 10 - - } else if (unit_from %in% c("cm/d", "cm d-1")) { - x * dpm - - } else if (unit_from %in% c("kg/m2/s", "kg m-2 s-1", "mm/s", "mm s-1")) { - x * dpm * 8640 - - } else if (unit_from %in% c("cm/month", "cm month-1")) { - x - - } else { - stop("Unknown precipitation unit: ", unit_from) - } -} - -#' Converts units of temperature data -#' -#' @param x A numeric vector. Temperature data as monthly series in units of -#' \code{unit_from}. -#' @param unit_from A character string. Units of data in \code{x}. Currently, -#' supported units include "K", "F", and "C". -#' @param unit_to A character string. Units to which data are converted. -#' Currently, supported unit is "C". -#' -#' @return A numeric vector of the same size as \code{x} in units of -#' \code{unit_to}. 
-#' @export -convert_temperature <- function(x, unit_from, unit_to = "C") { - if (!identical(unit_to, "C")) { - stop("'convert_temperature': only converts to units of degree Celsius") - } - - if (identical(unit_from, "K")) { - x - 273.15 - - } else if (identical(unit_from, "F")) { - (x - 32) * 0.5555556 - - } else if (identical(unit_from, "C")) { - x - - } else { - stop("Unknown temperature unit: ", unit_from) - } -} - @@ -883,56 +817,148 @@ convert_to_todo_list <- function(x) { -setup_scenarios <- function(sim_scens, future_yrs) { +setup_scenarios <- function(sim_scens, sim_time, is_idem = FALSE) { #--- Create complete scenario names # make sure 'ambient' is not among models - temp <- grep(sim_scens[["ambient"]], sim_scens[["models"]], - invert = TRUE, value = TRUE) + temp <- grep(sim_scens[["ambient"]], + sim_scens[["models"]], + invert = TRUE, + value = TRUE + ) if (length(temp) > 0) { - # add (multiple) future_yrs - temp <- paste0(rownames(future_yrs), ".", - rep(temp, each = nrow(future_yrs))) - # add (multiple) downscaling.method - temp <- paste0(sim_scens[["method_DS"]], ".", - rep(temp, each = length(sim_scens[["method_DS"]]))) + if (is_idem) { + # Use all years + temp <- paste0("idem.dall.", temp) + + } else { + # add (multiple) future_yrs, but only if not using full future daily vals + temp <- paste0( + rownames(sim_time[["future_yrs"]]), ".", + rep(temp, each = nrow(sim_time[["future_yrs"]])) + ) + + # add (multiple) downscaling.method + temp <- paste0( + sim_scens[["method_DS"]], ".", + rep(temp, each = length(sim_scens[["method_DS"]])) + ) + } } + # make sure 'ambient' is first entry id <- c(sim_scens[["ambient"]], temp) N <- length(id) + itime <- data.frame( + simstartyr = sim_time[["simstartyr"]], + endyr = sim_time[["endyr"]] + ) + + #--- Create table with scenario name parts for each scenario + # ConcScen = concentration scenarios, e.g., SRESs, RCPs + ctmp <- c( + "Downscaling", "DeltaStr_yrs", "ConcScen", "Model", "Delta_yrs", "itime" + ) + + 
climScen <- data.frame(matrix( + NA, + nrow = N, + ncol = length(ctmp), + dimnames = list(NULL, ctmp) + )) + + # set `delta_yrs` to 0 (for CO2-concentration data) + climScen[, "Delta_yrs"] <- 0L + climScen[, "itime"] <- 1L + + + # Fill in information for ambient scenario + climScen[1, "Model"] <- sim_scens[["ambient"]] + climScen[1, "ConcScen"] <- if ("tag_aCO2_ambient" %in% names(sim_scens)) { + sim_scens[["tag_aCO2_ambient"]] + } else { + "Fix360ppm" + } + + if (N > 1) { - #--- Create table with scenario name parts for each scenario + # Fill in information about model-scenario combinations temp <- strsplit(id[-1], split = ".", fixed = TRUE) - if (!all(lengths(temp) == 4L)) - stop("'climate.conditions' are mal-formed: they must contain ", - "4 elements that are concatenated by '.'") + if (!all(lengths(temp) == 4L)) { + stop( + "'climate.conditions' are mal-formed: they must contain ", + "4 elements that are concatenated by '.'" + ) + } + + climScen[-1, ctmp[1:4]] <- do.call(rbind, temp) + + + # set simulation time periods + if (is_idem) { + #--- Use calendar years for every run (and calculate `itime`) + + # set `itime` + tmp <- c("DSfut_startyr", "DSfut_endyr") + tmp_itime <- sim_time[["future_yrs"]][-1, tmp] + colnames(tmp_itime) <- colnames(itime) - climScen <- data.frame(matrix(unlist(temp), nrow = N - 1, ncol = 4, - byrow = TRUE), stringsAsFactors = FALSE) + stopifnot(length(unique(climScen[-1, "ConcScen"])) == NROW(tmp_itime)) - # ConcScen = concentration scenarios, e.g., SRESs, RCPs - colnames(climScen) <- c("Downscaling", "DeltaStr_yrs", "ConcScen", "Model") - # see 'setup_time_simulation_project' for how 'future_yrs' is created - climScen[, "Delta_yrs"] <- as.integer(substr(climScen[, "DeltaStr_yrs"], 2, - nchar(climScen[, "DeltaStr_yrs"]) - 3)) + itime <- rbind(itime, unique(tmp_itime)) + rownames(itime) <- NULL + + # set index for `itime` + ids_itime <- match( + apply(tmp_itime, 1, paste, collapse = "_"), + apply(itime, 1, paste, collapse = "_") + ) + + 
climScen[-1, "itime"] <- rep( + x = ids_itime, + times = table(climScen[-1, "ConcScen"]) + ) + + } else { + #--- Use current/ambient years and a delta for future runs + # see 'setup_time_simulation_project' for how 'future_yrs' is created + climScen[, "Delta_yrs"] <- as.integer(substr( + x = climScen[, "DeltaStr_yrs"], + start = 2, + stop = nchar(climScen[, "DeltaStr_yrs"]) - 3 + )) + } #--- List unique sets of requested scenario name parts - reqMs <- unique(climScen[, "Model"]) - reqCSs <- unique(climScen[, "ConcScen"]) + reqMs <- unique(climScen[-1, "Model"]) + reqCSs <- unique(climScen[-1, "ConcScen"]) reqCSsPerM <- lapply(reqMs, function(x) - unique(climScen[x == climScen[, "Model"], "ConcScen"])) + unique(climScen[x == climScen[, "Model"], "ConcScen"]) + ) reqDSsPerM <- lapply(reqMs, function(x) - unique(climScen[x == climScen[, "Model"], "Downscaling"])) + unique(climScen[x == climScen[, "Model"], "Downscaling"]) + ) } else { # Only ambient scenario - climScen <- reqMs <- reqCSs <- reqCSsPerM <- reqDSsPerM <- NULL + reqMs <- reqCSs <- reqCSsPerM <- reqDSsPerM <- NULL } - c(sim_scens, list(id = id, N = N, df = climScen, reqMs = reqMs, - reqCSs = reqCSs, reqCSsPerM = reqCSsPerM, reqDSsPerM = reqDSsPerM)) + c(sim_scens, + list( + id = id, + N = N, + df = climScen, + itime = itime, + is_idem = is_idem, + reqMs = reqMs, + reqCSs = reqCSs, + reqCSsPerM = reqCSsPerM, + reqDSsPerM = reqDSsPerM + ) + ) } setup_meandaily_output <- function(req_mean_daily, opt_agg) { diff --git a/R/OutputDatabase.R b/R/OutputDatabase.R index 787cbba7..041b98b3 100644 --- a/R/OutputDatabase.R +++ b/R/OutputDatabase.R @@ -237,24 +237,26 @@ get_fieldnames <- function(responseName, fields.header, fields.iTable) { responseName <- gsub(".", "_", responseName, fixed = TRUE) for (i in seq_along(responseName)) { - iColumns.iTable <- c(iColumns.iTable, - fields.iTable[grepl(responseName[i], fields.iTable_, fixed = FALSE)]) - iColumns.header <- c(iColumns.header, - 
fields.header[grepl(responseName[i], fields.header_, fixed = FALSE)]) - outOrder <- c(outOrder, - fields.iTable[grepl(responseName[i], fields.iTable_, fixed = FALSE)], - fields.header[grepl(responseName[i], fields.header_, fixed = FALSE)]) + tmp_h <- grep(responseName[i], fields.header_, fixed = FALSE) + tmp_iT <- grep(responseName[i], fields.iTable_, fixed = FALSE) + + iColumns.header <- c(iColumns.header, fields.header[tmp_h]) + iColumns.iTable <- c(iColumns.iTable, fields.iTable[tmp_iT]) + outOrder <- c(outOrder, fields.header[tmp_h], fields.iTable[tmp_iT]) } + iColumns.iTable <- unique(iColumns.iTable) iColumns.header <- unique(iColumns.header) outOrder <- unique(outOrder) } - list(addPid = addPid, - iTable = iColumns.iTable, - header = iColumns.header, - outOrder = outOrder, - has_columns = length(iColumns.header) > 0 || length(iColumns.iTable) > 0) + list( + addPid = addPid, + iTable = iColumns.iTable, + header = iColumns.header, + outOrder = outOrder, + has_columns = length(iColumns.header) > 0 || length(iColumns.iTable) > 0 + ) } @@ -351,8 +353,12 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, MeanOrSD <- match.arg(MeanOrSD) - dat <- as.data.frame(matrix(NA, nrow = 0, ncol = length(variables), - dimnames = list(NULL, variables))) + dat <- as.data.frame(matrix( + data = NA, + nrow = 0, + ncol = length(variables), + dimnames = list(NULL, variables) + )) if (length(variables) > 0) { con <- dbConnect(SQLite(), fname_dbOut, flags = SQLITE_RO) @@ -361,22 +367,49 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, db_tables <- dbListTables(con) header_fields <- dbListFields(con, "header") - extract_tables <- c("header", "sites", "runs", paste0("overall_", MeanOrSD)) - db_setup <- lapply(extract_tables, function(table) { - tn <- grep(table, db_tables, ignore.case = TRUE, fixed = FALSE, - value = TRUE) - has <- length(tn) > 0 - icols <- if (has) get_fieldnames(variables, - fields.header = header_fields, 
- fields.iTable = dbListFields(con, tn)) - c(list(name = dbQuoteIdentifier(con, tn), has = has), icols = icols) - }) + # "header" is last because it is a catch-all "view" + noa <- paste0("overall_", MeanOrSD) + extract_tables <- c("sites", "runs", noa, "header") + + db_setup <- vector("list", length(extract_tables)) names(db_setup) <- extract_tables - has_columns <- sapply(db_setup, function(x) - x[["has"]] && x[["icols.has_columns"]]) - add_Pid <- any(sapply(db_setup, function(x) - x[["has"]] && x[["icols.addPid"]])) + for (k in seq_along(extract_tables)) { + tn <- grep( + pattern = extract_tables[k], + x = db_tables, + ignore.case = TRUE, + fixed = FALSE, + value = TRUE + ) + + has <- length(tn) > 0 + + icols <- if (has) get_fieldnames( + responseName = variables, + fields.header = header_fields, + fields.iTable = dbListFields(con, tn) + ) + + tmp <- variables %in% unlist(icols[c("iTable", "header")]) + variables <- variables[!tmp] + + db_setup[[k]] <- c( + list(name = dbQuoteIdentifier(con, tn), has = has), + icols = icols + ) + } + + has_columns <- sapply( + db_setup, + function(x) x[["has"]] && x[["icols.has_columns"]] + ) + + add_Pid <- any(sapply( + db_setup, + function(x) x[["has"]] && x[["icols.addPid"]] + )) + outOrder <- unlist(lapply(db_setup, function(x) x[["icols.outOrder"]])) if (any(has_columns) || add_Pid) { @@ -391,8 +424,10 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, # Add fields from header table if requested if (length(db_setup[["header"]][["icols.header"]]) > 0) { - temp <- dbQuoteIdentifier(con, - db_setup[["header"]][["icols.header"]]) + temp <- dbQuoteIdentifier( + con, + db_setup[["header"]][["icols.header"]] + ) temp <- paste0("header.", temp, " AS ", temp, collapse = ", ") if (need_sep) { paste(",", temp) @@ -404,8 +439,10 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, # Add fields from runs table if requested if (length(db_setup[["runs"]][["icols.iTable"]]) > 0) { - 
temp <- dbQuoteIdentifier(con, - db_setup[["runs"]][["icols.iTable"]]) + temp <- dbQuoteIdentifier( + con, + db_setup[["runs"]][["icols.iTable"]] + ) temp <- paste0("runs.", temp, " AS ", temp, collapse = ", ") if (need_sep) { paste(",", temp) @@ -417,8 +454,10 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, # Add fields from sites table if requested if (length(db_setup[["sites"]][["icols.iTable"]]) > 0) { - temp <- dbQuoteIdentifier(con, - db_setup[["sites"]][["icols.iTable"]]) + temp <- dbQuoteIdentifier( + con, + db_setup[["sites"]][["icols.iTable"]] + ) temp <- paste0("sites.", temp, " AS ", temp, collapse = ", ") if (need_sep) { paste(",", temp) @@ -429,10 +468,12 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, }, # Add fields from aggregation output table if requested - if (length(db_setup[[4]][["icols.iTable"]]) > 0) { - temp <- dbQuoteIdentifier(con, - db_setup[[4]][["icols.iTable"]]) - temp <- paste0(db_setup[[4]][["name"]], ".", temp, " AS ", temp, + if (length(db_setup[[noa]][["icols.iTable"]]) > 0) { + temp <- dbQuoteIdentifier( + con, + db_setup[[noa]][["icols.iTable"]] + ) + temp <- paste0(db_setup[[noa]][["name"]], ".", temp, " AS ", temp, collapse = ", ") if (need_sep) { paste(",", temp) @@ -442,14 +483,18 @@ dbOut_read_variables_from_scenario <- function(fname_dbOut, variables = NULL, }, " FROM header", - " INNER JOIN ", db_setup[[4]][["name"]], - " ON header.P_id = ", db_setup[[4]][["name"]], ".P_id", + " INNER JOIN ", db_setup[[noa]][["name"]], + " ON header.P_id = ", db_setup[[noa]][["name"]], ".P_id", " INNER JOIN runs ON header.P_id = runs.P_id", " INNER JOIN sites ON runs.site_id = sites.id", " WHERE header.Scenario = ", shQuote(scenario), - if (length(whereClause) > 0) - paste0(" AND ", addHeaderToWhereClause(whereClause, - headers = header_fields)), + + if (length(whereClause) > 0) { + paste0( + " AND ", + addHeaderToWhereClause(whereClause, headers = header_fields) + ) + }, 
" ORDER BY header.P_id") dat <- dbGetQuery(con, sql)[, outOrder] @@ -3269,8 +3314,13 @@ compare_two_dbOutput <- function(dbOut1, dbOut2, tol = 1e-3, x_test <- dbGetQuery(testDB, sql) #---Compare field data and report if differences were found - ident <- all.equal(x_ref, x_test, tol = tol, - scale = if (comp_absolute) 1 else NULL) + ident <- rSW2utils::all_equal_numeric2( + target = x_ref, + current = x_test, + tolerance = tol, + scaled = !comp_absolute + ) + if (!isTRUE(ident)) { temp <- list(ident) names(temp) <- tocomp_tables[k] diff --git a/R/Parallel.R b/R/Parallel.R index 5bfaa7a6..8ab79673 100644 --- a/R/Parallel.R +++ b/R/Parallel.R @@ -472,7 +472,6 @@ setup_SFSW2_cluster <- function(opt_parallel, dir_out, verbose = FALSE, Rmpi::mpi.spawn.Rslaves(nslaves = opt_parallel[["num_cores"]]) Rmpi::mpi.bcast.cmd(library("rSFSW2")) - Rmpi::mpi.bcast.cmd(library("rSOILWAT2")) SFSW2_glovars[["p_cl"]] <- TRUE SFSW2_glovars[["p_pids"]] <- as.integer( diff --git a/R/PriorCalculations.R b/R/PriorCalculations.R index e90b2c0d..fb13fa9c 100644 --- a/R/PriorCalculations.R +++ b/R/PriorCalculations.R @@ -27,6 +27,9 @@ calc_RequestedSoilLayers <- function(SFSW2_prj_meta, cat("\n")}, add = TRUE) } + # Column name pattern of soil layers, e.g., `depth_L1` + cn_depth <- "depth_L" + # How to add different soil variables # values will be exhausted: sl_vars_sub <- c("EvapCoeff", "TranspCoeff", "Imperm") @@ -108,8 +111,7 @@ calc_RequestedSoilLayers <- function(SFSW2_prj_meta, SFSW2_prj_inputs[["sw_input_soils_use"]][icol] <- TRUE } - icol <- grep("depth_", - names(SFSW2_prj_inputs[["sw_input_soillayers"]]))[lyrs] + icol <- paste0(cn_depth, lyrs) SFSW2_prj_inputs[["sw_input_soillayers"]][irows, icol] <- matrix(ldset, nrow = sum(il_set), ncol = length(ldset), byrow = TRUE) has_changed <- TRUE diff --git a/R/RandomNumberGenerator.R b/R/RandomNumberGenerator.R index 54ff0bd1..66ea7f00 100644 --- a/R/RandomNumberGenerator.R +++ b/R/RandomNumberGenerator.R @@ -110,7 +110,8 @@ set_RNG_stream 
<- function(seed = NA) { #' #' @section Usage: \var{RNG} - parallelized function calls by \pkg{rSFSW2} #' \itemize{ -#' \item \code{try.ScenarioWeather} wraps \code{calc.ScenarioWeather} which +#' \item \code{try_MonthlyScenarioWeather} wraps +#' \code{calc_MonthlyScenarioWeather} which #' calls \code{set_RNG_stream} to prepare \var{RNG} for functions #' \itemize{ #' \item \code{fix_PPTdata_length} diff --git a/R/RcppExports.R b/R/RcppExports.R deleted file mode 100644 index 477b7f20..00000000 --- a/R/RcppExports.R +++ /dev/null @@ -1,113 +0,0 @@ -# Generated by using Rcpp::compileAttributes() -> do not edit by hand -# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 - -#' Determine wait times until germination based on information on favorable -#' conditions and time required to germinate -#' -#' @section Note: The \pkg{Rcpp} version of the function is about 270x faster -#' for vectors of length 365 and 12,000x faster for vectors of length 11,000 -#' than the R version. The \pkg{Rcpp} version also reduced the memory -#' footprint by a factor of >> 3080. -#' -#' @references Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). -#' Modeling regeneration responses of big sagebrush (Artemisia tridentata) -#' to abiotic conditions. Ecol Model, 286, 66-77. 
-#' -#' @examples -#' # The \pkg{Rcpp} function is equivalent to the following R version -#' germination_wait_times_R <- function(time_to_germinate, duration_fave_cond) { -#' N <- length(time_to_germinate) -#' stats::na.exclude(unlist(lapply(seq_len(N), function(t) { -#' if (is.finite(time_to_germinate[t])) { -#' t1 <- duration_fave_cond[t:N] -#' t2 <- stats::na.exclude(t1) -#' t3 <- which(t2[time_to_germinate[t]] == t1)[1] -#' sum(is.na(t1[1:t3])) -#' } else { -#' NA -#' } -#' }))) -#' } -#' -#' @export -germination_wait_times <- function(time_to_germinate, duration_fave_cond) { - .Call(`_rSFSW2_germination_wait_times`, time_to_germinate, duration_fave_cond) -} - -#' Determine if all conditions across rooted soil layers are deadly -#' -#' Function that checks whether all relevant (those with roots) soil layers -#' are under conditions of mortality (kill.conditions) for each day of a -#' given year -#' -#' \code{relevantLayers} takes either \code{NA} if no soil layers should be -#' considered (e.g., because not yet germinated), or an integer number -#' between 1 and the number of simulated soil layers. The number indicates -#' the depth to which a seedling has grown roots and over which layers -#' \code{kill.conditions} will be evaluated. -#' -#' @section Note: The \pkg{Rcpp} version of the function is about 165x -#' faster than the version previous to commit -#' \var{6344857a9cdb08acf68fa031c43cf4a596613aad} 'Small speed improvements' -#' and about 70x faster than the R version. The \pkg{Rcpp} version also -#' reduced the memory footprint by a factor of 200. -#' -#' @param relevantLayers An integer vector, usually of length 365 or 366 -#' (days). -#' @param kill.conditions A m x p logical matrix with -#' \code{m >= length(relevantLayers)} and p represents the number of -#' simulated soil layers, i.e., \code{p >= max(relevantLayers, na.rm = TRUE)}. -#' -#' @references Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). 
-#' Modeling regeneration responses of big sagebrush (Artemisia tridentata) -#' to abiotic conditions. Ecol Model, 286, 66-77. -#' -#' @return A logical vector of the length of \code{relevantLayers} with -#' values containing \code{NA} for days when conditions were not evaluated, -#' \code{TRUE} if all relevant soil layers (columns) of \code{kill.conditions} -#' were \code{TRUE}, and with \code{FALSE} otherwise -#' -#' @examples -#' # The \pkg{Rcpp} function is equivalent to the following R version -#' get_KilledBySoilLayers_R <- function(relevantLayers, kill.conditions) { -#' vapply(seq_along(relevantLayers), function(k) { -#' if (all(is.finite(relevantLayers[k]))) { -#' all(as.logical(kill.conditions[k, seq_len(relevantLayers[k])])) -#' } else NA -#' }, FUN.VALUE = NA) -#' } -#' -#' @export -get_KilledBySoilLayers <- function(relevantLayers, kill_conditions) { - .Call(`_rSFSW2_get_KilledBySoilLayers`, relevantLayers, kill_conditions) -} - -#' Determine seedling survival in the first season (\var{\sQuote{ss1s}}) -#' -#' @section Note: The \pkg{Rcpp} version of the function is about 270x faster -#' for vectors of length 365 and 12,000x faster for vectors of length 11,000 -#' than the R version. The \pkg{Rcpp} version also reduced the memory -#' footprint by a factor of >> 3080. -#' @section Note: Previous name \code{setFALSE_SeedlingSurvival_1stSeason}. -#' -#' @section C code: \code{ss1s} is a pointer to the data and the original -#' vector will get altered; one would need for a deep copy: -#' \code{LogicalVector out = clone(ss1s)} -#' -#' @references Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). -#' Modeling regeneration responses of big sagebrush (Artemisia tridentata) -#' to abiotic conditions. Ecol Model, 286, 66-77. 
-#' -#' @examples -#' # The \pkg{Rcpp} function is equivalent to the following R version -#' kill_seedling_R <- function(ss1s, ry_year_day, ry_useyrs, y, -#' doy) { -#' ss1s[ry_year_day == ry_useyrs[y]][doy] <- FALSE -#' ss1s -#' } -#' -#' @export -kill_seedling <- function(ss1s, ry_year_day, ry_useyrs, y, doy) { - .Call(`_rSFSW2_kill_seedling`, ss1s, ry_year_day, ry_useyrs, y, doy) -} - diff --git a/R/Simulation_Project.R b/R/Simulation_Project.R index 56470ea2..16f6b898 100644 --- a/R/Simulation_Project.R +++ b/R/Simulation_Project.R @@ -304,8 +304,12 @@ init_rSFSW2_project <- function(fmetar, update = FALSE, verbose = TRUE, init_timer(SFSW2_prj_meta[["fnames_out"]][["timerfile"]]) #--- Update simulation time + is_idem <- isTRUE(SFSW2_prj_meta[["req_scens"]][["method_DS"]] == "idem") + SFSW2_prj_meta[["sim_time"]] <- setup_time_simulation_project( - SFSW2_prj_meta[["sim_time"]], add_st2 = TRUE, + sim_time = SFSW2_prj_meta[["sim_time"]], + is_idem = is_idem, + add_st2 = TRUE, adjust_NS = SFSW2_prj_meta[["opt_agg"]][["adjust_NorthSouth"]], use_doy_range = SFSW2_prj_meta[["opt_agg"]][["use_doy_range"]], doy_ranges = SFSW2_prj_meta[["opt_agg"]][["doy_ranges"]] @@ -313,8 +317,10 @@ init_rSFSW2_project <- function(fmetar, update = FALSE, verbose = TRUE, #--- Determine scenario names SFSW2_prj_meta[["sim_scens"]] <- setup_scenarios( - SFSW2_prj_meta[["req_scens"]], - SFSW2_prj_meta[["sim_time"]][["future_yrs"]]) + sim_scens = SFSW2_prj_meta[["req_scens"]], + is_idem = is_idem, + sim_time = SFSW2_prj_meta[["sim_time"]] + ) #--- Determine requested ensembles across climate scenarios SFSW2_prj_meta <- update_scenarios_with_ensembles(SFSW2_prj_meta) @@ -347,8 +353,12 @@ gather_project_inputs <- function(SFSW2_prj_meta, use_preprocin = TRUE, if (!exists("SFSW2_prj_inputs") || is.null(SFSW2_prj_inputs) || todo_intracker(SFSW2_prj_meta, "load_inputs", "prepared")) { - SFSW2_prj_inputs <- process_inputs(SFSW2_prj_meta[["project_paths"]], - SFSW2_prj_meta[["fnames_in"]], 
use_preprocin, verbose) + SFSW2_prj_inputs <- process_inputs( + project_paths = SFSW2_prj_meta[["project_paths"]], + fnames_in = SFSW2_prj_meta[["fnames_in"]], + use_preprocin, + verbose + ) #--- Update output aggregation options SFSW2_prj_meta[["opt_agg"]] <- setup_aggregation_options( @@ -496,44 +506,58 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, temp_call <- shQuote(match.call()[1]) print(paste0("rSFSW2's ", temp_call, ": started at ", t1)) - on.exit({ - print(paste0("rSFSW2's ", temp_call, ": ended after ", - round(difftime(Sys.time(), t1, units = "secs"), 2), " s with ", - "input tracker status:")) - print(SFSW2_prj_meta[["input_status"]]) + on.exit( + { + print(paste0( + "rSFSW2's ", temp_call, ": ended after ", + round(difftime(Sys.time(), t1, units = "secs"), 2), " s with ", + "input tracker status:" + )) + print(SFSW2_prj_meta[["input_status"]]) }, - add = TRUE) + add = TRUE + ) } #------ PROJECT INPUTS - temp <- gather_project_inputs(SFSW2_prj_meta, + temp <- gather_project_inputs( + SFSW2_prj_meta, use_preprocin = opt_behave[["use_preprocin"]], - verbose = opt_verbosity[["verbose"]]) + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] # Check that dbWork is available and has up-to-date structure of tables/fields SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbWork", + SFSW2_prj_meta[["input_status"]], + tracker = "dbWork", prepared = - dbWork_check_design(SFSW2_prj_meta[["project_paths"]][["dir_out"]])) + dbWork_check_design(SFSW2_prj_meta[["project_paths"]][["dir_out"]]) + ) # Check that dbOut is available SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbOut", - prepared = file.exists(SFSW2_prj_meta[["fnames_out"]][["dbOutput"]])) + SFSW2_prj_meta[["input_status"]], + tracker = "dbOut", + prepared = 
file.exists(SFSW2_prj_meta[["fnames_out"]][["dbOutput"]]) + ) #------ Return if all is prepared (from a previous run), input tracker design # is up-to-date, and input object exists and haven't been changed since last # time - if (all(stats::na.exclude(SFSW2_prj_meta[["input_status"]][, "prepared"])) && + if ( + all(stats::na.exclude(SFSW2_prj_meta[["input_status"]][, "prepared"])) && check_intracker_design(SFSW2_prj_meta[["input_status"]]) && - exists("SFSW2_prj_inputs")) { + exists("SFSW2_prj_inputs") + ) { - return(list(SFSW2_prj_meta = SFSW2_prj_meta, - SFSW2_prj_inputs = SFSW2_prj_inputs)) + return(list( + SFSW2_prj_meta = SFSW2_prj_meta, + SFSW2_prj_inputs = SFSW2_prj_inputs + )) } @@ -541,10 +565,20 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, # From here on: objects 'SFSW2_prj_meta' and 'SFSW2_prj_inputs' will be # manipulated, i.e., save them to disk upon exiting function (by error to # save intermediate state) or by final 'return' - on.exit(save_to_rds_with_backup(SFSW2_prj_meta, - file = SFSW2_prj_meta[["fnames_in"]][["fmeta"]]), add = TRUE) - on.exit(save_to_rds_with_backup(SFSW2_prj_inputs, - file = SFSW2_prj_meta[["fnames_in"]][["fpreprocin"]]), add = TRUE) + on.exit( + save_to_rds_with_backup( + SFSW2_prj_meta, + file = SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ), + add = TRUE + ) + on.exit( + save_to_rds_with_backup( + SFSW2_prj_inputs, + file = SFSW2_prj_meta[["fnames_in"]][["fpreprocin"]] + ), + add = TRUE + ) #--- Setup random number generator streams for each runsN_master @@ -557,13 +591,19 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, SFSW2_prj_meta[["rng_specs"]] <- setup_RNG( streams_N = SFSW2_prj_meta[["sim_size"]][["runsN_master"]], global_seed = SFSW2_prj_meta[["opt_sim"]][["global_seed"]], - reproducible = SFSW2_prj_meta[["opt_sim"]][["reproducible"]]) + reproducible = SFSW2_prj_meta[["opt_sim"]][["reproducible"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - 
SFSW2_prj_meta[["input_status"]], tracker = "rng_setup", prepared = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "rng_setup", + prepared = TRUE + ) - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } @@ -574,51 +614,73 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, SFSW2_prj_meta <- set_paths_to_dailyweather_datasources(SFSW2_prj_meta) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbW_paths", prepared = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "dbW_paths", + prepared = TRUE + ) } #--- Determine sources of daily weather if (todo_intracker(SFSW2_prj_meta, "dbW_sources", "prepared")) { - temp1 <- SFSW2_prj_meta[["opt_input"]][["how_determine_sources"]] == + temp1 <- + SFSW2_prj_meta[["opt_input"]][["how_determine_sources"]] == "SWRunInformation" - temp2 <- "dailyweather_source" %in% + temp2 <- + "dailyweather_source" %in% colnames(SFSW2_prj_inputs[["SWRunInformation"]]) if (temp1 && temp2) { - dw_source <- factor(SFSW2_prj_inputs[["SWRunInformation"]][ + dw_source <- factor( + SFSW2_prj_inputs[["SWRunInformation"]][ SFSW2_prj_meta[["sim_size"]][["runIDs_sites"]], "dailyweather_source"], - levels = SFSW2_prj_meta[["opt_input"]][["dw_source_priority"]]) + levels = SFSW2_prj_meta[["opt_input"]][["dw_source_priority"]] + ) do_weather_source <- anyNA(dw_source) } else { dw_source <- factor( rep(NA, SFSW2_prj_meta[["sim_size"]][["runsN_sites"]]), - levels = SFSW2_prj_meta[["opt_input"]][["dw_source_priority"]]) + levels = SFSW2_prj_meta[["opt_input"]][["dw_source_priority"]] + ) do_weather_source <- TRUE } if (do_weather_source) { - SFSW2_prj_inputs[["SWRunInformation"]] <- dw_determine_sources(dw_source, - SFSW2_prj_meta[["exinfo"]], - SFSW2_prj_meta[["opt_input"]][["dw_source_priority"]], - SFSW2_prj_inputs, 
SFSW2_prj_inputs[["SWRunInformation"]], - SFSW2_prj_meta[["sim_size"]], SFSW2_prj_meta[["sim_time"]], - SFSW2_prj_meta[["fnames_in"]], - SFSW2_prj_meta[["project_paths"]], verbose = opt_verbosity[["verbose"]]) - - SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "load_inputs", - prepared = TRUE, checked = FALSE) + SFSW2_prj_inputs[["SWRunInformation"]] <- dw_determine_sources( + dw_source = dw_source, + exinfo = SFSW2_prj_meta[["exinfo"]], + dw_avail_sources = + SFSW2_prj_meta[["opt_input"]][["dw_source_priority"]], + SFSW2_prj_inputs = SFSW2_prj_inputs, + SWRunInformation = SFSW2_prj_inputs[["SWRunInformation"]], + sim_size = SFSW2_prj_meta[["sim_size"]], + sim_time = SFSW2_prj_meta[["sim_time"]], + fnames_in = SFSW2_prj_meta[["fnames_in"]], + project_paths = SFSW2_prj_meta[["project_paths"]], + verbose = opt_verbosity[["verbose"]] + ) + + SFSW2_prj_meta[["input_status"]] <- update_intracker( + SFSW2_prj_meta[["input_status"]], + tracker = "load_inputs", + prepared = TRUE, + checked = FALSE + ) } SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbW_sources", - prepared = TRUE, clean_subsequent = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "dbW_sources", + prepared = TRUE, + clean_subsequent = TRUE + ) - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } @@ -629,8 +691,10 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, SFSW2_prj_meta[["opt_sim"]][["use_dbW_future"]] <- TRUE SFSW2_prj_meta[["opt_sim"]][["use_dbW_current"]] <- TRUE } - if (SFSW2_prj_meta[["opt_sim"]][["use_dbW_future"]]) + + if (SFSW2_prj_meta[["opt_sim"]][["use_dbW_future"]]) { SFSW2_prj_meta[["opt_sim"]][["use_dbW_current"]] <- TRUE + } if (SFSW2_prj_meta[["opt_sim"]][["use_dbW_current"]]) { # Call to `update_runIDs_sites_by_dbW` does 
nothing if `dbWeather` does @@ -640,26 +704,37 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, label_WeatherData = SFSW2_prj_inputs[["SWRunInformation"]][, "WeatherFolder"], fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], - verbose = opt_verbosity[["verbose"]]) + verbose = opt_verbosity[["verbose"]] + ) - make_dbW(SFSW2_prj_meta, + make_dbW( + SFSW2_prj_meta, SWRunInformation = SFSW2_prj_inputs[["SWRunInformation"]], opt_parallel, opt_chunks, opt_behave, deleteTmpSQLFiles = opt_out_run[["deleteTmpSQLFiles"]], verbose = opt_verbosity[["verbose"]], - print.debug = opt_verbosity[["print.debug"]]) + print.debug = opt_verbosity[["print.debug"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbW_current", - prepared = TRUE, clean_subsequent = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "dbW_current", + prepared = TRUE, + clean_subsequent = TRUE + ) - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbW_current", - prepared = NA, checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "dbW_current", + prepared = NA, + checked = NA + ) } } @@ -667,80 +742,117 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, #------ DATA EXTRACTIONS #--- Soil data # nolint start - if (SFSW2_prj_meta[["exinfo"]][["ExtractSoilDataFromCONUSSOILFromSTATSGO_USA"]] || + if ( + SFSW2_prj_meta[["exinfo"]][["ExtractSoilDataFromCONUSSOILFromSTATSGO_USA"]] || SFSW2_prj_meta[["exinfo"]][["ExtractSoilDataFromISRICWISEv12_Global"]] || - SFSW2_prj_meta[["exinfo"]][["ExtractSoilDataFromISRICWISE30secV1a_Global"]]) { + SFSW2_prj_meta[["exinfo"]][["ExtractSoilDataFromISRICWISE30secV1a_Global"]] + ) { # nolint end if 
(todo_intracker(SFSW2_prj_meta, "soil_data", "prepared")) { - SFSW2_prj_inputs <- ExtractData_Soils(SFSW2_prj_meta[["exinfo"]], + SFSW2_prj_inputs <- ExtractData_Soils( + SFSW2_prj_meta[["exinfo"]], SFSW2_prj_meta, SFSW2_prj_inputs, opt_parallel, - resume = opt_behave[["resume"]], verbose = opt_verbosity[["verbose"]]) + resume = opt_behave[["resume"]], + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "soil_data", - prepared = TRUE) - - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + SFSW2_prj_meta[["input_status"]], + tracker = "soil_data", + prepared = TRUE + ) + + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "soil_data", prepared = NA, - checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "soil_data", + prepared = NA, + checked = NA + ) } #--- Mean monthly climate data - if (SFSW2_prj_meta[["exinfo"]][["ExtractSkyDataFromNOAAClimateAtlas_USA"]] || - SFSW2_prj_meta[["exinfo"]][["ExtractSkyDataFromNCEPCFSR_Global"]]) { + if ( + SFSW2_prj_meta[["exinfo"]][["ExtractSkyDataFromNOAAClimateAtlas_USA"]] || + SFSW2_prj_meta[["exinfo"]][["ExtractSkyDataFromNCEPCFSR_Global"]] + ) { if (todo_intracker(SFSW2_prj_meta, "climnorm_data", "prepared")) { SFSW2_prj_inputs <- ExtractData_MeanMonthlyClimate( - SFSW2_prj_meta[["exinfo"]], SFSW2_prj_meta, SFSW2_prj_inputs, - opt_parallel, opt_chunks, resume = opt_behave[["resume"]], - verbose = opt_verbosity[["verbose"]]) + SFSW2_prj_meta[["exinfo"]], + SFSW2_prj_meta, SFSW2_prj_inputs, + opt_parallel, opt_chunks, + resume = opt_behave[["resume"]], + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "climnorm_data", - prepared = TRUE) - - 
save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + SFSW2_prj_meta[["input_status"]], + tracker = "climnorm_data", + prepared = TRUE + ) + + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "climnorm_data", - prepared = NA, checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "climnorm_data", + prepared = NA, + checked = NA + ) } #--- Topographic data - if (SFSW2_prj_meta[["exinfo"]][["ExtractElevation_NED_USA"]] || - SFSW2_prj_meta[["exinfo"]][["ExtractElevation_HWSD_Global"]]) { + if ( + SFSW2_prj_meta[["exinfo"]][["ExtractElevation_NED_USA"]] || + SFSW2_prj_meta[["exinfo"]][["ExtractElevation_HWSD_Global"]] + ) { if (todo_intracker(SFSW2_prj_meta, "elev_data", "prepared")) { - SFSW2_prj_inputs <- ExtractData_Elevation(SFSW2_prj_meta[["exinfo"]], - SFSW2_prj_meta, SFSW2_prj_inputs, resume = opt_behave[["resume"]], - verbose = opt_verbosity[["verbose"]]) + SFSW2_prj_inputs <- ExtractData_Elevation( + exinfo = SFSW2_prj_meta[["exinfo"]], + SFSW2_prj_meta, + SFSW2_prj_inputs, + resume = opt_behave[["resume"]], + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "elev_data", - prepared = TRUE) - - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + SFSW2_prj_meta[["input_status"]], + tracker = "elev_data", + prepared = TRUE + ) + + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "elev_data", prepared = NA, - checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "elev_data", + prepared = NA, + checked = NA + ) } @@ -753,28 +865,40 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, 
opt_behave, label_WeatherData = SFSW2_prj_inputs[["SWRunInformation"]][, "WeatherFolder"], fdbWeather = SFSW2_prj_meta[["fnames_in"]][["fdbWeather"]], - verbose = opt_verbosity[["verbose"]]) - - temp <- PrepareClimateScenarios(SFSW2_prj_meta, SFSW2_prj_inputs, - opt_parallel, resume = opt_behave[["resume"]], opt_verbosity, - opt_chunks) + verbose = opt_verbosity[["verbose"]] + ) + + temp <- PrepareClimateScenarios( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_parallel, + resume = opt_behave[["resume"]], + opt_verbosity, + opt_chunks + ) SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] - # SFSW2_prj_meta is update with random streams for downscaling + + # SFSW2_prj_meta is updated with random streams for downscaling SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] SFSW2_prj_meta[["input_status"]] <- update_intracker( SFSW2_prj_meta[["input_status"]], tracker = "dbW_scenarios", prepared = TRUE) - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbW_scenarios", - prepared = NA, checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "dbW_scenarios", + prepared = NA, + checked = NA + ) } @@ -788,26 +912,36 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, if (SFSW2_prj_meta[["pcalcs"]][["AddRequestedSoilLayers"]]) { if (todo_intracker(SFSW2_prj_meta, "req_soillayers", "prepared")) { - temp <- calc_RequestedSoilLayers(SFSW2_prj_meta, SFSW2_prj_inputs, + temp <- calc_RequestedSoilLayers( + SFSW2_prj_meta, + SFSW2_prj_inputs, runIDs_adjust, keep_old_depth = SFSW2_prj_meta[["opt_input"]][["keep_old_depth"]], - verbose = opt_verbosity[["verbose"]]) + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] SFSW2_prj_meta[["input_status"]] <- 
update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "req_soillayers", - prepared = TRUE) - - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + SFSW2_prj_meta[["input_status"]], + tracker = "req_soillayers", + prepared = TRUE + ) + + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "req_soillayers", - prepared = NA, checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "req_soillayers", + prepared = NA, + checked = NA + ) } @@ -815,21 +949,32 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, if (todo_intracker(SFSW2_prj_meta, "calc_bsevap", "prepared")) { SFSW2_prj_inputs <- get_BareSoilEvapCoefs( - SFSW2_prj_meta, SFSW2_prj_inputs, runIDs_adjust, - resume = opt_behave[["resume"]], verbose = opt_verbosity[["verbose"]]) + SFSW2_prj_meta, + SFSW2_prj_inputs, + runIDs_adjust, + resume = opt_behave[["resume"]], + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "calc_bsevap", - prepared = TRUE) - - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + SFSW2_prj_meta[["input_status"]], + tracker = "calc_bsevap", + prepared = TRUE + ) + + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } } else { SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "calc_bsevap", prepared = NA, - checked = NA) + SFSW2_prj_meta[["input_status"]], + tracker = "calc_bsevap", + prepared = NA, + checked = NA + ) } @@ -849,10 +994,15 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, if (SFSW2_prj_meta[["pcalcs"]][["EstimateInitialSoilTemperatureForEachSoilLayer"]]) { # nolint - use.layers <- 
which(SFSW2_prj_inputs[["sw_input_soils_use"]][ - paste0("Sand_L", SFSW2_glovars[["slyrs_ids"]])]) - index.soilTemp <- paste0("SoilTemp_L", - SFSW2_glovars[["slyrs_ids"]])[use.layers] + use.layers <- which( + SFSW2_prj_inputs[["sw_input_soils_use"]][ + paste0("Sand_L", SFSW2_glovars[["slyrs_ids"]])] + ) + index.soilTemp <- paste0( + "SoilTemp_L", + SFSW2_glovars[["slyrs_ids"]] + )[use.layers] + SFSW2_prj_inputs[["sw_input_soils_use"]][index.soilTemp] <- TRUE } @@ -862,24 +1012,37 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, if (todo_intracker(SFSW2_prj_meta, "table_lookup", "prepared")) { - SFSW2_prj_inputs <- do_prior_TableLookups(SFSW2_prj_meta, SFSW2_prj_inputs, - resume = opt_behave[["resume"]], verbose = opt_verbosity[["verbose"]]) + SFSW2_prj_inputs <- do_prior_TableLookups( + SFSW2_prj_meta, + SFSW2_prj_inputs, + resume = opt_behave[["resume"]], + verbose = opt_verbosity[["verbose"]] + ) SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "table_lookup", - prepared = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "table_lookup", + prepared = TRUE + ) - save_to_rds_with_backup(SFSW2_prj_meta, - SFSW2_prj_meta[["fnames_in"]][["fmeta"]]) + save_to_rds_with_backup( + SFSW2_prj_meta, + SFSW2_prj_meta[["fnames_in"]][["fmeta"]] + ) } #------ CREATE OUTPUT DATABASE (IF NOT ALREADY EXISTING) if (todo_intracker(SFSW2_prj_meta, "dbOut", "prepared")) { - temp <- try(make_dbOutput(SFSW2_prj_meta, SFSW2_prj_inputs, - verbose = opt_verbosity[["verbose"]]), - silent = !opt_verbosity[["print.debug"]]) + temp <- try( + make_dbOutput( + SFSW2_prj_meta, + SFSW2_prj_inputs, + verbose = opt_verbosity[["verbose"]] + ), + silent = !opt_verbosity[["print.debug"]] + ) if (inherits(temp, "try-error")) { stop("Output database failed to setup") @@ -890,7 +1053,10 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, SFSW2_prj_meta[["prj_todos"]][["aon_fields"]] <- 
temp[["fields"]] SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbOut", prepared = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "dbOut", + prepared = TRUE + ) } @@ -898,15 +1064,20 @@ populate_rSFSW2_project_with_data <- function(SFSW2_prj_meta, opt_behave, if (todo_intracker(SFSW2_prj_meta, "dbWork", "prepared")) { # This requires the presence of dbOutput - temp <- recreate_dbWork(SFSW2_prj_meta = SFSW2_prj_meta, - verbose = opt_verbosity[["print.debug"]]) + temp <- recreate_dbWork( + SFSW2_prj_meta = SFSW2_prj_meta, + verbose = opt_verbosity[["print.debug"]] + ) if (!temp) { stop("Work database failed to setup") } SFSW2_prj_meta[["input_status"]] <- update_intracker( - SFSW2_prj_meta[["input_status"]], tracker = "dbWork", prepared = TRUE) + SFSW2_prj_meta[["input_status"]], + tracker = "dbWork", + prepared = TRUE + ) } @@ -1080,7 +1251,9 @@ check_rSFSW2_project_input_data <- function(SFSW2_prj_meta, SFSW2_prj_inputs, # `PotentialNaturalVegetation_*` columns are turned on pnv0_temp <- "PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996" - pnv_temp <- c("PotentialNaturalVegetation_CompositionShrubs_Fraction", + pnv_temp <- c( + "PotentialNaturalVegetation_CompositionShrubs_Fraction", + "PotentialNaturalVegetation_CompositionTotalGrasses_Fraction", "PotentialNaturalVegetation_CompositionC3_Fraction", "PotentialNaturalVegetation_CompositionC4_Fraction", "PotentialNaturalVegetation_CompositionAnnuals_Fraction", @@ -1094,19 +1267,22 @@ check_rSFSW2_project_input_data <- function(SFSW2_prj_meta, SFSW2_prj_inputs, "RootProfile_C4", "RootProfile_Annuals", "RootProfile_Shrubs", - "RootProfile_Forb") + "RootProfile_Forb" + ) temp1 <- pnv0_temp %in% SFSW2_prj_inputs[["create_treatments"]] temp2 <- pnv_temp %in% SFSW2_prj_inputs[["create_treatments"]] icheck <- (!temp1 && all(!temp2)) || (temp1 && any(temp2)) if (any(!icheck)) { - stop("Calculation and/or adjustement of 'potential natural vegetation' ", 
+ stop( + "Calculation and/or adjustement of 'potential natural vegetation' ", "is requested for some composition/biomass/root components: the ", "column ", "'PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996' ", "is the overall gate-keeper for this suit of functionality and must ", - "thus be turned on as well but is currently not.") + "thus be turned on as well but is currently not." + ) } SFSW2_prj_meta[["input_status"]] <- update_intracker( @@ -1344,8 +1520,10 @@ simulate_SOILWAT2_experiment <- function(SFSW2_prj_meta, SFSW2_prj_inputs, args_do_OneSite <- gather_args_do_OneSite(SFSW2_prj_meta, SFSW2_prj_inputs) runs.completed <- run_simulation_experiment( - sim_size = SFSW2_prj_meta[["sim_size"]], SFSW2_prj_inputs, - MoreArgs = args_do_OneSite) + sim_size = SFSW2_prj_meta[["sim_size"]], + SFSW2_prj_inputs = SFSW2_prj_inputs, + MoreArgs = args_do_OneSite + ) } else { runs.completed <- 0 diff --git a/R/Simulation_Run.R b/R/Simulation_Run.R index 142a3881..e4932e83 100644 --- a/R/Simulation_Run.R +++ b/R/Simulation_Run.R @@ -83,9 +83,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } # temporary output database - dbTempFile <- dbConnect(SQLite(), dbname = - file.path(SimParams[["project_paths"]][["dir_out_temp"]], - paste0("SQL_Node_", fid, ".sqlite3"))) + dbTempFile <- dbConnect( + drv = SQLite(), + dbname = file.path( + SimParams[["project_paths"]][["dir_out_temp"]], + paste0("SQL_Node_", fid, ".sqlite3") + ) + ) on.exit(dbDisconnect(dbTempFile), add = TRUE) # Print/tag for function call @@ -109,10 +113,18 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, list2env(as.list(SimParams), envir = environment()) - if (opt_behave[["keep_dbWork_updated"]] && - !(SFSW2_glovars[["p_has"]] && SFSW2_glovars[["p_type"]] == "mpi")) { - stopifnot(dbWork_update_job(project_paths[["dir_out"]], i_sim, status = "inwork", - verbose = opt_verbosity[["print.debug"]])) + if ( + opt_behave[["keep_dbWork_updated"]] && + 
!(SFSW2_glovars[["p_has"]] && SFSW2_glovars[["p_type"]] == "mpi") + ) { + stopifnot( + dbWork_update_job( + path = project_paths[["dir_out"]], + runID = i_sim, + status = "inwork", + verbose = opt_verbosity[["print.debug"]] + ) + ) } flag.icounter <- formatC(i_sim, width = sim_size[["digitsN_total"]], format = "d", @@ -255,13 +267,39 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } #--- Load previously created rSOILWAT2 run objets - if (file.exists(f_sw_input) && ((any(tasks[, "create"] == 1L) && opt_behave[["resume"]]) || - (all(tasks[, "create"] == -1L) && any(tasks[, "execute"] == 1L, tasks[, "aggregate"] == 1L)))) { + objnames_saveRsoilwatInput <- c( + "swRunScenariosData", + "i_sw_weatherList", + "grasses.c3c4ann.fractions", + "ClimatePerturbationsVals", + "isim_time", + "simTime2" + ) + + if ( + file.exists(f_sw_input) && + ((any(tasks[, "create"] == 1L) && opt_behave[["resume"]]) || + (all(tasks[, "create"] == -1L) && + any(tasks[, "execute"] == 1L, tasks[, "aggregate"] == 1L)) + ) + ) { + + # load objects: objnames_saveRsoilwatInput + tmp <- try( + load(f_sw_input), + silent = TRUE + ) - # load objects: swRunScenariosData, i_sw_weatherList, grasses.c3c4ann.fractions, - # ClimatePerturbationsVals, isim_time, simTime2 - load(f_sw_input) - tasks[, "create"] <- 2L + if ( + !inherits(tmp, "try-error") && + all(sapply(objnames_saveRsoilwatInput, exists)) && + check_rSW2_version( + object = swRunScenariosData[[1]], + strict = opt_out_run[["enforce_rSW2_version"]] + ) + ) { + tasks[, "create"] <- 2L + } } @@ -313,44 +351,46 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, bottomL <- setBottomLayer(soilLayers_N, DeepestTopLayer) - #------Learn about simulation time - isim_time <- sim_time + #------Learn about simulation time (for each scenario) + isim_time <- simTime2 <- vector("list", nrow(sim_scens[["itime"]])) + + for (itime in seq_len(nrow(sim_scens[["itime"]]))) { + isim_time[[itime]] <- list() + + #--- 
Set start/end year and spinup of simulation + isim_time[[itime]][["spinup_N"]] <- sim_time[["spinup_N"]] - if (any(create_treatments == "YearStart") || any(create_treatments == "YearEnd")) { - #------time frame of simulation - if (any(create_treatments == "YearStart")) { - #year when SOILWAT2 starts the simulation - isim_time[["simstartyr"]] <- i_sw_input_treatments$YearStart - #first year that is used for output aggregation, e.g., simstartyr + 1 - isim_time[["startyr"]] <- rSOILWAT2::getStartYear( - isim_time[["simstartyr"]], isim_time[["spinup_N"]]) + isim_time[[itime]][["simstartyr"]] <- if ( + itime == 1 && any(create_treatments == "YearStart") + ) { + i_sw_input_treatments$YearStart + } else { + sim_scens[["itime"]][itime, "simstartyr"] } - if (any(create_treatments == "YearEnd")) { - #year when SOILWAT2 ends the simulation - isim_time[["endyr"]] <- i_sw_input_treatments$YearEnd + + isim_time[[itime]][["endyr"]] <- if ( + itime == 1 && any(create_treatments == "YearEnd") + ) { + i_sw_input_treatments$YearEnd + } else { + sim_scens[["itime"]][itime, "endyr"] } - #------simulation timing needs to be adjusted - isim_time <- setup_time_simulation_project(isim_time, add_st2 = FALSE) + #--- Calculate time sequences + isim_time[[itime]] <- rSOILWAT2::setup_time_simulation_run( + sim_time = isim_time[[itime]] + ) - simTime2 <- rSOILWAT2::simTiming_ForEachUsedTimeUnit( - useyrs = isim_time[["useyrs"]], + #--- Calculate some more time sequences + simTime2[[itime]] <- rSOILWAT2::simTiming_ForEachUsedTimeUnit( + useyrs = isim_time[[itime]][["useyrs"]], sim_tscales = c("daily", "monthly", "yearly"), latitude = i_SWRunInformation$Y_WGS84, account_NorthSouth = opt_agg[["adjust_NorthSouth"]], use_doy_range = opt_agg[["use_doy_range"]], - doy_ranges = opt_agg[["doy_ranges"]]) - - } else { - simTime2 <- if (i_SWRunInformation$Y_WGS84 >= 0) { - isim_time[["sim_time2_North"]] - } else { - isim_time[["sim_time2_South"]] - } + doy_ranges = opt_agg[["doy_ranges"]] + ) } - - 
isim_time[["sim_time2_North"]] <- NULL - isim_time[["sim_time2_South"]] <- NULL } @@ -375,10 +415,6 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, swRunScenariosData <- list() swRunScenariosData[[1]] <- swDefaultInputs - #adjust simulation years - rSOILWAT2::swYears_EndYear(swRunScenariosData[[1]]) <- as.integer(isim_time[["endyr"]]) - rSOILWAT2::swYears_StartYear(swRunScenariosData[[1]]) <- as.integer(isim_time[["simstartyr"]]) - #------2. Step: a) Information for this SOILWAT2-run from treatment SOILWAT2 input files stored in dir_in_treat if (any(create_treatments == "sw")) print(paste0(tag_simfid, ": SW treatment is not used because 'rSOILWAT2' package only uses one version of SOILWAT2. Sorry")) @@ -700,12 +736,6 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, as.numeric(i_sw_input_site$Param_UnsaturatedPercolation) } - flags <- c("Latitude", "Altitude", "Slope", "Aspect") - site_use <- sw_input_site_use[flags] - if (any(site_use)) - rSOILWAT2::swSite_IntrinsicSiteParams(swRunScenariosData[[1]])[site_use] <- - as.numeric(i_sw_input_site[flags][site_use]) - if (sw_input_site_use["SoilTemp_Flag"]) { rSOILWAT2::swSite_SoilTemperatureFlag(swRunScenariosData[[1]]) <- as.logical(i_sw_input_site$SoilTemp_Flag) @@ -724,12 +754,16 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, as.numeric(i_sw_input_site[flagsIn][site_use]) } - rSOILWAT2::swSite_IntrinsicSiteParams(swRunScenariosData[[1]])[1] <- - as.numeric(i_SWRunInformation$Y_WGS84 * pi / 180) - if (is.finite(i_SWRunInformation$ELEV_m)) - rSOILWAT2::swSite_IntrinsicSiteParams(swRunScenariosData[[1]])[2] <- - as.numeric(i_SWRunInformation$ELEV_m) + # add site location, elevation, and surface orientation + var_loc <- c("X_WGS84", "Y_WGS84", "ELEV_m", "Slope", "Aspect") + tmp <- as.numeric(i_SWRunInformation[var_loc]) + has_aspect <- is.finite(tmp[5]) && abs(tmp[5]) <= 180 + tmp <- ifelse(is.finite(tmp), tmp, 0) + if (!has_aspect) 
tmp[4:5] <- c(0, 999) + + rSOILWAT2::swSite_IntrinsicSiteParams(swRunScenariosData[[1]]) <- tmp + #add soil information to soilsin print_debug(opt_verbosity, tag_simfid, "creating", "soils") @@ -802,7 +836,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, soil_swdat[luse[[k]], coefs[["sw"]][iv]] } if (isTRUE(grepl("coeff", coefs[["infile"]][iv], ignore.case = TRUE))) - temp <- scale_by_sum(temp) + temp <- rSW2utils::scale_by_sum(temp) soildat[luse[[k]], coefs[["sw"]][iv]] <- temp } } @@ -906,7 +940,6 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, rSOILWAT2::swWeather_pct_SnowDrift(swRunScenariosData[[1]]) <- i_sw_input_weather$SnowDrift_Percent if (sw_input_weather_use["RunOffOnPerSnowmelt_Percent"]) rSOILWAT2::swWeather_pct_SnowRunoff(swRunScenariosData[[1]]) <- i_sw_input_weather$RunOffOnPerSnowmelt_Percent - rSOILWAT2::swWeather_FirstYearHistorical(swRunScenariosData[[1]]) <- isim_time[["simstartyr"]] # Set simulation_timescales fix to daily, monthly, and yearly rSOILWAT2::swOUT_TimeStepsForEveryKey(swRunScenariosData[[1]]) <- c(daily = 0, monthly = 2, yearly = 3) @@ -921,8 +954,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, dir_data = project_paths[["dir_maurer2002"]], cellname = with(i_SWRunInformation, create_filename_for_Maurer2002_NorthAmerica(X_WGS84, Y_WGS84)), - start_year = isim_time[["simstartyr"]], - end_year = isim_time[["endyr"]], + start_year = isim_time[[1]][["simstartyr"]], + end_year = isim_time[[1]][["endyr"]], verbose = opt_verbosity[["verbose"]]) } else if (i_SWRunInformation$dailyweather_source == "DayMet_NorthAmerica") { @@ -931,7 +964,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, dir_data = dir_daymet, site_ids = NULL, coords_WGS84 = c(X_WGS84, Y_WGS84), - start_year = isim_time[["simstartyr"]], end_year = isim_time[["endyr"]])) + start_year = isim_time[[1]][["simstartyr"]], end_year = isim_time[[1]][["endyr"]])) } else 
if (i_SWRunInformation$dailyweather_source == "LookupWeatherFolder") { # Read weather data from folder @@ -939,7 +972,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, LookupWeatherFolder = file.path(project_paths[["dir_in_treat"]], "LookupWeatherFolder"), weatherDirName = local_weatherDirName(i_sim, sim_size[["runsN_master"]], sim_scens[["N"]], fnames_out[["dbOutput"]]), filebasename = opt_sim[["tag_WeatherFolder"]], - startYear = isim_time[["simstartyr"]], endYear = isim_time[["endyr"]]), + startYear = isim_time[[1]][["simstartyr"]], endYear = isim_time[[1]][["endyr"]]), silent = !opt_verbosity[["verbose"]]) } @@ -947,8 +980,15 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, #---Extract weather data print_debug(opt_verbosity, tag_simfid, "creating", "access dbOut for weatherDirName") - weather_label_cur <- try(local_weatherDirName(i_sim, sim_size[["runsN_master"]], sim_scens[["N"]], - fnames_out[["dbOutput"]]), silent = !opt_verbosity[["verbose"]]) + weather_label_cur <- try( + local_weatherDirName( + i_sim = i_sim, + runN = sim_size[["runsN_master"]], + scN = sim_scens[["N"]], + dbOutput = fnames_out[["dbOutput"]] + ), + silent = !opt_verbosity[["verbose"]] + ) if (is.na(weather_label_cur)) weather_label_cur <- try({function() stop(tag_simfid, ": Output DB ", @@ -959,16 +999,24 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, i_sw_weatherList <- weather_label_cur } else { - temp <- if (opt_sim[["use_dbW_future"]]) { - seq_len(sim_scens[["N"]]) - } else 1L - print_debug(opt_verbosity, tag_simfid, "creating", "access dbW for daily weather") - i_sw_weatherList <- try(lapply(sim_scens[["id"]][temp], function(scen) - rSOILWAT2::dbW_getWeatherData(Label = weather_label_cur, - startYear = isim_time[["simstartyr"]], endYear = isim_time[["endyr"]], - Scenario = scen)), silent = !opt_verbosity[["verbose"]]) + i_sw_weatherList <- try( + lapply( + X = if (opt_sim[["use_dbW_future"]]) { + 
seq_len(sim_scens[["N"]]) + } else { + 1L + }, + function(sc) rSOILWAT2::dbW_getWeatherData( + Label = weather_label_cur, + startYear = isim_time[[sim_scens[["df"]][sc, "itime"]]][["simstartyr"]], + endYear = isim_time[[sim_scens[["df"]][sc, "itime"]]][["endyr"]], + Scenario = sim_scens[["id"]][sc] + ) + ), + silent = !opt_verbosity[["verbose"]] + ) } } @@ -982,8 +1030,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # Copy and make climate scenarios from datafiles if (any(tasks[, "create"] > 0L)) for (sc in seq_len(sim_scens[["N"]])) { - tag_simpidfid <- paste0("[run", i_sim, "/PID", all_Pids[sc], "/sc", sc, - "/work", fid, "]") + tag_simpidfid <- paste0( + "[run", i_sim, "/PID", all_Pids[sc], "/sc", sc, "/work", fid, "]" + ) + + itime <- sim_scens[["df"]][sc, "itime"] if (sc > 1) { swRunScenariosData[[sc]] <- swRunScenariosData[[1]] @@ -992,7 +1043,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # The delta year was originaly designed to only be used by swCarbon to grab the correct ppm values, # but has since been used to also display the correct years in runDataSC, so this information is # extracted regardless of whether or not CO2 effects are being used - delta_yr <- sim_scens[["df"]][sc - 1, "Delta_yrs"] + delta_yr <- sim_scens[["df"]][sc, "Delta_yrs"] if (!is.na(delta_yr)) rSOILWAT2::swCarbon_DeltaYear(swRunScenariosData[[sc]]) <- as.integer(delta_yr) @@ -1002,12 +1053,26 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, do_C4vars <- any(create_treatments == "PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996") || isTRUE(prj_todos[["aon"]][["dailyC4_TempVar"]]) #redo SiteClimate_Ambient - SiteClimate_Ambient <- rSOILWAT2::calc_SiteClimate(weatherList = i_sw_weatherList[[1]], - year.start = min(isim_time$useyrs), year.end = max(isim_time$useyrs), - do_C4vars = do_C4vars, simTime2 = simTime2) + SiteClimate_Ambient <- rSOILWAT2::calc_SiteClimate( + weatherList = 
i_sw_weatherList[[sc]], + year.start = min(isim_time[[itime]]$useyrs), + year.end = max(isim_time[[itime]]$useyrs), + do_C4vars = do_C4vars, + simTime2 = simTime2[[itime]] + ) } } + + #--- adjust simulation years + rSOILWAT2::swYears_EndYear(swRunScenariosData[[sc]]) <- + as.integer(isim_time[[itime]][["endyr"]]) + rSOILWAT2::swYears_StartYear(swRunScenariosData[[sc]]) <- + as.integer(isim_time[[itime]][["simstartyr"]]) + rSOILWAT2::swWeather_FirstYearHistorical(swRunScenariosData[[sc]]) <- + as.integer(isim_time[[itime]][["simstartyr"]]) + + #----- Begin CO2 effects # CO2 effects rely on the information of the current scenario, so the extraction of its Lookup data # doesn't occur until now @@ -1018,46 +1083,56 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, break } - scenario_CO2 <- "Default" - - # Are we modeling a scenario? - if (sc > 1) { - # Did the user request to use the built-in scenario information? - if (toupper(i_sw_input_treatments$LookupCO2data) == "FILL") - scenario_CO2 <- sim_scens[["df"]][sc - 1, "ConcScen"] - } + # Locate the atmospheric CO2 concentration dataset name + if (toupper(i_sw_input_treatments$LookupCO2data) == "FILL") { + # Did the user request to use the values associated with the + # model scenario names? + scenario_CO2 <- sim_scens[["df"]][sc, "ConcScen"] - # Did the user override the scenario name? - if (toupper(i_sw_input_treatments$LookupCO2data) != "FILL") + } else { + # Did the user override the scenario name? scenario_CO2 <- i_sw_input_treatments$LookupCO2data + } - # Save the scenario to the input object just so that the user can see it - rSOILWAT2::swCarbon_Scenario(swRunScenariosData[[sc]]) <- scenario_CO2 - - scenario_index <- which(toupper(colnames(tr_input_CO2data)) == toupper(scenario_CO2)) + scenario_index <- which( + toupper(colnames(tr_input_CO2data)) == toupper(scenario_CO2) + ) - # Was a scenario found? + # Is the scenario available? 
if (length(scenario_index) == 0) { tasks[sc, "create"] <- 0L - print(paste0(tag_simfid, ": ERROR: Scenario ", scenario_CO2, - " was not found in `LookupCO2data` table")) + print(paste0( + tag_simfid, ": ERROR: CO2-concentration dataset name ", + shQuote(scenario_CO2), " was not found in `LookupCO2data` table" + )) break } + # Save the scenario to the input object just so that the user can see it + rSOILWAT2::swCarbon_Scenario(swRunScenariosData[[sc]]) <- scenario_CO2 + # Normally, we would also check for duplicate scenarios, but when the CSV is read in, duplicate column headers # are already accounted for by incrementing the name. For instance, having two RCP85 scenarios result in these # headers: RCP85, RCP85.1 # Extract CO2 concentration values in units of ppm into swCarbon - ids_years <- match(isim_time$simstartyr:isim_time$endyr + rSOILWAT2::swCarbon_DeltaYear(swRunScenariosData[[sc]]), - tr_input_CO2data[, "Year"], nomatch = 0) + ids_years <- match( + isim_time[[itime]]$simstartyr:isim_time[[itime]]$endyr + + rSOILWAT2::swCarbon_DeltaYear(swRunScenariosData[[sc]]), + tr_input_CO2data[, "Year"], + nomatch = 0 + ) # Convert possible integers to numeric - tr_input_CO2data[ids_years, scenario_index] <- as.numeric(unlist(tr_input_CO2data[ids_years, scenario_index])) + tr_input_CO2data[ids_years, scenario_index] <- as.numeric(unlist( + tr_input_CO2data[ids_years, scenario_index] + )) scenarioCO2_ppm <- tr_input_CO2data[ids_years, c(1, scenario_index)] colnames(scenarioCO2_ppm) <- c("Year", "CO2ppm") - rSOILWAT2::swCarbon_CO2ppm(swRunScenariosData[[sc]]) <- as.matrix(scenarioCO2_ppm, - rownames.force = TRUE) + rSOILWAT2::swCarbon_CO2ppm(swRunScenariosData[[sc]]) <- as.matrix( + scenarioCO2_ppm, + rownames.force = TRUE + ) } # End CO2 effects ----- @@ -1157,16 +1232,20 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, SiteClimate_Scenario$MAP_cm <- sum(SiteClimate_Scenario$meanMonthlyPPTcm) SiteClimate_Scenario$MAT_C <- 
mean(SiteClimate_Scenario$meanMonthlyTempC) if (do_C4vars) { - SiteClimate_Scenario$dailyTempMin <- SiteClimate_Ambient$dailyTempMin + t_min_f[simTime2$month_ForEachUsedDay] - SiteClimate_Scenario$dailyTempMean <- SiteClimate_Ambient$dailyTempMean + tmean_f[simTime2$month_ForEachUsedDay] - SiteClimate_Scenario$dailyC4vars <- rSOILWAT2::sw_dailyC4_TempVar(SiteClimate_Scenario$dailyTempMin, SiteClimate_Scenario$dailyTempMean, simTime2) + SiteClimate_Scenario$dailyTempMin <- SiteClimate_Ambient$dailyTempMin + t_min_f[simTime2[[itime]]$month_ForEachUsedDay] + SiteClimate_Scenario$dailyTempMean <- SiteClimate_Ambient$dailyTempMean + tmean_f[simTime2[[itime]]$month_ForEachUsedDay] + SiteClimate_Scenario$dailyC4vars <- rSOILWAT2::sw_dailyC4_TempVar(SiteClimate_Scenario$dailyTempMin, SiteClimate_Scenario$dailyTempMean, simTime2[[itime]]) } } } else { - SiteClimate_Scenario <- rSOILWAT2::calc_SiteClimate(weatherList = i_sw_weatherList[[sc]], - year.start = min(isim_time$useyrs), year.end = max(isim_time$useyrs), - do_C4vars = do_C4vars, simTime2 = simTime2) + SiteClimate_Scenario <- rSOILWAT2::calc_SiteClimate( + weatherList = i_sw_weatherList[[sc]], + year.start = min(isim_time[[itime]]$useyrs), + year.end = max(isim_time[[itime]]$useyrs), + do_C4vars = do_C4vars, + simTime2 = simTime2[[itime]] + ) if (sc > 1) { ppt_sc <- (temp <- rSOILWAT2::swWeather_MonScalingParams(swRunScenariosData[[sc]]))[, 1] @@ -1338,11 +1417,16 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, isNorth <- i_SWRunInformation$Y_WGS84 >= 0 - pnv <- try(rSOILWAT2::estimate_PotNatVeg_composition(MAP_mm, MAT_C, - mean_monthly_ppt_mm = monthly.ppt, mean_monthly_Temp_C = monthly.temp, - dailyC4vars = dailyC4vars, isNorth = isNorth, + pnv <- try(rSOILWAT2::estimate_PotNatVeg_composition( + MAP_mm, MAT_C, + mean_monthly_ppt_mm = monthly.ppt, + mean_monthly_Temp_C = monthly.temp, + dailyC4vars = dailyC4vars, + isNorth = isNorth, shrub_limit = opt_sim[["shrub_limit"]], fix_succulents 
= TRUE, Succulents_Fraction = 0, + fix_sumgrasses = any(create_treatments == "PotentialNaturalVegetation_CompositionTotalGrasses_Fraction"), + SumGrasses_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionTotalGrasses_Fraction, fix_annuals = any(create_treatments == "PotentialNaturalVegetation_CompositionAnnuals_Fraction"), Annuals_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionAnnuals_Fraction, fix_C4grasses = any(create_treatments == "PotentialNaturalVegetation_CompositionC4_Fraction"), @@ -1351,13 +1435,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, C3_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionC3_Fraction, fix_shrubs = any(create_treatments == "PotentialNaturalVegetation_CompositionShrubs_Fraction"), Shrubs_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionShrubs_Fraction, - fix_forbs = TRUE, Forbs_Fraction = 0, + fix_forbs = any(create_treatments == "PotentialNaturalVegetation_CompositionForb_Fraction"), + Forbs_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionForb_Fraction, fix_trees = any(create_treatments == "PotentialNaturalVegetation_CompositionTrees_Fraction"), Trees_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionTrees_Fraction, fix_BareGround = any(create_treatments == "PotentialNaturalVegetation_CompositionBareGround_Fraction"), BareGround_Fraction = i_sw_input_treatments$PotentialNaturalVegetation_CompositionBareGround_Fraction, - fill_empty_with_BareGround = TRUE) - ) + fill_empty_with_BareGround = TRUE + )) if (inherits(pnv, "try-error")) { tasks[sc, "create"] <- 0L @@ -1368,7 +1453,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # Grasses Shrubs Trees Forbs BareGround ids <- c("SW_GRASS", "SW_SHRUB", "SW_TREES", "SW_FORBS", "SW_BAREGROUND") - temp <- finite01(pnv[["Rel_Abundance_L1"]][ids]) + temp <- rSW2utils::finite01(pnv[["Rel_Abundance_L1"]][ids]) 
rSOILWAT2::swProd_Composition(swRunScenariosData[[sc]]) <- temp grasses.c3c4ann.fractions[[sc]] <- pnv[["Grasses"]] @@ -1377,23 +1462,31 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, print_debug(opt_verbosity, tag_simpidfid, "creating", "potential vegetation") - if (any(create_treatments == "PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996") && + if ( + any(create_treatments == "PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996") && i_sw_input_treatments$PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996 && ((any(create_treatments == "AdjMonthlyBioMass_Temperature") && - i_sw_input_treatments$AdjMonthlyBioMass_Temperature) | + i_sw_input_treatments$AdjMonthlyBioMass_Temperature) || (any(create_treatments == "AdjMonthlyBioMass_Precipitation") && - i_sw_input_treatments$AdjMonthlyBioMass_Precipitation))) { + i_sw_input_treatments$AdjMonthlyBioMass_Precipitation)) + ) { - temp <- rSOILWAT2::estimate_PotNatVeg_biomass( + tmp <- rSOILWAT2::estimate_PotNatVeg_biomass( tr_VegBiom = tr_VegetationComposition, - do_adjBiom_by_temp = any(create_treatments == "AdjMonthlyBioMass_Temperature") && i_sw_input_treatments$AdjMonthlyBioMass_Temperature, - do_adjBiom_by_ppt = any(create_treatments == "AdjMonthlyBioMass_Precipitation") & i_sw_input_treatments$AdjMonthlyBioMass_Precipitation, + do_adjust_phenology = + any(create_treatments == "AdjMonthlyBioMass_Temperature") && + i_sw_input_treatments$AdjMonthlyBioMass_Temperature, + do_adjust_biomass = + any(create_treatments == "AdjMonthlyBioMass_Precipitation") && + i_sw_input_treatments$AdjMonthlyBioMass_Precipitation, fgrass_c3c4ann = grasses.c3c4ann.fractions[[sc]], - growing_limit_C = opt_sim[["growseason_Tlimit_C"]], - isNorth = isNorth, MAP_mm = MAP_mm, mean_monthly_temp_C = monthly.temp) + MAP_mm = MAP_mm, + ref_temp = opt_sim[["reference_temperature_default_phenology"]], + target_temp = monthly.temp + ) - rSOILWAT2::swProd_MonProd_grass(swRunScenariosData[[sc]])[, 
1:3] <- temp$grass[, 1:3] - rSOILWAT2::swProd_MonProd_shrub(swRunScenariosData[[sc]])[, 1:3] <- temp$shrub[, 1:3] + rSOILWAT2::swProd_MonProd_grass(swRunScenariosData[[sc]])[, 1:3] <- tmp[["grass"]][, 1:3] + rSOILWAT2::swProd_MonProd_shrub(swRunScenariosData[[sc]])[, 1:3] <- tmp[["shrub"]][, 1:3] } #adjust Root Profile - need composition fractions set above @@ -1595,10 +1688,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, rSOILWAT2::swProd_MonProd_tree(swRunScenariosData[[sc]])[, 1:3] <- sweep(rSOILWAT2::swProd_MonProd_tree(swRunScenariosData[[sc]])[, 1:3], MARGIN = 2, FUN = "*", tree_LitterTotalLiveScalingFactors) rSOILWAT2::swProd_MonProd_forb(swRunScenariosData[[sc]])[, 1:3] <- sweep(rSOILWAT2::swProd_MonProd_forb(swRunScenariosData[[sc]])[, 1:3], MARGIN = 2, FUN = "*", forb_LitterTotalLiveScalingFactors) } - rSOILWAT2::swProd_MonProd_grass(swRunScenariosData[[sc]])[, 3] <- finite01(rSOILWAT2::swProd_MonProd_grass(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 - rSOILWAT2::swProd_MonProd_shrub(swRunScenariosData[[sc]])[, 3] <- finite01(rSOILWAT2::swProd_MonProd_shrub(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 - rSOILWAT2::swProd_MonProd_tree(swRunScenariosData[[sc]])[, 3] <- finite01(rSOILWAT2::swProd_MonProd_tree(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 - rSOILWAT2::swProd_MonProd_forb(swRunScenariosData[[sc]])[, 3] <- finite01(rSOILWAT2::swProd_MonProd_forb(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 + rSOILWAT2::swProd_MonProd_grass(swRunScenariosData[[sc]])[, 3] <- rSW2utils::finite01(rSOILWAT2::swProd_MonProd_grass(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 + rSOILWAT2::swProd_MonProd_shrub(swRunScenariosData[[sc]])[, 3] <- rSW2utils::finite01(rSOILWAT2::swProd_MonProd_shrub(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 + 
rSOILWAT2::swProd_MonProd_tree(swRunScenariosData[[sc]])[, 3] <- rSW2utils::finite01(rSOILWAT2::swProd_MonProd_tree(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 + rSOILWAT2::swProd_MonProd_forb(swRunScenariosData[[sc]])[, 3] <- rSW2utils::finite01(rSOILWAT2::swProd_MonProd_forb(swRunScenariosData[[sc]])[, 3]) #Check that live biomass fraction <= 1 & >= 0 } if (any(create_treatments == "Vegetation_Height_ScalingFactor")) { @@ -1712,8 +1805,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # Save input data if requested if (opt_out_run[["saveRsoilwatInput"]]) { - save(swRunScenariosData, i_sw_weatherList, grasses.c3c4ann.fractions, - ClimatePerturbationsVals, isim_time, simTime2, file = f_sw_input) + save(list = objnames_saveRsoilwatInput, file = f_sw_input) } } #end if do create runs @@ -1836,12 +1928,31 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, print_debug(opt_verbosity, tag_simpidfid, "executing", "SOILWAT2") - if (file.exists(f_sw_output[sc]) && ((tasks[sc, "execute"] == 1L && opt_behave[["resume"]]) || - (tasks[sc, "execute"] == -1L && any(tasks[, "aggregate"] == 1L)))) { + itime <- sim_scens[["df"]][sc, "itime"] - load(f_sw_output[sc]) # load object: runDataSC - if (exists("runDataSC")) + if ( + file.exists(f_sw_output[sc]) && + ((tasks[sc, "execute"] == 1L && opt_behave[["resume"]]) || + (tasks[sc, "execute"] == -1L && any(tasks[, "aggregate"] == 1L)) + ) + ) { + + # load object: runDataSC + tmp <- try( + load(f_sw_output[sc]), + silent = TRUE + ) + + if ( + !inherits(tmp, "try-error") && + exists("runDataSC") && + check_rSW2_version( + object = runDataSC, + strict = opt_out_run[["enforce_rSW2_version"]] + ) + ) { tasks[sc, "execute"] <- 2L + } } if (tasks[sc, "execute"] == 1L) { @@ -1898,9 +2009,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, DeltaX[2] <- if (!inherits(runDataSC, "try-error") && !is_SOILTEMP_INSTABLE[sc]) 2L else -1L 
#TODO: change deltaX_Param for all [> sc] as well - if (opt_out_run[["saveRsoilwatInput"]]) - save(swRunScenariosData, i_sw_weatherList, grasses.c3c4ann.fractions, - ClimatePerturbationsVals, file = f_sw_input) + if (opt_out_run[["saveRsoilwatInput"]]) { + save(list = objnames_saveRsoilwatInput, file = f_sw_input) + } } else { DeltaX <- c(rSOILWAT2::swSite_SoilTemperatureConsts(swRunScenariosData[[sc]])["deltaX_Param"], 1L) @@ -2024,14 +2135,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, print_debug(opt_verbosity, tag_simpidfid, "aggregating", "input_VegetationBiomassTrends") if (!exists("veg.yr")) { - veg.yr <- get_Vegetation_yr(runDataSC, isim_time) + veg.yr <- get_Vegetation_yr(runDataSC, isim_time[[itime]]) } for (vcomp in c("totalbiomass", "livebiomass", "litter")) { nv_add <- ncol(veg.yr[[vcomp]]) nv_new <- nv + nv_add resMeans[nv:(nv_new - 1)] <- .colMeans(veg.yr[[vcomp]], - m = isim_time$no.useyr, n = nv_add) + m = isim_time[[itime]]$no.useyr, n = nv_add) resSDs[nv:(nv_new - 1)] <- apply(veg.yr[[vcomp]], MARGIN = 2, FUN = stats::sd) nv <- nv_new @@ -2055,8 +2166,15 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, sumWeightedLiveBiomassByMonth <- apply(sweep(tempdat, MARGIN = 2, fracs, FUN = "*"), MARGIN = 1, sum) #sweep out fractionals, and sum over rows maxMonth <- which(sumWeightedLiveBiomassByMonth == max(sumWeightedLiveBiomassByMonth)) #returns index, which is the month, of max bio - meanPeakMonth <- circ_mean(maxMonth, 12) - duration <- circ_range(maxMonth, 12)+1 + meanPeakMonth <- rSW2utils::circ_mean( + x = maxMonth, + int = 12, + type = "ZeroPlus2Pi" + ) + duration <- 1 + rSW2utils::circ_range( + x = maxMonth, + int = 12 + ) resMeans[nv:(nv+1)] <- c(meanPeakMonth, duration) #just in case we get more then one month nv <- nv+2 @@ -2069,8 +2187,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", 
"input_Phenology") - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) - monthly.temp <- tapply(temp.mo$mean, simTime2$month_ForEachUsedMonth, mean) #get mean monthly temp + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) + monthly.temp <- tapply(temp.mo$mean, simTime2[[itime]]$month_ForEachUsedMonth, mean) #get mean monthly temp Months_Above_Threshold <- which(monthly.temp > opt_sim[["growseason_Tlimit_C"]]) #get months above threshold if (i_SWRunInformation$Y_WGS84 < 0) { #check for Southern Hemi monthly.temp <- c(monthly.temp[7:12], monthly.temp[1:6]) #rearrange temp @@ -2132,13 +2250,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, print_debug(opt_verbosity, tag_simpidfid, "aggregating", "input_CO2Effects") if (!exists("co2effects.yr")) { - co2effects.yr <- get_CO2effects_yr(runDataSC, isim_time) + co2effects.yr <- get_CO2effects_yr(runDataSC, isim_time[[itime]]) } nv_add <- ncol(co2effects.yr[["val"]]) nv_new <- nv + nv_add resMeans[nv:(nv_new - 1)] <- .colMeans(co2effects.yr[["val"]], - m = isim_time$no.useyr, n = nv_add) + m = isim_time[[itime]]$no.useyr, n = nv_add) resSDs[nv:(nv_new - 1)] <- apply(co2effects.yr[["val"]], MARGIN = 2, FUN = stats::sd) nv <- nv_new @@ -2151,7 +2269,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyTemp"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyTemp") - if (!exists("temp.yr")) temp.yr <- get_Temp_yr(runDataSC, isim_time) + if (!exists("temp.yr")) temp.yr <- get_Temp_yr(runDataSC, isim_time[[itime]]) resMeans[nv] <- mean(temp.yr$mean) resSDs[nv] <- stats::sd(temp.yr$mean) @@ -2164,8 +2282,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyPPT"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyPPT") - if (!exists("prcp.yr")) prcp.yr <- 
get_PPT_yr(runDataSC, isim_time) - if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) + if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time[[itime]]) resMeans[nv] <- mean(prcp.yr$ppt) resSDs[nv] <- stats::sd(prcp.yr$ppt) @@ -2177,15 +2295,15 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(opt_agg[["use_doy_range"]])) { - dailyrange <- if(length(idx <- grep("yearlyPPT", names(simTime2))) > 1) { - simTime2[[idx]] + dailyrange <- if(length(idx <- grep("yearlyPPT", names(simTime2[[itime]]))) > 1) { + simTime2[[itime]][[idx]] } else { - simTime2[[pmatch("doy_NSadj_default_", names(simTime2))]] + simTime2[[itime]][[pmatch("doy_NSadj_default_", names(simTime2[[itime]]))]] } - yearlyPPT_doyRange <- tapply(prcp.dy$ppt[dailyrange], simTime2$year_ForEachUsedDay_NSadj[dailyrange], sum) + yearlyPPT_doyRange <- tapply(prcp.dy$ppt[dailyrange], simTime2[[itime]]$year_ForEachUsedDay_NSadj[dailyrange], sum) snowofppt_doyRange<- prcp.dy$snowfall[dailyrange]/prcp.dy$ppt[dailyrange] - snowofppt_doyRange <- tapply(snowofppt_doyRange, simTime2$year_ForEachUsedDay_NSadj[dailyrange], mean, na.rm=TRUE) + snowofppt_doyRange <- tapply(snowofppt_doyRange, simTime2[[itime]]$year_ForEachUsedDay_NSadj[dailyrange], mean, na.rm=TRUE) resMeans[nv] <- mean(yearlyPPT_doyRange) resSDs[nv] <- stats::sd(yearlyPPT_doyRange) @@ -2203,13 +2321,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySnowpack"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySnowpack") - if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time) - if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time) - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) + if 
(!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time[[itime]]) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) # Fraction of rain that falls on snow rainOnSnow <- ifelse(SWE.dy$val > 0, prcp.dy$rain, 0) - rainOnSnow <- as.matrix(tapply(rainOnSnow, simTime2$year_ForEachUsedDay, sum)) + rainOnSnow <- as.matrix(tapply(rainOnSnow, simTime2[[itime]]$year_ForEachUsedDay, sum)) rainOnSnow <- rainOnSnow / prcp.yr$ppt resMeans[nv] <- mean(rainOnSnow, na.rm = TRUE) @@ -2220,12 +2338,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, #10 #daily snowpack: adjust_NorthSouth - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) - if (!exists("wateryears")) wateryears <- simTime2$year_ForEachUsedDay_NSadj_WaterYearAdj + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) + if (!exists("wateryears")) wateryears <- simTime2[[itime]]$year_ForEachUsedDay_NSadj_WaterYearAdj wateryearsN <- length(unique(wateryears)) if (sum(SWE.dy$val) > 0 && wateryearsN - 2 > 0) { - temp <- simTime2$doy_ForEachUsedDay[1] == simTime2$doy_ForEachUsedDay_NSadj[1] + temp <- simTime2[[itime]]$doy_ForEachUsedDay[1] == simTime2[[itime]]$doy_ForEachUsedDay_NSadj[1] adjDays <- if (temp) {365 - 273} else -91 res.snow <- matrix(data = 0, nrow = wateryearsN - 2, ncol = 9, byrow = TRUE) @@ -2268,13 +2386,30 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nvnew <- nv + 7 if (nrow(res.snow) > 1) { resMeans[nv:nvnew] <- c( - apply(res.snow[, 2:4], 2, circ_mean, int = 365, na.rm = TRUE), + apply(res.snow[, 2:4], 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi", + na.rm = TRUE + ), apply(res.snow[, 5:7], 2, mean, na.rm = TRUE), - apply(res.snow[, 8:9], 2, circ_mean, int = 365, na.rm = TRUE)) + apply(res.snow[, 8:9], 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi", + na.rm = TRUE + ) + ) + resSDs[nv:nvnew] <- c( - apply(res.snow[, 2:4], 2, circ_sd, int = 365, na.rm 
= TRUE), + apply(res.snow[, 2:4], 2, rSW2utils::circ_sd, + int = 365, + na.rm = TRUE + ), apply(res.snow[, 5:7], 2, stats::sd, na.rm = TRUE), - apply(res.snow[, 8:9], 2, circ_sd, int = 365, na.rm = TRUE)) + apply(res.snow[, 8:9], 2, rSW2utils::circ_sd, + int = 365, + na.rm = TRUE + ) + ) } else { resMeans[nv:nvnew] <- res.snow[1, -1] @@ -2286,11 +2421,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(opt_agg[["use_doy_range"]])) { #daily options - idx <- grep("doy_NSadj_dailySnowpack", names(simTime2)) + idx <- grep("doy_NSadj_dailySnowpack", names(simTime2[[itime]])) dailyrange <- if (length(idx) > 1) { - simTime2[[idx]] + simTime2[[itime]][[idx]] } else { - simTime2[[pmatch("doy_NSadj_defaultWateryear", names(simTime2))]] + simTime2[[itime]][[pmatch("doy_NSadj_defaultWateryear", names(simTime2[[itime]]))]] } wateryears.doy <- wateryears[wateryear.trim][dailyrange[wateryear.trim]] @@ -2304,10 +2439,19 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nvnew <- nv + 2 if (nrow(res.snow.doy) > 1) { resMeans[nv:nvnew] <- c( - circ_mean(res.snow.doy[, 2], int = 365, na.rm = TRUE), + rSW2utils::circ_mean( + x = res.snow.doy[, 2], + int = 365, + type = "ZeroPlus2Pi", + na.rm = TRUE + ), apply(res.snow.doy[, 3:4], 2, mean, na.rm = TRUE)) resSDs[nv:nvnew] <- c( - circ_sd(res.snow.doy[, 2], int = 365, na.rm = TRUE), + rSW2utils::circ_sd( + x = res.snow.doy[, 2], + int = 365, + na.rm = TRUE + ), apply(res.snow.doy[, 3:4], 2, stats::sd, na.rm = TRUE)) } else { @@ -2339,9 +2483,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyFrostInSnowfreePeriod"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyFrostInSnowfreePeriod") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) - if (!exists("wateryears")) wateryears <- 
simTime2$year_ForEachUsedDay_NSadj_WaterYearAdj + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) + if (!exists("wateryears")) wateryears <- simTime2[[itime]]$year_ForEachUsedDay_NSadj_WaterYearAdj # 1. Trimmed water years -- the first simulation year must be ignored wateryear.unique <- unique(wateryears) @@ -2422,10 +2566,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(opt_agg[["use_doy_range"]])) { - dailyrange <- if (length(idx <- grep("doy_NSadj_dailyFrostinSnowPeriod", names(simTime2))) > 1) { - simTime2[[idx]] + dailyrange <- if (length(idx <- grep("doy_NSadj_dailyFrostinSnowPeriod", names(simTime2[[itime]]))) > 1) { + simTime2[[itime]][[idx]] } else { - simTime2[[pmatch("doy_NSadj_defaultWateryear", names(simTime2))]] + simTime2[[itime]][[pmatch("doy_NSadj_defaultWateryear", names(simTime2[[itime]]))]] } for (iTmin in opt_agg[["Tmin_crit_C"]]) { @@ -2450,7 +2594,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyHotDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyHotDays") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) nv_add <- length(opt_agg[["Tmax_crit_C"]]) @@ -2458,14 +2602,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, matrix(rep.int(opt_agg[["Tmax_crit_C"]], length(temp.dy$max)), ncol = nv_add, byrow = TRUE) - HotDays <- matrix(NA, nrow = isim_time$no.useyr, ncol = nv_add) + HotDays <- matrix(NA, nrow = isim_time[[itime]]$no.useyr, ncol = nv_add) for (k in seq_len(nv_add)) HotDays[, k] <- tapply(dailyExcess[, k], - INDEX = simTime2$year_ForEachUsedDay, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, FUN = sum) nv_new <- nv + nv_add - resMeans[nv:(nv_new - 1)] <- 
.colMeans(HotDays, isim_time$no.useyr, nv_add) + resMeans[nv:(nv_new - 1)] <- .colMeans(HotDays, isim_time[[itime]]$no.useyr, nv_add) resSDs[nv:(nv_new - 1)] <- apply(HotDays, 2, stats::sd) nv <- nv_new @@ -2478,7 +2622,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyWarmDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyWarmDays") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) nv_add <- length(opt_agg[["Tmean_crit_C"]]) @@ -2486,14 +2630,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, matrix(rep.int(opt_agg[["Tmean_crit_C"]], length(temp.dy$mean)), ncol = nv_add, byrow = TRUE) - WarmDays <- matrix(NA, nrow = isim_time$no.useyr, ncol = nv_add) + WarmDays <- matrix(NA, nrow = isim_time[[itime]]$no.useyr, ncol = nv_add) for (k in seq_len(nv_add)) WarmDays[, k] <- tapply(dailyExcess[, k], - INDEX = simTime2$year_ForEachUsedDay, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, FUN = sum) nv_new <- nv + nv_add - resMeans[nv:(nv_new - 1)] <- .colMeans(WarmDays, isim_time$no.useyr, nv_add) + resMeans[nv:(nv_new - 1)] <- .colMeans(WarmDays, isim_time[[itime]]$no.useyr, nv_add) resSDs[nv:(nv_new - 1)] <- apply(WarmDays, 2, stats::sd) nv <- nv_new @@ -2506,7 +2650,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyColdDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyColdDays") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) nv_add <- length(opt_agg[["Tmin_crit_C"]]) @@ -2514,14 +2658,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, matrix(rep.int(opt_agg[["Tmin_crit_C"]], length(temp.dy$surface)), ncol = nv_add, 
byrow = TRUE) - ColdDays <- matrix(NA, nrow = isim_time$no.useyr, ncol = nv_add) + ColdDays <- matrix(NA, nrow = isim_time[[itime]]$no.useyr, ncol = nv_add) for (k in seq_len(nv_add)) ColdDays[, k] <- tapply(dailyExcess[, k], - INDEX = simTime2$year_ForEachUsedDay, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, FUN = sum) nv_new <- nv + nv_add - resMeans[nv:(nv_new - 1)] <- .colMeans(ColdDays, isim_time$no.useyr, nv_add) + resMeans[nv:(nv_new - 1)] <- .colMeans(ColdDays, isim_time[[itime]]$no.useyr, nv_add) resSDs[nv:(nv_new - 1)] <- apply(ColdDays, 2, stats::sd) nv <- nv_new @@ -2534,7 +2678,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyCoolDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyCoolDays") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) nv_add <- length(opt_agg[["Tmean_crit_C"]]) @@ -2542,14 +2686,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, matrix(rep.int(opt_agg[["Tmean_crit_C"]], length(temp.dy$surface)), ncol = nv_add, byrow = TRUE) - CoolDays <- matrix(NA, nrow = isim_time$no.useyr, ncol = nv_add) + CoolDays <- matrix(NA, nrow = isim_time[[itime]]$no.useyr, ncol = nv_add) for (k in seq_len(nv_add)) CoolDays[, k] <- tapply(dailyExcess[, k], - INDEX = simTime2$year_ForEachUsedDay, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, FUN = sum) nv_new <- nv + nv_add - resMeans[nv:(nv_new - 1)] <- .colMeans(CoolDays, isim_time$no.useyr, nv_add) + resMeans[nv:(nv_new - 1)] <- .colMeans(CoolDays, isim_time[[itime]]$no.useyr, nv_add) resSDs[nv:(nv_new - 1)] <- apply(CoolDays, 2, stats::sd) nv <- nv_new @@ -2562,13 +2706,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyPrecipitationEventSizeDistribution"]])) { nv0 <- nv print_debug(opt_verbosity, 
tag_simpidfid, "aggregating", "dailyPrecipitationEventSizeDistribution") - if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time) + if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time[[itime]]) #prcp-event sizes in bins ppt_sizes <- tabulate_values_in_bins( x = prcp.dy$ppt, method = "values", vcrit = 0, bins = opt_agg[["bin_prcp_mm"]], nbins = 7, - simTime = isim_time, simTime2 = simTime2) + simTime = isim_time[[itime]], simTime2 = simTime2[[itime]]) resMeans[nv] <- mean(ppt_sizes$eventsPerYear) resSDs[nv] <- stats::sd(ppt_sizes$eventsPerYear) @@ -2585,7 +2729,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyPET"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyPET") - if (!exists("PET.yr")) PET.yr <- get_PET_yr(runDataSC, isim_time) + if (!exists("PET.yr")) PET.yr <- get_PET_yr(runDataSC, isim_time[[itime]]) resMeans[nv] <- mean(PET.yr$val) resSDs[nv] <- stats::sd(PET.yr$val) @@ -2599,24 +2743,24 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySeasonalityIndices"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySeasonalityIndices") - if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.mo")) swpmatric.mo <- get_SWPmatric_aggL(vwcmatric.mo, texture, sand, clay) - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) - if (!exists("prcp.mo")) prcp.mo <- 
get_PPT_mo(runDataSC, isim_time) - if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time) + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) + if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time[[itime]]) + if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time[[itime]]) #in case var(ppt or swp) == 0 => cor is undefined: exclude those years - temp <- by(data.frame(PET.mo$val, swpmatric.mo$top), simTime2$yearno_ForEachUsedMonth, cor2) + temp <- by(data.frame(PET.mo$val, swpmatric.mo$top), simTime2[[itime]]$yearno_ForEachUsedMonth, rSW2utils::cor2) resMeans[nv] <- mean(temp, na.rm = TRUE) resSDs[nv] <- stats::sd(temp, na.rm = TRUE) if (length(bottomL) > 0 && !identical(bottomL, 0)) { - temp <- by(data.frame(PET.mo$val, swpmatric.mo$bottom), simTime2$yearno_ForEachUsedMonth, cor2) + temp <- by(data.frame(PET.mo$val, swpmatric.mo$bottom), simTime2[[itime]]$yearno_ForEachUsedMonth, rSW2utils::cor2) resMeans[nv+1] <- mean(temp, na.rm = TRUE) resSDs[nv+1] <- stats::sd(temp, na.rm = TRUE) } - temp <- by(data.frame(temp.mo$mean, prcp.mo$ppt), simTime2$yearno_ForEachUsedMonth, cor2) + temp <- by(data.frame(temp.mo$mean, prcp.mo$ppt), simTime2[[itime]]$yearno_ForEachUsedMonth, rSW2utils::cor2) resMeans[nv+2] <- mean(temp, na.rm = TRUE) resSDs[nv+2] <- stats::sd(temp, na.rm = TRUE) @@ -2631,14 +2775,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlymonthlyTemperateDrylandIndices"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlymonthlyTemperateDrylandIndices") - if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time) - if (!exists("PET.yr")) PET.yr <- get_PET_yr(runDataSC, isim_time) - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) + if (!exists("PET.yr")) PET.yr <- get_PET_yr(runDataSC, 
isim_time[[itime]]) + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) di.ts <- calc_drylandindices(annualPPT = prcp.yr$ppt, annualPET = PET.yr$val, monthlyTemp = temp.mo$mean) - meanmonthlyTemp <- tapply(temp.mo$mean, simTime2$month_ForEachUsedMonth, mean) + meanmonthlyTemp <- tapply(temp.mo$mean, simTime2[[itime]]$month_ForEachUsedMonth, mean) di.normals <- calc_drylandindices(annualPPT = mean(prcp.yr$ppt), annualPET = mean(PET.yr$val), monthlyTemp = meanmonthlyTemp) @@ -2657,7 +2801,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyDryWetPeriods"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyDryWetPeriods") - if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) temp.rle <- rle(as.vector(sign(prcp.yr$ppt - mean(prcp.yr$ppt)))) resMeans[nv:(nv+1)] <- c(stats::quantile(temp.rle$lengths[temp.rle$values == -1], probs = 0.9, type = 7), stats::quantile(temp.rle$lengths[temp.rle$values == 1], probs = 0.9, type = 7)) @@ -2672,17 +2816,17 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyWeatherGeneratorCharacteristics"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyWeatherGeneratorCharacteristics") - if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time) - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time[[itime]]) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) # until rSFSW2 v1.4.4: dws, dds, and tv were calculated as mean of all months # pooled across years # now: they are aggregated across years on the means for each month x year - dws <- daily_spells_permonth(prcp.dy$ppt > 0, simTime2) # wet spells - dds <- 
daily_spells_permonth(prcp.dy$ppt < SFSW2_glovars[["tol"]], simTime2) # dry spells + dws <- daily_spells_permonth(prcp.dy$ppt > 0, simTime2[[itime]]) # wet spells + dds <- daily_spells_permonth(prcp.dy$ppt < SFSW2_glovars[["tol"]], simTime2[[itime]]) # dry spells temp <- tapply(temp.dy$mean, - simTime2$month_ForEachUsedDay_NSadj + 100 * simTime2$year_ForEachUsedDay_NSadj, + simTime2[[itime]]$month_ForEachUsedDay_NSadj + 100 * simTime2[[itime]]$year_ForEachUsedDay_NSadj, stats::sd) tv <- matrix(temp, nrow = 12) @@ -2703,13 +2847,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyPrecipitationFreeEventDistribution"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyPrecipitationFreeEventDistribution") - if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time) + if (!exists("prcp.dy")) prcp.dy <- get_PPT_dy(runDataSC, isim_time[[itime]]) #duration of prcp-free days in bins ppt_free <- tabulate_values_in_bins( x = prcp.dy$ppt <= SFSW2_glovars[["tol"]], method = "duration", bins = opt_agg[["bin_prcpfree_days"]], nbins = 4, - simTime = isim_time, simTime2 = simTime2) + simTime = isim_time[[itime]], simTime2 = simTime2[[itime]]) resMeans[nv] <- mean(ppt_free$eventsPerYear) resSDs[nv] <- stats::sd(ppt_free$eventsPerYear) @@ -2727,8 +2871,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySPEIEvents") #standardized precipitation-evapotranspiration index, SPEI: Vicente-Serrano, S.M., Beguer, S., Lorenzo-Lacruz, J., Camarero, J.s.J., Lopez-Moreno, J.I., Azorin-Molina, C., Revuelto, J.s., Morn-Tejeda, E. & Sanchez-Lorenzo, A. (2012) Performance of Drought Indices for Ecological, Agricultural, and Hydrological Applications. Earth Interactions, 16, 1-27. 
- if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time) - if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time) + if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time[[itime]]) + if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time[[itime]]) #n_variables is set for 4*4*3 with length(binSPEI_m) == 4 && length(probs) == 3 binSPEI_m <- c(1, 12, 24, 48) #months @@ -2770,31 +2914,31 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyPlantGrowthControls"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyPlantGrowthControls") - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) - if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time) - if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time) + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) + if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time[[itime]]) + if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time[[itime]]) - DayNumber_ForEachUsedMonth <- rle(simTime2$month_ForEachUsedDay)$lengths - DayNumber_ForEachUsedYear <- rle(simTime2$year_ForEachUsedDay)$lengths + DayNumber_ForEachUsedMonth <- rle(simTime2[[itime]]$month_ForEachUsedDay)$lengths + DayNumber_ForEachUsedYear <- rle(simTime2[[itime]]$year_ForEachUsedDay)$lengths #temperature control temp <- ifelse(temp.mo$min > 5, 1, ifelse(temp.mo$min < -5, 0, (5 + temp.mo$min) / 10)) * DayNumber_ForEachUsedMonth - control_temp <- tapply(temp, simTime2$yearno_ForEachUsedMonth, sum) / DayNumber_ForEachUsedYear + control_temp <- tapply(temp, simTime2[[itime]]$yearno_ForEachUsedMonth, sum) / DayNumber_ForEachUsedYear #moisture control aridity <- (prcp.mo$rain + prcp.mo$snowmelt) / PET.mo$val temp <- ifelse(aridity > 0.75, 1, ifelse(aridity < 0, 0, aridity / 0.75)) * DayNumber_ForEachUsedMonth - control_water <- tapply(temp, 
simTime2$yearno_ForEachUsedMonth, sum) / DayNumber_ForEachUsedYear + control_water <- tapply(temp, simTime2[[itime]]$yearno_ForEachUsedMonth, sum) / DayNumber_ForEachUsedYear #radiation control cloudiness <- rSOILWAT2::swCloud_SkyCover(swRunScenariosData[[sc]]) - cloudiness <- rep(cloudiness, times = isim_time$no.useyr) + cloudiness <- rep(cloudiness, times = isim_time[[itime]]$no.useyr) temp <- (1 - ifelse(cloudiness < 10, 0, (cloudiness - 10) / 100 * 0.5)) * DayNumber_ForEachUsedMonth - control_radiation <- tapply(temp, simTime2$yearno_ForEachUsedMonth, sum) / DayNumber_ForEachUsedYear + control_radiation <- tapply(temp, simTime2[[itime]]$yearno_ForEachUsedMonth, sum) / DayNumber_ForEachUsedYear temp <- data.frame(control_temp, control_water, control_radiation) resMeans[nv:(nv+2)] <- apply(temp, 2, mean, na.rm = TRUE) @@ -2810,9 +2954,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyC4_TempVar"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyC4_TempVar") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) - resMeans[nv:(nv+2)] <- (temp <- as.numeric(rSOILWAT2::sw_dailyC4_TempVar(dailyTempMin = temp.dy$min, dailyTempMean = temp.dy$mean, simTime2)))[1:3] #adjust_NorthSouth + resMeans[nv:(nv+2)] <- (temp <- as.numeric(rSOILWAT2::sw_dailyC4_TempVar(dailyTempMin = temp.dy$min, dailyTempMean = temp.dy$mean, simTime2[[itime]])))[1:3] #adjust_NorthSouth resSDs[nv:(nv+2)] <- temp[4:6] nv <- nv+3 @@ -2823,11 +2967,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyDegreeDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyDegreeDays") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) 
degday <- ifelse(temp.dy$mean > opt_agg[["Tbase_DD_C"]], temp.dy$mean - opt_agg[["Tbase_DD_C"]], 0) #degree days - temp <- tapply(degday, simTime2$year_ForEachUsedDay, sum) + temp <- tapply(degday, simTime2[[itime]]$year_ForEachUsedDay, sum) resMeans[nv] <- mean(temp) resSDs[nv] <- stats::sd(temp) @@ -2842,8 +2986,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyColdDegreeDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyColdDegreeDays") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) # Cold-degree daily mean temperatures (degree C) with snow ids <- temp.dy$mean < opt_agg[["Tbase_coldDD_C"]] @@ -2854,8 +2998,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, colddegday_snowfree <- ifelse(ids_snowfree, temp.dy$mean - opt_agg[["Tbase_coldDD_C"]], 0) # Sum of daily mean temperatures for snow/snow-free - temp <- data.frame(tapply(colddegday, simTime2$year_ForEachUsedDay, sum), - tapply(colddegday_snowfree, simTime2$year_ForEachUsedDay, sum)) + temp <- data.frame(tapply(colddegday, simTime2[[itime]]$year_ForEachUsedDay, sum), + tapply(colddegday_snowfree, simTime2[[itime]]$year_ForEachUsedDay, sum)) resMeans[nv:(nv+1)] <- apply(temp, 2, mean, na.rm = TRUE) resSDs[nv:(nv+1)] <- apply(temp, 2, stats::sd, na.rm = TRUE) @@ -2872,7 +3016,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyAET"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyAET") - if (!exists("AET.yr")) AET.yr <- get_AET_yr(runDataSC, isim_time) + if (!exists("AET.yr")) AET.yr <- get_AET_yr(runDataSC, isim_time[[itime]]) resMeans[nv] <- 
mean(AET.yr$val) resSDs[nv] <- stats::sd(AET.yr$val) @@ -2885,16 +3029,16 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyWaterBalanceFluxes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyWaterBalanceFluxes") - if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time) - if (!exists("Esurface.yr")) Esurface.yr <- get_Esurface_yr(runDataSC, isim_time) - if (!exists("intercept.yr")) intercept.yr <- get_Interception_yr(runDataSC, isim_time) - if (!exists("inf.yr")) inf.yr <- get_Inf_yr(runDataSC, isim_time) - if (!exists("runonoff.yr")) runonoff.yr <- get_RunOnOff_yr(runDataSC, isim_time) - if (!exists("transp.yr")) transp.yr <- get_Response_aggL(swof["sw_transp"], tscale = "yr", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("AET.yr")) AET.yr <- get_AET_yr(runDataSC, isim_time) - if (!exists("PET.yr")) PET.yr <- get_PET_yr(runDataSC, isim_time) - if (!exists("Esoil.yr")) Esoil.yr <- get_Response_aggL(swof["sw_evsoil"], tscale = "yr", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("deepDrain.yr")) deepDrain.yr <- get_DeepDrain_yr(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) + if (!exists("Esurface.yr")) Esurface.yr <- get_Esurface_yr(runDataSC, isim_time[[itime]]) + if (!exists("intercept.yr")) intercept.yr <- get_Interception_yr(runDataSC, isim_time[[itime]]) + if (!exists("inf.yr")) inf.yr <- get_Inf_yr(runDataSC, isim_time[[itime]]) + if (!exists("runonoff.yr")) runonoff.yr <- get_RunOnOff_yr(runDataSC, isim_time[[itime]]) + if (!exists("transp.yr")) transp.yr <- get_Response_aggL(swof["sw_transp"], tscale = "yr", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("AET.yr")) AET.yr 
<- get_AET_yr(runDataSC, isim_time[[itime]]) + if (!exists("PET.yr")) PET.yr <- get_PET_yr(runDataSC, isim_time[[itime]]) + if (!exists("Esoil.yr")) Esoil.yr <- get_Response_aggL(swof["sw_evsoil"], tscale = "yr", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("deepDrain.yr")) deepDrain.yr <- get_DeepDrain_yr(runDataSC, isim_time[[itime]]) rain_toSoil <- prcp.yr$rain - intercept.yr$sum transp.tot <- transp.yr$top + transp.yr$bottom @@ -2904,21 +3048,21 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, temp1 <- 10 * slot(slot(runDataSC, swof["sw_percolation"]), "Year") drain.topTobottom <- if (length(topL) > 1 && length(bottomL) > 0 && !identical(bottomL, 0)) { - temp1[isim_time$index.useyr, 1+DeepestTopLayer, drop = FALSE] + temp1[isim_time[[itime]]$index.useyr, 1+DeepestTopLayer, drop = FALSE] } else NA temp1 <- 10 * slot(slot(runDataSC, swof["sw_hd"]), "Year") hydred.topTobottom <- if (length(topL) > 1) { - apply(temp1[isim_time$index.useyr, 1+topL, drop = FALSE], 1, sum) + apply(temp1[isim_time[[itime]]$index.useyr, 1+topL, drop = FALSE], 1, sum) } else { - temp1[isim_time$index.useyr, 1+topL, drop = FALSE] + temp1[isim_time[[itime]]$index.useyr, 1+topL, drop = FALSE] } temp1 <- 10 * slot(slot(runDataSC, swof["sw_swcbulk"]), "Day") - index.usedyPlusOne <- if (isim_time$index.usedy[1] == 1) { #simstartyr == startyr, then (isim_time$index.usedy-1) misses first value - isim_time$index.usedy[-length(isim_time$index.usedy)]+1 + index.usedyPlusOne <- if (isim_time[[itime]]$index.usedy[1] == 1) { #simstartyr == startyr, then (isim_time[[itime]]$index.usedy-1) misses first value + isim_time[[itime]]$index.usedy[-length(isim_time[[itime]]$index.usedy)]+1 } else { - isim_time$index.usedy + isim_time[[itime]]$index.usedy } swcdyflux <- if (length(ld) > 1) { apply(temp1[index.usedyPlusOne, 2+ld], 1, sum) - @@ -2967,7 +3111,7 @@ do_OneSite <- function(i_sim, 
i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["yearlyTranspirationBySoilLayer"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "yearlyTranspirationBySoilLayer") - if (!exists("transp.yr.all")) transp.yr.all <- get_Response_aggL(swof["sw_transp"], tscale = "yrAll", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("transp.yr.all")) transp.yr.all <- get_Response_aggL(swof["sw_transp"], tscale = "yrAll", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) # aggregate across years for each soil layer and vegetation type vegtypes <- c("total", "tree", "shrub", "forb", "grass") @@ -2988,20 +3132,20 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySoilWaterPulseVsStorage"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySoilWaterPulseVsStorage") - if (!exists("inf.dy")) inf.dy <- get_Inf_dy(runDataSC, isim_time) - if (!exists("transp.dy.all")) transp.dy.all <- get_Response_aggL(swof["sw_transp"], tscale = "dyAll", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("Esoil.dy.all")) Esoil.dy.all <- get_Response_aggL(swof["sw_evsoil"], tscale = "dyAll", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("deepDrain.dy")) deepDrain.dy <- get_DeepDrain_dy(runDataSC, isim_time) + if (!exists("inf.dy")) inf.dy <- get_Inf_dy(runDataSC, isim_time[[itime]]) + if (!exists("transp.dy.all")) transp.dy.all <- get_Response_aggL(swof["sw_transp"], tscale = "dyAll", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("Esoil.dy.all")) Esoil.dy.all <- get_Response_aggL(swof["sw_evsoil"], tscale = 
"dyAll", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("deepDrain.dy")) deepDrain.dy <- get_DeepDrain_dy(runDataSC, isim_time[[itime]]) percolation <- if (soilLayers_N > 1) { - 10 * slot(slot(runDataSC, swof["sw_percolation"]), "Day")[isim_time$index.usedy, 2 + ld[-soilLayers_N]] + 10 * slot(slot(runDataSC, swof["sw_percolation"]), "Day")[isim_time[[itime]]$index.usedy, 2 + ld[-soilLayers_N]] } else { - rep(0, isim_time$no.usedy) + rep(0, isim_time[[itime]]$no.usedy) } - hydred <- 10 * slot(slot(runDataSC, swof["sw_hd"]), "Day")[isim_time$index.usedy, 2 + ld] + hydred <- 10 * slot(slot(runDataSC, swof["sw_hd"]), "Day")[isim_time[[itime]]$index.usedy, 2 + ld] # Water balance - outputs_by_layer <- inputs_by_layer <- matrix(0, nrow = isim_time$no.usedy, ncol = soilLayers_N, + outputs_by_layer <- inputs_by_layer <- matrix(0, nrow = isim_time[[itime]]$no.usedy, ncol = soilLayers_N, dimnames = list(NULL, paste0("total_Lyr_", ld))) # Inputs: infiltration + received hydraulic redistribution + received percolation inputs_by_layer[, 1] <- inputs_by_layer[, 1] + inf.dy$inf @@ -3014,13 +3158,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (ncol(Esoil.dy.all$val) > 2) { itemp <- seq_len(ncol(Esoil.dy.all$val) - 2) outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + - Esoil.dy.all$val[isim_time$index.usedy, -(1:2)] + Esoil.dy.all$val[isim_time[[itime]]$index.usedy, -(1:2)] } itemp <- grepl("transp_total", colnames(transp.dy.all$val)) if (any(itemp)) { itemp <- seq_len(sum(itemp)) outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + - transp.dy.all$val[isim_time$index.usedy, itemp] + transp.dy.all$val[isim_time[[itime]]$index.usedy, itemp] } itemp <- ncol(outputs_by_layer) outputs_by_layer[, itemp] <- outputs_by_layer[, itemp] + deepDrain.dy$val @@ -3033,7 +3177,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # balance 
balance <- inputs_by_layer - outputs_by_layer extraction <- balance < 0 - storage_use <- by(cbind(extraction, outputs_by_layer), INDICES = simTime2$year_ForEachUsedDay_NSadj, FUN = function(x) { + storage_use <- by(cbind(extraction, outputs_by_layer), INDICES = simTime2[[itime]]$year_ForEachUsedDay_NSadj, FUN = function(x) { res1 <- apply(x[, ld, drop = FALSE], MARGIN = 2, FUN = rle) res2 <- apply(x[, soilLayers_N + ld, drop = FALSE], MARGIN = 2, FUN = function(y) list(out = y)) utils::modifyList(res1, res2) @@ -3050,7 +3194,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } })) if (!is.matrix(extraction_duration_days)) { - extraction_duration_days <- matrix(extraction_duration_days, nrow = soilLayers_N, ncol = isim_time$no.useyr) + extraction_duration_days <- matrix(extraction_duration_days, nrow = soilLayers_N, ncol = isim_time[[itime]]$no.useyr) } # median annual sum of all extracted water during extracting spells for each layer and each year @@ -3077,7 +3221,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } })) if (!is.matrix(extraction_summed_mm)) { - extraction_summed_mm <- matrix(extraction_summed_mm, nrow = soilLayers_N, ncol = isim_time$no.useyr) + extraction_summed_mm <- matrix(extraction_summed_mm, nrow = soilLayers_N, ncol = isim_time[[itime]]$no.useyr) } # aggregate across years for each soil layer @@ -3099,10 +3243,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyTranspirationExtremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyTranspirationExtremes") - if (!exists("transp.dy")) transp.dy <- get_Response_aggL(swof["sw_transp"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("transp.dy")) transp.dy <- get_Response_aggL(swof["sw_transp"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = 
isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) temp <- transp.dy$top + transp.dy$bottom - temp <- tapply(temp, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(temp, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes <- matrix(unlist(temp), ncol = 4, byrow = TRUE) temp <- extremes[, 1:2, drop = FALSE] @@ -3111,8 +3255,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv <- nv+2 temp <- extremes[, 3:4, drop = FALSE] - resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_mean, int = 365) - resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_sd, int = 365) + resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ) + resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365 + ) nv <- nv+2 rm(extremes) @@ -3124,11 +3273,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyTotalEvaporationExtremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyTotalEvaporationExtremes") - if (!exists("Esoil.dy")) Esoil.dy <- get_Response_aggL(swof["sw_evsoil"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("Esurface.dy")) Esurface.dy <- get_Esurface_dy(runDataSC, isim_time) + if (!exists("Esoil.dy")) Esoil.dy <- get_Response_aggL(swof["sw_evsoil"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("Esurface.dy")) Esurface.dy <- get_Esurface_dy(runDataSC, isim_time[[itime]]) temp <- Esoil.dy$top + Esoil.dy$bottom + Esurface.dy$sum - temp <- tapply(temp, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(temp, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes <- matrix(unlist(temp), ncol = 4, byrow = 
TRUE) temp <- extremes[, 1:2, drop = FALSE] @@ -3137,8 +3286,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv <- nv+2 temp <- extremes[, 3:4, drop = FALSE] - resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_mean, int = 365) - resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_sd, int = 365) + resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ) + resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365 + ) nv <- nv+2 rm(extremes) @@ -3150,9 +3304,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyDrainageExtremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyDrainageExtremes") - if (!exists("deepDrain.dy")) deepDrain.dy <- get_DeepDrain_dy(runDataSC, isim_time) + if (!exists("deepDrain.dy")) deepDrain.dy <- get_DeepDrain_dy(runDataSC, isim_time[[itime]]) - temp <- tapply(deepDrain.dy$val, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(deepDrain.dy$val, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes <- matrix(unlist(temp), ncol = 4, byrow = TRUE) temp <- extremes[, 1:2, drop = FALSE] @@ -3161,8 +3315,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv <- nv+2 temp <- extremes[, 3:4, drop = FALSE] - resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_mean, int = 365) - resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_sd, int = 365) + resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ) + resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365 + ) nv <- nv+2 rm(extremes) @@ -3174,9 +3333,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyInfiltrationExtremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", 
"dailyInfiltrationExtremes") - if (!exists("inf.dy")) inf.dy <- get_Inf_dy(runDataSC, isim_time) + if (!exists("inf.dy")) inf.dy <- get_Inf_dy(runDataSC, isim_time[[itime]]) - temp <- tapply(inf.dy$inf, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(inf.dy$inf, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes <- matrix(unlist(temp), ncol = 4, byrow = TRUE) temp <- extremes[, 1:2, drop = FALSE] @@ -3185,8 +3344,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv <- nv+2 temp <- extremes[, 3:4, drop = FALSE] - resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_mean, int = 365) - resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_sd, int = 365) + resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ) + resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365 + ) nv <- nv+2 rm(extremes) @@ -3198,9 +3362,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyAETExtremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyAETExtremes") - if (!exists("AET.dy")) AET.dy <- get_AET_dy(runDataSC, isim_time) + if (!exists("AET.dy")) AET.dy <- get_AET_dy(runDataSC, isim_time[[itime]]) - temp <- tapply(AET.dy$val, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(AET.dy$val, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes <- matrix(unlist(temp), ncol = 4, byrow = TRUE) temp <- extremes[, 1:2, drop = FALSE] @@ -3209,8 +3373,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv <- nv+2 temp <- extremes[, 3:4, drop = FALSE] - resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_mean, int = 365) - resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, circ_sd, int = 365) + resMeans[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ) + 
resSDs[nv:(nv+1)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365 + ) nv <- nv+2 rm(extremes) @@ -3222,14 +3391,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySWPextremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySWPextremes") - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) - extremes <- matrix(NA, nrow = isim_time$no.useyr, ncol = 2 * 4) - temp <- tapply(swpmatric.dy$top, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + extremes <- matrix(NA, nrow = isim_time[[itime]]$no.useyr, ncol = 2 * 4) + temp <- tapply(swpmatric.dy$top, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes[, 1:4] <- matrix(unlist(temp), ncol = 4, byrow = TRUE) if (length(bottomL) > 0 && !identical(bottomL, 0)) { - temp <- tapply(swpmatric.dy$bottom, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(swpmatric.dy$bottom, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes[, 5:8] <- matrix(unlist(temp), ncol = 4, byrow = TRUE) } @@ -3239,8 +3408,15 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv <- nv+4 temp <- extremes[, c(3:4, 7:8), drop = FALSE] - resMeans[nv:(nv+3)] <- apply(temp, MARGIN = 2, circ_mean, int = 365, na.rm = TRUE) - resSDs[nv:(nv+3)] <- apply(temp, MARGIN = 2, circ_sd, int = 365, na.rm = TRUE) + 
resMeans[nv:(nv+3)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi", + na.rm = TRUE + ) + resSDs[nv:(nv+3)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365, + na.rm = TRUE + ) nv <- nv+4 rm(extremes) @@ -3252,17 +3428,17 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyRechargeExtremes"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyRechargeExtremes") - if (!exists("swcbulk.dy")) swcbulk.dy <- get_Response_aggL(swof["sw_swcbulk"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("swcbulk.dy")) swcbulk.dy <- get_Response_aggL(swof["sw_swcbulk"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) recharge.dy <- NULL recharge.dy$top <- swcbulk.dy$top / (rSOILWAT2::SWPtoVWC(-0.033, texture$sand.top, texture$clay.top) * 10 * sum(layers_width[topL])) - extremes <- matrix(NA, nrow = isim_time$no.useyr, ncol = 2 * 4) - temp <- tapply(recharge.dy$top, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + extremes <- matrix(NA, nrow = isim_time[[itime]]$no.useyr, ncol = 2 * 4) + temp <- tapply(recharge.dy$top, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes[, 1:4] <- matrix(unlist(temp), ncol = 4, byrow = TRUE) if (length(bottomL) > 0 && !identical(bottomL, 0)) { recharge.dy$bottom <- swcbulk.dy$bottom / (rSOILWAT2::SWPtoVWC(-0.033, texture$sand.bottom, texture$clay.bottom) * 10 * sum(layers_width[bottomL])) - temp <- tapply(recharge.dy$bottom, simTime2$year_ForEachUsedDay, extreme_values_and_doys) + temp <- tapply(recharge.dy$bottom, simTime2[[itime]]$year_ForEachUsedDay, extreme_values_and_doys) extremes[, 5:8] <- matrix(unlist(temp), ncol = 4, byrow = TRUE) } @@ -3272,8 +3448,15 @@ do_OneSite <- function(i_sim, i_SWRunInformation, 
i_sw_input_soillayers, nv <- nv+4 temp <- extremes[, c(3:4, 7:8), drop = FALSE] - resMeans[nv:(nv+3)] <- apply(temp, MARGIN = 2, circ_mean, int = 365, na.rm = TRUE) - resSDs[nv:(nv+3)] <- apply(temp, MARGIN = 2, circ_sd, int = 365, na.rm = TRUE) + resMeans[nv:(nv+3)] <- apply(temp, MARGIN = 2, rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi", + na.rm = TRUE + ) + resSDs[nv:(nv+3)] <- apply(temp, MARGIN = 2, rSW2utils::circ_sd, + int = 365, + na.rm = TRUE + ) nv <- nv+4 rm(recharge.dy, extremes) @@ -3290,15 +3473,15 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyNRCS_SoilMoistureTemperatureRegimes") - if (!exists("soiltemp.dy.all")) soiltemp.dy.all <- get_Response_aggL(swof["sw_soiltemp"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("soiltemp.yr.all")) soiltemp.yr.all <- get_Response_aggL(swof["sw_soiltemp"], tscale = "yrAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("soiltemp.mo.all")) soiltemp.mo.all <- get_Response_aggL(swof["sw_soiltemp"], tscale = "moAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("soiltemp.dy.all")) soiltemp.dy.all <- get_Response_aggL(swof["sw_soiltemp"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if 
(!exists("soiltemp.yr.all")) soiltemp.yr.all <- get_Response_aggL(swof["sw_soiltemp"], tscale = "yrAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("soiltemp.mo.all")) soiltemp.mo.all <- get_Response_aggL(swof["sw_soiltemp"], tscale = "moAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) - if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time) - if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time) - if (!exists("pet.mo")) pet.mo <- get_PET_mo(runDataSC, isim_time) - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) + if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time[[itime]]) + if (!exists("pet.mo")) pet.mo <- get_PET_mo(runDataSC, isim_time[[itime]]) + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) sim_agg <- list( soiltemp.dy.all = soiltemp.dy.all, soiltemp.yr.all = soiltemp.yr.all, @@ -3316,7 +3499,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, soil_TOC = soil_TOC, has_soil_temperature = isTRUE(!is_SOILTEMP_INSTABLE[sc]), opt_SMTR = opt_agg[["NRCS_SMTRs"]], - simTime1 = isim_time, simTime2 = simTime2, + simTime1 = isim_time[[itime]], simTime2 = simTime2[[itime]], verbose = opt_verbosity[["verbose"]], msg_tag = tag_simpidfid) if 
(isTRUE(prj_todos[["aon"]][["dailyNRCS_SoilMoistureTemperatureRegimes_Intermediates"]])) { @@ -3368,7 +3551,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyNRCS_Chambers2014_ResilienceResistance") - if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time) + if (!exists("prcp.yr")) prcp.yr <- get_PPT_yr(runDataSC, isim_time[[itime]]) RR <- rSOILWAT2::calc_RRs_Chambers2014(Tregime, Sregime, MAP_mm = mean(prcp.yr$ppt)) @@ -3406,9 +3589,9 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyWetDegreeDays"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyWetDegreeDays") - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) degday <- ifelse(temp.dy$mean > opt_agg[["Tbase_DD_C"]], temp.dy$mean - opt_agg[["Tbase_DD_C"]], 0) #degree days @@ -3428,7 +3611,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, wetdegday.any <- ifelse(wet.top + wet.bottom > 0, degday, 0) temp <- lapply(list(wetdegday.top, wetdegday.bottom, wetdegday.any), - function(x) tapply(x, simTime2$year_ForEachUsedDay, sum)) + function(x) tapply(x, simTime2[[itime]]$year_ForEachUsedDay, 
sum)) resMeans[(nv+3*(icrit-1)):(nv+3*(icrit-1)+2)] <- vapply(temp, mean, 1) resSDs[(nv+3*(icrit-1)):(nv+3*(icrit-1)+2)] <- vapply(temp, stats::sd, 1) @@ -3444,10 +3627,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyThermalDrynessStartEnd"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyThermalDrynessStartEnd") - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) - adjDays <- simTime2$doy_ForEachUsedDay_NSadj[1] - simTime2$doy_ForEachUsedDay[1] + adjDays <- simTime2[[itime]]$doy_ForEachUsedDay_NSadj[1] - simTime2[[itime]]$doy_ForEachUsedDay[1] thermal <- temp.dy$mean > 0 @@ -3460,15 +3643,33 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } temp <- stats::aggregate(cbind(thermaldry.top, thermaldry.bottom), - by = list(simTime2$year_ForEachUsedDay_NSadj), - FUN = function(x) max_duration(x, return_doys = TRUE)) + by = list(simTime2[[itime]]$year_ForEachUsedDay_NSadj), + FUN = function(x) rSW2utils::max_duration(x, return_doys = TRUE)) resMeans[nv:(nv+3)] <- c( - apply(temp$thermaldry.top[, 2:3, drop = FALSE], 2, circ_mean, int = 365), - apply(temp$thermaldry.bottom[, 2:3, drop = FALSE], 2, circ_mean, int = 365)) - adjDays + 
apply(temp$thermaldry.top[, 2:3, drop = FALSE], 2, + rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ), + apply(temp$thermaldry.bottom[, 2:3, drop = FALSE], 2, + rSW2utils::circ_mean, + int = 365, + type = "ZeroPlus2Pi" + ) + ) - adjDays + resSDs[nv:(nv+3)] <- c( - apply(temp$thermaldry.top[, 2:3, drop = FALSE], 2, circ_sd, int = 365), - apply(temp$thermaldry.bottom[, 2:3, drop = FALSE], 2, circ_sd, int = 365)) + apply(temp$thermaldry.top[, 2:3, drop = FALSE], 2, + rSW2utils::circ_sd, + int = 365 + ), + apply(temp$thermaldry.bottom[, 2:3, drop = FALSE], 2, + rSW2utils::circ_sd, + int = 365 + ) + ) + nv <- nv+4 } @@ -3483,11 +3684,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyThermalSWPConditionCount"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyThermalSWPConditionCount") - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = 
simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) Tcrit_N <- length(opt_agg[["Tmean_crit_C"]]) @@ -3499,8 +3700,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, ncol = opt_agg[["SWPcrit_N"]], byrow = TRUE) n_conds <- 6L conds <- list() # max length(conds) == n_conds - conds[["DryAll"]] <- apply(swpmatric.dy.all$val[isim_time$index.usedy, -(1:2), drop = FALSE], 1, max) < dryness - conds[["WetAll"]] <- apply(swpmatric.dy.all$val[isim_time$index.usedy, -(1:2), drop = FALSE], 1, min) >= dryness + conds[["DryAll"]] <- apply(swpmatric.dy.all$val[isim_time[[itime]]$index.usedy, -(1:2), drop = FALSE], 1, max) < dryness + conds[["WetAll"]] <- apply(swpmatric.dy.all$val[isim_time[[itime]]$index.usedy, -(1:2), drop = FALSE], 1, min) >= dryness conds[["DryTop"]] <- swpmatric.dy$top < dryness conds[["WetTop"]] <- !conds[["DryTop"]] if (length(bottomL) > 0 && !identical(bottomL, 0)) { @@ -3509,12 +3710,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } day_count <- array(NA, - dim = c(isim_time$no.useyr, Tcrit_N, opt_agg[["SWPcrit_N"]], n_conds)) + dim = c(isim_time[[itime]]$no.useyr, Tcrit_N, opt_agg[["SWPcrit_N"]], n_conds)) for (d2 in seq_len(Tcrit_N)) for (d4 in seq_along(conds)) for (d3 in seq_along(opt_agg[["SWPcrit_MPa"]])) day_count[, d2, d3, d4] <- tapply(thermal[, d2] & conds[[d4]][, d3], - INDEX = simTime2$year_ForEachUsedDay, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, FUN = sum) nv_new <- nv + Tcrit_N * opt_agg[["SWPcrit_N"]] * n_conds resMeans[nv:(nv_new - 1)] <- as.vector(colMeans(day_count)) @@ -3530,17 +3731,17 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySWPdryness"]])) { nv0 <- nv 
print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySWPdryness") - if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.mo")) swpmatric.mo <- get_SWPmatric_aggL(vwcmatric.mo, texture, sand, clay) - adjMonths <- ifelse(simTime2$month_ForEachUsedMonth[1] == simTime2$month_ForEachUsedMonth_NSadj[1], 0, 6) + adjMonths <- ifelse(simTime2[[itime]]$month_ForEachUsedMonth[1] == simTime2[[itime]]$month_ForEachUsedMonth_NSadj[1], 0, 6) - drymonths.top <- drymonths.bottom <- array(data = 0, dim = c(opt_agg[["SWPcrit_N"]], isim_time$no.useyr, 12)) + drymonths.top <- drymonths.bottom <- array(data = 0, dim = c(opt_agg[["SWPcrit_N"]], isim_time[[itime]]$no.useyr, 12)) for (icrit in seq_along(opt_agg[["SWPcrit_MPa"]])) { - temp <- tapply(swpmatric.mo$top, simTime2$month_ForEachUsedMonth_NSadj, function(x) x <= opt_agg[["SWPcrit_MPa"]][icrit]) - drymonths.top[icrit, , ] <- matrix(unlist(temp), nrow = isim_time$no.useyr) - temp <- tapply(swpmatric.mo$bottom, simTime2$month_ForEachUsedMonth_NSadj, function(x) x <= opt_agg[["SWPcrit_MPa"]][icrit]) - drymonths.bottom[icrit, , ] <- matrix(unlist(temp), nrow = isim_time$no.useyr) + temp <- tapply(swpmatric.mo$top, simTime2[[itime]]$month_ForEachUsedMonth_NSadj, function(x) x <= opt_agg[["SWPcrit_MPa"]][icrit]) + drymonths.top[icrit, , ] <- matrix(unlist(temp), nrow = isim_time[[itime]]$no.useyr) + temp <- tapply(swpmatric.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth_NSadj, function(x) x <= opt_agg[["SWPcrit_MPa"]][icrit]) + drymonths.bottom[icrit, , 
] <- matrix(unlist(temp), nrow = isim_time[[itime]]$no.useyr) } years.top <- apply(drymonths.top, MARGIN = 1:2, FUN = sum) @@ -3556,10 +3757,25 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, start.bottom <- apply(drymonths.bottom, MARGIN = 1:2, FUN = match, x = 1, nomatch = 0) start.bottom[start.bottom != 0] <- ifelse((temp <- (start.bottom[start.bottom != 0] + adjMonths) %% 12) == 0, 12, temp) - resMeans[nv:(nv+2*opt_agg[["SWPcrit_N"]]-1)] <- c(apply(start.top, MARGIN = 1, circ_mean, int = 12), - apply(start.bottom, MARGIN = 1, circ_mean, int = 12)) - resSDs[nv:(nv+2*opt_agg[["SWPcrit_N"]]-1)] <- c(apply(start.top, MARGIN = 1, circ_sd, int = 12), - apply(start.bottom, MARGIN = 1, circ_sd, int = 12)) + resMeans[nv:(nv+2*opt_agg[["SWPcrit_N"]]-1)] <- c( + apply(start.top, MARGIN = 1, rSW2utils::circ_mean, + int = 12, + type = "ZeroPlus2Pi" + ), + apply(start.bottom, MARGIN = 1, rSW2utils::circ_mean, + int = 12, + type = "ZeroPlus2Pi" + ) + ) + + resSDs[nv:(nv+2*opt_agg[["SWPcrit_N"]]-1)] <- c( + apply(start.top, MARGIN = 1, rSW2utils::circ_sd, + int = 12 + ), + apply(start.bottom, MARGIN = 1, rSW2utils::circ_sd, + int = 12 + ) + ) nv <- nv+2*opt_agg[["SWPcrit_N"]] @@ -3572,21 +3788,21 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySWPdrynessANDwetness"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySWPdrynessANDwetness") - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) 
if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) #swp.dy.all is required to get all layers - adjDays <- simTime2$doy_ForEachUsedDay_NSadj[1] - simTime2$doy_ForEachUsedDay[1] + adjDays <- simTime2[[itime]]$doy_ForEachUsedDay_NSadj[1] - simTime2[[itime]]$doy_ForEachUsedDay[1] durationDryPeriods.min <- 10 # days for (icrit in seq_along(opt_agg[["SWPcrit_MPa"]])) { wet_crit <- swpmatric.dy.all$val >= opt_agg[["SWPcrit_MPa"]][icrit] wet <- list() - wet$top <- apply(wet_crit[isim_time$index.usedy, 2+topL, drop = FALSE], 1, sum) + wet$top <- apply(wet_crit[isim_time[[itime]]$index.usedy, 2+topL, drop = FALSE], 1, sum) if (length(bottomL) > 0 && !identical(bottomL, 0)) { - wet$bottom <- apply(wet_crit[isim_time$index.usedy, 2+bottomL, drop = FALSE], 1, sum) + wet$bottom <- apply(wet_crit[isim_time[[itime]]$index.usedy, 2+bottomL, drop = FALSE], 1, sum) } else { - wet$bottom <- rep(NA, isim_time$no.usedy) + wet$bottom <- rep(NA, isim_time[[itime]]$no.usedy) } AtLeastOneWet <- lapply(wet, function(x) x > 0) @@ -3596,36 +3812,51 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, AtLeastOneDry <- lapply(AllWet, `!`) #wet periods - res.wet <- matrix(0, nrow = simTime2$no.useyr_NSadj, ncol = 8) - res.wet[, 1] <- tapply(AtLeastOneWet$top, simTime2$year_ForEachUsedDay_NSadj, sum) # total number of days per year when at least one top layer is wet - res.wet[, 2] <- tapply(AtLeastOneWet$bottom, simTime2$year_ForEachUsedDay_NSadj, sum) # total number of days per year when at least one top layer is wet - res.wet[, 3] <- tapply(AtLeastOneWet$top, simTime2$year_ForEachUsedDay_NSadj, max_duration) # maximum number of continous days when at least one top layers is wet - res.wet[, 4] <- tapply(AtLeastOneWet$bottom, simTime2$year_ForEachUsedDay_NSadj, max_duration) # maximum number of continous days when at least one top layers is wet - res.wet[, 5] <- tapply(AllWet$top, simTime2$year_ForEachUsedDay_NSadj, 
sum) # total number of days per year when all top layer are wet - res.wet[, 6] <- tapply(AllWet$bottom, simTime2$year_ForEachUsedDay_NSadj, sum) # total number of days per year when all top layer are wet - res.wet[, 7] <- tapply(AllWet$top, simTime2$year_ForEachUsedDay_NSadj, max_duration) # maximum number of continous days when all top layers are wet - res.wet[, 8] <- tapply(AllWet$bottom, simTime2$year_ForEachUsedDay_NSadj, max_duration) # maximum number of continous days when all top layers are wet + res.wet <- matrix(0, nrow = simTime2[[itime]]$no.useyr_NSadj, ncol = 8) + res.wet[, 1] <- tapply(AtLeastOneWet$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum) # total number of days per year when at least one top layer is wet + res.wet[, 2] <- tapply(AtLeastOneWet$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum) # total number of days per year when at least one top layer is wet + res.wet[, 3] <- tapply(AtLeastOneWet$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, rSW2utils::max_duration) # maximum number of continous days when at least one top layers is wet + res.wet[, 4] <- tapply(AtLeastOneWet$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, rSW2utils::max_duration) # maximum number of continous days when at least one top layers is wet + res.wet[, 5] <- tapply(AllWet$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum) # total number of days per year when all top layer are wet + res.wet[, 6] <- tapply(AllWet$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum) # total number of days per year when all top layer are wet + res.wet[, 7] <- tapply(AllWet$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, rSW2utils::max_duration) # maximum number of continous days when all top layers are wet + res.wet[, 8] <- tapply(AllWet$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, rSW2utils::max_duration) # maximum number of continous days when all top layers are wet #dry periods - res.dry <- matrix(0, nrow = simTime2$no.useyr_NSadj, ncol = 8) - 
res.dry[, 3] <- tapply(AllDry$top, simTime2$year_ForEachUsedDay_NSadj, sum) #total number of days/year when all top layers are dry - res.dry[, 7] <- tapply(AllDry$bottom, simTime2$year_ForEachUsedDay_NSadj, sum) #total number of days/year when all bottom layers are dry - res.dry[, 4] <- tapply(AllDry$top, simTime2$year_ForEachUsedDay_NSadj, max_duration) #maximum number of continous days when all top layers are dry - res.dry[, 8] <- tapply(AllDry$bottom, simTime2$year_ForEachUsedDay_NSadj, max_duration) #maximum number of continous days when all bottom layers are dry - res.dry[, 1] <- tapply(AtLeastOneDry$top, simTime2$year_ForEachUsedDay_NSadj, startDoyOfDuration, duration = durationDryPeriods.min) # start days/year when at least one of top layers are dry for at least ten days - res.dry[, 5] <- tapply(AtLeastOneDry$bottom, simTime2$year_ForEachUsedDay_NSadj, startDoyOfDuration, duration = durationDryPeriods.min) # start days/year when at least one of bottom layers are dry for at least ten days - res.dry[, 2] <- tapply(AtLeastOneDry$top, simTime2$year_ForEachUsedDay_NSadj, endDoyAfterDuration, duration = durationDryPeriods.min) # end days/year when at least one of top layers have been dry for at least ten days - res.dry[, 6] <- tapply(AtLeastOneDry$bottom, simTime2$year_ForEachUsedDay_NSadj, endDoyAfterDuration, duration = durationDryPeriods.min) # end days/year when at least one of bottom layers have been dry for at least ten days + res.dry <- matrix(0, nrow = simTime2[[itime]]$no.useyr_NSadj, ncol = 8) + res.dry[, 3] <- tapply(AllDry$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum) #total number of days/year when all top layers are dry + res.dry[, 7] <- tapply(AllDry$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum) #total number of days/year when all bottom layers are dry + res.dry[, 4] <- tapply(AllDry$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, rSW2utils::max_duration) #maximum number of continous days when all top layers are dry + 
res.dry[, 8] <- tapply(AllDry$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, rSW2utils::max_duration) #maximum number of continous days when all bottom layers are dry + res.dry[, 1] <- tapply(AtLeastOneDry$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, startDoyOfDuration, duration = durationDryPeriods.min) # start days/year when at least one of top layers are dry for at least ten days + res.dry[, 5] <- tapply(AtLeastOneDry$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, startDoyOfDuration, duration = durationDryPeriods.min) # start days/year when at least one of bottom layers are dry for at least ten days + res.dry[, 2] <- tapply(AtLeastOneDry$top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, endDoyAfterDuration, duration = durationDryPeriods.min) # end days/year when at least one of top layers have been dry for at least ten days + res.dry[, 6] <- tapply(AtLeastOneDry$bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, endDoyAfterDuration, duration = durationDryPeriods.min) # end days/year when at least one of bottom layers have been dry for at least ten days res.dry[, c(1:2, 5:5)] <- res.dry[, c(1:2, 5:5)] - adjDays res.dry[res.dry[, 1] > res.dry[, 2], 3] <- 0 #correct [, c(3, 7)] for years when start res.dry[, 6], 7] <- 0 #correct [, c(3, 7)] for years when start 0) return(max(temp)) else return(0)}) - durations.bottom <- sapply(isim_time$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle((snowfree & niceTemp & wet.bottom)[simTime2$year_ForEachUsedDay == y]))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) + durations.top <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle((snowfree & niceTemp & wet.top)[simTime2[[itime]]$year_ForEachUsedDay == y]))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) + durations.bottom <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle((snowfree & niceTemp & wet.bottom)[simTime2[[itime]]$year_ForEachUsedDay == 
y]))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) resMeans[nv:(nv+2*length(quantiles)-1)] <- c(stats::quantile(durations.top, probs = quantiles, type = 8), stats::quantile(durations.bottom, probs = quantiles, type = 8)) @@ -3673,25 +3904,25 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySuitablePeriodsAvailableWater"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySuitablePeriodsAvailableWater") - if (!exists("swcbulk.dy")) swcbulk.dy <- get_Response_aggL(swof["sw_swcbulk"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) + if (!exists("swcbulk.dy")) swcbulk.dy <- get_Response_aggL(swof["sw_swcbulk"], tscale = "dy", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) suitable <- (SWE.dy$val == 0) & (temp.dy$mean >= opt_agg[["Tbase_DD_C"]]) for (icrit in seq(along = opt_agg[["SWPcrit_MPa"]])) { SWCcritT <- rSOILWAT2::SWPtoVWC(opt_agg[["SWPcrit_MPa"]][icrit], texture$sand.top, texture$clay.top) * 10 * sum(layers_width[topL]) - swa.top <- ifelse(suitable, cut0Inf(swcbulk.dy$top - SWCcritT, val = 0), 0) + swa.top <- ifelse(suitable, rSW2utils::cut0Inf(swcbulk.dy$top - SWCcritT, val = 0), 0) if (length(bottomL) > 0 && !identical(bottomL, 0)) { SWCcritB <- rSOILWAT2::SWPtoVWC(opt_agg[["SWPcrit_MPa"]][icrit], texture$sand.bottom, texture$clay.bottom) * 10 * sum(layers_width[bottomL]) - swa.bottom <- ifelse(suitable, cut0Inf(swcbulk.dy$bottom - SWCcritB, val = 0), 0) + swa.bottom <- ifelse(suitable, 
rSW2utils::cut0Inf(swcbulk.dy$bottom - SWCcritB, val = 0), 0) } else { swa.bottom <- rep(0, length(swa.top)) } - temp <- list(t = tapply(swa.top, simTime2$year_ForEachUsedDay_NSadj, sum), - b = tapply(swa.bottom, simTime2$year_ForEachUsedDay_NSadj, sum)) + temp <- list(t = tapply(swa.top, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum), + b = tapply(swa.bottom, simTime2[[itime]]$year_ForEachUsedDay_NSadj, sum)) resMeans[nv:(nv+1)] <- sapply(temp, mean) resSDs[nv:(nv+1)] <- sapply(temp, stats::sd) nv <- nv+2 @@ -3706,28 +3937,28 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySuitablePeriodsDrySpells"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySuitablePeriodsDrySpells") - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) #swp.dy.all is required to get all layers - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) suitable <- (SWE.dy$val == 0) & (temp.dy$mean >= opt_agg[["Tbase_DD_C"]]) - adjDays <- simTime2$doy_ForEachUsedDay_NSadj[1] - simTime2$doy_ForEachUsedDay[1] + adjDays <- simTime2[[itime]]$doy_ForEachUsedDay_NSadj[1] - 
simTime2[[itime]]$doy_ForEachUsedDay[1] durationDryPeriods.min <- 10 # days for (icrit in seq(along = opt_agg[["SWPcrit_MPa"]])) { dry_crit <- swpmatric.dy.all$val < opt_agg[["SWPcrit_MPa"]][icrit] if (length(topL) > 1) { - dry.top <- apply(dry_crit[isim_time$index.usedy, 2+topL], 1, sum) + dry.top <- apply(dry_crit[isim_time[[itime]]$index.usedy, 2+topL], 1, sum) } else { - dry.top <- dry_crit[isim_time$index.usedy, 2+topL] + dry.top <- dry_crit[isim_time[[itime]]$index.usedy, 2+topL] } dry.top <- (suitable & dry.top >= length(topL)) if (length(bottomL) > 1) { - dry.bottom <- apply(dry_crit[isim_time$index.usedy, 2+bottomL], 1, sum) + dry.bottom <- apply(dry_crit[isim_time[[itime]]$index.usedy, 2+bottomL], 1, sum) } else if (length(bottomL) > 0 && !identical(bottomL, 0)) { - dry.bottom <- ifelse(dry_crit[isim_time$index.usedy, 2+bottomL], 1, 0) + dry.bottom <- ifelse(dry_crit[isim_time[[itime]]$index.usedy, 2+bottomL], 1, 0) } if (length(bottomL) > 0 && !identical(bottomL, 0)) { dry.bottom <- (suitable & dry.bottom >= length(bottomL)) @@ -3735,9 +3966,36 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, dry.bottom <- rep(FALSE, length(dry.top)) } - temp <- stats::aggregate(cbind(dry.top, dry.bottom), by = list(simTime2$year_ForEachUsedDay_NSadj), FUN = function(x) c(if (any((temp <- rle(x))$values)) c(mean(temp$lengths[temp$values]), max(temp$lengths[temp$values])) else c(0, 0), sum(x), startDoyOfDuration(x, duration = durationDryPeriods.min) - adjDays)) - resMeans[nv:(nv+7)] <- c(apply(temp$dry.top[, 1:3, drop = FALSE], 2, mean), circ_mean(x = temp$dry.top[, 4], int = 365), apply(temp$dry.bottom[, 1:3, drop = FALSE], 2, mean), circ_mean(x = temp$dry.bottom[, 4], int = 365)) - resSDs[nv:(nv+7)] <- c(apply(temp$dry.top[, 1:3, drop = FALSE], 2, stats::sd), circ_sd(x = temp$dry.top[, 4], int = 365), apply(temp$dry.bottom[, 1:3, drop = FALSE], 2, stats::sd), circ_sd(x = temp$dry.bottom[, 4], int = 365)) + temp <- 
stats::aggregate(cbind(dry.top, dry.bottom), by = list(simTime2[[itime]]$year_ForEachUsedDay_NSadj), FUN = function(x) c(if (any((temp <- rle(x))$values)) c(mean(temp$lengths[temp$values]), max(temp$lengths[temp$values])) else c(0, 0), sum(x), startDoyOfDuration(x, duration = durationDryPeriods.min) - adjDays)) + + resMeans[nv:(nv+7)] <- c( + apply(temp$dry.top[, 1:3, drop = FALSE], 2, mean), + rSW2utils::circ_mean( + x = temp$dry.top[, 4], + int = 365, + type = "ZeroPlus2Pi" + ), + apply(temp$dry.bottom[, 1:3, drop = FALSE], 2, mean), + rSW2utils::circ_mean( + x = temp$dry.bottom[, 4], + int = 365, + type = "ZeroPlus2Pi" + ) + ) + + resSDs[nv:(nv+7)] <- c( + apply(temp$dry.top[, 1:3, drop = FALSE], 2, stats::sd), + rSW2utils::circ_sd( + x = temp$dry.top[, 4], + int = 365 + ), + apply(temp$dry.bottom[, 1:3, drop = FALSE], 2, stats::sd), + rSW2utils::circ_sd( + x = temp$dry.bottom[, 4], + int = 365 + ) + ) + nv <- nv+8 } @@ -3750,14 +4008,14 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySWPdrynessDurationDistribution"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySWPdrynessDurationDistribution") - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) deciles <- (0:10)*10/100 quantiles <- (0:4)/4 mo_seasons <- matrix(data = c(12, 1:11), ncol = 3, nrow = 4, byrow = TRUE) season.flag <- c("DJF", "MAM", "JJA", "SON") - 
seasonal.years <- c(simTime2$year_ForEachUsedDay[-(1:31)], rep(-9999, times = 31)) #shift beginning of year to Dec 1 + seasonal.years <- c(simTime2[[itime]]$year_ForEachUsedDay[-(1:31)], rep(-9999, times = 31)) #shift beginning of year to Dec 1 for (icrit in seq(along = opt_agg[["SWPcrit_MPa"]])) { @@ -3766,8 +4024,8 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (length(bottomL) > 0 && !identical(bottomL, 0)) wet.bottom <- swpmatric.dy$bottom >= opt_agg[["SWPcrit_MPa"]][icrit] for (season in 1:nrow(mo_seasons)) { - durations.top <- sapply(isim_time$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle(wet.top[seasonal.years == y & (simTime2$month_ForEachUsedDay %in% mo_seasons[season, ])] == 0))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) - if (length(bottomL) > 0 && !identical(bottomL, 0)) durations.bottom <- sapply(isim_time$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle(wet.bottom[seasonal.years == y & (simTime2$month_ForEachUsedDay %in% mo_seasons[season, ])] == 0))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) + durations.top <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle(wet.top[seasonal.years == y & (simTime2[[itime]]$month_ForEachUsedDay %in% mo_seasons[season, ])] == 0))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) + if (length(bottomL) > 0 && !identical(bottomL, 0)) durations.bottom <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) {if (length(temp <- (temp <- rle(wet.bottom[seasonal.years == y & (simTime2[[itime]]$month_ForEachUsedDay %in% mo_seasons[season, ])] == 0))$lengths[temp$values]) > 0) return(max(temp)) else return(0)}) resMeans[nv:(nv+length(quantiles)-1)] <- stats::quantile(durations.top, probs = quantiles, type = 7) resMeans[(nv+length(quantiles)):(nv+2*length(quantiles)-1)] <- if (length(bottomL) > 0 && !identical(bottomL, 0)) stats::quantile(durations.bottom, probs = quantiles, type = 7) else 0 
@@ -3786,21 +4044,21 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySWPdrynessEventSizeDistribution"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySWPdrynessEventSizeDistribution") - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) binSize <- c(1, 8, 15, 29, 57, 183, 367) #closed interval lengths in [days] within a year; NOTE: n_variables is set for binsN == 6 binsN <- length(binSize) - 1 for (icrit in seq_along(opt_agg[["SWPcrit_MPa"]])) { - dry.top <- swpmatric.dy$top[isim_time$index.usedy] < opt_agg[["SWPcrit_MPa"]][icrit] + dry.top <- swpmatric.dy$top[isim_time[[itime]]$index.usedy] < opt_agg[["SWPcrit_MPa"]][icrit] if (length(bottomL) > 0 && !identical(bottomL, 0)) { - dry.bottom <- swpmatric.dy$bottom[isim_time$index.usedy] < opt_agg[["SWPcrit_MPa"]][icrit] + dry.bottom <- swpmatric.dy$bottom[isim_time[[itime]]$index.usedy] < opt_agg[["SWPcrit_MPa"]][icrit] } #apply over each year, rle just on selected year store runs in vec, if that is greater than 0 then add to that years bins else return 0s for that year. 
Will result in a matrix of 4 by Years - binsYears.top <- stats::aggregate(dry.top, by = list(simTime2$year_ForEachUsedDay_NSadj), FUN = EventDistribution, N = binsN, size = binSize)$x + binsYears.top <- stats::aggregate(dry.top, by = list(simTime2[[itime]]$year_ForEachUsedDay_NSadj), FUN = EventDistribution, N = binsN, size = binSize)$x eventsPerYear <- apply(binsYears.top, MARGIN = 1, FUN = sum) freqBins <- sweep(binsYears.top, MARGIN = 1, STATS = eventsPerYear, FUN = "/") events.top <- c(mean(eventsPerYear, na.rm = TRUE), stats::sd(eventsPerYear, na.rm = TRUE)) @@ -3813,7 +4071,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, resSDs[(nv+1):(nv+binsN)] <- bin_top_sd if (length(bottomL) > 0 && !identical(bottomL, 0)) { - binsYears.bottom <- stats::aggregate(dry.bottom, by = list(simTime2$year_ForEachUsedDay_NSadj), FUN = EventDistribution, N = binsN, size = binSize)$x + binsYears.bottom <- stats::aggregate(dry.bottom, by = list(simTime2[[itime]]$year_ForEachUsedDay_NSadj), FUN = EventDistribution, N = binsN, size = binSize)$x eventsPerYear <- apply(binsYears.bottom, MARGIN = 1, FUN = sum) freqBins <- sweep(binsYears.bottom, MARGIN = 1, STATS = eventsPerYear, FUN = "/") events.bottom <- c(mean(eventsPerYear, na.rm = TRUE), stats::sd(eventsPerYear, na.rm = TRUE)) @@ -3839,7 +4097,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailySWPdrynessIntensity"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailySWPdrynessIntensity") - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = 
isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) SWCtop <- vwcmatric.dy$top * sum(layers_width[topL])*10 if (length(bottomL) > 0 && !identical(bottomL, 0)) SWCbottom <- vwcmatric.dy$bottom * sum(layers_width[bottomL])*10 @@ -3847,17 +4105,17 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, for (icrit in seq(along = opt_agg[["SWPcrit_MPa"]])) { #amount of SWC required so that layer wouldn't be dry SWCcritT <- rSOILWAT2::SWPtoVWC(opt_agg[["SWPcrit_MPa"]][icrit], texture$sand.top, texture$clay.top) * sum(layers_width[topL])*10 - missingSWCtop <- cut0Inf(SWCcritT - SWCtop, val = 0) - IntensitySum_top <- c(mean(temp <- sapply(isim_time$useyrs, FUN = function(y) sum(missingSWCtop[simTime2$year_ForEachUsedDay == y])), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) - IntensityMean_top <- c(mean(temp <- sapply(isim_time$useyrs, FUN = function(y) mean((temp <- missingSWCtop[simTime2$year_ForEachUsedDay == y])[temp > 0], na.rm = TRUE)), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) - IntensityDurationAndNumber_top <- c(apply(temp <- sapply(isim_time$useyrs, FUN = function(y) c(mean(temp <- (temp <- rle(missingSWCtop[simTime2$year_ForEachUsedDay == y] > 0))$lengths[temp$values]), length(temp))), 1, mean), apply(temp, 1, stats::sd))[c(1, 3, 2, 4)] + missingSWCtop <- rSW2utils::cut0Inf(SWCcritT - SWCtop, val = 0) + IntensitySum_top <- c(mean(temp <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) sum(missingSWCtop[simTime2[[itime]]$year_ForEachUsedDay == y])), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) + IntensityMean_top <- c(mean(temp <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) mean((temp <- missingSWCtop[simTime2[[itime]]$year_ForEachUsedDay == y])[temp > 0], na.rm = TRUE)), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) + IntensityDurationAndNumber_top <- c(apply(temp <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) c(mean(temp <- (temp <- rle(missingSWCtop[simTime2[[itime]]$year_ForEachUsedDay == 
y] > 0))$lengths[temp$values]), length(temp))), 1, mean), apply(temp, 1, stats::sd))[c(1, 3, 2, 4)] if (length(bottomL) > 0 && !identical(bottomL, 0)) { SWCcritB <- rSOILWAT2::SWPtoVWC(opt_agg[["SWPcrit_MPa"]][icrit], texture$sand.bottom, texture$clay.bottom) * sum(layers_width[bottomL])*10 - missingSWCbottom <- cut0Inf(SWCcritB - SWCbottom, val = 0) - IntensitySum_bottom <- c(mean(temp <- sapply(isim_time$useyrs, FUN = function(y) sum(missingSWCbottom[simTime2$year_ForEachUsedDay == y])), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) - IntensityMean_bottom <- c(mean(temp <- sapply(isim_time$useyrs, FUN = function(y) mean((temp <- missingSWCbottom[simTime2$year_ForEachUsedDay == y])[temp > 0], na.rm = TRUE)), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) - IntensityDurationAndNumber_bottom <- c(apply(temp <- sapply(isim_time$useyrs, FUN = function(y) c(mean(temp <- (temp <- rle(missingSWCbottom[simTime2$year_ForEachUsedDay == y] > 0))$lengths[temp$values]), length(temp))), 1, mean), apply(temp, 1, stats::sd))[c(1, 3, 2, 4)] + missingSWCbottom <- rSW2utils::cut0Inf(SWCcritB - SWCbottom, val = 0) + IntensitySum_bottom <- c(mean(temp <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) sum(missingSWCbottom[simTime2[[itime]]$year_ForEachUsedDay == y])), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) + IntensityMean_bottom <- c(mean(temp <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) mean((temp <- missingSWCbottom[simTime2[[itime]]$year_ForEachUsedDay == y])[temp > 0], na.rm = TRUE)), na.rm = TRUE), stats::sd(temp, na.rm = TRUE)) + IntensityDurationAndNumber_bottom <- c(apply(temp <- sapply(isim_time[[itime]]$useyrs, FUN = function(y) c(mean(temp <- (temp <- rle(missingSWCbottom[simTime2[[itime]]$year_ForEachUsedDay == y] > 0))$lengths[temp$values]), length(temp))), 1, mean), apply(temp, 1, stats::sd))[c(1, 3, 2, 4)] } resMeans[nv:(nv+3)] <- c(IntensitySum_top[1], IntensityMean_top[1], IntensityDurationAndNumber_top[c(1, 3)]) @@ -3877,32 +4135,32 @@ do_OneSite 
<- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["dailyThermalDrynessStress"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyThermalDrynessStress") - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- get_SWPmatric_aggL(vwcmatric.dy.all, texture, sand, clay) #swp.dy.all is required to get all layers - if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy")) vwcmatric.dy <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dy", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.dy")) swpmatric.dy <- get_SWPmatric_aggL(vwcmatric.dy, texture, sand, clay) - if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time) - if (!exists("vpd.dy")) vpd.dy <- get_VPD_dy(sc, temp.dy, xin = swRunScenariosData, st2 = simTime2) - if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) + if (!exists("temp.dy")) temp.dy <- get_Temp_dy(runDataSC, isim_time[[itime]]) + if (!exists("vpd.dy")) vpd.dy <- get_VPD_dy(sc, temp.dy, xin = swRunScenariosData, st2 = simTime2[[itime]]) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, 
isim_time[[itime]]) # Aggregate for hottest and for coldest conditions extreme <- c(hottest = TRUE, coldest = FALSE) # Set up soil moisture stress conditions - dryness <- matrix(rep.int(opt_agg[["SWPcrit_MPa"]], isim_time$no.usedy), + dryness <- matrix(rep.int(opt_agg[["SWPcrit_MPa"]], isim_time[[itime]]$no.usedy), ncol = opt_agg[["SWPcrit_N"]], byrow = TRUE) snowfree <- SWE.dy$val <= SFSW2_glovars[["tol"]] n_conds <- 4L conds <- list() # max length(conds) == n_conds - conds[["Always"]] <- matrix(TRUE, nrow = isim_time$no.usedy, ncol = 1) - temp <- swpmatric.dy.all$val[isim_time$index.usedy, -(1:2), drop = FALSE] + conds[["Always"]] <- matrix(TRUE, nrow = isim_time[[itime]]$no.usedy, ncol = 1) + temp <- swpmatric.dy.all$val[isim_time[[itime]]$index.usedy, -(1:2), drop = FALSE] conds[["DryAll"]] <- apply(temp, 1, max) < dryness conds[["DryTop"]] <- swpmatric.dy$top < dryness conds[["DryBottom"]] <- if (length(bottomL) > 0 && !identical(bottomL, 0)) { swpmatric.dy$bottom < dryness } else{ - matrix(FALSE, nrow = isim_time$no.usedy, ncol = opt_agg[["SWPcrit_N"]]) + matrix(FALSE, nrow = isim_time[[itime]]$no.usedy, ncol = opt_agg[["SWPcrit_N"]]) } for (d3 in seq_len(n_conds)) { @@ -3919,26 +4177,28 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # Output container for VPD and Temp on 10 hottest/coldest, soil-dry days # and for Temp on 10 hottest/coldest, snowfree, soil-dry days - out_during_Stress <- array(NA, dim = c(isim_time$no.useyr, 3 * N)) + out_during_Stress <- array(NA, dim = c(isim_time[[itime]]$no.useyr, 3 * N)) for (ihot in seq_along(extreme)) { for (d2 in Ns) { # indices (=doy) of k-largest/smallest temperature values per year given soil is dry ids_hotcold <- tapply(Temp_during_Stress1[, d2], - INDEX = simTime2$year_ForEachUsedDay, FUN = fun_kLargest, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, + FUN = rSW2utils::fun_kLargest, largest = extreme[ihot], fun = "index", k = 10L, na.rm = TRUE) # values of mean VPD and of mean 
temperature during k-indices per year - out_during_Stress[, c(d2, N + d2)] <- t(sapply(seq_len(isim_time$no.useyr), + out_during_Stress[, c(d2, N + d2)] <- t(sapply(seq_len(isim_time[[itime]]$no.useyr), function(j) { - ids <- simTime2$doy_ForEachUsedDay %in% ids_hotcold[[j]] & - simTime2$year_ForEachUsedDay == isim_time$useyrs[j] + ids <- simTime2[[itime]]$doy_ForEachUsedDay %in% ids_hotcold[[j]] & + simTime2[[itime]]$year_ForEachUsedDay == isim_time[[itime]]$useyrs[j] c(mean(VPD_during_Stress[ids, d2]), mean(Temp_during_Stress1[ids, d2])) })) # mean temperature during 10 hottest/coldest, snowfree, soil-dry days out_during_Stress[, 2 * N + d2] <- tapply(Temp_during_Stress2[, d2], - INDEX = simTime2$year_ForEachUsedDay, FUN = fun_kLargest, + INDEX = simTime2[[itime]]$year_ForEachUsedDay, + FUN = rSW2utils::fun_kLargest, largest = extreme[ihot], fun = mean, k = 10L, na.rm = TRUE) } @@ -3946,7 +4206,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv_new <- nv + nv_add resMeans[nv:(nv_new - 1)] <- .colMeans(out_during_Stress, - isim_time$no.useyr, nv_add) + isim_time[[itime]]$no.useyr, nv_add) resSDs[nv:(nv_new - 1)] <- apply(out_during_Stress, 2, stats::sd) nv <- nv_new @@ -3975,7 +4235,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(opt_agg$use_doy_range)) { print_debug(opt_verbosity, tag_simpidfid, "aggregating", "periodicVWCmatricFirstLayer") - if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.dy.all")) vwcmatric.dy.all <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "dyAll", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) doy.trim <- if 
(!is.null(opt_agg[["doy_ranges"]][["periodicVWCmatric"]])){ c(opt_agg[["doy_ranges"]][["periodicVWCmatric"]][1]:opt_agg[["doy_ranges"]][["periodicVWCmatric"]][2]) @@ -4012,10 +4272,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyTemp"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyTemp") - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp.mo$mean, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp.mo$mean, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp.mo$mean, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp.mo$mean, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyTemp") @@ -4025,10 +4285,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyPPT"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyPPT") - if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time) + if (!exists("prcp.mo")) prcp.mo <- get_PPT_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(prcp.mo$ppt, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(prcp.mo$ppt, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(prcp.mo$ppt, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(prcp.mo$ppt, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyPPT") @@ -4038,10 
+4298,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySnowpack"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySnowpack") - if (!exists("SWE.mo")) SWE.mo <- get_SWE_mo(runDataSC, isim_time) + if (!exists("SWE.mo")) SWE.mo <- get_SWE_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(SWE.mo$val, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(SWE.mo$val, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(SWE.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(SWE.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlySnowpack") @@ -4051,12 +4311,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySoilTemp"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySoilTemp") - if (!exists("soiltemp.mo")) soiltemp.mo <- get_Response_aggL(swof["sw_soiltemp"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("soiltemp.mo")) soiltemp.mo <- get_Response_aggL(swof["sw_soiltemp"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(soiltemp.mo$top, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(soiltemp.mo$top, simTime2$month_ForEachUsedMonth, stats::sd) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(soiltemp.mo$bottom, simTime2$month_ForEachUsedMonth, mean) - 
resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(soiltemp.mo$bottom, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(soiltemp.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(soiltemp.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(soiltemp.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(soiltemp.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlySoilTemp") @@ -4066,12 +4326,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyRunoff"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyRunoff") - if (!exists("runonoff.mo")) runonoff.mo <- get_RunOnOff_mo(runDataSC, isim_time) + if (!exists("runonoff.mo")) runonoff.mo <- get_RunOnOff_mo(runDataSC, isim_time[[itime]]) resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(runonoff.mo$total_runoff, - simTime2$month_ForEachUsedMonth, mean) + simTime2[[itime]]$month_ForEachUsedMonth, mean) resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(runonoff.mo$total_runoff, - simTime2$month_ForEachUsedMonth, stats::sd) + simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyRunoff") @@ -4080,12 +4340,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyRunon"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyRunon") - if (!exists("runonoff.mo")) runonoff.mo <- get_RunOnOff_mo(runDataSC, isim_time) + if (!exists("runonoff.mo")) runonoff.mo <- get_RunOnOff_mo(runDataSC, isim_time[[itime]]) resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- 
tapply(runonoff.mo$total_runon, - simTime2$month_ForEachUsedMonth, mean) + simTime2[[itime]]$month_ForEachUsedMonth, mean) resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(runonoff.mo$total_runon, - simTime2$month_ForEachUsedMonth, stats::sd) + simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyRunon") @@ -4095,12 +4355,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyHydraulicRedistribution"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyHydraulicRedistribution") - if (!exists("hydred.mo")) hydred.mo <- get_Response_aggL(swof["sw_hd"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("hydred.mo")) hydred.mo <- get_Response_aggL(swof["sw_hd"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(hydred.mo$top, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(hydred.mo$top, simTime2$month_ForEachUsedMonth, stats::sd) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(hydred.mo$bottom, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(hydred.mo$bottom, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(hydred.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(hydred.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(hydred.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(hydred.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 
print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyHydraulicRedistribution") @@ -4110,10 +4370,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyInfiltration"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyInfiltration") - if (!exists("inf.mo")) inf.mo <- get_Inf_mo(runDataSC, isim_time) + if (!exists("inf.mo")) inf.mo <- get_Inf_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(inf.mo$inf, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(inf.mo$inf, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(inf.mo$inf, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(inf.mo$inf, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyInfiltration") @@ -4123,10 +4383,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyDeepDrainage"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyDeepDrainage") - if (!exists("deepDrain.mo")) deepDrain.mo <- get_DeepDrain_mo(runDataSC, isim_time) + if (!exists("deepDrain.mo")) deepDrain.mo <- get_DeepDrain_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(deepDrain.mo$val, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(deepDrain.mo$val, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(deepDrain.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(deepDrain.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, 
"monthlyDeepDrainage") @@ -4136,7 +4396,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySWPmatric"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySWPmatric") - if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) if (!exists("swpmatric.mo")) swpmatric.mo <- get_SWPmatric_aggL(vwcmatric.mo, texture, sand, clay) resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- swpmatric.mo$aggMean.top @@ -4150,12 +4410,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyVWCbulk"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyVWCbulk") - if (!exists("vwcbulk.mo")) vwcbulk.mo <- get_Response_aggL(swof["sw_vwcbulk"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcbulk.mo")) vwcbulk.mo <- get_Response_aggL(swof["sw_vwcbulk"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcbulk.mo$top, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcbulk.mo$top, simTime2$month_ForEachUsedMonth, stats::sd) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcbulk.mo$bottom, simTime2$month_ForEachUsedMonth, mean) - 
resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcbulk.mo$bottom, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcbulk.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcbulk.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcbulk.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcbulk.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyVWCbulk") @@ -4165,12 +4425,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyVWCmatric"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyVWCmatric") - if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcmatric.mo$top, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcmatric.mo$top, simTime2$month_ForEachUsedMonth, stats::sd) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcmatric.mo$bottom, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcmatric.mo$bottom, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcmatric.mo$top, 
simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(vwcmatric.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcmatric.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(vwcmatric.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyVWCmatric") @@ -4180,12 +4440,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySWCbulk"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySWCbulk") - if (!exists("swcbulk.mo")) swcbulk.mo <- get_Response_aggL(swof["sw_swcbulk"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("swcbulk.mo")) swcbulk.mo <- get_Response_aggL(swof["sw_swcbulk"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(swcbulk.mo$top, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(swcbulk.mo$top, simTime2$month_ForEachUsedMonth, stats::sd) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(swcbulk.mo$bottom, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(swcbulk.mo$bottom, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(swcbulk.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(swcbulk.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(swcbulk.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, mean) + 
resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(swcbulk.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlySWCbulk") @@ -4195,7 +4455,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySWAbulk"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySWAbulk") - if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("vwcmatric.mo")) vwcmatric.mo <- get_Response_aggL(swof["sw_vwcmatric"], tscale = "mo", scaler = 1, FUN = stats::weighted.mean, weights = layers_width, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) VWCcritsT <- rSOILWAT2::SWPtoVWC(opt_agg[["SWPcrit_MPa"]], texture$sand.top, texture$clay.top) VWCcritsB <- if (length(bottomL) > 0 && !identical(bottomL, 0)) { @@ -4206,13 +4466,13 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, for (icrit in opt_agg[["SWPcrit_MPa"]]) { temp_top_mo <- 10 * sum(layers_width[topL]) * (vwcmatric.mo$top - VWCcritsT[icrit]) - temp_top_mean <- tapply(temp_top_mo, simTime2$month_ForEachUsedMonth, mean) - temp_top_sd <- tapply(temp_top_mo, simTime2$month_ForEachUsedMonth, mean) + temp_top_mean <- tapply(temp_top_mo, simTime2[[itime]]$month_ForEachUsedMonth, mean) + temp_top_sd <- tapply(temp_top_mo, simTime2[[itime]]$month_ForEachUsedMonth, mean) if (length(bottomL) > 0 && !identical(bottomL, 0)) { temp_bottom_mo <- 10 * sum(layers_width[bottomL]) * (vwcmatric.mo$bottom - VWCcritsB[icrit]) - temp_bottom_mean <- tapply(temp_bottom_mo, simTime2$month_ForEachUsedMonth, mean) - temp_bottom_sd <- tapply(temp_bottom_mo, simTime2$month_ForEachUsedMonth, mean) + temp_bottom_mean 
<- tapply(temp_bottom_mo, simTime2[[itime]]$month_ForEachUsedMonth, mean) + temp_bottom_sd <- tapply(temp_bottom_mo, simTime2[[itime]]$month_ForEachUsedMonth, mean) } else { temp_bottom_mo <- temp_bottom_mean <- temp_bottom_sd <- rep(NA, 12) } @@ -4233,12 +4493,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyTranspiration"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyTranspiration") - if (!exists("transp.mo")) transp.mo <- get_Response_aggL(swof["sw_transp"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("transp.mo")) transp.mo <- get_Response_aggL(swof["sw_transp"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(transp.mo$top, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(transp.mo$top, simTime2$month_ForEachUsedMonth, stats::sd) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(transp.mo$bottom, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(transp.mo$bottom, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(transp.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(transp.mo$top, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(transp.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(transp.mo$bottom, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyTranspiration") @@ -4248,11 +4508,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, 
i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlySoilEvaporation"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlySoilEvaporation") - if (!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(swof["sw_evsoil"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(swof["sw_evsoil"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) temp <- Esoil.mo$top + Esoil.mo$bottom - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlySoilEvaporation") @@ -4262,10 +4522,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyAET"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyAET") - if (!exists("AET.mo")) AET.mo <- get_AET_mo(runDataSC, isim_time) + if (!exists("AET.mo")) AET.mo <- get_AET_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(AET.mo$val, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(AET.mo$val, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(AET.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(AET.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, 
tag_simpidfid, prj_todos, nv - nv0, "monthlyAET") @@ -4275,10 +4535,10 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyPET"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyPET") - if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time) + if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time[[itime]]) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(PET.mo$val, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(PET.mo$val, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(PET.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(PET.mo$val, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+12 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyPET") @@ -4288,12 +4548,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyVPD"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyVPD") - if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time) - if (!exists("vpd.mo")) vpd.mo <- get_VPD_mo(sc, temp.mo, xin = swRunScenariosData, st2 = simTime2) + if (!exists("temp.mo")) temp.mo <- get_Temp_mo(runDataSC, isim_time[[itime]]) + if (!exists("vpd.mo")) vpd.mo <- get_VPD_mo(sc, temp.mo, xin = swRunScenariosData, st2 = simTime2[[itime]]) nv_new <- nv + 12 - resMeans[nv:(nv_new - 1)] <- tapply(vpd.mo$mean, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv:(nv_new - 1)] <- tapply(vpd.mo$mean, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv:(nv_new - 1)] <- tapply(vpd.mo$mean, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv:(nv_new - 1)] <- tapply(vpd.mo$mean, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv_new print_debugN(opt_verbosity, tag_simpidfid, 
prj_todos, nv - nv0, "monthlyVPD") @@ -4303,17 +4563,17 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyAETratios"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyAETratios") - if (!exists("AET.mo")) AET.mo <- get_AET_mo(runDataSC, isim_time) - if (!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(swof["sw_evsoil"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("transp.mo")) transp.mo <- get_Response_aggL(swof["sw_transp"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("AET.mo")) AET.mo <- get_AET_mo(runDataSC, isim_time[[itime]]) + if (!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(swof["sw_evsoil"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("transp.mo")) transp.mo <- get_Response_aggL(swof["sw_transp"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) temp <- ifelse(AET.mo$val < SFSW2_glovars[["tol"]], 0, (transp.mo$top + transp.mo$bottom) / AET.mo$val) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) temp <- ifelse(AET.mo$val < SFSW2_glovars[["tol"]], 0, (Esoil.mo$top + Esoil.mo$bottom) / AET.mo$val) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- 
tapply(temp, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyAETratios") @@ -4323,17 +4583,17 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, if (isTRUE(prj_todos[["aon"]][["monthlyPETratios"]])) { nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "monthlyPETratios") - if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time) - if (!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(swof["sw_evsoil"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) - if (!exists("transp.mo")) transp.mo <- get_Response_aggL(swof["sw_transp"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time, st2 = simTime2, topL = topL, bottomL = bottomL) + if (!exists("PET.mo")) PET.mo <- get_PET_mo(runDataSC, isim_time[[itime]]) + if (!exists("Esoil.mo")) Esoil.mo <- get_Response_aggL(swof["sw_evsoil"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) + if (!exists("transp.mo")) transp.mo <- get_Response_aggL(swof["sw_transp"], tscale = "mo", scaler = 10, FUN = sum, x = runDataSC, st = isim_time[[itime]], st2 = simTime2[[itime]], topL = topL, bottomL = bottomL) temp <- ifelse(PET.mo$val < SFSW2_glovars[["tol"]], 0, (transp.mo$top + transp.mo$bottom) / PET.mo$val) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, mean) + 
resSDs[nv+SFSW2_glovars[["st_mo"]]-1] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) temp <- ifelse(PET.mo$val < SFSW2_glovars[["tol"]], 0, (Esoil.mo$top + Esoil.mo$bottom) / PET.mo$val) - resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2$month_ForEachUsedMonth, mean) - resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2$month_ForEachUsedMonth, stats::sd) + resMeans[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, mean) + resSDs[nv+SFSW2_glovars[["st_mo"]]-1+12] <- tapply(temp, simTime2[[itime]]$month_ForEachUsedMonth, stats::sd) nv <- nv+24 print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "monthlyPETratios") @@ -4347,11 +4607,11 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, nv0 <- nv print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyRegeneration_bySWPSnow") if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- list(val = -1/10*slot(slot(runDataSC, swof["sw_swp"]), "Day")) #no vwcdy available! 
- if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time) + if (!exists("SWE.dy")) SWE.dy <- get_SWE_dy(runDataSC, isim_time[[itime]]) - swp.surface <- swpmatric.dy.all$val[isim_time$index.usedy, 3] + swp.surface <- swpmatric.dy.all$val[isim_time[[itime]]$index.usedy, 3] temp <- c(by(data = data.frame(swp.surface, SWE.dy$val), - INDICES = simTime2$year_ForEachUsedDay_NSadj, FUN = regenerationThisYear_YN, + INDICES = simTime2[[itime]]$year_ForEachUsedDay_NSadj, FUN = regenerationThisYear_YN, params = opt_agg[["dailyRegeneration_bySWPSnow"]])) resMeans[nv] <- mean(temp) @@ -4364,7 +4624,7 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } #Artemisia tridentata regeneration according to factor model (2012-02-15, drs), call for every regeneration species - #adjust_NorthSouth: param$Doy_SeedDispersalStart0 must be set correctly\ + #adjust_NorthSouth: param$Doy_SeedDispersalStart0 must be set correctly #63 if (isTRUE(prj_todos[["aon"]][["dailyRegeneration_GISSM"]])) { nv0 <- nv @@ -4372,392 +4632,123 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, # Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). Modeling regeneration responses of big sagebrush (Artemisia tridentata) to abiotic conditions. Ecol Model, 286, 66-77. print_debug(opt_verbosity, tag_simpidfid, "aggregating", "dailyRegeneration_GISSM") - #---Access daily data, which do not depend on specific species parameters, i.e., start of season - - if (!exists("swpmatric.dy.all")) swpmatric.dy.all <- list(val = -1/10*slot(slot(runDataSC, swof["sw_swp"]), "Day")) #no vwcdy available! 
- temp.snow <- slot(slot(runDataSC, swof["sw_snow"]), "Day") - temp.temp <- slot(slot(runDataSC, swof["sw_temp"]), "Day") - TmeanJan <- mean(temp.temp[isim_time$index.usedy, 5][simTime2$month_ForEachUsedDay_NSadj == 1], na.rm = TRUE) #mean January (N-hemisphere)/July (S-hemisphere) air temperature based on normal 'doy' - temp.soiltemp <- slot(slot(runDataSC, swof["sw_soiltemp"]), "Day") - if (inherits(temp.soiltemp, "try-error") || anyNA(temp.soiltemp[, -(1:2)]) || all(temp.soiltemp[, -(1:2)] == 0)) { - use.soiltemp <- FALSE #flag whether soil temperature output is available or not (and then air temperature is used instead of top soil temperature) - } else { - use.soiltemp <- TRUE #currently we have only mean daily soil temperatures and not min/max which we need fo the model + # Extract daily data + if (!exists("swpmatric.dy.all")) { + swpmatric.dy.all <- list( + val = -1 / 10 * slot(slot(runDataSC, swof["sw_swp"]), "Day") + ) } + tmp_airtemp <- slot(slot(runDataSC, swof["sw_temp"]), "Day") + tmp_soiltemp <- slot(slot(runDataSC, swof["sw_soiltemp"]), "Day") + + sim_vals_daily <- list( + SWP_MPa = swpmatric.dy.all[["val"]][, 2 + ld, drop = FALSE], + Snowpack_SWE_mm = 10 * slot( + slot(runDataSC, swof["sw_snow"]), + "Day" + )[, "snowpackWaterEquivalent_cm"], + air_Tmin_C = tmp_airtemp[, "min_C"], + air_Tmean_C = tmp_airtemp[, "avg_C"], + air_Tmax_C = tmp_airtemp[, "max_C"], + # TODO: replace with daily min/max soil temperature once available + shallowsoil_Tmin_C = tmp_soiltemp[, "Lyr_1"], + shallowsoil_Tmean_C = tmp_soiltemp[, "Lyr_1"], + shallowsoil_Tmax_C = tmp_soiltemp[, "Lyr_1"] + ) - #Loop through each species - prev.Doy_SeedDispersalStart <- 0 - for (sp in seq_len(opt_agg[["GISSM_species_No"]])) { - param <- data.frame(t(opt_agg[["GISSM_params"]][, sp])) - - #Regeneration year = RY: RYdoy = 1 == start of seed dispersal = start of 'regeneration year' - temp <- param$Doy_SeedDispersalStart0 + - param$SeedDispersalStart_DependencyOnMeanTempJanuary * TmeanJan - 
Doy_SeedDispersalStart <- as.integer(max(round(temp, 0) %% 365, 1)) - - moveByDays <- if (Doy_SeedDispersalStart > 1) { - temp <- ISOdate(isim_time$useyrs[1] - 1, 12, 31, tz = "UTC") - - ISOdate(isim_time$useyrs[1] - 1, 1, 1, tz = "UTC") + 1 - - (Doy_SeedDispersalStart - 1) - as.integer(max(c(as.numeric(temp) %% 365, 1))) - } else { - 1L - } - - #Calculate regeneration year dates - et <- isim_time$no.usedy - itail <- (et - moveByDays + 1):et - if (isim_time[["startyr"]] > isim_time[["simstartyr"]]) { - #start earlier to complete RY - st <- isim_time$index.usedy[1] - RY.index.usedy <- c((st - moveByDays):(st - 1), isim_time$index.usedy[-itail]) #index indicating which rows of the daily SOILWAT2 output is used - RYyear_ForEachUsedDay <- simTime2$year_ForEachUsedDay #'regeneration year' for each used day - RYdoy_ForEachUsedDay <- simTime2$doy_ForEachUsedDay #'doy of the regeneration year' for each used day - - } else { - #start later to get a complete RY - RY.index.usedy <- isim_time$index.usedy[-c(1:(Doy_SeedDispersalStart - 1), itail)] - temp <- which(simTime2$year_ForEachUsedDay == simTime2$year_ForEachUsedDay[1]) - RYyear_ForEachUsedDay <- simTime2$year_ForEachUsedDay[-temp] - RYdoy_ForEachUsedDay <- simTime2$doy_ForEachUsedDay[-temp] - } - RY.useyrs <- unique(RYyear_ForEachUsedDay) #list of 'regeneration years' that are used for aggregation - - # normal year for each used 'doy of the regeneration year' - RY_N_usedy <- length(RY.index.usedy) - itail <- (RY_N_usedy - moveByDays + 1):RY_N_usedy - year_ForEachUsedRYDay <- c(rep(isim_time$useyrs[1] - 1, moveByDays), - RYyear_ForEachUsedDay[-itail]) - # normal doy for each used 'doy of the regeneration year' - st <- isim_time$index.usedy[1] - doy_ForEachUsedRYDay <- c((st - moveByDays):(st - 1), - RYdoy_ForEachUsedDay[-itail]) - - #Access daily data, the first time and afterwards only if Doy_SeedDispersalStart is different from value of previous species - if (sp == 1 || Doy_SeedDispersalStart != 
prev.Doy_SeedDispersalStart) { - swp <- swpmatric.dy.all$val[RY.index.usedy, 2 + ld, drop = FALSE] - snow <- temp.snow[RY.index.usedy, 3]*10 #mm swe in snowpack - airTminSnow <- ifelse(snow > 0, param$Temp_ExperiencedUnderneathSnowcover, temp.temp[RY.index.usedy, 4]) - airTmax <- temp.temp[RY.index.usedy, 3] - if (use.soiltemp) { - soilTmeanSnow <- ifelse(snow > 0, param$Temp_ExperiencedUnderneathSnowcover, temp.soiltemp[RY.index.usedy, 3]) - soilTminSnow <- ifelse(snow > 0, param$Temp_ExperiencedUnderneathSnowcover, temp.soiltemp[RY.index.usedy, 3]) - soilTmax <- temp.soiltemp[RY.index.usedy, 3] - - } else { - soilTmeanSnow <- ifelse(snow > 0, param$Temp_ExperiencedUnderneathSnowcover, temp.temp[RY.index.usedy, 5]) - soilTminSnow <- airTminSnow - soilTmax <- airTmax - } - } - - #----GERMINATION - - #---1. Germination periods: sequence of days with favorable conditions for germination defined by upper/lower limits - #Maximal temperature for germination - Germination_AtBelowTmax <- soilTmax <= param$Temp_MaximumForGermination - - #Minimal temperature for germination - Germination_AtAboveTmin <- soilTminSnow >= param$Temp_MinimumForGermination - - #Minimum soil water for germination in relevant soil layer - SoilLayers_RelevantToGermination <- SoilLayer_at_SoilDepth(param$SoilDepth_RelevantToGermination, layers_depth) - if (length(SoilLayers_RelevantToGermination) == 1) { - Germination_AtMoreThanTopSWPmin <- swp[, SoilLayers_RelevantToGermination] >= param$SWP_MinimumForGermination - swp.TopMean <- swp[, SoilLayers_RelevantToGermination] - } else { - Germination_AtMoreThanTopSWPmin <- apply(swp[, SoilLayers_RelevantToGermination], MARGIN = 1, FUN = function(x) all(x >= param$SWP_MinimumForGermination)) - swp.TopMean <- apply(swp[, SoilLayers_RelevantToGermination], MARGIN = 1, FUN = mean, na.rm = TRUE) - } - - #Put all limits together - Germination_WhileFavorable <- Germination_AtBelowTmax & Germination_AtAboveTmin & Germination_AtMoreThanTopSWPmin - - #---2. 
Time to germinate - #for each day with favorable conditions, determine whether period of favorable conditions (resumed or reset if broken) is long enough for successful completion of germination under current mean conditions - LengthDays_FavorableConditions <- unlist(lapply(RY.useyrs, FUN = calc_DurationFavorableConds, - consequences.unfavorable = param$GerminationPeriods_0ResetOr1Resume, - Germination_WhileFavorable = Germination_WhileFavorable, - RYyear_ForEachUsedDay = RYyear_ForEachUsedDay)) - Germination_TimeToGerminate <- unlist(lapply(RY.useyrs, FUN = calc_TimeToGerminate, - Germination_WhileFavorable = Germination_WhileFavorable, - LengthDays_FavorableConditions = LengthDays_FavorableConditions, - RYyear_ForEachUsedDay = RYyear_ForEachUsedDay, - soilTmeanSnow = soilTmeanSnow, - swp.TopMean = swp.TopMean, - TmeanJan = TmeanJan, param = param)) - - Germination_RestrictedByTimeToGerminate <- rep(FALSE, RY_N_usedy) - Germination_RestrictedByTimeToGerminate[Germination_WhileFavorable & is.na(Germination_TimeToGerminate)] <- TRUE - - #---3. Successful germinations - GerminationSuccess_Initiated <- !is.na(Germination_TimeToGerminate) - germ.starts <- which(GerminationSuccess_Initiated) - germ.durs <- Germination_TimeToGerminate[germ.starts] - 1 - if (param$GerminationPeriods_0ResetOr1Resume == 1) { - germ.durs <- germ.durs + germination_wait_times(Germination_TimeToGerminate, - LengthDays_FavorableConditions) - } - emergence.doys <- germ.starts + germ.durs #index of start of successful germinations + time to germinate (including wait time during unfavorable conditions if 'resume') - Germination_Emergence <- rep(FALSE, RY_N_usedy) - Germination_Emergence[emergence.doys] <- TRUE - Germination_Emergence.doys <- rep(NA, RY_N_usedy) - Germination_Emergence.doys[GerminationSuccess_Initiated] <- emergence.doys - - - #----SEEDLING SURVIVAL - - #---1. 
Seedling survival periods: - # mortality = !survival: days with conditions which kill a seedling, defined by upper/lower limits - # growth: days with conditions which allows a seedling to grow (here, roots), defined by upper/lower limits - SeedlingMortality_UnderneathSnowCover <- calc_SeedlingMortality(kill.conditions = (snow > param$SWE_MaximumForSeedlingGrowth), max.duration.before.kill = param$Days_SnowCover_MaximumForSeedlingSurvival) - SeedlingMortality_ByTmin <- calc_SeedlingMortality(kill.conditions = (airTminSnow < param$Temp_MinimumForSeedlingSurvival), max.duration.before.kill = 0) - SeedlingMortality_ByTmax <- calc_SeedlingMortality(kill.conditions = (airTmax > param$Temp_MaximumForSeedlingSurvival), max.duration.before.kill = 0) - SeedlingMortality_ByChronicSWPMax <- calc_SeedlingMortality(kill.conditions = (swp > param$SWP_ChronicMaximumForSeedlingSurvival), max.duration.before.kill = param$Days_ChronicMaximumForSeedlingSurvival) - SeedlingMortality_ByChronicSWPMin <- calc_SeedlingMortality(kill.conditions = (swp < param$SWP_ChronicMinimumForSeedlingSurvival), max.duration.before.kill = param$Days_ChronicMinimumForSeedlingSurvival) - SeedlingMortality_ByAcuteSWPMin <- calc_SeedlingMortality(kill.conditions = (swp < param$SWP_AcuteMinimumForSeedlingSurvival), max.duration.before.kill = 0) - - SeedlingGrowth_AbsenceOfSnowCover <- (snow <= param$SWE_MaximumForSeedlingGrowth) - SeedlingGrowth_AtAboveTmin <- (airTminSnow >= param$Temp_MinimumForSeedlingGrowth) - SeedlingGrowth_AtBelowTmax <- (airTmax <= param$Temp_MaximumForSeedlingGrowth) - - #---2. 
Grow and kill the seedlings - SeedlingSurvival_1stSeason <- Seedling_Starts <- Germination_Emergence #TRUE = seedling that germinated on that day and survives until end of season; FALSE = no germination or seedling dies during the first season - SeedlingSurvival_1stSeason[] <- SeedlingSurvival_1stSeason # deep copy because Rcpp-version of get_KilledBySoilLayers changes in place which has otherwise side effects on Seedling_Starts and Germination_Emergence - SeedlingMortality_CausesByYear <- matrix(0, nrow = length(RY.useyrs), ncol = 9) - colnames(SeedlingMortality_CausesByYear) <- paste0("Seedlings1stSeason.Mortality.", c("UnderneathSnowCover", "ByTmin", "ByTmax", "ByChronicSWPMax", "ByChronicSWPMin", "ByAcuteSWPMin", - "DuringStoppedGrowth.DueSnowCover", "DuringStoppedGrowth.DueTmin", "DuringStoppedGrowth.DueTmax")) - for (y in seq_along(RY.useyrs)) {#for each year - index.thisYear <- RYyear_ForEachUsedDay == RY.useyrs[y] - RYDoys_SeedlingStarts_ThisYear <- which(Seedling_Starts[index.thisYear]) - if (length(RYDoys_SeedlingStarts_ThisYear) > 0) {#if there are any germinations - #init values for this year - no.days <- sum(index.thisYear) - thisYear_SeedlingMortality_UnderneathSnowCover <- SeedlingMortality_UnderneathSnowCover[index.thisYear] - thisYear_SeedlingMortality_ByTmin <- SeedlingMortality_ByTmin[index.thisYear] - thisYear_SeedlingMortality_ByTmax <- SeedlingMortality_ByTmax[index.thisYear] - thisYear_SeedlingMortality_ByChronicSWPMax <- SeedlingMortality_ByChronicSWPMax[index.thisYear, , drop = FALSE] - thisYear_SeedlingMortality_ByChronicSWPMin <- SeedlingMortality_ByChronicSWPMin[index.thisYear, , drop = FALSE] - thisYear_SeedlingMortality_ByAcuteSWPMin <- SeedlingMortality_ByAcuteSWPMin[index.thisYear, , drop = FALSE] - thisYear_SeedlingGrowth_AbsenceOfSnowCover <- SeedlingGrowth_AbsenceOfSnowCover[index.thisYear] - thisYear_SeedlingGrowth_AtAboveTmin <- SeedlingGrowth_AtAboveTmin[index.thisYear] - thisYear_SeedlingGrowth_AtBelowTmax <- 
SeedlingGrowth_AtBelowTmax[index.thisYear] - - for (sg_RYdoy in RYDoys_SeedlingStarts_ThisYear) {#for each seedling indexed by day of germination - #init values for this seedling and season - temp <- seq_len(no.days) - index.thisSeedlingSeason <- temp[temp > sg_RYdoy] - killed_byCauses_onRYdoy <- rep(NA, times = 6) #book-keeping of mortality causes - names(killed_byCauses_onRYdoy) <- colnames(SeedlingMortality_CausesByYear)[1:6] - stopped_byCauses_onRYdoy <- rep(NA, times = 3) #book-keeping of causes why growth stopped - names(stopped_byCauses_onRYdoy) <- colnames(SeedlingMortality_CausesByYear)[7:9] - - #Establish days of growth ( = TRUE) and surviving, but no growth ( = FALSE) - thisSeedlingGrowing <- rep(TRUE, no.days) - if (sg_RYdoy > 1) - thisSeedlingGrowing[seq_len(sg_RYdoy - 1)] <- FALSE #seedling germinated on sg_RYdoy, hence it cannot grow before germination day - - #Check growth under above-ground conditions - #Snow cover - thisSeedlingGrowth_AbsenceOfSnowCover <- check_SuitableGrowthThisYear(favorable.conditions = thisSeedlingGrowing & thisYear_SeedlingGrowth_AbsenceOfSnowCover, consequences.unfavorable = param$SeedlingGrowth_0StopOr1Resume) - temp <- !thisSeedlingGrowth_AbsenceOfSnowCover[index.thisSeedlingSeason] - if (any(temp)) - stopped_byCauses_onRYdoy["Seedlings1stSeason.Mortality.DuringStoppedGrowth.DueSnowCover"] <- sg_RYdoy + which(temp)[1] - #Minimum temperature - thisSeedlingGrowth_AtAboveTmin <- check_SuitableGrowthThisYear(favorable.conditions = thisSeedlingGrowing & thisYear_SeedlingGrowth_AtAboveTmin, consequences.unfavorable = param$SeedlingGrowth_0StopOr1Resume) - temp <- !thisSeedlingGrowth_AtAboveTmin[index.thisSeedlingSeason] - if (any(temp)) - stopped_byCauses_onRYdoy["Seedlings1stSeason.Mortality.DuringStoppedGrowth.DueTmin"] <- sg_RYdoy + which(temp)[1] - #Maximum temperature - thisSeedlingGrowth_AtBelowTmax <- check_SuitableGrowthThisYear(favorable.conditions = thisSeedlingGrowing & thisYear_SeedlingGrowth_AtBelowTmax, 
consequences.unfavorable = param$SeedlingGrowth_0StopOr1Resume) - temp <- !thisSeedlingGrowth_AtBelowTmax[index.thisSeedlingSeason] - if (any(temp)) - stopped_byCauses_onRYdoy["Seedlings1stSeason.Mortality.DuringStoppedGrowth.DueTmax"] <- sg_RYdoy + which(temp)[1] - #Updated days of growth or surviving - thisSeedlingGrowing <- thisSeedlingGrowing & thisSeedlingGrowth_AbsenceOfSnowCover & thisSeedlingGrowth_AtAboveTmin & thisSeedlingGrowth_AtBelowTmax - thisSeedlingLivingButNotGrowing <- !thisSeedlingGrowing - if (sg_RYdoy > 1) - thisSeedlingLivingButNotGrowing[seq_len(sg_RYdoy - 1)] <- FALSE #seedling germinated on sg_RYdoy, hence it cannot live before germination day - - #Book-keeping survival under above-ground conditions - temp <- thisYear_SeedlingMortality_UnderneathSnowCover[index.thisSeedlingSeason] - if (any(temp)) - killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.UnderneathSnowCover"] <- sg_RYdoy + which(temp)[1] - 1 - temp <- thisYear_SeedlingMortality_ByTmin[index.thisSeedlingSeason] - if (any(temp)) - killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByTmin"] <- sg_RYdoy + which(temp)[1] - 1 - temp <- thisYear_SeedlingMortality_ByTmax[index.thisSeedlingSeason] - if (any(temp)) - killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByTmax"] <- sg_RYdoy + which(temp)[1] - 1 - - #If not killed (yet) then grow and check survival below-ground - if (all(is.na(killed_byCauses_onRYdoy))) { - #Grow: estimate rooting depth for this seedling for each day of this year - thisSeedling_thisYear_RootingDepth <- rep(NA, times = no.days) - temp <- sum(thisSeedlingGrowing) - if (temp > 0) { - thisSeedlingGrowing_AgeDays <- seq_len(temp) - thisSeedlingGrowing_RootingDepth <- SeedlingRootingDepth(thisSeedlingGrowing_AgeDays, param$Seedling_SoilDepth.PO, param$Seedling_SoilDepth.K, param$Seedling_SoilDepth.r) - thisSeedling_thisYear_RootingDepth[thisSeedlingGrowing] <- thisSeedlingGrowing_RootingDepth - if (any(thisSeedlingLivingButNotGrowing, na.rm = TRUE)) { - 
#for days when growth stopped then copy relevant soil depth - stopg <- addDepths <- rle(thisSeedlingLivingButNotGrowing) - RYDoys_stopg <- c(1, cumsum(stopg$lengths)) - for (p in seq_along(stopg$values)[stopg$values]) { - addDepths$values[p] <- if (is.na(thisSeedling_thisYear_RootingDepth[RYDoys_stopg[p]])) { - if (is.na(thisSeedling_thisYear_RootingDepth[1 + RYDoys_stopg[p+1]])) { - param$Seedling_SoilDepth.K - } else { - thisSeedling_thisYear_RootingDepth[1 + RYDoys_stopg[p+1]] - } - } else { - thisSeedling_thisYear_RootingDepth[RYDoys_stopg[p]] - } - } - RYDoys_addDepths <- inverse.rle(addDepths) - thisSeedling_thisYear_RootingDepth <- ifelse(RYDoys_addDepths > 0, RYDoys_addDepths, thisSeedling_thisYear_RootingDepth) - } - - } else { - thisSeedling_thisYear_RootingDepth[thisSeedlingLivingButNotGrowing] <- param$Seedling_SoilDepth.PO/10 - } - thisSeedling_thisYear_RootingSoilLayers <- SoilLayer_at_SoilDepth(thisSeedling_thisYear_RootingDepth, layers_depth) - - #Check survival under chronic SWPMax - thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMax <- get_KilledBySoilLayers(thisSeedling_thisYear_RootingSoilLayers, thisYear_SeedlingMortality_ByChronicSWPMax) - temp <- thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMax[index.thisSeedlingSeason] - if (any(temp)) - killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByChronicSWPMax"] <- sg_RYdoy + which(temp)[1] - 1 - #Check survival under chronic SWPMin - thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMin <- get_KilledBySoilLayers(thisSeedling_thisYear_RootingSoilLayers, thisYear_SeedlingMortality_ByChronicSWPMin) - temp <- thisSeedling_thisYear_SeedlingMortality_ByChronicSWPMin[index.thisSeedlingSeason] - if (any(temp)) - killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByChronicSWPMin"] <- sg_RYdoy + which(temp)[1] - 1 - #Check survival under acute SWPMin - thisSeedling_thisYear_SeedlingMortality_ByAcuteSWPMin <- get_KilledBySoilLayers(thisSeedling_thisYear_RootingSoilLayers, 
thisYear_SeedlingMortality_ByAcuteSWPMin) - temp <- thisSeedling_thisYear_SeedlingMortality_ByAcuteSWPMin[index.thisSeedlingSeason] - if (any(temp)) - killed_byCauses_onRYdoy["Seedlings1stSeason.Mortality.ByAcuteSWPMin"] <- sg_RYdoy + which(temp)[1] - 1 - } + request_otrace_GISSM <- + as.integer(any(prj_todos[["otrace"]] == "dailyRegeneration_GISSM")) - #If killed then establish which factor killed first and if and how growth was stopped before kill - if (any(!is.na(killed_byCauses_onRYdoy))) { - kill.factor <- which.min(killed_byCauses_onRYdoy) - SeedlingMortality_CausesByYear[y, kill.factor] <- SeedlingMortality_CausesByYear[y, kill.factor] + 1 - stop.factor <- which.min(stopped_byCauses_onRYdoy) - if (any(!is.na(stopped_byCauses_onRYdoy)) && - killed_byCauses_onRYdoy[kill.factor] > stopped_byCauses_onRYdoy[stop.factor]) { - SeedlingMortality_CausesByYear[y, 6+stop.factor] <- SeedlingMortality_CausesByYear[y, 6+stop.factor] + 1 - } + # Loop through each species + for (sp in seq_len(opt_agg[["GISSM_species_No"]])) { - SeedlingSurvival_1stSeason <- kill_seedling( - SeedlingSurvival_1stSeason, RYyear_ForEachUsedDay, - RY.useyrs, y, sg_RYdoy) - } - } - } else {#no germination during this year -> no seedlings to grow or die - SeedlingMortality_CausesByYear[y, ] <- NA - } - }#end of year loop of seedling growth - - #---Aggregate output - dat_gissm1 <- cbind(Germination_Emergence, SeedlingSurvival_1stSeason) - dat_gissm2 <- cbind(!Germination_AtBelowTmax, !Germination_AtAboveTmin, - !Germination_AtMoreThanTopSWPmin, !Germination_WhileFavorable, - Germination_RestrictedByTimeToGerminate) - - #Fraction of years with success - index_RYuseyr <- unique(year_ForEachUsedRYDay) %in% isim_time$useyr - res1.yr_v0 <- stats::aggregate(dat_gissm1, by = list(year_ForEachUsedRYDay), FUN = sum) - res1.yr <- res1.yr_v0[index_RYuseyr, -1] - stemp <- res1.yr > 0 - resMeans[nv:(nv+1)] <- apply(stemp, 2, mean, na.rm = TRUE) - resSDs[nv:(nv+1)] <- apply(stemp, 2, stats::sd, na.rm = TRUE) - 
#Periods with no successes - rleGerm <- rle(stemp[, 1]) - if (any(!rleGerm$values)) - resMeans[(nv+2):(nv+4)] <- stats::quantile(rleGerm$lengths[!rleGerm$values], - probs = c(0.05, 0.5, 0.95), type = 7) - rleSling <- rle(stemp[, 2]) - if (any(!rleSling$values)) - resMeans[(nv+5):(nv+7)] <- stats::quantile(rleSling$lengths[!rleSling$values], - probs = c(0.05, 0.5, 0.95), type = 7) - #Mean number of days per year with success - resMeans[(nv+8):(nv+9)] <- apply(res1.yr, 2, mean) - resSDs[(nv+8):(nv+9)] <- apply(res1.yr, 2, stats::sd) - #Days of year (in normal count) of most frequent successes among years: #toDoy <- function(x) sort(ifelse((temp <- x+Doy_SeedDispersalStart-1) > 365, temp-365, temp)) #convert to normal doys - res1.dy <- stats::aggregate(dat_gissm1, by = list(doy_ForEachUsedRYDay), FUN = sum) - resMeans[(nv+10):(nv+15)] <- get.DoyMostFrequentSuccesses(res1.dy, dat_gissm1) - #Mean number of days when germination is restricted due to conditions - res2.yr_v0 <- stats::aggregate(dat_gissm2, by = list(year_ForEachUsedRYDay), sum) - res2.yr <- res2.yr_v0[index_RYuseyr, -1] - resMeans[(nv+16):(nv+20)] <- apply(res2.yr, 2, mean) - resSDs[(nv+16):(nv+20)] <- apply(res2.yr, 2, stats::sd) - #Mean time to germinate in days - res3.yr_v0 <- tapply(Germination_TimeToGerminate, year_ForEachUsedRYDay, mean, na.rm = TRUE) - res3.yr <- res3.yr_v0[index_RYuseyr] - resMeans[nv+21] <- mean(res3.yr, na.rm = TRUE) - resSDs[nv+21] <- stats::sd(res3.yr, na.rm = TRUE) - #Mean number of days per year of different types of mortalities - resMeans[(nv+22):(nv+30)] <- apply(SeedlingMortality_CausesByYear, 2, mean, na.rm = TRUE) #if value == NA, then no germinations that year - resSDs[(nv+22):(nv+30)] <- apply(SeedlingMortality_CausesByYear, 2, stats::sd, na.rm = TRUE) #if value == NA, then no germinations that year + GISSM <- rSW2funs::calc_GISSM( + x = sim_vals_daily, + soillayer_depths_cm = layers_depth, + params = as.list(opt_agg[["GISSM_params"]][, sp]), + has_soil_temperature = 
isTRUE(!is_SOILTEMP_INSTABLE[sc]), + simTime1 = isim_time[[itime]], + simTime2 = simTime2[[itime]], + debug_output = 1 + request_otrace_GISSM, + path = project_paths[["dir_out_traces"]], + filename_tag = paste0( + "Scenario", + formatC(sc - 1, width = 2, format = "d", flag = "0"), "_", + sim_scens[["id"]][sc], "_", + i_label, "_", + colnames(opt_agg[["GISSM_params"]])[sp], + "_Regeneration" + ) + ) + + # Frequency of years with success + resMeans[nv:(nv+1)] <- colMeans( + x = GISSM[["outcome"]][, -1], + na.rm = TRUE + ) + resSDs[nv:(nv+1)] <- apply( + X = GISSM[["outcome"]][, -1], + MARGIN = 2, + FUN = stats::sd, + na.rm = TRUE + ) + + # Periods with no successes + resMeans[(nv+2):(nv+4)] <- stats::quantile( + GISSM[["nogermination_periods_yrs"]], + probs = c(0.05, 0.5, 0.95), + type = 7 + ) + resMeans[(nv+5):(nv+7)] <- stats::quantile( + GISSM[["noseedlings_periods_yrs"]], + probs = c(0.05, 0.5, 0.95), + type = 7 + ) + + # Mean number of days per year with success + resMeans[(nv+8):(nv+9)] <- colMeans(GISSM[["successes_days"]]) + resSDs[(nv+8):(nv+9)] <- apply( + X = GISSM[["successes_days"]], + MARGIN = 2, + FUN = stats::sd + ) + + # Days of year of most frequent successes among years + resMeans[(nv+10):(nv+15)] <- GISSM[["success_mostfrequent_doy"]] + + # Mean number of days without germination + resMeans[(nv+16):(nv+20)] <- colMeans(GISSM[["nogermination_days"]]) + resSDs[(nv+16):(nv+20)] <- apply( + X = GISSM[["nogermination_days"]], + MARGIN = 2, + FUN = stats::sd + ) + + # Mean time to germinate in days + resMeans[nv+21] <- mean( + GISSM[["time_to_germinate_days"]], + na.rm = TRUE + ) + resSDs[nv+21] <- stats::sd( + GISSM[["time_to_germinate_days"]], + na.rm = TRUE + ) + + # Mean number of days per year of different types of mortalities + # if value == NA, then no germinations that year + resMeans[(nv+22):(nv+30)] <- colMeans( + GISSM[["mortality_causes"]], + na.rm = TRUE + ) + resSDs[(nv+22):(nv+30)] <- apply( + GISSM[["mortality_causes"]], + MARGIN = 2, + 
FUN = stats::sd, + na.rm = TRUE + ) nv <- nv+31 - - #---Aggregate time series output - if (any(prj_todos[["otrace"]] == "dailyRegeneration_GISSM")) { - #Table with data for every year - res1.yr.doy <- t(simplify2array(by(dat_gissm1, INDICES = year_ForEachUsedRYDay, - FUN = function(x) get.DoyMostFrequentSuccesses(x, dat_gissm1))))[isim_time$index.useyr, , drop = FALSE] - - res.yr <- data.frame(data.frame(res1.yr_v0, res2.yr_v0[, -1], res3.yr_v0)[index_RYuseyr, ], SeedlingMortality_CausesByYear, res1.yr.doy) - temp.header2 <- c("DaysWith_GerminationSuccess", "DaysWith_SeedlingSurvival1stSeason", - "Days_GerminationRestrictedByTmax", "Days_GerminationRestrictedByTmin", - "Days_GerminationRestrictedBySWPmin", "Days_GerminationRestrictedByAnyCondition", - "Days_GerminationRestrictedByTimeToGerminate", "MeanDays_TimeToGerminate", - paste("Days", colnames(SeedlingMortality_CausesByYear), sep = "_"), - paste(rep(c("Start90%", "Median", "End90%"), times = 2), - rep(c("DoyMostFrequent_GerminationSuccess", "DoyMostFrequent_SeedlingSurvival1stSeason"), - each = 3), sep = "_")) - colnames(res.yr) <- c("Year", temp.header2) - utils::write.csv(res.yr, file = file.path(project_paths[["dir_out_traces"]], - paste0("Scenario", formatC(sc-1, width = 2, format = "d", flag = "0"), "_", - sim_scens[["id"]][sc], "_", i_label, "_", colnames(opt_agg[["GISSM_params"]])[sp], - "_Regeneration.csv"))) - - #Plot with data for every day - grDevices::pdf(file = file.path(project_paths[["dir_out_traces"]], paste0("Scenario", - formatC(sc-1, width = 2, format = "d", flag = "0"), "_", sim_scens[["id"]][sc], - "_", i_label, "_", colnames(opt_agg[["GISSM_params"]])[sp], - "_Regeneration.pdf")), - width = max(4, 2*length(isim_time$index.useyr)), height = 4.5) - - op <- graphics::par(mar = c(1, 3, 0.1, 0.1), mgp = c(2, 0.5, 0), las = 1) - ylim <- c(-17.5, max(max(snow, na.rm = TRUE), max(Germination_TimeToGerminate, na.rm = TRUE))) - p.cex <- max(0.5, min(1, exp(-0.01 * ylim[2]) + 0.5)) - xp <- 
1:length(snow) + Doy_SeedDispersalStart-1 - - graphics::plot(xp, snow, type = "l", ylim = ylim, xlab = "Year", ylab = "SWE (mm), Time to germinate (days)", axes = FALSE) - graphics::axis(1, pos = ylim[1], at = 365*(1:(length(isim_time$index.useyr))), labels = isim_time$useyr) - graphics::axis(2, pos = graphics::par("usr")[1], at = (temp <- graphics::axTicks(2))[temp >= 0]) - graphics::lines(xp, Germination_TimeToGerminate, col = "red", type = "b", pch = 19, cex = p.cex/5) - graphics::points(xp, ifelse(SeedlingSurvival_1stSeason, 0, NA), col = "green", pch = 19) - x0.temp <- (temp <- data.frame(xp, ifelse(GerminationSuccess_Initiated, -7.5, NA)))[stats::complete.cases(temp), ] - x1.temp <- (temp <- data.frame(Germination_Emergence.doys + Doy_SeedDispersalStart-1, ifelse(GerminationSuccess_Initiated, -2.5, NA)))[stats::complete.cases(temp), ] - graphics::segments(x0 = x0.temp[, 1], y0 = x0.temp[, 2], x1 = x1.temp[, 1], y1 = x1.temp[, 2], col = "blue") - graphics::points(xp, ifelse(Germination_RestrictedByTimeToGerminate, -10, NA), col = "black", pch = 4, cex = p.cex) - graphics::points(xp, ifelse(!Germination_AtAboveTmin, -12.5, NA), col = grDevices::gray(0.3), pch = 4, cex = p.cex) - graphics::points(xp, ifelse(!Germination_AtMoreThanTopSWPmin, -15, NA), col = grDevices::gray(0.7), pch = 4, cex = p.cex) - graphics::mtext(i_label) - graphics::legend("topright", legend = c("SWE", "Time to germinate", "Seedling survival", "Emergence", "Too short favorable conditions", "Too cold", "Too dry"), - bty = "n", lty = c(1, 1, -1, 1, -1, -1, -1), pch = c(-1, -1, 19, -1, 4, 4, 4), col = c("black", "red", "green", "blue", "black", grDevices::gray(0.3), grDevices::gray(0.7)), merge = TRUE) - graphics::par(op) - grDevices::dev.off() - } - - #Prepare next species - prev.Doy_SeedDispersalStart <- Doy_SeedDispersalStart - }#end of species loop + } # end of species loop print_debugN(opt_verbosity, tag_simpidfid, prj_todos, nv - nv0, "dailyRegeneration_GISSM") } @@ -4911,10 +4902,10 @@ 
do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, } for (al in 1:aggLs_no) { if (length(aggLs[[al]]) > 1) { - agg.dat[[al]] <- apply(temp1[isim_time$index.usedy, 2 + aggLs[[al]]], 1, agg.agg, w = agg.w[aggLs[[al]]]) + agg.dat[[al]] <- apply(temp1[isim_time[[itime]]$index.usedy, 2 + aggLs[[al]]], 1, agg.agg, w = agg.w[aggLs[[al]]]) } else { if (!(is.null(aggLs[[al]]) | length(aggLs[[al]]) == 0)) { - agg.dat[[al]] <- temp1[isim_time$index.usedy, 2 + aggLs[[al]]] + agg.dat[[al]] <- temp1[isim_time[[itime]]$index.usedy, 2 + aggLs[[al]]] } } } @@ -4933,16 +4924,16 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, TemperatureMax = 3, Runoff = 4:5, Runon = 6) agg.dat[[1]] <- if (length(agg.column) > 1) { - apply(temp1[isim_time$index.usedy, agg.column], 1, sum) + apply(temp1[isim_time[[itime]]$index.usedy, agg.column], 1, sum) } else { - temp1[isim_time$index.usedy, agg.column] + temp1[isim_time[[itime]]$index.usedy, agg.column] } } if (agg.resp == "EvaporationTotal") { if ((colN <- ncol(temp1)) > 3) { - agg.dat[[1]] <- apply(temp1[isim_time$index.usedy, 3:colN], 1, sum) + temp2[isim_time$index.usedy, 3] + agg.dat[[1]] <- apply(temp1[isim_time[[itime]]$index.usedy, 3:colN], 1, sum) + temp2[isim_time[[itime]]$index.usedy, 3] } else { - agg.dat[[1]] <- temp1[isim_time$index.usedy, 3] + temp2[isim_time$index.usedy, 3] + agg.dat[[1]] <- temp1[isim_time[[itime]]$index.usedy, 3] + temp2[isim_time[[itime]]$index.usedy, 3] } } } @@ -4951,12 +4942,12 @@ do_OneSite <- function(i_sim, i_SWRunInformation, i_sw_input_soillayers, #calculate mean/stats::sd daily values for (al in seq_len(agg.no)) { ir <- (al - 1) * 366 + 1:366 - res.dailyMean[ir] <- stats::aggregate(scaler * agg.dat[[al]], by = list(simTime2$doy_ForEachUsedDay), FUN = mean)[, 2] + res.dailyMean[ir] <- stats::aggregate(scaler * agg.dat[[al]], by = list(simTime2[[itime]]$doy_ForEachUsedDay), FUN = mean)[, 2] if (agg.resp == "SWPmatric") { ##post-aggregate calculation of 
SWP: convert VWC to SWP res.dailyMean[ir] <- rSOILWAT2::VWCtoSWP(res.dailyMean[ir], textureDAgg$sand[al], textureDAgg$clay[al]) res.dailySD[ir] <- 0 #was NA now 0 } else { - res.dailySD[ir] <- stats::aggregate(scaler * agg.dat[[al]], by = list(simTime2$doy_ForEachUsedDay), FUN = stats::sd)[, 2] + res.dailySD[ir] <- stats::aggregate(scaler * agg.dat[[al]], by = list(simTime2[[itime]]$doy_ForEachUsedDay), FUN = stats::sd)[, 2] } } @@ -5145,17 +5136,22 @@ run_simulation_experiment <- function(sim_size, SFSW2_prj_inputs, MoreArgs) { #--- prepare the temporary output databases - make_dbTempOut(dbOutput = MoreArgs[["fnames_out"]][["dbOutput"]], + make_dbTempOut( + dbOutput = MoreArgs[["fnames_out"]][["dbOutput"]], dir_out_temp = MoreArgs[["project_paths"]][["dir_out_temp"]], fields = MoreArgs[["prj_todos"]][["aon_fields"]], adaily = MoreArgs[["prj_todos"]][["adaily"]], - verbose = MoreArgs[["opt_verbosity"]][["verbose"]]) + verbose = MoreArgs[["opt_verbosity"]][["verbose"]] + ) #--- set dbWork as modified if not being kept up-to-date if (!MoreArgs[["opt_behave"]][["keep_dbWork_updated"]]) { - dbWork_update_status(MoreArgs[["project_paths"]][["dir_out"]], - status = TRUE, verbose = MoreArgs[["opt_verbosity"]][["print.debug"]]) + dbWork_update_status( + path = MoreArgs[["project_paths"]][["dir_out"]], + status = TRUE, + verbose = MoreArgs[["opt_verbosity"]][["print.debug"]] + ) } @@ -5346,10 +5342,12 @@ run_simulation_experiment <- function(sim_size, SFSW2_prj_inputs, MoreArgs) { rSOILWAT2::dbW_setConnection(MoreArgs[["fnames_in"]][["fdbWeather"]]) on.exit(rSOILWAT2::dbW_disconnectConnection(), add = TRUE) - runs.completed <- lapply(seq_along(MoreArgs[["sim_size"]][["runIDs_todo"]]), + runs.completed <- lapply( + X = seq_along(MoreArgs[["sim_size"]][["runIDs_todo"]]), function(i) { i_site <- i_sites[i] - do_OneSite(i_sim = MoreArgs[["sim_size"]][["runIDs_todo"]][i], + do_OneSite( + i_sim = MoreArgs[["sim_size"]][["runIDs_todo"]][i], i_SWRunInformation = 
SFSW2_prj_inputs[["SWRunInformation"]][i_site, ], i_sw_input_soillayers = SFSW2_prj_inputs[["sw_input_soillayers"]][i_site, ], i_sw_input_treatments = SFSW2_prj_inputs[["sw_input_treatments"]][i_site, ], @@ -5360,7 +5358,8 @@ run_simulation_experiment <- function(sim_size, SFSW2_prj_inputs, MoreArgs) { i_sw_input_weather = SFSW2_prj_inputs[["sw_input_weather"]][i_site, ], i_sw_input_climscen = SFSW2_prj_inputs[["sw_input_climscen"]][i_site, ], i_sw_input_climscen_values = SFSW2_prj_inputs[["sw_input_climscen_values"]][i_site, ], - SimParams = MoreArgs) + SimParams = MoreArgs + ) } ) runs.completed <- length(unlist(runs.completed)) diff --git a/R/Soils_Functions.R b/R/Soils_Functions.R index 6c3107fe..1802e7c8 100644 --- a/R/Soils_Functions.R +++ b/R/Soils_Functions.R @@ -119,8 +119,8 @@ assign_aggregation_soillayers <- function(layers_depth, daily_lyr_agg) { init_soiltemperature <- function(layers_depth, lower.Tdepth, soilTupper, soilTlower) { - sl <- c(0, lower.Tdepth) - st <- c(soilTupper, soilTlower) + sl <- c(0, lower.Tdepth) # nolint + st <- c(soilTupper, soilTlower) # nolint stats::predict(stats::lm(st ~ sl), data.frame(sl = layers_depth)) } diff --git a/R/Testproject_Functions.R b/R/Testproject_Functions.R index 2d34a0d0..0a9f30a8 100644 --- a/R/Testproject_Functions.R +++ b/R/Testproject_Functions.R @@ -30,6 +30,17 @@ #' test and reference databases including the output of a call to #' \code{\link{compare_test_output}}} #' } +#' +#' @examples +#' \dontrun{ +#' # Run test project 4 inside development version of package +#' # Assume that working directory is `tests/test_data/TestPrj4/` +#' if (file.exists("SFSW2_project_code.R")) { +#' res <- run_test_projects(dir_tests = ".", delete_output = TRUE) +#' } +#' } +#' + #' @export run_test_projects <- function(dir_tests, dir_prj_tests = NULL, dir_ref = NULL, dir_prev = NULL, which_tests_torun = seq_along(dir_tests), @@ -510,6 +521,9 @@ check_aggregated_output <- function(x) { #' #' # Compare output database with 
reference database #' comp <- compare_test_output(".", dir_ref = "../0_ReferenceOutput/") +#' +#' # Clean up +#' delete_test_output(".") #' } #' } #' diff --git a/R/Time_SimulationWorld.R b/R/Time_SimulationWorld.R index d875d976..9709b520 100644 --- a/R/Time_SimulationWorld.R +++ b/R/Time_SimulationWorld.R @@ -2,9 +2,6 @@ -isLeapYear <- rSOILWAT2:::isLeapYear - - #' Determine maximal span of simulation years across all experimental and design #' treatments #' @@ -76,8 +73,9 @@ get_simulation_time <- function(st, SFSW2_prj_inputs) { #' @param A named list, i.e., the updated version of \code{sim_time}. #' #' @seealso \code{\link[rSOILWAT2]{setup_time_simulation_run}} -setup_time_simulation_project <- function(sim_time, add_st2 = FALSE, - adjust_NS = FALSE, use_doy_range = FALSE, doy_ranges = list()) { +setup_time_simulation_project <- function(sim_time, is_idem = FALSE, + add_st2 = FALSE, adjust_NS = FALSE, + use_doy_range = FALSE, doy_ranges = list()) { sim_time <- rSOILWAT2::setup_time_simulation_run(sim_time = sim_time) @@ -98,6 +96,18 @@ setup_time_simulation_project <- function(sim_time, add_st2 = FALSE, stop("'setup_time_simulation_project': incorrect format of 'future_yrs'") } + + # Add "dall" to `future_yrs` data.frame + if (is_idem) { + tmp <- sim_time[["future_yrs"]] + tmp0 <- as.data.frame(matrix(NA, nrow = 1, ncol = ncol(tmp), + dimnames = list("dall", colnames(tmp)) + )) + + sim_time[["future_yrs"]] <- rbind(tmp0, tmp) + } + + sim_time[["future_N"]] <- dim(sim_time[["future_yrs"]])[1] if (add_st2) { diff --git a/R/WeatherDB.R b/R/WeatherDB.R index 213e28d1..a957ecd5 100644 --- a/R/WeatherDB.R +++ b/R/WeatherDB.R @@ -125,7 +125,8 @@ make_dbW <- function(SFSW2_prj_meta, SWRunInformation, opt_parallel, opt_chunks, site_data = site_data, Scenarios = SFSW2_prj_meta[["sim_scens"]][["id"]], compression_type = - SFSW2_prj_meta[["opt_input"]][["set_dbW_compresstype"]])) + SFSW2_prj_meta[["opt_input"]][["set_dbW_compresstype"]] + )) do_add <- TRUE 
add_runIDs_sites <- temp_runIDs_sites } @@ -219,6 +220,7 @@ make_dbW <- function(SFSW2_prj_meta, SWRunInformation, opt_parallel, opt_chunks, ids_NRCan_extraction <- which(dw_source == "NRCan_10km_Canada") ids_NCEPCFSR_extraction <- which(dw_source == "NCEPCFSR_Global") ids_Livneh_extraction <- which(dw_source == "Livneh2013_NorthAmerica") + ids_gridMET_extraction <- which(dw_source == "gridMET_NorthAmerica") # Weather extraction with parallel support if (length(ids_NRCan_extraction) > 0 || @@ -289,6 +291,21 @@ make_dbW <- function(SFSW2_prj_meta, SWRunInformation, opt_parallel, opt_chunks, verbose = verbose) } + if (length(ids_gridMET_extraction) > 0) { + irow <- add_runIDs_sites[ids_gridMET_extraction] + extract_daily_weather_from_gridMET( + dir_data = SFSW2_prj_meta[["project_paths"]][["dir_gridMET"]], + site_ids = SWRunInformation$site_id[irow], + site_ids_by_dbW = add_siteIDs_by_dbW[ids_gridMET_extraction], + coords = SWRunInformation[irow, c("X_WGS84", "Y_WGS84"), drop = FALSE], + start_year = SFSW2_prj_meta[["sim_time"]][["overall_simstartyr"]], + end_year = SFSW2_prj_meta[["sim_time"]][["overall_endyr"]], + comp_type = SFSW2_prj_meta[["opt_input"]][["set_dbW_compresstype"]], + dbW_digits = SFSW2_prj_meta[["opt_sim"]][["dbW_digits"]], + verbose = verbose + ) + } + if (length(ids_NCEPCFSR_extraction) > 0) { if (is.null(SFSW2_prj_meta[["prepd_CFSR"]]) || inherits(SFSW2_prj_meta[["prepd_CFSR"]], "try-error") || @@ -369,9 +386,16 @@ prepare_NCEPCFSR_extraction <- function(dir_in, dir.cfsr.data, #Check for wgrib2 (http://www.cpc.ncep.noaa.gov/products/wesley/wgrib2/) if (!file.exists(wgrib2 <- file.path(dir_ex_cfsr, "wgrib2"))) { - path_wgrib2 <- if (nchar(temp <- Sys.which("wgrib2")) > 0) { - temp - } else if (file.exists(temp <- "/opt/local/bin/wgrib2")) temp else "" + tmp <- Sys.which("wgrib2") + path_wgrib2 <- if (nchar(tmp) > 0) { + tmp + } else { + tmp <- system2( + command = "command", + args = paste("-v", shQuote("wgrib2")) + ) + if (nchar(tmp) > 0) tmp 
else "" + } stopifnot(nchar(path_wgrib2) > 0) file.copy(from = path_wgrib2, to = wgrib2) } @@ -702,7 +726,7 @@ get_DayMet_NorthAmerica <- function(dir_data, cellID, Xdm_WGS84, Ydm_WGS84, irow <- data_all["Year"] == years[y] data_sw[1:365, req_cols] <- data_all[irow, req_cols] - if (isLeapYear(years[y])) { + if (rSW2utils::isLeapYear(years[y])) { doys <- 1:366 data_sw[366, ] <- c(366, data_sw[365, -1]) } else { @@ -944,7 +968,8 @@ ExtractGriddedDailyWeatherFromNRCan_10km_Canada <- function(dir_data, site_ids, NRC_days <- list.files(path = file.path(dir_temp, NRC_use_years[iy]), full.names = TRUE) ndays <- length(NRC_days) / length(vars) - stopifnot(ndays == if (isLeapYear(NRC_use_years[iy])) 366 else 365) + tmp <- if (rSW2utils::isLeapYear(NRC_use_years[iy])) 366 else 365 + stopifnot(ndays == tmp) # Stack rasters for each day and extract data NRC_stack <- raster::stack(NRC_days, RAT = FALSE, quick = TRUE) @@ -979,7 +1004,7 @@ ExtractGriddedDailyWeatherFromNRCan_10km_Canada <- function(dir_data, site_ids, weatherData <- list() for (iy in seq_along(NRC_target_years)) { - doys <- if (isLeapYear(NRC_use_years[iy])) 1:366 else 1:365 + doys <- if (rSW2utils::isLeapYear(NRC_use_years[iy])) 1:366 else 1:365 #DOY Tmax(C) Tmin(C) PPT(cm) [ppt was converted from mm to cm] data_sw <- cbind(doys, NRC_weather[i, doys, iy, ]) colnames(data_sw) <- c("DOY", "Tmax_C", "Tmin_C", "PPT_cm") @@ -1547,7 +1572,11 @@ if (!interactive()) { tmin <- round(tmin, dbW_digits) # Add data to global data array - ids <- if (isLeapYear(seq_years[i])) seq_len(366) else seq_len(365) + ids <- if (rSW2utils::isLeapYear(seq_years[i])) { + seq_len(366) + } else { + seq_len(365) + } data_sw[, ids, 1, i] <- tmax[, ids] data_sw[, ids, 2, i] <- tmin[, ids] data_sw[, ids, 3, i] <- prec[, ids] @@ -1561,7 +1590,11 @@ if (!interactive()) { for (i in seq_len(site_length)) { weather_data <- list() for (k in seq_len(len_years)) { - doys <- if (isLeapYear(seq_years[k])) seq_len(366) else seq_len(365) + doys <- if 
(rSW2utils::isLeapYear(seq_years[k])) { + seq_len(366) + } else { + seq_len(365) + } out <- cbind(doys, data_sw[i, doys, , k]) colnames(out) <- c("DOY", "Tmax_C", "Tmin_C", "PPT_cm") weather_data[[k]] <- @@ -1613,6 +1646,290 @@ if (!interactive()) { ######################################################## + +#' Description of the \var{gridMET} dataset +#' @return A named list. +#' @export +gridMET_metadata <- function() { + list( + # order of variables expected by SOILWAT2 + vars = c("tmmx", "tmmn", "pr"), + # convert to units expected by SOILWAT2: + # K -> C, K -> C, mm / day -> cm / day + funits = list( + function(x) x - 273.15, + function(x) x - 273.15, + function(x) x / 10 + ), + start_year = 1979, + end_year = 2019 # updated yearly + ) +} + +#' List \var{gridMET} data files available on disk +find_gridMET_files <- function(dir_data, vars = gridMET_metadata()[["vars"]]) { + x <- lapply(vars, function(var) { + list.files(dir_data, + pattern = paste0("^(", var, ").+(\\.nc)$"), + full.names = TRUE + ) + }) + names(x) <- vars + + x +} + +#' Prepare script to download all or missing files of the \var{gridMET} dataset +#' +#' @param dir_data A character string. Path to where the \var{gridMET} dataset +#' is/will be stored on disk. +#' @param desc A named list. Describing the \var{gridMET} dataset. +#' +#' @return If all files are available, then a message is printed to the +#' R console with that information. Otherwise, the message points to a +#' \var{.sh} script that was created at \code{dir_data} which must be run +#' separately to download the missing files. +#' +#' @section Notes: The download scripts use \var{wget}, i.e., it must be +#' available on your system to work. The scripts are based on the dataset +#' repository setup at \url{http://www.climatologylab.org/gridmet.html} as of +#' Nov 2019. This dataset has also been know as \var{METDATA}. +#' +#' @references Abatzoglou, J. T. 
(2013) Development of gridded surface +#' meteorological data for ecological applications and modelling. +#' \var{Int. J. Climatol.}, 33: 121–131. +#' +#' @examples +#' if (exists("SFSW2_prj_meta")) { +#' gridMET_download_and_check( +#' dir_data = SFSW2_prj_meta[["project_paths"]][["dir_gridMET"]] +#' ) +#' } +#' +#' @export +gridMET_download_and_check <- function(dir_data, desc = gridMET_metadata()) { + dir.create(dir_data, recursive = TRUE, showWarnings = FALSE) + + years <- seq(desc[["start_year"]], desc[["end_year"]]) + + #--- Check which files are missing + fnames_gridMET <- sapply(desc[["vars"]], + function(var) paste0(var, "_", years, ".nc") + ) + + is_missing <- matrix( + data = !file.exists(file.path(dir_data, fnames_gridMET)), + nrow = length(years), + ncol = length(desc[["vars"]]) + ) + + + #--- Create script to download files if any are missing + if (any(is_missing)) { + metdata_bash <- "#!/bin/bash" + + for (iv in seq_along(desc[["vars"]])) if (any(is_missing[, iv])) { + metdata_bash <- c(metdata_bash, + paste0( + "wget -nc -c -nd ", + "http://www.northwestknowledge.net/metdata/data/", + fnames_gridMET[is_missing[, iv], iv] + ) + ) + } + + fname_bash <- file.path(dir_data, + paste0("metdata_wget_", format(Sys.time(), "%Y%m%d%H%M%S"), ".sh") + ) + + writeLines(metdata_bash, con = fname_bash) + + stop("Please execute script ", + shQuote(basename(fname_bash)), + " to download missing gridMET data." + ) + + } else { + print("All gridMET files are available.") + } +} + + +#' Extract daily gridded weather data from the \var{gridMET} dataset +#' +#' Extracts daily gridded weather data, including precipitation, +#' maximum temperature and minimum temperature from the \var{gridMET} +#' (Abatzoglou 2013) database: a 1/24 degree gridded weather database that +#' contains data for the years 1979 - yesterday. +#' +#' @section Details: Run the function \code{\link{gridMET_download_and_check}} +#' to download and check the dataset. +#' +#' @references Abatzoglou, J. 
T. (2013) Development of gridded surface +#' meteorological data for ecological applications and modelling. +#' \var{Int. J. Climatol.}, 33: 121–131. +#' +#' @param dir_data A character string. The directory containing the +#' \var{gridMET} dataset files. +#' @param site_ids An integer vector. The indices of sites for which to extract +#' \var{gridMET} weather data. +#' @param coords A two-dimensional numerical object. The coordinates for each +#' site in \var{WGS84}. +#' @param start_year An integer value. The first calendar year for which to +#' extract daily weather data. +#' @param end_year An integer value. The last calendar year for which to +#' extract daily weather data. +#' @param comp_type A character string. The compression type used by the +#' weather database. +#' @param dbW_digits An integer value. The number of digits to which the +#' daily weather values are rounded to. +#' @param verbose A logical value. +#' +#' @export +extract_daily_weather_from_gridMET <- function(dir_data, site_ids, + site_ids_by_dbW, coords, start_year, end_year, comp_type = "gzip", + dbW_digits = 2, verbose = FALSE) { + + if (verbose) { + t1 <- Sys.time() + temp_call <- shQuote(match.call()[1]) + print(paste0("rSFSW2's ", temp_call, ": started at ", t1)) + + on.exit({ + print(paste0("rSFSW2's ", temp_call, ": ended after ", + round(difftime(Sys.time(), t1, units = "secs"), 2), " s")) + cat("\n")}, add = TRUE) + } else { + temp_call <- NULL + } + + # gridMET metadata + desc <- gridMET_metadata() + + # Check requested years + year_range <- rSOILWAT2::update_requested_years(start_year, end_year, + has_start_year = desc[["start_year"]], + has_end_year = desc[["end_year"]], + temp_call = temp_call, + verbose = verbose + ) + + # List gridMET data files + fnames_gridMET <- find_gridMET_files(dir_data, desc[["vars"]]) + + # Create coordinates as spatial points for extraction with raster layers + prj_geographicWGS84 <- sp::CRS(paste("+proj=longlat +ellps=WGS84", + "+datum=WGS84 
+no_defs +towgs84=0,0,0")) + sp_locs <- sp::SpatialPoints(coords = coords, + proj4string = prj_geographicWGS84 + ) + + # Create variables and containers for extraction + seq_years <- seq(year_range[["start_year"]], year_range[["end_year"]]) + seq_leaps <- rSW2utils::isLeapYear(seq_years) + seq365 <- seq_len(365) + seq366 <- seq_len(366) + + #TODO: this uses too much memory if too many sites and/or years are requested + # --> group sites into chunks and loop over chunks + res <- array(NA, + dim = c(length(site_ids), 366, length(desc[["vars"]]), length(seq_years)) + ) + + #--- Extract data for each year and each variable + for (iy in seq_along(seq_years)) { + if (verbose) { + print(paste0(Sys.time(), ": extracting gridMET data for year ", + seq_years[iy]) + ) + } + + # Data file names for respective year + dfiles <- sapply(fnames_gridMET, function(files) { + grep(seq_years[iy], files, value = TRUE) + }) + + days <- if (seq_leaps[iy]) seq366 else seq365 + + for (iv in seq_along(desc[["vars"]])) { + dbrick <- raster::brick(dfiles[iv]) + + res[, days, iv, iy] <- raster::extract( + x = dbrick, + y = sp_locs, + method = "simple" + ) + } + } + + # Convert units + for (iv in seq_along(desc[["funits"]])) { + if (!is.null(desc[["funits"]][iv])) { + f <- match.fun(desc[["funits"]][[iv]]) + + res[, , iv, ] <- f(res[, , iv, ]) + } + } + + # Format data and add it to the weather database + if (verbose) { + print("Inserting data into weather database.") + } + + wd_template <- matrix(NA, nrow = 366, ncol = 4, + dimnames = list(NULL, c("DOY", "Tmax_C", "Tmin_C", "PPT_cm")) + ) + wd_template[, "DOY"] <- seq366 + + + for (k in seq_along(site_ids)) { + if (verbose) { + print(paste0(Sys.time(), ": inserting gridMET data for site ", + site_ids[k]) + ) + } + + weather_data <- vector("list", length = length(seq_years)) + names(weather_data) <- seq_years + + for (iy in seq_along(seq_years)) { + days <- if (seq_leaps[iy]) seq366 else seq365 + out <- wd_template[days, ] + out[, -1] <- 
round(res[k, days, , iy], dbW_digits) + + weather_data[[iy]] <- new("swWeatherData", + year = seq_years[iy], + data = out + ) + } + + + # Store site weather data in weather database + rSOILWAT2:::dbW_addWeatherDataNoCheck( + Site_id = site_ids_by_dbW[k], + Scenario_id = 1, + StartYear = year_range[["start_year"]], + EndYear = year_range[["end_year"]], + weather_blob = rSOILWAT2::dbW_weatherData_to_blob( + weatherData = weather_data, + type = comp_type + ) + ) + } + + if (verbose) { + print("gridMET weather data has successfully been extracted.") + } + + # Remove files & clean garbage to free-up RAM + rm(res) + gc() + + invisible(0) +} + + + #---Functions to determine sources of daily weather dw_LookupWeatherFolder <- function(dw_source, dw_names, exinfo, site_dat, sim_time, path = NULL, MoreArgs = NULL) { @@ -1706,6 +2023,7 @@ dw_DayMet_NorthAmerica <- function(dw_source, dw_names, exinfo, site_dat, stop("'dw_DayMet_NorthAmerica': ", path, " does not exist.") there <- 0 + if (exinfo$GriddedDailyWeatherFromDayMet_NorthAmerica) { # Check which of the DayMet weather data are available # - Temperature: 2-meter air temperature in Celsius degrees @@ -1817,6 +2135,61 @@ dw_Livneh2013_NorthAmerica <- function(dw_source, dw_names, exinfo, site_dat, } +dw_gridMET_NorthAmerica <- function(dw_source, dw_names, exinfo, site_dat, + sim_time, path = NULL, MoreArgs = NULL) { + + if (!dir.exists(path)) + stop("'dw_gridMET_NorthAmerica': ", path, " does not exist.") + + there <- 0 + + if (exinfo$GriddedDailyWeatherFromgridMET_NorthAmerica) { + # Check which requested gridMET weather data are available + tmp <- list.files(path, pattern = "(pr_)[[:digit:]]{4}(.nc)") + has_years <- range(as.integer(gsub("(pr_)|(.nc)", "", tmp))) + + if (length(has_years) > 0) { + # gridMET should cover 1979-yesterday + there <- + sim_time[["overall_simstartyr"]] <= has_years[2] && + sim_time[["overall_endyr"]] >= has_years[1] + + ftemp <- file.path(path, paste0("pr_", has_years[1], ".nc")) + + if 
(any(there) && file.exists(ftemp)) { + sp_locs <- sp::SpatialPoints( + coords = site_dat[, c("X_WGS84", "Y_WGS84")], + proj4string = sp::CRS("+init=epsg:4326") # WGS84 + ) + + ftmp <- raster::raster(ftemp, band = 1) + + # (2020-June-15): raster package does not correctly parse projection + # information of gridMET file(s) + if (!grepl("+datum=WGS84", raster::crs(ftmp, asText = TRUE))) { + warning("`dw_gridMET_NorthAmerica()`: overrides CRS of gridMET data.") + raster::crs(ftmp) <- sp::CRS("+init=epsg:4326") + } + + there <- !is.na(raster::extract(ftmp, y = sp_locs)) + + if (any(there)) { + dw_source[there] <- "gridMET_NorthAmerica" + dw_names[there] <- paste0( + site_dat[there, "Label"], "_gridMET_", + formatC(site_dat[there, "X_WGS84"], digits = 5, format = "f"), + "_", + formatC(site_dat[there, "Y_WGS84"], digits = 5, format = "f") + ) + } + } + } + } + + list(source = dw_source, name = dw_names, n = sum(there)) +} + + dw_NCEPCFSR_Global <- function(dw_source, dw_names, exinfo, site_dat, sim_time, path = NULL, MoreArgs = NULL) { @@ -1878,7 +2251,9 @@ dw_determine_sources <- function(dw_source, exinfo, dw_avail_sources, file.path(project_paths[["dir_in_treat"]], "LookupWeatherFolder"), NCEPCFSR_Global = project_paths[["dir.ex.NCEPCFSR"]], Livneh2013_NorthAmerica = project_paths[["dir.ex.Livneh2013"]], - DayMet_NorthAmerica = project_paths[["dir_daymet"]]) + DayMet_NorthAmerica = project_paths[["dir_daymet"]], + gridMET_NorthAmerica = project_paths[["dir_gridMET"]] + ) MoreArgs <- list(LookupWeatherFolder = list( create_treatments = SFSW2_prj_inputs[["create_treatments"]], @@ -1949,6 +2324,9 @@ set_paths_to_dailyweather_datasources <- function(SFSW2_prj_meta) { SFSW2_prj_meta[["project_paths"]][["dir.ex.Livneh2013"]] <- file.path(dir_dW, "Livneh_NA_2013", "MONTHLY_GRIDS") + SFSW2_prj_meta[["project_paths"]][["dir_gridMET"]] <- file.path(dir_dW, + "gridMET_4km_NA", "YEARLY_GRIDS") + SFSW2_prj_meta[["project_paths"]][["dir.ex.NCEPCFSR"]] <- file.path(dir_dW, 
"NCEPCFSR_Global", "CFSR_weather_prog08032012") diff --git a/R/WeatherDB_Check.R b/R/WeatherDB_Check.R index 45bf71b8..f5d5ae14 100644 --- a/R/WeatherDB_Check.R +++ b/R/WeatherDB_Check.R @@ -540,7 +540,7 @@ check_weatherDB <- function(dir_prj, fdbWeather, repeats = 2L, print(paste0("Unsuccessful extractions: n = ", sum(failed), "; f = ", signif(sum(failed) / length(failed), 2))) tmp <- climate[failed, ] - plot(tmp[, "Site_id_by_dbW"], tmp[, "Scenario_id"]) + graphics::plot(tmp[, "Site_id_by_dbW"], tmp[, "Scenario_id"]) failed_siteID <- climate[failed, "Site_id_by_dbW"] print(paste0("Sites with at least one unsuccessful extractions: n = ", diff --git a/R/WorkDatabase.R b/R/WorkDatabase.R index 8a0ee35b..56408e6e 100644 --- a/R/WorkDatabase.R +++ b/R/WorkDatabase.R @@ -988,7 +988,7 @@ recreate_dbWork <- function(path, dbOutput, use_granular_control, }) #-- Update table 'work' - has_pids_complete <- intersect2(has_pids_per_table) + has_pids_complete <- rSW2utils::intersect2(has_pids_per_table) if (length(has_pids_complete) > 0) { # Get runID from Pid diff --git a/R/netCDF_prepare_climatedata_files.R b/R/netCDF_prepare_climatedata_files.R index 2c31dbb2..3a65388b 100755 --- a/R/netCDF_prepare_climatedata_files.R +++ b/R/netCDF_prepare_climatedata_files.R @@ -7,12 +7,14 @@ do_compare_nc <- function(fnc1, fnc2, var) { nc2 <- ncdf4::nc_open(fnc2) comp <- list() - temp <- unlist(do_compare(nc1, nc2)) + temp <- unlist(rSW2utils::all_equal_recursively(nc1, nc2)) temp <- temp[!is.na(temp)] comp[["layout"]] <- temp[!grepl("id|group_id|filename", names(temp))] - comp[["var"]] <- do_compare(ncdf4::ncvar_get(nc1, var), - ncdf4::ncvar_get(nc2, var)) + comp[["var"]] <- rSW2utils::all_equal_recursively( + ncdf4::ncvar_get(nc1, var), + ncdf4::ncvar_get(nc2, var) + ) comp[["varname"]] <- var ncdf4::nc_close(nc1) @@ -197,7 +199,7 @@ check_nc_time_axis <- function(filename, dim_time = NULL, ztime = NULL) { time_agrees_with_ncfilename <- function(filename, ftime) { temp <- 
calc_ncfile_times(ftime) ts_yrmo <- as.POSIXlt(temp[["ts_yrmo"]]) - ctime <- rSFSW2::read_time_netCDF(filename) + ctime <- rSFSW2::read_time_netCDF(filename, tres = "monthly") c( step_N = ctime[["N"]] == length(ts_yrmo), @@ -371,11 +373,11 @@ prepare_climatedata_netCDFs <- function(dir_code, dir_data, dir_duplicates, # Check that all data are packed/compress identically comp_vars <- is.na(sapply(f_attr_var, function(p) - do_compare(f_attr_var[[1]], p))) + rSW2utils::all_equal_recursively(f_attr_var[[1]], p))) # Check that calendar and time units are identically temp <- is.na(sapply(f_attr_time, function(p) - do_compare(f_attr_time[[1]], p))) + rSW2utils::all_equal_recursively(f_attr_time[[1]], p))) comp_time1 <- apply(temp, 2, all) # Check that record variable is 'time' @@ -429,7 +431,10 @@ prepare_climatedata_netCDFs <- function(dir_code, dir_data, dir_duplicates, # first (reference) file is by definition identical to itself has_identical_dups <- c(TRUE, sapply(f_suit_dupvals, function(x) { - temp <- do_compare(x[["ref1"]], x[["x"]]) + temp <- rSW2utils::all_equal_recursively( + x[["ref1"]], + x[["x"]] + ) !is.null(x[["ref1"]]) && !is.list(temp) && is.na(temp) })) diff --git a/R/rSFSW2-package.R b/R/rSFSW2-package.R index 4a8b08d2..2b22d3c4 100644 --- a/R/rSFSW2-package.R +++ b/R/rSFSW2-package.R @@ -47,12 +47,11 @@ SFSW2_glovars <- new.env() ##------ Import from other packages +#' @import rSW2utils +#' @import rSW2funs +#' @import rSOILWAT2 ## Package uses S3/S4 classes - they are defined in package:methods -## Package uses methods from 'RSQLite' package (which re-exports 'DBI' methods) #' @import methods +## Package uses methods from 'RSQLite' package (which re-exports 'DBI' methods) #' @import RSQLite NULL - -##------ Support Rcpp -#' @importFrom Rcpp sourceCpp evalCpp -NULL diff --git a/R/rSOILWAT2_DataAccess.R b/R/rSOILWAT2_DataAccess.R index 4dd176df..2295181b 100644 --- a/R/rSOILWAT2_DataAccess.R +++ b/R/rSOILWAT2_DataAccess.R @@ -1,3 +1,99 @@ + +# Based on 
code from \code{\link[tools][.split_dependencies]} and +# \code{\link[tools][.split_op_version]} +.split_dependencies <- function(x) { + .split_op_version <- function(x) + { + pat <- "^([^\\([:space:]]+)[[:space:]]*\\(([^\\)]+)\\).*" + x1 <- sub(pat, "\\1", x) + x2 <- sub(pat, "\\2", x) + if (x2 != x1) { + pat <- "[[:space:]]*([[<>=!]+)[[:space:]]+(.*)" + version <- sub(pat, "\\2", x2) + if (!startsWith(version, "r")) { + version <- package_version(version) + } + list(name = x1, op = sub(pat, "\\1", x2), version = version) + } else { + list(name = x1) + } + } + + .split2 <- function(x) { + x <- sub("[[:space:]]+$", "", x) + x <- unique(sub("^[[:space:]]*(.*)", "\\1", x)) + names(x) <- sub("^([[:alnum:].]+).*$", "\\1", x) + x <- x[names(x) != "R"] + x <- x[nzchar(x)] + x <- x[!duplicated(names(x))] + lapply(x, .split_op_version) + } + + if (!any(nzchar(x))) { + return(list()) + } + + unlist( + x = lapply(strsplit(x, ","), .split2), + recursive = FALSE, + use.names = FALSE + ) +} + +get_minVersion_rSOILWAT2 <- function() { + tmp <- .split_dependencies( + x = utils::packageDescription(pkg = "rSFSW2", fields = "Depends") + ) + + ntmp <- sapply(tmp, function(x) x[["name"]]) + id <- "rSOILWAT2" == ntmp + if (sum(id) == 1) { + tmp[id][[1]][["version"]] + } else { + NA_integer_ + } +} + + + +#' Check version of a \pkg{rSOILWAT2} input or output object compared to +#' declared dependency in the package \var{DESCRIPTION} +#' +#' @param object An object of \pkg{rSOILWAT2} classes +#' \code{\linkS4class{swInputData}} or \code{\linkS4class{swOutput}}. +#' @param strict A logical value. If \code{FALSE} and check would fail, then +#' a warning is issued (and \code{TRUE} is returned nevertheless). +#' +#' @return A logical value. +#' Returns \code{TRUE} if version of \code{object} meets at least the minimal +#' required \pkg{rSOILWAT2} version -- or if \code{strict} is \code{FALSE}. +#' Returns \code{FALSE} otherwise. 
+#' +#' @seealso \code{\link[rSOILWAT2]{check_version}} +#' +#' @export +check_rSW2_version <- function(object, strict = TRUE) { + tmp1 <- get_version(object) + tmp2 <- SFSW2_glovars[["minVersion_rSOILWAT2"]] + + res <- if (is.na(tmp1) || is.na(tmp2)) { + FALSE + } else { + as.numeric_version(tmp1) >= as.numeric_version(tmp2) + } + + if (!strict && !res) { + warning( + "Code requires 'rSOILWAT2' v", tmp2, + ", but ", shQuote(deparse(substitute(object))), "has v", tmp1 + ) + res <- TRUE + } + + res +} + + #' \pkg{rSOILWAT2} data access functions #' #' @param x An object of class @@ -172,14 +268,14 @@ get_Temp_dy <- function(x, st) { #' @inheritParams swOutput_access #' @rdname swOutput_access get_VPD_mo <- function(sc, temp.mo, xin, st2) { - rH <- rSOILWAT2::swCloud_SkyCover(xin[[sc]]) + rH <- rSOILWAT2::swCloud_Humidity(xin[[sc]]) rH <- as.vector(rH[st2$month_ForEachUsedMonth]) list(mean = vpd(temp.mo$min, temp.mo$max, rH)) } get_VPD_dy <- function(sc, temp.dy, xin, st2) { - rH <- rSOILWAT2::swCloud_SkyCover(xin[[sc]]) + rH <- rSOILWAT2::swCloud_Humidity(xin[[sc]]) rH <- as.vector(rH[st2$month_ForEachUsedDay]) list(mean = vpd(temp.dy$min, temp.dy$max, rH)) diff --git a/R/sysdata.rda b/R/sysdata.rda index b1efcd1f..d96b2bc7 100644 Binary files a/R/sysdata.rda and b/R/sysdata.rda differ diff --git a/R/zzz.R b/R/zzz.R index 4577c55b..5d8c18d0 100644 --- a/R/zzz.R +++ b/R/zzz.R @@ -1,9 +1,12 @@ .onAttach <- function(libname, pkgname) { if (interactive()) { - meta <- utils::packageDescription(pkgname) - packageStartupMessage("Package ", shQuote(pkgname), " v", meta$Version, - " (", meta$Date, ") attached/loaded.") + packageStartupMessage( + "Package ", shQuote(pkgname), + " v", utils::packageVersion(pkgname), + " (", utils::packageDate(pkgname), ")", + " attached/loaded." 
+ ) } invisible() @@ -21,13 +24,24 @@ #--- Define package level variables that should be hidden from package user # and should not be changed - assign("minVersion_dbWeather", numeric_version("3.1.0"), - envir = SFSW2_glovars) + assign( + x = "minVersion_dbWeather", + value = numeric_version("3.2.0"), + envir = SFSW2_glovars + ) + assign( + x = "minVersion_rSOILWAT2", + value = numeric_version(get_minVersion_rSOILWAT2()), + envir = SFSW2_glovars + ) # number of implemented soil layers assign("slyrs_maxN", 20L, envir = SFSW2_glovars) - assign("slyrs_ids", seq_len(SFSW2_glovars[["slyrs_maxN"]]), - envir = SFSW2_glovars) + assign( + x = "slyrs_ids", + value = seq_len(SFSW2_glovars[["slyrs_maxN"]]), + envir = SFSW2_glovars + ) # SOILWAT2 assumes 2 m height assign("windspeed_height_m", 2L, envir = SFSW2_glovars) diff --git a/appveyor.yml b/appveyor.yml index e2579a35..ba95fe79 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -3,6 +3,7 @@ environment: - R_ARCH: i386 - R_ARCH: x64 USE_RTOOLS: true + R_REMOTES_STANDALONE: true _R_CHECK_FORCE_SUGGESTS_: false matrix: @@ -18,9 +19,7 @@ init: install: - ps: Bootstrap - - travis-tool.sh install_r blob DBI RSQLite - - git clone -b master --single-branch --recursive https://github.com/Burke-Lauenroth-Lab/rSOILWAT2.git /tmp/rSOILWAT2 - - R CMD INSTALL C:/tmp/rSOILWAT2 + # Adapt as necessary starting from here cache: @@ -29,6 +28,11 @@ cache: - C:\RLibrary -> .appveyor_clear_cache.txt build_script: + - travis-tool.sh install_r blob DBI RSQLite circular mvtnorm + - travis-tool.sh install_github DrylandEcology/rSW2utils + # `remotes::install_github` supports submodules since v2.0.0! 
+ - travis-tool.sh install_github DrylandEcology/rSOILWAT2 + - travis-tool.sh install_github DrylandEcology/rSW2funs - travis-tool.sh install_deps test_script: diff --git a/data-raw/1_Input/SWRuns_InputData_ExperimentalDesign_v09.csv b/data-raw/1_Input/SWRuns_InputData_ExperimentalDesign_v09.csv deleted file mode 100644 index f6f0dda0..00000000 --- a/data-raw/1_Input/SWRuns_InputData_ExperimentalDesign_v09.csv +++ /dev/null @@ -1,3 +0,0 @@ -Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forb,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationIn
MeanSeasonalityBothOrNone,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Imperm_L1,Exclude_ClimateAmbient,Grass_HydRed_OnOff,Shrub_HydRed_OnOff,Tree_HydRed_OnOff,Forb_HydRed_OnOff,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,PET_multiplier,Grass_SWPcrit_MPa,Shrub_SWPcrit_MPa,Tree_SWPcrit_MPa,Forb_SWPcrit_MPa,UseCO2BiomassMultiplier,UseCO2WUEMultiplier,SoilTemp_Flag -UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1 -DefaultSettings,,,,,,,,,,,,,,Prairie,CONUSSOIL_BSE_EVERY10cm,,SchenkJackson2003_PCdry_grasses,SchenkJackson2003_PCdry_shrubs,FILL,SchenkJackson2003_PCdry_forbs,FILL,1,,,,,,,,1,1,1,1,1,1,1,1,,,,,,,,,,,,,,,,,,,,,,,,,,15,990,1,-3.5,-3.9,-2,-2,1,1,1 diff --git a/data-raw/1_Input/SWRuns_InputData_ExperimentalDesign_v10.csv b/data-raw/1_Input/SWRuns_InputData_ExperimentalDesign_v10.csv new file mode 100644 index 00000000..90af9fc8 --- /dev/null +++ b/data-raw/1_Input/SWRuns_InputData_ExperimentalDesign_v10.csv @@ -0,0 +1,3 @@ 
+Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionTotalGrasses_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forb,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Imperm_L1,Exclude_ClimateAmbient,Grass_HydRed_OnOff,Shrub_HydRed_OnOff,Tree_HydRed_OnOff,Forb_HydRed_OnOff,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,PET_multiplier,Grass_SWPcrit_MPa,Shrub_SWPcrit_MPa,Tree_SWPcrit_MPa,Forb_SWPcrit_MPa,UseCO2BiomassMultiplier,UseCO2WUEMultiplier,SoilTemp_Flag 
+UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1 +DefaultSettings,,,,,,,,,,,,,,Prairie,CONUSSOIL_BSE_EVERY10cm,,SchenkJackson2003_PCdry_grasses,SchenkJackson2003_PCdry_shrubs,FILL,SchenkJackson2003_PCdry_forbs,FILL,1,,,,,,,,,1,1,1,1,1,1,1,1,,,,,,,,,,,,,,,,,,,,,,,,,,15,990,1,-3.5,-3.9,-2,-2,1,1,1 diff --git a/data-raw/1_Input/SWRuns_InputData_TreatmentDesign_v17.csv b/data-raw/1_Input/SWRuns_InputData_TreatmentDesign_v17.csv deleted file mode 100644 index 4604f017..00000000 --- a/data-raw/1_Input/SWRuns_InputData_TreatmentDesign_v17.csv +++ /dev/null @@ -1,2 +0,0 @@ -Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forbs,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFa
ctor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,Exclude_ClimateAmbient,MaxTempDepth,UseCO2BiomassMultiplier,UseCO2WUEMultiplier -UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/data-raw/1_Input/SWRuns_InputData_TreatmentDesign_v18.csv b/data-raw/1_Input/SWRuns_InputData_TreatmentDesign_v18.csv new file mode 100644 index 00000000..81dc5663 --- /dev/null +++ b/data-raw/1_Input/SWRuns_InputData_TreatmentDesign_v18.csv @@ -0,0 +1,2 @@ +Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionTotalGrasses_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forbs,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFac
tor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,Exclude_ClimateAmbient,MaxTempDepth,UseCO2BiomassMultiplier,UseCO2WUEMultiplier +UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/data-raw/1_Input/SWRuns_InputMaster_YOURPROJECT_v11.csv b/data-raw/1_Input/SWRuns_InputMaster_YOURPROJECT_v11.csv deleted file mode 100644 index 4928c647..00000000 --- a/data-raw/1_Input/SWRuns_InputMaster_YOURPROJECT_v11.csv +++ /dev/null @@ -1 +0,0 @@ -Label,site_id,Region,WeatherFolder,X_WGS84,Y_WGS84,ELEV_m,ASPECT,SLOPE,Include_YN diff --git a/data-raw/1_Input/SWRuns_InputMaster_YOURPROJECT_v12.csv b/data-raw/1_Input/SWRuns_InputMaster_YOURPROJECT_v12.csv new file mode 100644 index 00000000..9a83a478 --- /dev/null +++ b/data-raw/1_Input/SWRuns_InputMaster_YOURPROJECT_v12.csv @@ -0,0 +1 @@ +Label,site_id,Include_YN,WeatherFolder,X_WGS84,Y_WGS84,ELEV_m,Slope,Aspect diff --git a/data-raw/1_Input/datafiles/SWRuns_InputData_siteparam_v14.csv b/data-raw/1_Input/datafiles/SWRuns_InputData_siteparam_v15.csv similarity index 68% rename from data-raw/1_Input/datafiles/SWRuns_InputData_siteparam_v14.csv rename to data-raw/1_Input/datafiles/SWRuns_InputData_siteparam_v15.csv index 2096c534..1b07edef 100644 --- a/data-raw/1_Input/datafiles/SWRuns_InputData_siteparam_v14.csv +++ b/data-raw/1_Input/datafiles/SWRuns_InputData_siteparam_v15.csv @@ -1,2 +1,2 @@ 
-Label,SWC_min,SWC_init,SWC_wet,SWC_YearlyReset,SWC_Deepdrain,PET_multiplier,SoilTemp_Flag,SoilTempC_atUpperBoundary,SoilTempC_atLowerBoundary,SoilTemp_BiomassLimiter_gPERm2,SoilTemp_T1constant_a,SoilTemp_T1constant_b,SoilTemp_T1constant_c,SoilTemp_SoilThermCondct,SoilTemp_cs_constant,SoilTemp_SpecificHeatCapacity,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,Latitude,Altitude,Slope,Aspect,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Param_UnsaturatedPercolation -UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +Label,SWC_min,SWC_init,SWC_wet,SWC_YearlyReset,SWC_Deepdrain,PET_multiplier,SoilTemp_Flag,SoilTempC_atUpperBoundary,SoilTempC_atLowerBoundary,SoilTemp_BiomassLimiter_gPERm2,SoilTemp_T1constant_a,SoilTemp_T1constant_b,SoilTemp_T1constant_c,SoilTemp_SoilThermCondct,SoilTemp_cs_constant,SoilTemp_SpecificHeatCapacity,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Param_UnsaturatedPercolation +UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/data-raw/1_Input/treatments/LookupCO2data/AtmosCO2.csv b/data-raw/1_Input/treatments/LookupCO2data/AtmosCO2.csv index 59a1b899..a4a0ca7a 100644 --- a/data-raw/1_Input/treatments/LookupCO2data/AtmosCO2.csv +++ b/data-raw/1_Input/treatments/LookupCO2data/AtmosCO2.csv @@ -1,4 +1,4 @@ -Year,Default,RCP85,20TH_CENTURY,RCP3PD,RCP45,RCP6 +Year,Fix360ppm,RCP85,historical,RCP3PD,RCP45,RCP6 1765,360,278.05158,278.05158,278.05158,278.05158,278.05158 1766,360,278.10615,278.10615,278.10615,278.10615,278.10615 1767,360,278.22039,278.22039,278.22039,278.22039,278.22039 diff --git a/data-raw/1_Input/treatments/tr_cloudin/climate.in b/data-raw/1_Input/treatments/tr_cloudin/climate.in index 758b863f..cc2e6166 100644 --- a/data-raw/1_Input/treatments/tr_cloudin/climate.in +++ b/data-raw/1_Input/treatments/tr_cloudin/climate.in @@ -11,9 +11,6 @@ # Relative humidity (%) 61.0 61.0 61.0 51.0 51.0 51.0 41.0 41.0 
51.0 51.0 61.0 61.0 -# Atmospheric transmissivity (relative) -1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 - # Snow density (kg / m3) 213.7 241.6 261.0 308.0 398.1 464.5 0.0 0.0 0.0 140.0 161.6 185.1 diff --git a/data-raw/1_Input/treatments/tr_siteparamin/siteparam.in b/data-raw/1_Input/treatments/tr_siteparamin/siteparam.in index 9a8fd022..28b936e9 100644 --- a/data-raw/1_Input/treatments/tr_siteparamin/siteparam.in +++ b/data-raw/1_Input/treatments/tr_siteparamin/siteparam.in @@ -53,10 +53,13 @@ 1.1 # range #---- Site location and topography -0.681 # latitude of the site in radians -1000 # elevation of site (m a.s.l.) -0 # slope at site (degrees): no slope = 0 --1 # aspect at site (degrees): N=0, E=90, S=180, W=270, no slope:-1 +-105.58 # longitude (degrees; W < 0; E > 0) + # (currently not used by simulation, but useful for site documentation) +39.59 # latitude (degrees; N > 0; S < 0) +1000 # elevation (m a.s.l.) +0 # slope (degrees): no slope = 0, vertical surface = 90 +NAN # aspect = surface azimuth angle (degrees): S=0, E=-90, N=±180, W=90; + # ignored if slope = 0 or aspect takes a missing value, i.e., NAN or 999 #---- Soil temperature # from Parton 1978, ch. 
2.2.2 Temperature-profile Submodel diff --git a/data-raw/1_Input/treatments/tr_weathersetupin/weathsetup.in b/data-raw/1_Input/treatments/tr_weathersetupin/weathsetup.in index 45b7787d..55ddd973 100755 --- a/data-raw/1_Input/treatments/tr_weathersetupin/weathsetup.in +++ b/data-raw/1_Input/treatments/tr_weathersetupin/weathsetup.in @@ -7,11 +7,11 @@ #--- Activate/deactivate weather generator / historical daily weather inputs -0 # 0 = use historical data only; 1 = use markov process for missing weather -1980 # first year to begin historical weather - # (filename, e.g., Input/data_weather/weath.1949; see `files.in` for - # relative pathname and basis of filename) -5 # number of days to use in a moving average of temperature +0 # 0 = use historical data only + # 1 = use weather generator for (partially) missing weather inputs + # 2 = use weather generator for all weather (don't check weather inputs) +-1 # first year to begin historical weather + # if -1, then use first year of simulation (see `years.in`) #--- Monthly scaling parameters: @@ -22,17 +22,16 @@ # SkyCover = additive for mean monthly sky cover [%]; min(100, max(0, scale + sky cover)) # Wind = multiplicative for mean monthly wind speed; max(0, scale * wind speed) # rH = additive for mean monthly relative humidity [%]; min(100, max(0, scale + rel. 
Humidity)) -# Transmissivity = multiplicative for mean monthly relative transmissivity; min(1, max(0, scale * transmissivity)) -#Mon PPT MaxT MinT SkyCover Wind rH Transmissivity -1 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -2 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -3 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -4 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -5 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -6 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -7 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -8 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -9 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -10 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -11 1.000 0.00 0.00 0.0 1.0 0.0 1.0 -12 1.000 0.00 0.00 0.0 1.0 0.0 1.0 +#Mon PPT MaxT MinT SkyCover Wind rH +1 1.000 0.00 0.00 0.0 1.0 0.0 +2 1.000 0.00 0.00 0.0 1.0 0.0 +3 1.000 0.00 0.00 0.0 1.0 0.0 +4 1.000 0.00 0.00 0.0 1.0 0.0 +5 1.000 0.00 0.00 0.0 1.0 0.0 +6 1.000 0.00 0.00 0.0 1.0 0.0 +7 1.000 0.00 0.00 0.0 1.0 0.0 +8 1.000 0.00 0.00 0.0 1.0 0.0 +9 1.000 0.00 0.00 0.0 1.0 0.0 +10 1.000 0.00 0.00 0.0 1.0 0.0 +11 1.000 0.00 0.00 0.0 1.0 0.0 +12 1.000 0.00 0.00 0.0 1.0 0.0 diff --git a/data-raw/prepare_default_project_infrastructure.R b/data-raw/prepare_default_project_infrastructure.R index ad79f0cd..2e9b6ac6 100755 --- a/data-raw/prepare_default_project_infrastructure.R +++ b/data-raw/prepare_default_project_infrastructure.R @@ -15,7 +15,8 @@ tr_update <- list( tr_prodin = basename(rSOILWAT2::swFiles_Prod(sw_in)), tr_siteparamin = basename(rSOILWAT2::swFiles_SiteParams(sw_in)), tr_soilsin = basename(rSOILWAT2::swFiles_Soils(sw_in)), - tr_weathersetupin = basename(rSOILWAT2::swFiles_WeatherSetup(sw_in))) + tr_weathersetupin = basename(rSOILWAT2::swFiles_WeatherSetup(sw_in)) +) path_demo <- system.file("extdata", "example1", "Input", package = "rSOILWAT2") diff --git a/demo/SFSW2_project_code.R b/demo/SFSW2_project_code.R index 8ccc3b81..67599b24 100644 --- a/demo/SFSW2_project_code.R +++ b/demo/SFSW2_project_code.R @@ -5,7 +5,7 @@ # EXECUTING SIMULATIONS, AND AGGREGATING OUTPUTS #----- LICENSE -# Copyright (C) 2017 by `r 
packageDescription("rSFSW2")[["Author"]]` +# Copyright (C) 2017-2019 by `r packageDescription("rSFSW2")[["Author"]]` # Contact information `r packageDescription("rSFSW2")[["Maintainer"]]` # This program is free software: you can redistribute it and/or modify @@ -19,8 +19,8 @@ # GNU General Public License for more details. #------ NOTES: -# - You get an overview by: `r package?rSFSW2` -# - An index of functionality is displayed by: `r help(package = "rSFSW2")` +# - Display a package overview: `r package?rSFSW2` +# - List package functionality: `r help(package = "rSFSW2")` #------------------------------------------------------------------------------# @@ -68,16 +68,23 @@ actions <- list( dir_prj <- getwd() -writeLines(c("", "", +writeLines(c( + "", "", "###########################################################################", - paste("#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), "run started at", - t_job_start), + paste( + "#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), + "run started at", t_job_start + ), "###########################################################################", - "")) + "" +)) SFSW2_prj_meta <- init_rSFSW2_project( - fmetar = file.path(dir_prj, "SFSW2_project_descriptions.R"), update = FALSE, - verbose = interactive(), print.debug = FALSE) + fmetar = file.path(dir_prj, "SFSW2_project_descriptions.R"), + update = FALSE, + verbose = interactive(), + print.debug = FALSE +) @@ -85,25 +92,42 @@ SFSW2_prj_meta <- init_rSFSW2_project( #------ 2) LOAD THE SETTINGS FOR THIS RUN -------------------------------------- # Setting objects: # opt_behave, opt_parallel, opt_verbosity, opt_out_run, opt_chunks -source(file.path(dir_prj, "SFSW2_project_settings.R"), verbose = interactive(), - keep.source = FALSE) +source( + file = file.path(dir_prj, "SFSW2_project_settings.R"), + keep.source = FALSE +) -SFSW2_prj_meta <- update_actions(SFSW2_prj_meta, actions, - wipe_dbOutput = opt_out_run[["wipe_dbOutput"]]) +SFSW2_prj_meta <- update_actions( 
+ SFSW2_prj_meta, + actions, + wipe_dbOutput = opt_out_run[["wipe_dbOutput"]] +) ################################################################################ #------ 3) POPULATE PROJECT WITH INPUT DATA (REPEAT UNTIL COMPLETE) ------------ -temp <- populate_rSFSW2_project_with_data(SFSW2_prj_meta, opt_behave, - opt_parallel, opt_chunks, opt_out_run, opt_verbosity) +temp <- populate_rSFSW2_project_with_data( + SFSW2_prj_meta, + opt_behave, + opt_parallel, + opt_chunks, + opt_out_run, + opt_verbosity +) -if (isTRUE(opt_verbosity[["verbose"]]) && - !identical(SFSW2_prj_meta, temp[["SFSW2_prj_meta"]])) { - warning("'SFSW2_prj_meta' has changed: modify/reset input tracker status ", +if ( + isTRUE(opt_verbosity[["verbose"]]) && + !identical(SFSW2_prj_meta, temp[["SFSW2_prj_meta"]]) +) { + warning( + "'SFSW2_prj_meta' has changed: ", + "modify/reset input tracker status ", "'SFSW2_prj_meta[['input_status']]', if needed ", "(see help `?update_intracker`) and re-run project.", - call. = FALSE, immediate. = TRUE) + call. = FALSE, + immediate. = TRUE + ) } SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] @@ -116,17 +140,27 @@ SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] if (isTRUE(actions[["check_inputs"]])) { - temp <- check_rSFSW2_project_input_data(SFSW2_prj_meta, SFSW2_prj_inputs, - opt_chunks, opt_verbosity) + temp <- check_rSFSW2_project_input_data( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_chunks, + opt_verbosity + ) SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] - if (isTRUE(opt_verbosity[["verbose"]]) && - !all(stats::na.exclude(SFSW2_prj_meta[["input_status"]][, "checked"]))) { - warning("'SFSW2_prj_meta[['input_status']]': some input tracker checks ", - "failed; fix inputs, if needed, and re-run project.", - call. = FALSE, immediate. 
= TRUE) + if ( + isTRUE(opt_verbosity[["verbose"]]) && + !all(stats::na.exclude(SFSW2_prj_meta[["input_status"]][, "checked"])) + ) { + warning( + "'SFSW2_prj_meta[['input_status']]': ", + "some input tracker checks failed; ", + "fix inputs, if needed, and re-run project.", + call. = FALSE, + immediate. = TRUE + ) } } @@ -137,16 +171,30 @@ if (isTRUE(actions[["check_inputs"]])) { if (any(unlist(actions[c("sim_create", "sim_execute", "sim_aggregate")]))) { - SFSW2_prj_meta <- simulate_SOILWAT2_experiment(SFSW2_prj_meta, - SFSW2_prj_inputs, opt_behave, opt_parallel, opt_chunks, opt_out_run, - opt_verbosity) + SFSW2_prj_meta <- simulate_SOILWAT2_experiment( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_behave, + opt_parallel, + opt_chunks, + opt_out_run, + opt_verbosity + ) } if (isTRUE(actions[["concat_dbOut"]])) { - stopifnot(move_output_to_dbOutput(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_behave, opt_out_run, opt_verbosity, - check_if_Pid_present = opt_verbosity[["print.debug"]])) + stopifnot( + move_output_to_dbOutput( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity, + check_if_Pid_present = opt_verbosity[["print.debug"]] + ) + ) } @@ -156,8 +204,13 @@ if (isTRUE(actions[["concat_dbOut"]])) { if (isTRUE(actions[["ensemble"]])) { - rSFSW2:::generate_ensembles(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_chunks, verbose = opt_verbosity[["verbose"]]) + rSFSW2:::generate_ensembles( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_chunks, + verbose = opt_verbosity[["verbose"]] + ) } @@ -167,8 +220,13 @@ if (isTRUE(actions[["ensemble"]])) { if (isTRUE(actions[["check_dbOut"]])) { - info_missing <- check_outputDB_completeness(SFSW2_prj_meta, opt_parallel, - opt_behave, opt_out_run, opt_verbosity) + info_missing <- check_outputDB_completeness( + SFSW2_prj_meta, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity + ) } @@ -182,8 +240,11 @@ exit_SFSW2_cluster(verbose = opt_verbosity[["verbose"]]) #--- 
Goodbye message writeLines(c("", "###########################################################################", - paste("#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), "run on", - SFSW2_prj_meta[["opt_platform"]][["host"]], "platform ended at", - Sys.time()), + paste( + "#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), + "run on", SFSW2_prj_meta[["opt_platform"]][["host"]], "platform ended at", + Sys.time() + ), "###########################################################################", - "")) + "" +)) diff --git a/demo/SFSW2_project_descriptions.R b/demo/SFSW2_project_descriptions.R index f5aa6537..262d61ab 100644 --- a/demo/SFSW2_project_descriptions.R +++ b/demo/SFSW2_project_descriptions.R @@ -89,15 +89,15 @@ project_paths <- list( #------ Base names or full names of input files fnames_in <- list( - fmaster = "SWRuns_InputMaster_YOURPROJECT_v11.csv", + fmaster = "SWRuns_InputMaster_YOURPROJECT_v12.csv", fslayers = "SWRuns_InputData_SoilLayers_v9.csv", - ftreatDesign = "SWRuns_InputData_TreatmentDesign_v17.csv", - fexpDesign = "SWRuns_InputData_ExperimentalDesign_v09.csv", + ftreatDesign = "SWRuns_InputData_TreatmentDesign_v18.csv", + fexpDesign = "SWRuns_InputData_ExperimentalDesign_v10.csv", fclimnorm = "SWRuns_InputData_cloud_v10.csv", fvegetation = "SWRuns_InputData_prod_v11.csv", - fsite = "SWRuns_InputData_siteparam_v14.csv", + fsite = "SWRuns_InputData_siteparam_v15.csv", fsoils = "SWRuns_InputData_soils_v12.csv", fweathersetup = "SWRuns_InputData_weathersetup_v10.csv", fclimscen_delta = "SWRuns_InputData_ClimateScenarios_Change_v11.csv", @@ -180,6 +180,11 @@ opt_input <- list( # file.path(project_paths[["dir_ex_weather"]], "Livneh_NA_2013", # "MONTHLY_GRIDS") "GriddedDailyWeatherFromLivneh2013_NorthAmerica", 0, + # - Abatzoglou et al. 2013: 1/24 degree res. 
for 1979-yesterday; + # data expected at file.path(project_paths[["dir_ex_weather"]], + # "gridMET_4km_NA", "YEARLY_GRIDS"); + # obtain data with function `gridMET_download_and_check` + "GriddedDailyWeatherFromgridMET_NorthAmerica", 0, # Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, # climate condition names must be of the form SCENARIO.GCM with @@ -242,8 +247,9 @@ opt_input <- list( # etc. # Do not change/remove/add entries; only re-order to set different priorities dw_source_priority = c("DayMet_NorthAmerica", "LookupWeatherFolder", - "Maurer2002_NorthAmerica", "Livneh2013_NorthAmerica", "NRCan_10km_Canada", - "NCEPCFSR_Global"), + "Maurer2002_NorthAmerica", "Livneh2013_NorthAmerica", + "gridMET_NorthAmerica", "NRCan_10km_Canada", "NCEPCFSR_Global" + ), # Creation of dbWeather # Compression type of dbWeather; one value of eval(formals(memCompress)[[2]]) @@ -307,7 +313,14 @@ opt_sim <- list( # temperate climate := has >=4 & < 8 months with > 10C # - 4 C based standard input of mean monthly biomass values described in # Bradford et al. 
2014 Journal of Ecology - growseason_Tlimit_C = 4 + growseason_Tlimit_C = 4, + # Mean monthly reference temperature describing default phenology values + # - below values calculated by KP as median across 898 big sagebrush sites + # (see https://github.com/DrylandEcology/rSFSTEP2/issues/195) + reference_temperature_default_phenology = c( + -4.6768, -2.7282, 1.8257, 6.0538, 10.696, 15.3878, + 19.7777, 18.8755, 13.7868, 7.2843, 0.4167, -4.6912 + ) ) @@ -377,21 +390,31 @@ sim_time <- list( startyr = startyr <- 1980, endyr = endyr <- 2010, - #Future time period(s): - # Each list element of 'future_yrs' will be applied to every - # climate.conditions + #--- Future time period(s): # Each list element of 'future_yrs' is a vector with three elements # \code{c(delta, DSfut_startyr, DSfut_endyr)} - # future simulation years = delta + simstartyr:endyr - # future simulation years downscaled based on - # - current conditions = DScur_startyr:DScur_endyr - # - future conditions = DSfut_startyr:DSfut_endyr + # + # Daily scenario data "idem" (pass through): + # - Each list element ("row") of 'future_yrs' must match exactly one + # of the scenario experiments of 'req_scens[["models"]]` + # (e.g., "historical", "RCP45", "RCP85") -- in the correct order + # - Value of 'delta' is ignored + # + # Monthly scenario data: + # - Each list element ("row") of 'future_yrs' will be applied to every + # climate.conditions + # - future simulation years = delta + simstartyr:endyr + # - future simulation years downscaled based on + # - current conditions = DScur_startyr:DScur_endyr + # - future conditions = DSfut_startyr:DSfut_endyr + # # NOTE: Multiple time periods doesn't work with external type # 'ClimateWizardEnsembles' DScur_startyr = startyr, DScur_endyr = endyr, future_yrs = list( + c(d <- 0, startyr + d, endyr + d), # historical runs of GCMs c(d <- 40, startyr + d, endyr + d), c(d <- 90, startyr + d, endyr + d - 1) # most GCMs don't have data for 2100 ) @@ -404,6 +427,11 @@ req_scens <- 
list( # perturbations are all off ambient = "Current", + # Name of atmospheric CO2-concentration dataset to be used for "ambient" + # conditions. + # The string must match a column name of `LookupCO2data/AtmosCO2.csv` + tag_aCO2_ambient = "Fix360ppm", # e.g., "Fix360ppm", etc. + # Names of climate scenarios # - If a simulation project does not include future climate conditions, # then set models = NULL @@ -458,11 +486,15 @@ req_scens <- list( # requires live internet access # - "BCSD_SageSeer_USA": monthly time-series at 1-km resolution for the # western US prepared by Katie Renwick + # - "CMIP5_MACAv2metdata_USA": daily time series at 1/24-degree + # resolution for the US (requires `method_DS = "idem"`) # - "ESGF_Global": monthly time-series at varying resolution dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" ), # Downscaling method (applied to each each climate.conditions) + # Daily scenario data + # - "idem" (pass through) # Monthly scenario -> daily forcing variables # One or multiple elements of # - "raw" diff --git a/demo/SFSW2_project_settings.R b/demo/SFSW2_project_settings.R index a44932bd..d9594a3e 100644 --- a/demo/SFSW2_project_settings.R +++ b/demo/SFSW2_project_settings.R @@ -100,6 +100,9 @@ opt_out_run <- list( saveRsoilwatInput = FALSE, saveRsoilwatOutput = FALSE, + # Enforce that rSOILWAT2 objects meet the current version requirement + enforce_rSW2_version = TRUE, + # Write data to big input files for experimental design x treatment design makeInputForExperimentalDesign = FALSE, diff --git a/demo/SFSW2_project_xMaintenance_dbOutputTemp.R b/demo/SFSW2_project_xMaintenance_dbOutputTemp.R index 8cd0f869..e655c4d9 100755 --- a/demo/SFSW2_project_xMaintenance_dbOutputTemp.R +++ b/demo/SFSW2_project_xMaintenance_dbOutputTemp.R @@ -5,7 +5,7 @@ # EXECUTING SIMULATIONS, AND AGGREGATING OUTPUTS #----- LICENSE -# Copyright (C) 2017 by `r packageDescription("rSFSW2")[["Author"]]` +# Copyright (C) 2017-2019 by `r packageDescription("rSFSW2")[["Author"]]` # Contact 
information `r packageDescription("rSFSW2")[["Maintainer"]]` # This program is free software: you can redistribute it and/or modify diff --git a/inst/WORDLIST b/inst/WORDLIST index b5afac7b..275b34cf 100644 --- a/inst/WORDLIST +++ b/inst/WORDLIST @@ -54,6 +54,7 @@ multilayer multiplicatively normals operationalization +parallelize Pedotransfer phenology pre @@ -84,7 +85,9 @@ FAO GCM ISRIC NCEP +netCDF NOAA +ORCID ORNL RCM UNEP @@ -93,6 +96,7 @@ UNEP #------------------------------------ #------ Names +Abatzoglou Anandhi Artemisia tridentata Batjes diff --git a/man/ExtractClimateChangeScenarios.Rd b/man/ExtractClimateChangeScenarios.Rd index db26a878..63b9a44a 100644 --- a/man/ExtractClimateChangeScenarios.Rd +++ b/man/ExtractClimateChangeScenarios.Rd @@ -4,9 +4,17 @@ \alias{ExtractClimateChangeScenarios} \title{Extract climate scenarios} \usage{ -ExtractClimateChangeScenarios(climDB_metas, SFSW2_prj_meta, - SFSW2_prj_inputs, todos, opt_parallel, opt_chunks, resume, - verbose = FALSE, print.debug = FALSE) +ExtractClimateChangeScenarios( + climDB_metas, + SFSW2_prj_meta, + SFSW2_prj_inputs, + todos, + opt_parallel, + opt_chunks, + resume, + verbose = FALSE, + print.debug = FALSE +) } \arguments{ \item{todos}{A logical vector of length \code{runsN_master}. 
Element locations with diff --git a/man/ExtractClimateWizard.Rd b/man/ExtractClimateWizard.Rd index 6735499b..9ae49a54 100644 --- a/man/ExtractClimateWizard.Rd +++ b/man/ExtractClimateWizard.Rd @@ -4,8 +4,13 @@ \alias{ExtractClimateWizard} \title{Extract climate scenarios from downloaded \url{ClimateWizard.org} data} \usage{ -ExtractClimateWizard(climDB_metas, SFSW2_prj_meta, SFSW2_prj_inputs, todos, - verbose = FALSE) +ExtractClimateWizard( + climDB_metas, + SFSW2_prj_meta, + SFSW2_prj_inputs, + todos, + verbose = FALSE +) } \description{ Extract climate scenarios from downloaded \url{ClimateWizard.org} data diff --git a/man/ExtractData_Elevation.Rd b/man/ExtractData_Elevation.Rd index 23a06261..6fce62e2 100644 --- a/man/ExtractData_Elevation.Rd +++ b/man/ExtractData_Elevation.Rd @@ -4,8 +4,13 @@ \alias{ExtractData_Elevation} \title{Extract elevation data} \usage{ -ExtractData_Elevation(exinfo, SFSW2_prj_meta, SFSW2_prj_inputs, - resume = FALSE, verbose = FALSE) +ExtractData_Elevation( + exinfo, + SFSW2_prj_meta, + SFSW2_prj_inputs, + resume = FALSE, + verbose = FALSE +) } \description{ Extract elevation data diff --git a/man/ExtractData_MeanMonthlyClimate.Rd b/man/ExtractData_MeanMonthlyClimate.Rd index 235cd985..28a562aa 100644 --- a/man/ExtractData_MeanMonthlyClimate.Rd +++ b/man/ExtractData_MeanMonthlyClimate.Rd @@ -5,8 +5,15 @@ \title{Extract mean monthly climate data: cloud cover, relative humidity, and wind speed} \usage{ -ExtractData_MeanMonthlyClimate(exinfo, SFSW2_prj_meta, SFSW2_prj_inputs, - opt_parallel, opt_chunks, resume = FALSE, verbose = FALSE) +ExtractData_MeanMonthlyClimate( + exinfo, + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_parallel, + opt_chunks, + resume = FALSE, + verbose = FALSE +) } \description{ Extract mean monthly climate data: cloud cover, relative humidity, and diff --git a/man/ExtractData_Soils.Rd b/man/ExtractData_Soils.Rd index d84c1ae0..5bc47d4c 100644 --- a/man/ExtractData_Soils.Rd +++ b/man/ExtractData_Soils.Rd @@ -4,8 +4,14 
@@ \alias{ExtractData_Soils} \title{Extract soil characteristics} \usage{ -ExtractData_Soils(exinfo, SFSW2_prj_meta, SFSW2_prj_inputs, opt_parallel, - resume, verbose = FALSE) +ExtractData_Soils( + exinfo, + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_parallel, + resume, + verbose = FALSE +) } \description{ Extract soil characteristics diff --git a/man/ExtractDayMet.Rd b/man/ExtractDayMet.Rd index ffc3afeb..65f7ebe5 100644 --- a/man/ExtractDayMet.Rd +++ b/man/ExtractDayMet.Rd @@ -7,13 +7,27 @@ \title{Extract gridded daily weather from \var{\dQuote{DayMet}} for North American sites} \usage{ -ExtractGriddedDailyWeatherFromDayMet_NorthAmerica_swWeather(dir_data, - site_ids, coords_WGS84, start_year, end_year, dbW_digits) +ExtractGriddedDailyWeatherFromDayMet_NorthAmerica_swWeather( + dir_data, + site_ids, + coords_WGS84, + start_year, + end_year, + dbW_digits +) -ExtractGriddedDailyWeatherFromDayMet_NorthAmerica_dbW(dir_data, site_ids, - site_ids_by_dbW, coords_WGS84, start_year, end_year, - dir_temp = tempdir(), dbW_compression_type = "gzip", dbW_digits, - verbose = FALSE) +ExtractGriddedDailyWeatherFromDayMet_NorthAmerica_dbW( + dir_data, + site_ids, + site_ids_by_dbW, + coords_WGS84, + start_year, + end_year, + dir_temp = tempdir(), + dbW_compression_type = "gzip", + dbW_digits, + verbose = FALSE +) } \value{ A list of class diff --git a/man/ExtractGriddedDailyWeatherFromMaurer2002_NorthAmerica.Rd b/man/ExtractGriddedDailyWeatherFromMaurer2002_NorthAmerica.Rd index 6544f87c..bed44147 100644 --- a/man/ExtractGriddedDailyWeatherFromMaurer2002_NorthAmerica.Rd +++ b/man/ExtractGriddedDailyWeatherFromMaurer2002_NorthAmerica.Rd @@ -5,8 +5,14 @@ \title{Extract gridded daily weather from Maurer et al. 
2002 (updated in 2010) for North American sites} \usage{ -ExtractGriddedDailyWeatherFromMaurer2002_NorthAmerica(dir_data, cellname, - start_year, end_year, dbW_digits, verbose = FALSE) +ExtractGriddedDailyWeatherFromMaurer2002_NorthAmerica( + dir_data, + cellname, + start_year, + end_year, + dbW_digits, + verbose = FALSE +) } \value{ An invisible zero. A list of which each element represents one year diff --git a/man/ExtractGriddedDailyWeatherFromNRCan_10km_Canada.Rd b/man/ExtractGriddedDailyWeatherFromNRCan_10km_Canada.Rd index fa282853..e376d028 100644 --- a/man/ExtractGriddedDailyWeatherFromNRCan_10km_Canada.Rd +++ b/man/ExtractGriddedDailyWeatherFromNRCan_10km_Canada.Rd @@ -4,10 +4,18 @@ \alias{ExtractGriddedDailyWeatherFromNRCan_10km_Canada} \title{Extract gridded daily weather from NR Canada for Canadian sites} \usage{ -ExtractGriddedDailyWeatherFromNRCan_10km_Canada(dir_data, site_ids, - site_ids_by_dbW, coords_WGS84, start_year, end_year, - dir_temp = tempdir(), dbW_compression_type = "gzip", dbW_digits, - verbose = FALSE) +ExtractGriddedDailyWeatherFromNRCan_10km_Canada( + dir_data, + site_ids, + site_ids_by_dbW, + coords_WGS84, + start_year, + end_year, + dir_temp = tempdir(), + dbW_compression_type = "gzip", + dbW_digits, + verbose = FALSE +) } \value{ An invisible zero. 
A list of which each element represents one year diff --git a/man/GriddedDailyWeatherFromNCEPCFSR_Global.Rd b/man/GriddedDailyWeatherFromNCEPCFSR_Global.Rd index 187a90f4..51108962 100644 --- a/man/GriddedDailyWeatherFromNCEPCFSR_Global.Rd +++ b/man/GriddedDailyWeatherFromNCEPCFSR_Global.Rd @@ -4,11 +4,23 @@ \alias{GriddedDailyWeatherFromNCEPCFSR_Global} \title{Extract gridded daily weather from NCEP/CFSR for sites globally} \usage{ -GriddedDailyWeatherFromNCEPCFSR_Global(site_ids, site_ids_by_dbW, - dat_sites, tag_WeatherFolder, start_year, end_year, meta_cfsr, - n_site_per_core = 100, rm_temp = TRUE, resume = FALSE, - dir_temp = tempdir(), dbW_compression_type = "gzip", dbW_digits, - verbose = FALSE, print.debug = FALSE) +GriddedDailyWeatherFromNCEPCFSR_Global( + site_ids, + site_ids_by_dbW, + dat_sites, + tag_WeatherFolder, + start_year, + end_year, + meta_cfsr, + n_site_per_core = 100, + rm_temp = TRUE, + resume = FALSE, + dir_temp = tempdir(), + dbW_compression_type = "gzip", + dbW_digits, + verbose = FALSE, + print.debug = FALSE +) } \value{ An invisible zero. 
A list of which each element represents one year diff --git a/man/PrepareClimateScenarios.Rd b/man/PrepareClimateScenarios.Rd index 55b64d3c..7e9bb29e 100644 --- a/man/PrepareClimateScenarios.Rd +++ b/man/PrepareClimateScenarios.Rd @@ -4,8 +4,14 @@ \alias{PrepareClimateScenarios} \title{Extracts climate change scenarios and downscales monthly to daily time series} \usage{ -PrepareClimateScenarios(SFSW2_prj_meta, SFSW2_prj_inputs, opt_parallel, - resume, opt_verbosity, opt_chunks) +PrepareClimateScenarios( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_parallel, + resume, + opt_verbosity, + opt_chunks +) } \description{ Extracts climate change scenarios and downscales monthly to daily time series diff --git a/man/SeedlingRootingDepth.Rd b/man/SeedlingRootingDepth.Rd deleted file mode 100644 index e2d336b9..00000000 --- a/man/SeedlingRootingDepth.Rd +++ /dev/null @@ -1,16 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{SeedlingRootingDepth} -\alias{SeedlingRootingDepth} -\title{Function to calculate rooting depth at given age -Units: [age] = days, [P0, K, r] = mm} -\usage{ -SeedlingRootingDepth(age, P0, K, r) -} -\value{ -A numeric vector of rooting depth in units of centimeters. 
-} -\description{ -Function to calculate rooting depth at given age -Units: [age] = days, [P0, K, r] = mm -} diff --git a/man/SoilLayer_at_SoilDepth.Rd b/man/SoilLayer_at_SoilDepth.Rd deleted file mode 100644 index a72e5f34..00000000 --- a/man/SoilLayer_at_SoilDepth.Rd +++ /dev/null @@ -1,11 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{SoilLayer_at_SoilDepth} -\alias{SoilLayer_at_SoilDepth} -\title{Function to convert soil depth to soil layer} -\usage{ -SoilLayer_at_SoilDepth(depth_cm, layers_depth) -} -\description{ -Function to convert soil depth to soil layer -} diff --git a/man/add_delta_to_PPT.Rd b/man/add_delta_to_PPT.Rd index da54cd35..530a0431 100644 --- a/man/add_delta_to_PPT.Rd +++ b/man/add_delta_to_PPT.Rd @@ -4,8 +4,12 @@ \alias{add_delta_to_PPT} \title{Additive Precipitation adjustment by a delta value} \usage{ -add_delta_to_PPT(data, ind_events = NULL, addDelta = NULL, - deltaPerEvent = NULL) +add_delta_to_PPT( + data, + ind_events = NULL, + addDelta = NULL, + deltaPerEvent = NULL +) } \arguments{ \item{data}{A numeric vector. 
Daily values of precipitation.} diff --git a/man/applyDeltas.Rd b/man/applyDeltas.Rd index c2d34b30..93bb993c 100644 --- a/man/applyDeltas.Rd +++ b/man/applyDeltas.Rd @@ -5,8 +5,14 @@ \title{Add/multiply deltas to historic daily data to generate future daily \pkg{rSOILWAT2}-formatted weather.} \usage{ -applyDeltas(obs.hist.daily, obs.hist.monthly, delta_ts, ppt_fun, - sigmaN = 6, do_checks = FALSE) +applyDeltas( + obs.hist.daily, + obs.hist.monthly, + delta_ts, + ppt_fun, + sigmaN = 6, + do_checks = FALSE +) } \description{ Used by \code{downscale.raw}, \code{downscale.delta}, and \code{downscale.deltahybrid} diff --git a/man/applyPPTdelta_detailed.Rd b/man/applyPPTdelta_detailed.Rd index d8a42701..822273b9 100644 --- a/man/applyPPTdelta_detailed.Rd +++ b/man/applyPPTdelta_detailed.Rd @@ -5,8 +5,7 @@ \title{Add/multiply deltas to historic daily precipitation to generate future daily precipitation with checks} \usage{ -applyPPTdelta_detailed(m, data, ydelta, add_days, mult_days, daily, - monthly) +applyPPTdelta_detailed(m, data, ydelta, add_days, mult_days, daily, monthly) } \arguments{ \item{m}{An integer vector. Each element corresponds to a day (i.e., \code{length(m)} diff --git a/man/applyPPTdelta_simple.Rd b/man/applyPPTdelta_simple.Rd index 199cc954..c26aa3ca 100644 --- a/man/applyPPTdelta_simple.Rd +++ b/man/applyPPTdelta_simple.Rd @@ -5,8 +5,14 @@ \title{Add/multiply deltas to historic daily precipitation to generate future daily precipitation without checks} \usage{ -applyPPTdelta_simple(m, data, ydelta, add_days, mult_days, - set_negPPT_to0 = TRUE) +applyPPTdelta_simple( + m, + data, + ydelta, + add_days, + mult_days, + set_negPPT_to0 = TRUE +) } \arguments{ \item{m}{An integer vector. 
Each element corresponds to a day (i.e., \code{length(m)} diff --git a/man/calc.ScenarioWeather.Rd b/man/calc.ScenarioWeather.Rd deleted file mode 100644 index 1d99e548..00000000 --- a/man/calc.ScenarioWeather.Rd +++ /dev/null @@ -1,15 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ExtractData_ClimateDownscaling.R -\name{calc.ScenarioWeather} -\alias{calc.ScenarioWeather} -\title{Extract climate scenario data and downscale to daily weather data} -\usage{ -calc.ScenarioWeather(i, ig, il, gcm, site_id, i_tag, clim_source, use_CF, - use_NEX, climDB_meta, climDB_files, reqGCMs, reqRCPsPerGCM, - reqDownscalingsPerGCM, climate.ambient, locations, compression_type, - getYears, assocYears, sim_time, task_seed, opt_DS, project_paths, - dir_failed, resume, verbose, print.debug) -} -\description{ -Extract climate scenario data and downscale to daily weather data -} diff --git a/man/calc_BareSoilEvapCoefs.Rd b/man/calc_BareSoilEvapCoefs.Rd index b51e7fc2..b4457f6a 100644 --- a/man/calc_BareSoilEvapCoefs.Rd +++ b/man/calc_BareSoilEvapCoefs.Rd @@ -4,8 +4,7 @@ \alias{calc_BareSoilEvapCoefs} \title{Calculate potential bare-soil evaporation coefficients} \usage{ -calc_BareSoilEvapCoefs(layers_depth, sand, clay, - depth_max_bs_evap_cm = 15) +calc_BareSoilEvapCoefs(layers_depth, sand, clay, depth_max_bs_evap_cm = 15) } \arguments{ \item{layers_depth}{A numeric vector, matrix, or data.frame. 
Values describe diff --git a/man/calc_DailyScenarioWeather.Rd b/man/calc_DailyScenarioWeather.Rd new file mode 100644 index 00000000..4b1b3981 --- /dev/null +++ b/man/calc_DailyScenarioWeather.Rd @@ -0,0 +1,50 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R +\name{calc_DailyScenarioWeather} +\alias{calc_DailyScenarioWeather} +\title{Extract daily climate scenario data} +\usage{ +calc_DailyScenarioWeather( + ids_ToDo, + clim_source, + climDB_meta, + climDB_files, + reqGCMs, + reqRCPsPerGCM, + reqDownscalingsPerGCM, + locations, + compression_type, + getYears, + sim_scen_ids, + dir_out_temp, + dir_failed, + fdbWeather, + resume, + verbose +) +} +\description{ +Extract daily climate scenario data +} +\section{Details}{ + +This function parallelize over \code{reqGCMs} x \code{reqRCPsPerGCM} +combinations, i.e., data for all \code{locations} are extracted for +one value of \code{reqGCMs} x \code{reqRCPsPerGCM} at a time. +This is good if file handling is slow and memory is not limiting. +However, if data cannot be loaded into memory, then this function +cannot work. In this case, this function would need to be re-written to +additionally loop over chunks of \code{locations}. +} + +\section{Notes}{ + This function works only if +\itemize{ + \item \code{sim_time[["future_yrs"]]} contains \var{"dall"} and + \item \code{reqDownscalingsPerGCM} is \var{"idem"}. 
+} +} + +\seealso{ +\code{\link{calc_MonthlyScenarioWeather}} +} diff --git a/man/calc_Days_withLoweredPPT.Rd b/man/calc_Days_withLoweredPPT.Rd index e9926210..85368e81 100644 --- a/man/calc_Days_withLoweredPPT.Rd +++ b/man/calc_Days_withLoweredPPT.Rd @@ -4,8 +4,14 @@ \alias{calc_Days_withLoweredPPT} \title{Distribute precipitation among days without too extreme values} \usage{ -calc_Days_withLoweredPPT(data_N, this_newPPTevent_N, sigmaN, - this_i_extreme, this_pptToDistribute, seed = NA) +calc_Days_withLoweredPPT( + data_N, + this_newPPTevent_N, + sigmaN, + this_i_extreme, + this_pptToDistribute, + seed = NA +) } \arguments{ \item{data_N}{An integer value. The number of days of the precipitation record to diff --git a/man/calc_DurationFavorableConds.Rd b/man/calc_DurationFavorableConds.Rd deleted file mode 100644 index c5f939ae..00000000 --- a/man/calc_DurationFavorableConds.Rd +++ /dev/null @@ -1,16 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{calc_DurationFavorableConds} -\alias{calc_DurationFavorableConds} -\title{Function to calculate for each day of the year, duration in days of -upcoming favorable conditions accounting for consequences.unfavorable = 0 -(if conditions become unfavorable, then restart the count), =1 (resume)} -\usage{ -calc_DurationFavorableConds(RYyear, consequences.unfavorable, - Germination_WhileFavorable, RYyear_ForEachUsedDay) -} -\description{ -Function to calculate for each day of the year, duration in days of -upcoming favorable conditions accounting for consequences.unfavorable = 0 -(if conditions become unfavorable, then restart the count), =1 (resume) -} diff --git a/man/calc_MonthlyScenarioWeather.Rd b/man/calc_MonthlyScenarioWeather.Rd new file mode 100644 index 00000000..2092f989 --- /dev/null +++ b/man/calc_MonthlyScenarioWeather.Rd @@ -0,0 +1,42 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R 
+\name{calc_MonthlyScenarioWeather} +\alias{calc_MonthlyScenarioWeather} +\title{Extract monthly climate scenario data and downscale to daily weather data} +\usage{ +calc_MonthlyScenarioWeather( + i, + ig, + il, + gcm, + site_id, + i_tag, + clim_source, + use_CF, + use_NEX, + climDB_meta, + climDB_files, + reqGCMs, + reqRCPsPerGCM, + reqDownscalingsPerGCM, + climate.ambient, + locations, + compression_type, + getYears, + assocYears, + sim_time, + task_seed, + opt_DS, + project_paths, + dir_failed, + resume, + verbose, + print.debug +) +} +\description{ +Extract monthly climate scenario data and downscale to daily weather data +} +\seealso{ +\code{\link{calc_DailyScenarioWeather}} +} diff --git a/man/calc_RequestedSoilLayers.Rd b/man/calc_RequestedSoilLayers.Rd index 452bdf60..dd00bc57 100644 --- a/man/calc_RequestedSoilLayers.Rd +++ b/man/calc_RequestedSoilLayers.Rd @@ -5,8 +5,13 @@ \title{Add soil layers (by interpolation) if not already present and store in soil data input file} \usage{ -calc_RequestedSoilLayers(SFSW2_prj_meta, SFSW2_prj_inputs, runIDs_adjust, - keep_old_depth = TRUE, verbose = FALSE) +calc_RequestedSoilLayers( + SFSW2_prj_meta, + SFSW2_prj_inputs, + runIDs_adjust, + keep_old_depth = TRUE, + verbose = FALSE +) } \description{ Add soil layers (by interpolation) if not already present and store in diff --git a/man/calc_SeedlingMortality.Rd b/man/calc_SeedlingMortality.Rd deleted file mode 100644 index 2fe77abc..00000000 --- a/man/calc_SeedlingMortality.Rd +++ /dev/null @@ -1,11 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{calc_SeedlingMortality} -\alias{calc_SeedlingMortality} -\title{Function to calculate mortality under conditions and checks survival limit} -\usage{ -calc_SeedlingMortality(kill.conditions, max.duration.before.kill) -} -\description{ -Function to calculate mortality under conditions and checks survival limit -} diff --git a/man/calc_TimeToGerminate.Rd 
b/man/calc_TimeToGerminate.Rd deleted file mode 100644 index ae849672..00000000 --- a/man/calc_TimeToGerminate.Rd +++ /dev/null @@ -1,20 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{calc_TimeToGerminate} -\alias{calc_TimeToGerminate} -\title{Function to estimate time to germinate for each day of a given year and -conditions (temperature, top soil \var{SWP})} -\usage{ -calc_TimeToGerminate(RYyear, Germination_WhileFavorable, - LengthDays_FavorableConditions, RYyear_ForEachUsedDay, soilTmeanSnow, - swp.TopMean, TmeanJan, param, seed = NA) -} -\arguments{ -\item{seed}{A seed set, \code{NULL}, or \code{NA}. \code{NA} will not affect -the state of the \var{RNG}; \code{NULL} will re-initialize the \var{RNG}; -and all other values are passed to \code{\link{set.seed}}.} -} -\description{ -Function to estimate time to germinate for each day of a given year and -conditions (temperature, top soil \var{SWP}) -} diff --git a/man/calc_cell_ISRICWISE.Rd b/man/calc_cell_ISRICWISE.Rd index e476eff0..17246727 100644 --- a/man/calc_cell_ISRICWISE.Rd +++ b/man/calc_cell_ISRICWISE.Rd @@ -5,8 +5,18 @@ \title{Calculate weighted mean soil variables from one of the \var{\sQuote{ISRIC-WISE}} data bases for one simulation cell} \usage{ -calc_cell_ISRICWISE(i, i_sim_cells_SUIDs, sim_soils, layer_N, layer_Nsim, - ldepth, dat_wise, nvars, var_tags, val_rocks = NULL) +calc_cell_ISRICWISE( + i, + i_sim_cells_SUIDs, + sim_soils, + layer_N, + layer_Nsim, + ldepth, + dat_wise, + nvars, + var_tags, + val_rocks = NULL +) } \arguments{ \item{i}{An integer value. 
The number of the simulation site/cell location.} diff --git a/man/calc_timeSlices.Rd b/man/calc_timeSlices.Rd index c116f0fc..f04c4db8 100644 --- a/man/calc_timeSlices.Rd +++ b/man/calc_timeSlices.Rd @@ -10,13 +10,13 @@ calc_timeSlices(sim_time, tbox) \item{sim_time}{A list with elements \code{future_N}, \code{future_yrs}, \code{DScur_startyr}, and \code{DScur_endyr}.} -\item{tbox}{A data.frame or matrix with two rows \code{start} and \code{end} and two -columns \code{first} and \code{second} describing years including in a specific -climate data source.} +\item{tbox}{A data.frame or matrix with two rows \code{start} and +\code{end} and two columns \code{first} and \code{second} describing years +including in a specific climate data source.} } \value{ -A data.frame with rows for each extraction run-slice and four columns 'Run', - 'Slice', 'Time', and 'Year'. +A data.frame with rows for each extraction run-slice and + four columns 'Run', 'Slice', 'Time', and 'Year'. } \description{ Calculate historical and future simulation time slices diff --git a/man/check_SuitableGrowthThisYear.Rd b/man/check_SuitableGrowthThisYear.Rd deleted file mode 100644 index 8b3ca876..00000000 --- a/man/check_SuitableGrowthThisYear.Rd +++ /dev/null @@ -1,14 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{check_SuitableGrowthThisYear} -\alias{check_SuitableGrowthThisYear} -\title{Function to calculate favorable conditions for seedling growth for each day -of a given year} -\usage{ -check_SuitableGrowthThisYear(favorable.conditions, - consequences.unfavorable) -} -\description{ -Function to calculate favorable conditions for seedling growth for each day -of a given year -} diff --git a/man/check_monotonic_increase.Rd b/man/check_monotonic_increase.Rd deleted file mode 100644 index c37ee326..00000000 --- a/man/check_monotonic_increase.Rd +++ /dev/null @@ -1,39 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit 
documentation in R/Mathematical_Functions.R -\name{check_monotonic_increase} -\alias{check_monotonic_increase} -\title{Check that values in matrix-like object are (strictly) monotonically -increasing/decreasing} -\usage{ -check_monotonic_increase(x, MARGIN = 1, increase = TRUE, - strictly = FALSE, fail = FALSE, replacement = NA, na.rm = FALSE) -} -\arguments{ -\item{x}{A numeric matrix like object.} - -\item{MARGIN}{An integer value giving the subscripts over which the -monotonicity will be checked; 1 indicates rows, 2 indicates columns.} - -\item{increase}{A logical value. If \code{TRUE}, check monotonic increase; if -\code{FALSE}, check monotonic decrease.} - -\item{strictly}{A logical value. If \code{TRUE}, check for a strict monotonic -pattern.} - -\item{fail}{A logical value. If \code{TRUE}, throw error if monotonic check -fails.} - -\item{replacement}{A value that replaces non-(strictly) monotonically -increasing/decreasing values if \code{fail} is \code{FALSE}.} - -\item{na.rm}{A logical value. If \code{TRUE}, then ignore \code{NA}s; if -\code{FALSE}, then fail if \code{strictly} or replace with -\code{replacement}.} -} -\value{ -The updated \code{x}. 
-} -\description{ -Check that values in matrix-like object are (strictly) monotonically -increasing/decreasing -} diff --git a/man/check_outputDB_completeness.Rd b/man/check_outputDB_completeness.Rd index 6ed5a57d..e018f867 100644 --- a/man/check_outputDB_completeness.Rd +++ b/man/check_outputDB_completeness.Rd @@ -5,8 +5,13 @@ \title{Check whether \var{\sQuote{dbOutput}} contains a complete set of output/simulation results} \usage{ -check_outputDB_completeness(SFSW2_prj_meta, opt_parallel, opt_behave, - opt_out_run, opt_verbosity) +check_outputDB_completeness( + SFSW2_prj_meta, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity +) } \description{ Check whether \var{\sQuote{dbOutput}} contains a complete set of diff --git a/man/check_rSFSW2_project_input_data.Rd b/man/check_rSFSW2_project_input_data.Rd index 60c08def..e5f28989 100644 --- a/man/check_rSFSW2_project_input_data.Rd +++ b/man/check_rSFSW2_project_input_data.Rd @@ -4,8 +4,12 @@ \alias{check_rSFSW2_project_input_data} \title{Attempt to check input data of a \pkg{rSFSW2} project for consistency} \usage{ -check_rSFSW2_project_input_data(SFSW2_prj_meta, SFSW2_prj_inputs, - opt_chunks, opt_verbosity) +check_rSFSW2_project_input_data( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_chunks, + opt_verbosity +) } \description{ Attempt to check input data of a \pkg{rSFSW2} project for consistency diff --git a/man/check_rSW2_version.Rd b/man/check_rSW2_version.Rd new file mode 100644 index 00000000..a035fe1e --- /dev/null +++ b/man/check_rSW2_version.Rd @@ -0,0 +1,29 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/rSOILWAT2_DataAccess.R +\name{check_rSW2_version} +\alias{check_rSW2_version} +\title{Check version of a \pkg{rSOILWAT2} input or output object compared to +declared dependency in the package \var{DESCRIPTION}} +\usage{ +check_rSW2_version(object, strict = TRUE) +} +\arguments{ +\item{object}{An object of \pkg{rSOILWAT2} classes +\code{\linkS4class{swInputData}} or 
\code{\linkS4class{swOutput}}.} + +\item{strict}{A logical value. If \code{FALSE} and check would fail, then +a warning is issued (and \code{TRUE} is returned nevertheless).} +} +\value{ +A logical value. + Returns \code{TRUE} if version of \code{object} meets at least the minimal + required \pkg{rSOILWAT2} version -- or if \code{strict} is \code{FALSE}. + Returns \code{FALSE} otherwise. +} +\description{ +Check version of a \pkg{rSOILWAT2} input or output object compared to +declared dependency in the package \var{DESCRIPTION} +} +\seealso{ +\code{\link[rSOILWAT2]{check_version}} +} diff --git a/man/check_weatherDB.Rd b/man/check_weatherDB.Rd index b47dca4b..58e16404 100644 --- a/man/check_weatherDB.Rd +++ b/man/check_weatherDB.Rd @@ -4,9 +4,16 @@ \alias{check_weatherDB} \title{Checks data in a weather database} \usage{ -check_weatherDB(dir_prj, fdbWeather, repeats = 2L, - do_preprocess_tempfiles = TRUE, n_cores = 20L, startyear = 1979, - endyear = 2010, seed = NA) +check_weatherDB( + dir_prj, + fdbWeather, + repeats = 2L, + do_preprocess_tempfiles = TRUE, + n_cores = 20L, + startyear = 1979, + endyear = 2010, + seed = NA +) } \arguments{ \item{dir_prj}{A character string. The directory path the \pkg{rSFSW2} diff --git a/man/circ_add.Rd b/man/circ_add.Rd deleted file mode 100644 index 1c754c23..00000000 --- a/man/circ_add.Rd +++ /dev/null @@ -1,31 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{circ_add} -\alias{circ_add} -\title{Calculate the circular addition \var{x + y}} -\usage{ -circ_add(x, y, int) -} -\arguments{ -\item{x}{A numeric vector or array.} - -\item{y}{A numeric vector or array.} - -\item{int}{A numeric value. 
The number of units of \code{x} in a full circle, -e.g., for unit days: \code{int = 365}; for unit months: \code{int = 12}.} -} -\description{ -Calculate the circular addition \var{x + y} -} -\examples{ -# Matrix examples: day of year -x <- matrix(c(260, 240, 10, 360, 0, 360), nrow = 3, ncol = 2) -y <- matrix(c(240, 260, 360, 10, 360, 0), nrow = 3, ncol = 2) -circ_add(x, y, int = 365) - -# Circular addition and subtraction -r1 <- circ_add(circ_minus(x, y, int = 365), y, int = 365) -r2 <- circ_minus(circ_add(x, y, int = 365), y, int = 365) -all.equal(r1, r2) - -} diff --git a/man/circ_minus.Rd b/man/circ_minus.Rd deleted file mode 100644 index 8e5b3c71..00000000 --- a/man/circ_minus.Rd +++ /dev/null @@ -1,36 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{circ_minus} -\alias{circ_minus} -\title{Calculate the circular subtraction \var{x - y}} -\usage{ -circ_minus(x, y, int) -} -\arguments{ -\item{x}{A numeric vector or array.} - -\item{y}{A numeric vector or array.} - -\item{int}{A numeric value. 
The number of units of \code{x} in a full circle, -e.g., for unit days: \code{int = 365}; for unit months: \code{int = 12}.} -} -\description{ -Calculate the circular subtraction \var{x - y} -} -\examples{ -# Days of year -circ_minus(260, 240, int = 365) ## expected: +20 -circ_minus(240, 260, int = 365) ## expected: -20 -circ_minus(10, 360, int = 365) ## expected: +15 -circ_minus(360, 10, int = 365) ## expected: -15 -circ_minus(0, 360, int = 365) ## expected: +5 -circ_minus(360, 0, int = 365) ## expected: -5 - -# Matrix examples -x <- matrix(c(260, 240, 10, 360, 0, 360), nrow = 3, ncol = 2) -y <- matrix(c(240, 260, 360, 10, 360, 0), nrow = 3, ncol = 2) -circ_minus(x, y, int = 365) -y2 <- y -y2[1, 1] <- NA -circ_minus(y2, x, int = 365) -} diff --git a/man/circular.Rd b/man/circular.Rd deleted file mode 100644 index 255b8324..00000000 --- a/man/circular.Rd +++ /dev/null @@ -1,35 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{circular} -\alias{circular} -\alias{circ_mean} -\alias{circ_range} -\alias{circ_sd} -\title{Functions for circular descriptive statistics} -\usage{ -circ_mean(x, int, na.rm = FALSE) - -circ_range(x, int, na.rm = FALSE) - -circ_sd(x, int, na.rm = FALSE) -} -\arguments{ -\item{x}{A numeric vector or a matrix. If a data.frame is supplied, then -\code{x} is coerced to a matrix.} - -\item{int}{A numeric value. The number of units of \code{x} in a full circle, -e.g., for unit days: \code{int = 365}; for unit months: \code{int = 12}.} - -\item{na.rm}{A logical value indicating whether \code{NA} values should be -stripped before the computation proceeds.} -} -\value{ -A numeric value or \code{NA}. 
-} -\description{ -Functions for circular descriptive statistics -} -\seealso{ -\code{\link[circular]{mean.circular}}, - \code{\link[circular]{range.circular}}, \code{\link[circular]{sd.circular}} -} diff --git a/man/compare_test_output.Rd b/man/compare_test_output.Rd index f7203900..4d925502 100644 --- a/man/compare_test_output.Rd +++ b/man/compare_test_output.Rd @@ -30,6 +30,9 @@ the file name. # Compare output database with reference database comp <- compare_test_output(".", dir_ref = "../0_ReferenceOutput/") + + # Clean up + delete_test_output(".") } } diff --git a/man/compare_two_dbOutput.Rd b/man/compare_two_dbOutput.Rd index 402aea11..2e1447c8 100644 --- a/man/compare_two_dbOutput.Rd +++ b/man/compare_two_dbOutput.Rd @@ -4,8 +4,13 @@ \alias{compare_two_dbOutput} \title{Compare one output database with another output database} \usage{ -compare_two_dbOutput(dbOut1, dbOut2, tol = 0.001, comp_absolute = TRUE, - verbose = FALSE) +compare_two_dbOutput( + dbOut1, + dbOut2, + tol = 0.001, + comp_absolute = TRUE, + verbose = FALSE +) } \arguments{ \item{dbOut1}{A character string. 
Path to first output database used as diff --git a/man/compile_overall_timer.Rd b/man/compile_overall_timer.Rd index 790601d7..306891ac 100644 --- a/man/compile_overall_timer.Rd +++ b/man/compile_overall_timer.Rd @@ -4,10 +4,18 @@ \alias{compile_overall_timer} \title{Write timing information to the timing file} \usage{ -compile_overall_timer(timerfile2, dir_out, workersN = 0, - runs.completed = 0, scenario_No = 0, ensembles.completed = 0, - delta.overall = NA, delta.outputDB = NA, delta.check = NA, - delta.ensembles = NA) +compile_overall_timer( + timerfile2, + dir_out, + workersN = 0, + runs.completed = 0, + scenario_No = 0, + ensembles.completed = 0, + delta.overall = NA, + delta.outputDB = NA, + delta.check = NA, + delta.ensembles = NA +) } \description{ Write timing information to the timing file diff --git a/man/controlExtremePPTevents.Rd b/man/controlExtremePPTevents.Rd index 8d080864..0c20f36f 100644 --- a/man/controlExtremePPTevents.Rd +++ b/man/controlExtremePPTevents.Rd @@ -4,8 +4,14 @@ \alias{controlExtremePPTevents} \title{Check for and handle days with too extreme precipitation values} \usage{ -controlExtremePPTevents(data, dailyPPTceiling, sigmaN, do_checks = FALSE, - mfact = 10, seed = NA) +controlExtremePPTevents( + data, + dailyPPTceiling, + sigmaN, + do_checks = FALSE, + mfact = 10, + seed = NA +) } \arguments{ \item{data}{A numeric vector. Daily values of precipitation.} diff --git a/man/convert_precipitation.Rd b/man/convert_precipitation.Rd deleted file mode 100644 index a09e7d3e..00000000 --- a/man/convert_precipitation.Rd +++ /dev/null @@ -1,29 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Miscellaneous_Functions.R -\name{convert_precipitation} -\alias{convert_precipitation} -\title{Converts units of precipitation data} -\usage{ -convert_precipitation(x, dpm, unit_from, unit_to = "cm month-1") -} -\arguments{ -\item{x}{A numeric vector. 
Precipitation data as monthly series in units of -\code{unit_from}.} - -\item{dpm}{A numeric vector. Number of days per month in the time series -\code{x}.} - -\item{unit_from}{A character string. Units of data in \code{x}. Currently, -supported units include "mm/month", "mm month-1", "mm/d", "mm d-1", -"kg/m2/s", "kg m-2 s-1", "mm/s", "mm s-1", "cm/month", "cm month-1".} - -\item{unit_to}{A character string. Units to which data are converted. -Currently, supported unit is "cm month-1" respectively "cm/month".} -} -\value{ -A numeric vector of the same size as \code{x} in units of - \code{unit_to}. -} -\description{ -Converts units of precipitation data -} diff --git a/man/convert_temperature.Rd b/man/convert_temperature.Rd deleted file mode 100644 index 8391ea07..00000000 --- a/man/convert_temperature.Rd +++ /dev/null @@ -1,25 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Miscellaneous_Functions.R -\name{convert_temperature} -\alias{convert_temperature} -\title{Converts units of temperature data} -\usage{ -convert_temperature(x, unit_from, unit_to = "C") -} -\arguments{ -\item{x}{A numeric vector. Temperature data as monthly series in units of -\code{unit_from}.} - -\item{unit_from}{A character string. Units of data in \code{x}. Currently, -supported units include "K", "F", and "C".} - -\item{unit_to}{A character string. Units to which data are converted. -Currently, supported unit is "C".} -} -\value{ -A numeric vector of the same size as \code{x} in units of - \code{unit_to}. 
-} -\description{ -Converts units of temperature data -} diff --git a/man/dbConnect2.Rd b/man/dbConnect2.Rd index bf23f211..891b9321 100644 --- a/man/dbConnect2.Rd +++ b/man/dbConnect2.Rd @@ -4,8 +4,14 @@ \alias{dbConnect2} \title{Connect to \var{SQLite}-database} \usage{ -dbConnect2(dbname, flags = SQLITE_RW, verbose = FALSE, repeats = 10L, - sleep_s = 5, seed = NA) +dbConnect2( + dbname, + flags = SQLITE_RW, + verbose = FALSE, + repeats = 10L, + sleep_s = 5, + seed = NA +) } \arguments{ \item{dbname}{A character string. The path including name to the database.} diff --git a/man/dbExecute2.Rd b/man/dbExecute2.Rd index 46e8d642..67635a08 100644 --- a/man/dbExecute2.Rd +++ b/man/dbExecute2.Rd @@ -4,8 +4,7 @@ \alias{dbExecute2} \title{Execute a SQL statement on a database connection with safeguards} \usage{ -dbExecute2(con, SQL, verbose = FALSE, repeats = 10L, sleep_s = 5, - seed = NA) +dbExecute2(con, SQL, verbose = FALSE, repeats = 10L, sleep_s = 5, seed = NA) } \arguments{ \item{con}{A \code{\link[DBI:DBIConnection-class]{DBIConnection}} or diff --git a/man/dbOut_check_values.Rd b/man/dbOut_check_values.Rd index 1b2bd750..8b53d85b 100644 --- a/man/dbOut_check_values.Rd +++ b/man/dbOut_check_values.Rd @@ -5,8 +5,13 @@ \title{Check that cells of \var{dbOutput} agree with corresponding cells of another database} \usage{ -dbOut_check_values(dbOut_fname, dbNew_fname, fields_check = NULL, - tol = 0.001, verbose = FALSE) +dbOut_check_values( + dbOut_fname, + dbNew_fname, + fields_check = NULL, + tol = 0.001, + verbose = FALSE +) } \arguments{ \item{dbOut_fname}{A character string. 
The file path of the main diff --git a/man/dbOut_read_variables_from_scenario.Rd b/man/dbOut_read_variables_from_scenario.Rd index fe7691b6..79e20e73 100644 --- a/man/dbOut_read_variables_from_scenario.Rd +++ b/man/dbOut_read_variables_from_scenario.Rd @@ -6,9 +6,13 @@ runs and one of the overall aggregation tables for one of the scenarios and optionally subsets rows by a where-clause} \usage{ -dbOut_read_variables_from_scenario(fname_dbOut, variables = NULL, - MeanOrSD = c("Mean", "SD"), scenario = "Current", - whereClause = NULL) +dbOut_read_variables_from_scenario( + fname_dbOut, + variables = NULL, + MeanOrSD = c("Mean", "SD"), + scenario = "Current", + whereClause = NULL +) } \arguments{ \item{fname_dbOut}{A character string. The path to the output database.} diff --git a/man/dbOut_update_values.Rd b/man/dbOut_update_values.Rd index 671678f6..b5f582db 100644 --- a/man/dbOut_update_values.Rd +++ b/man/dbOut_update_values.Rd @@ -4,8 +4,13 @@ \alias{dbOut_update_values} \title{Update values of \var{dbOutput} based on a new database} \usage{ -dbOut_update_values(dbOut_fname, dbNew_fname, fields_update = NULL, - fields_exclude = NULL, verbose = FALSE) +dbOut_update_values( + dbOut_fname, + dbNew_fname, + fields_update = NULL, + fields_exclude = NULL, + verbose = FALSE +) } \arguments{ \item{dbOut_fname}{A character string. The file path of the main diff --git a/man/dbOutput_Tables_have_SoilLayers.Rd b/man/dbOutput_Tables_have_SoilLayers.Rd index 25f3048f..107d7220 100644 --- a/man/dbOutput_Tables_have_SoilLayers.Rd +++ b/man/dbOutput_Tables_have_SoilLayers.Rd @@ -5,8 +5,7 @@ \title{Checks whether output tables of \var{\sQuote{dbOutput}} store output of variables for each soil layer} \usage{ -dbOutput_Tables_have_SoilLayers(tables = NULL, con = NULL, - dbname = NULL) +dbOutput_Tables_have_SoilLayers(tables = NULL, con = NULL, dbname = NULL) } \arguments{ \item{tables}{A vector of character strings. 
The names of those tables that diff --git a/man/dbOutput_add_calculated_field.Rd b/man/dbOutput_add_calculated_field.Rd index 6ff592a4..1ca43d73 100644 --- a/man/dbOutput_add_calculated_field.Rd +++ b/man/dbOutput_add_calculated_field.Rd @@ -5,8 +5,17 @@ \title{Add new field(s) to a table in \var{dbOutput} that is/are based on a calculation of values from (an) existing field(s)} \usage{ -dbOutput_add_calculated_field(dbOut_fname, table, vars_orig, vars_new, FUN, - ..., overwrite = FALSE, verbose = FALSE, chunk_size = 1e+05) +dbOutput_add_calculated_field( + dbOut_fname, + table, + vars_orig, + vars_new, + FUN, + ..., + overwrite = FALSE, + verbose = FALSE, + chunk_size = 1e+05 +) } \arguments{ \item{dbOut_fname}{A character string. The path to the output database.} diff --git a/man/dbOutput_subset.Rd b/man/dbOutput_subset.Rd index bcf6c569..9f40ea52 100644 --- a/man/dbOutput_subset.Rd +++ b/man/dbOutput_subset.Rd @@ -4,9 +4,15 @@ \alias{dbOutput_subset} \title{Make a copy of \var{\code{dbOutput}} with a subset of tables and/or fields} \usage{ -dbOutput_subset(dbOut_fname, dbNew_fname, fields_include = NULL, - fields_exclude = NULL, subset_scenarios = NULL, - subset_experiments = NULL, verbose = FALSE) +dbOutput_subset( + dbOut_fname, + dbNew_fname, + fields_include = NULL, + fields_exclude = NULL, + subset_scenarios = NULL, + subset_experiments = NULL, + verbose = FALSE +) } \arguments{ \item{dbOut_fname}{A character string. 
The file path of the main diff --git a/man/dbOutput_update_OverallAggregationTable.Rd b/man/dbOutput_update_OverallAggregationTable.Rd index 725acae3..ac311652 100644 --- a/man/dbOutput_update_OverallAggregationTable.Rd +++ b/man/dbOutput_update_OverallAggregationTable.Rd @@ -4,8 +4,12 @@ \alias{dbOutput_update_OverallAggregationTable} \title{Add fields to an existing \var{\sQuote{dbOutput}}} \usage{ -dbOutput_update_OverallAggregationTable(SFSW2_prj_meta, col_ids = NULL, - chunksize = 1000, verbose = FALSE) +dbOutput_update_OverallAggregationTable( + SFSW2_prj_meta, + col_ids = NULL, + chunksize = 1000, + verbose = FALSE +) } \arguments{ \item{SFSW2_prj_meta}{See elsewhere} diff --git a/man/dbWork_checkpoint.Rd b/man/dbWork_checkpoint.Rd index e3cf3332..7e6eab82 100644 --- a/man/dbWork_checkpoint.Rd +++ b/man/dbWork_checkpoint.Rd @@ -5,9 +5,13 @@ \title{Initiate a checkpoint operation on a \var{SQLite}-database \code{dbWork} of a \pkg{rSFSW2} simulation project} \usage{ -dbWork_checkpoint(path = NULL, con = NULL, mode = c("PASSIVE", - "FULL", "RESTART", "TRUNCATE", ""), failure = c("silent", "warning", - "error"), verbose = FALSE) +dbWork_checkpoint( + path = NULL, + con = NULL, + mode = c("PASSIVE", "FULL", "RESTART", "TRUNCATE", ""), + failure = c("silent", "warning", "error"), + verbose = FALSE +) } \description{ Initiate a checkpoint operation on a \var{SQLite}-database \code{dbWork} of a diff --git a/man/dbWork_report_completion.Rd b/man/dbWork_report_completion.Rd index 42e8c5b9..72d26de3 100644 --- a/man/dbWork_report_completion.Rd +++ b/man/dbWork_report_completion.Rd @@ -4,8 +4,11 @@ \alias{dbWork_report_completion} \title{Estimate percentage of completed runs} \usage{ -dbWork_report_completion(path, use_granular_control = FALSE, - SFSW2_prj_meta = NULL) +dbWork_report_completion( + path, + use_granular_control = FALSE, + SFSW2_prj_meta = NULL +) } \arguments{ \item{path}{A character string. 
Path to the folder where the database will be diff --git a/man/dbWork_update_IncludeYN.Rd b/man/dbWork_update_IncludeYN.Rd index f7d62712..e3674aad 100644 --- a/man/dbWork_update_IncludeYN.Rd +++ b/man/dbWork_update_IncludeYN.Rd @@ -4,8 +4,7 @@ \alias{dbWork_update_IncludeYN} \title{Update \var{\sQuote{include_YN}}} \usage{ -dbWork_update_IncludeYN(con, table, id_name, has_include_YN, - should_include_YN) +dbWork_update_IncludeYN(con, table, id_name, has_include_YN, should_include_YN) } \description{ Update \var{\sQuote{include_YN}} diff --git a/man/dbWork_update_job.Rd b/man/dbWork_update_job.Rd index 4243b463..675f9664 100644 --- a/man/dbWork_update_job.Rd +++ b/man/dbWork_update_job.Rd @@ -4,8 +4,7 @@ \alias{dbWork_update_job} \title{Update run information of a \pkg{rSFSW2} simulation project} \usage{ -dbWork_update_job(path, runID, status, time_s = "NULL", - verbose = FALSE) +dbWork_update_job(path, runID, status, time_s = "NULL", verbose = FALSE) } \arguments{ \item{path}{A character string. Path to the folder where the database will be diff --git a/man/determine_simulation_size.Rd b/man/determine_simulation_size.Rd index d2e7a3e0..4b77d1b2 100644 --- a/man/determine_simulation_size.Rd +++ b/man/determine_simulation_size.Rd @@ -4,8 +4,12 @@ \alias{determine_simulation_size} \title{Calculate the size of the simulation experiment: number of runs, etc.} \usage{ -determine_simulation_size(SWRunInformation, include_YN, - sw_input_experimentals, sim_scens) +determine_simulation_size( + SWRunInformation, + include_YN, + sw_input_experimentals, + sim_scens +) } \description{ Calculate the size of the simulation experiment: number of runs, etc. 
diff --git a/man/dir_safe_create.Rd b/man/dir_safe_create.Rd index 22cfa2ae..e2b74e14 100644 --- a/man/dir_safe_create.Rd +++ b/man/dir_safe_create.Rd @@ -4,8 +4,7 @@ \alias{dir_safe_create} \title{Create the elements of paths} \usage{ -dir_safe_create(paths, showWarnings = FALSE, recursive = TRUE, - mode = "0777") +dir_safe_create(paths, showWarnings = FALSE, recursive = TRUE, mode = "0777") } \arguments{ \item{paths}{A list or vector of strings. Path names to be created.} @@ -16,7 +15,7 @@ dir_safe_create(paths, showWarnings = FALSE, recursive = TRUE, last be created? If true, like the Unix command \command{mkdir -p}.} \item{mode}{the mode to be used on Unix-alikes: it will be - coerced by \code{\link{as.octmode}}. For \code{Sys.chmod} it is + coerced by \code{\link[base]{as.octmode}}. For \code{Sys.chmod} it is recycled along \code{paths}.} } \value{ diff --git a/man/doQmapQUANT.Rd b/man/doQmapQUANT.Rd index 35beb37f..3a85b968 100644 --- a/man/doQmapQUANT.Rd +++ b/man/doQmapQUANT.Rd @@ -6,12 +6,26 @@ \alias{doQmapQUANT_drs} \title{Apply a quantile mapping} \usage{ -doQmapQUANT.default_drs(x, fobj, type = NULL, lin_extrapol = NULL, - spline_method = NULL, monthly_extremes = NULL, fix_spline = NULL, - ...) +doQmapQUANT.default_drs( + x, + fobj, + type = NULL, + lin_extrapol = NULL, + spline_method = NULL, + monthly_extremes = NULL, + fix_spline = NULL, + ... +) -doQmapQUANT_drs(x, fobj, type_map = NULL, monthly_obs_base = NULL, - monthly_extremes = NULL, fix_spline = NULL, ...) +doQmapQUANT_drs( + x, + fobj, + type_map = NULL, + monthly_obs_base = NULL, + monthly_extremes = NULL, + fix_spline = NULL, + ... +) } \arguments{ \item{x}{A numeric vector. 
The values to map.} diff --git a/man/do_OneSite.Rd b/man/do_OneSite.Rd index 9e0bfec2..678c9a42 100644 --- a/man/do_OneSite.Rd +++ b/man/do_OneSite.Rd @@ -4,10 +4,20 @@ \alias{do_OneSite} \title{The main simulation function which does all the heavy lifting} \usage{ -do_OneSite(i_sim, i_SWRunInformation, i_sw_input_soillayers, - i_sw_input_treatments, i_sw_input_cloud, i_sw_input_prod, - i_sw_input_site, i_sw_input_soils, i_sw_input_weather, - i_sw_input_climscen, i_sw_input_climscen_values, SimParams) +do_OneSite( + i_sim, + i_SWRunInformation, + i_sw_input_soillayers, + i_sw_input_treatments, + i_sw_input_cloud, + i_sw_input_prod, + i_sw_input_site, + i_sw_input_soils, + i_sw_input_weather, + i_sw_input_climscen, + i_sw_input_climscen_values, + SimParams +) } \description{ The main simulation function which does all the heavy lifting diff --git a/man/do_compare.Rd b/man/do_compare.Rd deleted file mode 100644 index 66cf1eb6..00000000 --- a/man/do_compare.Rd +++ /dev/null @@ -1,45 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{do_compare} -\alias{do_compare} -\title{Recursive comparisons which also works for nested lists} -\usage{ -do_compare(x1, x2) -} -\arguments{ -\item{x1}{A R object} - -\item{x2}{A R object} -} -\value{ -\itemize{ - \item If both \code{x1} and \code{x2} are lists, then \code{do_compare} - is called recursively on mutually shared names if names exists and on - each element otherwise, and the output is a list from the return value - of each recursive call. - \item Otherwise, the function \code{\link{all.equal}} is called. If the - result is \code{TRUE}, then \code{NA} is returned. 
If the result is - \code{FALSE}, then a list with three elements is returned with \describe{ - \item{eq}{the result of the call to \code{\link{all.equal}}} - \item{x1}{The object \code{x1}} - \item{x2}{The object \code{x2}} - }} -} -\description{ -Recursive comparisons which also works for nested lists -} -\examples{ - ## expected result: NA - do_compare(1L, 1L) - - ## expected result: list(eq = "Mean relative difference: 1", x1 = 1, x2 = 2) - do_compare(1, 2) - - do_compare(list(1, 2), list(1, 3)) - ## expected result: comparison for elements a and b return NA; comparison - ## for element c shows a difference - do_compare(list(a = 1, b = 2), list(b = 2, c = 0, a = 1)) -} -\seealso{ -\code{\link{all.equal}} -} diff --git a/man/do_prior_TableLookups.Rd b/man/do_prior_TableLookups.Rd index 9340eedb..0b0b17d2 100644 --- a/man/do_prior_TableLookups.Rd +++ b/man/do_prior_TableLookups.Rd @@ -4,8 +4,12 @@ \alias{do_prior_TableLookups} \title{Look-up input values from spreadsheet tables} \usage{ -do_prior_TableLookups(SFSW2_prj_meta, SFSW2_prj_inputs, resume = TRUE, - verbose = FALSE) +do_prior_TableLookups( + SFSW2_prj_meta, + SFSW2_prj_inputs, + resume = TRUE, + verbose = FALSE +) } \description{ Look-up input values from spreadsheet tables diff --git a/man/downscale.delta.Rd b/man/downscale.delta.Rd index 0310b400..c9f046d0 100644 --- a/man/downscale.delta.Rd +++ b/man/downscale.delta.Rd @@ -4,10 +4,19 @@ \alias{downscale.delta} \title{Downscale with the 'delta approach'} \usage{ -downscale.delta(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, - scen.fut.monthly, itime, years = NULL, sim_time = NULL, +downscale.delta( + obs.hist.daily, + obs.hist.monthly, + scen.hist.monthly, + scen.fut.monthly, + itime, + years = NULL, + sim_time = NULL, opt_DS = list(ppt_type = "detailed", sigmaN = 6, PPTratioCutoff = 10), - dailyPPTceiling, do_checks = TRUE, ...) + dailyPPTceiling, + do_checks = TRUE, + ... +) } \arguments{ \item{obs.hist.daily}{A list. 
Each element corresponds to one year of diff --git a/man/downscale.deltahybrid.Rd b/man/downscale.deltahybrid.Rd index 6a2bce3e..f453f462 100644 --- a/man/downscale.deltahybrid.Rd +++ b/man/downscale.deltahybrid.Rd @@ -4,10 +4,18 @@ \alias{downscale.deltahybrid} \title{Downscale with the 'delta-hybrid approach' old version (prior to May 2016)} \usage{ -downscale.deltahybrid(obs.hist.daily, obs.hist.monthly, scen.hist.monthly, - scen.fut.monthly, itime, years = NULL, sim_time = NULL, - opt_DS = list(sigmaN = 6, PPTratioCutoff = 10), do_checks = TRUE, - ...) +downscale.deltahybrid( + obs.hist.daily, + obs.hist.monthly, + scen.hist.monthly, + scen.fut.monthly, + itime, + years = NULL, + sim_time = NULL, + opt_DS = list(sigmaN = 6, PPTratioCutoff = 10), + do_checks = TRUE, + ... +) } \arguments{ \item{obs.hist.daily}{A list. Each element corresponds to one year of diff --git a/man/downscale.deltahybrid3mod.Rd b/man/downscale.deltahybrid3mod.Rd index 0c3fe71b..03c99611 100644 --- a/man/downscale.deltahybrid3mod.Rd +++ b/man/downscale.deltahybrid3mod.Rd @@ -5,12 +5,21 @@ \title{Downscale with the new version of the \var{sQuote{delta-hybrid approach}} (post to May 2016)} \usage{ -downscale.deltahybrid3mod(obs.hist.daily, obs.hist.monthly, - scen.hist.monthly, scen.fut.monthly, itime, years = NULL, - sim_time = NULL, opt_DS = list(extrapol_type = - "linear_Thermessl2012CC.QMv1b", ppt_type = "detailed", sigmaN = 6, - PPTratioCutoff = 10, fix_spline = "attempt"), dailyPPTceiling, - monthly_extremes, do_checks = TRUE, ...) +downscale.deltahybrid3mod( + obs.hist.daily, + obs.hist.monthly, + scen.hist.monthly, + scen.fut.monthly, + itime, + years = NULL, + sim_time = NULL, + opt_DS = list(extrapol_type = "linear_Thermessl2012CC.QMv1b", ppt_type = "detailed", + sigmaN = 6, PPTratioCutoff = 10, fix_spline = "attempt"), + dailyPPTceiling, + monthly_extremes, + do_checks = TRUE, + ... +) } \arguments{ \item{obs.hist.daily}{A list. 
Each element corresponds to one year of diff --git a/man/downscale.periods.Rd b/man/downscale.periods.Rd index 7bc080fb..12ac09a3 100644 --- a/man/downscale.periods.Rd +++ b/man/downscale.periods.Rd @@ -4,10 +4,17 @@ \alias{downscale.periods} \title{Time periods for downscaling functions} \usage{ -downscale.periods(obs.hist.daily, obs.hist.monthly, - scen.hist.monthly = NULL, scen.fut.monthly = NULL, years = NULL, - DScur_startyear = NULL, DScur_endyear = NULL, - DSfut_startyear = NULL, DSfut_endyear = NULL) +downscale.periods( + obs.hist.daily, + obs.hist.monthly, + scen.hist.monthly = NULL, + scen.fut.monthly = NULL, + years = NULL, + DScur_startyear = NULL, + DScur_endyear = NULL, + DSfut_startyear = NULL, + DSfut_endyear = NULL +) } \arguments{ \item{obs.hist.daily}{A list. Each element corresponds to one year of diff --git a/man/downscale.raw.Rd b/man/downscale.raw.Rd index 7d2df4f8..36f6308c 100644 --- a/man/downscale.raw.Rd +++ b/man/downscale.raw.Rd @@ -4,10 +4,18 @@ \alias{downscale.raw} \title{Downscale with the 'direct approach'} \usage{ -downscale.raw(obs.hist.daily, obs.hist.monthly, scen.fut.monthly, itime, - years = NULL, sim_time = NULL, opt_DS = list(ppt_type = "detailed", - sigmaN = 6, PPTratioCutoff = 10), dailyPPTceiling, do_checks = TRUE, - ...) +downscale.raw( + obs.hist.daily, + obs.hist.monthly, + scen.fut.monthly, + itime, + years = NULL, + sim_time = NULL, + opt_DS = list(ppt_type = "detailed", sigmaN = 6, PPTratioCutoff = 10), + dailyPPTceiling, + do_checks = TRUE, + ... +) } \arguments{ \item{obs.hist.daily}{A list. 
Each element corresponds to one year of diff --git a/man/dw_determine_sources.Rd b/man/dw_determine_sources.Rd index 88cc5362..0ebea50c 100644 --- a/man/dw_determine_sources.Rd +++ b/man/dw_determine_sources.Rd @@ -4,9 +4,18 @@ \alias{dw_determine_sources} \title{Determine sources of daily weather} \usage{ -dw_determine_sources(dw_source, exinfo, dw_avail_sources, SFSW2_prj_inputs, - SWRunInformation, sim_size, sim_time, fnames_in, project_paths, - verbose = FALSE) +dw_determine_sources( + dw_source, + exinfo, + dw_avail_sources, + SFSW2_prj_inputs, + SWRunInformation, + sim_size, + sim_time, + fnames_in, + project_paths, + verbose = FALSE +) } \description{ Determine order of priorities (highest priority comes last): i.e., the diff --git a/man/erf.Rd b/man/erf.Rd deleted file mode 100644 index c3742667..00000000 --- a/man/erf.Rd +++ /dev/null @@ -1,20 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{erf} -\alias{erf} -\title{Error function} -\usage{ -erf(x) -} -\arguments{ -\item{x}{A numeric vector.} -} -\value{ -A numeric vector of the size of \code{x}. -} -\description{ -Error function -} -\seealso{ -Code is from examples of \code{\link[stats]{pnorm}}. 
-} diff --git a/man/export_objects_to_workers.Rd b/man/export_objects_to_workers.Rd index 021dba81..186f7a7f 100644 --- a/man/export_objects_to_workers.Rd +++ b/man/export_objects_to_workers.Rd @@ -4,8 +4,11 @@ \alias{export_objects_to_workers} \title{Export objects to workers} \usage{ -export_objects_to_workers(obj_env, parallel_backend = c("mpi", "socket"), - cl = NULL) +export_objects_to_workers( + obj_env, + parallel_backend = c("mpi", "socket"), + cl = NULL +) } \arguments{ \item{obj_env}{An environment containing R objects to export.} diff --git a/man/extract_climate_NCEPCFSR.Rd b/man/extract_climate_NCEPCFSR.Rd index 332ba102..b92318d6 100644 --- a/man/extract_climate_NCEPCFSR.Rd +++ b/man/extract_climate_NCEPCFSR.Rd @@ -4,8 +4,15 @@ \alias{extract_climate_NCEPCFSR} \title{Extract gridded mean monthly data from \var{NCEP/CFSR} for sites globally} \usage{ -extract_climate_NCEPCFSR(MMC, SWRunInformation, SFSW2_prj_meta, - opt_parallel, opt_chunks, resume, verbose) +extract_climate_NCEPCFSR( + MMC, + SWRunInformation, + SFSW2_prj_meta, + opt_parallel, + opt_chunks, + resume, + verbose +) } \description{ Extract gridded mean monthly data from \var{NCEP/CFSR} for sites globally diff --git a/man/extract_daily_weather_from_gridMET.Rd b/man/extract_daily_weather_from_gridMET.Rd new file mode 100644 index 00000000..a5dfc920 --- /dev/null +++ b/man/extract_daily_weather_from_gridMET.Rd @@ -0,0 +1,58 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/WeatherDB.R +\name{extract_daily_weather_from_gridMET} +\alias{extract_daily_weather_from_gridMET} +\title{Extract daily gridded weather data from the \var{gridMET} dataset} +\usage{ +extract_daily_weather_from_gridMET( + dir_data, + site_ids, + site_ids_by_dbW, + coords, + start_year, + end_year, + comp_type = "gzip", + dbW_digits = 2, + verbose = FALSE +) +} +\arguments{ +\item{dir_data}{A character string. 
The directory containing the +\var{gridMET} dataset files.} + +\item{site_ids}{An integer vector. The indices of sites for which to extract +\var{gridMET} weather data.} + +\item{coords}{A two-dimensional numerical object. The coordinates for each +site in \var{WGS84}.} + +\item{start_year}{An integer value. The first calendar year for which to +extract daily weather data.} + +\item{end_year}{An integer value. The last calendar year for which to +extract daily weather data.} + +\item{comp_type}{A character string. The compression type used by the +weather database.} + +\item{dbW_digits}{An integer value. The number of digits to which the +daily weather values are rounded.} + +\item{verbose}{A logical value.} +} +\description{ +Extracts daily gridded weather data, including precipitation, +maximum temperature and minimum temperature from the \var{gridMET} +(Abatzoglou 2013) database: a 1/24 degree gridded weather database that +contains data for the years 1979 through yesterday. +} +\section{Details}{ + Run the function \code{\link{gridMET_download_and_check}} + to download and check the dataset. +} + +\references{ +Abatzoglou, J. T. (2013) Development of gridded surface + meteorological data for ecological applications and modelling. + \var{Int. J. Climatol.}, 33: 121–131. 
+} diff --git a/man/extract_daily_weather_from_livneh.Rd b/man/extract_daily_weather_from_livneh.Rd index 5ab37f5c..f98f1a9d 100644 --- a/man/extract_daily_weather_from_livneh.Rd +++ b/man/extract_daily_weather_from_livneh.Rd @@ -4,10 +4,20 @@ \alias{extract_daily_weather_from_livneh} \title{Extract Gridded Weather Data from a Livneh Database} \usage{ -extract_daily_weather_from_livneh(dir_data, dir_temp, site_ids, - site_ids_by_dbW, coords, start_year, end_year, f_check = TRUE, - backup = TRUE, comp_type = "gzip", dbW_digits = 2, - verbose = FALSE) +extract_daily_weather_from_livneh( + dir_data, + dir_temp, + site_ids, + site_ids_by_dbW, + coords, + start_year, + end_year, + f_check = TRUE, + backup = TRUE, + comp_type = "gzip", + dbW_digits = 2, + verbose = FALSE +) } \arguments{ \item{dir_data}{directory containing Livneh data} diff --git a/man/extract_rSFSW2.Rd b/man/extract_rSFSW2.Rd index 66e385b5..42625ff2 100644 --- a/man/extract_rSFSW2.Rd +++ b/man/extract_rSFSW2.Rd @@ -1,6 +1,5 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/Spatial_Functions.R -\docType{methods} \name{extract_rSFSW2} \alias{extract_rSFSW2} \alias{extract_rSFSW2,Raster,vector,character-method} @@ -25,19 +24,15 @@ \S4method{extract_rSFSW2}{Raster,Raster,character}(x, y, type, ...) -\S4method{extract_rSFSW2}{SpatialPolygons,SpatialPoints,character}(x, y, - type, ...) +\S4method{extract_rSFSW2}{SpatialPolygons,SpatialPoints,character}(x, y, type, ...) \S4method{extract_rSFSW2}{character,ANY,character}(x, y, type, ...) -\S4method{extract_rSFSW2}{SpatialPolygons,SpatialPolygons,character}(x, y, - type, ...) +\S4method{extract_rSFSW2}{SpatialPolygons,SpatialPolygons,character}(x, y, type, ...) -\S4method{extract_rSFSW2}{SpatialPolygons,vector,character}(x, y, type, - ...) +\S4method{extract_rSFSW2}{SpatialPolygons,vector,character}(x, y, type, ...) -\S4method{extract_rSFSW2}{SpatialPolygons,matrix,character}(x, y, type, - ...) 
+\S4method{extract_rSFSW2}{SpatialPolygons,matrix,character}(x, y, type, ...) } \arguments{ \item{x}{A \linkS4class{Raster} object from which data are extracted.} diff --git a/man/extract_soil_CONUSSOIL.Rd b/man/extract_soil_CONUSSOIL.Rd index ef98f44e..7ba75e61 100644 --- a/man/extract_soil_CONUSSOIL.Rd +++ b/man/extract_soil_CONUSSOIL.Rd @@ -5,8 +5,16 @@ \title{\var{\sQuote{CONUS-SOIL}} is a rasterized and controlled \var{\sQuote{STATSGO}} dataset; information for 11 soil are layers available.} \usage{ -extract_soil_CONUSSOIL(MMC, sim_size, sim_space, dir_ex_soil, fnames_in, - resume, verbose, default_TOC_GperKG = 0) +extract_soil_CONUSSOIL( + MMC, + sim_size, + sim_space, + dir_ex_soil, + fnames_in, + resume, + verbose, + default_TOC_GperKG = 0 +) } \arguments{ \item{default_TOC_GperKG}{A numeric value. The default value is diff --git a/man/extract_soil_ISRICWISE.Rd b/man/extract_soil_ISRICWISE.Rd index 086c0cf1..61ddc6de 100644 --- a/man/extract_soil_ISRICWISE.Rd +++ b/man/extract_soil_ISRICWISE.Rd @@ -4,8 +4,16 @@ \alias{extract_soil_ISRICWISE} \title{Extract soil data from one of the \var{\sQuote{ISRIC-WISE}} datasets} \usage{ -extract_soil_ISRICWISE(MMC, sim_size, sim_space, dir_ex_soil, fnames_in, - dataset = c("ISRICWISEv12", "ISRICWISE30secV1a"), resume, verbose) +extract_soil_ISRICWISE( + MMC, + sim_size, + sim_space, + dir_ex_soil, + fnames_in, + dataset = c("ISRICWISEv12", "ISRICWISE30secV1a"), + resume, + verbose +) } \arguments{ \item{dataset}{A character string. 
Identifies the \var{\dQuote{ISRIC-WISE}} diff --git a/man/find_gridMET_files.Rd b/man/find_gridMET_files.Rd new file mode 100644 index 00000000..8689bfcc --- /dev/null +++ b/man/find_gridMET_files.Rd @@ -0,0 +1,11 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/WeatherDB.R +\name{find_gridMET_files} +\alias{find_gridMET_files} +\title{List \var{gridMET} data files available on disk} +\usage{ +find_gridMET_files(dir_data, vars = gridMET_metadata()[["vars"]]) +} +\description{ +List \var{gridMET} data files available on disk +} diff --git a/man/find_sites_with_bad_weather.Rd b/man/find_sites_with_bad_weather.Rd index ba20819b..cd798a5e 100644 --- a/man/find_sites_with_bad_weather.Rd +++ b/man/find_sites_with_bad_weather.Rd @@ -5,9 +5,15 @@ \title{Determine which site_ids in the weather database do not have weather data for every requested climate scenario} \usage{ -find_sites_with_bad_weather(fdbWeather, site_labels = NULL, - siteID_by_dbW = NULL, scen_labels = NULL, scenID_by_dbW = NULL, - chunk_size = 500L, verbose = FALSE) +find_sites_with_bad_weather( + fdbWeather, + site_labels = NULL, + siteID_by_dbW = NULL, + scen_labels = NULL, + scenID_by_dbW = NULL, + chunk_size = 500L, + verbose = FALSE +) } \value{ A logical vector of length \code{site_labels} respectively diff --git a/man/fun_kLargest.Rd b/man/fun_kLargest.Rd deleted file mode 100644 index b603449d..00000000 --- a/man/fun_kLargest.Rd +++ /dev/null @@ -1,41 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{fun_kLargest} -\alias{fun_kLargest} -\title{Find the \code{k}-largest/smallest values (and apply a function to these -values)} -\usage{ -fun_kLargest(x, largest = TRUE, fun = NULL, k = 10L, na.rm = FALSE, - ...) -} -\arguments{ -\item{x}{A numeric vector} - -\item{largest}{A logical value. See return value.} - -\item{fun}{A function which requires one argument or \code{"index"}. 
-\code{fun} will be applied to the \code{k}-largest/smallest values of -\code{x}.} - -\item{k}{An integer value. The \code{k}-largest/smallest value(s) of \code{x} -will be used. The largest/smallest value will be used if 0 or negative.} - -\item{na.rm}{A logical value indicating whether \code{NA} values should be -stripped before the computation proceeds.} - -\item{\dots}{Optional arguments to be passed to \code{fun}} -} -\value{ -A vector of length \code{k}, \itemize{ - \item if \code{is.null(fun)}, then a vector with the \code{k}-largest - (if \code{largest = TRUE}) or \code{k}-smallest - (if \code{largest = FALSE}) values of \code{x}; - \item if \code{fun = "index"}, then a vector with indices of the - \code{k}-largest/smallest values (NOTE: this is truncated to the - \code{k}-first indices!). } Otherwise, the result of applying \code{fun} - to the \code{k}-largest/smallest values. -} -\description{ -Find the \code{k}-largest/smallest values (and apply a function to these -values) -} diff --git a/man/germination_wait_times.Rd b/man/germination_wait_times.Rd deleted file mode 100644 index 01f92389..00000000 --- a/man/germination_wait_times.Rd +++ /dev/null @@ -1,42 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RcppExports.R -\name{germination_wait_times} -\alias{germination_wait_times} -\title{Determine wait times until germination based on information on favorable - conditions and time required to germinate} -\usage{ -germination_wait_times(time_to_germinate, duration_fave_cond) -} -\description{ -Determine wait times until germination based on information on favorable - conditions and time required to germinate -} -\section{Note}{ - The \pkg{Rcpp} version of the function is about 270x faster - for vectors of length 365 and 12,000x faster for vectors of length 11,000 - than the R version. The \pkg{Rcpp} version also reduced the memory - footprint by a factor of >> 3080. 
-} - -\examples{ - # The \\pkg{Rcpp} function is equivalent to the following R version - germination_wait_times_R <- function(time_to_germinate, duration_fave_cond) { - N <- length(time_to_germinate) - stats::na.exclude(unlist(lapply(seq_len(N), function(t) { - if (is.finite(time_to_germinate[t])) { - t1 <- duration_fave_cond[t:N] - t2 <- stats::na.exclude(t1) - t3 <- which(t2[time_to_germinate[t]] == t1)[1] - sum(is.na(t1[1:t3])) - } else { - NA - } - }))) - } - -} -\references{ -Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). - Modeling regeneration responses of big sagebrush (Artemisia tridentata) - to abiotic conditions. Ecol Model, 286, 66-77. -} diff --git a/man/get.SeveralOverallVariables.Rd b/man/get.SeveralOverallVariables.Rd index 8c93793a..34940465 100644 --- a/man/get.SeveralOverallVariables.Rd +++ b/man/get.SeveralOverallVariables.Rd @@ -5,9 +5,16 @@ \title{Get data of variables in the overall aggregation table for one of the \code{climCat} rows (combining 'Current' and ensembles)} \usage{ -get.SeveralOverallVariables(fdbrSFSW2, fdbrSFSW2ens, climCat, responseName, - MeanOrSD = "Mean", i_climCat = 1, whereClause = NULL, - climate.ambient = "Current") +get.SeveralOverallVariables( + fdbrSFSW2, + fdbrSFSW2ens, + climCat, + responseName, + MeanOrSD = "Mean", + i_climCat = 1, + whereClause = NULL, + climate.ambient = "Current" +) } \description{ Get data of variables in the overall aggregation table for one of the diff --git a/man/get.SeveralOverallVariables_Ensemble.Rd b/man/get.SeveralOverallVariables_Ensemble.Rd index 68be4d2b..fe7ed08d 100644 --- a/man/get.SeveralOverallVariables_Ensemble.Rd +++ b/man/get.SeveralOverallVariables_Ensemble.Rd @@ -5,8 +5,15 @@ \title{Get data of variables in the overall aggregation table for one of the ensembles} \usage{ -get.SeveralOverallVariables_Ensemble(fdbrSFSW2, fdbrSFSW2ens, responseName, - MeanOrSD = "Mean", fam, level, whereClause = NULL) +get.SeveralOverallVariables_Ensemble( + fdbrSFSW2, + 
fdbrSFSW2ens, + responseName, + MeanOrSD = "Mean", + fam, + level, + whereClause = NULL +) } \description{ Get data of variables in the overall aggregation table for one of the diff --git a/man/get.SeveralOverallVariables_Scenario.Rd b/man/get.SeveralOverallVariables_Scenario.Rd index 127a612c..97f20609 100644 --- a/man/get.SeveralOverallVariables_Scenario.Rd +++ b/man/get.SeveralOverallVariables_Scenario.Rd @@ -5,8 +5,13 @@ \title{Get data of variables in the overall aggregation table for one of the scenarios} \usage{ -get.SeveralOverallVariables_Scenario(fdbrSFSW2, responseName, - MeanOrSD = "Mean", scenario = "Current", whereClause = NULL) +get.SeveralOverallVariables_Scenario( + fdbrSFSW2, + responseName, + MeanOrSD = "Mean", + scenario = "Current", + whereClause = NULL +) } \description{ Get data of variables in the overall aggregation table for one of the diff --git a/man/get.SeveralOverallVariables_Scenario_old.Rd b/man/get.SeveralOverallVariables_Scenario_old.Rd index a53d9096..8f86bc79 100644 --- a/man/get.SeveralOverallVariables_Scenario_old.Rd +++ b/man/get.SeveralOverallVariables_Scenario_old.Rd @@ -5,8 +5,13 @@ \title{Get data of variables in the overall aggregation table for one of the scenarios} \usage{ -get.SeveralOverallVariables_Scenario_old(fdbrSFSW2, responseName, - MeanOrSD = "Mean", scenario = "Current", whereClause = NULL) +get.SeveralOverallVariables_Scenario_old( + fdbrSFSW2, + responseName, + MeanOrSD = "Mean", + scenario = "Current", + whereClause = NULL +) } \description{ Get data of variables in the overall aggregation table for one of the diff --git a/man/get.Table.Rd b/man/get.Table.Rd index 536a98c3..430c5d2b 100644 --- a/man/get.Table.Rd +++ b/man/get.Table.Rd @@ -5,9 +5,17 @@ \title{Get data-part for an entire table for one of the \code{climCat} rows (combining 'Current' and ensembles)} \usage{ -get.Table(fdbrSFSW2, fdbrSFSW2ens, climCat, responseName, - MeanOrSD = "Mean", i_climCat = 1, whereClause = NULL, - addPid = FALSE, 
climate.ambient = "Current") +get.Table( + fdbrSFSW2, + fdbrSFSW2ens, + climCat, + responseName, + MeanOrSD = "Mean", + i_climCat = 1, + whereClause = NULL, + addPid = FALSE, + climate.ambient = "Current" +) } \description{ Get data-part for an entire table for one of the \code{climCat} rows diff --git a/man/get.Table_Ensemble.Rd b/man/get.Table_Ensemble.Rd index daa0bc00..ef923ece 100644 --- a/man/get.Table_Ensemble.Rd +++ b/man/get.Table_Ensemble.Rd @@ -4,8 +4,16 @@ \alias{get.Table_Ensemble} \title{Get header and data for an entire table for one of the ensembles} \usage{ -get.Table_Ensemble(fdbrSFSW2, fdbrSFSW2ens, responseName, - MeanOrSD = "Mean", fam, level, whereClause = NULL, header = FALSE) +get.Table_Ensemble( + fdbrSFSW2, + fdbrSFSW2ens, + responseName, + MeanOrSD = "Mean", + fam, + level, + whereClause = NULL, + header = FALSE +) } \description{ Get header and data for an entire table for one of the ensembles diff --git a/man/get.Table_Scenario.Rd b/man/get.Table_Scenario.Rd index 27fccaa9..38acbb69 100644 --- a/man/get.Table_Scenario.Rd +++ b/man/get.Table_Scenario.Rd @@ -4,8 +4,14 @@ \alias{get.Table_Scenario} \title{Get header and data for an entire table for one of the scenarios} \usage{ -get.Table_Scenario(fdbrSFSW2, responseName, MeanOrSD = "Mean", - scenario = "Current", whereClause = NULL, header = FALSE) +get.Table_Scenario( + fdbrSFSW2, + responseName, + MeanOrSD = "Mean", + scenario = "Current", + whereClause = NULL, + header = FALSE +) } \description{ Get header and data for an entire table for one of the scenarios diff --git a/man/get_BareSoilEvapCoefs.Rd b/man/get_BareSoilEvapCoefs.Rd index 21cd6aef..83613e7f 100644 --- a/man/get_BareSoilEvapCoefs.Rd +++ b/man/get_BareSoilEvapCoefs.Rd @@ -5,8 +5,13 @@ \title{Calculate bare-soil evaporation coefficients based on soil texture and store in soil input file} \usage{ -get_BareSoilEvapCoefs(SFSW2_prj_meta, SFSW2_prj_inputs, runIDs_adjust, - resume = TRUE, verbose = FALSE) +get_BareSoilEvapCoefs( 
+ SFSW2_prj_meta, + SFSW2_prj_inputs, + runIDs_adjust, + resume = TRUE, + verbose = FALSE +) } \description{ Calculate bare-soil evaporation coefficients based on soil texture and store diff --git a/man/get_DailyGCMdata_netCDF.Rd b/man/get_DailyGCMdata_netCDF.Rd new file mode 100644 index 00000000..59c6124b --- /dev/null +++ b/man/get_DailyGCMdata_netCDF.Rd @@ -0,0 +1,26 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R +\name{get_DailyGCMdata_netCDF} +\alias{get_DailyGCMdata_netCDF} +\title{Extract all daily \var{GCM} projection values for precipitation, minimum and +maximum temperature from each one \var{netCDF} file.} +\usage{ +get_DailyGCMdata_netCDF( + i_tag, + climDB_meta, + ncFiles, + startyear, + endyear, + lon, + lat +) +} +\value{ +A list with three 3-dimensional arrays +\var{\dQuote{tmax}}, \var{\dQuote{tmin}}, and \var{\dQuote{prcp}}. +Units are [degree Celsius] for temperature and [cm / day] for precipitation. +} +\description{ +Extract all daily \var{GCM} projection values for precipitation, minimum and +maximum temperature from each one \var{netCDF} file. +} diff --git a/man/get_GCMdata_NEX.Rd b/man/get_GCMdata_NEX.Rd index 69c14655..a4196d4c 100644 --- a/man/get_GCMdata_NEX.Rd +++ b/man/get_GCMdata_NEX.Rd @@ -4,8 +4,20 @@ \alias{get_GCMdata_NEX} \title{Download downscaled \var{GCM} projections from \var{NEX}} \usage{ -get_GCMdata_NEX(i_tag, ts_mons, dpm, gcm, scen, rip, lon, lat, startyear, - endyear, climDB_meta, ...) +get_GCMdata_NEX( + i_tag, + time, + dpm, + gcm, + scen, + rip, + lon, + lat, + startyear, + endyear, + climDB_meta, + ... 
+) } \value{ A list of one data.frame object with 5 columns and names of diff --git a/man/get_GCMdata_netCDF.Rd b/man/get_GCMdata_netCDF.Rd deleted file mode 100644 index 97e27753..00000000 --- a/man/get_GCMdata_netCDF.Rd +++ /dev/null @@ -1,19 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ExtractData_ClimateDownscaling.R -\name{get_GCMdata_netCDF} -\alias{get_GCMdata_netCDF} -\title{Extract \var{GCM} projection from a \var{netCDF} file} -\usage{ -get_GCMdata_netCDF(i_tag, ts_mons, dpm, gcm, scen, rip, lon, lat, - startyear, endyear, climDB_meta, ...) -} -\value{ -A list of one data.frame object with 5 columns and names of - \var{\dQuote{year}}, \var{\dQuote{month}}, \var{\dQuote{tmax}}, \var{\dQuote{tmin}}, - and \var{\dQuote{prcp}}. Each row is one day. -Units are [degree Celsius] for temperature and [cm / day] and [cm / month], -respectively, for precipitation. -} -\description{ -Extract \var{GCM} projection from a \var{netCDF} file -} diff --git a/man/get_KilledBySoilLayers.Rd b/man/get_KilledBySoilLayers.Rd deleted file mode 100644 index 3a2d40f6..00000000 --- a/man/get_KilledBySoilLayers.Rd +++ /dev/null @@ -1,58 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RcppExports.R -\name{get_KilledBySoilLayers} -\alias{get_KilledBySoilLayers} -\title{Determine if all conditions across rooted soil layers are deadly} -\usage{ -get_KilledBySoilLayers(relevantLayers, kill_conditions) -} -\arguments{ -\item{relevantLayers}{An integer vector, usually of length 365 or 366 -(days).} - -\item{kill.conditions}{A m x p logical matrix with -\code{m >= length(relevantLayers)} and p represents the number of -simulated soil layers, i.e., \code{p >= max(relevantLayers, na.rm = TRUE)}.} -} -\value{ -A logical vector of the length of \code{relevantLayers} with - values containing \code{NA} for days when conditions were not evaluated, - \code{TRUE} if all relevant soil layers (columns) of 
\code{kill.conditions} - were \code{TRUE}, and with \code{FALSE} otherwise -} -\description{ -Function that checks whether all relevant (those with roots) soil layers - are under conditions of mortality (kill.conditions) for each day of a - given year -} -\details{ -\code{relevantLayers} takes either \code{NA} if no soil layers should be - considered (e.g., because not yet germinated), or an integer number - between 1 and the number of simulated soil layers. The number indicates - the depth to which a seedling has grown roots and over which layers - \code{kill.conditions} will be evaluated. -} -\section{Note}{ - The \pkg{Rcpp} version of the function is about 165x - faster than the version previous to commit - \var{6344857a9cdb08acf68fa031c43cf4a596613aad} 'Small speed improvements' - and about 70x faster than the R version. The \pkg{Rcpp} version also - reduced the memory footprint by a factor of 200. -} - -\examples{ - # The \\pkg{Rcpp} function is equivalent to the following R version - get_KilledBySoilLayers_R <- function(relevantLayers, kill.conditions) { - vapply(seq_along(relevantLayers), function(k) { - if (all(is.finite(relevantLayers[k]))) { - all(as.logical(kill.conditions[k, seq_len(relevantLayers[k])])) - } else NA - }, FUN.VALUE = NA) - } - -} -\references{ -Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). - Modeling regeneration responses of big sagebrush (Artemisia tridentata) - to abiotic conditions. Ecol Model, 286, 66-77. 
-} diff --git a/man/get_MonthlyGCMdata_netCDF.Rd b/man/get_MonthlyGCMdata_netCDF.Rd new file mode 100644 index 00000000..5770cf5d --- /dev/null +++ b/man/get_MonthlyGCMdata_netCDF.Rd @@ -0,0 +1,34 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R +\name{get_MonthlyGCMdata_netCDF} +\alias{get_MonthlyGCMdata_netCDF} +\title{Extract monthly \var{GCM} projection from a \var{netCDF} file} +\usage{ +get_MonthlyGCMdata_netCDF( + i_tag, + time, + dpm, + gcm, + scen, + rip, + lon, + lat, + startyear, + endyear, + climDB_meta, + ncg, + nct, + ncFiles +) +} +\value{ +A list of one data.frame object with 5 columns and names of + \var{\dQuote{year}}, \var{\dQuote{month}}, + \var{\dQuote{tmax}}, \var{\dQuote{tmin}}, and \var{\dQuote{prcp}}. +Each row represents one month. +Units are [degree Celsius] for temperature and [cm / month] +for precipitation. +} +\description{ +Extract monthly \var{GCM} projection from a \var{netCDF} file +} diff --git a/man/get_fnames_temporaryOutput.Rd b/man/get_fnames_temporaryOutput.Rd index 2ed6e1cf..b0012362 100644 --- a/man/get_fnames_temporaryOutput.Rd +++ b/man/get_fnames_temporaryOutput.Rd @@ -4,8 +4,12 @@ \alias{get_fnames_temporaryOutput} \title{Locate file names of temporary output text files} \usage{ -get_fnames_temporaryOutput(dir_out_temp, concatFile, - deleteTmpSQLFiles = TRUE, resume = TRUE) +get_fnames_temporaryOutput( + dir_out_temp, + concatFile, + deleteTmpSQLFiles = TRUE, + resume = TRUE +) } \description{ Locate file names of temporary output text files diff --git a/man/get_modifiedHardegree2006NLR.Rd b/man/get_modifiedHardegree2006NLR.Rd deleted file mode 100644 index c32b078c..00000000 --- a/man/get_modifiedHardegree2006NLR.Rd +++ /dev/null @@ -1,22 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/GISSM.R -\name{get_modifiedHardegree2006NLR} -\alias{get_modifiedHardegree2006NLR} -\title{Based on the \var{NLR} model (equation 
5) in Hardegree (2006) and modified -by Schlaepfer et al. (2014) by making time to germinate dependent on -mean January temperature and soil water potential} -\usage{ -get_modifiedHardegree2006NLR(RYdoy, Estimate_TimeToGerminate, TmeanJan, a, - b, c, d, k1_meanJanTemp, k2_meanJanTempXIncubationTemp, k3_IncubationSWP, - Tgerm.year, SWPgerm.year, durations, rec.delta = 1, nrec.max = 10L) -} -\description{ -Based on the \var{NLR} model (equation 5) in Hardegree (2006) and modified -by Schlaepfer et al. (2014) by making time to germinate dependent on -mean January temperature and soil water potential -} -\references{ -Hardegree SP (2006) Predicting Germination Response to - Temperature. I. Cardinal-temperature Models and Subpopulation-specific - Regression. Annals of Botany, 97, 1115-1125. -} diff --git a/man/gridMET_download_and_check.Rd b/man/gridMET_download_and_check.Rd new file mode 100644 index 00000000..26a3ec08 --- /dev/null +++ b/man/gridMET_download_and_check.Rd @@ -0,0 +1,43 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/WeatherDB.R +\name{gridMET_download_and_check} +\alias{gridMET_download_and_check} +\title{Prepare script to download all or missing files of the \var{gridMET} dataset} +\usage{ +gridMET_download_and_check(dir_data, desc = gridMET_metadata()) +} +\arguments{ +\item{dir_data}{A character string. Path to where the \var{gridMET} dataset +is/will be stored on disk.} + +\item{desc}{A named list. Describing the \var{gridMET} dataset.} +} +\value{ +If all files are available, then a message is printed to the + R console with that information. Otherwise, the message points to a + \var{.sh} script that was created at \code{dir_data} which must be run + separately to download the missing files. +} +\description{ +Prepare script to download all or missing files of the \var{gridMET} dataset +} +\section{Notes}{ + The download scripts use \var{wget}, i.e., it must be + available on your system to work. 
The scripts are based on the dataset + repository setup at \url{http://www.climatologylab.org/gridmet.html} as of + Nov 2019. This dataset has also been known as \var{METDATA}. +} + +\examples{ +if (exists("SFSW2_prj_meta")) { + gridMET_download_and_check( + dir_data = SFSW2_prj_meta[["project_paths"]][["dir_gridMET"]] + ) +} + +} +\references{ +Abatzoglou, J. T. (2013) Development of gridded surface + meteorological data for ecological applications and modelling. + \var{Int. J. Climatol.}, 33: 121–131. +} diff --git a/man/gridMET_metadata.Rd b/man/gridMET_metadata.Rd new file mode 100644 index 00000000..33ea0c8e --- /dev/null +++ b/man/gridMET_metadata.Rd @@ -0,0 +1,14 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/WeatherDB.R +\name{gridMET_metadata} +\alias{gridMET_metadata} +\title{Description of the \var{gridMET} dataset} +\usage{ +gridMET_metadata() +} +\value{ +A named list. +} +\description{ +Description of the \var{gridMET} dataset +} diff --git a/man/init_rSFSW2_project.Rd b/man/init_rSFSW2_project.Rd index 36eb7d79..7de57c8d 100644 --- a/man/init_rSFSW2_project.Rd +++ b/man/init_rSFSW2_project.Rd @@ -4,8 +4,12 @@ \alias{init_rSFSW2_project} \title{Initialize a \pkg{rSFSW2} project (setup description file)} \usage{ -init_rSFSW2_project(fmetar, update = FALSE, verbose = TRUE, - print.debug = FALSE) +init_rSFSW2_project( + fmetar, + update = FALSE, + verbose = TRUE, + print.debug = FALSE +) } \arguments{ \item{fmetar}{A character string. The path name to the project description diff --git a/man/intersect2.Rd b/man/intersect2.Rd deleted file mode 100644 index fcd07844..00000000 --- a/man/intersect2.Rd +++ /dev/null @@ -1,20 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{intersect2} -\alias{intersect2} -\title{The intersection on any number of vectors} -\usage{ -intersect2(...)
-} -\arguments{ -\item{\dots}{Any number of vectors or a list of vectors.} -} -\value{ -A vector of the same mode as inputs. -} -\description{ -The intersection on any number of vectors -} -\seealso{ -\code{\link{intersect}} -} diff --git a/man/is.natural.Rd b/man/is.natural.Rd deleted file mode 100644 index 927ac4ca..00000000 --- a/man/is.natural.Rd +++ /dev/null @@ -1,17 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{is.natural} -\alias{is.natural} -\title{Test whether input represents a natural number} -\usage{ -is.natural(x) -} -\arguments{ -\item{x}{An integer, numeric, or complex vector, matrix, or array.} -} -\value{ -A logical value. -} -\description{ -Test whether input represents a natural number -} diff --git a/man/is_project_script_file_recent.Rd b/man/is_project_script_file_recent.Rd index e0d48940..035ccfad 100644 --- a/man/is_project_script_file_recent.Rd +++ b/man/is_project_script_file_recent.Rd @@ -5,8 +5,11 @@ \title{Compare elements and 1-st level structure of project script file with installed \pkg{rSFSW2}-package version} \usage{ -is_project_script_file_recent(dir_prj, - script = "SFSW2_project_descriptions.R", ...) +is_project_script_file_recent( + dir_prj, + script = "SFSW2_project_descriptions.R", + ... +) } \arguments{ \item{dir_prj}{A character string. 
Path the simulation project folder.} diff --git a/man/kill_seedling.Rd b/man/kill_seedling.Rd deleted file mode 100644 index b9b4346a..00000000 --- a/man/kill_seedling.Rd +++ /dev/null @@ -1,40 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/RcppExports.R -\name{kill_seedling} -\alias{kill_seedling} -\title{Determine seedling survival in the first season (\var{\sQuote{ss1s}})} -\usage{ -kill_seedling(ss1s, ry_year_day, ry_useyrs, y, doy) -} -\description{ -Determine seedling survival in the first season (\var{\sQuote{ss1s}}) -} -\section{Note}{ - The \pkg{Rcpp} version of the function is about 270x faster - for vectors of length 365 and 12,000x faster for vectors of length 11,000 - than the R version. The \pkg{Rcpp} version also reduced the memory - footprint by a factor of >> 3080. - - Previous name \code{setFALSE_SeedlingSurvival_1stSeason}. -} - -\section{C code}{ - \code{ss1s} is a pointer to the data and the original - vector will get altered; one would need for a deep copy: - \code{LogicalVector out = clone(ss1s)} -} - -\examples{ - # The \\pkg{Rcpp} function is equivalent to the following R version - kill_seedling_R <- function(ss1s, ry_year_day, ry_useyrs, y, - doy) { - ss1s[ry_year_day == ry_useyrs[y]][doy] <- FALSE - ss1s - } - -} -\references{ -Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). - Modeling regeneration responses of big sagebrush (Artemisia tridentata) - to abiotic conditions. Ecol Model, 286, 66-77. 
-} diff --git a/man/make_dbW.Rd b/man/make_dbW.Rd index 69835e97..3934f896 100644 --- a/man/make_dbW.Rd +++ b/man/make_dbW.Rd @@ -4,8 +4,16 @@ \alias{make_dbW} \title{Create and populate a \pkg{rSOILWAT2} daily weather \var{SQLite} database} \usage{ -make_dbW(SFSW2_prj_meta, SWRunInformation, opt_parallel, opt_chunks, - opt_behave, deleteTmpSQLFiles, verbose = FALSE, print.debug = FALSE) +make_dbW( + SFSW2_prj_meta, + SWRunInformation, + opt_parallel, + opt_chunks, + opt_behave, + deleteTmpSQLFiles, + verbose = FALSE, + print.debug = FALSE +) } \description{ Create and populate a \pkg{rSOILWAT2} daily weather \var{SQLite} database diff --git a/man/make_test_output_reference.Rd b/man/make_test_output_reference.Rd index 60f39410..54f7b475 100644 --- a/man/make_test_output_reference.Rd +++ b/man/make_test_output_reference.Rd @@ -4,8 +4,7 @@ \alias{make_test_output_reference} \title{Copy output database of a test project to reference folder} \usage{ -make_test_output_reference(dir_test, dir_ref = NULL, - SFSW2_version = NULL) +make_test_output_reference(dir_test, dir_ref = NULL, SFSW2_version = NULL) } \arguments{ \item{dir_test}{A character string. Path to test project folder.} diff --git a/man/merge_2soils.Rd b/man/merge_2soils.Rd index 4f386671..a085215a 100644 --- a/man/merge_2soils.Rd +++ b/man/merge_2soils.Rd @@ -4,8 +4,18 @@ \alias{merge_2soils} \title{Merge two soil input datafiles} \usage{ -merge_2soils(fmaster, fmaster1, fmaster2, fslayer, fslayer1, fslayer2, - fstexture, fstexture1, fstexture2, var_from2 = NULL) +merge_2soils( + fmaster, + fmaster1, + fmaster2, + fslayer, + fslayer1, + fslayer2, + fstexture, + fstexture1, + fstexture2, + var_from2 = NULL +) } \arguments{ \item{fmaster}{A character string. 
Path to the target master file.} diff --git a/man/move_dbTempOut_to_dbOut.Rd b/man/move_dbTempOut_to_dbOut.Rd index 61bc84b3..38e326f1 100644 --- a/man/move_dbTempOut_to_dbOut.Rd +++ b/man/move_dbTempOut_to_dbOut.Rd @@ -5,9 +5,17 @@ \title{Moves simulation output that was written to temporary \var{SQL}-databases to a final output \var{SQL}-database} \usage{ -move_dbTempOut_to_dbOut(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_behave, opt_out_run, opt_verbosity, chunk_size = -1L, - dir_out_temp = NULL, check_if_Pid_present = FALSE) +move_dbTempOut_to_dbOut( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity, + chunk_size = -1L, + dir_out_temp = NULL, + check_if_Pid_present = FALSE +) } \value{ Invisibly the number of temporary \var{SQL}-databases. diff --git a/man/move_output_to_dbOutput.Rd b/man/move_output_to_dbOutput.Rd index fb8b3381..fcaca1c5 100644 --- a/man/move_output_to_dbOutput.Rd +++ b/man/move_output_to_dbOutput.Rd @@ -4,9 +4,16 @@ \alias{move_output_to_dbOutput} \title{Move temporary output data to output databases} \usage{ -move_output_to_dbOutput(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_behave, opt_out_run, opt_verbosity, check_if_Pid_present = FALSE, - dir_out_temp = NULL) +move_output_to_dbOutput( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity, + check_if_Pid_present = FALSE, + dir_out_temp = NULL +) } \arguments{ \item{dir_out_temp}{A character string. The path to temporary output files. 
diff --git a/man/move_temporary_to_outputDB.Rd b/man/move_temporary_to_outputDB.Rd index de74fcee..770742fe 100644 --- a/man/move_temporary_to_outputDB.Rd +++ b/man/move_temporary_to_outputDB.Rd @@ -6,14 +6,28 @@ \title{Moves simulation output that was written to temporary text files to a \var{SQL}-database} \usage{ -move_temporary_to_outputDB(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_behave, opt_out_run, opt_verbosity, chunk_size = 1000L, - dir_out_temp = NULL) +move_temporary_to_outputDB( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity, + chunk_size = 1000L, + dir_out_temp = NULL +) -move_temporary_to_outputDB_withChecks(SFSW2_prj_meta, t_job_start, - opt_parallel, opt_behave, opt_out_run, opt_verbosity, - chunk_size = 1000L, check_if_Pid_present = TRUE, - dir_out_temp = NULL) +move_temporary_to_outputDB_withChecks( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity, + chunk_size = 1000L, + check_if_Pid_present = TRUE, + dir_out_temp = NULL +) } \arguments{ \item{chunk_size}{An integer value. The number of lines that are read at once diff --git a/man/obtain_CMIP5_MACAv2metdata_USA.Rd b/man/obtain_CMIP5_MACAv2metdata_USA.Rd new file mode 100644 index 00000000..6250afd8 --- /dev/null +++ b/man/obtain_CMIP5_MACAv2metdata_USA.Rd @@ -0,0 +1,49 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R +\name{obtain_CMIP5_MACAv2metdata_USA} +\alias{obtain_CMIP5_MACAv2metdata_USA} +\title{Check and prepare local copy of \var{CMIP5_MACAv2metdata} dataset} +\usage{ +obtain_CMIP5_MACAv2metdata_USA(locations, dir_ex_fut) +} +\arguments{ +\item{locations}{A data frame. Two columns \code{X_WGS84} and +\code{Y_WGS84} of locations describe rectangle +for which data will be downloaded.} + +\item{dir_ex_fut}{A character string. 
The path name to future climate +projections.} +} +\value{ +If all files are available, then a message is printed to the + R console with that information. Otherwise, the message points to a + \var{.sh} script that was created at the + \code{MACAv2metdata_USA} sub-folder. This script must be run + separately to download the missing files. +} +\description{ +Check and prepare local copy of \var{CMIP5_MACAv2metdata} dataset +} +\section{Notes}{ + The download scripts use \var{wget}, i.e., it must be + available on your system to work. The scripts are based on the dataset + repository setup at + \url{https://climate.northwestknowledge.net/MACA/index.php} as of + Dec 2019. This dataset has been bias corrected against \var{gridMET}. +} + +\examples{ +if (exists("SFSW2_prj_meta") && exists("SFSW2_prj_inputs")) { + obtain_CMIP5_MACAv2metdata_USA( + locations = + SFSW2_prj_inputs[["SWRunInformation"]][, c("X_WGS84", "Y_WGS84")], + dir_ex_fut = SFSW2_prj_meta[["project_paths"]][["dir_ex_fut"]] + ) +} + +} +\references{ +Abatzoglou, J. T. (2013) Development of gridded surface + meteorological data for ecological applications and modelling. + \var{Int. J. Climatol.}, 33: 121–131.
+} diff --git a/man/populate_rSFSW2_project_with_data.Rd b/man/populate_rSFSW2_project_with_data.Rd index 84bbfed0..e22d97d9 100644 --- a/man/populate_rSFSW2_project_with_data.Rd +++ b/man/populate_rSFSW2_project_with_data.Rd @@ -4,8 +4,14 @@ \alias{populate_rSFSW2_project_with_data} \title{Populate \pkg{rSFSW2} project with input data} \usage{ -populate_rSFSW2_project_with_data(SFSW2_prj_meta, opt_behave, opt_parallel, - opt_chunks, opt_out_run, opt_verbosity) +populate_rSFSW2_project_with_data( + SFSW2_prj_meta, + opt_behave, + opt_parallel, + opt_chunks, + opt_out_run, + opt_verbosity +) } \description{ Populate \pkg{rSFSW2} project with input data diff --git a/man/prepare_ExtractData_Soils.Rd b/man/prepare_ExtractData_Soils.Rd index 14c8fc66..6bf70ce2 100644 --- a/man/prepare_ExtractData_Soils.Rd +++ b/man/prepare_ExtractData_Soils.Rd @@ -4,9 +4,16 @@ \alias{prepare_ExtractData_Soils} \title{Preparations for the extraction of external soil datasets} \usage{ -prepare_ExtractData_Soils(SWRunInformation, sim_size, field_sources, - field_include, how_determine_sources, sw_input_soillayers, - sw_input_soils_use, sw_input_soils) +prepare_ExtractData_Soils( + SWRunInformation, + sim_size, + field_sources, + field_include, + how_determine_sources, + sw_input_soillayers, + sw_input_soils_use, + sw_input_soils +) } \description{ Preparations for the extraction of external soil datasets diff --git a/man/prepare_climatedata_netCDFs.Rd b/man/prepare_climatedata_netCDFs.Rd index 0872e6d5..829ab7de 100644 --- a/man/prepare_climatedata_netCDFs.Rd +++ b/man/prepare_climatedata_netCDFs.Rd @@ -5,9 +5,17 @@ \title{Process downloaded \var{netCDF} files to concatenate if needed otherwise move to dedicated directory} \usage{ -prepare_climatedata_netCDFs(dir_code, dir_data, dir_duplicates, - dir_concatables, dir_delete, dir_scrutinize, dir_out, - climDB_tag = NULL, climDB_meta = NULL) +prepare_climatedata_netCDFs( + dir_code, + dir_data, + dir_duplicates, + dir_concatables, + 
dir_delete, + dir_scrutinize, + dir_out, + climDB_tag = NULL, + climDB_meta = NULL +) } \description{ Process downloaded \var{netCDF} files to concatenate if needed otherwise diff --git a/man/process_inputs.Rd b/man/process_inputs.Rd index 87c17b83..88b308fa 100644 --- a/man/process_inputs.Rd +++ b/man/process_inputs.Rd @@ -4,8 +4,7 @@ \alias{process_inputs} \title{Load and prepare inputs for a \pkg{rSFSW2} simulation project} \usage{ -process_inputs(project_paths, fnames_in, use_preprocin = TRUE, - verbose = FALSE) +process_inputs(project_paths, fnames_in, use_preprocin = TRUE, verbose = FALSE) } \description{ Load and prepare inputs for a \pkg{rSFSW2} simulation project diff --git a/man/quickprepare_dbOutput_dbWork.Rd b/man/quickprepare_dbOutput_dbWork.Rd index 8320a28d..80ed48b4 100644 --- a/man/quickprepare_dbOutput_dbWork.Rd +++ b/man/quickprepare_dbOutput_dbWork.Rd @@ -5,8 +5,7 @@ \title{Prepare output database without running proper steps of \file{SFSW2_project_code.R}} \usage{ -quickprepare_dbOutput_dbWork(actions, path, SFSW2_prj_meta, - verbose = FALSE) +quickprepare_dbOutput_dbWork(actions, path, SFSW2_prj_meta, verbose = FALSE) } \arguments{ \item{path}{A character string. 
The path at which the databases will be diff --git a/man/rSFSW2.Rd b/man/rSFSW2.Rd index 129a402c..b91181a5 100644 --- a/man/rSFSW2.Rd +++ b/man/rSFSW2.Rd @@ -64,7 +64,7 @@ Useful links: } \author{ -\strong{Maintainer}: Daniel Schlaepfer \email{daniel.schlaepfer@yale.edu} (0000-0001-9973-2065) +\strong{Maintainer}: Daniel Schlaepfer \email{daniel.schlaepfer@alumni.ethz.ch} (\href{https://orcid.org/0000-0001-9973-2065}{ORCID}) Other contributors: \itemize{ diff --git a/man/read_time_netCDF.Rd b/man/read_time_netCDF.Rd index 5df581f4..13a4ae5e 100644 --- a/man/read_time_netCDF.Rd +++ b/man/read_time_netCDF.Rd @@ -4,10 +4,13 @@ \alias{read_time_netCDF} \title{Read and interpret time dimension of a \var{netCDF} file with \acronym{CF} 1 or larger} \usage{ -read_time_netCDF(filename) +read_time_netCDF(filename, tres = c("monthly", "daily")) } \arguments{ -\item{filename}{A character string. The name of a \var{netCDF} file.} +\item{filename}{A character string, the name of a \var{netCDF} file; or, +the result of \code{\link[ncdf4]{nc_open}}.} + +\item{tres}{A character string.
The temporal resolution (time step).} } \value{ A list with six elements: diff --git a/man/reaggregate_raster.Rd b/man/reaggregate_raster.Rd index c6acc3e8..a543a443 100644 --- a/man/reaggregate_raster.Rd +++ b/man/reaggregate_raster.Rd @@ -4,8 +4,14 @@ \alias{reaggregate_raster} \title{Extract all raster cell values that occur within each rectangle} \usage{ -reaggregate_raster(x, coords, to_res = c(0, 0), with_weights = NULL, - method = c("raster", "raster_con", "block"), tol = 0.01) +reaggregate_raster( + x, + coords, + to_res = c(0, 0), + with_weights = NULL, + method = c("raster", "raster_con", "block"), + tol = 0.01 +) } \arguments{ \item{x}{A \code{\link[raster:Raster-class]{raster::Raster}} object from diff --git a/man/recreate_dbWork.Rd b/man/recreate_dbWork.Rd index 51fedc3f..e3a5c7eb 100644 --- a/man/recreate_dbWork.Rd +++ b/man/recreate_dbWork.Rd @@ -4,8 +4,14 @@ \alias{recreate_dbWork} \title{Re-create or update \var{\sQuote{dbWork}} based on \var{\sQuote{dbOutput}}} \usage{ -recreate_dbWork(path, dbOutput, use_granular_control, - SFSW2_prj_meta = NULL, verbose = FALSE, print.debug = FALSE) +recreate_dbWork( + path, + dbOutput, + use_granular_control, + SFSW2_prj_meta = NULL, + verbose = FALSE, + print.debug = FALSE +) } \arguments{ \item{path}{A character string. 
Path to the folder where the database will be diff --git a/man/run_test_projects.Rd b/man/run_test_projects.Rd index 40c80cf8..fd457943 100644 --- a/man/run_test_projects.Rd +++ b/man/run_test_projects.Rd @@ -4,10 +4,18 @@ \alias{run_test_projects} \title{Run test projects} \usage{ -run_test_projects(dir_tests, dir_prj_tests = NULL, dir_ref = NULL, - dir_prev = NULL, which_tests_torun = seq_along(dir_tests), - delete_output = FALSE, force_delete_output = FALSE, - make_new_ref = FALSE, write_report_to_disk = TRUE, verbose = FALSE) +run_test_projects( + dir_tests, + dir_prj_tests = NULL, + dir_ref = NULL, + dir_prev = NULL, + which_tests_torun = seq_along(dir_tests), + delete_output = FALSE, + force_delete_output = FALSE, + make_new_ref = FALSE, + write_report_to_disk = TRUE, + verbose = FALSE +) } \arguments{ \item{dir_tests}{A vector of character strings. Paths to individual test @@ -54,3 +62,13 @@ A list with two elements: \describe{ \description{ Run test projects } +\examples{ +\dontrun{ + # Run test project 4 inside development version of package + # Assume that working directory is `tests/test_data/TestPrj4/` + if (file.exists("SFSW2_project_code.R")) { + res <- run_test_projects(dir_tests = ".", delete_output = TRUE) + } +} + +} diff --git a/man/save_to_rds_with_backup.Rd b/man/save_to_rds_with_backup.Rd index 0ca49960..0171b077 100644 --- a/man/save_to_rds_with_backup.Rd +++ b/man/save_to_rds_with_backup.Rd @@ -9,7 +9,7 @@ save_to_rds_with_backup(object, file, tag_backup = "backup", ...) \arguments{ \item{object}{\R object to serialize.} -\item{file}{a \link{connection} or the name of the file where the \R object +\item{file}{a \link[base]{connection} or the name of the file where the \R object is saved to or read from.} \item{tag_backup}{A character string. 
A tag that is appended at the end of diff --git a/man/select_suitable_CFs.Rd b/man/select_suitable_CFs.Rd new file mode 100644 index 00000000..0503a2ee --- /dev/null +++ b/man/select_suitable_CFs.Rd @@ -0,0 +1,19 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R +\name{select_suitable_CFs} +\alias{select_suitable_CFs} +\title{Subset a list of netCDF CF file names to specific models, scenarios, +and variables} +\usage{ +select_suitable_CFs( + climDB_files, + climDB_meta, + getYears, + model_name, + scenario_names +) +} +\description{ +Subset a list of netCDF CF file names to specific models, scenarios, +and variables +} diff --git a/man/set_options_warn_error.Rd b/man/set_options_warn_error.Rd index 606f5e14..33acb243 100644 --- a/man/set_options_warn_error.Rd +++ b/man/set_options_warn_error.Rd @@ -4,8 +4,12 @@ \alias{set_options_warn_error} \title{Setting global 'warn' and 'error' options} \usage{ -set_options_warn_error(debug.warn.level = 1L, - debug.dump.objects = FALSE, dir_prj = ".", verbose = FALSE) +set_options_warn_error( + debug.warn.level = 1L, + debug.dump.objects = FALSE, + dir_prj = ".", + verbose = FALSE +) } \arguments{ \item{debug.warn.level}{An integer value. 
Sets the \code{warn} option.} diff --git a/man/setup_RNG.Rd b/man/setup_RNG.Rd index 0db567c9..5da4bbe7 100644 --- a/man/setup_RNG.Rd +++ b/man/setup_RNG.Rd @@ -34,7 +34,8 @@ Organizing previous state and streams of random number generator \section{Usage}{ \var{RNG} - parallelized function calls by \pkg{rSFSW2} \itemize{ - \item \code{try.ScenarioWeather} wraps \code{calc.ScenarioWeather} which + \item \code{try_MonthlyScenarioWeather} wraps + \code{calc_MonthlyScenarioWeather} which calls \code{set_RNG_stream} to prepare \var{RNG} for functions \itemize{ \item \code{fix_PPTdata_length} diff --git a/man/setup_SFSW2_cluster.Rd b/man/setup_SFSW2_cluster.Rd index 3430dcb9..d78cb979 100644 --- a/man/setup_SFSW2_cluster.Rd +++ b/man/setup_SFSW2_cluster.Rd @@ -4,8 +4,12 @@ \alias{setup_SFSW2_cluster} \title{Set-up a parallel cluster to be used for a \pkg{rSFSW2} simulation project} \usage{ -setup_SFSW2_cluster(opt_parallel, dir_out, verbose = FALSE, - print.debug = FALSE) +setup_SFSW2_cluster( + opt_parallel, + dir_out, + verbose = FALSE, + print.debug = FALSE +) } \description{ Set-up a parallel cluster to be used for a \pkg{rSFSW2} simulation project diff --git a/man/setup_dbWork.Rd b/man/setup_dbWork.Rd index 1cd661e5..594f13fd 100644 --- a/man/setup_dbWork.Rd +++ b/man/setup_dbWork.Rd @@ -5,8 +5,7 @@ \title{Setup or connect to \var{SQLite}-database \code{dbWork} to manage runs of a \pkg{rSFSW2} simulation project} \usage{ -setup_dbWork(path, sim_size, include_YN, resume = FALSE, - SFSW2_prj_meta = NULL) +setup_dbWork(path, sim_size, include_YN, resume = FALSE, SFSW2_prj_meta = NULL) } \arguments{ \item{path}{A character string. 
Path to the folder where the database will be diff --git a/man/setup_rSFSW2_project_infrastructure.Rd b/man/setup_rSFSW2_project_infrastructure.Rd index 72ff44f2..7dd5c6b9 100644 --- a/man/setup_rSFSW2_project_infrastructure.Rd +++ b/man/setup_rSFSW2_project_infrastructure.Rd @@ -4,8 +4,11 @@ \alias{setup_rSFSW2_project_infrastructure} \title{Setup infrastructure (skeleton) for a new \pkg{rSFSW2} simulation experiment} \usage{ -setup_rSFSW2_project_infrastructure(dir_prj, verbose = TRUE, - print.debug = FALSE) +setup_rSFSW2_project_infrastructure( + dir_prj, + verbose = TRUE, + print.debug = FALSE +) } \arguments{ \item{dir_prj}{A character string. The path to the new simulation project. diff --git a/man/setup_spatial_simulation.Rd b/man/setup_spatial_simulation.Rd index b38f0909..d4abdb7d 100644 --- a/man/setup_spatial_simulation.Rd +++ b/man/setup_spatial_simulation.Rd @@ -4,8 +4,12 @@ \alias{setup_spatial_simulation} \title{Set-up information for a spatially aware simulation project} \usage{ -setup_spatial_simulation(SFSW2_prj_meta, SFSW2_prj_inputs, - use_sim_spatial = FALSE, verbose = FALSE) +setup_spatial_simulation( + SFSW2_prj_meta, + SFSW2_prj_inputs, + use_sim_spatial = FALSE, + verbose = FALSE +) } \description{ Set-up information for a spatially aware simulation project diff --git a/man/setup_time_simulation_project.Rd b/man/setup_time_simulation_project.Rd index 704afbb2..69fd6f1d 100644 --- a/man/setup_time_simulation_project.Rd +++ b/man/setup_time_simulation_project.Rd @@ -4,8 +4,14 @@ \alias{setup_time_simulation_project} \title{Describe the time of a simulation project} \usage{ -setup_time_simulation_project(sim_time, add_st2 = FALSE, - adjust_NS = FALSE, use_doy_range = FALSE, doy_ranges = list()) +setup_time_simulation_project( + sim_time, + is_idem = FALSE, + add_st2 = FALSE, + adjust_NS = FALSE, + use_doy_range = FALSE, + doy_ranges = list() +) } \arguments{ \item{sim_time}{A list with at least values for four named elements: diff --git 
a/man/simulate_SOILWAT2_experiment.Rd b/man/simulate_SOILWAT2_experiment.Rd index a7b97409..1fee5052 100644 --- a/man/simulate_SOILWAT2_experiment.Rd +++ b/man/simulate_SOILWAT2_experiment.Rd @@ -4,8 +4,15 @@ \alias{simulate_SOILWAT2_experiment} \title{Carry out a \pkg{rSFSW2} simulation experiment} \usage{ -simulate_SOILWAT2_experiment(SFSW2_prj_meta, SFSW2_prj_inputs, opt_behave, - opt_parallel, opt_chunks, opt_out_run, opt_verbosity) +simulate_SOILWAT2_experiment( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_behave, + opt_parallel, + opt_chunks, + opt_out_run, + opt_verbosity +) } \description{ Carry out a \pkg{rSFSW2} simulation experiment diff --git a/man/stretch_values.Rd b/man/stretch_values.Rd deleted file mode 100644 index 2b6a4962..00000000 --- a/man/stretch_values.Rd +++ /dev/null @@ -1,21 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{stretch_values} -\alias{stretch_values} -\title{Stretch values} -\usage{ -stretch_values(x, lambda = 0) -} -\arguments{ -\item{x}{A numeric vector.} - -\item{lambda}{A numeric value. The stretching factor applied to \code{x}.} -} -\value{ -A numeric vector of the size of \code{x}. -} -\description{ -Values above the mean of \code{x} are made larger and -values below the mean are made smaller - each by -\code{lambda * dist(x, mean(x))}. 
-} diff --git a/man/swOutput_access.Rd b/man/swOutput_access.Rd index 0a2e961b..c97d9bc0 100644 --- a/man/swOutput_access.Rd +++ b/man/swOutput_access.Rd @@ -31,9 +31,18 @@ \alias{get_CO2effects_yr} \title{\pkg{rSOILWAT2} data access functions} \usage{ -get_Response_aggL(response, tscale = c("dy", "dyAll", "mo", "moAll", - "yr", "yrAll"), scaler = 10, FUN, weights = NULL, x, st, st2, topL, - bottomL) +get_Response_aggL( + response, + tscale = c("dy", "dyAll", "mo", "moAll", "yr", "yrAll"), + scaler = 10, + FUN, + weights = NULL, + x, + st, + st2, + topL, + bottomL +) get_Temp_yr(x, st) diff --git a/man/test_sigmaGamma.Rd b/man/test_sigmaGamma.Rd deleted file mode 100644 index 72738d05..00000000 --- a/man/test_sigmaGamma.Rd +++ /dev/null @@ -1,28 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{test_sigmaGamma} -\alias{test_sigmaGamma} -\title{Check that data are within range of an approximated gamma distribution} -\usage{ -test_sigmaGamma(data, sigmaN = 6) -} -\arguments{ -\item{data}{A numeric vector. Daily values of precipitation.} - -\item{sigmaN}{An integer value. A multiplier of \code{stats::sd}.} -} -\description{ -Check that data are within range of an approximated gamma distribution -} -\section{Note}{ - Approximate shape and scale instead of very slow call: - \code{g <- MASS::fitdistr(data, "gamma")} -} - -\references{ -Choi, S. C., and R. Wette. 1969. Maximum Likelihood Estimation of - the Parameters of the Gamma Distribution and Their Bias. Technometrics - 11:683-690. 
- -\url{http://en.wikipedia.org/wiki/Gamma_distribution#Maximum_likelihood_estimation} -} diff --git a/man/test_sigmaNormal.Rd b/man/test_sigmaNormal.Rd deleted file mode 100644 index 1180064a..00000000 --- a/man/test_sigmaNormal.Rd +++ /dev/null @@ -1,16 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/Mathematical_Functions.R -\name{test_sigmaNormal} -\alias{test_sigmaNormal} -\title{Check that data are within range of normal distribution} -\usage{ -test_sigmaNormal(data, sigmaN = 6) -} -\arguments{ -\item{data}{A numeric vector. Daily values of temperature.} - -\item{sigmaN}{An integer value. A multiplier of \code{stats::sd}.} -} -\description{ -Check that data are within range of normal distribution -} diff --git a/man/try.ScenarioWeather.Rd b/man/try.ScenarioWeather.Rd deleted file mode 100644 index e107a348..00000000 --- a/man/try.ScenarioWeather.Rd +++ /dev/null @@ -1,15 +0,0 @@ -% Generated by roxygen2: do not edit by hand -% Please edit documentation in R/ExtractData_ClimateDownscaling.R -\name{try.ScenarioWeather} -\alias{try.ScenarioWeather} -\title{Make daily weather for a scenario} -\usage{ -try.ScenarioWeather(i, clim_source, use_CF, use_NEX, climDB_meta, - climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, - climate.ambient, locations, compression_type, getYears, assocYears, - sim_time, seeds_DS, opt_DS, project_paths, dir_failed, fdbWeather, - resume, verbose, print.debug) -} -\description{ -A wrapper function for \code{calc.ScenarioWeather} with error control. 
-} diff --git a/man/tryToGet_ClimDB.Rd b/man/tryToGet_ClimDB.Rd index c45725bb..33140cc7 100644 --- a/man/tryToGet_ClimDB.Rd +++ b/man/tryToGet_ClimDB.Rd @@ -2,21 +2,55 @@ % Please edit documentation in R/ExtractData_ClimateDownscaling.R \name{tryToGet_ClimDB} \alias{tryToGet_ClimDB} -\title{Organizes the calls (in parallel) which obtain specified scenario weather for the - weather database from one of the available \var{GCM} sources} +\title{Organizes the calls (in parallel) which obtain specified scenario weather +for the weather database from one of the available \var{GCM} sources} \usage{ -tryToGet_ClimDB(ids_ToDo, clim_source, use_CF, use_NEX, climDB_meta, - climDB_files, reqGCMs, reqRCPsPerGCM, reqDownscalingsPerGCM, locations, - getYears, assocYears, project_paths, dir_failed, fdbWeather, - climate.ambient, dbW_compression_type, sim_time, seeds_DS, opt_DS, - resume, verbose, print.debug, seed = NA) +tryToGet_ClimDB( + ids_ToDo, + clim_source, + use_CF, + use_NEX, + climDB_meta, + climDB_files, + reqGCMs, + reqRCPsPerGCM, + reqDownscalingsPerGCM, + locations, + getYears, + assocYears, + project_paths, + dir_failed, + fdbWeather, + climate.ambient, + dbW_compression_type, + sim_time, + seeds_DS, + sim_scens, + resume, + verbose, + print.debug, + seed = NA +) } \arguments{ \item{seed}{A seed set, \code{NULL}, or \code{NA}. \code{NA} will not affect -the state of the \acronym{RNG}; \code{NULL} will re-initialize the \acronym{RNG}; -and all other values are passed to \code{\link{set.seed}}.} +the state of the \acronym{RNG}; \code{NULL} will re-initialize the +\acronym{RNG}; and all other values are passed to \code{\link{set.seed}}.} } \description{ -This function assumes that a whole bunch of global variables exist and contain - appropriate values. +This function assumes that a whole bunch of global variables exist and +contain appropriate values. 
} +\section{Details}{ + +The daily extractions parallelize over \var{GCM} x \var{scenario} +combinations, i.e., data for all \var{locations} are extracted for +one value of \var{GCM} x \var{scenario} at a time. This is good if +file handling is slow and memory is not limiting. + +The monthly extractions parallelize over \var{GCM} x \var{locations} +combinations, i.e., data for one \var{location} is extracted for +all \var{scenarios} of one \var{GCM} at a time. This is good if file +handling is fast and memory is limiting. +} + diff --git a/man/try_MonthlyScenarioWeather.Rd b/man/try_MonthlyScenarioWeather.Rd new file mode 100644 index 00000000..0c634ede --- /dev/null +++ b/man/try_MonthlyScenarioWeather.Rd @@ -0,0 +1,35 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ExtractData_ClimateDownscaling.R +\name{try_MonthlyScenarioWeather} +\alias{try_MonthlyScenarioWeather} +\title{Make daily weather for a scenario} +\usage{ +try_MonthlyScenarioWeather( + i, + clim_source, + use_CF, + use_NEX, + climDB_meta, + climDB_files, + reqGCMs, + reqRCPsPerGCM, + reqDownscalingsPerGCM, + climate.ambient, + locations, + compression_type, + getYears, + assocYears, + sim_time, + seeds_DS, + opt_DS, + project_paths, + dir_failed, + fdbWeather, + resume, + verbose, + print.debug +) +} +\description{ +A wrapper function for \code{calc_MonthlyScenarioWeather} with error control. 
+} diff --git a/man/try_cell_ISRICWISE.Rd b/man/try_cell_ISRICWISE.Rd index 398f55df..44d04db0 100644 --- a/man/try_cell_ISRICWISE.Rd +++ b/man/try_cell_ISRICWISE.Rd @@ -5,8 +5,18 @@ \title{Wrapping \code{\link{calc_cell_ISRICWISE}} in \code{\link[base]{try}} to handle errors without stopping a \pkg{rSFSW2} run.} \usage{ -try_cell_ISRICWISE(i, sim_cells_SUIDs, template_simulationSoils, layer_N, - layer_Nsim, ldepth, dat_wise = dat_wise, nvars, var_tags, val_rocks) +try_cell_ISRICWISE( + i, + sim_cells_SUIDs, + template_simulationSoils, + layer_N, + layer_Nsim, + ldepth, + dat_wise = dat_wise, + nvars, + var_tags, + val_rocks +) } \description{ Wrapping \code{\link{calc_cell_ISRICWISE}} in \code{\link[base]{try}} to diff --git a/man/update_intracker.Rd b/man/update_intracker.Rd index e27a8f0e..c7c18c29 100644 --- a/man/update_intracker.Rd +++ b/man/update_intracker.Rd @@ -4,8 +4,13 @@ \alias{update_intracker} \title{Update input tracker status} \usage{ -update_intracker(ist, tracker, prepared = NULL, checked = NULL, - clean_subsequent = FALSE) +update_intracker( + ist, + tracker, + prepared = NULL, + checked = NULL, + clean_subsequent = FALSE +) } \arguments{ \item{ist}{A data.frame representing an input tracker as generated by diff --git a/man/update_runIDs_sites_by_dbW.Rd b/man/update_runIDs_sites_by_dbW.Rd index d099747d..b12910a0 100644 --- a/man/update_runIDs_sites_by_dbW.Rd +++ b/man/update_runIDs_sites_by_dbW.Rd @@ -4,8 +4,12 @@ \alias{update_runIDs_sites_by_dbW} \title{Lookup IDs of sites as found in a weather database} \usage{ -update_runIDs_sites_by_dbW(sim_size, label_WeatherData, - fdbWeather = NULL, verbose = FALSE) +update_runIDs_sites_by_dbW( + sim_size, + label_WeatherData, + fdbWeather = NULL, + verbose = FALSE +) } \arguments{ \item{sim_size}{A list with at least one named element \code{runIDs_sites}.} diff --git a/src/GISSM_germination_wait_times.cpp b/src/GISSM_germination_wait_times.cpp deleted file mode 100644 index 730eb8c8..00000000 --- 
a/src/GISSM_germination_wait_times.cpp +++ /dev/null @@ -1,68 +0,0 @@ -#include -using namespace Rcpp; - -//' Determine wait times until germination based on information on favorable -//' conditions and time required to germinate -//' -//' @section Note: The \pkg{Rcpp} version of the function is about 270x faster -//' for vectors of length 365 and 12,000x faster for vectors of length 11,000 -//' than the R version. The \pkg{Rcpp} version also reduced the memory -//' footprint by a factor of >> 3080. -//' -//' @references Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). -//' Modeling regeneration responses of big sagebrush (Artemisia tridentata) -//' to abiotic conditions. Ecol Model, 286, 66-77. -//' -//' @examples -//' # The \pkg{Rcpp} function is equivalent to the following R version -//' germination_wait_times_R <- function(time_to_germinate, duration_fave_cond) { -//' N <- length(time_to_germinate) -//' stats::na.exclude(unlist(lapply(seq_len(N), function(t) { -//' if (is.finite(time_to_germinate[t])) { -//' t1 <- duration_fave_cond[t:N] -//' t2 <- stats::na.exclude(t1) -//' t3 <- which(t2[time_to_germinate[t]] == t1)[1] -//' sum(is.na(t1[1:t3])) -//' } else { -//' NA -//' } -//' }))) -//' } -//' -//' @export -// [[Rcpp::export]] -IntegerVector germination_wait_times(const IntegerVector& time_to_germinate, - const IntegerVector& duration_fave_cond) { - int n = time_to_germinate.size(); - - // throw input errors - if (n != duration_fave_cond.size()) { - throw std::invalid_argument("'germination_wait_times': arguments must be of identical length"); - } - - // calculate - int i, j = 0, t1, n_nas; - int k = sum(!is_na(time_to_germinate)); - IntegerVector out(k); - - for (i = 0; i < n; ++i) { - - if (!IntegerVector::is_na(time_to_germinate[i])) { - // throw error if germination takes too long - if (IntegerVector::is_na(duration_fave_cond[i]) || - time_to_germinate[i] > duration_fave_cond[i]) { - throw std::runtime_error("'germination_wait_times': values of 
time_to_germinate are larger than those of duration_fave_cond (or the latter are NAs)"); - } - - n_nas = 0; - // count NAs between i and i + time_to_germinate[i] - for (t1 = 0; t1 - n_nas < time_to_germinate[i] && i + t1 < n; ++t1) { - if (IntegerVector::is_na(duration_fave_cond[i + t1])) ++n_nas; - } - - out[j++] = n_nas; - } - } - - return Rcpp::wrap(out); -} diff --git a/src/GISSM_get_KilledBySoilLayers.cpp b/src/GISSM_get_KilledBySoilLayers.cpp deleted file mode 100644 index 2437ebd2..00000000 --- a/src/GISSM_get_KilledBySoilLayers.cpp +++ /dev/null @@ -1,76 +0,0 @@ -#include -using namespace Rcpp; - -//' Determine if all conditions across rooted soil layers are deadly -//' -//' Function that checks whether all relevant (those with roots) soil layers -//' are under conditions of mortality (kill.conditions) for each day of a -//' given year -//' -//' \code{relevantLayers} takes either \code{NA} if no soil layers should be -//' considered (e.g., because not yet germinated), or an integer number -//' between 1 and the number of simulated soil layers. The number indicates -//' the depth to which a seedling has grown roots and over which layers -//' \code{kill.conditions} will be evaluated. -//' -//' @section Note: The \pkg{Rcpp} version of the function is about 165x -//' faster than the version previous to commit -//' \var{6344857a9cdb08acf68fa031c43cf4a596613aad} 'Small speed improvements' -//' and about 70x faster than the R version. The \pkg{Rcpp} version also -//' reduced the memory footprint by a factor of 200. -//' -//' @param relevantLayers An integer vector, usually of length 365 or 366 -//' (days). -//' @param kill.conditions A m x p logical matrix with -//' \code{m >= length(relevantLayers)} and p represents the number of -//' simulated soil layers, i.e., \code{p >= max(relevantLayers, na.rm = TRUE)}. -//' -//' @references Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). 
-//' Modeling regeneration responses of big sagebrush (Artemisia tridentata) -//' to abiotic conditions. Ecol Model, 286, 66-77. -//' -//' @return A logical vector of the length of \code{relevantLayers} with -//' values containing \code{NA} for days when conditions were not evaluated, -//' \code{TRUE} if all relevant soil layers (columns) of \code{kill.conditions} -//' were \code{TRUE}, and with \code{FALSE} otherwise -//' -//' @examples -//' # The \pkg{Rcpp} function is equivalent to the following R version -//' get_KilledBySoilLayers_R <- function(relevantLayers, kill.conditions) { -//' vapply(seq_along(relevantLayers), function(k) { -//' if (all(is.finite(relevantLayers[k]))) { -//' all(as.logical(kill.conditions[k, seq_len(relevantLayers[k])])) -//' } else NA -//' }, FUN.VALUE = NA) -//' } -//' -//' @export -// [[Rcpp::export]] -LogicalVector get_KilledBySoilLayers(const IntegerVector& relevantLayers, - const LogicalMatrix& kill_conditions) { - - int n = relevantLayers.size(); - - // catch errors - if (max(relevantLayers) > kill_conditions.ncol() || - n > kill_conditions.nrow() || - is_true(any(relevantLayers < 0))) { - throw std::invalid_argument("'get_KilledBySoilLayers': inadmissible value(s) of relevantLayers"); - } - - // calculate - int i, j; - IntegerVector killed(n); - for (i = 0; i < n; i++) { - if (IntegerVector::is_na(relevantLayers[i])) { - killed[i] = NA_INTEGER; - - } else { - for (j = 0; j < relevantLayers[i] && kill_conditions(i, j); ++j) ; - - killed[i] = (j == relevantLayers[i] && kill_conditions(i, j - 1) ? 
1 : 0); - } - } - - return Rcpp::wrap(killed); -} diff --git a/src/GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp b/src/GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp deleted file mode 100644 index 07e8a23c..00000000 --- a/src/GISSM_setFALSE_SeedlingSurvival_1stSeason.cpp +++ /dev/null @@ -1,55 +0,0 @@ -#include -using namespace Rcpp; - -//' Determine seedling survival in the first season (\var{\sQuote{ss1s}}) -//' -//' @section Note: The \pkg{Rcpp} version of the function is about 270x faster -//' for vectors of length 365 and 12,000x faster for vectors of length 11,000 -//' than the R version. The \pkg{Rcpp} version also reduced the memory -//' footprint by a factor of >> 3080. -//' @section Note: Previous name \code{setFALSE_SeedlingSurvival_1stSeason}. -//' -//' @section C code: \code{ss1s} is a pointer to the data and the original -//' vector will get altered; one would need for a deep copy: -//' \code{LogicalVector out = clone(ss1s)} -//' -//' @references Schlaepfer, D.R., Lauenroth, W.K. & Bradford, J.B. (2014). -//' Modeling regeneration responses of big sagebrush (Artemisia tridentata) -//' to abiotic conditions. Ecol Model, 286, 66-77. 
-//' -//' @examples -//' # The \pkg{Rcpp} function is equivalent to the following R version -//' kill_seedling_R <- function(ss1s, ry_year_day, ry_useyrs, y, -//' doy) { -//' ss1s[ry_year_day == ry_useyrs[y]][doy] <- FALSE -//' ss1s -//' } -//' -//' @export -// [[Rcpp::export]] -LogicalVector kill_seedling(LogicalVector& ss1s, - const IntegerVector& ry_year_day, const IntegerVector& ry_useyrs, int y, - int doy) { - - int i, n = ry_year_day.size(); - - // throw input errors - if (n != ss1s.size() || ry_useyrs.size() < y || - ry_useyrs[y - 1] > max(ry_year_day) || ry_useyrs[y - 1] < min(ry_year_day)) { - throw std::invalid_argument("'kill_seedling': invalid arguments."); - } - - // calculate - for (i = 0; i < n && ry_year_day[i] != ry_useyrs[y - 1]; ++i); // y is a 1-based index to ry_useyrs - - // throw error - if (i + doy > n) { - throw std::runtime_error("'kill_seedling': doy too large for given year 'y'"); - } - - // assumes increasingly sorted vector ry_year_day - // doy is a 1-based index - ss1s[i + doy - 1] = false; - - return Rcpp::wrap(ss1s); -} diff --git a/src/RcppExports.cpp b/src/RcppExports.cpp deleted file mode 100644 index 33a2d22c..00000000 --- a/src/RcppExports.cpp +++ /dev/null @@ -1,46 +0,0 @@ -// Generated by using Rcpp::compileAttributes() -> do not edit by hand -// Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393 - -#include - -using namespace Rcpp; - -// germination_wait_times -IntegerVector germination_wait_times(const IntegerVector& time_to_germinate, const IntegerVector& duration_fave_cond); -RcppExport SEXP _rSFSW2_germination_wait_times(SEXP time_to_germinateSEXP, SEXP duration_fave_condSEXP) { -BEGIN_RCPP - Rcpp::RObject rcpp_result_gen; - Rcpp::RNGScope rcpp_rngScope_gen; - Rcpp::traits::input_parameter< const IntegerVector& >::type time_to_germinate(time_to_germinateSEXP); - Rcpp::traits::input_parameter< const IntegerVector& >::type duration_fave_cond(duration_fave_condSEXP); - rcpp_result_gen = 
Rcpp::wrap(germination_wait_times(time_to_germinate, duration_fave_cond)); - return rcpp_result_gen; -END_RCPP -} -// get_KilledBySoilLayers -LogicalVector get_KilledBySoilLayers(const IntegerVector& relevantLayers, const LogicalMatrix& kill_conditions); -RcppExport SEXP _rSFSW2_get_KilledBySoilLayers(SEXP relevantLayersSEXP, SEXP kill_conditionsSEXP) { -BEGIN_RCPP - Rcpp::RObject rcpp_result_gen; - Rcpp::RNGScope rcpp_rngScope_gen; - Rcpp::traits::input_parameter< const IntegerVector& >::type relevantLayers(relevantLayersSEXP); - Rcpp::traits::input_parameter< const LogicalMatrix& >::type kill_conditions(kill_conditionsSEXP); - rcpp_result_gen = Rcpp::wrap(get_KilledBySoilLayers(relevantLayers, kill_conditions)); - return rcpp_result_gen; -END_RCPP -} -// kill_seedling -LogicalVector kill_seedling(LogicalVector& ss1s, const IntegerVector& ry_year_day, const IntegerVector& ry_useyrs, int y, int doy); -RcppExport SEXP _rSFSW2_kill_seedling(SEXP ss1sSEXP, SEXP ry_year_daySEXP, SEXP ry_useyrsSEXP, SEXP ySEXP, SEXP doySEXP) { -BEGIN_RCPP - Rcpp::RObject rcpp_result_gen; - Rcpp::RNGScope rcpp_rngScope_gen; - Rcpp::traits::input_parameter< LogicalVector& >::type ss1s(ss1sSEXP); - Rcpp::traits::input_parameter< const IntegerVector& >::type ry_year_day(ry_year_daySEXP); - Rcpp::traits::input_parameter< const IntegerVector& >::type ry_useyrs(ry_useyrsSEXP); - Rcpp::traits::input_parameter< int >::type y(ySEXP); - Rcpp::traits::input_parameter< int >::type doy(doySEXP); - rcpp_result_gen = Rcpp::wrap(kill_seedling(ss1s, ry_year_day, ry_useyrs, y, doy)); - return rcpp_result_gen; -END_RCPP -} diff --git a/src/init.c b/src/init.c index 46ee9522..d0266441 100644 --- a/src/init.c +++ b/src/init.c @@ -28,21 +28,10 @@ static const R_CMethodDef CEntries[] = { {NULL, NULL, 0, NULL} }; -/* .Call calls: Rcpp v0.12.12 registers these correctly */ -extern SEXP _rSFSW2_germination_wait_times(SEXP, SEXP); -extern SEXP _rSFSW2_get_KilledBySoilLayers(SEXP, SEXP); -extern SEXP 
_rSFSW2_kill_seedling(SEXP, SEXP, SEXP, SEXP, SEXP); - -static const R_CallMethodDef CallEntries[] = { - {"_rSFSW2_germination_wait_times", (DL_FUNC) &_rSFSW2_germination_wait_times, 2}, - {"_rSFSW2_get_KilledBySoilLayers", (DL_FUNC) &_rSFSW2_get_KilledBySoilLayers, 2}, - {"_rSFSW2_kill_seedling", (DL_FUNC) &_rSFSW2_kill_seedling, 5}, - {NULL, NULL, 0} -}; /* Register package calls with R */ void R_init_rSFSW2(DllInfo *dll) { - R_registerRoutines(dll, CEntries, CallEntries, NULL, NULL); + R_registerRoutines(dll, CEntries, NULL, NULL, NULL); R_useDynamicSymbols(dll, FALSE); R_forceSymbols(dll, TRUE); } diff --git a/tests/test_data/0_ReferenceOutput/dbOutput_TestPrj4_v4.1.0.sqlite3 b/tests/test_data/0_ReferenceOutput/dbOutput_TestPrj4_v4.1.0.sqlite3 deleted file mode 100644 index 54d3484d..00000000 --- a/tests/test_data/0_ReferenceOutput/dbOutput_TestPrj4_v4.1.0.sqlite3 +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8ceb095c6901790fe1d9c6061d27eeeda7ffd84c9fb4ae09a839542414eb0857 -size 4980736 diff --git a/tests/test_data/0_ReferenceOutput/dbOutput_TestPrj4_v4.3.0.sqlite3 b/tests/test_data/0_ReferenceOutput/dbOutput_TestPrj4_v4.3.0.sqlite3 new file mode 100644 index 00000000..31aea63d --- /dev/null +++ b/tests/test_data/0_ReferenceOutput/dbOutput_TestPrj4_v4.3.0.sqlite3 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:921ff32695b83b261b1a8cb2a618aefa003d870aa238935e6ff68029ae241756 +size 4980736 diff --git a/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_ExperimentalDesign_v09.csv b/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_ExperimentalDesign_v09.csv deleted file mode 100644 index 0df6b0b4..00000000 --- a/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_ExperimentalDesign_v09.csv +++ /dev/null @@ -1,4 +0,0 @@ 
-Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forb,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Imperm_L1,Exclude_ClimateAmbient,Grass_HydRed_OnOff,Shrub_HydRed_OnOff,Tree_HydRed_OnOff,Forb_HydRed_OnOff,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,PET_multiplier,Grass_SWPcrit_MPa,Shrub_SWPcrit_MPa,Tree_SWPcrit_MPa,Forb_SWPcrit_MPa,UseCO2BiomassMultiplier,UseCO2WUEMultiplier,SoilTemp_Flag 
-UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1 -DefaultSettings,,,,,,,,,,,,,,Prairie,CONUSSOIL_BSE_EVERY10cm,,SchenkJackson2003_PCdry_grasses,SchenkJackson2003_PCdry_shrubs,FILL,SchenkJackson2003_PCdry_forbs,FILL,1,,,,,,,,1,1,1,1,1,1,1,1,,,,,,,,,,,,,,,,,,,,,,,,,,15,990,1,-3.5,-3.9,-2,-2,1,1,1 -noCO2effects,,,,,,,,,,,,,,Prairie,CONUSSOIL_BSE_EVERY10cm,,SchenkJackson2003_PCdry_grasses,SchenkJackson2003_PCdry_shrubs,FILL,SchenkJackson2003_PCdry_forbs,FILL,1,,,,,,,,1,1,1,1,1,1,1,1,,,,,,,,,,,,,,,,,,,,,,,,,,15,990,1,-3.5,-3.9,-2,-2,0,0,1 diff --git a/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_ExperimentalDesign_v10.csv b/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_ExperimentalDesign_v10.csv new file mode 100644 index 00000000..4ed19562 --- /dev/null +++ b/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_ExperimentalDesign_v10.csv @@ -0,0 +1,4 @@ +Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionTotalGrasses_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forb,Shru
b_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Imperm_L1,Exclude_ClimateAmbient,Grass_HydRed_OnOff,Shrub_HydRed_OnOff,Tree_HydRed_OnOff,Forb_HydRed_OnOff,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,PET_multiplier,Grass_SWPcrit_MPa,Shrub_SWPcrit_MPa,Tree_SWPcrit_MPa,Forb_SWPcrit_MPa,UseCO2BiomassMultiplier,UseCO2WUEMultiplier,SoilTemp_Flag +UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,1,1,1,1,1,0,0,0,0,1,1,0,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1 +DefaultSettings,,,,,,,,,,,,,,Prairie,CONUSSOIL_BSE_EVERY10cm,,SchenkJackson2003_PCdry_grasses,SchenkJackson2003_PCdry_shrubs,FILL,SchenkJackson2003_PCdry_forbs,FILL,1,,,,,0,0,,0,1,1,1,1,1,1,1,1,,,,,,,,,,,,,,,,,,,,,,,,,,15,990,1,-3.5,-3.9,-2,-2,1,1,1 +noCO2effects,,,,,,,,,,,,,,Prairie,CONUSSOIL_BSE_EVERY10cm,,SchenkJackson2003_PCdry_grasses,SchenkJackson2003_PCdry_shrubs,FILL,SchenkJackson2003_PCdry_forbs,FILL,1,,,,,0,0,,0,1,1,1,1,1,1,1,1,,,,,,,,,,,,,,,,,,,,,,,,,,15,990,1,-3.5,-3.9,-2,-2,0,0,1 diff --git a/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_TreatmentDesign_v17.csv b/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_TreatmentDesign_v17.csv deleted file mode 100644 index 4604f017..00000000 --- a/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_TreatmentDesign_v17.csv +++ /dev/null @@ -1,2 +0,0 @@ 
-Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forbs,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,Exclude_ClimateAmbient,MaxTempDepth,UseCO2BiomassMultiplier,UseCO2WUEMultiplier -UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_TreatmentDesign_v18.csv b/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_TreatmentDesign_v18.csv new file mode 100644 
index 00000000..81dc5663 --- /dev/null +++ b/tests/test_data/TestPrj4/1_Input/SWRuns_InputData_TreatmentDesign_v18.csv @@ -0,0 +1,2 @@ +Label,filesin,prodin,siteparamin,soilsin,weathersetupin,cloudin,YearStart,YearEnd,LookupWeatherFolder,LookupClimateTemp,LookupClimatePPT,LookupShiftedPPT,LookupShiftedPPTCategory,LookupSnowDensity,LookupTranspRegions,LookupEvapCoefs,LookupTranspCoefs_Grass,LookupTranspCoefs_Shrub,LookupTranspCoefs_Tree,LookupTranspCoefs_Forb,LookupCO2data,PotentialNaturalVegetation_CompositionShrubsC3C4_Paruelo1996,PotentialNaturalVegetation_CompositionShrubs_Fraction,PotentialNaturalVegetation_CompositionTotalGrasses_Fraction,PotentialNaturalVegetation_CompositionC3_Fraction,PotentialNaturalVegetation_CompositionC4_Fraction,PotentialNaturalVegetation_CompositionAnnuals_Fraction,PotentialNaturalVegetation_CompositionForb_Fraction,PotentialNaturalVegetation_CompositionBareGround_Fraction,PotentialNaturalVegetation_CompositionTrees_Fraction,AdjMonthlyBioMass_Precipitation,AdjMonthlyBioMass_Temperature,AdjRootProfile,RootProfile_C3,RootProfile_C4,RootProfile_Annuals,RootProfile_Shrubs,RootProfile_Forbs,Shrub_TotalBiomass_ScalingFactor,Shrub_LiveBiomass_ScalingFactor,Shrub_Litter_ScalingFactor,Grass_TotalBiomass_ScalingFactor,Grass_LiveBiomass_ScalingFactor,Grass_Litter_ScalingFactor,Tree_TotalBiomass_ScalingFactor,Tree_LiveBiomass_ScalingFactor,Tree_Litter_ScalingFactor,Forb_TotalBiomass_ScalingFactor,Forb_LiveBiomass_ScalingFactor,Forb_Litter_ScalingFactor,Vegetation_Biomass_ScalingSeason_AllGrowingORNongrowing,Vegetation_Height_ScalingFactor,PotentialNaturalVegetation_Composition_basedOnReferenceOrScenarioClimate,ClimateScenario_Temp_PerturbationInMeanSeasonalityBothOrNone,ClimateScenario_PPT_PerturbationInMeanSeasonalityBothOrNone,Exclude_ClimateAmbient,MaxTempDepth,UseCO2BiomassMultiplier,UseCO2WUEMultiplier 
+UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tests/test_data/TestPrj4/1_Input/SWRuns_InputMaster_Test_v11.csv b/tests/test_data/TestPrj4/1_Input/SWRuns_InputMaster_Test_v11.csv deleted file mode 100644 index b962dca2..00000000 --- a/tests/test_data/TestPrj4/1_Input/SWRuns_InputMaster_Test_v11.csv +++ /dev/null @@ -1,7 +0,0 @@ -"Label","site_id","WeatherFolder","X_WGS84","Y_WGS84","ELEV_m","Include_YN","dailyweather_source","Include_YN_DailyWeather","GCM_sources","Include_YN_ClimateScenarioSources","SoilTexture_source","Include_YN_SoilSources","Elevation_source","Include_YN_ElevationSources","ClimateNormals_source","Include_YN_ClimateNormalSources" -"Site01",1,"Site01_data_35.8125_-106.3125",-106.29953,35.76555,2072,1,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 -"Site02",2,"Site02_data_35.8125_-106.3125",-106.27478,35.76451,2072,1,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 -"Site03",3,"Site03_data_35.8125_-106.3125",-106.28126,35.7799,2072,1,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 -"Site04",4,"NA",-106.28749,35.7953,2072,0,"NA",0,"NA",0,"NA",0,"NA",0,"NA",0 -"Site05",5,"Site05_data_35.8125_-106.3125",-106.28749,35.7953,2072,1,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 -"Site06",6,"Site06_data_35.8125_-106.3125",-106.28749,35.7953,2072,1,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 diff --git a/tests/test_data/TestPrj4/1_Input/SWRuns_InputMaster_Test_v12.csv 
b/tests/test_data/TestPrj4/1_Input/SWRuns_InputMaster_Test_v12.csv new file mode 100644 index 00000000..58931e09 --- /dev/null +++ b/tests/test_data/TestPrj4/1_Input/SWRuns_InputMaster_Test_v12.csv @@ -0,0 +1,7 @@ +"Label","site_id","Include_YN","WeatherFolder","X_WGS84","Y_WGS84","ELEV_m","Slope","Aspect","dailyweather_source","Include_YN_DailyWeather","GCM_sources","Include_YN_ClimateScenarioSources","SoilTexture_source","Include_YN_SoilSources","Elevation_source","Include_YN_ElevationSources","ClimateNormals_source","Include_YN_ClimateNormalSources" +"Site01",1,1,"Site01_data_35.8125_-106.3125",-106.29953,35.76555,2072,NA,NA,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 +"Site02",2,1,"Site02_data_35.8125_-106.3125",-106.27478,35.76451,2072,NA,NA,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 +"Site03",3,1,"Site03_data_35.8125_-106.3125",-106.28126,35.7799,2072,NA,NA,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 +"Site04",4,0,"NA",-106.28749,35.7953,2072,NA,NA,"NA",0,"NA",0,"NA",0,"NA",0,"NA",0 +"Site05",5,1,"Site05_data_35.8125_-106.3125",-106.28749,35.7953,2072,NA,NA,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 +"Site06",6,1,"Site06_data_35.8125_-106.3125",-106.28749,35.7953,2072,NA,NA,"Maurer2002_NorthAmerica",1,"CMIP5_BCSD_GDODCPUCLLNL_USA",1,"ISRICWISEv12_Global",1,"Elevation_NED_USA",1,"ClimateNormals_NCDC2005_USA",1 diff --git a/tests/test_data/TestPrj4/1_Input/datafiles/SWRuns_InputData_siteparam_v14.csv b/tests/test_data/TestPrj4/1_Input/datafiles/SWRuns_InputData_siteparam_v15.csv similarity index 68% rename from tests/test_data/TestPrj4/1_Input/datafiles/SWRuns_InputData_siteparam_v14.csv 
rename to tests/test_data/TestPrj4/1_Input/datafiles/SWRuns_InputData_siteparam_v15.csv index 2096c534..1b07edef 100644 --- a/tests/test_data/TestPrj4/1_Input/datafiles/SWRuns_InputData_siteparam_v14.csv +++ b/tests/test_data/TestPrj4/1_Input/datafiles/SWRuns_InputData_siteparam_v15.csv @@ -1,2 +1,2 @@ -Label,SWC_min,SWC_init,SWC_wet,SWC_YearlyReset,SWC_Deepdrain,PET_multiplier,SoilTemp_Flag,SoilTempC_atUpperBoundary,SoilTempC_atLowerBoundary,SoilTemp_BiomassLimiter_gPERm2,SoilTemp_T1constant_a,SoilTemp_T1constant_b,SoilTemp_T1constant_c,SoilTemp_SoilThermCondct,SoilTemp_cs_constant,SoilTemp_SpecificHeatCapacity,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,Latitude,Altitude,Slope,Aspect,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Param_UnsaturatedPercolation -UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 +Label,SWC_min,SWC_init,SWC_wet,SWC_YearlyReset,SWC_Deepdrain,PET_multiplier,SoilTemp_Flag,SoilTempC_atUpperBoundary,SoilTempC_atLowerBoundary,SoilTemp_BiomassLimiter_gPERm2,SoilTemp_T1constant_a,SoilTemp_T1constant_b,SoilTemp_T1constant_c,SoilTemp_SoilThermCondct,SoilTemp_cs_constant,SoilTemp_SpecificHeatCapacity,SoilTemp_deltaX_cm,SoilTemp_MaxDepth_cm,RunoffPercent_fromPondedWater,RunonPercent_fromPondedWater,Param_UnsaturatedPercolation +UseInformationToCreateSoilWatRuns,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 diff --git a/tests/test_data/TestPrj4/1_Input/treatments/LookupCO2data/AtmosCO2.csv b/tests/test_data/TestPrj4/1_Input/treatments/LookupCO2data/AtmosCO2.csv index 59a1b899..a4a0ca7a 100644 --- a/tests/test_data/TestPrj4/1_Input/treatments/LookupCO2data/AtmosCO2.csv +++ b/tests/test_data/TestPrj4/1_Input/treatments/LookupCO2data/AtmosCO2.csv @@ -1,4 +1,4 @@ -Year,Default,RCP85,20TH_CENTURY,RCP3PD,RCP45,RCP6 +Year,Fix360ppm,RCP85,historical,RCP3PD,RCP45,RCP6 1765,360,278.05158,278.05158,278.05158,278.05158,278.05158 1766,360,278.10615,278.10615,278.10615,278.10615,278.10615 
1767,360,278.22039,278.22039,278.22039,278.22039,278.22039 diff --git a/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/climate.in b/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/climate.in index 5e050207..cc2e6166 100644 --- a/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/climate.in +++ b/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/climate.in @@ -1,6 +1,18 @@ -71.0 61.0 61.0 51.0 41.0 31.0 23.0 23.0 31.0 41.0 61.0 61.0 # (site: testing), sky cover (sunrise-sunset),%,Climate Atlas of the US,http://cdo.ncdc.noaa.gov/cgi-bin/climaps/climaps.pl -1.3 2.9 3.3 3.8 3.8 3.8 3.3 3.3 2.9 1.3 1.3 1.3 # Wind speed (m/s),Climate Atlas of the US,http://cdo.ncdc.noaa.gov/cgi-bin/climaps/climaps.pl -61.0 61.0 61.0 51.0 51.0 51.0 41.0 41.0 51.0 51.0 61.0 61.0 # rel. Humidity (%),Climate Atlas of the US,http://cdo.ncdc.noaa.gov/cgi-bin/climaps/climaps.pl -1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 # transmissivity (rel), only used in petfunc, but falls out of the equations (a = trans * b, c = a / trans) -213.7 241.6 261.0 308.0 398.1 464.5 0.0 0.0 0.0 140.0 161.6 185.1 # snow density (kg/m3): Brown, R. D. and P. W. Mote. 2009. The response of Northern Hemisphere snow cover to a changing climate. Journal of Climate 22:2124-2145. 
-1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 # m = number of precipitation events per day +#------ Input file for mean monthly atmospheric parameters + +# Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec + +# Sky cover (sunrise-sunset; percent) +71.0 61.0 61.0 51.0 41.0 31.0 23.0 23.0 31.0 41.0 61.0 61.0 + +# Wind speed (m / s) +1.3 2.9 3.3 3.8 3.8 3.8 3.3 3.3 2.9 1.3 1.3 1.3 + +# Relative humidity (%) +61.0 61.0 61.0 51.0 51.0 51.0 41.0 41.0 51.0 51.0 61.0 61.0 + +# Snow density (kg / m3) +213.7 241.6 261.0 308.0 398.1 464.5 0.0 0.0 0.0 140.0 161.6 185.1 + +# Number of precipitation events per day +1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 diff --git a/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/cloud.in b/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/cloud.in deleted file mode 100644 index cfd44a31..00000000 --- a/tests/test_data/TestPrj4/1_Input/treatments/tr_cloudin/cloud.in +++ /dev/null @@ -1,5 +0,0 @@ -71.0 61.0 61.0 51.0 41.0 31.0 23.0 23.0 31.0 41.0 61.0 61.0 # (site: 002_-119.415_39.046 ), sky cover (sunrise-sunset),%,Climate Atlas of the US,http://cdo.ncdc.noaa.gov/cgi-bin/climaps/climaps.pl -1.3 2.9 3.3 3.8 3.8 3.8 3.3 3.3 2.9 1.3 1.3 1.3 # Wind speed (m/s),Climate Atlas of the US,http://cdo.ncdc.noaa.gov/cgi-bin/climaps/climaps.pl -61.0 61.0 61.0 51.0 51.0 51.0 41.0 41.0 51.0 51.0 61.0 61.0 # rel. Humidity (%),Climate Atlas of the US,http://cdo.ncdc.noaa.gov/cgi-bin/climaps/climaps.pl -1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 # transmissivity (rel), only used in petfunc, but falls out of the equations (a = trans * b, c = a / trans) -213.7 241.6 261.0 308.0 398.1 464.5 0.0 0.0 0.0 140.0 161.6 185.1 # snow density (kg/m3): Brown, R. D. and P. W. Mote. 2009. The response of Northern Hemisphere snow cover to a changing climate. Journal of Climate 22:2124-2145. 
diff --git a/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/prod.in b/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/prod.in deleted file mode 100755 index 6814ebe4..00000000 --- a/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/prod.in +++ /dev/null @@ -1,143 +0,0 @@ -# Plant production data file for SOILWAT -# Location: - -# ---- Composition of vegetation type components (0-1; must add up to 1) -# Grasses Shrubs Trees Forbs Bare Ground -0.0 0.0 0.0 1.0 0.0 - - -# ---- Albedo -# Grasses Shrubs Trees Forbs Bare Ground -0.167 0.143 0.106 0.167 0.15 # albedo: (Houldcroft et al. 2009) MODIS snowfree 'grassland', 'open shrub', ‘evergreen needle forest’ with MODIS albedo aggregated over pure IGBP cells where NDVI is greater than the 98th percentile NDVI - - -# ---- % Cover: divide standing LAI by this to get % cover -# Grasses Shrubs Trees Forbs -3.0 2.22 5. 3.0 - - -# -- Canopy height (cm) parameters either constant through season or as tanfunc with respect to biomass (g/m^2) -# Grasses Shrubs Trees Forbs -300.0 0.0 0.0 300.0 # xinflec -29.5 5.0 5.0 29.5 # yinflec -85. 100. 3000. 85. # range -0.002 0.003 0.00008 0.002 # slope -0. 50. 1200. 0. # if > 0 then constant canopy height (cm) - - -# --- Vegetation interception parameters for equation: intercepted rain = (a + b*veg) + (c+d*veg) * ppt; Grasses+Shrubs: veg=vegcov, Trees: veg=LAI -# Grasses Shrubs Trees Forbs -0.0182 0. 0.00461 0.0182 # a -0.0065 0.0026 0.01405 0.0065 # b -0.0019 0. 0.0383 0.0019 # c -0.0054 0.0033 0.0337 0.0054 # d - - -# --- Litter interception parameters for equation: intercepted rain = (a + b*litter) + (c+d*litter) * ppt -# Grass-Litter Shrub-Litter Tree-Litter Forbs-Litter -0.0151 0.0151 0.0151 0.0151 # a -0.00005 0.00005 0.00005 0.00005 # b -0.0116 0.0116 0.0116 0.0116 # c -0.00002 0.00002 0.00002 0.00002 # d - - -# ---- Parameter for partitioning of bare-soil evaporation and transpiration as in Es = exp(-param*LAI) -# Grasses Shrubs Trees Forbs -1. 1. 0.41 1. 
# Trees: According to a regression based on a review by Daikoku, K., S. Hattori, A. Deguchi, Y. Aoki, M. Miyashita, K. Matsumoto, J. Akiyama, S. Iida, T. Toba, Y. Fujita, and T. Ohta. 2008. Influence of evaporation from the forest floor on evapotranspiration from the dry canopy. Hydrological Processes 22:4083-4096. - - -# ---- Parameter for scaling and limiting bare soil evaporation rate: if totagb (g/m2) > param then no bare-soil evaporation -# Grasses Shrubs Trees Forbs -999. 999. 2099. 999. # - - -# --- Shade effects on transpiration based on live and dead biomass -# Grasses Shrubs Trees Forbs -0.3 0.3 0.3 0.3 # shade scale -150. 150. 150. 150. # shade maximal dead biomass -300. 300. 0. 300. # tanfunc: xinflec -12. 12. 0. 12. # yinflec -34. 34. 2. 34. # range -0.002 0.002 0.0002 0.002 # slope - - -# ---- Hydraulic redistribution: Ryel, Ryel R, Caldwell, Caldwell M, Yoder, Yoder C, Or, Or D, Leffler, Leffler A. 2002. Hydraulic redistribution in a stand of Artemisia tridentata: evaluation of benefits to transpiration assessed with a simulation model. Oecologia 130: 173-184. -# Grasses Shrubs Trees Forbs -1 1 1 1 # flag to turn on/off (1/0) hydraulic redistribution --0.2328 -0.2328 -0.2328 -0.2328 # maxCondroot - maximum radial soil-root conductance of the entire active root system for water (cm/-bar/day) = 0.097 cm/MPa/h -10. 10. 10. 10. # swp50 - soil water potential (-bar) where conductance is reduced by 50% = -1. MPa -3.22 3.22 3.22 3.22 # shapeCond - shaping parameter for the empirical relationship from van Genuchten to model relative soil-root conductance for water - - -# ---- Critical soil water potential (MPa), i.e., when transpiration rates cannot sustained anymore, for instance, for many crop species -1.5 MPa is assumed and called wilting point -# Grasses Shrubs Trees Forbs --3.5 -3.9 -2.0 -2.0 - - -# Grasslands component: -# -------------- Monthly production values ------------ -# Litter - dead leafy material on the ground (g/m^2 ). 
-# Biomass - living and dead/woody aboveground standing biomass (g/m^2). -# %Live - proportion of Biomass that is actually living (0-1.0). -# LAI_conv - monthly amount of biomass needed to produce LAI=1.0 (g/m^2). -# There should be 12 rows, one for each month, starting with January. -# -#Litter Biomass %Live LAI_conv - 75.0 150.0 0.00 300. # January - 80.0 150.0 0.00 300. # February - 85.0 150.0 0.10 300. # March - 90.0 170.0 0.20 300. # April - 50.0 190.0 0.40 300. # May - 50.0 220.0 0.60 300. # June - 50.0 250.0 0.40 300. # July - 55.0 220.0 0.60 300. # August - 60.0 190.0 0.40 300. # September - 65.0 180.0 0.20 300. # October - 70.0 170.0 0.10 300. # November - 75.0 160.0 0.00 300. # December - -# Shrublands component: -#Litter Biomass %Live LAI_conv -85.4 210.0 0.06 372 # January -88.2 212.0 0.08 372 # February -95.3 228.0 0.20 372 # March -100.5 272.0 0.33 372 # April -166.4 400.0 0.57 372 # May -186.0 404.0 0.55 372 # June -177.1 381.0 0.50 372 # July -212.2 352.0 0.46 372 # August -157.4 286.0 0.32 372 # September -124.9 235.0 0.15 372 # October -110.4 218.0 0.08 372 # November -104.3 214.0 0.06 372 # December - -# Forest component: -#Litter Biomass %Live LAI_conv -2000 15000 0.083 500 # January -2000 15000 0.083 500 # February -2000 15000 0.083 500 # March -2000 15000 0.083 500 # April -2000 15000 0.083 500 # May -2000 15000 0.083 500 # June -2000 15000 0.083 500 # July -2000 15000 0.083 500 # August -2000 15000 0.083 500 # September -2000 15000 0.083 500 # October -2000 15000 0.083 500 # November -2000 15000 0.083 500 # December - -# FORB component: -#Litter Biomass %Live LAI_conv - 75.0 150.0 0.00 300. # January - 80.0 150.0 0.00 300. # February - 85.0 150.0 0.10 300. # March - 90.0 170.0 0.20 300. # April - 50.0 190.0 0.40 300. # May - 50.0 220.0 0.60 300. # June - 50.0 250.0 0.40 300. # July - 55.0 220.0 0.60 300. # August - 60.0 190.0 0.40 300. # September - 65.0 180.0 0.20 300. # October - 70.0 170.0 0.10 300. # November - 75.0 160.0 0.00 300. 
# December - diff --git a/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/veg.in b/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/veg.in index 82783a4a..51e67b31 100755 --- a/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/veg.in +++ b/tests/test_data/TestPrj4/1_Input/treatments/tr_prodin/veg.in @@ -1,18 +1,34 @@ -# Plant production data file for SOILWAT2 -# Location: +#------ Input file for land cover and vegetation types: +# parameters and mean monthly biomass values -# ---- Composition of vegetation type components (0-1; must add up to 1) -# Grasses Shrubs Trees Forbs BareGround +# USER: The vegetation composition and the mean monthly biomass values should +# be adjusted for site-specific run conditions. + +# USER: Most of the other values in this file are parameters that +# describe the four available vegetation types and should not be +# modified unless a vegetation type itself is altered. + + +#---- Composition of vegetation type components (0-1; must add up to 1) +# Grasses Shrubs Trees Forbs BareGround 0.2 0.2 0.2 0.2 0.2 -# ---- Albedo -# Grasses Shrubs Trees Forbs BareGround - 0.167 0.143 0.106 0.167 0.15 # albedo: (Houldcroft et al. 2009) MODIS snowfree 'grassland', 'open shrub', ‘evergreen needle forest’ with MODIS albedo aggregated over pure IGBP cells where NDVI is greater than the 98th percentile NDVI +#---- Albedo +# Default values from Houldcroft et al. 
2009: +# MODIS snowfree 'grassland', 'open shrub', ‘evergreen needle forest’ with +# MODIS albedo aggregated over pure IGBP cells where NDVI is greater than +# the 98th percentile NDVI + +# Grasses Shrubs Trees Forbs BareGround + 0.167 0.143 0.106 0.167 0.15 -# -- Canopy height (cm) parameters either constant through season or as tanfunc with respect to biomass (g/m^2) -# Grasses Shrubs Trees Forbs +#--- Canopy height (cm) parameters +# Canopy height is either constant through season or +# is a function of monthly biomass (g / m^2) + +# Grasses Shrubs Trees Forbs 300.0 0.0 0.0 300.0 # xinflec 29.5 5.0 5.0 29.5 # yinflec 85. 100. 3000. 85. # range @@ -20,29 +36,38 @@ 0.0 50. 1200. 0.0 # if > 0 then constant canopy height (cm) -# --- Vegetation interception parameters: kSmax * log10(1 + LAI_live + kdead * LAI_dead) -# Grasses Shrubs Trees Forbs - 1.0 2.6 2.0 1.0 # kSmax (mm) - 1.0 0.1 0.01 0.5 # kdead (0-1 fraction) +#--- Vegetation interception parameters +# Equation: kSmax * log10(1 + LAI_live + kdead * LAI_dead) + +# Grasses Shrubs Trees Forbs + 1.0 2.6 2.0 1.0 # kSmax (mm) + 1.0 0.1 0.01 0.5 # kdead (0-1 fraction) + +#--- Litter interception parameters +# Equation: kSmax * log10(1 + litter_density) -# --- Litter interception parameters: kSmax * log10(1 + litter_density) -# Grasses Shrubs Trees Forbs - 0.113 0.113 0.290 0.113 # kSmax (mm) +# Grasses Shrubs Trees Forbs + 0.113 0.113 0.290 0.113 # kSmax (mm) -# ---- Parameter for partitioning of bare-soil evaporation and transpiration as in Es = exp(-param*LAI) -# Grasses Shrubs Trees Forbs - 1. 1. 0.41 1. # Trees: According to a regression based on a review by Daikoku, K., S. Hattori, A. Deguchi, Y. Aoki, M. Miyashita, K. Matsumoto, J. Akiyama, S. Iida, T. Toba, Y. Fujita, and T. Ohta. 2008. Influence of evaporation from the forest floor on evapotranspiration from the dry canopy. Hydrological Processes 22:4083-4096. 
+#---- Parameter for partitioning of bare-soil evaporation and transpiration +# Equation: Es = exp(-param*LAI) +# Default value for trees derived from a regression based on data from a +# review by Daikoku et al. (2008) Hydrological Processes 22:4083-4096. +# Grasses Shrubs Trees Forbs + 1. 1. 0.41 1. -# ---- Parameter for scaling and limiting bare soil evaporation rate: if totagb (g/m2) > param then no bare-soil evaporation -# Grasses Shrubs Trees Forbs - 999. 999. 2099. 999. # +# --- Parameter for scaling and limiting bare soil evaporation rate +# If totagb (g / m2) > param then no bare-soil evaporation +# Grasses Shrubs Trees Forbs + 999. 999. 2099. 999. -# --- Shade effects on transpiration based on live and dead biomass -# Grasses Shrubs Trees Forbs + +#--- Shade effects on transpiration based on live and dead biomass +# Grasses Shrubs Trees Forbs 0.3 0.3 0.3 0.3 # shade scale 150. 150. 150. 150. # shade maximal dead biomass 300. 300. 0. 300. # tanfunc: xinflec @@ -51,37 +76,58 @@ 0.002 0.002 0.0002 0.002 # slope -# ---- Hydraulic redistribution: Ryel, Ryel R, Caldwell, Caldwell M, Yoder, Yoder C, Or, Or D, Leffler, Leffler A. 2002. Hydraulic redistribution in a stand of Artemisia tridentata: evaluation of benefits to transpiration assessed with a simulation model. Oecologia 130: 173-184. -# Grasses Shrubs Trees Forbs +#---- Hydraulic redistribution +# Implemenation based on Ryel et al. (2002) Oecologia 130: 173-184. +# Grasses Shrubs Trees Forbs 1 1 1 1 # flag to turn on/off (1/0) hydraulic redistribution -0.2328 -0.2328 -0.2328 -0.2328 # maxCondroot - maximum radial soil-root conductance of the entire active root system for water (cm/-bar/day) = 0.097 cm/MPa/h 10. 10. 10. 10. # swp50 - soil water potential (-bar) where conductance is reduced by 50% = -1. 
MPa 3.22 3.22 3.22 3.22 # shapeCond - shaping parameter for the empirical relationship from van Genuchten to model relative soil-root conductance for water -# ---- Critical soil water potential (MPa), i.e., when transpiration rates cannot sustained anymore, for instance, for many crop species -1.5 MPa is assumed and called wilting point -# Grasses Shrubs Trees Forbs +#---- Critical soil water potential (MPa) +# Soil water potential below which transpiration rates cannot be sustained, +# for instance, for many crop species -1.5 MPa is assumed (wilting point) + +# Grasses Shrubs Trees Forbs -3.5 -3.9 -2.0 -2.0 -# ---- CO2 Coefficients: multiplier = Coeff1 * x^Coeff2 -# Coefficients assume that monthly biomass inputs reflect values for conditions at -# 360 ppm CO2, i.e., multiplier = 1 for x = 360 ppm CO2 -# Grasses Shrubs Trees Forbs +#---- Effects of atmospheric CO2 concentrations +# Equation: multiplier = Coeff1 * x^Coeff2 +# Coefficients assume that monthly biomass inputs reflect values for conditions +# at 360 ppm CO2, i.e., multiplier = 1 for x = 360 ppm CO2 + +# Grasses Shrubs Trees Forbs 0.1319 0.1319 0.1319 0.1319 # Biomass Coeff1 0.3442 0.3442 0.3442 0.3442 # Biomass Coeff2 25.158 25.158 25.158 25.158 # WUE Coeff1 -0.548 -0.548 -0.548 -0.548 # WUE Coeff2 -# Grasslands component: -# -------------- Monthly production values ------------ +#------ Mean monthly biomass values +# Input biomass per unit area (g / m2) for each vegetation type represent +# values as if that vegetation type covers 100% of the simulated surface. +# That way input biomass values are independent of the input composition values. 
+# For example, +# - inputs for forbs: fCover = 0.4, biomass = 300 g/m2 +# - inputs for grasses: fCover = 0.6, biomass = 450 g/m2 +# Then, SOILWAT2 simulates a surface with vegetation of +# fCover = 1 (= 0.4 + 0.6) +# and a +# total biomass = 390 g / m2 (= 0.4 * 300 + 0.6 * 450) +# of which +# forb biomass = 120 g / m2 (0.4 * 300) +# and +# grass biomass = 270 g/m2 (= 0.6 * 450). + # Litter - dead leafy material on the ground (g/m^2 ). # Biomass - living and dead/woody aboveground standing biomass (g/m^2). # %Live - proportion of Biomass that is actually living (0-1.0). # LAI_conv - monthly amount of biomass needed to produce LAI=1.0 (g/m^2). # There should be 12 rows, one for each month, starting with January. -# + +# Grasslands component: #Litter Biomass %Live LAI_conv 75.0 150.0 0.00 300. # January 80.0 150.0 0.00 300. # February diff --git a/tests/test_data/TestPrj4/1_Input/treatments/tr_siteparamin/siteparam.in b/tests/test_data/TestPrj4/1_Input/treatments/tr_siteparamin/siteparam.in index 97cc3c8b..c28092c6 100755 --- a/tests/test_data/TestPrj4/1_Input/treatments/tr_siteparamin/siteparam.in +++ b/tests/test_data/TestPrj4/1_Input/treatments/tr_siteparamin/siteparam.in @@ -1,73 +1,92 @@ -# ---- SWC limits ---- --1 # swc_min : cm/cm if 0 - <1.0, -bars if >= 1.0.; if < 0. then estimate residual water content for each layer -15. # swc_init: cm/cm if < 1.0, -bars if >= 1.0. -15. # swc_wet : cm/cm if < 1.0, -bars if >= 1.0. +#------ Input file for site location, initialization, and +# miscellaneous model parameters -# ---- Model flags and coefficients ---- -0 # reset (1/0): reset/don't reset swc each new year -1 # deepdrain (1/0): allow/disallow deep drainage function. - # if deepdrain == 1, model expects extra layer in soils file. -1 # multiplier for PET (eg for climate change). 
-0 #proportion of ponded surface water removed as runoff daily (value ranges between 0 and 1; 0=no loss of surface water, 1=all ponded water lost via runoff) +# USER: The site location and the transpiration regions should +# be adjusted for site-specific run conditions. -# ---- Snow simulation parameters (SWAT2K model): Neitsch S, Arnold J, Kiniry J, Williams J. 2005. Soil and water assessment tool (SWAT) theoretical documentation. version 2005. Blackland Research Center, Texas Agricultural Experiment Station: Temple, TX. -# these parameters are RMSE optimized values for 10 random SNOTEL sites for western US +# USER: Most of the other values in this file are parameters that +# describe various model processes and should be considered fixed. + + +#---- Soil water content initialization, minimum, and wet condition +-1.0 # swc_min : cm/cm if 0 - <1.0, -bars if >= 1.0.; if < 0. then estimate residual water content for each layer +15.0 # swc_init: cm/cm if < 1.0, -bars if >= 1.0. +15.0 # swc_wet : cm/cm if < 1.0, -bars if >= 1.0. + +#---- Diffuse recharge and runoff/runon +0 # reset (1/0): do/don't reset soil water content for each year +1 # deepdrain (1/0): allow/disallow deep drainage (diffuse recharge) +1.0 # multiplier for PET (e.g., for climate change scenarios) +0.0 # proportion of ponded surface water removed as daily runoff (value ranges between 0 and 1; 0=no loss of surface water, 1=all ponded water lost via runoff) +0.0 # proportion of water that arrives at surface added as daily runon [from a hypothetical identical neighboring site] (value ranges between 0 and +inf; 0=no runon, >0: runon is occuring) + +#---- Snow simulation +# based on the SWAT2K model by Neitsch et al. (2005) +# Current values are RMSE optimized based on 10 random SNOTEL sites for western US 0.61 # TminAccu2 = Avg. air temp below which ppt is snow ( C) 1.54 # TmaxCrit = Snow temperature at which snow melt starts ( C) -0.10 # lambdasnow = Relative contribution of avg. 
air temperature to todays snow temperture vs. yesterday's snow temperature (0-1) -0.0 # RmeltMin = Minimum snow melt rate on winter solstice (cm/day/C) +0.1 # lambdasnow = Relative contribution of avg. air temperature to today's snow temperature vs. yesterday's snow temperature (0-1) +0.0 # RmeltMin = Minimum snow melt rate on winter solstice (cm/day/C) 0.27 # RmeltMax = Maximum snow melt rate on summer solstice (cm/day/C) -# ---- Drainage coefficient ---- -0.02 # slow-drain coefficient per layer (cm/day). See Eqn 2.9 in ELM doc. - # ELM shows this as a value for each layer, but this way it's applied to all. - # (Q=.02 in ELM doc, .06 in FORTRAN version). +#---- Hydraulic conductivity +0.02 # Parameter (cm / day) for unsaturated hydraulic conductivity, + # previously called slow-drain coefficient; See Eqn 2.9 in Parton 1978. -# ---- Evaporation coefficients ---- -# These control the tangent function (tanfunc) which affects the amount of soil -# water extractable by evaporation and transpiration. -# These constants aren't documented by the ELM doc. -45. # rate shift (x value of inflection point). lower value shifts curve - # leftward, meaning less water lost to evap at a given swp. effectively - # shortens/extends high rate. -.1 # rate slope: lower value (eg .01) straightens S shape meaning more gradual - # reduction effect; higher value (.5) makes abrupt transition -.25 # inflection point (y-value of inflection point) -0.5 # range: diff btw upper and lower rates at the limits +#---- Evaporation +# These control the tangent function (tanfunc) which affects the amount of soil +# water extractable by bare-soil evaporation and transpiration. +45. # rate shift (x value of inflection point). lower value shifts curve + # leftward, meaning less water lost to evap at a given swp. effectively + # shortens/extends high rate.
+.1 # rate slope: lower value (eg .01) straightens S shape meaning more gradual + # reduction effect; higher value (.5) makes abrupt transition +.25 # inflection point (y-value of inflection point) +0.5 # range: diff btw upper and lower rates at the limits -# ---- Transpiration Coefficients ---- -# comments from Evap constants apply. -45. # rate shift -.1 # rate shape -.5 # inflection point -1.1 # range +#---- Transpiration +# These control the tangent function (tanfunc) which affects the amount of soil +# water extractable by transpiration. +45. # rate shift +.1 # rate shape +.5 # inflection point +1.1 # range -# ---- Intrinsic site params: -0.681 # latitude of the site in radians -1651 # altitude of site (m a.s.l.) -0 # slope at site (degrees): no slope = 0 --1 # aspect at site (degrees): N=0, E=90, S=180, W=270, no slope:-1 +#---- Site location and topography +-105.58 # longitude of the site in degrees (W < 0; E > 0) + # (currently not used by simulation, but useful for site documentation) +39.59 # latitude of the site in degrees (N > 0; S < 0) +1000 # elevation of site (m a.s.l.) +0 # slope at site (degrees): no slope = 0, vertical = 90 +999 # aspect at site (degrees): S=0, E=-90, N=±180, W=90; ignored if slope = 0 or aspect = 999 -# ---- Soil Temperature Constants ---- +#---- Soil temperature # from Parton 1978, ch. 2.2.2 Temperature-profile Submodel 300. # biomass limiter, 300 g/m^2 in Parton's equation for T1(avg daily temperature at the top of the soil) -15. # constant for T1 equation (used if biomass <= biomass limiter), 15 in Parton's equation --4. # constant for T1 equation (used if biomass > biomass limiter), -4 in Parton's equation -600. # constant for T1 equation (used if biomass > biomass limiter), 600 in Parton's equation +15. # constant for T1 equation (used if biomass <= biomass limiter), 15 in Parton's equation +-4. # constant for T1 equation (used if biomass > biomass limiter), -4 in Parton's equation +600. 
# constant for T1 equation (used if biomass > biomass limiter), 600 in Parton's equation 0.00070 # constant for cs (soil-thermal conductivity) equation, 0.00070 in Parton's equation -0.00030 # constant for cs equation, 0.00030 in Parton's equation +0.00030 # constant for cs equation, 0.00030 in Parton's equation 0.18 # constant for sh (specific heat capacity) equation, 0.18 in Parton's equation -6.69 # constant mean air temperature (the soil temperature at the lower boundary, 180 cm) in celsius -15. # deltaX parameter for soil_temperature function, default is 15. (distance between profile points in cm) max depth (the next number) should be evenly divisible by this number -990. # max depth for the soil_temperature function equation, default is 180. this number should be evenly divisible by deltaX -1 # flag, 1 to calculate soil_temperature, 0 to not calculate soil_temperature +4.15 # constant soil temperature (Celsius) at the lower boundary (max depth); approximate by mean annual air temperature of site +15. # deltaX parameter for soil_temperature function, default is 15. (distance between profile points in cm) max depth (the next number) should be evenly divisible by this number +990. # max depth for the soil_temperature function equation, default is 990. this number should be evenly divisible by deltaX +1 # flag, 1 to calculate soil_temperature, 0 to not calculate soil_temperature +# ---- CO2 Settings ---- +# Activate (1) / deactivate (0) biomass multiplier +1 +# Activate (1) / deactivate (0) water-usage efficiency multiplier +1 +# Name of CO2 scenario: see input file `carbon.in` +RCP85 -# ---- Transpiration regions ---- +#---- Transpiration regions # ndx : 1=shallow, 2=medium, 3=deep, 4=very deep -# layer: deepest layer number of the region. -# Layers are defined in soils.in. -# ndx layer +# layer: deepest soil layer number of the region. 
+# Must agree with soil layers as defined in `soils.in` +# ndx layer 1 6 2 9 3 11 diff --git a/tests/test_data/TestPrj4/1_Input/treatments/tr_weathersetupin/weathsetup.in b/tests/test_data/TestPrj4/1_Input/treatments/tr_weathersetupin/weathsetup.in index 2c65d632..55ddd973 100644 --- a/tests/test_data/TestPrj4/1_Input/treatments/tr_weathersetupin/weathsetup.in +++ b/tests/test_data/TestPrj4/1_Input/treatments/tr_weathersetupin/weathsetup.in @@ -1,32 +1,37 @@ -# Weather setup parameters -# Location: Chimney Park, WY (41.068° N, 106.1195° W, 2740 m elevation) -# -1 # 1=allow snow accumulation, 0=no snow effects. -0.0 # % of snow drift per snow event (+ indicates snow addition, - indicates snow taken away from site) -0.0 # % of snowmelt water as runoff/on per event (>0 indicates runoff, <0 indicates runon) -0 # 0=use historical data only, 1=use markov process for missing weather. -1979 # first year to begin historical weather. -5 # number of days to use in the running average of temperature. +#------ Input file for weather-related parameters and weather generator settings -# Monthly scaling parameters. +#--- Activate/deactivate simulation of snow-related processes +1 # 1 = do/don't simulate snow processes +0.0 # snow drift/blowing snow (% per snow event): > 0 is adding snow, < 0 is removing snow +0.0 # runoff/runon of snowmelt water (% per snowmelt event): > 0 is runoff, < 0 is runon + + +#--- Activate/deactivate weather generator / historical daily weather inputs +0 # 0 = use historical data only + # 1 = use weather generator for (partially) missing weather inputs + # 2 = use weather generator for all weather (don't check weather inputs) +-1 # first year to begin historical weather + # if -1, then use first year of simulation (see `years.in`) + + +#--- Monthly scaling parameters: # Month 1 = January, Month 2 = February, etc. -# PPT = multiplicative for PPT (scale*ppt). -# MaxT = additive for max temp (scale+maxtemp). -# MinT = additive for min temp (scale+mintemp). 
+# PPT = multiplicative for daily PPT; max(0, scale * ppt) +# MaxT = additive for daily max temperature [C]; scale + maxtemp +# MinT = additive for daily min temperature [C]; scale + mintemp # SkyCover = additive for mean monthly sky cover [%]; min(100, max(0, scale + sky cover)) # Wind = multiplicative for mean monthly wind speed; max(0, scale * wind speed) # rH = additive for mean monthly relative humidity [%]; min(100, max(0, scale + rel. Humidity)) -# Transmissivity = multiplicative for mean monthly relative transmissivity; min(1, max(0, scale * transmissivity)) -#Mon PPT MaxT MinT SkyCover Wind rH Transmissivity - 1 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 2 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 3 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 4 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 5 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 6 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 7 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 8 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 9 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 10 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 11 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 - 12 1.000000 0.000000 0.000000 0.000000 1.000000 0.000000 1.000000 +#Mon PPT MaxT MinT SkyCover Wind rH +1 1.000 0.00 0.00 0.0 1.0 0.0 +2 1.000 0.00 0.00 0.0 1.0 0.0 +3 1.000 0.00 0.00 0.0 1.0 0.0 +4 1.000 0.00 0.00 0.0 1.0 0.0 +5 1.000 0.00 0.00 0.0 1.0 0.0 +6 1.000 0.00 0.00 0.0 1.0 0.0 +7 1.000 0.00 0.00 0.0 1.0 0.0 +8 1.000 0.00 0.00 0.0 1.0 0.0 +9 1.000 0.00 0.00 0.0 1.0 0.0 +10 1.000 0.00 0.00 0.0 1.0 0.0 +11 1.000 0.00 0.00 0.0 1.0 0.0 +12 1.000 0.00 0.00 0.0 1.0 0.0 diff --git a/tests/test_data/TestPrj4/SFSW2_project_code.R b/tests/test_data/TestPrj4/SFSW2_project_code.R index 8ccc3b81..67599b24 100644 --- 
a/tests/test_data/TestPrj4/SFSW2_project_code.R +++ b/tests/test_data/TestPrj4/SFSW2_project_code.R @@ -5,7 +5,7 @@ # EXECUTING SIMULATIONS, AND AGGREGATING OUTPUTS #----- LICENSE -# Copyright (C) 2017 by `r packageDescription("rSFSW2")[["Author"]]` +# Copyright (C) 2017-2019 by `r packageDescription("rSFSW2")[["Author"]]` # Contact information `r packageDescription("rSFSW2")[["Maintainer"]]` # This program is free software: you can redistribute it and/or modify @@ -19,8 +19,8 @@ # GNU General Public License for more details. #------ NOTES: -# - You get an overview by: `r package?rSFSW2` -# - An index of functionality is displayed by: `r help(package = "rSFSW2")` +# - Display a package overview: `r package?rSFSW2` +# - List package functionality: `r help(package = "rSFSW2")` #------------------------------------------------------------------------------# @@ -68,16 +68,23 @@ actions <- list( dir_prj <- getwd() -writeLines(c("", "", +writeLines(c( + "", "", "###########################################################################", - paste("#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), "run started at", - t_job_start), + paste( + "#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), + "run started at", t_job_start + ), "###########################################################################", - "")) + "" +)) SFSW2_prj_meta <- init_rSFSW2_project( - fmetar = file.path(dir_prj, "SFSW2_project_descriptions.R"), update = FALSE, - verbose = interactive(), print.debug = FALSE) + fmetar = file.path(dir_prj, "SFSW2_project_descriptions.R"), + update = FALSE, + verbose = interactive(), + print.debug = FALSE +) @@ -85,25 +92,42 @@ SFSW2_prj_meta <- init_rSFSW2_project( #------ 2) LOAD THE SETTINGS FOR THIS RUN -------------------------------------- # Setting objects: # opt_behave, opt_parallel, opt_verbosity, opt_out_run, opt_chunks -source(file.path(dir_prj, "SFSW2_project_settings.R"), verbose = interactive(), - keep.source = FALSE) +source( + file = 
file.path(dir_prj, "SFSW2_project_settings.R"), + keep.source = FALSE +) -SFSW2_prj_meta <- update_actions(SFSW2_prj_meta, actions, - wipe_dbOutput = opt_out_run[["wipe_dbOutput"]]) +SFSW2_prj_meta <- update_actions( + SFSW2_prj_meta, + actions, + wipe_dbOutput = opt_out_run[["wipe_dbOutput"]] +) ################################################################################ #------ 3) POPULATE PROJECT WITH INPUT DATA (REPEAT UNTIL COMPLETE) ------------ -temp <- populate_rSFSW2_project_with_data(SFSW2_prj_meta, opt_behave, - opt_parallel, opt_chunks, opt_out_run, opt_verbosity) +temp <- populate_rSFSW2_project_with_data( + SFSW2_prj_meta, + opt_behave, + opt_parallel, + opt_chunks, + opt_out_run, + opt_verbosity +) -if (isTRUE(opt_verbosity[["verbose"]]) && - !identical(SFSW2_prj_meta, temp[["SFSW2_prj_meta"]])) { - warning("'SFSW2_prj_meta' has changed: modify/reset input tracker status ", +if ( + isTRUE(opt_verbosity[["verbose"]]) && + !identical(SFSW2_prj_meta, temp[["SFSW2_prj_meta"]]) +) { + warning( + "'SFSW2_prj_meta' has changed: ", + "modify/reset input tracker status ", "'SFSW2_prj_meta[['input_status']]', if needed ", "(see help `?update_intracker`) and re-run project.", - call. = FALSE, immediate. = TRUE) + call. = FALSE, + immediate. 
= TRUE + ) } SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] @@ -116,17 +140,27 @@ SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] if (isTRUE(actions[["check_inputs"]])) { - temp <- check_rSFSW2_project_input_data(SFSW2_prj_meta, SFSW2_prj_inputs, - opt_chunks, opt_verbosity) + temp <- check_rSFSW2_project_input_data( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_chunks, + opt_verbosity + ) SFSW2_prj_meta <- temp[["SFSW2_prj_meta"]] SFSW2_prj_inputs <- temp[["SFSW2_prj_inputs"]] - if (isTRUE(opt_verbosity[["verbose"]]) && - !all(stats::na.exclude(SFSW2_prj_meta[["input_status"]][, "checked"]))) { - warning("'SFSW2_prj_meta[['input_status']]': some input tracker checks ", - "failed; fix inputs, if needed, and re-run project.", - call. = FALSE, immediate. = TRUE) + if ( + isTRUE(opt_verbosity[["verbose"]]) && + !all(stats::na.exclude(SFSW2_prj_meta[["input_status"]][, "checked"])) + ) { + warning( + "'SFSW2_prj_meta[['input_status']]': ", + "some input tracker checks failed; ", + "fix inputs, if needed, and re-run project.", + call. = FALSE, + immediate. 
= TRUE + ) } } @@ -137,16 +171,30 @@ if (isTRUE(actions[["check_inputs"]])) { if (any(unlist(actions[c("sim_create", "sim_execute", "sim_aggregate")]))) { - SFSW2_prj_meta <- simulate_SOILWAT2_experiment(SFSW2_prj_meta, - SFSW2_prj_inputs, opt_behave, opt_parallel, opt_chunks, opt_out_run, - opt_verbosity) + SFSW2_prj_meta <- simulate_SOILWAT2_experiment( + SFSW2_prj_meta, + SFSW2_prj_inputs, + opt_behave, + opt_parallel, + opt_chunks, + opt_out_run, + opt_verbosity + ) } if (isTRUE(actions[["concat_dbOut"]])) { - stopifnot(move_output_to_dbOutput(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_behave, opt_out_run, opt_verbosity, - check_if_Pid_present = opt_verbosity[["print.debug"]])) + stopifnot( + move_output_to_dbOutput( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity, + check_if_Pid_present = opt_verbosity[["print.debug"]] + ) + ) } @@ -156,8 +204,13 @@ if (isTRUE(actions[["concat_dbOut"]])) { if (isTRUE(actions[["ensemble"]])) { - rSFSW2:::generate_ensembles(SFSW2_prj_meta, t_job_start, opt_parallel, - opt_chunks, verbose = opt_verbosity[["verbose"]]) + rSFSW2:::generate_ensembles( + SFSW2_prj_meta, + t_job_start, + opt_parallel, + opt_chunks, + verbose = opt_verbosity[["verbose"]] + ) } @@ -167,8 +220,13 @@ if (isTRUE(actions[["ensemble"]])) { if (isTRUE(actions[["check_dbOut"]])) { - info_missing <- check_outputDB_completeness(SFSW2_prj_meta, opt_parallel, - opt_behave, opt_out_run, opt_verbosity) + info_missing <- check_outputDB_completeness( + SFSW2_prj_meta, + opt_parallel, + opt_behave, + opt_out_run, + opt_verbosity + ) } @@ -182,8 +240,11 @@ exit_SFSW2_cluster(verbose = opt_verbosity[["verbose"]]) #--- Goodbye message writeLines(c("", "###########################################################################", - paste("#------ rSFSW2-PROJECT:", shQuote(basename(dir_prj)), "run on", - SFSW2_prj_meta[["opt_platform"]][["host"]], "platform ended at", - Sys.time()), + paste( + "#------ rSFSW2-PROJECT:", 
shQuote(basename(dir_prj)), + "run on", SFSW2_prj_meta[["opt_platform"]][["host"]], "platform ended at", + Sys.time() + ), "###########################################################################", - "")) + "" +)) diff --git a/tests/test_data/TestPrj4/SFSW2_project_descriptions.R b/tests/test_data/TestPrj4/SFSW2_project_descriptions.R index 7cf90add..52c9e3fa 100644 --- a/tests/test_data/TestPrj4/SFSW2_project_descriptions.R +++ b/tests/test_data/TestPrj4/SFSW2_project_descriptions.R @@ -73,15 +73,15 @@ project_paths <- list( #------ Base names or full names of input files fnames_in <- list( - fmaster = "SWRuns_InputMaster_Test_v11.csv", + fmaster = "SWRuns_InputMaster_Test_v12.csv", fslayers = "SWRuns_InputData_SoilLayers_v9.csv", - ftreatDesign = "SWRuns_InputData_TreatmentDesign_v17.csv", - fexpDesign = "SWRuns_InputData_ExperimentalDesign_v09.csv", + ftreatDesign = "SWRuns_InputData_TreatmentDesign_v18.csv", + fexpDesign = "SWRuns_InputData_ExperimentalDesign_v10.csv", fclimnorm = "SWRuns_InputData_cloud_v10.csv", fvegetation = "SWRuns_InputData_prod_v11.csv", - fsite = "SWRuns_InputData_siteparam_v14.csv", + fsite = "SWRuns_InputData_siteparam_v15.csv", fsoils = "SWRuns_InputData_soils_v12.csv", fweathersetup = "SWRuns_InputData_weathersetup_v10.csv", fclimscen_delta = "SWRuns_InputData_ClimateScenarios_Change_v11.csv", @@ -160,6 +160,11 @@ opt_input <- list( # file.path(project_paths[["dir_ex_weather"]], "Livneh_NA_2013", # "MONTHLY_GRIDS") "GriddedDailyWeatherFromLivneh2013_NorthAmerica", 0, + # - Abatzoglou et al. 2013: 1/24 degree res. for 1979-yesterday; + # data expected at file.path(project_paths[["dir_ex_weather"]], + # "gridMET_4km_NA", "YEARLY_GRIDS"); + # obtain data with function `gridMET_download_and_check` + "GriddedDailyWeatherFromgridMET_NorthAmerica", 0, # Monthly PPT, Tmin, Tmax conditions: if using NEX or GDO-DCP-UC-LLNL, # climate condition names must be of the form SCENARIO.GCM with @@ -222,8 +227,9 @@ opt_input <- list( # etc. 
# Do not change/remove/add entries; only re-order to set different priorities dw_source_priority = c("DayMet_NorthAmerica", "LookupWeatherFolder", - "Maurer2002_NorthAmerica", "Livneh2013_NorthAmerica", "NRCan_10km_Canada", - "NCEPCFSR_Global"), + "Maurer2002_NorthAmerica", "Livneh2013_NorthAmerica", + "gridMET_NorthAmerica", "NRCan_10km_Canada", "NCEPCFSR_Global" + ), # Creation of dbWeather # Compression type of dbWeather; one value of eval(formals(memCompress)[[2]]) @@ -287,7 +293,14 @@ opt_sim <- list( # temperate climate := has >=4 & < 8 months with > 10C # - 4 C based standard input of mean monthly biomass values described in # Bradford et al. 2014 Journal of Ecology - growseason_Tlimit_C = 4 + growseason_Tlimit_C = 4, + # Mean monthly reference temperature describing default phenology values + # - below values calculated by KP as median across 898 big sagebrush sites + # (see https://github.com/DrylandEcology/rSFSTEP2/issues/195) + reference_temperature_default_phenology = c( + -4.6768, -2.7282, 1.8257, 6.0538, 10.696, 15.3878, + 19.7777, 18.8755, 13.7868, 7.2843, 0.4167, -4.6912 + ) ) @@ -357,15 +370,24 @@ sim_time <- list( startyr = startyr <- 1980, endyr = endyr <- 2010, - #Future time period(s): - # Each list element of 'future_yrs' will be applied to every - # climate.conditions + #--- Future time period(s): # Each list element of 'future_yrs' is a vector with three elements # \code{c(delta, DSfut_startyr, DSfut_endyr)} - # future simulation years = delta + simstartyr:endyr - # future simulation years downscaled based on - # - current conditions = DScur_startyr:DScur_endyr - # - future conditions = DSfut_startyr:DSfut_endyr + # + # Daily scenario data "idem" (pass through): + # - Each list element ("row") of 'future_yrs' must match exactly one + # of the scenario experiments of 'req_scens[["models"]]` + # (e.g., "historical", "RCP45", "RCP85") -- in the correct order + # - Value of 'delta' is ignored + # + # Monthly scenario data: + # - Each list 
element ("row") of 'future_yrs' will be applied to every + # climate.conditions + # - future simulation years = delta + simstartyr:endyr + # - future simulation years downscaled based on + # - current conditions = DScur_startyr:DScur_endyr + # - future conditions = DSfut_startyr:DSfut_endyr + # # NOTE: Multiple time periods doesn't work with external type # 'ClimateWizardEnsembles' DScur_startyr = startyr, @@ -384,6 +406,11 @@ req_scens <- list( # perturbations are all off ambient = "Current", + # Name of atmospheric CO2-concentration dataset to be used for "ambient" + # conditions. + # The string must match a column name of `LookupCO2data/AtmosCO2.csv` + tag_aCO2_ambient = "Fix360ppm", # e.g., "Fix360ppm", etc. + # Names of climate scenarios # - If a simulation project does not include future climate conditions, # then set models = NULL @@ -414,11 +441,15 @@ req_scens <- list( # requires live internet access # - "BCSD_SageSeer_USA": monthly time-series at 1-km resolution for the # western US prepared by Katie Renwick + # - "CMIP5_MACAv2metdata_USA": daily time series at 1/24-degree + # resolution for the US (requires `method_DS = "idem"`) # - "ESGF_Global": monthly time-series at varying resolution dataset1 = "CMIP5_BCSD_GDODCPUCLLNL_USA" ), # Downscaling method (applied to each each climate.conditions) + # Daily scenario data + # - "idem" (pass through) # Monthly scenario -> daily forcing variables # One or multiple elements of # - "raw" diff --git a/tests/test_data/TestPrj4/SFSW2_project_settings.R b/tests/test_data/TestPrj4/SFSW2_project_settings.R index 71a8ae97..d8692ea7 100644 --- a/tests/test_data/TestPrj4/SFSW2_project_settings.R +++ b/tests/test_data/TestPrj4/SFSW2_project_settings.R @@ -98,8 +98,11 @@ opt_verbosity <- list( #------ Output options opt_out_run <- list( # Write rSOILWAT2 input and output to disk for each SOILWAT2 simulation - saveRsoilwatInput = FALSE, - saveRsoilwatOutput = FALSE, + saveRsoilwatInput = TRUE, + saveRsoilwatOutput = TRUE, + 
+ # Enforce that rSOILWAT2 objects meet the current version requirement + enforce_rSW2_version = TRUE, # Write data to big input files for experimental design x treatment design makeInputForExperimentalDesign = FALSE, diff --git a/tests/test_data/TestPrj4/SFSW2_project_xMaintenance_dbOutputTemp.R b/tests/test_data/TestPrj4/SFSW2_project_xMaintenance_dbOutputTemp.R index 8cd0f869..e655c4d9 100755 --- a/tests/test_data/TestPrj4/SFSW2_project_xMaintenance_dbOutputTemp.R +++ b/tests/test_data/TestPrj4/SFSW2_project_xMaintenance_dbOutputTemp.R @@ -5,7 +5,7 @@ # EXECUTING SIMULATIONS, AND AGGREGATING OUTPUTS #----- LICENSE -# Copyright (C) 2017 by `r packageDescription("rSFSW2")[["Author"]]` +# Copyright (C) 2017-2019 by `r packageDescription("rSFSW2")[["Author"]]` # Contact information `r packageDescription("rSFSW2")[["Maintainer"]]` # This program is free software: you can redistribute it and/or modify diff --git a/tests/testthat/test_ExtractData_ClimateDownscaling.R b/tests/testthat/test_ExtractData_ClimateDownscaling.R index 776f7667..b25f19a6 100644 --- a/tests/testthat/test_ExtractData_ClimateDownscaling.R +++ b/tests/testthat/test_ExtractData_ClimateDownscaling.R @@ -1,32 +1,6 @@ context("Extracting external climate scenario data and downscaling") #---TESTS -# Test 'convert_temperature' -temp_C <- c(-50, 0, 50) -temp_F <- c(-58, 32, 122) -temp_K <- 273.15 + temp_C -# Test 'convert_precipitation' -dpm <- c(31, 28, 31) -temp_ppt <- c(1.5, 0.3, 0) - -test_that("Unit conversion", { - expect_equal(convert_temperature(temp_C, "C"), temp_C) - expect_equal(convert_temperature(temp_F, "F"), temp_C, tolerance = 1e-6) - expect_equal(convert_temperature(temp_K, "K"), temp_C) - expect_error(convert_temperature(temp_F, "degree F")) - expect_error(convert_temperature(temp_K, "K", unit_to = "F")) - - expect_equal(convert_precipitation(temp_ppt, dpm, "cm/month"), temp_ppt) - expect_equal(convert_precipitation(temp_ppt, dpm, "mm/month"), temp_ppt / 10) - 
expect_equal(convert_precipitation(temp_ppt, dpm, "mm/d"), - temp_ppt * dpm / 10) - expect_equal(convert_precipitation(temp_ppt, dpm, "cm/d"), temp_ppt * dpm) - expect_equal(convert_precipitation(temp_ppt, dpm, "kg m-2 s-1"), - temp_ppt * dpm * 8640) - expect_error(convert_precipitation(temp_ppt, dpm, "L m-2")) - expect_error(convert_precipitation(temp_ppt, dpm, "cm/month", - unit_to = "L m-2")) -}) # Test 'get_time_unit' temp <- c(day = 1, days = 1, d = 1, ds = NA, @@ -51,7 +25,8 @@ test_that("Time units", { # Test 'climscen_metadata' req_metadata_fields1 <- c("bbox", "tbox", "var_desc", "sep_fname", "str_fname") -req_metadata_fields2 <- c("tag", "fileVarTags", "unit_given", "unit_real") +req_metadata_fields2 <- + c("varname", "tag", "fileVarTags", "unit_given", "unit_real") test_that("Check integrity of 'climscen_metadata'", { expect_silent(climDB_metas <- climscen_metadata()) @@ -61,9 +36,12 @@ test_that("Check integrity of 'climscen_metadata'", { for (k in seq_along(climDB_metas)) { expect_type(climDB_metas[[k]], "list") - expect_true(all(req_metadata_fields1 %in% names(climDB_metas[[k]]))) - expect_named(climDB_metas[[k]][["var_desc"]], - expected = req_metadata_fields2) + expect_true( + all(req_metadata_fields1 %in% names(climDB_metas[[k]])) + ) + expect_true( + all(req_metadata_fields2 %in% names(climDB_metas[[k]][["var_desc"]])) + ) } }) @@ -164,23 +142,3 @@ test_that("Check 'climate scenario simulation time slices'", { expect_equal(length(unlist(assocYears)), 28L) expect_equal(sum(unlist(assocYears)), 8L) }) - - -# Test 'is_ClimateForecastConvention' and 'is_NEX' -test_that("Check convenction of requested climate data'", { - expect_false( - is_ClimateForecastConvention("CMIP3_ClimateWizardEnsembles_Global")) - expect_false(is_NEX("CMIP3_ClimateWizardEnsembles_Global")) - expect_false(is_ClimateForecastConvention("CMIP3_ClimateWizardEnsembles_USA")) - expect_false(is_NEX("CMIP3_ClimateWizardEnsembles_USA")) - 
expect_true(is_ClimateForecastConvention("CMIP5_BCSD_GDODCPUCLLNL_USA")) - expect_false(is_NEX("CMIP5_BCSD_GDODCPUCLLNL_USA")) - expect_true(is_ClimateForecastConvention("CMIP5_BCSD_GDODCPUCLLNL_Global")) - expect_false(is_NEX("CMIP5_BCSD_GDODCPUCLLNL_Global")) - expect_false(is_ClimateForecastConvention("CMIP5_BCSD_NEX_USA")) - expect_true(is_NEX("CMIP5_BCSD_NEX_USA")) - expect_true(is_ClimateForecastConvention("CMIP5_BCSD_SageSeer_USA")) - expect_false(is_NEX("CMIP5_BCSD_SageSeer_USA")) - expect_true(is_ClimateForecastConvention("CMIP5_ESGF_Global")) - expect_false(is_NEX("CMIP5_ESGF_Global")) -}) diff --git a/tests/testthat/test_GISSM_germination_wait_times.R b/tests/testthat/test_GISSM_germination_wait_times.R deleted file mode 100644 index c810592a..00000000 --- a/tests/testthat/test_GISSM_germination_wait_times.R +++ /dev/null @@ -1,113 +0,0 @@ -context("GISSM: germination_wait_times") - -# Inputs -test_data <- list( - real_data = list( - dfc = c(NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 132L, 131L, - 130L, 129L, 128L, 127L, 126L, 125L, 124L, 123L, 122L, 121L, 120L, - 119L, 118L, 117L, 116L, 115L, 114L, NA, 113L, 112L, 111L, 110L, - 109L, 108L, 107L, 106L, 105L, 104L, 103L, 102L, 101L, 100L, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 99L, 
98L, 97L, 96L, 95L, - 94L, 93L, 92L, 91L, 90L, 89L, NA, NA, NA, NA, NA, NA, NA, 88L, - 87L, 86L, 85L, 84L, 83L, 82L, 81L, 80L, 79L, 78L, 77L, 76L, 75L, - 74L, 73L, 72L, 71L, 70L, 69L, 68L, 67L, 66L, 65L, 64L, 63L, 62L, - 61L, 60L, 59L, 58L, 57L, 56L, 55L, 54L, 53L, 52L, 51L, 50L, 49L, - 48L, 47L, 46L, 45L, 44L, 43L, 42L, 41L, 40L, 39L, 38L, 37L, 36L, - 35L, 34L, 33L, 32L, 31L, 30L, 29L, 28L, NA, 27L, 26L, 25L, 24L, - 23L, 22L, 21L, 20L, 19L, 18L, 17L, 16L, 15L, 14L, 13L, 12L, 11L, - NA, 10L, 9L, NA, NA, NA, NA, 8L, 7L, 6L, NA, NA, NA, NA, NA, - NA, NA, NA, NA, 5L, NA, NA, NA, 4L, NA, NA, 3L, 2L, 1L, NA, NA, - NA, NA, NA, NA), - - ttg = c(NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 6L, 6L, 5L, - 5L, 6L, 5L, 4L, 7L, 4L, 5L, 7L, 4L, 9L, 3L, 7L, 6L, 10L, 2L, - 5L, NA, 4L, 3L, 4L, 3L, 2L, 8L, 4L, 4L, 5L, 10L, 4L, 4L, 5L, - 3L, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, - NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, 7L, 4L, 9L, 5L, - 3L, 5L, 5L, 4L, 8L, 7L, 6L, NA, NA, NA, NA, NA, NA, NA, 5L, 6L, - 5L, 5L, 6L, 7L, 5L, 5L, 4L, 9L, 5L, 4L, 3L, 5L, 4L, 6L, 5L, 3L, - 4L, 6L, 5L, 9L, 6L, 4L, 4L, 4L, 6L, 8L, 4L, 3L, 3L, 7L, 5L, 5L, - 5L, 4L, 5L, 3L, 3L, 4L, 6L, 5L, 6L, 2L, 7L, 7L, 5L, 6L, 4L, 5L, - 4L, 12L, 6L, 6L, 5L, 3L, 2L, 6L, 4L, 6L, 4L, NA, 11L, 7L, 7L, - 7L, 5L, 7L, 6L, 5L, 4L, 4L, 5L, 8L, 8L, 6L, 
4L, 4L, 9L, NA, 4L, - 2L, NA, NA, NA, NA, 6L, 5L, 4L, NA, NA, NA, NA, NA, NA, NA, NA, - NA, 5L, NA, NA, NA, 4L, NA, NA, 3L, 2L, 1L, NA, NA, NA, NA, NA, - NA), - - ref = c(0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 1L, 0L, 1L, - 1L, 1L, 0L, 1L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 27L, 0L, - 27L, 27L, 27L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 7L, 7L, 7L, 0L, - 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, - 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, - 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L, - 0L, 0L, 1L, 0L, 0L, 0L, 0L, 0L, 1L, 1L, 1L, 1L, 0L, 0L, 0L, 0L, - 0L, 0L, 0L, 0L, 0L, 0L, 0L, 1L, 5L, 1L, 1L, 1L, 19L, 4L, 4L, - 14L, 14L, 14L, 5L, 2L, 0L, 0L, 0L) - ), - - test1 = list(ttg = NA, dfc = NA, ref = integer(0)), - test2 = list(ttg = 1, dfc = 1, ref = 0L), - test3 = list(ttg = rep(1, 10), dfc = 10:1, ref = rep(0L, 10)), - test4 = list(ttg = temp <- c(2, NA, 1), dfc = temp, ref = c(1L, 0L)), - test5 = list(ttg = temp <- c(2, rep(NA, 10), 1), dfc = temp, - ref = c(10L, 0L)), - test6 = list(ttg = temp <- c(3, NA, NA, 2, NA, 1), dfc = temp, - ref = c(3L, 1L, 0L)), - test7 = list(ttg = c(3, 3, 3, NA, NA, 2, NA, 1), - dfc = c(5, 4, 3, NA, NA, 2, NA, 1), ref = c(0L, 2L, 3L, 1L, 0L)), - test8 = list(ttg = c(NA, 8, 1, 2, 1, NA, NA, NA, 3, 3, 3, NA, NA, 2, NA, 1), - dfc = c(NA, 9, 8, 7, 6, NA, NA, NA, 5, 4, 3, NA, NA, 2, NA, 1), - ref = c(5L, 0L, 0L, 0L, 0L, 2L, 3L, 1L, 0L)) -) - - -test_that("germination_wait_times", { - - for (k in seq_along(test_data)) - with(test_data[[k]], expect_equal( - as.integer(germination_wait_times(ttg, dfc)), ref, - info = paste("Test dataset =", shQuote(names(test_data)[k])))) - - if (FALSE) { - for (k in seq_along(test_data)[-1]) { - print(paste("Test =", k, "with dataset =", shQuote(names(test_data)[k]))) - print(paste("ttg =", paste(test_data[[k]][["ttg"]], collapse = ", "))) - print(paste("dfc =", paste(test_data[[k]][["dfc"]], collapse = ", "))) - print(paste("ref =", 
paste(test_data[[k]][["ref"]], collapse = ", "))) - out <- as.integer(germination_wait_times(test_data[[k]][["ttg"]], - test_data[[k]][["dfc"]])) - print(paste("out =", paste(out, collapse = ", "))) - print("") - } -} - - #--- Errors - # time_to_germinate is not NA, but duration_fave_cond is NA - expect_error(germination_wait_times(1, NA)) - # germination takes longer than available favorable condition - expect_error(germination_wait_times(2, 1)) - expect_error(germination_wait_times(c(3, NA, 1), c(2, NA, 1))) - # arguments not of identical length - expect_error(germination_wait_times(rep(1, 10), 8:1)) -}) diff --git a/tests/testthat/test_GISSM_get_KilledBySoilLayers.R b/tests/testthat/test_GISSM_get_KilledBySoilLayers.R deleted file mode 100644 index 0414cd78..00000000 --- a/tests/testthat/test_GISSM_get_KilledBySoilLayers.R +++ /dev/null @@ -1,46 +0,0 @@ -context("GISSM: get_KilledBySoilLayers") - - -# Inputs -Nd <- 365 -Nl <- 10 -Nl2 <- round(Nl / 2) -Nl3 <- round(Nl / 3) -cond1 <- matrix(FALSE, nrow = Nd, ncol = Nl) -cond2 <- matrix(TRUE, nrow = Nd, ncol = Nl) -cond3 <- cbind(matrix(TRUE, nrow = Nd, ncol = Nl2), - matrix(FALSE, nrow = Nd, ncol = Nl2)) -cond4 <- cbind(matrix(TRUE, nrow = Nd, ncol = Nl3), - matrix(FALSE, nrow = Nd, ncol = Nl3), - matrix(TRUE, nrow = Nd, ncol = Nl3)) - - - -test_that("get_KilledBySoilLayers", { - - expect_equal(get_KilledBySoilLayers(NA, cond1), NA) - expect_equal(get_KilledBySoilLayers(Nl, cond1), FALSE) - expect_equal(get_KilledBySoilLayers(Nl, cond2), TRUE) - expect_equal(get_KilledBySoilLayers(Nl2, cond3), TRUE) - expect_equal(get_KilledBySoilLayers(2 * Nl2, cond3), FALSE) - expect_equal(get_KilledBySoilLayers(Nl3, cond4), TRUE) - expect_equal(get_KilledBySoilLayers(2 * Nl3, cond4), FALSE) - expect_equal(get_KilledBySoilLayers(3 * Nl3, cond4), FALSE) - - expect_equal(get_KilledBySoilLayers(rep(NA, Nd), cond1), rep(NA, Nd)) - expect_equal(get_KilledBySoilLayers(rep(10, Nd), cond1), rep(FALSE, Nd)) - 
expect_equal(get_KilledBySoilLayers(rep(10, Nd), cond2), rep(TRUE, Nd)) - expect_equal(get_KilledBySoilLayers(rep(Nl2, Nd), cond3), rep(TRUE, Nd)) - expect_equal(get_KilledBySoilLayers(rep(2 * Nl2, Nd), cond3), rep(FALSE, Nd)) - expect_equal(get_KilledBySoilLayers(rep(Nl3, Nd), cond4), rep(TRUE, Nd)) - expect_equal(get_KilledBySoilLayers(rep(2 * Nl3, Nd), cond4), rep(FALSE, Nd)) - expect_equal(get_KilledBySoilLayers(rep(3 * Nl3, Nd), cond4), rep(FALSE, Nd)) - - #--- Errors - # relevantLayers: too long - expect_error(get_KilledBySoilLayers(rep(NA, Nd + 1), cond1)) - # relevantLayers: too large values - expect_error(get_KilledBySoilLayers(Nl + 1, cond1)) - # relevantLayers: negative values - expect_error(get_KilledBySoilLayers(-1, cond1)) -}) diff --git a/tests/testthat/test_GISSM_setFALSE_SeedlingSurvival_1stSeason.R b/tests/testthat/test_GISSM_setFALSE_SeedlingSurvival_1stSeason.R deleted file mode 100644 index 7118970b..00000000 --- a/tests/testthat/test_GISSM_setFALSE_SeedlingSurvival_1stSeason.R +++ /dev/null @@ -1,51 +0,0 @@ -context("GISSM: kill_seedling") - -# Inputs -calc_ref <- function(ss1s, offset, doy) { - ref <- ss1s - ref[offset + doy] <- FALSE - ref -} - -test_data <- list( - test1 = list( - ss1s = temp <- rep(TRUE, 10), ry_year_day = rep(1, 10), - ry_useyrs = 1, y = 1, doy = itemp <- 1, - ref = calc_ref(temp, 0, itemp)), - - test2 = list( - ss1s = temp <- rep(TRUE, 10), ry_year_day = rep(1, 10), - ry_useyrs = 1, y = 1, doy = itemp <- 10, - ref = calc_ref(temp, 0, itemp)), - - test3 = list( - ss1s = temp <- rep(TRUE, 30), ry_year_day = rep(1:3, each = 10), - ry_useyrs = 1:3, y = 3, doy = itemp <- 10, - ref = calc_ref(temp, 20, itemp)), - - test4 = list( - ss1s = temp <- rep(FALSE, 30), ry_year_day = rep(1:3, each = 10), - ry_useyrs = 1:3, y = 3, doy = itemp <- 10, - ref = temp) -) - - - -test_that("kill_seedling", { - - for (k in seq_along(test_data)) - with(test_data[[k]], - expect_equal( - kill_seedling(ss1s, ry_year_day, ry_useyrs, y, doy), - ref, 
- info = paste("Test dataset =", shQuote(names(test_data)[k])))) - - #--- Errors - if (requireNamespace("Rcpp")) { - expect_error(kill_seedling(rep(TRUE, 7), rep(1, 10), 1, 1, 1)) - expect_error(kill_seedling(rep(TRUE, 10), rep(1, 7), 1, 1, 1)) - expect_error(kill_seedling(rep(TRUE, 10), rep(1, 10), 7, 1, 1)) - expect_error(kill_seedling(rep(TRUE, 10), rep(1, 10), 1, 7, 1)) - expect_error(kill_seedling(rep(TRUE, 10), rep(1, 10), 1, 1, 70)) - } -}) diff --git a/tests/testthat/test_Mathematical_Functions.R b/tests/testthat/test_Mathematical_Functions.R deleted file mode 100644 index 51ab73b0..00000000 --- a/tests/testthat/test_Mathematical_Functions.R +++ /dev/null @@ -1,85 +0,0 @@ -context("Mathematical functions") - - - - -#--- Tests -test_that("Monotonicity:", { - #--- INPUTS - margins <- c("byrow", "bycolumn") - - temp <- matrix(NA, nrow = 5, ncol = 3) - # nolint start - test_matrices <- list( - x1 = temp, - x2 = {x <- temp; x[] <- 0; x}, - x3 = x3 <- {x <- temp; x[] <- 1:15; x}, - x4 = {x <- x3; x[2, 3] <- x[2, 2]; x}, - x5 = {x <- x3; x[2, 2:3] <- NA; x}, - x6 = {x <- x3; x[2, 3] <- 0; x} - ) - # nolint end - - replacement <- -99 - - # Expected outputs - good_nonstrict_matrices <- test_matrices - good_nonstrict_matrices[["x6"]] <- { - x <- test_matrices[["x6"]] - x[2, 3] <- replacement - x - } - - good_strict_matrices <- test_matrices - good_strict_matrices[["x1"]] <- { - x <- test_matrices[["x1"]] - x[] <- replacement - x - } - good_strict_matrices[["x2"]] <- { - x <- test_matrices[["x2"]] - x[, -1] <- replacement - x - } - good_strict_matrices[["x4"]] <- { - x <- test_matrices[["x4"]] - x[2, 3] <- replacement - x - } - good_strict_matrices[["x5"]] <- { - x <- test_matrices[["x5"]] - x[2, 2:3] <- replacement - x - } - good_strict_matrices[["x6"]] <- { - x <- test_matrices[["x6"]] - x[2, 3] <- replacement - x - } - - - #--- TESTS - for (it in seq_along(margins)) { - for (k in seq_along(test_matrices)) { - x <- test_matrices[[k]] - res_ns <- 
good_nonstrict_matrices[[k]] - res_s <- good_strict_matrices[[k]] - - if (it == 2) { - x <- t(x) - res_ns <- t(res_ns) - res_s <- t(res_s) - } - - expect_equal(res_ns, - check_monotonic_increase(x, MARGIN = it, increase = TRUE, - strictly = FALSE, fail = FALSE, replacement = replacement, - na.rm = FALSE)) - - expect_equal(res_s, - check_monotonic_increase(x, MARGIN = it, increase = TRUE, - strictly = TRUE, fail = FALSE, replacement = replacement, - na.rm = FALSE)) - } - } -}) diff --git a/tests/testthat/test_downscaling_fix_PPTdata_length.R b/tests/testthat/test_downscaling_fix_PPTdata_length.R index f57e4b07..701286c1 100644 --- a/tests/testthat/test_downscaling_fix_PPTdata_length.R +++ b/tests/testthat/test_downscaling_fix_PPTdata_length.R @@ -64,10 +64,10 @@ if (FALSE) { dats <- stats::runif(n) vars <- sapply(reps, function(i) var(rep(dats, i))) - plot(reps, vars, ylim = c(0, max(vars)), xlab = "# data repeats", + graphics::plot(reps, vars, ylim = c(0, max(vars)), xlab = "# data repeats", ylab = "Variance") var0l <- c(var(dats), vars[length(vars)]) - abline(h = var0l, col = "gray", lty = 2) + graphics::abline(h = var0l, col = "gray", lty = 2) graphics::mtext(side = 3, text = paste("var(data) =", signif(var0l[1], 3), "vs.", "var(limit) =", signif(var0l[2], 3), "\n", diff --git a/tests/testthat/test_downscaling_quantile_mapping.R b/tests/testthat/test_downscaling_quantile_mapping.R index 6311bae3..ea124d95 100644 --- a/tests/testthat/test_downscaling_quantile_mapping.R +++ b/tests/testthat/test_downscaling_quantile_mapping.R @@ -15,7 +15,7 @@ qmaps <- list( # to plot: if (FALSE) { k2 <- 3 - plot(qmaps[[k2]][["par"]][["modq"]], qmaps[[k2]][["par"]][["fitq"]]) + graphics::plot(qmaps[[k2]][["par"]][["modq"]], qmaps[[k2]][["par"]][["fitq"]]) } xs <- list(inter = 5:15, extra = 0:30) diff --git a/tests/testthat/test_indices.R b/tests/testthat/test_indices.R index 7dece721..187c438d 100644 --- a/tests/testthat/test_indices.R +++ b/tests/testthat/test_indices.R @@ -42,7 
+42,7 @@ exp_pids <- list(test0 = rep(list(integer(0)), scenario_No), test_that("rSFSW2 indices", { for (k in seq_along(runIDs_todo)) { - if (is.natural(runIDs_todo[[k]])) { + if (rSW2utils::is.natural(runIDs_todo[[k]])) { # Index of experimental treatments (row in experimental design file) expect_equal(it_exp(isim = runIDs_todo[[k]], runN = runsN_master), exp_experiment[[k]], label = names(runIDs_todo)[k]) diff --git a/tests/testthat/test_match.R b/tests/testthat/test_match.R deleted file mode 100644 index 73ab3e6c..00000000 --- a/tests/testthat/test_match.R +++ /dev/null @@ -1,44 +0,0 @@ -context("Match for appending data") - -# Inputs -xs <- data.frame(a = 4:6, b = letters[4:6], stringsAsFactors = FALSE) -xl <- rbind(data.frame(a = 14:24, b = letters[14:24], stringsAsFactors = FALSE), - xs) - -ref_template <- data.frame(a = 10:1, b = letters[10:1], c = rep(NA, 10), - stringsAsFactors = FALSE) - -test_that("Match", { - #--- Correct use of match to append data - ref <- ref_template - expect_equal({ - id_x <- match(ref$a, xs$a, nomatch = 0) - use_r <- id_x > 0 - ref$c[use_r] <- xs$b[id_x] - ref$c[use_r] - }, ref$b[use_r]) - - ref <- ref_template - expect_equal({ - id_x <- match(ref$a, xl$a, nomatch = 0) - use_r <- id_x > 0 - ref$c[use_r] <- xl$b[id_x] - ref$c[use_r] - }, ref$b[use_r]) - - #--- Incorrect use of match (first test works because nrow(xs) <= nrow(ref); - # second test fails) - ref <- ref_template - expect_equal({ - id_r <- match(xs$a, ref$a, nomatch = 0) - ref$c[id_r] <- xs$b - ref$c[id_r] - }, ref$b[id_r]) - - ref <- ref_template - expect_warning({ - id_r <- match(xl$a, ref$a, nomatch = 0) - # number of items to replace is not a multiple of replacement length - ref$c[id_r] <- xl$b - }) -}) diff --git a/tests/testthat/test_netCDF_functions.R b/tests/testthat/test_netCDF_functions.R index ac02a20d..ab49847e 100644 --- a/tests/testthat/test_netCDF_functions.R +++ b/tests/testthat/test_netCDF_functions.R @@ -112,7 +112,8 @@ if (!any(do_skip) && is_online) 
{ test_that("read_time_netCDF:", { for (k in seq_along(has_test_ncs)) { if (has_test_ncs[[k]]) { - expect_equal(read_time_netCDF(test_ncs[[k]][["filename"]]), + expect_equal( + read_time_netCDF(test_ncs[[k]][["filename"]], tres = "monthly"), test_ncs[[k]][["expect"]], info = basename(test_ncs[[k]][["filename"]])) } diff --git a/tests/testthat/test_rSFSW2_CodeStylePractices.R b/tests/testthat/test_rSFSW2_CodeStylePractices.R index 2992516c..d71e0148 100644 --- a/tests/testthat/test_rSFSW2_CodeStylePractices.R +++ b/tests/testthat/test_rSFSW2_CodeStylePractices.R @@ -16,8 +16,7 @@ test_that("Package code style", { files_not_tolint <- c( "ExtractData_ClimateDownscaling.R", # needs linting "Simulation_Run.R", # needs linting - "OutputDatabase_Ensembles.R", # this is de-facto deprecated & defunct - "RcppExports.R" # this is generated by Rcpp + "OutputDatabase_Ensembles.R" # this is de-facto deprecated & defunct ) # Note: working directory when these tests are run is at `tests/testthat/`