diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..02b0658f3d --- /dev/null +++ b/.editorconfig @@ -0,0 +1,29 @@ +# EditorConfig helps developers define and maintain consistent +# coding styles between different editors and IDEs +# editorconfig.org + +root = true + + +[*] +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +indent_style = space +indent_size = 2 + +[*.py] +indent_size = 4 +max_line_length = 88 + +[*.txt] +indent_style = tab +indent_size = 4 +max_line_length = 79 + +[*.diff] +trim_trailing_whitespace = false + +[Makefile] +indent_style = tab diff --git a/.github/ISSUE_TEMPLATE/NCO_bug_report.yml b/.github/ISSUE_TEMPLATE/NCO_bug_report.yml new file mode 100644 index 0000000000..cc53205807 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/NCO_bug_report.yml @@ -0,0 +1,66 @@ +name: NCO Bug report +description: Report something that is incorrect or broken +labels: ["nco-bug", "triage"] +assignees: + - aerorahul + +body: + - type: markdown + attributes: + value: | + Your bug may already be reported! + Please search on the [Issue tracker](https://github.com/NOAA-EMC/global-workflow/issues) before creating one. + + - type: textarea + id: current_behavior + attributes: + label: What is wrong? + description: Give a brief description of what is incorrect or broken. + placeholder: | + Short log snippets that illustrate the problem can be included here. + + For any longer logs, please create a GitHub gist (https://gist.github.com/) and link it here. + validations: + required: true + + - type: textarea + id: expected_behavior + attributes: + label: What should have happened? + placeholder: Describe what you expected to happen. + validations: + required: true + + - type: textarea + id: reproduction_steps + attributes: + label: Steps to reproduce + description: Please give explicit instructions to reproduce the error + placeholder: | + 1. + 2. + 3. + validations: + required: true + + - type: input + id: bugzilla + attributes: + label: Bugzilla issue + description: What is the corresponding NCO bugzilla issue number? + placeholder: "#..." + + - type: textarea + id: description + attributes: + label: Additional information + description: Provide context or any additional information about the bug. + validations: + required: true + + - type: textarea + id: proposed_implementation + attributes: + label: Do you have a proposed solution? + description: If you already have an idea on how to fix this, please provide it here. + placeholder: Optional diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 0000000000..216293781c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,76 @@ +name: Bug report +description: Report something that is incorrect or broken +labels: ["bug", "triage"] + +body: + - type: markdown + attributes: + value: | + Your bug may already be reported! + Please search on the [Issue tracker](https://github.com/NOAA-EMC/global-workflow/issues) before creating one. + + - type: textarea + id: current_behavior + attributes: + label: What is wrong? + description: Give a brief description of what is incorrect or broken. + placeholder: | + Short log snippets that illustrate the problem can be included here. + + For any longer logs, please create a GitHub gist (https://gist.github.com/) and link it here. + validations: + required: true + + - type: textarea + id: expected_behavior + attributes: + label: What should have happened?
+ placeholder: Describe what you expected to happen. + validations: + required: true + + - type: dropdown + id: hpc + attributes: + label: What machines are impacted? + description: You may select multiple machines. + multiple: true + options: + - All or N/A + - WCOSS2 + - Hera + - Orion + - Hercules + - Jet + - Cloud + validations: + required: true + + - type: textarea + id: reproduction_steps + attributes: + label: Steps to reproduce + description: Please give explicit instructions to reproduce the error + placeholder: | + 1. + 2. + 3. + validations: + required: true + + - type: textarea + id: description + attributes: + label: Additional information + description: Provide context or any additional information about the bug. + validations: + required: true + + - type: textarea + id: proposed_implementation + attributes: + label: Do you have a proposed solution? + description: If you already have an idea on how to fix this, please provide it here. + placeholder: Optional + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000..3ba13e0cec --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 0000000000..3aed58c520 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,55 @@ +name: Feature request +description: Request new capability +labels: ["feature", "triage"] + +body: + - type: markdown + attributes: + value: | + If your issue falls into one of the following categories, please go back and use the appropriate template: + - Something is wrong or broken + - It is a request from NCO + - It is initiating a production update + - Fix files need to be updated or added + + + Please search on the [Issue tracker](https://github.com/NOAA-EMC/global-workflow/issues) to make sure the feature has not already been requested to avoid duplicates. + + - type: textarea + id: description + attributes: + label: What new functionality do you need? + placeholder: Provide a concise description of the problem to be addressed by this feature request. Please be clear about what parts of the problem are considered to be in-scope and out-of-scope. + validations: + required: true + + - type: textarea + id: requirements + attributes: + label: What are the requirements for the new functionality? + validations: + required: true + + - type: textarea + id: criteria + attributes: + label: Acceptance Criteria + placeholder: What does it mean for this issue to be complete? + validations: + required: true + + - type: textarea + id: suggested_solution + attributes: + label: Suggest a solution (optional) + placeholder: | + Propose how this issue can be resolved. + + Things to address include: + * Details of the technical implementation + * Tradeoffs made in design decisions + * Caveats and considerations for the future + + If there are multiple solutions, please present each one separately.
Save comparisons for the very end. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/fix_file.yml b/.github/ISSUE_TEMPLATE/fix_file.yml new file mode 100644 index 0000000000..3f5b69cd1d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/fix_file.yml @@ -0,0 +1,97 @@ +name: Fix File Update +description: Request fix files be added or updated +labels: ["Fix Files", "triage"] +assignees: + - KateFriedman-NOAA + - WalterKolczynski-NOAA + +body: + - type: dropdown + attributes: + label: Target fix directory + options: + - NEW + - aer + - am + - chem + - cice + - cpl + - datm + - gdas/crtm + - gdas/fv3jedi + - gdas/gsibec + - gldas + - glwu + - gsi + - lut + - mom6 + - orog + - raw + - reg2grb2 + - sfc_climo + - ugwd + - verif + - wave + multiple: true + validations: + required: true + + - type: checkboxes + attributes: + label: Type of change + options: + - label: Update existing files + - label: Remove files + - label: Add new files to existing directory + - label: Add new fix directory + validations: + required: true + + - type: dropdown + attributes: + label: Any workflow changes needed? + description: | + Any change other than adding files to an existing directory will require at least a new fix version. + options: + - No change needed + - Fix version only + - Fix version + additional changes + validations: + required: true + + - type: textarea + attributes: + label: Related issues + description: Please link any related issues in other repositories + placeholder: NOAA-EMC/repo#1234 + validations: + required: false + + - type: input + attributes: + label: Pickup location + description: | + Please provide a path to the data on either Hera or WCOSS. + + If this requires a version update, please provide the *entire* directory, including unchanged files from the current fix. + placeholder: '/path/to/new/fix' + validations: + required: true + + - type: input + attributes: + label: Size + description: How much bigger/smaller is the new set than the existing set (in MB)? + placeholder: '0' + validations: + required: true + + - type: markdown + attributes: + value: | + # Tasks + - [ ] Discuss needs with global-workflow developer assigned to request. + - [ ] Add/update/remove fix file(s) in fix sets on supported platforms (global-workflow assignee task). + - [ ] Update "Fix File Management" spreadsheet (https://docs.google.com/spreadsheets/d/1BeIvcz6TO3If4YCqkUK-oz_kGS9q2wTjwLS-BBemSEY/edit?usp=sharing). + - [ ] Make related workflow/component updates. + diff --git a/.github/ISSUE_TEMPLATE/production_update.yml b/.github/ISSUE_TEMPLATE/production_update.yml new file mode 100644 index 0000000000..ac9ada505a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/production_update.yml @@ -0,0 +1,50 @@ +name: Production Update +description: Begin the process of an operational production update +labels: ["production update", "triage"] +assignees: + - WalterKolczynski-NOAA + - KateFriedman-NOAA + +body: + - type: textarea + id: description + attributes: + label: Description + description: Provide a concise description of the production update. + placeholder: Include related issues in component repositories. + validations: + required: true + + - type: input + id: target + attributes: + label: Target version + description: What is the target GFS version? + placeholder: Use v#.#.# format + validations: + required: true + + - type: textarea + id: changes + attributes: + label: Expected workflow changes + description: What are the anticipated changes coming into the workflow?
+ placeholder: Include version variables, checkout tags, config/parm, etc. + validations: + required: true + + - type: markdown + attributes: + value: | + **Tasks** + - [ ] Create release branch + - [ ] Make workflow changes for upgrade in release branch (add additional checklist items as needed) + - [ ] Create release notes + - [ ] Cut hand-off tag for CDF + - [ ] Submit CDF to NCO + - [ ] Implementation into operations complete + - [ ] Merge release branch into operational branch + - [ ] Cut version tag from operational branch + - [ ] Release new version tag + - [ ] Announce to users + - [ ] Update Read-The-Docs operations status version in develop diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 6a0fbfec85..dbebfe8f6e 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,50 +1,53 @@ + - -**Description** + PRs should meet these guidelines: + - Each PR should address ONE topic and have an associated issue. + - No hard-coded paths or personal directories. + - No temporary or backup files should be committed (including logs). + - Any code that you disabled by commenting it out should be removed or re-enabled. - - - + Please delete all these comments before submitting the PR. +--> +# Description + + - + Describe your changes. Focus on the *what* and *why*. The *how* will be evident from the changes. In particular, be sure to note any interface changes, such as command line syntax, that will need to be communicated to users. -**Type of change** + At the end of your description, please be sure to add the issue this PR solves using the word "Resolves". If there are any issues that are related but not yet resolved (including in other repos), you may use "Refs". -Please delete options that are not relevant. + Resolves #1234 + Refs #4321 + Refs NOAA-EMC/repo#5678 +--> -- [ ] Bug fix (non-breaking change which fixes an issue) -- [ ] New feature (non-breaking change which adds functionality) -- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) -- [ ] This change requires a documentation update +# Type of change + +- Bug fix (fixes something broken) +- New feature (adds functionality) +- Maintenance (code refactor, clean-up, new CI test, etc.) -**How Has This Been Tested?** +# Change characteristics +- Is this a breaking change (a change in existing functionality)? YES/NO +- Does this change require a documentation update? YES/NO - - - +# How has this been tested? + - - -**Checklist** +# Checklist +- [ ] Any dependent changes have been merged and published - [ ] My code follows the style guidelines of this project - [ ] I have performed a self-review of my own code - [ ] I have commented my code, particularly in hard-to-understand areas -- [ ] I have made corresponding changes to the documentation
- [ ] My changes generate no new warnings - [ ] New and existing tests pass with my changes -- [ ] Any dependent changes have been merged and published +- [ ] I have made corresponding changes to the documentation if necessary diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index ae083a3c0b..20e4a97f9c 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -9,11 +9,18 @@ on: - release/* paths: - docs/** - pull_request: - types: [opened, reopened, synchronize] + pull_request_target: + types: [opened, reopened, synchronize] + paths: + - docs/** jobs: + documentation: + + permissions: + pull-requests: 'write' + runs-on: ubuntu-latest name: Build and deploy documentation @@ -49,3 +56,17 @@ jobs: path: artifact/doc_warnings.log if-no-files-found: ignore + - name: Comment ReadDocs + uses: actions/github-script@v6 + with: + script: | + const message = ` + Link to ReadTheDocs sample build for this PR can be found at: + https://global-workflow--${{ github.event.pull_request.number }}.org.readthedocs.build/en/${{ github.event.pull_request.number }} + ` + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body: message + }) diff --git a/.github/workflows/globalworkflow-ci.yaml b/.github/workflows/globalworkflow-ci.yaml new file mode 100644 index 0000000000..1474c79a48 --- /dev/null +++ b/.github/workflows/globalworkflow-ci.yaml @@ -0,0 +1,86 @@ +name: gw-ci-orion + +on: [workflow_dispatch] + +# TEST_DIR contains 2 directories: +# 1. HOMEgfs: clone of the global-workflow +# 2. RUNTESTS: A directory containing EXPDIR and COMROT for experiments +# e.g. $> tree ./TEST_DIR +# ./TEST_DIR +# ├── HOMEgfs +# └── RUNTESTS +# ├── COMROT +# │   └── ${pslot} +# └── EXPDIR +# └── ${pslot} +env: + TEST_DIR: ${{ github.workspace }}/${{ github.run_id }} + MACHINE_ID: orion + +jobs: + checkout-build-link: + runs-on: [self-hosted, orion-ready] + timeout-minutes: 600 + + steps: + - name: Checkout global-workflow + uses: actions/checkout@v3 + with: + path: ${{ github.run_id }}/HOMEgfs # This path needs to be relative + + - name: Checkout components + run: | + cd ${{ env.TEST_DIR }}/HOMEgfs/sorc + ./checkout.sh -c -g # Options e.g.
-u can be added later + + - name: Build components + run: | + cd ${{ env.TEST_DIR }}/HOMEgfs/sorc + ./build_all.sh + + - name: Link artifacts + run: | + cd ${{ env.TEST_DIR }}/HOMEgfs/sorc + ./link_workflow.sh + + create-experiments: + needs: checkout-build-link + runs-on: [self-hosted, orion-ready] + strategy: + matrix: + case: ["C48_S2S", "C96_atm3DVar"] + + steps: + - name: Create Experiments ${{ matrix.case }} + env: + HOMEgfs_PR: ${{ env.TEST_DIR }}/HOMEgfs + RUNTESTS: ${{ env.TEST_DIR }}/RUNTESTS + pslot: ${{ matrix.case }}.${{ github.run_id }} + run: | + cd ${{ env.TEST_DIR }}/HOMEgfs + source workflow/gw_setup.sh + source ci/platforms/orion.sh + ./ci/scripts/create_experiment.py --yaml ci/cases/${{ matrix.case }}.yaml --dir ${{ env.HOMEgfs_PR }} + + run-experiments: + needs: create-experiments + runs-on: [self-hosted, orion-ready] + strategy: + max-parallel: 2 + matrix: + case: ["C48_S2S", "C96_atm3DVar"] + steps: + - name: Run Experiment ${{ matrix.case }} + run: | + cd ${{ env.TEST_DIR }}/HOMEgfs + ./ci/scripts/run-check_ci.sh ${{ env.TEST_DIR }} ${{ matrix.case }}.${{ github.run_id }} + + clean-up: + needs: run-experiments + runs-on: [self-hosted, orion-ready] + steps: + - name: Clean-up + run: | + cd ${{ github.workspace }} + rm -rf ${{ github.run_id }} + diff --git a/.github/workflows/hera.yaml b/.github/workflows/hera.yaml new file mode 100644 index 0000000000..800d87e55a --- /dev/null +++ b/.github/workflows/hera.yaml @@ -0,0 +1,81 @@ +name: Hera + +on: + pull_request_target: + branches: + - develop + types: [closed] + +jobs: + + getlabels: + runs-on: ubuntu-latest + outputs: + labels: ${{ steps.id.outputs.labels }} + steps: + - name: Get Label Steps + id: id + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + OWNER: ${{ github.repository_owner }} + REPO_NAME: ${{ github.event.repository.name }} + PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + run: | + LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')" + LABELS=$(echo "$LABELS1" | tr '\n' ' ') + echo "labels=$LABELS" >> $GITHUB_OUTPUT + + passed: + if: contains( needs.getlabels.outputs.labels, 'CI-Hera-Passed') && github.event.pull_request.merged + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Passed + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: hera.json + label: hera + message: passing + color: green + + failed: + if: contains( needs.getlabels.outputs.labels, 'CI-Hera-Failed') && github.event.pull_request.merged + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Failed + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: hera.json + label: hera + message: failing + color: red + + + pending: + if: "!contains( needs.getlabels.outputs.labels, 'CI-Hera-Passed') && !contains( needs.getlabels.outputs.labels, 'CI-Hera-Failed')" + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Pending + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: hera.json + label: hera + message: pending + color: orange diff --git a/.github/workflows/orion.yaml b/.github/workflows/orion.yaml new file mode 100644 index 0000000000..2d17b3db63 --- /dev/null +++
b/.github/workflows/orion.yaml @@ -0,0 +1,81 @@ +name: Orion + +on: + pull_request_target: + branches: + - develop + types: [closed] + +jobs: + + getlabels: + runs-on: ubuntu-latest + outputs: + labels: ${{ steps.id.outputs.labels }} + steps: + - name: Get Label Steps + id: id + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + OWNER: ${{ github.repository_owner }} + REPO_NAME: ${{ github.event.repository.name }} + PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + run: | + LABELS1="$(gh api repos/$OWNER/$REPO_NAME/pulls/$PULL_REQUEST_NUMBER --jq '.labels.[].name')" + LABELS=$(echo "$LABELS1" | tr '\n' ' ') + echo "labels=$LABELS" >> $GITHUB_OUTPUT + + passed: + if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') && github.event.pull_request.merged + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Passed + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: orion.json + label: orion + message: passing + color: green + + failed: + if: contains( needs.getlabels.outputs.labels, 'CI-Orion-Failed') && github.event.pull_request.merged + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Failed + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: orion.json + label: orion + message: failing + color: red + + + pending: + if: "!contains( needs.getlabels.outputs.labels, 'CI-Orion-Passed') && !contains( needs.getlabels.outputs.labels, 'CI-Orion-Failed')" + runs-on: ubuntu-latest + needs: + - getlabels + + steps: + - name: Pending + uses: schneegans/dynamic-badges-action@v1.6.0 + with: + forceUpdate: true + auth: ${{ secrets.CLI_DYNAMIC_BADGES }} + gistID: e35aa2904a54deae6bbb1fdc2d960c71 + filename: orion.json + label: orion + message: pending + color: orange diff --git a/.github/workflows/pynorms.yaml b/.github/workflows/pynorms.yaml index 7f823f8318..6ea99b59ed 100644 --- a/.github/workflows/pynorms.yaml +++ b/.github/workflows/pynorms.yaml @@ -21,4 +21,4 @@ jobs: - name: Run pycodestyle run: | cd $GITHUB_WORKSPACE/global-workflow - pycodestyle -v --config ./.pycodestyle --exclude='.git,.github' ./ + pycodestyle -v --config ./.pycodestyle ./ diff --git a/.gitignore b/.gitignore index d09198e36d..047313a32f 100644 --- a/.gitignore +++ b/.gitignore @@ -30,8 +30,8 @@ fix/chem fix/cice fix/cpl fix/datm -fix/gdas fix/gldas +fix/gdas fix/gsi fix/lut fix/mom6 @@ -41,13 +41,12 @@ fix/sfc_climo fix/ugwd fix/verif fix/wave -fix/wafs # Ignore parm file symlinks #-------------------------- parm/config/config.base parm/gldas -parm/mon +parm/monitor parm/post/AEROSOL_LUTS.dat parm/post/nam_micro_lookup.dat parm/post/optics_luts_DUST.dat @@ -91,14 +90,49 @@ parm/post/postxconfig-NT-GFS-WAFS.txt parm/post/postxconfig-NT-GFS.txt parm/post/postxconfig-NT-gefs-aerosol.txt parm/post/postxconfig-NT-gefs-chem.txt +parm/ufs/noahmptable.tbl parm/wafs # Ignore sorc and logs folders from externals #-------------------------------------------- sorc/*log sorc/logs -sorc/*.cd -sorc/*.fd +sorc/calc_analysis.fd +sorc/calc_increment_ens.fd +sorc/calc_increment_ens_ncio.fd +sorc/emcsfc_ice_blend.fd +sorc/emcsfc_snow2mdl.fd +sorc/enkf.fd +sorc/enkf_chgres_recenter_nc.fd +sorc/fbwndgfs.fd +sorc/gaussian_sfcanl.fd +sorc/getsfcensmeanp.fd +sorc/getsigensmeanp_smooth.fd +sorc/getsigensstatp.fd +sorc/gfs_bufr.fd +sorc/global_cycle.fd 
+sorc/gsi.fd +sorc/interp_inc.fd +sorc/mkgfsawps.fd +sorc/overgridid.fd +sorc/oznmon_horiz.fd +sorc/oznmon_time.fd +sorc/radmon_angle.fd +sorc/radmon_bcoef.fd +sorc/radmon_bcor.fd +sorc/radmon_time.fd +sorc/rdbfmsua.fd +sorc/recentersigp.fd +sorc/reg2grb2.fd +sorc/supvit.fd +sorc/syndat_getjtbul.fd +sorc/syndat_maksynrc.fd +sorc/syndat_qctropcy.fd +sorc/tave.fd +sorc/tocsbufr.fd +sorc/upp.fd +sorc/vint.fd +sorc/webtitle.fd # Ignore scripts from externals #------------------------------ @@ -121,6 +155,7 @@ scripts/exgfs_atmos_wafs_grib2_0p25.sh ush/chgres_cube.sh ush/emcsfc_ice_blend.sh ush/emcsfc_snow.sh +ush/exglobal_prep_ocean_obs.py ush/fix_precip.sh ush/fv3gfs_driver_grid.sh ush/fv3gfs_filter_topo.sh @@ -142,4 +177,14 @@ ush/make_ntc_bull.pl ush/make_tif.sh ush/month_name.sh ush/imsfv3_scf2ioda.py -ush/letkf_create_ens.py +ush/atparse.bash +ush/run_bufr2ioda.py + +# version files +versions/build.ver +versions/run.ver + +# wxflow checkout and symlinks +ush/python/wxflow +workflow/wxflow +ci/scripts/wxflow diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..3eb26fb0fe --- /dev/null +++ b/.gitmodules @@ -0,0 +1,28 @@ +[submodule "sorc/ufs_model.fd"] + path = sorc/ufs_model.fd + url = https://github.com/ufs-community/ufs-weather-model + ignore = dirty +[submodule "sorc/wxflow"] + path = sorc/wxflow + url = https://github.com/NOAA-EMC/wxflow +[submodule "sorc/gfs_utils.fd"] + path = sorc/gfs_utils.fd + url = https://github.com/NOAA-EMC/gfs-utils +[submodule "sorc/ufs_utils.fd"] + path = sorc/ufs_utils.fd + url = https://github.com/ufs-community/UFS_UTILS.git +[submodule "sorc/verif-global.fd"] + path = sorc/verif-global.fd + url = https://github.com/NOAA-EMC/EMC_verif-global.git +[submodule "sorc/gsi_enkf.fd"] + path = sorc/gsi_enkf.fd + url = https://github.com/NOAA-EMC/GSI.git +[submodule "sorc/gdas.cd"] + path = sorc/gdas.cd + url = https://github.com/NOAA-EMC/GDASApp.git +[submodule "sorc/gsi_utils.fd"] + path = sorc/gsi_utils.fd + url = https://github.com/NOAA-EMC/GSI-Utils.git +[submodule "sorc/gsi_monitor.fd"] + path = sorc/gsi_monitor.fd + url = https://github.com/NOAA-EMC/GSI-Monitor.git diff --git a/.pycodestyle b/.pycodestyle index 8bd18fa9d7..5907458f78 100644 --- a/.pycodestyle +++ b/.pycodestyle @@ -3,4 +3,4 @@ count = False ignore = E402,W504 max-line-length = 160 statistics = True -exclude = Experimental +exclude = .git,.github,.vscode,venv,*.fd,*.cd,docs/source/conf.py diff --git a/.readthedocs.yaml b/.readthedocs.yaml index fa854552e5..87acd09370 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -15,7 +15,9 @@ build: sphinx: configuration: docs/source/conf.py +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html python: install: - requirements: docs/requirements.txt - system_packages: true diff --git a/Externals.cfg b/Externals.cfg deleted file mode 100644 index 1fde0c5033..0000000000 --- a/Externals.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# External sub-modules of global-workflow - -[UFS] -tag = 2247060 -local_path = sorc/ufs_model.fd -repo_url = https://github.com/ufs-community/ufs-weather-model.git -protocol = git -required = True - -[gfs-utils] -hash = 8965258 -local_path = sorc/gfs_utils.fd -repo_url = https://github.com/NOAA-EMC/gfs-utils -protocol = git -required = True - -[UFS-Utils] -hash = 72a0471 -local_path = sorc/ufs_utils.fd -repo_url = https://github.com/ufs-community/UFS_UTILS.git -protocol = git -required 
= True - -[EMC_verif-global] -tag = c267780 -local_path = sorc/verif-global.fd -repo_url = https://github.com/NOAA-EMC/EMC_verif-global.git -protocol = git -required = True - -[GSI-EnKF] -hash = 113e307 -local_path = sorc/gsi_enkf.fd -repo_url = https://github.com/NOAA-EMC/GSI.git -protocol = git -required = False - -[GSI-Utils] -hash = 322cc7b -local_path = sorc/gsi_utils.fd -repo_url = https://github.com/NOAA-EMC/GSI-utils.git -protocol = git -required = False - -[GSI-Monitor] -hash = 45783e3 -local_path = sorc/gsi_monitor.fd -repo_url = https://github.com/NOAA-EMC/GSI-monitor.git -protocol = git -required = False - -[GDASApp] -hash = 81675c9 -local_path = sorc/gdas.cd -repo_url = https://github.com/NOAA-EMC/GDASApp.git -protocol = git -required = False - -[EMC-gfs_wafs] -hash = 014a0b8 -local_path = sorc/gfs_wafs.fd -repo_url = https://github.com/NOAA-EMC/EMC_gfs_wafs.git -protocol = git -required = False - -[externals_description] -schema_version = 1.0.0 diff --git a/README.md b/README.md index 465b0529fa..289e74933b 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,9 @@ [![Read The Docs Status](https://readthedocs.org/projects/global-workflow/badge/?badge=latest)](http://global-workflow.readthedocs.io/) [![shellnorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml) [![pynorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml) -[![pytests](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml) + +![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/hera.json) +![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/orion.json) # global-workflow Global Workflow currently supporting the Global Forecast System (GFS) with the [UFS-weather-model](https://github.com/ufs-community/ufs-weather-model) and [GSI](https://github.com/NOAA-EMC/GSI)-based Data Assimilation System. 
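+<!-- A sketch of the gist payload the two custom badges above read, assuming the
+     standard shields.io endpoint schema that schneegans/dynamic-badges-action
+     (used by the Hera/Orion workflows in this PR) writes; label/message/color
+     are set per run by those workflows:
+     {"schemaVersion": 1, "label": "hera", "message": "passing", "color": "green"}
+-->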
@@ -16,6 +18,7 @@ The `global-workflow` currently supports the following tier-1 machines: * NOAA RDHPCS - Hera * MSU HPC - Orion +* MSU HPC - Hercules * NOAA's operational HPC - WCOSS2 Additionally, the following tier-2 machine is supported: diff --git a/ci/cases/pr/C48_ATM.yaml b/ci/cases/pr/C48_ATM.yaml new file mode 100644 index 0000000000..fc0b729af6 --- /dev/null +++ b/ci/cases/pr/C48_ATM.yaml @@ -0,0 +1,13 @@ +experiment: + system: gfs + mode: forecast-only + +arguments: + pslot: {{ 'pslot' | getenv }} + app: ATM + resdet: 48 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2021032312 + edate: 2021032312 + yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml diff --git a/ci/cases/pr/C48_S2SW.yaml b/ci/cases/pr/C48_S2SW.yaml new file mode 100644 index 0000000000..f4b50ead22 --- /dev/null +++ b/ci/cases/pr/C48_S2SW.yaml @@ -0,0 +1,13 @@ +experiment: + system: gfs + mode: forecast-only + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2SW + resdet: 48 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2021032312 + edate: 2021032312 + yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml diff --git a/ci/cases/pr/C48_S2SWA_gefs.yaml b/ci/cases/pr/C48_S2SWA_gefs.yaml new file mode 100644 index 0000000000..5eb99d9c1e --- /dev/null +++ b/ci/cases/pr/C48_S2SWA_gefs.yaml @@ -0,0 +1,17 @@ +experiment: + system: gefs + mode: forecast-only + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2SWA + resdet: 48 + resens: 48 + nens: 2 + gfs_cyc: 1 + start: cold + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2021032312 + edate: 2021032312 + yaml: {{ HOMEgfs }}/ci/platforms/gefs_ci_defaults.yaml diff --git a/ci/cases/pr/C96C48_hybatmDA.yaml b/ci/cases/pr/C96C48_hybatmDA.yaml new file mode 100644 index 0000000000..1f3e973ae7 --- /dev/null +++ b/ci/cases/pr/C96C48_hybatmDA.yaml @@ -0,0 +1,21 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: ATM + resdet: 96 + resens: 48 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48 + idate: 2021122018 + edate: 2021122106 + nens: 2 + gfs_cyc: 1 + start: cold + yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - hercules diff --git a/ci/cases/pr/C96_atm3DVar.yaml b/ci/cases/pr/C96_atm3DVar.yaml new file mode 100644 index 0000000000..360e81e9d7 --- /dev/null +++ b/ci/cases/pr/C96_atm3DVar.yaml @@ -0,0 +1,20 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: ATM + resdet: 96 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + icsdir: ${ICSDIR_ROOT}/C96C48 + idate: 2021122018 + edate: 2021122106 + nens: 0 + gfs_cyc: 1 + start: cold + yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml + +skip_ci_on_hosts: + - hercules diff --git a/ci/cases/weekly/C384C192_hybatmda.yaml b/ci/cases/weekly/C384C192_hybatmda.yaml new file mode 100644 index 0000000000..4c14018e2d --- /dev/null +++ b/ci/cases/weekly/C384C192_hybatmda.yaml @@ -0,0 +1,18 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: ATM + resdet: 384 + resens: 192 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C384C192 + idate: 2023040118 + edate: 2023040200 + nens: 2 + gfs_cyc: 1 + start: cold + yaml: {{ HOMEgfs
}}/ci/platforms/gfs_defaults_ci.yaml diff --git a/ci/cases/weekly/C384_S2SWA.yaml b/ci/cases/weekly/C384_S2SWA.yaml new file mode 100644 index 0000000000..6c624f5698 --- /dev/null +++ b/ci/cases/weekly/C384_S2SWA.yaml @@ -0,0 +1,13 @@ +experiment: + system: gfs + mode: forecast-only + +arguments: + pslot: {{ 'pslot' | getenv }} + app: S2SWA + resdet: 384 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + idate: 2016070100 + edate: 2016070100 + yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml diff --git a/ci/cases/weekly/C384_atm3DVar.yaml b/ci/cases/weekly/C384_atm3DVar.yaml new file mode 100644 index 0000000000..e7986ef097 --- /dev/null +++ b/ci/cases/weekly/C384_atm3DVar.yaml @@ -0,0 +1,18 @@ +experiment: + system: gfs + mode: cycled + +arguments: + pslot: {{ 'pslot' | getenv }} + app: ATM + resdet: 384 + resens: 192 + comrot: {{ 'RUNTESTS' | getenv }}/COMROT + expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR + icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C384C192 + idate: 2023040118 + edate: 2023040200 + nens: 0 + gfs_cyc: 1 + start: cold + yaml: {{ HOMEgfs }}/ci/platforms/gfs_defaults_ci.yaml diff --git a/ci/platforms/config.hera b/ci/platforms/config.hera new file mode 100644 index 0000000000..76a6a08670 --- /dev/null +++ b/ci/platforms/config.hera @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT +export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR +export STMP="/scratch1/NCEPDEV/stmp2/${USER}" +export SLURM_ACCOUNT=nems +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/platforms/config.hercules b/ci/platforms/config.hercules new file mode 100644 index 0000000000..e5a638a827 --- /dev/null +++ b/ci/platforms/config.hercules @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/HERCULES +export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR +export STMP="/work2/noaa/stmp/${USER}" +export SLURM_ACCOUNT=nems +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion new file mode 100644 index 0000000000..3ddd05c034 --- /dev/null +++ b/ci/platforms/config.orion @@ -0,0 +1,8 @@ +#!/usr/bin/bash + +export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/ORION +export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR +export STMP="/work2/noaa/stmp/${USER}" +export SLURM_ACCOUNT=nems +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/platforms/gefs_ci_defaults.yaml b/ci/platforms/gefs_ci_defaults.yaml new file mode 100644 index 0000000000..2aa30d6be4 --- /dev/null +++ b/ci/platforms/gefs_ci_defaults.yaml @@ -0,0 +1,4 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gefs/yaml/defaults.yaml +base: + ACCOUNT: ${SLURM_ACCOUNT} diff --git a/ci/platforms/gfs_defaults_ci.yaml b/ci/platforms/gfs_defaults_ci.yaml new file mode 100644 index 0000000000..5e57e617ec --- /dev/null +++ b/ci/platforms/gfs_defaults_ci.yaml @@ -0,0 +1,4 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml +base: + ACCOUNT: ${SLURM_ACCOUNT} diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh index 20df09d851..164d423c67 100755 --- a/ci/scripts/check_ci.sh +++ b/ci/scripts/check_ci.sh @@ -2,30 +2,29 @@ set -eux ##################################################################################### # -# Script description: BASH script for checking for cases in a given PR and +# Script description: BASH script for checking for cases in a given PR and # running rocotostat on 
each to determine if the experiment has # succeeded or failed. This script is intended # to run from within a cron job in the CI Managers account -# Abstract TODO ##################################################################################### -HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" scriptname=$(basename "${BASH_SOURCE[0]}") echo "Begin ${scriptname} at $(date -u)" || true export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' GH=${HOME}/bin/gh -REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"} +REPO_URL="https://github.com/NOAA-EMC/global-workflow.git" ######################################################################### # Set up runtime environment variables for accounts on supported machines ######################################################################### -source "${HOMEgfs}/ush/detect_machine.sh" +source "${ROOT_DIR}/ush/detect_machine.sh" case ${MACHINE_ID} in hera | orion) echo "Running Automated Testing on ${MACHINE_ID}" - source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}" ;; *) echo "Unsupported platform. Exiting with error." @@ -33,8 +32,9 @@ case ${MACHINE_ID} in ;; esac set +x -source "${HOMEgfs}/ush/module-setup.sh" -module use "${HOMEgfs}/modulefiles" +source "${ROOT_DIR}/ush/module-setup.sh" +source "${ROOT_DIR}/ci/scripts/utils/ci_utils.sh" +module use "${ROOT_DIR}/modulefiles" module load "module_gwsetup.${MACHINE_ID}" module list set -x @@ -57,9 +57,9 @@ pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db" pr_list="" if [[ -f "${pr_list_dbfile}" ]]; then - pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display "${pr_list_dbfile}" | grep -v Failed | grep Running | awk '{print $1}') || true + pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display | grep -v Failed | grep Running | awk '{print $1}') || true fi -if [[ -z "${pr_list}" ]]; then +if [[ -z "${pr_list+x}" ]]; then echo "no PRs open and ready to run cases on .. exiting" exit 0 fi @@ -71,6 +71,8 @@ fi for pr in ${pr_list}; do id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + output_ci="${GFS_CI_ROOT}/PR/${pr}/output_runtime_${id}" + output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_runtime_single.log" echo "Processing Pull Request #${pr} and looking for cases" pr_dir="${GFS_CI_ROOT}/PR/${pr}" if [[ !
-d "${pr_dir}/RUNTESTS" ]]; then continue fi - num_cases=$(find "${pr_dir}/RUNTESTS" -mindepth 1 -maxdepth 1 -type d | wc -l) || true - #Check for PR success when ${pr_dir}/RUNTESTS is void of subfolders + #Check for PR success when ${pr_dir}/RUNTESTS/EXPDIR is void of subfolders # since all successfull ones where previously removed - if [[ "${num_cases}" -eq 0 ]] && [[ -d "${pr_dir}/RUNTESTS" ]]; then + # shellcheck disable=SC2312 + if [[ -z $(ls -A "${pr_dir}/RUNTESTS/EXPDIR") ]] ; then "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}" - # Completely remove the PR and its cloned repo on sucess of all cases - rm -Rf "${pr_dir}" - continue + sed -i "1 i\`\`\`" "${output_ci}" + sed -i "1 i\All CI Test Cases Passed on ${MACHINE_ID^}:" "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" + # Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms + weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}" --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | contains("weekly_ci")) | .labels[].name ') || true + if [[ -n "${weekly_labels}" ]]; then + num_platforms=$(find "${ROOT_DIR}/ci/platforms" -type f -name "config.*" | wc -l) + passed=0 + for platforms in "${ROOT_DIR}"/ci/platforms/config.*; do + machine=$(basename "${platforms}" | cut -d. -f2) + if [[ "${weekly_labels}" == *"CI-${machine^}-Passed"* ]]; then + ((passed=passed+1)) + fi + done + if [[ "${passed}" == "${num_platforms}" ]]; then + "${GH}" pr close --repo "${REPO_URL}" "${pr}" + fi + fi + # Completely remove the PR and its cloned repo on sucess + # of all cases on this platform + rm -Rf "${pr_dir}" + continue fi - for cases in "${pr_dir}/RUNTESTS/"*; do - pslot=$(basename "${cases}") || true - if [[ -z "${pslot}" ]]; then - echo "No cases found in ${pr_dir}/RUNTESTS .. exiting" + for pslot_dir in "${pr_dir}/RUNTESTS/EXPDIR/"*; do + pslot=$(basename "${pslot_dir}") || true + if [[ -z "${pslot+x}" ]]; then + echo "No experiments found in ${pslot_dir} .. exiting" exit 0 fi - xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml" - db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db" + xml="${pslot_dir}/${pslot}.xml" + db="${pslot_dir}/${pslot}.db" + if [[ ! 
-f "${db}" ]]; then + continue + fi rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true - num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + # num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true if [[ ${num_failed} -ne 0 ]]; then - { - echo "Experiment ${pslot} Terminated: *FAILED*" - echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed" + error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true { + echo "Experiment ${pslot} *** FAILED *** on ${MACHINE_ID^}" + echo "Experiment ${pslot} with ${num_failed} tasks failed at $(date +'%D %r')" || true echo "Error logs:" echo "${error_logs}" - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}" + } >> "${output_ci}" + sed -i "1 i\`\`\`" "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" for kill_cases in "${pr_dir}/RUNTESTS/"*; do pslot=$(basename "${kill_cases}") - sacct --format=jobid,jobname%35,WorkDir%100,stat | grep "${pslot}" | grep "PR\/${pr}\/RUNTESTS" | awk '{print $1}' | xargs scancel || true + cancel_slurm_jobs "${pslot}" done break fi if [[ "${num_done}" -eq "${num_cycles}" ]]; then - { - echo "Experiment ${pslot} completed: *SUCCESS*" - echo "Experiment ${pslot} Completed at $(date)" || true - echo "with ${num_succeeded} successfully completed jobs" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" #Remove Experment cases that completed successfully - rm -Rf "${pr_dir}/RUNTESTS/${pslot}" + rm -Rf "${pslot_dir}" + rm -Rf "${pr_dir}/RUNTESTS/COMROT/${pslot}" + rm -f "${output_ci_single}" + # echo "\`\`\`" > "${output_ci_single}" + DATE=$(date +'%D %r') + echo "Experiment ${pslot} **SUCCESS** on ${MACHINE_ID^} at ${DATE}" >> "${output_ci_single}" + echo "Experiment ${pslot} *** SUCCESS *** at ${DATE}" >> "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}" + fi done done diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 022cc44378..798c98bf50 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -9,8 +9,8 @@ usage() { echo echo "Usage: $0 -p -d -o -h" echo - echo " -p PR nunber to clone and build" - echo " -d Full path of of were to clone and build PR" + echo " -p PR number to clone and build" + 
echo " -d Full path of of where to clone and build PR" echo " -o Full path to output message file detailing results of CI tests" echo " -h display this message and quit" echo @@ -35,13 +35,11 @@ while getopts "p:d:o:h" opt; do *) echo "Unrecognized option" usage - exit - ;; + ;; esac done cd "${repodir}" || exit 1 -# clone copy of repo if [[ -d global-workflow ]]; then rm -Rf global-workflow fi @@ -49,15 +47,8 @@ fi git clone "${REPO_URL}" cd global-workflow || exit 1 -pr_state=$(gh pr view "${PR}" --json state --jq '.state') -if [[ "${pr_state}" != "OPEN" ]]; then - title=$(gh pr view "${PR}" --json title --jq '.title') - echo "PR ${title} is no longer open, state is ${pr_state} ... quitting" - exit 1 -fi - # checkout pull request -"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" +"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" --recurse-submodules HOMEgfs="${PWD}" source "${HOMEgfs}/ush/detect_machine.sh" @@ -76,47 +67,43 @@ source "${HOMEgfs}/ush/detect_machine.sh" commit=$(git log --pretty=format:'%h' -n 1) echo "${commit}" > "../commit" -# run checkout script +# build full cycle cd sorc || exit 1 set +e -./checkout.sh -c -g -u &>> log.checkout -checkout_status=$? -if [[ ${checkout_status} != 0 ]]; then - { - echo "Checkout: *FAILED*" - echo "Checkout: Failed at $(date)" || true - echo "Checkout: see output at ${PWD}/log.checkout" - } >> "${outfile}" - exit "${checkout_status}" -else - { - echo "Checkout: *SUCCESS*" - echo "Checkout: Completed at $(date)" || true - } >> "${outfile}" -fi -# build full cycle source "${HOMEgfs}/ush/module-setup.sh" export BUILD_JOBS=8 rm -rf log.build -./build_all.sh &>> log.build +./build_all.sh -gu >> log.build 2>&1 build_status=$? +DATE=$(date +'%D %r') if [[ ${build_status} != 0 ]]; then { - echo "Build: *FAILED*" - echo "Build: Failed at $(date)" || true - echo "Build: see output at ${PWD}/log.build" + echo "Build: *** FAILED ***" + echo "Build: Failed at ${DATE}" + cat "${PWD}/log.build" } >> "${outfile}" exit "${build_status}" else { - echo "Build: *SUCCESS*" - echo "Build: Completed at $(date)" || true + echo "Build: Completed at ${DATE}" } >> "${outfile}" fi -./link_workflow.sh +LINK_LOGFILE_PATH=link_workflow.log +rm -f "${LINK_LOGFILE_PATH}" +./link_workflow.sh >> "${LINK_LOGFILE_PATH}" 2>&1 +link_status=$? +if [[ ${link_status} != 0 ]]; then + DATE=$(date +'%D %r') + { + echo "Link: *** FAILED ***" + echo "Link: Failed at ${DATE}" + cat "${LINK_LOGFILE_PATH}" + } >> "${outfile}" + exit "${link_status}" +fi echo "check/build/link test completed" exit "${build_status}" diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh index 6bd76ca2bc..a0edb4b4c3 100755 --- a/ci/scripts/driver.sh +++ b/ci/scripts/driver.sh @@ -10,9 +10,8 @@ set -eux # # This script uses GitHub CLI to check for Pull Requests with CI-Ready-${machine} tags on the # development branch for the global-workflow repo. It then stages tests directories per -# PR number and calls clone-build_ci.sh to perform a clone and full build from $(HOMEgfs)/sorc -# of the PR. It then is ready to run a suite of regression tests with various -# configurations with run_tests.py. +# PR number and calls clone-build_ci.sh to perform a clone and full build from the PR. 
+# It is then ready to run a suite of regression tests with various configurations. ####################################################################################### ################################################################# @@ -22,22 +21,22 @@ export GH=${HOME}/bin/gh export REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"} ################################################################ -# Setup the reletive paths to scripts and PS4 for better logging +# Setup the relative paths to scripts and PS4 for better logging ################################################################ -HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" scriptname=$(basename "${BASH_SOURCE[0]}") -echo "Begin ${scriptname} at $(date -u)" || true +echo "Begin ${scriptname} at $(date +'%D %r')" || true export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' ######################################################################### # Set up runtime environment variables for accounts on supported machines ######################################################################### -source "${HOMEgfs}/ush/detect_machine.sh" +source "${ROOT_DIR}/ush/detect_machine.sh" case ${MACHINE_ID} in hera | orion) echo "Running Automated Testing on ${MACHINE_ID}" - source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}" ;; *) echo "Unsupported platform. Exiting with error." @@ -49,98 +48,200 @@ esac # setup runtime env for correct python install and git ###################################################### set +x -source "${HOMEgfs}/ush/module-setup.sh" -module use "${HOMEgfs}/modulefiles" +source "${ROOT_DIR}/ci/scripts/utils/ci_utils.sh" +source "${ROOT_DIR}/ush/module-setup.sh" +module use "${ROOT_DIR}/modulefiles" module load "module_gwsetup.${MACHINE_ID}" set -x ############################################################ # query repo and get list of open PRs with tags {machine}-CI ############################################################ + pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db" + if [[ !
-f "${pr_list_dbfile}" ]]; then - "${HOMEgfs}/ci/scripts/pr_list_database.py" --create "${pr_list_dbfile}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --create --dbfile "${pr_list_dbfile}" fi pr_list=$(${GH} pr list --repo "${REPO_URL}" --label "CI-${MACHINE_ID^}-Ready" --state "open" | awk '{print $1}') || true for pr in ${pr_list}; do - "${HOMEgfs}/ci/scripts/pr_list_database.py" --add_pr "${pr}" "${pr_list_dbfile}" + pr_dir="${GFS_CI_ROOT}/PR/${pr}" + db_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}") + output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_single.log" + ############################################################# + # Check if a Ready labeled PR has changed back from once set + # and in that case completely kill the previose driver.sh cron + # job and all its decedands as well as removing all previous + # jobs in scheduler and associated files in the PR + ############################################################# + if [[ "${db_list}" == *"already is in list"* ]]; then + # Get the the PID and HOST of the driver.sh cron job + # that is stored int he CI database for this PR + driver_ID=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --display "${pr}" | awk '{print $4}') || true + driver_PID=$(echo "${driver_ID}" | cut -d":" -f1) || true + driver_HOST=$(echo "${driver_ID}" | cut -d":" -f2) || true + host_name=$(hostname -s) + rm -f "${output_ci_single}" + { + echo "CI Update on ${MACHINE_ID^} at $(date +'%D %r')" || true + echo "=================================================" + echo "PR:${pr} Reset to ${MACHINE_ID^}-Ready by user and is now restarting CI tests" || true + } >> "${output_ci_single}" + if [[ "${driver_PID}" -ne 0 ]]; then + echo "Driver PID: ${driver_PID} no longer running this build having it killed" + if [[ "${driver_HOST}" == "${host_name}" ]]; then + # shellcheck disable=SC2312 + pstree -A -p "${driver_PID}" | grep -Pow "(?<=\()[0-9]+(?=\))" | xargs kill + else + # shellcheck disable=SC2312 + ssh "${driver_HOST}" 'pstree -A -p "${driver_PID}" | grep -Eow "[0-9]+" | xargs kill' + fi + { + echo "Driver PID: Requested termination of ${driver_PID} and children on ${driver_HOST}" + echo "Driver PID: has restarted as $$ on ${host_name}" + } >> "${output_ci_single}" + fi + + experiments=$(find "${pr_dir}/RUNTESTS/EXPDIR" -mindepth 1 -maxdepth 1 -type d) || true + if [[ -z "${experiments}" ]]; then + echo "No current experiments to cancel in PR: ${pr} on ${MACHINE_ID^}" >> "${output_ci_single}" + else + for case in ${experiments}; do + case_name=$(basename "${case}") + cancel_slurm_jobs "${case_name}" + { + echo "Canceled all jobs for experiment ${case_name} in PR:${pr} on ${MACHINE_ID^}" + } >> "${output_ci_single}" + done + fi + sed -i "1 i\`\`\`" "${output_ci_single}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --add_pr "${pr}" --dbfile "${pr_list_dbfile}" + fi done pr_list="" if [[ -f "${pr_list_dbfile}" ]]; then - pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Ready | awk '{print $1}') || true + pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display --dbfile "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Ready | awk '{print $1}') || true fi -if [[ -z "${pr_list}" ]]; then +if [[ -z "${pr_list+x}" ]]; then 
echo "no PRs open and ready for checkout/build .. exiting" exit 0 fi - + ############################################################# # Loop throu all open PRs # Clone, checkout, build, creat set of cases, for each ############################################################# for pr in ${pr_list}; do - - "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Ready" --add-label "CI-${MACHINE_ID^}-Building" - echo "Processing Pull Request #${pr}" + # Skip pr's that are currently Building for when overlapping driver scripts are being called from within cron + pr_building=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display "${pr}" --dbfile "${pr_list_dbfile}" | grep Building) || true + if [[ -z "${pr_building+x}" ]]; then + continue + fi + id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') pr_dir="${GFS_CI_ROOT}/PR/${pr}" + output_ci="${pr_dir}/output_ci_${id}" + output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_single.log" + driver_build_PID=$$ + driver_build_HOST=$(hostname -s) + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Ready" --add-label "CI-${MACHINE_ID^}-Building" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Building "${driver_build_PID}:${driver_build_HOST}" + rm -Rf "${pr_dir}" mkdir -p "${pr_dir}" - # call clone-build_ci to clone and build PR - id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + { + echo "CI Update on ${MACHINE_ID^} at $(date +'%D %r')" || true + echo "============================================" + echo "Cloning and Building global-workflow PR: ${pr}" + echo "with PID: ${driver_build_PID} on host: ${driver_build_HOST}" + echo "" + } >> "${output_ci_single}" + sed -i "1 i\`\`\`" "${output_ci_single}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}" set +e - "${HOMEgfs}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" + "${ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${output_ci}" ci_status=$? + ################################################################## + # Checking for special case when Ready label was updated + # but a race condtion caused the clone-build_ci.sh to start + # and this instance fails before it was killed. In th case we + # we need to exit this instance of the driver script + ################################################################# + if [[ ${ci_status} -ne 0 ]]; then + build_PID_check=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display "{pr}" --dbfile "${pr_list_dbfile}" | awk '{print $4}' | cut -d":" -f1) || true + if [[ "${build_PID_check}" -ne "$$" ]]; then + echo "Driver build PID: ${build_PID_check} no longer running this build ... 
exiting" + exit 0 + fi + fi set -e if [[ ${ci_status} -eq 0 ]]; then - "${HOMEgfs}/ci/scripts/pr_list_database.py" --update_pr "${pr}" Open Built "${pr_list_dbfile}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Built "0:0" #setup space to put an experiment # export RUNTESTS for yaml case files to pickup export RUNTESTS="${pr_dir}/RUNTESTS" - #rm -Rf "${pr_dir:?}/RUNTESTS/"* + rm -Rf "${pr_dir:?}/RUNTESTS/"* ############################################################# - # loop over every yaml file in ${HOMEgfs}/ci/cases + # loop over every yaml file in the PR's ci/cases # and create an run directory for each one for this PR loop ############################################################# - for yaml_config in "${HOMEgfs}/ci/cases/"*.yaml; do - pslot=$(basename "${yaml_config}" .yaml) || true - export pslot - sed -i "/^base:/a\ ACCOUNT: \${SLURM_ACCOUNT}" "${pr_dir}/global-workflow/parm/config/gfs/yaml/defaults.yaml" - sed -i "/^base:/a\ ACCOUNT: \${SLURM_ACCOUNT}" "${pr_dir}/global-workflow/parm/config/gefs/yaml/defaults.yaml" + HOMEgfs="${pr_dir}/global-workflow" + cd "${HOMEgfs}" + pr_sha=$(git rev-parse --short HEAD) + + for yaml_config in "${HOMEgfs}/ci/cases/pr/"*.yaml; do + case=$(basename "${yaml_config}" .yaml) || true + # export pslot for yaml case files to pickup + export pslot="${case}_${pr_sha}" + rm -Rf "${STMP}/RUNDIRS/${pslot}" set +e - "${HOMEgfs}/ci/scripts/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/${pslot}.yaml" --dir "${pr_dir}/global-workflow" + export LOGFILE_PATH="${HOMEgfs}/ci/scripts/create_experiment.log" + rm -f "${LOGFILE_PATH}" + "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml" > "${LOGFILE_PATH}" 2>&1 ci_status=$? set -e if [[ ${ci_status} -eq 0 ]]; then + last_line=$(tail -1 "${LOGFILE_PATH}") + if [[ "${last_line}" == *"Skipping creation"* ]]; then + action="Skipped" + else + action="Completed" + fi { - echo "Created experiment: *SUCCESS*" - echo "Case setup: Completed at $(date) for experiment ${pslot}" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running" - "${HOMEgfs}/ci/scripts/pr_list_database.py" --update_pr "${pr}" Open Running "${pr_list_dbfile}" - else + echo "Case setup: ${action} for experiment ${pslot}" || true + } >> "${output_ci}" + else { - echo "Failed to create experiment}: *FAIL* ${pslot}" - echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + echo "*** Failed *** to create experiment: ${pslot} on ${MACHINE_ID^}" + echo "" + cat "${LOGFILE_PATH}" + } >> "${output_ci}" "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" - "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" + exit 1 fi done - else + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running "0:0" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" + + else { echo "Failed on cloning and building 
global-workflow PR: ${pr}" - echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}}" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}" || true + } >> "${output_ci}" "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" - "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" fi - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" done # looping over each open and labeled PR
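For orientation, the driver above moves each pull request through the states Ready, Building, Built, and Running (or Failed), mirrored in both the GitHub labels and the SQLite database. A minimal sketch of the equivalent manual database updates (the PR number, PID, and hostname are illustrative; the flags are the ones defined in pr_list_database.py below):

::

   # mark PR 123 as building, recording PID:host so overlapping drivers can detect it
   pr_list_database.py --dbfile open_pr_list.db --update_pr 123 Open Building "12345:hfe01"
   # after a successful clone/build
   pr_list_database.py --dbfile open_pr_list.db --update_pr 123 Open Built "0:0"
   # once the experiments are created and ready to advance
   pr_list_database.py --dbfile open_pr_list.db --update_pr 123 Open Running "0:0"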
diff --git a/ci/scripts/driver_weekly.sh b/ci/scripts/driver_weekly.sh new file mode 100755 index 0000000000..88b027d100 --- /dev/null +++ b/ci/scripts/driver_weekly.sh @@ -0,0 +1,118 @@ +#!/bin/bash +set -eux + +############################################################################################## +# +# Script description: Top level driver script for running +# weekly CI regression tests +# +# Abstract: +# +# This script runs the high resolution cases found in ${HOMEgfs}/ci/cases/weekly +# from the develop branch of the global-workflow repo that are intended to run on a weekly basis +# from a cron job. When run, it will clone and build a new branch from EMC's global-workflow +# and create a PR using the GitHub CLI by moving and replacing the yaml case files in +# ${HOMEgfs}/ci/cases/weekly to ${HOMEgfs}/ci/cases/pr. Then the requisite labels are added +# so that the current BASH CI framework can then run these cases. Since this script +# creates a PR with the CI-Ready labels, the BASH CI framework will automatically run these cases +# from that point, so it is only required to run this script once from a single machine. +############################################################################################## + +################################################################# +# TODO using static build for GitHub CLI until fixed in HPC-Stack +################################################################# +export GH=${HOME}/bin/gh +export REPO_URL="ssh://git@ssh.github.com:443/NOAA-EMC/global-workflow.git" + +################################################################ +# Setup the relative paths to scripts and PS4 for better logging +################################################################ +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" +scriptname=$(basename "${BASH_SOURCE[0]}") +echo "Begin ${scriptname} at $(date -u)" || true +export PS4='+ $(basename ${BASH_SOURCE[0]})[${LINENO}]' + +######################################################################### +# Set up runtime environment variables for accounts on supported machines +######################################################################### + +source "${ROOT_DIR}/ush/detect_machine.sh" +case ${MACHINE_ID} in + hera | orion) + echo "Running Automated Testing on ${MACHINE_ID}" + source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}" + ;; + *) + echo "Unsupported platform. Exiting with error." + exit 1 + ;; +esac + ###################################################### # setup runtime env for correct python install and git ###################################################### set +x source "${ROOT_DIR}/ush/module-setup.sh" module use "${ROOT_DIR}/modulefiles" module load "module_gwsetup.${MACHINE_ID}" set -x + ######################################################### # Create a new branch from develop and move yaml files ######################################################### branch="weekly_ci_$(date +%Y%m%d)" develop_dir="${GFS_CI_ROOT}/develop_weekly" echo "Creating new branch ${branch} from develop on ${MACHINE_ID} in ${develop_dir}" rm -Rf "${develop_dir}" mkdir -p "${develop_dir}" cd "${develop_dir}" || exit 1 git clone --recursive "${REPO_URL}" cd global-workflow || exit 1 git checkout -b "${branch}" + ###################################################### # move yaml files from ci/cases/weekly to ci/cases/pr # and push new branch for PR weekly CI tests to GitHub REPO_OWNER="emcbot" REPO_NAME="global-workflow" REMOTE_NAME="${REPO_OWNER}" + rm -Rf ci/cases/pr mv ci/cases/weekly ci/cases/pr git add ci/cases git commit -m "Moved weekly cases files into pr for high resolution testing" + git remote add "${REMOTE_NAME}" "git@github.com:${REPO_OWNER}/${REPO_NAME}.git" + set +e # Delete the branch if it exists git ls-remote --exit-code "${REMOTE_NAME}" "${branch}" ci_status=$? if [[ "${ci_status}" == '0' ]]; then git push "${REMOTE_NAME}" --delete "${branch}" fi set -e + git push --set-upstream "${REMOTE_NAME}" "${branch}" + #################################################################### # Create Pull Request using GitHub CLI and add labels for CI testing #################################################################### + HEAD_BRANCH="${REPO_OWNER}:${branch}" BASE_BRANCH="develop" PULL_REQUEST_TITLE="[DO NOT MERGE] Weekly CI Tests $(date +'%A %b %d, %Y')" PULL_REQUEST_BODY="${PULL_REQUEST_TITLE}" PULL_REQUEST_LABELS=("CI/CD" "CI-Orion-Ready" "CI-Hera-Ready") + "${GH}" repo set-default "NOAA-EMC/global-workflow" "${GH}" pr create --title "${PULL_REQUEST_TITLE}" --body "${PULL_REQUEST_BODY}" --base "${BASE_BRANCH}" --head "${HEAD_BRANCH}" "${GH}" pr ready --undo + # Add labels to the pull request for label in "${PULL_REQUEST_LABELS[@]}" do "${GH}" pr edit --add-label "${label}" done cd "${GFS_CI_ROOT}" rm -Rf "${develop_dir}"
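The abstract above notes that driver_weekly.sh is meant to fire from cron on a single machine. A hypothetical crontab entry (the schedule, script path, and log path are illustrative only):

::

   # kick off the weekly high-resolution CI run every Saturday at 23:00
   0 23 * * 6 /path/to/global-workflow/ci/scripts/driver_weekly.sh >> /path/to/logs/driver_weekly.log 2>&1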
+ + """ + + if os.path.isfile(string) or os.path.isdir(os.path.dirname(string)): + return os.path.abspath(string) + else: + raise NotADirectoryError(string) + + +def sql_connection(filename: os.path) -> sqlite3.Connection: """ Returns an Sqlite3 Cursor object from a given path to a sqlite3 database file @@ -22,7 +50,7 @@ def sql_connection(filename: Path) -> sqlite3.Connection: """ try: - return sqlite3.connect(Path(filename)) + return sqlite3.connect(filename) except sqlite3.Error: print(sqlite3.Error) sys.exit(-1) @@ -39,7 +67,7 @@ def sql_table(obj: sqlite3.Cursor) -> None: """ - obj.execute("CREATE TABLE processing(pr integer PRIMARY KEY, state text, status text)") + obj.execute("CREATE TABLE processing(pr integer PRIMARY KEY, state text, status text, reset_id integer, cases text)") def sql_insert(obj: sqlite3.Cursor, entities: list) -> None: @@ -51,14 +79,19 @@ def sql_insert(obj: sqlite3.Cursor, entities: list) -> None: obj : sqlite3.Cursor Cursor object for Sqlite3 entities : list - The list three string values that go into sqlite table (pr, state, status) + A list of four string values that go into sqlite table (pr, state, status, reset_id, cases) + pr: pull request number + state: The new value for the state (Open, Closed) + status: The new value for the status (Ready, Running, Failed) + reset_id: The value for number of times reset_id to Ready + cases: String containing case selection information """ - obj.execute('INSERT INTO processing(pr, state, status) VALUES(?, ?, ?)', entities) + obj.execute('INSERT INTO processing(pr, state, status, reset_id, cases) VALUES(?, ?, ?, ?, ?)', entities) -def sql_update(obj: sqlite3.Cursor, pr: str, state: str, status: str) -> None: +def sql_update(obj: sqlite3.Cursor, pr: str, updates: dict) -> None: """Updates table for a given pr with new values for state and status Parameters @@ -67,14 +100,20 @@ def sql_update(obj: sqlite3.Cursor, pr: str, state: str, status: str) -> None: sqlite3 Cursor Object pr : str The given pr number to update in the table - state : str - The new value for the state (Open, Closed) - status: str - The new value for the status (Ready, Running, Failed) + updates : dict + Dictionary of values to update for a given PR to include by postion + state, The new value for the state (Open, Closed) + status, The new value for the status (Ready, Running, Failed) + reset_id, The value for number of times reset_id to Ready + cases, Information regarding which cases are used (i.e. 
self PR) """ - obj.execute(f'UPDATE processing SET state = "{state}", status = "{status}" WHERE pr = {pr}') + update_list = ['state', 'status', 'reset_id', 'cases'] + for value in updates: + update = update_list.pop(0) + obj.execute(f'UPDATE processing SET "{update}" = "{value}" WHERE pr = {pr}') def sql_fetch(obj: sqlite3.Cursor) -> list: @@ -114,12 +153,13 @@ def input_args(): parser = ArgumentParser(description=description, formatter_class=ArgumentDefaultsHelpFormatter) - parser.add_argument('sbfile', help='SQLite3 database file with PR list', type=str) + parser.add_argument('--dbfile', help='SQLite3 database file with PR list', type=full_path) parser.add_argument('--create', help='create sqlite file for pr list status', action='store_true', required=False) parser.add_argument('--add_pr', nargs=1, metavar='PR', help='add new pr to list (defaults to: Open,Ready)', required=False) parser.add_argument('--remove_pr', nargs=1, metavar='PR', help='removes pr from list', required=False) - parser.add_argument('--update_pr', nargs=3, metavar=('pr', 'state', 'status'), help='updates state and status of a given pr', required=False) - parser.add_argument('--display', help='output pr table', action='store_true', required=False) + parser.add_argument('--update_pr', nargs=REMAINDER, metavar=('pr', 'state', 'status', 'reset_id', 'cases'), + help='updates state and status of a given pr', required=False) + parser.add_argument('--display', nargs='*', help='output pr table', required=False) args = parser.parse_args() return args @@ -129,7 +169,12 @@ args = input_args() - con = sql_connection(args.sbfile) + if not args.create: + if not os.path.isfile(args.dbfile): + print(f'Error: {args.dbfile} does not exist') + sys.exit(-1) + + con = sql_connection(args.dbfile) obj = con.cursor() if args.create: @@ -142,22 +187,29 @@ print(f"pr {row[0]} already is in list: nothing added") sys.exit(0) - entities = (args.add_pr[0], 'Open', 'Ready') + entities = (args.add_pr[0], 'Open', 'Ready', 0, 'ci_repo') sql_insert(obj, entities) if args.update_pr: + if len(args.update_pr) < 2: + print("update_pr must have at least one value to update") + sys.exit(0) pr = args.update_pr[0] - state = args.update_pr[1] - status = args.update_pr[2] - sql_update(obj, pr, state, status) + + sql_update(obj, pr, args.update_pr[1:]) if args.remove_pr: sql_remove(obj, args.remove_pr[0]) - if args.display: + if args.display is not None: rows = sql_fetch(obj) - for row in rows: - print(' '.join(map(str, row))) + if len(args.display) == 1: + for row in rows: + if int(args.display[0]) == int(row[0]): + print(' '.join(map(str, row))) + else: + for row in rows: + print(' '.join(map(str, row))) con.commit() con.close()
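A few sample invocations of the updated command-line interface (the PR number is illustrative; the flags and defaults are the ones defined above; note that --dbfile must precede --update_pr, since nargs=REMAINDER consumes everything after it):

::

   # create a fresh database
   pr_list_database.py --create --dbfile open_pr_list.db
   # add PR 123 (state/status default to Open/Ready, reset_id to 0, cases to ci_repo)
   pr_list_database.py --add_pr 123 --dbfile open_pr_list.db
   # display a single PR's row, or the whole table
   pr_list_database.py --display 123 --dbfile open_pr_list.db
   pr_list_database.py --display --dbfile open_pr_list.db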
diff --git a/ci/scripts/run-check_ci.sh b/ci/scripts/run-check_ci.sh new file mode 100755 index 0000000000..a5c5369ef7 --- /dev/null +++ b/ci/scripts/run-check_ci.sh @@ -0,0 +1,107 @@ +#!/bin/bash + +set -eu + +##################################################################################### +# Script description: script to check the status of an experiment as reported +# by Rocoto +##################################################################################### + +TEST_DIR=${1:-${TEST_DIR:-?}} # Location of the root of the testing directory +pslot=${2:-${pslot:-?}} # Name of the experiment being tested by this script + +# TEST_DIR contains 2 directories; +# 1. HOMEgfs: clone of the global-workflow +# 2. RUNTESTS: A directory containing EXPDIR and COMROT for experiments +# # e.g. $> tree ./TEST_DIR +# ./TEST_DIR +# ├── HOMEgfs +# └── RUNTESTS +# ├── COMROT +# │   └── ${pslot} +# └── EXPDIR +# └── ${pslot} +HOMEgfs="${TEST_DIR}/HOMEgfs" +RUNTESTS="${TEST_DIR}/RUNTESTS" + +# Source modules and setup logging +echo "Source modules." +source "${HOMEgfs}/workflow/gw_setup.sh" + +# cd into the experiment directory +echo "cd ${RUNTESTS}/EXPDIR/${pslot}" +cd "${RUNTESTS}/EXPDIR/${pslot}" || (echo "FATAL ERROR: Unable to cd into '${RUNTESTS}/EXPDIR/${pslot}', ABORT!"; exit 1) + +# Name of the Rocoto XML and database files +xml="${pslot}.xml" +db="${pslot}.db" + +# Ensure the XML is present for the experiment +if [[ ! -f "${xml}" ]]; then + echo "FATAL ERROR: XML file ${xml} not found in '${pslot}', experiment ${pslot} failed, ABORT!" + exit 1 +fi + +# Launch experiment +echo "Launch experiment with Rocoto." +rocotorun -v "${ROCOTO_VERBOSE:-0}" -w "${xml}" -d "${db}" +sleep 30 +if [[ ! -f "${db}" ]]; then + echo "FATAL ERROR: Rocoto database file ${db} not found, experiment ${pslot} failed, ABORT!" + exit 2 +fi + +# Experiment launched +rc=99 +while true; do + + echo "Run rocotorun." + rocotorun -v "${ROCOTO_VERBOSE:-0}" -w "${xml}" -d "${db}" + + # Wait before running rocotostat + sleep 30 + + # Get job statistics + echo "Gather Rocoto statistics" + rocotostat_output=$(rocotostat -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true + num_cycles=$(echo "${rocotostat_output}" | wc -l) || true + num_done=$(echo "${rocotostat_output}" | grep -c Done) || true + num_succeeded=$(rocotostat -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + num_failed=$(rocotostat -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true + + echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" + + if [[ ${num_failed} -ne 0 ]]; then + { + echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true + echo "Experiment ${pslot} Terminated: *FAILED*" + } >> "${RUNTESTS}/ci.log" + + error_logs=$(rocotostat -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs rocotocheck -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true + { + echo "Error logs:" + echo "${error_logs}" + } >> "${RUNTESTS}/ci.log" + sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log" + sacct --format=jobid,jobname%35,WorkDir%100,stat | grep "${pslot}" | grep "/RUNTESTS" | awk '{print $1}' | xargs scancel || true + rc=1 + break + fi + + if [[ "${num_done}" -eq "${num_cycles}" ]]; then + { + echo "Experiment ${pslot} Completed at $(date)" || true + echo "with ${num_succeeded} successfully completed jobs" || true + echo "Experiment ${pslot} Completed: *SUCCESS*" + } >> "${RUNTESTS}/ci.log" + sed -i "s/\`\`\`//2g" "${RUNTESTS}/ci.log" + rc=0 + break + fi + + # Wait before running rocotorun again + sleep 300 + +done + +exit "${rc}"
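For reference, a hypothetical invocation of this monitor (paths are illustrative; the pslot follows the <case>_<pr_sha> pattern exported by the driver above):

::

   # poll the C48_ATM experiment under a PR's test directory until it completes or fails
   ./ci/scripts/run-check_ci.sh /path/to/GFS_CI_ROOT/PR/123 C48_ATM_3c4e7f74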
This script is intended # to run from within a cron job in the CI Managers account # Abstract TODO ##################################################################################### -HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" scriptname=$(basename "${BASH_SOURCE[0]}") echo "Begin ${scriptname} at $(date -u)" || true export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' @@ -18,11 +18,11 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' # Set up runtime environment varibles for accounts on supproted machines ######################################################################### -source "${HOMEgfs}/ush/detect_machine.sh" +source "${ROOT_DIR}/ush/detect_machine.sh" case ${MACHINE_ID} in hera | orion) echo "Running Automated Testing on ${MACHINE_ID}" - source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}" ;; *) echo "Unsupported platform. Exiting with error." @@ -30,27 +30,27 @@ case ${MACHINE_ID} in ;; esac set +x -source "${HOMEgfs}/ush/module-setup.sh" -module use "${HOMEgfs}/modulefiles" +source "${ROOT_DIR}/ush/module-setup.sh" +module use "${ROOT_DIR}/modulefiles" module load "module_gwsetup.${MACHINE_ID}" module list set -eux -rocotorun=$(which rocotorun) -if [[ -z ${var+x} ]]; then - echo "rocotorun being used from ${rocotorun}" -else +rocotorun=$(command -v rocotorun) +if [[ -z ${rocotorun} ]]; then echo "rocotorun not found on system" exit 1 +else + echo "rocotorun being used from ${rocotorun}" fi pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db" pr_list="" if [[ -f "${pr_list_dbfile}" ]]; then - pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Running | awk '{print $1}' | head -"${max_concurrent_pr}") || true + pr_list=$("${ROOT_DIR}/ci/scripts/pr_list_database.py" --display --dbfile "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Running | awk '{print $1}' | head -"${max_concurrent_pr}") || true fi if [[ -z "${pr_list}" ]]; then - echo "no PRs open and ready for checkout/build .. exiting" + echo "no open and built PRs that are ready for the cases to advance with rocotorun .. exiting" exit 0 fi @@ -69,8 +69,8 @@ for pr in ${pr_list}; do continue fi num_cases=0 - for cases in "${pr_dir}/RUNTESTS/"*; do - if [[ ! -d "${cases}" ]]; then + for pslot_dir in "${pr_dir}/RUNTESTS/EXPDIR/"*; do + if [[ ! 
-d "${pslot_dir}" ]]; then continue fi ((num_cases=num_cases+1)) @@ -78,10 +78,10 @@ for pr in ${pr_list}; do if [[ "${num_cases}" -gt "${max_concurrent_cases}" ]]; then continue fi - pslot=$(basename "${cases}") - xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml" - db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db" - echo "Running: ${rocotorun} -v 6 -w ${xml} -d ${db}" + pslot=$(basename "${pslot_dir}") + xml="${pslot_dir}/${pslot}.xml" + db="${pslot_dir}/${pslot}.db" + echo "Running: ${rocotorun} -v 10 -w ${xml} -d ${db}" "${rocotorun}" -v 10 -w "${xml}" -d "${db}" done done diff --git a/ci/scripts/utils/ci_utils.sh b/ci/scripts/utils/ci_utils.sh new file mode 100755 index 0000000000..737a3e5a86 --- /dev/null +++ b/ci/scripts/utils/ci_utils.sh @@ -0,0 +1,24 @@ +#!/bin/env bash + +function cancel_slurm_jobs() { + + # Usage: cancel_slurm_jobs + # Example: cancel_slurm_jobs "C48_ATM_3c4e7f74" + # + # Cancel all Slurm jobs that have the given substring in their name + # So like in the example all jobs with "C48_ATM_3c4e7f74" + # in their name will be canceled + + local substring=$1 + local job_ids + job_ids=$(squeue -u "${USER}" -h -o "%i") + + for job_id in ${job_ids}; do + job_name=$(sacct -j "${job_id}" --format=JobName%100 | head -3 | tail -1 | sed -r 's/\s+//g') || true + if [[ "${job_name}" =~ ${substring} ]]; then + echo "Canceling Slurm Job ${job_name} with: scancel ${job_id}" + scancel "${job_id}" + continue + fi + done +} diff --git a/docs/note_fixfield.txt b/docs/note_fixfield.txt index af2539e48a..a7a4001561 100644 --- a/docs/note_fixfield.txt +++ b/docs/note_fixfield.txt @@ -3,7 +3,7 @@ For EMC, the fix fields for running the model are not included in git repository They are saved locally on all platforms Hera: /scratch1/NCEPDEV/global/glopara/fix -Orion: /work/noaa/global/glopara/fix +Orion/Hercules: /work/noaa/global/glopara/fix Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix S4: /data/prod/glopara/fix diff --git a/docs/requirements.txt b/docs/requirements.txt index 9c7258463b..39372ec99d 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,5 @@ -sphinxcontrib-bibtex +sphinx sphinx_rtd_theme +sphinxcontrib-bibtex +sphinx-autobuild +sphinx-copybutton diff --git a/docs/source/_static/noaacsp_cluster_1.png b/docs/source/_static/noaacsp_cluster_1.png new file mode 100644 index 0000000000..3fdc0e68b8 Binary files /dev/null and b/docs/source/_static/noaacsp_cluster_1.png differ diff --git a/docs/source/_static/noaacsp_cluster_2.png b/docs/source/_static/noaacsp_cluster_2.png new file mode 100644 index 0000000000..0fc3b2896d Binary files /dev/null and b/docs/source/_static/noaacsp_cluster_2.png differ diff --git a/docs/source/_static/noaacsp_cluster_3.png b/docs/source/_static/noaacsp_cluster_3.png new file mode 100644 index 0000000000..bf3991b7ff Binary files /dev/null and b/docs/source/_static/noaacsp_cluster_3.png differ diff --git a/docs/source/_static/noaacsp_cluster_4.png b/docs/source/_static/noaacsp_cluster_4.png new file mode 100644 index 0000000000..9294d40bbe Binary files /dev/null and b/docs/source/_static/noaacsp_cluster_4.png differ diff --git a/docs/source/_static/noaacsp_cluster_5.png b/docs/source/_static/noaacsp_cluster_5.png new file mode 100644 index 0000000000..9fd7a96e40 Binary files /dev/null and b/docs/source/_static/noaacsp_cluster_5.png differ diff --git a/docs/source/_static/noaacsp_cluster_6.png b/docs/source/_static/noaacsp_cluster_6.png new file mode 100644 index 0000000000..79287bc1e7 Binary files /dev/null 
and b/docs/source/_static/noaacsp_cluster_6.png differ diff --git a/docs/source/_static/noaacsp_instance_1.png b/docs/source/_static/noaacsp_instance_1.png new file mode 100644 index 0000000000..0e06fe345b Binary files /dev/null and b/docs/source/_static/noaacsp_instance_1.png differ diff --git a/docs/source/_static/noaacsp_instance_2.png b/docs/source/_static/noaacsp_instance_2.png new file mode 100644 index 0000000000..7c74d32853 Binary files /dev/null and b/docs/source/_static/noaacsp_instance_2.png differ diff --git a/docs/source/_static/noaacsp_instance_3.png b/docs/source/_static/noaacsp_instance_3.png new file mode 100644 index 0000000000..f1031fb576 Binary files /dev/null and b/docs/source/_static/noaacsp_instance_3.png differ diff --git a/docs/source/_static/noaacsp_instance_4.png b/docs/source/_static/noaacsp_instance_4.png new file mode 100644 index 0000000000..f4aedb27d1 Binary files /dev/null and b/docs/source/_static/noaacsp_instance_4.png differ diff --git a/docs/source/_static/noaacsp_login.png b/docs/source/_static/noaacsp_login.png new file mode 100644 index 0000000000..fd2ea73144 Binary files /dev/null and b/docs/source/_static/noaacsp_login.png differ diff --git a/docs/source/_static/noaacsp_using_1.png b/docs/source/_static/noaacsp_using_1.png new file mode 100644 index 0000000000..68550db050 Binary files /dev/null and b/docs/source/_static/noaacsp_using_1.png differ diff --git a/docs/source/_static/noaacsp_using_2.png b/docs/source/_static/noaacsp_using_2.png new file mode 100644 index 0000000000..4d1899f8f5 Binary files /dev/null and b/docs/source/_static/noaacsp_using_2.png differ diff --git a/docs/source/clone.rst b/docs/source/clone.rst index c31968ec2e..bad3f0e9f6 100644 --- a/docs/source/clone.rst +++ b/docs/source/clone.rst @@ -11,26 +11,41 @@ Quick clone/build/link instructions (more detailed instructions below). .. note:: Here we are making the assumption that you are using the workflow to run an experiment and so are working from the authoritative repository. If you are using a development branch then follow the instructions in :doc:`development.rst`. Once you do that you can follow the instructions here with the only difference being the repository/fork you are cloning from. -For forecast-only (coupled or uncoupled): +Clone the `global-workflow` and `cd` into the `sorc` directory: :: - git clone https://github.com/NOAA-EMC/global-workflow.git + git clone --recursive https://github.com/NOAA-EMC/global-workflow cd global-workflow/sorc - ./checkout.sh - ./build_all.sh - ./link_workflow.sh -For cycled (w/ data assimilation): +For forecast-only (coupled or uncoupled) build of the components: :: - git clone https://github.com/NOAA-EMC/global-workflow.git - cd global-workflow/sorc - ./checkout.sh -g ./build_all.sh + +For cycled (w/ data assimilation) use the `-g` option during build: + +:: + + ./build_all.sh -g + +For coupled cycling (including the new UFSDA) use the `-gu` options during build: + +[Currently only available on Hera, Orion, and Hercules] + +:: + + ./build_all.sh -gu + + +Build workflow components and link workflow artifacts such as executables, etc. + +:: + ./link_workflow.sh + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Clone workflow and component repositories ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -39,24 +54,13 @@ Clone workflow and component repositories Workflow ******** -There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using either the ssh or https methods. 
**The ssh method is highly preferred and recommended.** - -ssh method (using a password protected SSH key): +There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using the `https` method. :: - git clone git@github.com:NOAA-EMC/global-workflow.git + git clone https://github.com/NOAA-EMC/global-workflow -.. note:: - When using ssh methods you need to make sure that your GitHub account is configured for the computer from which you are accessing the repository (See `this link `_) - -https method: - -:: - - git clone https://github.com/NOAA-EMC/global-workflow.git - -Check what you just cloned (by default you will have only the develop branch): +Check what you just cloned (by default you will have only the `develop` branch): :: @@ -68,7 +72,7 @@ You now have a cloned copy of the global-workflow git repository. To checkout a :: - git checkout BRANCH_NAME + git checkout --recurse-submodules BRANCH_NAME .. note:: Branch must already exist. If it does not you need to make a new branch using the ``-b`` flag: @@ -81,60 +85,55 @@ The ``checkout`` command will checkout BRANCH_NAME and switch your clone to that :: - git checkout my_branch + git checkout --recurse-submodules my_branch git branch - * my_branch + * my_branch develop -********** -Components -********** +Using ``--recurse-submodules`` is important to ensure you are updating the component versions to match the branch. + +^^^^^^^^^^^^^^^^ +Build components +^^^^^^^^^^^^^^^^ -Once you have cloned the workflow repository it's time to checkout/clone its components. The components will be checked out under the ``/sorc`` folder via a script called checkout.sh. Run the script with no arguments for forecast-only: +Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. After checking out the branch you wish to use, run this script to build all component codes: :: - cd sorc - ./checkout.sh - -Or with the ``-g`` switch to include data assimilation (GSI) for cycling: + ./build_all.sh [-a UFS_app][-g][-h][-u][-v] + -a UFS_app: + Build a specific UFS app instead of the default + -g: + Build GSI + -h: + Print this help message and exit + -j: + Specify maximum number of build jobs (n) + -u: + Build UFS-DA + -v: + Execute all build scripts with -v option to turn on verbose where supported + +For forecast-only (coupled or uncoupled) build of the components: :: - cd sorc - ./checkout.sh -g + ./build_all.sh -If wishing to run with the operational GTG UPP and WAFS (only for select users) provide the ``-o`` flag with checkout.sh: +For cycled (w/ data assimilation) use the `-g` option during build: :: - ./checkout.sh -o + ./build_all.sh -g -Each component cloned via checkout.sh will have a log (``/sorc/logs/checkout-COMPONENT.log``). Check the screen output and logs for clone errors. +For coupled cycling (including the new UFSDA) use the `-gu` options during build: -^^^^^^^^^^^^^^^^ -Build components -^^^^^^^^^^^^^^^^ - -Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. 
After running checkout.sh run this script to build all components codes: +[Currently only available on Hera, Orion, and Hercules] :: - ./build_all.sh [-a UFS_app][-c build_config][-h][-v] - -a UFS_app: - Build a specific UFS app instead of the default - -c build_config: - Selectively build based on the provided config instead of the default config - -h: - Print usage message and exit - -v: - Run all scripts in verbose mode - -A partial build option is also available via two methods: - - a) modify gfs_build.cfg config file to disable/enable particular builds and then rerun build_all.sh + ./build_all.sh -gu - b) run individual build scripts also available in ``/sorc`` folder for each component or group of codes ^^^^^^^^^^^^^^^ Link components diff --git a/docs/source/components.rst b/docs/source/components.rst index 9e4377f739..98e76b467b 100644 --- a/docs/source/components.rst +++ b/docs/source/components.rst @@ -13,13 +13,13 @@ The major components of the system are: * Post-processing * Verification -The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder. +The Global Workflow repository contains the workflow and script layers. External components will be checked out as git submodules. All of the submodules of the system reside in their respective repositories on GitHub. ====================== Component repositories ====================== -Components checked out via sorc/checkout.sh: +Components included as submodules: * **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration * **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. The UFS-weather-model repository is an umbrella repository consisting of coupled earth system components that are all checked out when we check out the code at the top level of the repository @@ -28,11 +28,11 @@ Components checked out via sorc/checkout.sh: * **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values * **GDAS** (https://github.com/NOAA-EMC/GDASApp): Jedi based Data Assimilation system. This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well * **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model +* **wxflow** (https://github.com/NOAA-EMC/wxflow): Collection of python utilities for weather workflows * **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only -* **GFS WAFS** (https://github.com/NOAA-EMC/EMC_gfs_wafs): Additional post processing products for Aircrafts .. note:: - When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not checked out. 
+ When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not built. ===================== External dependencies @@ -42,11 +42,11 @@ External dependencies Libraries ^^^^^^^^^ -All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms +All the libraries that are needed to run the end to end Global Workflow are built using a package manager. These are served via spack-stack. These libraries are already available on supported NOAA HPC platforms. -Find information on official installations of HPC-STACK here: +Find information on official installations of spack-stack here: -https://github.com/NOAA-EMC/hpc-stack/wiki/Official-Installations +https://github.com/JCSDA/spack-stack/wiki/Porting-spack-stack-to-a-new-system ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Observation data (OBSPROC/prep) @@ -58,7 +58,7 @@ Data Observation data, also known as dump data, is prepared in production and then archived in a global dump archive (GDA) for use by users when running cycled experiments. The GDA (identified as ``$DMPDIR`` in the workflow) is available on supported platforms and the workflow system knows where to find the data. * Hera: /scratch1/NCEPDEV/global/glopara/dump -* Orion: /work/noaa/rstprod/dump +* Orion/Hercules: /work/noaa/rstprod/dump * Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/dump * WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/dump * S4: /data/prod/glopara/dump diff --git a/docs/source/conf.py b/docs/source/conf.py index c0f9ca572a..89526d9f69 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -80,7 +80,17 @@ # documentation. # # html_theme_options = {} -html_theme_options = {"body_max_width": "none"} +html_theme_options = {"body_max_width": "none", + "footer_icons": [ + {"name": "GitHub", + "url": "https://github.com/NOAA-EMC/global-workflow", + "html": """ + + + + """, + "class": ""}] + } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, diff --git a/docs/source/configure.rst b/docs/source/configure.rst index 477e95cec7..12c2f75a48 100644 --- a/docs/source/configure.rst +++ b/docs/source/configure.rst @@ -4,50 +4,58 @@ Configure Run The global-workflow configs contain switches that change how the system runs. Many defaults are set initially. Users wishing to run with different settings should adjust their $EXPDIR configs and then rerun the ``setup_xml.py`` script since some configuration settings/switches change the workflow/xml ("Adjusts XML" column value is "YES"). 
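For example, to flip one of the switches below and have the workflow pick it up, edit the experiment config and regenerate the XML (a sketch; the exact export syntax in ``config.base`` and the ``setup_xml.py`` invocation may differ by version):

::

   # turn off the tracker job in the experiment's config.base
   sed -i 's/DO_TRACKER="YES"/DO_TRACKER="NO"/' $EXPDIR/config.base
   # rerun setup_xml.py so the Rocoto XML reflects the new switch
   ./workflow/setup_xml.py $EXPDIR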
-+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| Switch | What | Default | Adjusts XML | More Details | -+================+==============================+===============+=============+===================================================+ -| APP | Model application | ATM | YES | See case block in config.base for options | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DOIAU | Enable 4DIAU for control | YES | NO | Turned off for cold-start first half cycle | -| | with 3 increments | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DOHYBVAR | Run EnKF | YES | YES | Don't recommend turning off | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DONST | Run NSST | YES | NO | If YES, turns on NSST in anal/fcst steps, and | -| | | | | turn off rtgsst | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_AWIPS | Run jobs to produce AWIPS | NO | YES | downstream processing, ops only | -| | products | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_BUFRSND | Run job to produce BUFR | NO | YES | downstream processing | -| | sounding products | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_GEMPAK | Run job to produce GEMPAK | NO | YES | downstream processing, ops only | -| | products | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_VRFY | Run vrfy job | NO | YES | Whether to include vrfy job (GSI monitoring, | -| | | | | tracker, VSDB, fit2obs) | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| DO_METP | Run METplus jobs | YES | YES | One cycle spinup | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| EXP_WARM_START | Is experiment starting warm | .false. | NO | Impacts IAU settings for initial cycle. Can also | -| | (.true.) or cold (.false)? | | | be set when running ``setup_expt.py`` script with | -| | | | | the ``--start`` flag (e.g. ``--start warm``) | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| HPSSARCH | Archive to HPPS | NO | Possibly | Whether to save output to tarballs on HPPS | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| LOCALARCH | Archive to a local directory | NO | Possibly | Instead of archiving data to HPSS, archive to a | -| | | | | local directory, specified by ATARDIR. If | -| | | | | LOCALARCH=YES, then HPSSARCH must =NO. Changing | -| | | | | HPSSARCH from YES to NO will adjust the XML. | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| QUILTING | Use I/O quilting | .true. | NO | If .true. 
choose OUTPUT_GRID as cubed_sphere_grid | -| | | | | in netcdf or gaussian_grid | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| WAFSF | Run jobs to produce WAFS | NO | YES | downstream processing, ops only | -| | products | | | | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ -| WRITE_DOPOST | Run inline post | .true. | NO | If .true. produces master post output in forecast | -| | | | | job | -+----------------+------------------------------+---------------+-------------+---------------------------------------------------+ ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| Switch | What | Default | Adjusts XML | More Details | ++================+==================================+===============+=============+===================================================+ +| APP | Model application | ATM | YES | See case block in config.base for options | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DOIAU | Enable 4DIAU for control | YES | NO | Turned off for cold-start first half cycle | +| | with 3 increments | | | | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DOHYBVAR | Run EnKF | YES | YES | Don't recommend turning off | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DONST | Run NSST | YES | NO | If YES, turns on NSST in anal/fcst steps, and | +| | | | | turn off rtgsst | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_AWIPS | Run jobs to produce AWIPS | NO | YES | downstream processing, ops only | +| | products | | | | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_BUFRSND | Run job to produce BUFR | NO | YES | downstream processing | +| | sounding products | | | | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_GEMPAK | Run job to produce GEMPAK | NO | YES | downstream processing, ops only | +| | products | | | | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_FIT2OBS | Run FIT2OBS job | YES | YES | Whether to run the FIT2OBS job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_TRACKER | Run tracker job | YES | YES | Whether to run the tracker job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_GENESIS | Run genesis job | YES | YES | Whether to run the genesis job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_GENESIS_FSU | Run FSU genesis job | YES | YES | Whether to run the FSU genesis job | 
++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_VERFOZN | Run GSI monitor ozone job | YES | YES | Whether to run the GSI monitor ozone job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_VERFRAD | Run GSI monitor radiance job | YES | YES | Whether to run the GSI monitor radiance job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_VMINMON | Run GSI monitor minimization job | YES | YES | Whether to run the GSI monitor minimization job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_METP | Run METplus jobs | YES | YES | One cycle spinup | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| EXP_WARM_START | Is experiment starting warm | .false. | NO | Impacts IAU settings for initial cycle. Can also | +| | (.true.) or cold (.false)? | | | be set when running ``setup_expt.py`` script with | +| | | | | the ``--start`` flag (e.g. ``--start warm``) | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| HPSSARCH | Archive to HPSS | NO | Possibly | Whether to save output to tarballs on HPSS | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| LOCALARCH | Archive to a local directory | NO | Possibly | Instead of archiving data to HPSS, archive to a | +| | | | | local directory, specified by ATARDIR. If | +| | | | | LOCALARCH=YES, then HPSSARCH must =NO. Changing | +| | | | | HPSSARCH from YES to NO will adjust the XML. | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| QUILTING | Use I/O quilting | .true. | NO | If .true. choose OUTPUT_GRID as cubed_sphere_grid | +| | | | | in netcdf or gaussian_grid | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ +| WRITE_DOPOST | Run inline post | .true. | NO | If .true. 
produces master post output in forecast | +| | | | | job | ++----------------+----------------------------------+---------------+-------------+---------------------------------------------------+ diff --git a/docs/source/custom.css b/docs/source/custom.css new file mode 100644 index 0000000000..be64c1472e --- /dev/null +++ b/docs/source/custom.css @@ -0,0 +1,9 @@ +/* custom.css */ + +.red-text { + color: red; +} + +.blue-text { + color: blue; +} diff --git a/docs/source/development.rst b/docs/source/development.rst index e95516bcca..4739d2b602 100644 --- a/docs/source/development.rst +++ b/docs/source/development.rst @@ -196,3 +196,4 @@ Moving forward you'll want to perform the "remote update" command regularly to u :: git remote update + diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst index da54f29521..3ce6a889d9 100644 --- a/docs/source/hpc.rst +++ b/docs/source/hpc.rst @@ -19,10 +19,12 @@ HPC helpdesks * WCOSS2: hpc.wcoss2-help@noaa.gov * Hera: rdhpcs.hera.help@noaa.gov * Orion: rdhpcs.orion.help@noaa.gov +* Hercules: rdhpcs.hercules.help@noaa.gov * HPSS: rdhpcs.hpss.help@noaa.gov * Gaea: oar.gfdl.help@noaa.gov * S4: david.huber@noaa.gov * Jet: rdhpcs.jet.help@noaa.gov +* Cloud: rdhpcs.cloud.help@noaa.gov ====================== Restricted data access @@ -71,17 +73,21 @@ Version It is advised to use Git v2+ when available. At the time of writing this documentation the default Git clients on the different machines were as noted in the table below. It is recommended that you check the default modules before loading recommended ones: +---------+----------+---------------------------------------+ -| Machine | Default | Recommended | +| Machine | Default | Recommended | +---------+----------+---------------------------------------+ -| Hera | v2.18.0 | default | +| Hera | v2.18.0 | default | +---------+----------+---------------------------------------+ -| Orion | v1.8.3.1 | **module load git/2.28.0** | +| Hercules | v2.31.1 | default | +---------+----------+---------------------------------------+ -| Jet | v2.18.0 | default | +| Orion | v1.8.3.1 | **module load git/2.28.0** | +---------+----------+---------------------------------------+ -| WCOSS2 | v2.26.2 | default or **module load git/2.29.0** | +| Jet | v2.18.0 | default | +---------+----------+---------------------------------------+ -| S4 | v1.8.3.1 | **module load git/2.30.0** | +| WCOSS2 | v2.26.2 | default or **module load git/2.29.0** | ++---------+----------+---------------------------------------+ +| S4 | v1.8.3.1 | **module load git/2.30.0** | ++---------+----------+---------------------------------------+ +| AWS PW | v1.8.3.1 | default | +---------+----------+---------------------------------------+ ^^^^^^^^^^^^^ @@ -100,7 +106,7 @@ For the manage_externals utility functioning:: Fix: git config --global ssh.variant ssh ======================================== -Stacksize on R&Ds (Hera, Orion, Jet, S4) +Stacksize on R&Ds (Hera, Orion, Hercules, Jet, S4) ======================================== Some GFS components, like the UPP, need an unlimited stacksize. Add the following setting into your appropriate .*rc file to support these components:
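The setting itself lies outside this hunk's context lines; for reference, the conventional form (a sketch, not the file's verbatim text) is:

::

   # bash/ksh (.bashrc / .kshrc)
   ulimit -s unlimited
   # csh/tcsh (.cshrc / .tcshrc)
   limit stacksize unlimited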
diff --git a/docs/source/index.rst b/docs/source/index.rst index e254a83fa2..2eb786199a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -10,7 +10,7 @@ Status ====== * State of develop (HEAD) branch: GFSv17+ development -* State of operations (dev/gfs.v16 branch): GFS v16.3.7 `tag: [gfs.v16.3.7] `_ +* State of operations (dev/gfs.v16 branch): GFS v16.3.12 `tag: [gfs.v16.3.12] `_ ============= Code managers @@ -37,3 +37,4 @@ GitHub updates: Users should adjust their "Watch" settings for this repo so they hpc.rst output.rst run.rst + noaa_csp.rst diff --git a/docs/source/init.rst b/docs/source/init.rst index 5c9c811052..65e400c68e 100644 --- a/docs/source/init.rst +++ b/docs/source/init.rst @@ -49,7 +49,7 @@ Cold-start atmosphere-only cycled C96 deterministic C48 enkf (80 members) ICs ar :: Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C96C48 - Orion: /work/noaa/global/glopara/data/ICSDIR/C96C48 + Orion/Hercules: /work/noaa/global/glopara/data/ICSDIR/C96C48 WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C96C48 Start date = 2021122018 @@ -108,7 +108,7 @@ Warm-start cycled w/ coupled (S2S) model C48 atmosphere C48 enkf (80 members) 5 :: Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48C48mx500 - Orion: /work/noaa/global/glopara/data/ICSDIR/C48C48mx500 + Orion/Hercules: /work/noaa/global/glopara/data/ICSDIR/C48C48mx500 WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C48C48mx500 Jet: /lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/C48C48mx500 @@ -224,7 +224,7 @@ Forecast-only P8 prototype initial conditions are made available to users on sup WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/IC/COUPLED HERA: /scratch1/NCEPDEV/climate/role.ufscpara/IC - ORION: /work/noaa/global/glopara/data/ICSDIR/prototype_ICs + ORION/Hercules: /work/noaa/global/glopara/data/ICSDIR/prototype_ICs JET: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs S4: /data/prod/glopara/coupled_ICs @@ -253,7 +253,7 @@ Not yet supported. See :ref:`Manual Generation` section below --------------------- Forecast-only coupled --------------------- -Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). +Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion/Hercules, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). .. _forecastonly-atmonly: @@ -318,12 +318,12 @@ Manual Generation The following information is for users needing to generate cold-start initial conditions for a cycled experiment that will run at a different resolution or number of layers than the operational GFS (C768C384L127). 
-The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop (found in ``sorc/checkout.sh``). The ``chgres_cube`` code/scripts currently support the following GFS inputs: +The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop. The ``chgres_cube`` code/scripts currently support the following GFS inputs: -* pre-GFSv14 -* GFSv14 -* GFSv15 -* GFSv16 +* pre-GFSv14 +* GFSv14 +* GFSv15 +* GFSv16 Users can use the copy of UFS_UTILS that is already cloned and built within their global-workflow clone or clone/build it separately: @@ -354,7 +354,7 @@ Then switch to a different tag or use the default branch (develop). where ``$MACHINE`` is ``wcoss2``, ``hera``, or ``jet``. .. note:: - UFS-UTILS builds on Orion but due to the lack of HPSS access on Orion the ``gdas_init`` utility is not supported there. + UFS-UTILS builds on Orion/Hercules but due to the lack of HPSS access on Orion/Hercules the ``gdas_init`` utility is not supported there. 3. Configure your conversion: @@ -380,7 +380,7 @@ Most users will want to adjust the following ``config`` settings for the current where ``$MACHINE`` is currently ``wcoss2``, ``hera`` or ``jet``. Additional options will be available as support for other machines expands. .. note:: - UFS-UTILS builds on Orion but due to lack of HPSS access there is no ``gdas_init`` driver for Orion nor support to pull initial conditions from HPSS for the ``gdas_init`` utility. + UFS-UTILS builds on Orion/Hercules but due to lack of HPSS access there is no ``gdas_init`` driver for Orion/Hercules nor support to pull initial conditions from HPSS for the ``gdas_init`` utility. Several small jobs will be submitted: diff --git a/docs/source/jobs.rst b/docs/source/jobs.rst index 67863bb9a2..0e3700bf20 100644 --- a/docs/source/jobs.rst +++ b/docs/source/jobs.rst @@ -18,10 +18,10 @@ An experimental run is different from operations in the following ways: * Additional steps in experimental mode: - - verification (vrfy) - - archive (arch) + - cleanup (cleanup) + Downstream jobs (e.g. awips, gempak, etc.) are not included in the diagram. Those jobs are not normally run in developmental tests. ============================= @@ -71,8 +71,6 @@ Jobs in the GFS Configuration +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | postN | Runs the post processor. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ -| vrfy | Runs the verification tasks. | -+-------------------+-----------------------------------------------------------------------------------------------------------------------+ | waveinit | Runs wave initialization step. | +-------------------+-----------------------------------------------------------------------------------------------------------------------+ | waveprep | Runs wave prep step. | diff --git a/docs/source/noaa_csp.rst b/docs/source/noaa_csp.rst new file mode 100644 index 0000000000..66317efe92 --- /dev/null +++ b/docs/source/noaa_csp.rst @@ -0,0 +1,200 @@ +.. 
role:: red-text + +######################################## +Configuring NOAA Cloud Service Providers +######################################## + +The NOAA Cloud Service Providers (CSP) support the forecast-only +configurations for the global workflow. Once a suitable CSP instance +and cluster is defined/created, the global workflow may be executed as +on the other platforms discussed in the previous sections. In order to +successfully execute the global-workflow, a suitable CSP cluster must +be created. Currently the global-workflow supports the following +instance and storage types as a function of CSP and forecast +resolution. + +.. list-table:: + :widths: auto + :header-rows: 1 + :align: center + + * - **Cloud Service Provider** + - **Global Workflow Resolution** + - **Global Workflow Application** + - **Instance Type** + - **Partition** + - **File System** + * - Amazon Web Services Parallel Works + - C48 + - ``ATM`` + - ``c5.9xlarge (36 vCPUs, 72 GB Memory, amd64)`` + - ``compute`` + - ``/lustre`` + +Instructions for configuring the respective CSP instance and +cluster follow. + +********************* +Login to the NOAA CSP +********************* + +Log in to the `NOAA CSP `_ and into +the resources configuration. The user should arrive at the following +screen. + +Note that the ``Username or email`` query is case-sensitive. The user +will then be prompted for their respective RSA token key using the +same application used for the other RDHPCS machines (e.g., Hera, Jet, +etc.). + +.. image:: _static/noaacsp_login.png + +******************************* +Configure the NOAA CSP Instance +******************************* + +Once logged into the NOAA CSP, navigate to the :red-text:`RESOURCES` section +and click the ``+ Add Resource`` button in the upper-right corner as +illustrated below. + +.. image:: _static/noaacsp_instance_1.png + +Next, the mandatory attributes for the respective instance must be +defined as shown in the illustration below. + +.. image:: _static/noaacsp_instance_2.png + +The annotated attributes and their respective descriptions are as +follows. + +1. A unique name for the instance. Best practices suggest one that is + clear, concise, and relevant to the application. +2. A short description of the instance, i.e., ``This instance supports + this task.`` +3. Tag(s) describing and identifying the respective instance. These + allow for improved bookkeeping, especially when a user has multiple + or concurrent instance types. + +Next, the cluster is defined as shown in the following illustration. + +.. image:: _static/noaacsp_instance_3.png + +The NOAA Parallel Works initiative currently provides two CSPs for the +global-workflow: **AWS** (Amazon Web Services) and **Azure** +(Microsoft Azure). Existing clusters may also be modified. However, +this is neither recommended nor supported. + +Finally, when satisfied with the CSP instance configuration, click ``Add +Resource`` as illustrated below. + +.. image:: _static/noaacsp_instance_4.png + +****************************** +Configure the NOAA CSP Cluster +****************************** + +Navigate to the tab and locate the CSP instance configured in the +previous section and click on the link, `globalworkflowdemo` for this +example. + +.. image:: _static/noaacsp_cluster_1.png + +The respective CSP cluster may then be configured. The mandatory +configuration attributes are as follows. + +- Availability zone; +- Disk size and storage type(s); +- Available compute and resource partitions. 
+
+The following image describes the general settings for the respective
+cluster. These attributes are specific to the user and the respective
+user's group allocation. The right-most panel provides a breakdown of
+the costs related to the requested compute and storage
+resources. While there is a space to place an SSH key here, RDHPCS
+recommends adding any SSH keys under the respective user's
+``Account➡Authentication`` instead. This will allow you to connect
+from another machine instead of using the Parallel Works web terminal.
+
+.. image:: _static/noaacsp_cluster_2.png
+
+The following image describes the controller settings for a cluster
+created for a C48 atmosphere forecast-only configuration. Here the
+user must define the instance type (see the table above), the number
+of image disks, and the image disk sizes.
+
+.. image:: _static/noaacsp_cluster_3.png
+
+Next, the partitions for the cluster may be defined. A partition
+configuration for the aforementioned C48 atmosphere forecast-only
+application is illustrated in the figure below. Note that the instance
+type beneath ``Controller Settings`` and ``Partitions`` must be
+identical. Other configurations are not supported by the
+global-workflow team. Once the partitions are configured, click the
+``+ Add Partition`` button in the upper-right corner.
+
+.. image:: _static/noaacsp_cluster_4.png
+
+For the storage to be allocated for the global-workflow application,
+it is suggested that the ``Mount Point`` be ``/lustre``. Once the storage
+has been configured, click the ``+ Add Attached Storage`` button in
+the upper-right corner. This is illustrated in the following image.
+
+.. image:: _static/noaacsp_cluster_5.png
+
+Finally, the following illustrates a JSON version of the cluster
+configuration created from the steps above. When opening issues
+related to the NOAA CSP global-workflow applications, please include
+the JSON content.
+
+.. image:: _static/noaacsp_cluster_6.png
+
+**************************
+Using the NOAA CSP Cluster
+**************************
+
+To activate the cluster, click the button circled in
+:red-text:`red`. The cluster status is denoted by the color-coded button
+on the right. The amount of time required to start the cluster is
+variable and not immediate; it may take several minutes for the
+cluster to become available.
+
+.. image:: _static/noaacsp_using_1.png
+
+For instances where a NOAA CSP cluster does not initialize, useful
+output can be found beneath the ``Logs`` section of the
+``Provision`` tab as illustrated below. Once again, when opening
+issues related to the NOAA CSP cluster initialization, please include
+this information.
+
+.. image:: _static/noaacsp_using_2.png
+
+***************************
+Running the Global Workflow
+***************************
+
+The global-workflow configuration currently requires that all initial
+conditions, observations, and fixed files are staged in the
+appropriate paths prior to running the global-workflow. As suggested
+above, it is strongly recommended that the user configure their
+respective experiments to use the ``/lustre`` file system for the
+``EXPDIR`` and ``ROTDIR`` contents. The ``/contrib`` file system is
+suitable for compiling and linking the workflow components required by
+the global-workflow.
+
+The software stack supporting the ``develop`` branch of the
+global-workflow is provided for the user and is located beneath
+``/contrib/emc_static/spack-stack``. The modules required for the
+global-workflow execution may be loaded as follows.
+
+..
code-block:: bash + + user@host:$ module unuse /opt/cray/craype/default/modulefiles + user@host:$ module unuse /opt/cray/modulefiles + user@host:$ module use /contrib/emc_static/spack-stack/miniconda/modulefiles/miniconda + user@host:$ module load py39_4.12.0 + user@host:$ module load rocoto/1.3.3 + +The execution of the global-workflow should now follow the same steps +as those for the RDHPCS on-premise hosts. + + diff --git a/docs/source/run.rst b/docs/source/run.rst index 56728d3282..817ed3ccfa 100644 --- a/docs/source/run.rst +++ b/docs/source/run.rst @@ -2,7 +2,7 @@ Run Global Workflow ################### -Here we will show how you can run an experiment using the Global Workflow. The Global workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from initial checkout stage will be slightly different. Apart from this there is a third mode that is rarely used in development mode and is primarily for operational use. This mode switches on specialized post processing needed by the aviation industry. Since the files associated with this mode are restricted, only select users will have need and/or ability to run in this mode. +Here we will show how you can run an experiment using the Global Workflow. The Global Workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from the initial build stage will be slightly different. .. toctree:: diff --git a/docs/source/setup.rst b/docs/source/setup.rst index ec63327fbc..be04aa5d96 100644 --- a/docs/source/setup.rst +++ b/docs/source/setup.rst @@ -2,69 +2,13 @@ Experiment Setup ================ - Global workflow uses a set of scripts to help configure and set up the drivers (also referred to as Workflow Manager) that run the end-to-end system. While currently we use a `ROCOTO `__ based system and that is documented here, an `ecFlow `__ based systm is also under development and will be introduced to the Global Workflow when it is mature. To run the setup scripts, you need to make sure to have a copy of ``python3`` with ``numpy`` available. The easiest way to guarantee this is to load python from the `official hpc-stack installation `_ for the machine you are on: + Global workflow uses a set of scripts to help configure and set up the drivers (also referred to as Workflow Manager) that run the end-to-end system. While we currently use a `ROCOTO `__ based system, which is documented here, an `ecFlow `__ based system is also under development and will be introduced to the Global Workflow when it is mature. To run the setup scripts, you need to have Rocoto and a ``python3`` environment with several specific libraries. The easiest way to guarantee this is to source the following script, which will load the necessary modules for your machine: -..
list-table:: Python Module Load Commands - :widths: 25 120 - :header-rows: 1 + :: - * - **MACHINE** - - **COMMAND(S)** - * - Hera - - :: + # Note: this will wipe your existing lmod environment + source workflow/gw_setup.sh - - module use -a /contrib/anaconda/modulefiles - module load anaconda/anaconda3-5.3.1 - * - Orion - - :: - - module load python/3.7.5 - * - WCOSS2 - - :: - - module load python/3.8.6 - * - S4 - - :: - - module load miniconda/3.8-s4 - - * - Jet - - :: - - module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles - module load miniconda3/4.12.0 - conda activate ufswm - -If running with Rocoto make sure to have a Rocoto module loaded before running setup scripts: - -.. list-table:: ROCOTO Module Load Commands - :widths: 25 120 - :header-rows: 1 - - * - **MACHINE** - - **COMMAND(S)** - * - Hera - - :: - - module load rocoto/1.3.3 - * - Orion - - :: - - module load contrib - module load rocoto/1.3.3 - * - WCOSS2 - - :: - - module use /apps/ops/test/nco/modulefiles/ - module load core/rocoto/1.3.5 - * - S4 - - :: - - module load rocoto/1.3.4 - * - Jet - - :: - - module load rocoto/1.3.3 ^^^^^^^^^^^^^^^^^^^^^^^^ Forecast-only experiment ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -94,8 +38,8 @@ where: * ``$APP`` is the target application, one of: - ATM: atmosphere-only [default] - - ATMW: atm-wave - ATMA: atm-aerosols + - ATMW: atm-wave (currently non-functional) - S2S: atm-ocean-ice - S2SA: atm-ocean-ice-aerosols - S2SW: atm-ocean-ice-wave @@ -206,7 +150,12 @@ where: * ``$APP`` is the target application, one of: - ATM: atmosphere-only [default] - - ATMW: atm-wave + - ATMA: atm-aerosols + - ATMW: atm-wave (currently non-functional) + - S2S: atm-ocean-ice + - S2SA: atm-ocean-ice-aerosols + - S2SW: atm-ocean-ice-wave + - S2SWA: atm-ocean-ice-wave-aerosols * ``$IDATE`` is the initial start date of your run (first cycle CDATE, YYYYMMDDCC) * ``$EDATE`` is the ending date of your run (YYYYMMDDCC) and is the last cycle that will complete diff --git a/docs/source/start.rst b/docs/source/start.rst index 957971e637..7dc093e8a4 100644 --- a/docs/source/start.rst +++ b/docs/source/start.rst @@ -23,7 +23,7 @@ Set up your experiment cron ^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: - Orion currently only supports cron on Orion-login-1. Cron support for other login nodes is coming in the future. + Orion and Hercules currently only support cron on Orion-login-1 and Hercules-login-1, respectively. Cron support for other login nodes is coming in the future. :: diff --git a/docs/source/wave.rst b/docs/source/wave.rst index 4ff29ab245..7b4f7471b8 100644 --- a/docs/source/wave.rst +++ b/docs/source/wave.rst @@ -98,7 +98,7 @@ You will need to add the following files: For config.ufs: -If this is a computational you will need to update this file. If this is a new output file you can skip this update. +If this is a computational grid, you will need to update this file. If this is a new output file you can skip this update. There are two sections to update: 1. Add the new grid as a possible ww3 resolution at the top of the file 2. In the WW3 specific settings section, add a case for the new grid and define ntasks_ww3 (number of tasks for WW3) and @@ -122,11 +122,17 @@ computational grid or just an output grid you will need a new parm/wave/ww3_grib WW3/model/inp/ww3_grib.inp for more information about this input file. The other existing templated input files can be used as guides to create these new files.
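+For the ``config.ufs`` update described above, a hypothetical fragment
+for a new computational grid is sketched below. The grid name
+``new_grd`` and the task count are illustrative placeholders only;
+consult the existing cases in ``config.ufs`` for the actual variable
+names and values used by each supported grid.
+
+.. code-block:: bash
+
+   # In the WW3 specific settings section of config.ufs
+   case "${waveGRD}" in
+     new_grd)
+       # number of tasks dedicated to WW3 for this grid
+       export ntasks_ww3=100
+       ;;
+   esac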
- - - ***************************** Updates for New Product Grids ***************************** -If this is a new file for AWIPS Products, additional updates are likely required. +If this is a new file for AWIPS Products, additional updates are likely required. + +************** +New Staged ICs +************** + +Depending on the desired use of the new grid, or if the default grid is changed, staged initial conditions for use in +workflow testing will also be needed. + +For example, C384 S2SW needs an IC for 20130101 00z and low-resolution tests need an IC for 20210323 12z. diff --git a/ecf/defs/gfs_00.def b/ecf/defs/gfs_00.def index 2ff0a785a7..d326f16ecc 100644 --- a/ecf/defs/gfs_00.def +++ b/ecf/defs/gfs_00.def @@ -1496,83 +1496,6 @@ edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ( :TIME >= 0440 and :TIME < 1040 ) and ../post/jgfs_atmos_post_f003 == complete - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger
../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ( :TIME >= 0433 and :TIME < 1033) and ./jgfs_atmos_wafs_grib2 == complete - task jgfs_atmos_wafs_blending_0p25 - trigger ( :TIME >= 0425 and :TIME < 1025) and ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/gfs_06.def b/ecf/defs/gfs_06.def index 4524d28374..4772e29267 100644 --- a/ecf/defs/gfs_06.def +++ b/ecf/defs/gfs_06.def @@ -1496,83 +1496,6 @@ edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ( :TIME >= 1040 and :TIME < 1640) and ../post/jgfs_atmos_post_f003 == complete - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and 
./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ( :TIME >= 1033 and :TIME < 1633) and ./jgfs_atmos_wafs_grib2 == complete - task jgfs_atmos_wafs_blending_0p25 - trigger ( :TIME >= 1025 and :TIME < 1625) and ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/gfs_12.def b/ecf/defs/gfs_12.def index 6e366bffcd..a242a10de2 100644 --- a/ecf/defs/gfs_12.def +++ b/ecf/defs/gfs_12.def @@ -1496,83 +1496,6 @@ edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ( :TIME >= 1640 and :TIME < 2240) and ../post/jgfs_atmos_post_f003 == complete - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - 
task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ( :TIME >= 1633 and :TIME < 2233) and ./jgfs_atmos_wafs_grib2 == complete - task jgfs_atmos_wafs_blending_0p25 - trigger ( :TIME >= 1625 and :TIME < 2225) and ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/gfs_18.def b/ecf/defs/gfs_18.def index 0a8c52cf7e..1fdeb69e39 100644 --- a/ecf/defs/gfs_18.def +++ b/ecf/defs/gfs_18.def @@ -1496,83 +1496,6 @@ edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ( :TIME >= 2240 or :TIME < 0240) and ../post/jgfs_atmos_post_f003 == complete - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger 
../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ( :TIME >= 2233 or :TIME < 0233 ) and ./jgfs_atmos_wafs_grib2 == complete - task jgfs_atmos_wafs_blending_0p25 - trigger ( :TIME >= 2225 or :TIME < 0225) and ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/prod00.def b/ecf/defs/prod00.def index f36fee8ed0..2a8338c7bd 100644 --- a/ecf/defs/prod00.def +++ b/ecf/defs/prod00.def @@ -1522,85 +1522,6 @@ suite prod00 edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ../post/jgfs_atmos_post_f003 == complete - time 04:40 - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 
== complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ./jgfs_atmos_wafs_grib2 == complete - time 04:33 - task jgfs_atmos_wafs_blending_0p25 - trigger ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/prod06.def b/ecf/defs/prod06.def index 9ba8b46332..60e5d94faa 100644 --- a/ecf/defs/prod06.def +++ b/ecf/defs/prod06.def @@ -1522,85 +1522,6 @@ suite prod06 edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ../post/jgfs_atmos_post_f003 == complete - time 04:40 - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and 
./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ./jgfs_atmos_wafs_grib2 == complete - time 04:33 - task jgfs_atmos_wafs_blending_0p25 - trigger ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/prod12.def b/ecf/defs/prod12.def index 1c058e2640..05c3c1016e 100644 --- a/ecf/defs/prod12.def +++ b/ecf/defs/prod12.def @@ -1522,85 +1522,6 @@ suite prod12 edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ../post/jgfs_atmos_post_f003 == complete - time 04:40 - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger ../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == 
complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ./jgfs_atmos_wafs_grib2 == complete - time 04:33 - task jgfs_atmos_wafs_blending_0p25 - trigger ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/defs/prod18.def b/ecf/defs/prod18.def index a7f715b016..df22c4dccb 100644 --- a/ecf/defs/prod18.def +++ b/ecf/defs/prod18.def @@ -1522,85 +1522,6 @@ suite prod18 edit HR '384' endfamily family post_processing - task jgfs_atmos_wafs_gcip - trigger ../post/jgfs_atmos_post_f003 == complete - time 04:40 - family grib_wafs - task jgfs_atmos_wafs_f000 - trigger ../../post/jgfs_atmos_post_f000 == complete and ../../post/jgfs_atmos_post_f120 == complete and ../grib2_wafs/jgfs_atmos_wafs_grib2 == complete - edit FCSTHR '00' - task jgfs_atmos_wafs_f006 - trigger 
../../post/jgfs_atmos_post_f006 == complete and ./jgfs_atmos_wafs_f000 == complete - edit FCSTHR '06' - task jgfs_atmos_wafs_f012 - trigger ../../post/jgfs_atmos_post_f012 == complete and ./jgfs_atmos_wafs_f006 == complete - edit FCSTHR '12' - task jgfs_atmos_wafs_f018 - trigger ../../post/jgfs_atmos_post_f018 == complete and ./jgfs_atmos_wafs_f012 == complete - edit FCSTHR '18' - task jgfs_atmos_wafs_f024 - trigger ../../post/jgfs_atmos_post_f024 == complete and ./jgfs_atmos_wafs_f018 == complete - edit FCSTHR '24' - task jgfs_atmos_wafs_f030 - trigger ../../post/jgfs_atmos_post_f030 == complete and ./jgfs_atmos_wafs_f024 == complete - edit FCSTHR '30' - task jgfs_atmos_wafs_f036 - trigger ../../post/jgfs_atmos_post_f036 == complete and ./jgfs_atmos_wafs_f030 == complete - edit FCSTHR '36' - task jgfs_atmos_wafs_f042 - trigger ../../post/jgfs_atmos_post_f042 == complete and ./jgfs_atmos_wafs_f036 == complete - edit FCSTHR '42' - task jgfs_atmos_wafs_f048 - trigger ../../post/jgfs_atmos_post_f048 == complete and ./jgfs_atmos_wafs_f042 == complete - edit FCSTHR '48' - task jgfs_atmos_wafs_f054 - trigger ../../post/jgfs_atmos_post_f054 == complete and ./jgfs_atmos_wafs_f048 == complete - edit FCSTHR '54' - task jgfs_atmos_wafs_f060 - trigger ../../post/jgfs_atmos_post_f060 == complete and ./jgfs_atmos_wafs_f054 == complete - edit FCSTHR '60' - task jgfs_atmos_wafs_f066 - trigger ../../post/jgfs_atmos_post_f066 == complete and ./jgfs_atmos_wafs_f060 == complete - edit FCSTHR '66' - task jgfs_atmos_wafs_f072 - trigger ../../post/jgfs_atmos_post_f072 == complete and ./jgfs_atmos_wafs_f066 == complete - edit FCSTHR '72' - task jgfs_atmos_wafs_f078 - trigger ../../post/jgfs_atmos_post_f078 == complete and ./jgfs_atmos_wafs_f072 == complete - edit FCSTHR '78' - task jgfs_atmos_wafs_f084 - trigger ../../post/jgfs_atmos_post_f084 == complete and ./jgfs_atmos_wafs_f078 == complete - edit FCSTHR '84' - task jgfs_atmos_wafs_f090 - trigger ../../post/jgfs_atmos_post_f090 == complete and ./jgfs_atmos_wafs_f084 == complete - edit FCSTHR '90' - task jgfs_atmos_wafs_f096 - trigger ../../post/jgfs_atmos_post_f096 == complete and ./jgfs_atmos_wafs_f090 == complete - edit FCSTHR '96' - task jgfs_atmos_wafs_f102 - trigger ../../post/jgfs_atmos_post_f102 == complete and ./jgfs_atmos_wafs_f096 == complete - edit FCSTHR '102' - task jgfs_atmos_wafs_f108 - trigger ../../post/jgfs_atmos_post_f108 == complete and ./jgfs_atmos_wafs_f102 == complete - edit FCSTHR '108' - task jgfs_atmos_wafs_f114 - trigger ../../post/jgfs_atmos_post_f114 == complete and ./jgfs_atmos_wafs_f108 == complete - edit FCSTHR '114' - task jgfs_atmos_wafs_f120 - trigger ../../post/jgfs_atmos_post_f120 == complete and ./jgfs_atmos_wafs_f114 == complete - edit FCSTHR '120' - endfamily - family grib2_wafs - task jgfs_atmos_wafs_grib2 - trigger ../../post/jgfs_atmos_post_f000 == complete - task jgfs_atmos_wafs_grib2_0p25 - trigger ../../post/jgfs_atmos_post_f036 == complete - task jgfs_atmos_wafs_blending - trigger ./jgfs_atmos_wafs_grib2 == complete - time 04:33 - task jgfs_atmos_wafs_blending_0p25 - trigger ./jgfs_atmos_wafs_grib2_0p25 == complete - endfamily family bufr_sounding task jgfs_atmos_postsnd trigger ../../post/jgfs_atmos_post_manager:release_post000 diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf index 294d5c25d9..08d0185399 100755 --- a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf @@ 
-5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:06:00 #PBS -l select=1:mpiprocs=48:ompthreads=1:ncpus=48:mem=24GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf index 39d4ec2e8d..d095742193 100755 --- a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf +++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:06:00 #PBS -l select=1:mpiprocs=80:ompthreads=1:ncpus=80:mem=80GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf index f233fed21e..f766333272 100755 --- a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=96:ompthreads=1:ncpus=96:mem=48GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true export model=gfs diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index 039ca56852..0bc2d76455 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:15:00 #PBS -l select=1:ncpus=2:mpiprocs=2:mem=4GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf index 61f7f0a17f..9d66f4bda1 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:30:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf index 5322c363ac..2dd0bdf06c 100755 --- a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf index e0ab513b33..7e3282bc95 100755 --- a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:08:00 #PBS -l select=1:ncpus=1:mem=2GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf index 55d7932aaf..1da24c0d46 100755 --- a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf @@ 
-5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=01:15:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf index 6993e4ae4f..33fa481a29 100755 --- a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf +++ b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:ncpus=3:mpiprocs=3:ompthreads=1:mem=200GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf index 5bf31996d9..9c7a1609e7 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf index b9ee264998..e2d3bb3463 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 #PBS -l select=1:ncpus=1:mem=5GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf index 71ef1155a9..9afd0b5083 100755 --- a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf index 569a7c0894..208ed2cc52 100755 --- a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf +++ b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=11:ompthreads=1:ncpus=11:mem=2GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf index 20db8b240d..1899dc152f 100755 --- a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:12:00 #PBS -l select=4:mpiprocs=50:ompthreads=1:ncpus=50:mem=10GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf index 29d5a467d3..31cca40bed 100755 --- a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 #PBS -l select=1:mpiprocs=8:ompthreads=1:ncpus=8:mem=10GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git 
a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf index 612a29eb71..77b44634a9 100755 --- a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf +++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=5:ompthreads=1:ncpus=5:mem=100GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf index c69a02fedf..1994f238d1 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=03:00:00 #PBS -l select=1:ncpus=28:mpiprocs=28:mem=2GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf index 69f736b705..e9833baa41 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=03:00:00 #PBS -l select=1:ncpus=23:mpiprocs=23:mem=2GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf index d72622b93a..08686dbca3 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf index 8837e94948..1ff597411a 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf index aa89d37bfc..04b07c58d1 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:30:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf index 5322c363ac..2dd0bdf06c 100755 --- a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf 
b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf index f0a1a3346f..bb0bcf8db7 100755 --- a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf +++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:07:00 #PBS -l select=1:ncpus=1:mem=2GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf index fc22e941bc..d2e315bcef 100755 --- a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=04:00:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf index 2333476033..e146f8df32 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:ncpus=1:mem=3GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf index a175e34e5a..c1edf3de88 100755 --- a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=3GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf index a6fb412f71..7443002a0b 100755 --- a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=4GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf index 1e19a0ea3e..e9922e0751 100755 --- a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf +++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:05:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf index 199f68adeb..2cb7f75949 100755 --- a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf +++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf 
b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf index 569a7c0894..208ed2cc52 100755 --- a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf +++ b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=11:ompthreads=1:ncpus=11:mem=2GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf index a1f18fefd8..0b0e516bc2 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=03:00:00 #PBS -l select=1:mpiprocs=8:ompthreads=1:ncpus=8:mem=10GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf index 5cbd08032b..00f005a877 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:20:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf index 616f360d28..8197d58020 100755 --- a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=02:00:00 #PBS -l select=1:ncpus=1:mem=1GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf index 97700528ab..8f93f6d098 100755 --- a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf +++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf @@ -5,7 +5,7 @@ #PBS -A %PROJ%-%PROJENVIR% #PBS -l walltime=00:10:00 #PBS -l select=1:mpiprocs=65:ompthreads=1:ncpus=65:mem=150GB -#PBS -l place=vscatter +#PBS -l place=vscatter:shared #PBS -l debug=true model=gfs diff --git a/ecf/setup_ecf_links.sh b/ecf/setup_ecf_links.sh index b0dce50cc5..571d255940 100755 --- a/ecf/setup_ecf_links.sh +++ b/ecf/setup_ecf_links.sh @@ -10,50 +10,45 @@ function link_master_to_fhr(){ tmpl=$1 # Name of the master template fhrs=$2 # Array of forecast hours for fhr in ${fhrs[@]}; do - fhrchar=$(printf %03d $fhr) + fhrchar=$(printf %03d "${fhr}") master=${tmpl}_master.ecf target=${tmpl}_f${fhrchar}.ecf - rm -f $target - ln -sf $master $target + rm -f "${target}" + ln -sf "${master}" "${target}" done } # EnKF GDAS post files -cd $ECF_DIR/scripts/enkfgdas/post +cd "${ECF_DIR}/scripts/enkfgdas/post" echo "Linking enkfgdas/post ..." fhrs=($(seq 3 9)) -link_master_to_fhr "jenkfgdas_post" "$fhrs" +link_master_to_fhr "jenkfgdas_post" "${fhrs}" # GDAS post files -cd $ECF_DIR/scripts/gdas/atmos/post +cd "${ECF_DIR}/scripts/gdas/atmos/post" echo "Linking gdas/atmos/post ..." rm -f jgdas_atmos_post_anl.ecf ln -sf jgdas_atmos_post_master.ecf jgdas_atmos_post_anl.ecf fhrs=($(seq 0 9)) -link_master_to_fhr "jgdas_atmos_post" "$fhrs" +link_master_to_fhr "jgdas_atmos_post" "${fhrs}" # GFS post files -cd $ECF_DIR/scripts/gfs/atmos/post +cd "${ECF_DIR}/scripts/gfs/atmos/post" echo "Linking gfs/atmos/post ..." 
rm -f jgfs_atmos_post_anl.ecf ln -sf jgfs_atmos_post_master.ecf jgfs_atmos_post_anl.ecf fhrs=($(seq 0 1 120) $(seq 123 3 384)) -link_master_to_fhr "jgfs_atmos_post" "$fhrs" +link_master_to_fhr "jgfs_atmos_post" "${fhrs}" # GFS awips 20km 1p0 files -cd $ECF_DIR/scripts/gfs/atmos/post_processing/awips_20km_1p0 +cd "${ECF_DIR}/scripts/gfs/atmos/post_processing/awips_20km_1p0" echo "Linking gfs/atmos/post_processing/awips_20km_1p0 ..." fhrs=($(seq 0 3 84) $(seq 90 6 240)) -link_master_to_fhr "jgfs_atmos_awips" "$fhrs" +link_master_to_fhr "jgfs_atmos_awips" "${fhrs}" # GFS awips g2 files -cd $ECF_DIR/scripts/gfs/atmos/post_processing/awips_g2 +cd "${ECF_DIR}/scripts/gfs/atmos/post_processing/awips_g2" echo "Linking gfs/atmos/post_processing/awips_g2 ..." fhrs=($(seq 0 3 84) $(seq 90 6 240)) -link_master_to_fhr "jgfs_atmos_awips_g2" "$fhrs" +link_master_to_fhr "jgfs_atmos_awips_g2" "${fhrs}" -# GFS atmos wafs files -cd $ECF_DIR/scripts/gfs/atmos/post_processing/grib_wafs -echo "Linking gfs/atmos/post_processing/grib_wafs ..." -fhrs=($(seq 0 6 120)) -link_master_to_fhr "jgfs_atmos_wafs" "$fhrs" diff --git a/env/AWSPW.env b/env/AWSPW.env new file mode 100755 index 0000000000..894cce2343 --- /dev/null +++ b/env/AWSPW.env @@ -0,0 +1,137 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" + echo "argument can be any one of the following:" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post metp" + echo "eobs eupd ecen efcs epos" + echo "postsnd awips gempak" + exit 1 + +fi + +step=$1 + +export npe_node_max=36 +export launcher="mpiexec.hydra" +export mpmd_opt="" + +# Configure MPI environment +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + +if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks + +elif [[ "${step}" = "post" ]]; then + + nth_max=$((npe_node_max / npe_node_post)) + + export NTHREADS_NP=${nth_np:-1} + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} + export APRUN_NP="${launcher} -n ${npe_post}" + + export NTHREADS_DWN=${nth_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN="${launcher} -n ${npe_dwn}" + +elif [[ "${step}" = "ecen" ]]; then + + nth_max=$((npe_node_max / npe_node_ecen)) + + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN="${launcher} -n ${npe_ecen}" + + export NTHREADS_CHGRES=${nth_chgres:-12} + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} + export APRUN_CHGRES="time" + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" + +elif [[ "${step}" = "esfc" ]]; then + + nth_max=$((npe_node_max / npe_node_esfc)) + + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc}" + + export 
NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" + +elif [[ "${step}" = "epos" ]]; then + + nth_max=$((npe_node_max / npe_node_epos)) + + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS="${launcher} -n ${npe_epos}" + +elif [[ "${step}" = "postsnd" ]]; then + + export CFP_MP="YES" + + nth_max=$((npe_node_max / npe_node_postsnd)) + + export NTHREADS_POSTSND=${nth_postsnd:-1} + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" + + export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}" + +elif [[ "${step}" = "awips" ]]; then + + nth_max=$((npe_node_max / npe_node_awips)) + + export NTHREADS_AWIPS=${nth_awips:-2} + [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max} + export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}" + +elif [[ "${step}" = "gempak" ]]; then + + export CFP_MP="YES" + + if [[ ${CDUMP} == "gfs" ]]; then + npe_gempak=${npe_gempak_gfs} + npe_node_gempak=${npe_node_gempak_gfs} + fi + + nth_max=$((npe_node_max / npe_node_gempak)) + + export NTHREADS_GEMPAK=${nth_gempak:-1} + [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max} + export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}" + + +elif [[ "${step}" = "fit2obs" ]]; then + + nth_max=$((npe_node_max / npe_node_fit2obs)) + + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -n ${npe_fit2obs}" + +fi diff --git a/env/CONTAINER.env b/env/CONTAINER.env index 4f85ae56de..bfeb6dd6da 100755 --- a/env/CONTAINER.env +++ b/env/CONTAINER.env @@ -4,8 +4,8 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun landanlrun" - echo "anal sfcanl fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanl" + echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 diff --git a/env/HERA.env b/env/HERA.env index 2bcd43da10..90b98966f0 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -4,8 +4,8 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
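The HERA.env hunk that follows applies one pattern to nearly every step, so it is worth spelling out once: derive the per-rank thread budget from the node size, clamp the requested thread count to that budget, then append --cpus-per-task so srun actually reserves the CPUs for each MPI rank (on recent Slurm releases srun does not inherit cpus-per-task from the batch allocation, so threaded ranks would otherwise be packed onto shared cores). A sketch with a hypothetical step name "foo" and illustrative npe_* values:

npe_node_max=40                             # cores per node (the Hera value above)
npe_node_foo=10                             # MPI ranks per node for this step
npe_foo=20                                  # total MPI ranks for this step
nth_max=$((npe_node_max / npe_node_foo))    # threads that fit per rank: 4
export NTHREADS_FOO=${nth_foo:-${nth_max}}  # user request, defaulting to the max
[[ ${NTHREADS_FOO} -gt ${nth_max} ]] && export NTHREADS_FOO=${nth_max}  # clamp
export APRUN_FOO="srun -l --export=ALL -n ${npe_foo} --cpus-per-task=${NTHREADS_FOO}"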
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun landanlrun" - echo "anal sfcanl fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanl" + echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -17,7 +17,7 @@ step=$1 export npe_node_max=40 #JKHexport launcher="srun -l --export=ALL" export launcher="srun -l --epilog=/apps/local/bin/report-mem --export=ALL" -export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" # Configure MPI environment #export I_MPI_ADJUST_ALLREDUCE=5 @@ -59,7 +59,7 @@ elif [[ "${step}" = "atmanlrun" ]]; then export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}" elif [[ "${step}" = "atmensanlrun" ]]; then @@ -67,7 +67,7 @@ elif [[ "${step}" = "atmensanlrun" ]]; then export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -77,15 +77,17 @@ elif [[ "${step}" = "aeroanlrun" ]]; then export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} - export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}" -elif [[ "${step}" = "landanlrun" ]]; then +elif [[ "${step}" = "landanl" ]]; then - nth_max=$((npe_node_max / npe_node_landanlrun)) + nth_max=$((npe_node_max / npe_node_landanl)) - export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + export NTHREADS_LANDANL=${nth_landanl:-${nth_max}} [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} - export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "ocnanalbmat" ]]; then @@ -95,7 +97,7 @@ elif [[ "${step}" = "ocnanalbmat" ]]; then export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalrun" ]]; then @@ -105,7 +107,7 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalchkpt" ]]; then @@ -115,7 +117,7 @@ elif [[ "${step}" = "ocnanalchkpt" ]]; then export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "anal" ]] || [[ 
"${step}" = "analcalc" ]]; then @@ -130,21 +132,20 @@ elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export NTHREADS_GSI=${nth_anal:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} --cpus-per-task=${NTHREADS_GSI}" export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} - export APRUN_CALCINC="${launcher} \$ncmd" + export APRUN_CALCINC="${launcher} \$ncmd --cpus-per-task=${NTHREADS_CALCINC}" export NTHREADS_CYCLE=${nth_cycle:-12} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_cycle=${ntiles:-6} - export APRUN_CYCLE="${launcher} -n ${npe_cycle}" - + export APRUN_CYCLE="${launcher} -n ${npe_cycle} --cpus-per-task=${NTHREADS_CYCLE}" export NTHREADS_GAUSFCANL=1 npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}" elif [[ "${step}" = "sfcanl" ]]; then @@ -153,7 +154,7 @@ elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${nth_sfcanl:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_sfcanl=${ntiles:-6} - export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "eobs" ]]; then @@ -164,7 +165,7 @@ elif [[ "${step}" = "eobs" ]]; then export NTHREADS_GSI=${nth_eobs:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} --cpus-per-task=${NTHREADS_GSI}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} @@ -176,7 +177,7 @@ elif [[ "${step}" = "eupd" ]]; then export NTHREADS_ENKF=${nth_eupd:-${nth_max}} [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} - export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} --cpus-per-task=${NTHREADS_ENKF}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} @@ -197,17 +198,18 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export APRUN_UFS="${launcher} -n ${ntasks}" unset nprocs ppn nnodes ntasks -elif [[ "${step}" = "post" ]]; then - nth_max=$((npe_node_max / npe_node_post)) +elif [[ "${step}" = "upp" ]]; then + + nth_max=$((npe_node_max / npe_node_upp)) - export NTHREADS_NP=${nth_np:-1} - [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher} -n ${npe_post}" + export NTHREADS_UPP=${nth_upp:-1} + [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max} + export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}" - export NTHREADS_DWN=${nth_dwn:-1} - [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_DWN="${launcher} -n ${npe_dwn}" +elif [[ "${step}" = "atmos_products" ]]; then + + export USE_CFP="YES" # Use MPMD for downstream product generation on Hera elif [[ "${step}" = "ecen" ]]; then @@ -215,7 +217,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${nth_ecen:-${nth_max}} [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} - export APRUN_ECEN="${launcher} -n ${npe_ecen}" + export 
APRUN_ECEN="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_ECEN}" export NTHREADS_CHGRES=${nth_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} @@ -223,7 +225,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} - export APRUN_CALCINC="${launcher} -n ${npe_ecen}" + export APRUN_CALCINC="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_CALCINC}" elif [[ "${step}" = "esfc" ]]; then @@ -231,11 +233,11 @@ elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${nth_esfc:-${nth_max}} [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} - export APRUN_ESFC="${launcher} -n ${npe_esfc}" + export APRUN_ESFC="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_ESFC}" export NTHREADS_CYCLE=${nth_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} - export APRUN_CYCLE="${launcher} -n ${npe_esfc}" + export APRUN_CYCLE="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "epos" ]]; then @@ -243,11 +245,7 @@ elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${nth_epos:-${nth_max}} [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} - export APRUN_EPOS="${launcher} -n ${npe_epos}" - -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher} -n ${npe_init}" + export APRUN_EPOS="${launcher} -n ${npe_epos} --cpus-per-task=${NTHREADS_EPOS}" elif [[ "${step}" = "postsnd" ]]; then @@ -257,7 +255,7 @@ elif [[ "${step}" = "postsnd" ]]; then export NTHREADS_POSTSND=${nth_postsnd:-1} [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} - export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" + export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --cpus-per-task=${NTHREADS_POSTSND}" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} @@ -293,6 +291,6 @@ elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${nth_fit2obs:-1} [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} - export MPIRUN="${launcher} -n ${npe_fit2obs}" + export MPIRUN="${launcher} -n ${npe_fit2obs} --cpus-per-task=${NTHREADS_FIT2OBS}" fi diff --git a/env/HERCULES.env b/env/HERCULES.env new file mode 100755 index 0000000000..3721be2b66 --- /dev/null +++ b/env/HERCULES.env @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" 
+ echo "argument can be any one of the following:" + echo "fcst post" + echo "Note: Hercules is only set up to run in forecast-only mode" + exit 1 + +fi + +step=$1 + +export npe_node_max=40 +export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" + +# Configure MPI environment +export MPI_BUFS_PER_PROC=2048 +export MPI_BUFS_PER_HOST=2048 +export MPI_GROUP_MAX=256 +export MPI_MEMMAP_OFF=1 +export MP_STDOUTMODE="ORDERED" +export KMP_AFFINITY=scatter +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 +#export LD_BIND_NOW=1 + +ulimit -s unlimited +ulimit -a + +if [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || \ + [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostpnt" ]] || [[ "${step}" == "wavepostbndpntbll" ]]; then + + export CFP_MP="YES" + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd_opt} + +elif [[ "${step}" = "fcst" ]]; then + + export OMP_STACKSIZE=512M + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks + +elif [[ "${step}" = "upp" ]]; then + + nth_max=$((npe_node_max / npe_node_upp)) + + export NTHREADS_UPP=${nth_upp:-1} + [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max} + export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}" + +elif [[ "${step}" = "atmos_products" ]]; then + + export USE_CFP="YES" # Use MPMD for downstream product generation + +fi diff --git a/env/JET.env b/env/JET.env index 1295054731..a49e4cad97 100755 --- a/env/JET.env +++ b/env/JET.env @@ -4,8 +4,8 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun landanlrun" - echo "anal sfcanl fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanl" + echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -79,13 +79,15 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" -elif [[ "${step}" = "landanlrun" ]]; then +elif [[ "${step}" = "landanl" ]]; then - nth_max=$((npe_node_max / npe_node_landanlrun)) + nth_max=$((npe_node_max / npe_node_landanl)) - export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + export NTHREADS_LANDANL=${nth_landanl:-${nth_max}} [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} - export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + export APRUN_LANDANL="${launcher} -n ${npe_landanl}" + + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "ocnanalbmat" ]]; then @@ -185,17 +187,17 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export APRUN_UFS="${launcher} -n ${ntasks}" unset nprocs ppn nnodes ntasks -elif [[ "${step}" = "post" ]]; then +elif [[ "${step}" = "upp" ]]; then + + nth_max=$((npe_node_max / npe_node_upp)) - nth_max=$((npe_node_max / npe_node_post)) + export NTHREADS_UPP=${nth_upp:-1} + [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max} + export APRUN_UPP="${launcher} -n ${npe_upp}" - export NTHREADS_NP=${nth_np:-1} - [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher} -n ${npe_post}" +elif [[ "${step}" = "atmos_products" ]]; then - export NTHREADS_DWN=${nth_dwn:-1} - [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_DWN="${launcher} -n ${npe_dwn}" + export USE_CFP="YES" # Use MPMD for downstream product generation elif [[ "${step}" = "ecen" ]]; then @@ -233,10 +235,6 @@ elif [[ "${step}" = "epos" ]]; then [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} export APRUN_EPOS="${launcher} -n ${npe_epos}" -elif [[ "${step}" = "init" ]]; then - - export APRUN="${launcher} -n ${npe_init}" - elif [[ "${step}" = "postsnd" ]]; then export CFP_MP="YES" diff --git a/env/ORION.env b/env/ORION.env index 321ce9d3c5..d91fd4db03 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -4,8 +4,8 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun landanlrun" - echo "anal sfcanl fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanl" + echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -16,7 +16,7 @@ step=$1 export npe_node_max=40 export launcher="srun -l --export=ALL" -export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" # Configure MPI environment export MPI_BUFS_PER_PROC=2048 @@ -59,7 +59,7 @@ elif [[ "${step}" = "atmanlrun" ]]; then export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} - export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun} --cpus-per-task=${NTHREADS_ATMANL}" elif [[ "${step}" = "atmensanlrun" ]]; then @@ -67,7 +67,7 @@ elif [[ "${step}" = "atmensanlrun" ]]; then export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} - export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun} --cpus-per-task=${NTHREADS_ATMENSANL}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -77,15 +77,17 @@ elif [[ "${step}" = "aeroanlrun" ]]; then export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} - export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun} --cpus-per-task=${NTHREADS_AEROANL}" -elif [[ "${step}" = "landanlrun" ]]; then +elif [[ "${step}" = "landanl" ]]; then - nth_max=$((npe_node_max / npe_node_landanlrun)) + nth_max=$((npe_node_max / npe_node_landanl)) - export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + export NTHREADS_LANDANL=${nth_landanl:-${nth_max}} [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} - export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + export APRUN_LANDANL="${launcher} -n ${npe_landanl} --cpus-per-task=${NTHREADS_LANDANL}" + + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "ocnanalbmat" ]]; then @@ -95,7 +97,7 @@ elif [[ "${step}" = "ocnanalbmat" ]]; then export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalrun" ]]; then @@ -105,7 +107,7 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalchkpt" ]]; then @@ -115,7 +117,7 @@ elif [[ "${step}" = "ocnanalchkpt" ]]; then export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -130,20 +132,20 @@ elif [[ "${step}" = "anal" 
]] || [[ "${step}" = "analcalc" ]]; then export NTHREADS_GSI=${nth_anal:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} --cpus-per-task=${NTHREADS_GSI}" export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} - export APRUN_CALCINC="${launcher} \$ncmd" + export APRUN_CALCINC="${launcher} \$ncmd --cpus-per-task=${NTHREADS_CALCINC}" export NTHREADS_CYCLE=${nth_cycle:-12} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_cycle=${ntiles:-6} - export APRUN_CYCLE="${launcher} -n ${npe_cycle}" + export APRUN_CYCLE="${launcher} -n ${npe_cycle} --cpus-per-task=${NTHREADS_CYCLE}" export NTHREADS_GAUSFCANL=1 npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl} --cpus-per-task=${NTHREADS_GAUSFCANL}" elif [[ "${step}" = "sfcanl" ]]; then nth_max=$((npe_node_max / npe_node_sfcanl)) @@ -151,7 +153,7 @@ elif [[ "${step}" = "sfcanl" ]]; then export NTHREADS_CYCLE=${nth_sfcanl:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_sfcanl=${ntiles:-6} - export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "eobs" ]]; then @@ -166,7 +168,7 @@ elif [[ "${step}" = "eobs" ]]; then export NTHREADS_GSI=${nth_eobs:-${nth_max}} [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} - export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} --cpus-per-task=${NTHREADS_GSI}" elif [[ "${step}" = "eupd" ]]; then @@ -178,7 +180,7 @@ elif [[ "${step}" = "eupd" ]]; then export NTHREADS_ENKF=${nth_eupd:-${nth_max}} [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} - export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} --cpus-per-task=${NTHREADS_ENKF}" elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then @@ -196,17 +198,17 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export APRUN_UFS="${launcher} -n ${ntasks}" unset nprocs ppn nnodes ntasks -elif [[ "${step}" = "post" ]]; then +elif [[ "${step}" = "upp" ]]; then + + nth_max=$((npe_node_max / npe_node_upp)) - nth_max=$((npe_node_max / npe_node_post)) + export NTHREADS_UPP=${nth_upp:-1} + [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max} + export APRUN_UPP="${launcher} -n ${npe_upp} --cpus-per-task=${NTHREADS_UPP}" - export NTHREADS_NP=${nth_np:-1} - [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher} -n ${npe_post}" +elif [[ "${step}" = "atmos_products" ]]; then - export NTHREADS_DWN=${nth_dwn:-1} - [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_DWN="${launcher} -n ${npe_dwn}" + export USE_CFP="YES" # Use MPMD for downstream product generation elif [[ "${step}" = "ecen" ]]; then @@ -214,7 +216,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${nth_ecen:-${nth_max}} [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} - export APRUN_ECEN="${launcher} -n ${npe_ecen}" + export APRUN_ECEN="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_ECEN}" export 
NTHREADS_CHGRES=${nth_chgres:-12} [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} @@ -222,7 +224,7 @@ elif [[ "${step}" = "ecen" ]]; then export NTHREADS_CALCINC=${nth_calcinc:-1} [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} - export APRUN_CALCINC="${launcher} -n ${npe_ecen}" + export APRUN_CALCINC="${launcher} -n ${npe_ecen} --cpus-per-task=${NTHREADS_CALCINC}" elif [[ "${step}" = "esfc" ]]; then @@ -230,11 +232,11 @@ elif [[ "${step}" = "esfc" ]]; then export NTHREADS_ESFC=${nth_esfc:-${nth_max}} [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} - export APRUN_ESFC="${launcher} -n ${npe_esfc}" + export APRUN_ESFC="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_ESFC}" export NTHREADS_CYCLE=${nth_cycle:-14} [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} - export APRUN_CYCLE="${launcher} -n ${npe_esfc}" + export APRUN_CYCLE="${launcher} -n ${npe_esfc} --cpus-per-task=${NTHREADS_CYCLE}" elif [[ "${step}" = "epos" ]]; then @@ -242,7 +244,7 @@ elif [[ "${step}" = "epos" ]]; then export NTHREADS_EPOS=${nth_epos:-${nth_max}} [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} - export APRUN_EPOS="${launcher} -n ${npe_epos}" + export APRUN_EPOS="${launcher} -n ${npe_epos} --cpus-per-task=${NTHREADS_EPOS}" elif [[ "${step}" = "postsnd" ]]; then @@ -252,7 +254,7 @@ elif [[ "${step}" = "postsnd" ]]; then export NTHREADS_POSTSND=${nth_postsnd:-1} [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} - export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" + export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --cpus-per-task=${NTHREADS_POSTSND}" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} @@ -287,6 +289,6 @@ elif [[ "${step}" = "fit2obs" ]]; then export NTHREADS_FIT2OBS=${nth_fit2obs:-1} [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} - export MPIRUN="${launcher} -n ${npe_fit2obs}" + export MPIRUN="${launcher} -n ${npe_fit2obs} --cpus-per-task=${NTHREADS_FIT2OBS}" fi diff --git a/env/S4.env b/env/S4.env index da6f124232..3dab3fc3e7 100755 --- a/env/S4.env +++ b/env/S4.env @@ -4,8 +4,8 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun landanlrun" - echo "anal sfcanl fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanl" + echo "anal sfcanl fcst post metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -21,7 +21,7 @@ elif [[ ${PARTITION_BATCH} = "ivy" ]]; then export npe_node_max=20 fi export launcher="srun -l --export=ALL" -export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" # Configure MPI environment export OMP_STACKSIZE=2048000 @@ -76,13 +76,15 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" -elif [[ "${step}" = "landanlrun" ]]; then +elif [[ "${step}" = "landanl" ]]; then - nth_max=$((npe_node_max / npe_node_landanlrun)) + nth_max=$((npe_node_max / npe_node_landanl)) - export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + export NTHREADS_LANDANL=${nth_landanl:-${nth_max}} [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} - export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + export APRUN_LANDANL="${launcher} -n ${npe_landanl}" + + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "ocnanalbmat" ]]; then echo "WARNING: ${step} is not enabled on S4!" @@ -169,17 +171,17 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export APRUN_UFS="${launcher} -n ${ntasks}" unset nprocs ppn nnodes ntasks -elif [[ "${step}" = "post" ]]; then +elif [[ "${step}" = "upp" ]]; then + + nth_max=$((npe_node_max / npe_node_upp)) - nth_max=$((npe_node_max / npe_node_post)) + export NTHREADS_UPP=${nth_upp:-1} + [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max} + export APRUN_UPP="${launcher} -n ${npe_upp}" - export NTHREADS_NP=${nth_np:-1} - [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher} -n ${npe_post}" +elif [[ "${step}" = "atmos_products" ]]; then - export NTHREADS_DWN=${nth_dwn:-1} - [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_DWN="${launcher} -n ${npe_dwn}" + export USE_CFP="YES" # Use MPMD for downstream product generation elif [[ "${step}" = "ecen" ]]; then diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 354de5cf44..a4fe81060d 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -4,8 +4,8 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "atmanlrun atmensanlrun aeroanlrun landanlrun" - echo "anal sfcanl fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanl" + echo "anal sfcanl fcst post metp" echo "eobs eupd ecen esfc efcs epos" echo "postsnd awips gempak" exit 1 @@ -35,6 +35,7 @@ elif [[ "${step}" = "preplandobs" ]]; then elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + export USE_CFP="YES" if [[ "${step}" = "waveprep" ]] && [[ "${CDUMP}" = "gfs" ]]; then export NTASKS=${NTASKS_gfs} ; fi export wavempexec="${launcher} -np" export wave_mpmd=${mpmd_opt} @@ -65,13 +66,15 @@ elif [[ "${step}" = "aeroanlrun" ]]; then [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" -elif [[ "${step}" = "landanlrun" ]]; then +elif [[ "${step}" = "landanl" ]]; then - nth_max=$((npe_node_max / npe_node_landanlrun)) + nth_max=$((npe_node_max / npe_node_landanl)) - export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + export NTHREADS_LANDANL=${nth_landanl:-${nth_max}} [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} - export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + export APRUN_LANDANL="${launcher} -n ${npe_landanl}" + + export APRUN_APPLY_INCR="${launcher} -n 6" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then @@ -180,17 +183,17 @@ elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export FI_OFI_RXM_SAR_LIMIT=3145728 fi -elif [[ "${step}" = "post" ]]; then +elif [[ "${step}" = "upp" ]]; then + + nth_max=$((npe_node_max / npe_node_upp)) - nth_max=$((npe_node_max / npe_node_post)) + export NTHREADS_UPP=${nth_upp:-1} + [[ ${NTHREADS_UPP} -gt ${nth_max} ]] && export NTHREADS_UPP=${nth_max} + export APRUN_UPP="${launcher} -n ${npe_upp} -ppn ${npe_node_upp} --cpu-bind depth --depth ${NTHREADS_UPP}" - export NTHREADS_NP=${nth_np:-1} - [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} - export APRUN_NP="${launcher} -n ${npe_np:-${npe_post}} -ppn ${npe_node_post} --cpu-bind depth --depth ${NTHREADS_NP}" +elif [[ "${step}" = "atmos_products" ]]; then - export NTHREADS_DWN=${nth_dwn:-1} - [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} - export APRUN_DWN="${launcher} -np ${npe_dwn} ${mpmd_opt}" + export USE_CFP="YES" # Use MPMD for downstream product generation elif [[ "${step}" = "ecen" ]]; then @@ -280,8 +283,12 @@ elif [[ "${step}" = "waveawipsbulls" ]]; then unset PERL5LIB -elif [[ "${step}" = "wafsgrib2" ]] || [[ "${step}" = "wafsgrib20p25" ]]; then +elif [[ "${step:0:3}" = "mos" ]]; then - export USE_CFP=${USE_CFP:-"YES"} + export FORT_BUFFERED=TRUE + + if [[ "${step}" = "mos_stn_prep" ]]; then + export OMP_PROC_BIND=true + fi fi diff --git a/fix/product/gfs_minmon_cost.txt b/fix/product/gfs_minmon_cost.txt deleted file mode 100644 index 7b4341587b..0000000000 --- a/fix/product/gfs_minmon_cost.txt +++ /dev/null @@ -1,4 +0,0 @@ -cost_target:cost,grad,step,b,step?: -cost_number:4: -costterms_target:costterms Jb,Jo,Jc,Jl: - diff --git a/fix/product/gfs_minmon_gnorm.txt b/fix/product/gfs_minmon_gnorm.txt deleted file mode 100644 index 4687139cf3..0000000000 --- a/fix/product/gfs_minmon_gnorm.txt +++ /dev/null @@ -1,6 +0,0 @@ -igrad_target:cost,grad,step,b,step?: -igrad_number:5: -gnorm_target:penalty and grad 
reduction WRT outer and initial iter: -gnorm_number:14: -expected_gnorms:202: -gross_check_val:3.5E-4: diff --git a/fix/product/wafs_admin_msg b/fix/product/wafs_admin_msg deleted file mode 100755 index 5df92dba3b..0000000000 --- a/fix/product/wafs_admin_msg +++ /dev/null @@ -1,5 +0,0 @@ -WAFC WASHINGTON ADVISES ALL USERS OF TRIAL CB CLOUD, ICING AND TURBULENCE WAFS FORECASTS IN GRIB2 FORMAT THAT PRODUCTION PROBLEMS HAVE TRIGGERED CONTINGENCY MEASURES AND THE ISSUANCE OF NON-HARMONIZED FORECASTS. - -STANDARD WAFS FORECAST PARAMETERS IN GRIB2 FORMAT (WIND, TEMPERATURE, HUMIDITY, TROP HEIGHT, MAX WIND, MAX WIND HEIGHT) ARE UNAFFECTED, AND ARE AVAILABLE AS NORMAL. - -WAFC WASHINGTON APOLOGIZES FOR ANY INCONVENIENCE CAUSED DUE TO THIS ISSUE. diff --git a/jkhINFO b/jkhINFO index 50f41bcc94..1e7c2d00da 100644 --- a/jkhINFO +++ b/jkhINFO @@ -1,37 +1,11 @@ -31may - - seem to be running out of memory - * try increasing # of write tasks per group - * changed from 10 to 12 ==> increase #nodes by 8 - get MPI abort command; wrote Walter/Kate/David -29may23 - - update to UFS hash, 1ac938c, 12may23 + Joe's update +checked out dev_02jan24_9d901db from Judy's fork + (same version that is running under /scratch1/BMC/gsd-fv3-dev/jhender/test/emc_gw) - - + 02Jan24 global-workflow + UFS: 21Dec23, 991d652 + UPP: 07Nov23, 78f369b -19may23 - - update to top of develop (19May23) - - test UFS hash in checkout.sh (14Apr23 - 2247060) + use submodules instead of checkout.sh - sh checkout.sh - sh build_all.sh - sh link_workflow.sh etc jet - - - get error when running setup_expt.py - Traceback (most recent call last): - File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/./setup_expt.py", line 13, in - from hosts import Host - File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/hosts.py", line 6, in - from pygw.yaml_file import YAMLFile - File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/pygw/yaml_file.py", line 9, in - from .jinja import Jinja - File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/pygw/jinja.py", line 4, in - import jinja2 - ModuleNotFoundError: No module named 'jinja2' - - wrong Python environment ?? - * sent email to Kate, Walter, David - - sample xml file from David's directory - /lfs1/NESDIS/nesdis-rdo2/David.Huber/para/exp/384/384.xml + gfsatmos_products replaces gfspost tasks diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK index f0131ffb94..1535e07ae3 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK +++ b/jobs/JGDAS_ATMOS_GEMPAK @@ -27,7 +27,6 @@ export GRIB=pgrb2f export EXT="" export DBN_ALERT_TYPE=GDAS_GEMPAK -export SENDCOM=${SENDCOM:-NO} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} @@ -40,14 +39,14 @@ export model=${model:-gdas} # Define COM directories ############################################## for grid in 0p25 0p50 1p00; do - GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" done for grid in 1p00 0p25; do prod_dir="COM_ATMOS_GEMPAK_${grid}" GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL" - if [[ ${SENDCOM} == YES && ! -d "${!prod_dir}" ]] ; then + if [[ ! 
-d "${!prod_dir}" ]] ; then mkdir -m 775 -p "${!prod_dir}" fi done diff --git a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC index beadb7ccf8..6948d29df6 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC +++ b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC @@ -61,9 +61,7 @@ export COMINecmwf=${COMINecmwf:-$(compath.py ${envir}/ecmwf/${ecmwf_ver})/ecmwf} export COMOUTukmet=${COMOUT} export COMOUTecmwf=${COMOUT} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} ${COMOUTncdc} ${COMOUTukmet} ${COMOUTecmwf} -fi +mkdir -m 775 -p ${COMOUT} ${COMOUTncdc} ${COMOUTukmet} ${COMOUTecmwf} export pgmout=OUTPUT.$$ diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN index deccc0b28e..65479a9fec 100755 --- a/jobs/JGDAS_ATMOS_VERFOZN +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -4,83 +4,40 @@ # Set up environment for GDAS Ozone Monitor job ############################################################# source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" - -export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} - -#--------------------------------------------- -# Specify Execution Areas -# -export HOMEgfs_ozn=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export HOMEgdas_ozn=${HOMEgfs_ozn:-${NWROOT}/gfs.${gfs_ver}} -export PARMgdas_ozn=${PARMgfs_ozn:-${HOMEgfs_ozn}/parm/mon} -export SCRgdas_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} -export FIXgdas_ozn=${FIXgfs_ozn:-${HOMEgfs_ozn}/fix/gdas} - -export HOMEoznmon=${HOMEoznmon:-${HOMEgfs_ozn}} -export EXECoznmon=${EXECoznmon:-${HOMEoznmon}/exec} -export FIXoznmon=${FIXoznmon:-${HOMEoznmon}/fix} -export USHoznmon=${USHoznmon:-${HOMEoznmon}/ush} - - -#----------------------------------- -# source the parm file -# -. ${PARMgdas_ozn}/gdas_oznmon.parm - +source "${HOMEgfs}/ush/jjob_header.sh" -e "verfozn" -c "base verfozn" ############################################# # determine PDY and cyc for previous cycle ############################################# - -pdate=$(${NDATE} -6 ${PDY}${cyc}) -echo "pdate = ${pdate}" - -export P_PDY=${pdate:0:8} -export p_cyc=${pdate:8:2} +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} #--------------------------------------------- # OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE # -export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/atmos/oznmon} -export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/atmos/oznmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_OZNMON -if [[ ! -d ${TANKverf_ozn} ]]; then - mkdir -p -m 775 ${TANKverf_ozn} -fi - -#--------------------------------------- -# set up validation file -# -if [[ ${VALIDATE_DATA} -eq 1 ]]; then - export ozn_val_file=${ozn_val_file:-${FIXgdas_ozn}/gdas_oznmon_base.tar} -fi - -#--------------------------------------- -# Set necessary environment variables -# -export OZN_AREA=${OZN_AREA:-glb} -export oznstat=${oznstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.oznstat} +export oznstat="${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.oznstat" +export TANKverf_ozn=${TANKverf_ozn:-${COM_ATMOS_OZNMON}} +if [[ ! -d ${TANKverf_ozn} ]]; then mkdir -p -m 775 ${TANKverf_ozn} ; fi #------------------------------------------------------- # Execute the script. # -${OZNMONSH:-${SCRgdas_ozn}/exgdas_atmos_verfozn.sh} ${PDY} ${cyc} +"${SCRgfs}/exgdas_atmos_verfozn.sh" err=$? 
-[[ ${err} -ne 0 ]] && exit ${err} +if (( err != 0 )); then + exit "${err}" +fi ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD index 42e112c74f..d440f91b6e 100755 --- a/jobs/JGDAS_ATMOS_VERFRAD +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -4,94 +4,43 @@ # Set up environment for GDAS Radiance Monitor job ############################################################# source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" - -export COMPONENT="atmos" - -export RAD_DATA_IN=${DATA} - -export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} -export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} - -mkdir -p ${RAD_DATA_IN} -cd ${RAD_DATA_IN} - -############################################## -# Specify Execution Areas -############################################## -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} - -export FIXgdas=${FIXgdas:-${HOMEgfs}/fix/gdas} -export PARMmon=${PARMmon:-${HOMEgfs}/parm/mon} - -export HOMEradmon=${HOMEradmon:-${HOMEgfs}} -export EXECradmon=${EXECradmon:-${HOMEradmon}/exec} -export FIXradmon=${FIXradmon:-${FIXgfs}} -export USHradmon=${USHradmon:-${HOMEradmon}/ush} - - -################################### -# source the parm file -################################### -parm_file=${parm_file:-${PARMmon}/da_mon.parm} -. ${parm_file} - +source "${HOMEgfs}/ush/jjob_header.sh" -e "verfrad" -c "base verfrad" ############################################# # determine PDY and cyc for previous cycle ############################################# - -pdate=$(${NDATE} -6 ${PDY}${cyc}) -echo "pdate = ${pdate}" - -export P_PDY=${pdate:0:8} -export p_cyc=${pdate:8:2} +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} ############################################# # COMOUT - WHERE GSI OUTPUT RESIDES # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/atmos/radmon} -export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/atmos/radmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RADMON +YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_RADMON_PREV:COM_ATMOS_RADMON_TMPL -mkdir -p -m 775 ${TANKverf_rad} - -######################################## -# Set necessary environment variables -######################################## -export RAD_AREA=${RAD_AREA:-glb} - -export biascr=${biascr:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias} -export radstat=${radstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat} - -echo " " -echo "JOB HAS STARTED" -echo " " +export biascr="${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias" +export radstat="${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat" +export TANKverf_rad=${TANKverf_rad:-${COM_ATMOS_RADMON}} +export TANKverf_radM1=${TANKverf_radM1:-${COM_ATMOS_RADMON_PREV}} +if [[ ! -d ${TANKverf_rad} ]]; then mkdir -p -m 775 ${TANKverf_rad} ; fi +if [[ ! -d ${TANKverf_radM1} ]]; then mkdir -p -m 775 ${TANKverf_radM1} ; fi ######################################################## # Execute the script. 
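generate_com, used throughout these J-jobs, is the preamble helper that expands a COM directory template using the YMD/HH/RUN/GRID variables prefixed on the call; the NAME:TEMPLATE form (as in COM_ATMOS_RADMON_PREV:COM_ATMOS_RADMON_TMPL) fills a template under a different variable name, and the -rx flags request a read-only, exported result. A simplified stand-in for the expansion step (the template shape is assumed here; the real templates live with the workflow configuration and may carry extra pieces such as ${MEMDIR}):

# single-quoted template defers expansion until the values are known
COM_ATMOS_RADMON_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/products/atmos/radmon'
ROTDIR=/lfs/h2/ptmp/comroot RUN=gdas YMD=20240102 HH=00    # example values
eval "COM_ATMOS_RADMON=${COM_ATMOS_RADMON_TMPL}"           # trusted template, so eval is safe
echo "${COM_ATMOS_RADMON}"   # /lfs/h2/ptmp/comroot/gdas.20240102/00/products/atmos/radmon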
-${RADMONSH:-${SCRgfs}/exgdas_atmos_verfrad.sh} ${PDY} ${cyc} +"${SCRgfs}/exgdas_atmos_verfrad.sh" err=$? - -if [[ ${err} -ne 0 ]] ; then - exit ${err} -else - echo " " - echo "JOB HAS COMPLETED NORMALLY" - echo " " +if (( err != 0 )); then + exit "${err}" fi ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-YES} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${RAD_DATA_IN} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" +exit 0 diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST index 45d0ad8b1d..53408df8cf 100755 --- a/jobs/JGDAS_ENKF_FCST +++ b/jobs/JGDAS_ENKF_FCST @@ -8,7 +8,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs" # Set variables used in the script ############################################## export CDUMP=${RUN/enkf} -export rCDUMP="enkfgdas" ############################################## # Begin JOB SPECIFIC work @@ -31,6 +30,9 @@ fi export ENSEND=$((NMEM_EFCSGRP * 10#${ENSGRP})) export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1)) +if [[ ${DO_WAVE} == "YES" ]]; then + declare -rx RUNwave="${RUN}wave" +fi ############################################################### # Run relevant script diff --git a/jobs/JGDAS_FIT2OBS b/jobs/JGDAS_FIT2OBS index d673845404..7638e4f0c8 100755 --- a/jobs/JGDAS_FIT2OBS +++ b/jobs/JGDAS_FIT2OBS @@ -17,13 +17,17 @@ export CDATE vday=${CDATE:0:8} vcyc=${CDATE:8:2} -export COM_INA=${ROTDIR}/gdas.${vday}/${vcyc}/atmos +# These are used by fit2obs, so we can't change them to the standard COM variable names +# shellcheck disable=SC2153 +YMD=${vday} HH=${vcyc} generate_com -rx COM_INA:COM_ATMOS_ANALYSIS_TMPL +RUN=${CDUMP} YMD=${vday} HH=${vcyc} generate_com -rx COM_PRP:COM_OBS_TMPL + # We want to defer variable expansion, so ignore warning about single quotes # shellcheck disable=SC2016 export COM_INF='$ROTDIR/vrfyarch/gfs.$fdy/$fzz' -export COM_PRP=${ROTDIR}/gdas.${vday}/${vcyc}/obs export PRPI=${COM_PRP}/${RUN}.t${vcyc}z.prepbufr +# shellcheck disable=SC2153 export sig1=${COM_INA}/${RUN}.t${vcyc}z.atmanl.nc export sfc1=${COM_INA}/${RUN}.t${vcyc}z.atmanl.nc export CNVS=${COM_INA}/${RUN}.t${vcyc}z.cnvstat @@ -37,8 +41,8 @@ export HORZ_DIR=${ARCDIR}/horiz export COMLOX=${DATA}/fitx [[ ! -d "${COMLOX}" ]] && mkdir -p "${COMLOX}" -echo "echo err_chk">"${DATA}"/err_chk; chmod 755 "${DATA}"/err_chk -echo "echo postmsg">"${DATA}"/postmsg; chmod 755 "${DATA}"/postmsg +echo "echo err_chk">"${DATA}/err_chk"; chmod 755 "${DATA}/err_chk" +echo "echo postmsg">"${DATA}/postmsg"; chmod 755 "${DATA}/postmsg" ############################################## # Check spinup and available inputs diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT index 613de589d2..a1ecc116ea 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" @@ -21,7 +20,7 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} +EXSCRIPT=${GDASOCNBMATSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY index c85b5c886b..08e7da60c0 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" @@ -20,7 +19,7 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} +EXSCRIPT=${GDASOCNMBATVRFYSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT index 7e4294bd7c..afac9fbc25 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" @@ -11,7 +10,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalchkpt" -c "base ocnanal ocnana ############################################## # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") export GDATE export gPDY=${GDATE:0:8} export gcyc=${GDATE:8:2} @@ -35,7 +34,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_HISTORY_PREV:COM_ ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} +EXSCRIPT=${GDASOCNCHKPTSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST index eb9607ad21..6034fc5425 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" @@ -32,7 +31,7 @@ export PYTHONPATH # Run relevant script ############################################################### -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} +EXSCRIPT=${GDASOCNPOSTPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} ${EXSCRIPT} status=$? 
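Each of these ocean-analysis J-jobs resolves its ex-script the same way: a job-specific override variable with the sorc/gdas.cd copy as the fallback. The renames in this diff (GDASPREPPY -> GDASOCNBMATSH, GDASOCNCHKPTSH, GDASOCNRUNSH, GDASOCNPOSTPY, ...) matter because every job previously honored the same GDASPREPPY variable, so overriding the script for one job silently redirected all of them. The pattern, sketched:

# the override wins if set in the environment; otherwise fall back to the
# packaged default (HOMEgfs is assumed set by the job environment)
EXSCRIPT=${GDASOCNRUNSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh}
${EXSCRIPT}
status=$?
[[ ${status} -ne 0 ]] && exit "${status}"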
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP index c3fd5b5d65..2e49a9f14d 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanalprep" @@ -11,7 +10,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanal export CDUMP=${CDUMP:-${RUN:-"gfs"}} # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") export GDATE export gPDY=${GDATE:0:8} export gcyc=${GDATE:8:2} @@ -35,13 +34,14 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ # Add UFSDA to PYTHONPATH ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}" +pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7/" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}:${pyiodaPATH}" export PYTHONPATH ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} +EXSCRIPT=${GDASOCNPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN index 87ca5560c4..5871497223 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" export WIPE_DATA="NO" export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" @@ -18,7 +17,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalr ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} +EXSCRIPT=${GDASOCNRUNSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} ${EXSCRIPT} status=$? 
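The PYTHONPATH addition in the prep job above uses the ${VAR:+...} expansion to stay colon-clean: the existing value plus a separator is emitted only when PYTHONPATH is already non-empty, so an unset variable never produces a leading ":" (an empty entry, which Python would treat as the current working directory). Sketch:

ufsdaPATH="${HOMEgfs:-/tmp/gw}/sorc/gdas.cd/ush/"      # example fallback path
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}" # no stray ":" when unset
export PYTHONPATH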
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY index b0efa93532..aba76d7d1a 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY @@ -1,5 +1,4 @@ #!/bin/bash -export STRICT="NO" source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanalprep" @@ -11,7 +10,7 @@ export CDUMP=${CDUMP:-${RUN:-"gfs"}} export GDUMP=${GDUMP:-"gdas"} # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") export gPDY=${GDATE:0:8} export gcyc=${GDATE:8:2} @@ -28,11 +27,12 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS # Add UFSDA to PYTHONPATH export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${HOMEgfs}/sorc/gdas.cd/ush/eva:${PYTHONPATH} +export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} +EXSCRIPT=${GDASOCNVRFYPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG index 0119bc7f2d..516c7a403b 100755 --- a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +++ b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG @@ -30,9 +30,9 @@ export SENDAWIP=${SENDAWIP:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO -GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL +GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL -if [[ ${SENDCOM} == "YES" && ! -d "${COM_ATMOS_WMO}" ]] ; then +if [[ ! -d "${COM_ATMOS_WMO}" ]] ; then mkdir -m 775 -p "${COM_ATMOS_WMO}" fi @@ -47,21 +47,22 @@ export pgmout=OUTPUT.$$ ######################################################## # Execute the script. -${HOMEgfs}/scripts/exgfs_atmos_awips_20km_1p0deg.sh ${fcsthrs} +"${HOMEgfs}/scripts/exgfs_atmos_awips_20km_1p0deg.sh" "${fcsthrs}" export err=$?; err_chk ######################################################## ############################################ # print exec I/O output ############################################ -if [ -e "${pgmout}" ] ; then - cat ${pgmout} +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" fi ################################### # Remove temp directories ################################### -if [ "${KEEPDATA}" != "YES" ] ; then - rm -rf ${DATA} +cd "${DATAROOT}" || pass +if [[ "${KEEPDATA}" != "YES" ]] ; then + rm -rf "${DATA}" fi diff --git a/jobs/JGFS_ATMOS_AWIPS_G2 b/jobs/JGFS_ATMOS_AWIPS_G2 index 94151fbd72..5bd7749997 100755 --- a/jobs/JGFS_ATMOS_AWIPS_G2 +++ b/jobs/JGFS_ATMOS_AWIPS_G2 @@ -34,11 +34,9 @@ export SENDAWIP=${SENDAWIP:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO -GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL +GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL -if [[ ${SENDCOM} == "YES" && ! 
-d "${COM_ATMOS_WMO}" ]] ; then - mkdir -m 775 -p "${COM_ATMOS_WMO}" -fi +mkdir -m 775 -p "${COM_ATMOS_WMO}" export pgmout=OUTPUT.$$ diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS index 85e4bf7651..5ac97e079c 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS +++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS @@ -1,22 +1,17 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis" -c "base genesis" -# TODO (#1220) Determine if this is still needed -export RUN_ENVIR=${RUN_ENVIR:-"nco"} - ############################################## # Set variables used in the exglobal script ############################################## export cmodel=${RUN} #################################### -# SENDCOM - Copy Files From TMPDIR to $COMOUT # SENDDBN - Issue DBNet Client Calls #################################### -export SENDCOM=YES export SENDDBN=${SENDDBN:-NO} export SENDECF=${SENDECF:-NO} @@ -33,7 +28,7 @@ export SCRIPTens_tracker=${SCRIPTens_tracker:-${HOMEens_tracker}/scripts} # Define COM directories ############################################## YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GENESIS -YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL +YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL # The following variables are used by the tracker scripts which are outside # of global-workflow and therefore can't be standardized at this time diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER index 3aa3c6f5f4..a91d8e3c5b 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER +++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER @@ -1,12 +1,9 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "tracker" -c "base tracker" -# TODO (#1220) Determine if this is still needed -export RUN_ENVIR=${RUN_ENVIR:-"nco"} - export COMPONENT="atmos" @@ -18,10 +15,8 @@ export CDUMP=${RUN/enkf} #################################### -# SENDCOM - Copy Files From TMPDIR to $COMOUT # SENDDBN - Issue DBNet Client Calls #################################### -export SENDCOM=YES export SENDDBN=${SENDDBN:-NO} export SENDECF=${SENDECF:-NO} @@ -38,7 +33,7 @@ export USHens_tracker=${USHens_tracker:-${HOMEens_tracker}/ush} # Define COM and Data directories ############################################## YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_TRACK COM_ATMOS_GENESIS -YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL +YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_GRID_TMPL if [[ ! 
-d "${COM_ATMOS_TRACK}" ]]; then mkdir -p "${COM_ATMOS_TRACK}"; fi @@ -84,7 +79,6 @@ ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} export err=$?; err_chk -#if [ "$SENDCOM" = 'YES' ]; then # cat ${DATA}/trak.avnx.atcfunix.${PDY}${cyc} | \ # sed s:AVNX:GFSO:g \ # > ${COMOUT}/gfso.t${cyc}z.cyclone.trackatcfunix @@ -92,7 +86,6 @@ export err=$?; err_chk # cat ${DATA}/trak.avnx.atcfunix.${PDY}${cyc} | \ # sed s:AVNX:AVNO:g \ # > ${COMOUT}/avn.t${cyc}z.cyclone.trackatcfunix -#fi ############################################## # Final processing diff --git a/jobs/JGFS_ATMOS_FBWIND b/jobs/JGFS_ATMOS_FBWIND index f4b94442e8..e04b06c0d6 100755 --- a/jobs/JGFS_ATMOS_FBWIND +++ b/jobs/JGFS_ATMOS_FBWIND @@ -35,9 +35,7 @@ export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} -fi +mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} ######################################################## diff --git a/jobs/JGFS_ATMOS_FSU_GENESIS b/jobs/JGFS_ATMOS_FSU_GENESIS index e5fd5ff3c3..cc730e21bb 100755 --- a/jobs/JGFS_ATMOS_FSU_GENESIS +++ b/jobs/JGFS_ATMOS_FSU_GENESIS @@ -1,9 +1,7 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" - -export RUN_ENVIR=${RUN_ENVIR:-"nco"} +source "${HOMEgfs}/ush/jjob_header.sh" -e "genesis_fsu" -c "base genesis_fsu" export COMPONENT="atmos" @@ -16,10 +14,8 @@ export CDUMP=${CDUMP:-${RUN:-"gfs"}} #################################### -# SENDCOM - Copy Files From TMPDIR to $COMOUT # SENDDBN - Issue DBNet Client Calls #################################### -export SENDCOM=YES export SENDDBN=${SENDDBN:-NO} export SENDECF=${SENDECF:-NO} diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK index 161f0e0883..ddf10342d2 100755 --- a/jobs/JGFS_ATMOS_GEMPAK +++ b/jobs/JGFS_ATMOS_GEMPAK @@ -39,14 +39,14 @@ export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} for grid in 0p25 0p50 1p00; do - GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" done for grid in 1p00 0p50 0p25 40km 35km_atl 35km_pac; do prod_dir="COM_ATMOS_GEMPAK_${grid}" GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL" - if [[ ${SENDCOM} == YES && ! -d "${!prod_dir}" ]] ; then + if [[ ! 
-d "${!prod_dir}" ]] ; then mkdir -m 775 -p "${!prod_dir}" fi done diff --git a/jobs/JGFS_ATMOS_GEMPAK_META b/jobs/JGFS_ATMOS_GEMPAK_META index 0a9f5bdd90..b7786b1f49 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_META +++ b/jobs/JGFS_ATMOS_GEMPAK_META @@ -62,9 +62,7 @@ export COMINnam=${COMINnam:-$(compath.py ${envir}/nam/${nam_ver})/nam} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} -fi +mkdir -m 775 -p ${COMOUT} ######################################################## diff --git a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF index cc9d445965..58b24c5e49 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF +++ b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF @@ -59,9 +59,7 @@ export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} -fi +mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} export pgmout=OUTPUT.$$ diff --git a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC index a1c2518a44..47415a39ff 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +++ b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC @@ -1,7 +1,5 @@ #! /usr/bin/env bash -# TODO (#1222) This job is not part of the rocoto suite - ############################################ # GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION ############################################ @@ -12,12 +10,12 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base" ################################ # Set up the HOME directory ################################ -export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} -export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} -export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config} -export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix} -export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush} -export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts} +export EXECgfs="${EXECgfs:-${HOMEgfs}/exec}" +export PARMgfs="${PARMgfs:-${HOMEgfs}/parm}" +export EXPDIR="${EXPDIR:-${HOMEgfs}/parm/config}" +export FIXgempak="${FIXgempak:-${HOMEgfs}/gempak/fix}" +export USHgempak="${USHgempak:-${HOMEgfs}/gempak/ush}" +export SRCgfs="${SRCgfs:-${HOMEgfs}/scripts}" # For half-degree P Grib files #export DO_HD_PGRB=YES @@ -33,27 +31,20 @@ export EXT="" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} -export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${NET}.${PDY})/${cyc}/${COMPONENT}/gempak} - -export SENDDBN=${SENDDBN:-NO} -export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} - -if [ ${SENDCOM} = YES ] ; then - mkdir -m 775 -p ${COMOUT} -fi - +export COMIN="${COMIN:-$(compath.py "${envir}"/"${NET}"/"${gfs_ver}")/${RUN}.${PDY}/${cyc}/${COMPONENT}}" +export COMOUT="${COMOUT:-$(compath.py -o "${NET}"/"${gfs_ver}"/"${NET}"."${PDY}")/${cyc}/${COMPONENT}/gempak}" -# TODO - Assess what is going on with overwriting $DATA here (#1224) +export SENDDBN="${SENDDBN:-NO}" +export DBNROOT="${DBNROOT:-${UTILROOT}/fakedbn}" -export DATA_HOLD=${DATA} +mkdir -m 775 -p "${COMOUT}" ################################################################# # Execute the script for the regular grib ################################################################# -export DATA=${DATA_HOLD}/SPECIAL -mkdir -p ${DATA} -cd ${DATA} +export DATA_SPECIAL="${DATA}/SPECIAL" +mkdir -p "${DATA_SPECIAL}" +cd "${DATA_SPECIAL}" 
|| exit 1 export DBN_ALERT_TYPE=GFS_GOESSIM_GEMPAK export RUN2=gfs_goessim @@ -67,14 +58,14 @@ echo "RUNS the Program" ######################################################## # Execute the script. -${SRCgfs}/exgfs_atmos_goes_nawips.sh +"${SRCgfs}/exgfs_atmos_goes_nawips.sh" ################################################################# # Execute the script for the 221 grib -export DATA=${DATA_HOLD}/SPECIAL221 -mkdir -p ${DATA} -cd ${DATA} +export DATA_SPECIAL221="${DATA}/SPECIAL221" +mkdir -p "${DATA_SPECIAL221}" +cd "${DATA_SPECIAL221}" || exit 1 export DBN_ALERT_TYPE=GFS_GOESSIM221_GEMPAK export RUN2=gfs_goessim221 @@ -88,12 +79,12 @@ echo "RUNS the Program" ######################################################## # Execute the script. -${SRCgfs}/exgfs_atmos_goes_nawips.sh +"${SRCgfs}/exgfs_atmos_goes_nawips.sh" export err=$?; err_chk ######################################################## echo "end of program" -cd ${DATA_HOLD} +cd "${DATA}" || exit 1 echo "######################################" echo " SPECIAL.OUT " echo "######################################" @@ -101,14 +92,14 @@ echo "######################################" ############################################ # print exec I/O output ############################################ -if [ -e "${pgmout}" ] ; then - cat ${pgmout} +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" fi ################################### # Remove temp directories ################################### -if [ "${KEEPDATA}" != "YES" ] ; then +if [[ "${KEEPDATA}" != "YES" ]] ; then rm -rf "${DATA}" fi diff --git a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS index 48b13c3d9e..a98835ada2 100755 --- a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +++ b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS @@ -10,7 +10,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "npoess" -c "base" export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} - ################################ # Set up the HOME directory ################################ @@ -34,52 +33,9 @@ export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GOES -GRID="0p50" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p50:COM_ATMOS_GRIB_TMPL - -if [[ ${SENDCOM} == "YES" ]]; then - mkdir -m 775 -p "${COM_ATMOS_GOES}" -fi - -# TODO - This should be in the ex-script (#1226) - -#################################### -# Specify Forecast Hour Range -#################################### -export SHOUR=000 -export FHOUR=180 -export FHINC=003 +GRID="0p50" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p50:COM_ATMOS_GRIB_GRID_TMPL -#################################### -# Specify Timeout Behavior of Post -# -# SLEEP_TIME - Amount of time to wait for -# a restart file before exiting -# SLEEP_INT - Amount of time to wait between -# checking for restart files -#################################### -export SLEEP_TIME=900 -export SLEEP_INT=5 - -#################################### -# Check if this is a restart -#################################### -if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb2" ]]; then - modelrecvy=$(cat < "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb") - recvy_cyc="${modelrecvy:8:2}" - recvy_shour="${modelrecvy:10:13}" - - if [[ ${RERUN} == "NO" ]]; then - NEW_SHOUR=$(( recvy_shour + FHINC )) - if (( NEW_SHOUR >= SHOUR )); then - export SHOUR=${NEW_SHOUR} - fi - if (( recvy_shour >= FHOUR )); then - echo "Forecast Pgrb Generation Already Completed to ${FHOUR}" - else - echo "Starting: PDY=${PDY} cycle=t${recvy_cyc}z 
SHOUR=${SHOUR}" - fi -fi +mkdir -m 775 -p "${COM_ATMOS_GOES}" ############################################################# # Execute the script diff --git a/jobs/JGFS_ATMOS_POSTSND b/jobs/JGFS_ATMOS_POSTSND index 2318d70e31..721dd27628 100755 --- a/jobs/JGFS_ATMOS_POSTSND +++ b/jobs/JGFS_ATMOS_POSTSND @@ -15,7 +15,6 @@ export CDUMP=${RUN/enkf} ######################################## export model=${model:-gfs} -export SENDCOM=${SENDCOM:-YES} export SENDDBN=${SENDDBN:-YES} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} diff --git a/jobs/JGFS_ATMOS_VERIFICATION b/jobs/JGFS_ATMOS_VERIFICATION new file mode 100755 index 0000000000..23a450cd55 --- /dev/null +++ b/jobs/JGFS_ATMOS_VERIFICATION @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "metp" -c "base metp" + +############################################################### +## Abstract: +## Inline METplus verification and diagnostics driver script +## HOMEgfs : /full/path/to/workflow +## EXPDIR : /full/path/to/config/files +## CDATE : current analysis date (YYYYMMDDHH) +## CDUMP : cycle name (gdas / gfs) +## PDY : current date (YYYYMMDD) +## cyc : current cycle (HH) +## SDATE_GFS : first date of GFS cycle (YYYYMMDDHHMM) +## METPCASE : METplus verification use case (g2g1 | g2o1 | pcp1) +############################################################### + +# TODO: This should not be permitted as DATAROOT is set at the job-card level. +# TODO: DATAROOT is being used as DATA in metp jobs. This should be rectified in metp. +# TODO: The temporary directory is DATA and is created at the top of the J-Job. +# TODO: remove this line +export DATAROOT=${DATA} + +VDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${VRFYBACK_HRS} hours") +export VDATE=${VDATE:0:8} + +# Since this is currently a one-element list, shellcheck thinks we would rather run this as a command +# shellcheck disable=SC2041 +for grid in '1p00'; do + prod_dir="COM_ATMOS_GRIB_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL" +done + +# TODO: If none of these are on, why are we running this job? +if [[ "${RUN_GRID2GRID_STEP1}" == "YES" || "${RUN_GRID2OBS_STEP1}" == "YES" || "${RUN_PRECIP_STEP1}" == "YES" ]]; then + ${VERIF_GLOBALSH} + status=$?
+ if (( status != 0 )); then exit "${status}"; fi +fi + +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATAROOT}" ; fi # TODO: This should be $DATA + diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE index 065ebe8d0a..ff8e2e9569 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -9,7 +9,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroan # Set variables used in the script ############################################## # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE index 2f8c222e18..79320b77ee 100755 --- a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlinit" -c "base aeroanl aeroanl # Set variables used in the script ############################################## # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index 2d2f8c8814..e6c016e703 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -11,16 +11,19 @@ export CDUMP=${RUN/enkf} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMOS_GEMPAK \ COM_ATMOS_GENESIS COM_ATMOS_HISTORY COM_ATMOS_INPUT COM_ATMOS_MASTER COM_ATMOS_RESTART \ - COM_ATMOS_TRACK COM_ATMOS_WAFS COM_ATMOS_WMO \ - COM_CHEM_HISTORY \ - COM_ICE_HISTORY COM_ICE_INPUT \ + COM_ATMOS_TRACK COM_ATMOS_WMO \ + COM_CHEM_HISTORY COM_CHEM_ANALYSIS\ + COM_MED_RESTART \ + COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART \ COM_OBS COM_TOP \ - COM_OCEAN_DAILY COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_XSECT \ - COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION + COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \ + COM_OCEAN_ANALYSIS \ + COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \ + COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON for grid in "0p25" "0p50" "1p00"; do - YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" - YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL" done ############################################################### diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE index 246502cdfa..c50214aad1 100755 --- a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atm # Set variables used in the script ############################################## # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" @@ -20,7 +20,7 @@ GDUMP="gdas" RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx 
COM_OBS RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ - COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL ############################################################### # Run relevant script diff --git a/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP b/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP index fdaca08240..3312ef27f5 100755 --- a/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP +++ b/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP @@ -3,8 +3,6 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "sfc_prep" -c "base" -export RUN_ENVIR=${RUN_ENVIR:-"nco"} - export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} diff --git a/jobs/JGLOBAL_ATMOS_PRODUCTS b/jobs/JGLOBAL_ATMOS_PRODUCTS new file mode 100755 index 0000000000..24e7edacdd --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_PRODUCTS @@ -0,0 +1,47 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmos_products" -c "base atmos_products" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Construct COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER + +for grid in '0p25' '0p50' '1p00'; do + prod_dir="COM_ATMOS_GRIB_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_GRID_TMPL" + if [[ ! -d "${!prod_dir}" ]]; then mkdir -m 775 -p "${!prod_dir}"; fi +done + +# Variables used in this job +export PREFIX="${RUN}.t${cyc}z." + +############################################################### +# Run exglobal script +"${HOMEgfs}/scripts/exglobal_atmos_products.sh" +status=$? +(( status != 0 )) && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]]; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA:-NO}" = "NO" ]] && rm -rf "${DATA}" + + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_SFCANL b/jobs/JGLOBAL_ATMOS_SFCANL index dcedb7b65b..0d709e56dd 100755 --- a/jobs/JGLOBAL_ATMOS_SFCANL +++ b/jobs/JGLOBAL_ATMOS_SFCANL @@ -8,9 +8,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "sfcanl" -c "base sfcanl" # Set variables used in the script ############################################## export CDUMP="${RUN/enkf}" -if [[ ${RUN_ENVIR} = "nco" ]]; then - export ROTDIR=${COMROOT:?}/${NET}/${envir} -fi ############################################## diff --git a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC index d5e4834851..82c3a5c755 100755 --- a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +++ b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC @@ -3,9 +3,6 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep" -# TODO (#1220) Evaluate if this is still needed -export RUN_ENVIR=${RUN_ENVIR:-"nco"} - ############################################## # Set variables used in the exglobal script diff --git a/jobs/JGLOBAL_ATMOS_UPP b/jobs/JGLOBAL_ATMOS_UPP new file mode 100755 index 0000000000..9364f33225 --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_UPP @@ -0,0 +1,44 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "upp" -c "base upp" + +############################################## +# Set variables used in the exglobal script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Construct COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER +if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -m 775 -p "${COM_ATMOS_MASTER}"; fi + + +############################################################### +# Run relevant exglobal script + +"${HOMEgfs}/scripts/exglobal_atmos_upp.py" +status=$? +(( status != 0 )) && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]]; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA:-NO}" = "NO" ]] && rm -rf "${DATA}" + + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_VMINMON b/jobs/JGLOBAL_ATMOS_VMINMON new file mode 100755 index 0000000000..dbd76aed5b --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_VMINMON @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########################################################### +# Global Minimization Monitor (MinMon) job +########################################################### +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" + +############################################# +# Determine PDY and cyc for previous cycle +############################################# +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} + +############################################# +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_MINMON +YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL + +export gsistat="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.gsistat" +export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}} + +if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 "${M_TANKverf}" ; fi +if [[ ! -d ${M_TANKverfM1} ]]; then mkdir -p -m 775 "${M_TANKverfM1}" ; fi + +######################################################## +# Execute the script. +"${SCRgfs}/exglobal_atmos_vminmon.sh" +err=$? 
+if (( err != 0 )); then + exit "${err}" +fi + +################################ +# Remove the Working Directory +################################ +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE index c0bc56f6e2..52a782d7c4 100755 --- a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE +++ b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE @@ -9,7 +9,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atmanlfi # Set variables used in the script ############################################## # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE index 2d794fb846..4ef5e6392d 100755 --- a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE +++ b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanlini # Set variables used in the script ############################################## # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" diff --git a/jobs/JGLOBAL_ATM_PREP_IODA_OBS b/jobs/JGLOBAL_ATM_PREP_IODA_OBS new file mode 100755 index 0000000000..ef0e682468 --- /dev/null +++ b/jobs/JGLOBAL_ATM_PREP_IODA_OBS @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prepatmiodaobs" -c "base prepatmiodaobs" + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + + +############################################################### +# Run relevant script +EXSCRIPT=${BUFR2IODASH:-${HOMEgfs}/ush/run_bufr2ioda.py} +${EXSCRIPT} "${PDY}${cyc}" "${RUN}" "${DMPDIR}" "${IODAPARM}" "${COM_OBS}/" +status=$? +[[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}") + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_CLEANUP b/jobs/JGLOBAL_CLEANUP new file mode 100755 index 0000000000..ad938ccf60 --- /dev/null +++ b/jobs/JGLOBAL_CLEANUP @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "cleanup" -c "base cleanup" + +"${HOMEgfs}/scripts/exglobal_cleanup.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. 
ABORT!"; exit 1) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 + diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index 5be44a8c97..b2825af54f 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -17,27 +17,36 @@ rCDUMP=${CDUMP} [[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas" # Forecast length for GFS forecast -if [ ${CDUMP} = "gfs" ]; then +case ${RUN} in + *gfs | *gefs) + # shellcheck disable=SC2153 export FHMAX=${FHMAX_GFS} + # shellcheck disable=SC2153 export FHOUT=${FHOUT_GFS} export FHMAX_HF=${FHMAX_HF_GFS} export FHOUT_HF=${FHOUT_HF_GFS} -else + ;; + *gdas) export FHMAX_HF=0 export FHOUT_HF=0 -fi + ;; + *) + echo "FATAL ERROR: Unsupported RUN '${RUN}'" + exit 1 +esac # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +GDATE=$(date --utc -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H) +declare -rx GDATE # shellcheck disable= -declare -x gPDY="${GDATE:0:8}" -declare -x gcyc="${GDATE:8:2}" +declare -rx gPDY="${GDATE:0:8}" +declare -rx gcyc="${GDATE:8:2}" # Construct COM variables from templates (see config.com) YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ - COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_TOP + COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_TOP COM_CONF RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL @@ -52,13 +61,13 @@ fi if [[ ${DO_OCN} == "YES" ]]; then YMD=${PDY} HH=${cyc} generate_com -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \ COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS - RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL fi if [[ ${DO_ICE} == "YES" ]]; then YMD=${PDY} HH=${cyc} generate_com -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART - RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL fi diff --git a/jobs/JGLOBAL_LAND_ANALYSIS b/jobs/JGLOBAL_LAND_ANALYSIS new file mode 100755 index 0000000000..3ff7e72a35 --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS @@ -0,0 +1,46 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanl" -c "base landanl" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_LAND_ANALYSIS COM_CONF + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_LAND_ANALYSIS}" "${COM_CONF}" + +############################################################### +# Run relevant script + +EXSCRIPT=${LANDANLPY:-${HOMEgfs}/scripts/exglobal_land_analysis.py} +${EXSCRIPT} +status=$? 
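The case statement added to JGLOBAL_FORECAST above relies on glob matching, so ensemble runs fall through to the same branches as their deterministic counterparts (enkfgfs matches *gfs, enkfgdas matches *gdas). A small sketch with illustrative RUN values:

#!/usr/bin/env bash
# Sketch of the glob-style case dispatch in JGLOBAL_FORECAST above;
# the RUN values in the loop are illustrative.
for RUN in gfs enkfgfs gdas enkfgdas gefs; do
  case ${RUN} in
    *gfs | *gefs) echo "${RUN}: long-range settings (FHMAX_GFS et al.)" ;;
    *gdas)        echo "${RUN}: cycled settings (no high-frequency output)" ;;
    *)            echo "FATAL ERROR: Unsupported RUN '${RUN}'" ;;
  esac
done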
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_PREP_LAND_OBS b/jobs/JGLOBAL_PREP_LAND_OBS index 164f78d8f6..025adae529 100755 --- a/jobs/JGLOBAL_PREP_LAND_OBS +++ b/jobs/JGLOBAL_PREP_LAND_OBS @@ -8,7 +8,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "preplandobs" -c "base preplandobs" ############################################## # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 -GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") gPDY=${GDATE:0:8} gcyc=${GDATE:8:2} GDUMP="gdas" diff --git a/jobs/JGLOBAL_PREP_OCEAN_OBS b/jobs/JGLOBAL_PREP_OCEAN_OBS new file mode 100755 index 0000000000..a100aca89c --- /dev/null +++ b/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -0,0 +1,46 @@ +#!/bin/bash +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prepoceanobs" -c "base prepoceanobs" + + +############################################## +# Set variables used in the script +############################################## + +export COMIN_OBS="${DATA}" +YMD=${PDY} HH=${cyc} generate_com -rx COMOUT_OBS:COM_OBS_TMPL + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Add prep_marine_obs.py to PYTHONPATH +export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPOCNOBSPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Handle the temporary working directory +########################################## +cd "${DATAROOT}" || (echo "FATAL ERROR: ${DATAROOT} does not exist. ABORT!"; exit 1) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC new file mode 100755 index 0000000000..4c94990fde --- /dev/null +++ b/jobs/JGLOBAL_STAGE_IC @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" + +# Restart conditions for GFS cycle come from GDAS +# shellcheck disable=SC2153 +rCDUMP=${CDUMP} +[[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas" +export rCDUMP + +# Execute the Script +"${HOMEgfs}/scripts/exglobal_stage_ic.sh" + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. 
ABORT!"; exit 1) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_WAVE_GEMPAK b/jobs/JGLOBAL_WAVE_GEMPAK index b7c97ce571..89c389fa11 100755 --- a/jobs/JGLOBAL_WAVE_GEMPAK +++ b/jobs/JGLOBAL_WAVE_GEMPAK @@ -10,7 +10,6 @@ export errchk=${errchk:-err_chk} # Set COM Paths ################################### export DBN_ALERT_TYPE=GFS_WAVE_GEMPAK -export SENDCOM=${SENDCOM:-YES} export SENDDBN=${SENDDBN:-YES} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT index 49fccad66f..7ad742f25a 100755 --- a/jobs/JGLOBAL_WAVE_INIT +++ b/jobs/JGLOBAL_WAVE_INIT @@ -10,7 +10,7 @@ export errchk=${errchk:-err_chk} export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} @@ -18,9 +18,7 @@ export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP -if [ ${SENDCOM} = YES ]; then - mkdir -m 775 -p ${COM_WAVE_PREP} -fi +mkdir -m 775 -p ${COM_WAVE_PREP} # Set mpi serial command export wavempexec=${wavempexec:-"mpirun -n"} diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT index 9016d624d7..9d404077fd 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNT +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT @@ -9,7 +9,7 @@ export errchk=${errchk:-err_chk} export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL index c193a28cf7..3de49fcc3b 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL @@ -13,7 +13,7 @@ export CDATE=${PDY}${cyc} export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT index 3ee1d56eef..1b573435a3 100755 --- a/jobs/JGLOBAL_WAVE_POST_PNT +++ b/jobs/JGLOBAL_WAVE_POST_PNT @@ -9,7 +9,7 @@ export errchk=${errchk:-err_chk} export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS index 47e7063db4..231b793de7 100755 --- a/jobs/JGLOBAL_WAVE_POST_SBS +++ b/jobs/JGLOBAL_WAVE_POST_SBS @@ -9,7 +9,7 @@ export errchk=${errchk:-err_chk} export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS index 794258e756..3a2947af56 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +++ b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS @@ -9,7 +9,6 @@ export 
errchk=${errchk:-err_chk} ################################### # Set COM Paths ################################### -export SENDCOM=${SENDCOM:-YES} export SENDDBN_NTC=${SENDDBN_NTC:-YES} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED index a2134461da..4b32c709bf 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +++ b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED @@ -9,7 +9,6 @@ export errchk=${errchk:-err_chk} ################################### # Set COM Paths ################################### -export SENDCOM=${SENDCOM:-YES} export SENDDBN_NTC=${SENDDBN_NTC:-YES} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} @@ -18,9 +17,7 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_WMO if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi -if [ ${SENDCOM} = YES ]; then - mkdir -p "${COM_WAVE_WMO}" -fi +mkdir -p "${COM_WAVE_WMO}" ################################### # Execute the Script diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP index 5ff48d886c..f246045f53 100755 --- a/jobs/JGLOBAL_WAVE_PREP +++ b/jobs/JGLOBAL_WAVE_PREP @@ -17,7 +17,7 @@ export MP_PULSE=0 export CDO=${CDO_ROOT}/bin/cdo # Path to HOME Directory -export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} export USHwave=${USHwave:-${HOMEgfs}/ush} export EXECwave=${EXECwave:-${HOMEgfs}/exec} diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh index 8f5a445de4..16bb6887fd 100755 --- a/jobs/rocoto/aeroanlfinal.sh +++ b/jobs/rocoto/aeroanlfinal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### # Execute the JJOB diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh index 4e3d32ff9f..9aaf255782 100755 --- a/jobs/rocoto/aeroanlinit.sh +++ b/jobs/rocoto/aeroanlinit.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh index 0ec2fb8437..bcd86e3fbf 100755 --- a/jobs/rocoto/aeroanlrun.sh +++ b/jobs/rocoto/aeroanlrun.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git 
a/jobs/rocoto/anal.sh b/jobs/rocoto/anal.sh index d99152ef19..00f03e3832 100755 --- a/jobs/rocoto/anal.sh +++ b/jobs/rocoto/anal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS +"${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS" status=$? -exit ${status} +exit "${status}" diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh index 2f62d8b354..d949b7d76f 100755 --- a/jobs/rocoto/arch.sh +++ b/jobs/rocoto/arch.sh @@ -16,5 +16,4 @@ export jobid="${job}.$$" "${HOMEgfs}"/jobs/JGLOBAL_ARCHIVE status=$? - exit "${status}" diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh index 3c75c52cb0..3d3c3ba9e6 100755 --- a/jobs/rocoto/atmanlfinal.sh +++ b/jobs/rocoto/atmanlfinal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### # Execute the JJOB diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh index 7bb2587f0b..13c7d8710b 100755 --- a/jobs/rocoto/atmanlinit.sh +++ b/jobs/rocoto/atmanlinit.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/atmanlrun.sh index aad80e0b06..1b87cb4074 100755 --- a/jobs/rocoto/atmanlrun.sh +++ b/jobs/rocoto/atmanlrun.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh index 838e9712f8..5ffaa92754 100755 --- a/jobs/rocoto/atmensanlfinal.sh +++ b/jobs/rocoto/atmensanlfinal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### # Execute the JJOB diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh index 0ab78a1083..2c2204548a 100755 --- a/jobs/rocoto/atmensanlinit.sh +++ b/jobs/rocoto/atmensanlinit.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" 
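The `${PYTHONPATH:+${PYTHONPATH}:}` expansion in these wrappers only emits the joining colon when PYTHONPATH is already set, so an unset variable never produces a stray leading colon. A small sketch of the idiom; /opt/example is a made-up path, not part of the workflow:

#!/usr/bin/env bash
# Sketch of the conditional-prefix idiom used for PYTHONPATH above.
unset PYTHONPATH
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}/opt/example"
echo "${PYTHONPATH}"   # -> /opt/example (no leading colon)
PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}/opt/example2"
echo "${PYTHONPATH}"   # -> /opt/example:/opt/example2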
############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlrun.sh index 91efdb3768..d991e3eb82 100755 --- a/jobs/rocoto/atmensanlrun.sh +++ b/jobs/rocoto/atmensanlrun.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmos_products.sh b/jobs/rocoto/atmos_products.sh new file mode 100755 index 0000000000..472f202de8 --- /dev/null +++ b/jobs/rocoto/atmos_products.sh @@ -0,0 +1,35 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +## atmosphere products driver script +## FHRLST : forecast hour list to post-process (e.g. -f001, f000, f000_f001_f002, ...) +############################################################### + +# Source FV3GFS workflow modules +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="atmos_products" +export jobid="${job}.$$" + +############################################################### +# shellcheck disable=SC2153,SC2001 +IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to array + +#--------------------------------------------------------------- +# Execute the JJOB +for fhr in "${fhrs[@]}"; do + # The analysis fhr is -001. Performing math on negative, leading 0 integers is tricky. + # The negative needs to be in front of "10#", so do some regex magic to make it happen. + fhr="10#${fhr}" + fhr=${fhr//10\#-/-10\#} + export FORECAST_HOUR=$(( fhr )) + "${HOMEgfs}/jobs/JGLOBAL_ATMOS_PRODUCTS" + status=$? + if (( status != 0 )); then exit "${status}"; fi +done + +exit 0 diff --git a/jobs/rocoto/awips.sh b/jobs/rocoto/awips_20km_1p0deg.sh similarity index 65% rename from jobs/rocoto/awips.sh rename to jobs/rocoto/awips_20km_1p0deg.sh index f9289255f9..e1bf623883 100755 --- a/jobs/rocoto/awips.sh +++ b/jobs/rocoto/awips_20km_1p0deg.sh @@ -5,7 +5,6 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## Abstract: ## Inline awips driver script -## RUN_ENVIR : runtime environment (emc | nco) ## HOMEgfs : /full/path/to/workflow ## EXPDIR : /full/path/to/config/files ## CDATE : current analysis date (YYYYMMDDHH) @@ -20,14 +19,13 @@ source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? 
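The `10#` manipulation in jobs/rocoto/atmos_products.sh above is easy to misread: forecast hours arrive zero-padded (f000, f006, ...) and would otherwise be parsed as octal, while the analysis hour -001 needs its sign moved in front of the base prefix before arithmetic. A minimal sketch of just that parsing step; the FHRLST value is illustrative:

#!/usr/bin/env bash
# Sketch of the zero-padded / negative forecast-hour parsing used in
# jobs/rocoto/atmos_products.sh; FHRLST here is an illustrative input.
FHRLST="f000_f006_-f001"
IFS='_' read -ra fhrs <<< "${FHRLST//f}"   # strip the 'f's -> 000 006 -001
for fhr in "${fhrs[@]}"; do
  fhr="10#${fhr}"          # force base 10 so 008/009 are not read as octal
  fhr=${fhr//10\#-/-10\#}  # move a leading '-' in front of the base prefix
  echo "FORECAST_HOUR=$(( fhr ))"   # -> 0, 6, -1
done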
(( status != 0 )) && exit "${status}" -export job="awips" +export job="awips_20km_1p0deg" export jobid="${job}.$$" -# TODO (#1228) - This script is doing more than just calling a j-job -# Also, this forces us to call the config files here instead of the j-job source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" -fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') +# shellcheck disable=SC2153 +fhrlst=$(echo "${FHRLST}" | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') ############################################################### @@ -35,9 +33,10 @@ fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') echo echo "=============== BEGIN AWIPS ===============" -for fhr in ${fhrlst}; do +for fhr3 in ${fhrlst}; do + fhr=$(( 10#${fhr3} )) if (( fhr > FHMAX_GFS )); then - echo "Nothing to process for FHR = ${fhr}, cycle" + echo "Nothing to process for FHR = ${fhr3}, cycle" continue fi @@ -45,24 +44,17 @@ for fhr in ${fhrlst}; do fhmax=84 if (( fhr >= fhmin && fhr <= fhmax )); then if ((fhr % 3 == 0)); then - fhr3=$(printf %03d $((10#${fhr}))) - export fcsthrs=${fhr3} - ${AWIPS20SH} - fi - - if ((fhr % 6 == 0)); then - ${AWIPSG2SH} + export fcsthrs="${fhr3}" + "${AWIPS20KM1P0DEGSH}" fi fi fhmin=90 fhmax=240 - if (( fhr >= fhmin && fhr <= fhmax )); then + if (( fhr >= fhmin && fhr <= fhmax )); then if ((fhr % 6 == 0)); then - fhr3=$(printf %03i $((10#${fhr}))) - export fcsthrs=${fhr3} - ${AWIPS20SH} - ${AWIPSG2SH} + export fcsthrs="${fhr3}" + "${AWIPS20KM1P0DEGSH}" fi fi done diff --git a/jobs/rocoto/awips_g2.sh b/jobs/rocoto/awips_g2.sh new file mode 100755 index 0000000000..121c96d63f --- /dev/null +++ b/jobs/rocoto/awips_g2.sh @@ -0,0 +1,57 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +## Abstract: +## Inline awips driver script +## HOMEgfs : /full/path/to/workflow +## EXPDIR : /full/path/to/config/files +## CDATE : current analysis date (YYYYMMDDHH) +## CDUMP : cycle name (gdas / gfs) +## PDY : current date (YYYYMMDD) +## cyc : current cycle (HH) +############################################################### + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="awips_g2" +export jobid="${job}.$$" + +source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" + +# shellcheck disable=SC2153 +fhrlst=$(echo "${FHRLST}" | sed -e "s/_/ /g; s/f/ /g; s/,/ /g") + +############################################################### + +################################################################################ +echo +echo "=============== BEGIN AWIPS ===============" + +for fhr3 in ${fhrlst}; do + fhr=$(( 10#${fhr3} )) + if (( fhr > FHMAX_GFS )); then + echo "Nothing to process for FHR = ${fhr3}, cycle" + continue + fi + + fhmin=0 + fhmax=240 + if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 6 == 0)); then + "${AWIPSG2SH}" + fi + fi +done + + +############################################################### +# Force Exit out cleanly +if [[ ${KEEPDATA:-"NO"} == "NO" ]] ; then rm -rf "${DATA}" ; fi + +exit 0 diff --git a/jobs/rocoto/cleanup.sh b/jobs/rocoto/cleanup.sh new file mode 100755 index 0000000000..96303fde57 --- /dev/null +++ b/jobs/rocoto/cleanup.sh @@ -0,0 +1,19 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +. "${HOMEgfs}"/ush/load_fv3gfs_modules.sh +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="cleanup" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGLOBAL_CLEANUP +status=$? + +exit "${status}" diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh index 46a25ac759..c5667cb970 100755 --- a/jobs/rocoto/efcs.sh +++ b/jobs/rocoto/efcs.sh @@ -4,31 +4,22 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh -#status=$? -#[[ ${status} -ne 0 ]] && exit ${status} - -# TODO: clean this up +# TODO clean this up once ncdiag/1.1.2 is installed on WCOSS2 source "${HOMEgfs}/ush/detect_machine.sh" -set +x -source "${HOMEgfs}/ush/module-setup.sh" -module use "${HOMEgfs}/sorc/ufs_model.fd/tests" -module load modules.ufs_model.lua -# Workflow needs utilities from prod_util (setPDY.sh, ndate, etc.) -module load prod_util -if [[ "${MACHINE_ID}" = "wcoss2" ]]; then - module load cray-pals +if [[ "${MACHINE_ID}" == "wcoss2" ]]; then + . ${HOMEgfs}/ush/load_ufswm_modules.sh +else + . ${HOMEgfs}/ush/load_fv3gfs_modules.sh fi -module list -unset MACHINE_ID -set_trace +status=$? +[[ ${status} -ne 0 ]] && exit ${status} export job="efcs" export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGDAS_ENKF_FCST +"${HOMEgfs}/jobs/JGDAS_ENKF_FCST" status=$? exit ${status} diff --git a/jobs/rocoto/eobs.sh b/jobs/rocoto/eobs.sh index 95fa42cb08..1139c3e623 100755 --- a/jobs/rocoto/eobs.sh +++ b/jobs/rocoto/eobs.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGDAS_ENKF_SELECT_OBS +"${HOMEgfs}/jobs/JGDAS_ENKF_SELECT_OBS" status=$? -exit ${status} +exit "${status}" diff --git a/jobs/rocoto/eupd.sh b/jobs/rocoto/eupd.sh index 3ed028f87a..9134ca5f14 100755 --- a/jobs/rocoto/eupd.sh +++ b/jobs/rocoto/eupd.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGDAS_ENKF_UPDATE +"${HOMEgfs}/jobs/JGDAS_ENKF_UPDATE" status=$? -exit ${status} +exit "${status}" diff --git a/jobs/rocoto/fbwind.sh b/jobs/rocoto/fbwind.sh new file mode 100755 index 0000000000..fdf14f5473 --- /dev/null +++ b/jobs/rocoto/fbwind.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="fbwind" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGFS_ATMOS_FBWIND" + +status=$? +exit "${status}" diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh index 512bee127f..9138b4eb43 100755 --- a/jobs/rocoto/fcst.sh +++ b/jobs/rocoto/fcst.sh @@ -4,50 +4,21 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh -#status=$? 
-#[[ ${status} -ne 0 ]] && exit ${status} - -# TODO: clean this up +# TODO clean this up once ncdiag/1.1.2 is installed on WCOSS2 source "${HOMEgfs}/ush/detect_machine.sh" -set +x -source "${HOMEgfs}/ush/module-setup.sh" -module use "${HOMEgfs}/sorc/ufs_model.fd/tests" -module load modules.ufs_model.lua -module load prod_util -if [[ "${MACHINE_ID}" = "wcoss2" ]]; then - module load cray-pals -fi -if [[ "${MACHINE_ID}" = "hera" ]]; then - module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core" - module load "miniconda3/4.6.14" - module load "gfs_workflow/1.0.0" -# TODO: orion and wcoss2 will be uncommented when they are ready. This comment block will be removed in the next PR -#elif [[ "${MACHINE_ID}" = "orion" ]]; then -# module use "/home/rmahajan/opt/global-workflow/modulefiles/core" -# module load "python/3.7.5" -# module load "gfs_workflow/1.0.0" -#elif [[ "${MACHINE_ID}" = "wcoss2" ]]; then -# module load "python/3.7.5" +if [[ "${MACHINE_ID}" == "wcoss2" ]]; then + . ${HOMEgfs}/ush/load_ufswm_modules.sh +else + . ${HOMEgfs}/ush/load_fv3gfs_modules.sh fi -module list -unset MACHINE_ID -set_trace - -############################################################### -# exglobal_forecast.py requires the following in PYTHONPATH -# This will be moved to a module load when ready -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src:${HOMEgfs}/ush/python/pygfs" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" -export PYTHONPATH +status=$? +[[ ${status} -ne 0 ]] && exit ${status} export job="fcst" export jobid="${job}.$$" -############################################################### # Execute the JJOB -${HOMEgfs}/jobs/JGLOBAL_FORECAST +"${HOMEgfs}/jobs/JGLOBAL_FORECAST" status=$? - exit ${status} diff --git a/jobs/rocoto/gempak.sh b/jobs/rocoto/gempak.sh index 14950535c8..82ea1175d8 100755 --- a/jobs/rocoto/gempak.sh +++ b/jobs/rocoto/gempak.sh @@ -5,13 +5,13 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### . "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ ${status} -ne 0 ]] && exit ${status} +if (( status != 0 )); then exit "${status}"; fi export job="gempak" export jobid="${job}.$$" # Execute the JJOB -${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK +"${HOMEgfs}/jobs/J${RUN^^}_ATMOS_GEMPAK" status=$? -exit ${status} +exit "${status}" diff --git a/jobs/rocoto/gempakmeta.sh b/jobs/rocoto/gempakmeta.sh new file mode 100755 index 0000000000..0babf7bbbf --- /dev/null +++ b/jobs/rocoto/gempakmeta.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="gempakmeta" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_META" + +status=$? +exit "${status}" diff --git a/jobs/rocoto/gempakmetancdc.sh b/jobs/rocoto/gempakmetancdc.sh new file mode 100755 index 0000000000..cfd51cf58a --- /dev/null +++ b/jobs/rocoto/gempakmetancdc.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="gempakmetancdc" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC" + +status=$? 
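gempak.sh above picks its J-job by upper-casing ${RUN}, letting one wrapper serve both the gfs and gdas suites; in isolation, with an illustrative RUN value:

#!/usr/bin/env bash
# Sketch of the ${RUN^^} expansion used in jobs/rocoto/gempak.sh.
RUN="gdas"
echo "J${RUN^^}_ATMOS_GEMPAK"   # -> JGDAS_ATMOS_GEMPAK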
+exit "${status}" diff --git a/jobs/rocoto/gempakncdcupapgif.sh b/jobs/rocoto/gempakncdcupapgif.sh new file mode 100755 index 0000000000..2cc84cd47d --- /dev/null +++ b/jobs/rocoto/gempakncdcupapgif.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="gempakncdcupapgif" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF" + +status=$? +exit "${status}" diff --git a/jobs/rocoto/gempakpgrb2spec.sh b/jobs/rocoto/gempakpgrb2spec.sh new file mode 100755 index 0000000000..f76c33ecdb --- /dev/null +++ b/jobs/rocoto/gempakpgrb2spec.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="gempakpgrb2spec" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC" + +status=$? +exit "${status}" diff --git a/jobs/rocoto/genesis.sh b/jobs/rocoto/genesis.sh new file mode 100755 index 0000000000..009a7006ef --- /dev/null +++ b/jobs/rocoto/genesis.sh @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="genesis" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB + +"${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/genesis_fsu.sh b/jobs/rocoto/genesis_fsu.sh new file mode 100755 index 0000000000..05c0ff8827 --- /dev/null +++ b/jobs/rocoto/genesis_fsu.sh @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="genesis_fsu" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB + +"${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/landanl.sh b/jobs/rocoto/landanl.sh new file mode 100755 index 0000000000..f49b6f9f8b --- /dev/null +++ b/jobs/rocoto/landanl.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanl" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS" +status=$? 
+exit "${status}" diff --git a/jobs/rocoto/metp.sh b/jobs/rocoto/metp.sh index 82254a0435..2bf833c1d6 100755 --- a/jobs/rocoto/metp.sh +++ b/jobs/rocoto/metp.sh @@ -3,94 +3,13 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### -## Abstract: -## Inline METplus verification and diagnostics driver script -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -## SDATE_GFS : first date of GFS cycle (YYYYMMDDHHMM) -## METPCASE : METplus verification use case (g2g1 | g2o1 | pcp1) -############################################################### - -############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ ${status} -ne 0 ]] && exit ${status} +if (( status != 0 )); then exit "${status}"; fi export job="metp${METPCASE}" export jobid="${job}.$$" -############################################## -# make temp directory -############################################## -export DATA=${DATA:-${DATAROOT}/${jobid}} -mkdir -p ${DATA} -cd ${DATA} - - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. ./PDY - -############################################################### -echo -echo "=============== START TO SOURCE RELEVANT CONFIGS ===============" -configs="base metp" -for config in ${configs}; do - . ${EXPDIR}/config.${config} - status=$? - [[ ${status} -ne 0 ]] && exit ${status} -done - - -############################################################### -echo -echo "=============== START TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. ${BASE_ENV}/${machine}.env metp -status=$? -[[ ${status} -ne 0 ]] && exit ${status} - -############################################################### -export COMPONENT="atmos" -export VDATE="$(echo $(${NDATE} -${VRFYBACK_HRS} ${CDATE}) | cut -c1-8)" -export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" - -# TODO: This should not be permitted as DATAROOT is set at the job-card level. -# TODO: DATAROOT is being used as DATA in metp jobs. This should be rectified in metp. -# TODO: The temporary directory is DATA and is created at the top of the J-Job. -# TODO: remove this line -export DATAROOT=${DATA} - -############################################################### -echo -echo "=============== START TO RUN METPLUS VERIFICATION ===============" -if [ ${CDUMP} = "gfs" ]; then - - if [ ${RUN_GRID2GRID_STEP1} = "YES" -o ${RUN_GRID2OBS_STEP1} = "YES" -o ${RUN_PRECIP_STEP1} = "YES" ]; then - - ${VERIF_GLOBALSH} - status=$? - [[ ${status} -ne 0 ]] && exit ${status} - [[ ${status} -eq 0 ]] && echo "Succesfully ran ${VERIF_GLOBALSH}" - fi -fi - - -if [ ${CDUMP} = "gdas" ]; then - echo "METplus verification currently not supported for CDUMP=${CDUMP}" -fi -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf ${DATAROOT} ; fi # TODO: This should be $DATA - +"${HOMEgfs}/jobs/JGFS_ATMOS_VERIFICATION" -exit 0 +exit $? 
diff --git a/jobs/rocoto/mos_ext_grd_fcst.sh b/jobs/rocoto/mos_ext_grd_fcst.sh new file mode 100755 index 0000000000..ce37711907 --- /dev/null +++ b/jobs/rocoto/mos_ext_grd_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_grd_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_grd_fcst" -c "base mos_ext_grd_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_GRD_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_grd_prdgen.sh b/jobs/rocoto/mos_ext_grd_prdgen.sh new file mode 100755 index 0000000000..fb641e04f0 --- /dev/null +++ b/jobs/rocoto/mos_ext_grd_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_grd_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_grd_prdgen" -c "base mos_ext_grd_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_GRD_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_grd_prep.sh b/jobs/rocoto/mos_ext_grd_prep.sh new file mode 100755 index 0000000000..defe9222b6 --- /dev/null +++ b/jobs/rocoto/mos_ext_grd_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_grd_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_grd_prep" -c "base mos_ext_grd_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_GRD_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_stn_fcst.sh b/jobs/rocoto/mos_ext_stn_fcst.sh new file mode 100755 index 0000000000..85cde49192 --- /dev/null +++ b/jobs/rocoto/mos_ext_stn_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? 
+(( status != 0 )) && exit "${status}" + +export job="mos_ext_stn_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_stn_fcst" -c "base mos_ext_stn_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_STN_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_stn_prdgen.sh b/jobs/rocoto/mos_ext_stn_prdgen.sh new file mode 100755 index 0000000000..17709d5ffb --- /dev/null +++ b/jobs/rocoto/mos_ext_stn_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_stn_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_stn_prdgen" -c "base mos_ext_stn_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_STN_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_ext_stn_prep.sh b/jobs/rocoto/mos_ext_stn_prep.sh new file mode 100755 index 0000000000..9c65761a0d --- /dev/null +++ b/jobs/rocoto/mos_ext_stn_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_ext_stn_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_ext_stn_prep" -c "base mos_ext_stn_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_EXT_STN_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_grd_fcst.sh b/jobs/rocoto/mos_grd_fcst.sh new file mode 100755 index 0000000000..42832d5f14 --- /dev/null +++ b/jobs/rocoto/mos_grd_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_grd_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_grd_fcst" -c "base mos_grd_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_GRD_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_grd_prdgen.sh b/jobs/rocoto/mos_grd_prdgen.sh new file mode 100755 index 0000000000..c60b2e8f39 --- /dev/null +++ b/jobs/rocoto/mos_grd_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? 
+(( status != 0 )) && exit "${status}" + +export job="mos_grd_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_grd_prdgen" -c "base mos_grd_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_GRD_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_grd_prep.sh b/jobs/rocoto/mos_grd_prep.sh new file mode 100755 index 0000000000..3276ebf87d --- /dev/null +++ b/jobs/rocoto/mos_grd_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_grd_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_grd_prep" -c "base mos_grd_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_GRD_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_stn_fcst.sh b/jobs/rocoto/mos_stn_fcst.sh new file mode 100755 index 0000000000..0024ed24b7 --- /dev/null +++ b/jobs/rocoto/mos_stn_fcst.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_stn_fcst" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_stn_fcst" -c "base mos_stn_fcst" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_STN_FORECAST" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_stn_prdgen.sh b/jobs/rocoto/mos_stn_prdgen.sh new file mode 100755 index 0000000000..01ada3f9d5 --- /dev/null +++ b/jobs/rocoto/mos_stn_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_stn_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_stn_prdgen" -c "base mos_stn_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_STN_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_stn_prep.sh b/jobs/rocoto/mos_stn_prep.sh new file mode 100755 index 0000000000..21b19c6305 --- /dev/null +++ b/jobs/rocoto/mos_stn_prep.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? 
+(( status != 0 )) && exit "${status}" + +export job="mos_stn_prep" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_stn_prep" -c "base mos_stn_prep" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_STN_PREP" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_wx_ext_prdgen.sh b/jobs/rocoto/mos_wx_ext_prdgen.sh new file mode 100755 index 0000000000..b1c81e8a3b --- /dev/null +++ b/jobs/rocoto/mos_wx_ext_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_wx_ext_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_wx_ext_prdgen" -c "base mos_wx_ext_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_WX_EXT_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/mos_wx_prdgen.sh b/jobs/rocoto/mos_wx_prdgen.sh new file mode 100755 index 0000000000..345682b03e --- /dev/null +++ b/jobs/rocoto/mos_wx_prdgen.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="mos_wx_prdgen" +export jobid="${job}.$$" + +############################################################### +# Source jjob_header before invoking external JJOB + +source "${HOMEgfs}/ush/jjob_header.sh" -e "mos_wx_prdgen" -c "base mos_wx_prdgen" + +############################################################### +# Execute the JJOB + +"${HOMEgfs_mos}/jobs/JGFSMOS_WX_PRDGEN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/npoess.sh b/jobs/rocoto/npoess.sh new file mode 100755 index 0000000000..3599ca26bd --- /dev/null +++ b/jobs/rocoto/npoess.sh @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +if (( status != 0 )); then exit "${status}"; fi + +export job="npoess_pgrb2_0p5deg" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS" + +status=$? 
+exit "${status}" diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh index ee8da061f2..5a2dc091cf 100755 --- a/jobs/rocoto/ocnpost.sh +++ b/jobs/rocoto/ocnpost.sh @@ -21,9 +21,6 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnpost" -c "base ocnpost" # Set variables used in the exglobal script ############################################## export CDUMP=${RUN/enkf} -if [[ ${RUN_ENVIR} = "nco" ]]; then - export ROTDIR=${COMROOT:?}/${NET}/${envir} -fi ############################################## # Begin JOB SPECIFIC work @@ -32,7 +29,7 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3 COM_OCEAN_XSECT COM_ICE_HISTORY for grid in "0p50" "0p25"; do - YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_GRID_TMPL" done for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do @@ -44,19 +41,19 @@ done fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') export OMP_NUM_THREADS=1 -export ENSMEM=${ENSMEM:-01} +export ENSMEM=${ENSMEM:-000} export IDATE=${PDY}${cyc} for fhr in ${fhrlst}; do - export fhr=${fhr} + export fhr=${fhr} # Ignore possible spelling error (nothing is misspelled) # shellcheck disable=SC2153 VDATE=$(${NDATE} "${fhr}" "${IDATE}") # shellcheck disable= declare -x VDATE cd "${DATA}" || exit 2 - if (( fhr > 0 )); then + if (( 10#${fhr} > 0 )); then # TODO: This portion calls NCL scripts that are deprecated (see Issue #923) if [[ "${MAKE_OCN_GRIB:-YES}" == "YES" ]]; then export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}} @@ -93,18 +90,18 @@ for fhr in ${fhrlst}; do [[ ${status} -ne 0 ]] && exit "${status}" fi if [[ -f "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then - echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -v temp -d yh,503 -d xh,-299.92,60.03 \ + ncks -v temp -d yh,0.0 \ "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \ "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? [[ ${status} -ne 0 ]] && exit "${status}" fi if [[ -f "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then - echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -v uo -d yh,503 -d xh,-299.92,60.03 \ + ncks -v uo -d yh,0.0 \ "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \ "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? diff --git a/jobs/rocoto/post.sh b/jobs/rocoto/post.sh deleted file mode 100755 index e84b2b7b71..0000000000 --- a/jobs/rocoto/post.sh +++ /dev/null @@ -1,33 +0,0 @@ -#! /usr/bin/env bash - -source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -## NCEP post driver script -## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...) -## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...) -############################################################### - -# Source FV3GFS workflow modules -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh -status=$? 
-[[ ${status} -ne 0 ]] && exit ${status} - -export job="post" -export jobid="${job}.$$" - -if [ ${FHRGRP} = 'anl' ]; then - fhrlst="anl" -else - fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') -fi - -#--------------------------------------------------------------- -for fhr in ${fhrlst}; do - export post_times=${fhr} - ${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST - status=$? - [[ ${status} -ne 0 ]] && exit ${status} -done - -exit 0 diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh index 826dec5ae7..dfb541abb6 100755 --- a/jobs/rocoto/prep.sh +++ b/jobs/rocoto/prep.sh @@ -65,14 +65,12 @@ fi if [[ ${PROCESS_TROPCY} = "YES" ]]; then export COMINsyn=${COMINsyn:-$(compath.py gfs/prod/syndat)} - if [[ ${RUN_ENVIR} != "nco" ]]; then - export ARCHSYND=${ROTDIR}/syndat - if [[ ! -d ${ARCHSYND} ]]; then mkdir -p ${ARCHSYND}; fi - if [[ ! -s ${ARCHSYND}/syndat_akavit ]]; then - for file in syndat_akavit syndat_dateck syndat_stmcat.scr syndat_stmcat syndat_sthisto syndat_sthista ; do - cp ${COMINsyn}/${file} ${ARCHSYND}/. - done - fi + export ARCHSYND=${ROTDIR}/syndat + if [[ ! -d ${ARCHSYND} ]]; then mkdir -p ${ARCHSYND}; fi + if [[ ! -s ${ARCHSYND}/syndat_akavit ]]; then + for file in syndat_akavit syndat_dateck syndat_stmcat.scr syndat_stmcat syndat_sthisto syndat_sthista ; do + cp ${COMINsyn}/${file} ${ARCHSYND}/. + done fi if [[ ${ROTDIR_DUMP} = "YES" ]]; then rm "${COM_OBS}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00"; fi @@ -96,7 +94,6 @@ if [[ ${MAKE_PREPBUFR} = "YES" ]]; then fi export job="j${CDUMP}_prep_${cyc}" - export DATAROOT="${RUNDIR}/${CDATE}/${CDUMP}/prepbufr" export COMIN=${COM_OBS} export COMOUT=${COM_OBS} RUN="gdas" YMD=${PDY} HH=${cyc} generate_com -rx COMINgdas:COM_ATMOS_HISTORY_TMPL diff --git a/jobs/rocoto/prepatmiodaobs.sh b/jobs/rocoto/prepatmiodaobs.sh new file mode 100755 index 0000000000..d424df9261 --- /dev/null +++ b/jobs/rocoto/prepatmiodaobs.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="prepatmobs" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow and ioda utilities +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYIODALIB="${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7" +PYTHONPATH="${PYIODALIB}:${wxflowPATH}:${PYTHONPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_PREP_IODA_OBS" +status=$? 
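A small caveat on the PYTHONPATH line in prepatmiodaobs.sh above: it appends ${PYTHONPATH} unconditionally, so when the variable starts out unset the result ends in a trailing colon, and Python treats an empty PYTHONPATH entry as the current directory. The neighbouring scripts avoid this with the ${VAR:+...} expansion, which emits the variable plus a colon only when it is set. An illustration:

    unset PYTHONPATH
    echo "lib1:lib2:${PYTHONPATH}"                 # -> "lib1:lib2:" (empty trailing entry)
    echo "${PYTHONPATH:+${PYTHONPATH}:}lib1:lib2"  # -> "lib1:lib2"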
+exit "${status}" diff --git a/jobs/rocoto/preplandobs.sh b/jobs/rocoto/preplandobs.sh index 6fcd659eae..6304dd611b 100755 --- a/jobs/rocoto/preplandobs.sh +++ b/jobs/rocoto/preplandobs.sh @@ -13,9 +13,9 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -gdasappPATH="${HOMEgfs}/sorc/gdas.cd/iodaconv/src:${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7/pyioda" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}:${gdasappPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +gdasappPATH="${HOMEgfs}/sorc/gdas.cd/iodaconv/src:${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}:${gdasappPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/prepoceanobs.sh b/jobs/rocoto/prepoceanobs.sh new file mode 100755 index 0000000000..d7ae87851f --- /dev/null +++ b/jobs/rocoto/prepoceanobs.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="prepoceanobs" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGLOBAL_PREP_OCEAN_OBS +status=$? +exit "${status}" diff --git a/jobs/rocoto/stage_ic.sh b/jobs/rocoto/stage_ic.sh new file mode 100755 index 0000000000..5e7b3395d2 --- /dev/null +++ b/jobs/rocoto/stage_ic.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +# Source FV3GFS workflow modules +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +[[ "${status}" -ne 0 ]] && exit "${status}" + +export job="stage_ic" +export jobid="${job}.$$" + +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_STAGE_IC" +status=$? + + +exit "${status}" diff --git a/jobs/rocoto/tracker.sh b/jobs/rocoto/tracker.sh new file mode 100755 index 0000000000..3e2efd644e --- /dev/null +++ b/jobs/rocoto/tracker.sh @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="tracker" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB + +"${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh new file mode 100755 index 0000000000..18d5c12cea --- /dev/null +++ b/jobs/rocoto/upp.sh @@ -0,0 +1,59 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +## Offline UPP driver script +## UPP_RUN: analysis, forecast, goes, wafs. See upp.yaml for valid options +## FHRLST : forecast hourlist to be post-process (e.g. f000, f000_f001_f002, ...) +############################################################### + +# Source FV3GFS workflow modules +#. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +#status=$? 
+#if (( status != 0 )); then exit "${status}"; fi
+# Temporarily load modules from UPP on WCOSS2
+source "${HOMEgfs}/ush/detect_machine.sh"
+if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
+  set +x
+  source "${HOMEgfs}/ush/module-setup.sh"
+  module use "${HOMEgfs}/sorc/ufs_model.fd/FV3/upp/modulefiles"
+  module load "${MACHINE_ID}"
+  module load prod_util
+  module load cray-pals
+  module load cfp
+  module load libjpeg
+  module load grib_util/1.2.3
+  module load wgrib2/2.0.8
+  export WGRIB2=wgrib2
+  module load python/3.8.6
+  module load crtm/2.4.0 # TODO: This is only needed when UPP_RUN=goes. Is there a better way to handle this?
+  set_trace
+else
+  . "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+  status=$?
+  if (( status != 0 )); then exit "${status}"; fi
+fi
+
+###############################################################
+# setup python path for workflow utilities and tasks
+wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
+export PYTHONPATH
+
+export job="upp"
+export jobid="${job}.$$"
+
+###############################################################
+# shellcheck disable=SC2153,SC2001
+IFS='_' read -ra fhrs <<< "${FHRLST//f}" # strip off the 'f's and convert to an array, e.g. "f000_f006" -> (000 006)
+
+# Execute the JJOB
+for fhr in "${fhrs[@]}"; do
+  export FORECAST_HOUR=$(( 10#${fhr} ))
+  "${HOMEgfs}/jobs/JGLOBAL_ATMOS_UPP"
+  status=$?
+  if (( status != 0 )); then exit "${status}"; fi
+done
+
+exit 0
diff --git a/jobs/rocoto/verfozn.sh b/jobs/rocoto/verfozn.sh
new file mode 100755
index 0000000000..70a772fca6
--- /dev/null
+++ b/jobs/rocoto/verfozn.sh
@@ -0,0 +1,22 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source FV3GFS workflow modules
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+export job="verfozn"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+echo
+echo "=============== START TO RUN OZMON DATA EXTRACTION ==============="
+
+"${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN"
+status=$?
+
+exit "${status}"
diff --git a/jobs/rocoto/verfrad.sh b/jobs/rocoto/verfrad.sh
new file mode 100755
index 0000000000..a687d3cf26
--- /dev/null
+++ b/jobs/rocoto/verfrad.sh
@@ -0,0 +1,22 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source FV3GFS workflow modules
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+(( status != 0 )) && exit "${status}"
+
+export job="verfrad"
+export jobid="${job}.$$"
+
+###############################################################
+# Execute the JJOB
+echo
+echo "=============== START TO RUN RADMON DATA EXTRACTION ==============="
+
+"${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD"
+status=$?
+
+exit "${status}"
diff --git a/jobs/rocoto/vminmon.sh b/jobs/rocoto/vminmon.sh
new file mode 100755
index 0000000000..73f10167e5
--- /dev/null
+++ b/jobs/rocoto/vminmon.sh
@@ -0,0 +1,22 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Source FV3GFS workflow modules
+source "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
+status=$?
+(( status != 0 )) && exit "${status}" + +export job="vminmon" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN MINMON ===============" + +"${HOMEgfs}/jobs/JGLOBAL_ATMOS_VMINMON" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/vrfy.sh b/jobs/rocoto/vrfy.sh deleted file mode 120000 index 063570a2ab..0000000000 --- a/jobs/rocoto/vrfy.sh +++ /dev/null @@ -1 +0,0 @@ -vrfy_gsl.sh \ No newline at end of file diff --git a/jobs/rocoto/waveinit.sh b/jobs/rocoto/waveinit.sh index d0c3f49929..b38367d09a 100755 --- a/jobs/rocoto/waveinit.sh +++ b/jobs/rocoto/waveinit.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostbndpnt.sh b/jobs/rocoto/wavepostbndpnt.sh index 5d26498356..1a4f940150 100755 --- a/jobs/rocoto/wavepostbndpnt.sh +++ b/jobs/rocoto/wavepostbndpnt.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostbndpntbll.sh b/jobs/rocoto/wavepostbndpntbll.sh index ce4f9e6b2d..2d128facb7 100755 --- a/jobs/rocoto/wavepostbndpntbll.sh +++ b/jobs/rocoto/wavepostbndpntbll.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostpnt.sh b/jobs/rocoto/wavepostpnt.sh index 9efb755dec..60017394f5 100755 --- a/jobs/rocoto/wavepostpnt.sh +++ b/jobs/rocoto/wavepostpnt.sh @@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? [[ ${status} -ne 0 ]] && exit ${status} diff --git a/jobs/rocoto/wavepostsbs.sh b/jobs/rocoto/wavepostsbs.sh index e4bea0bc34..f4789210d8 100755 --- a/jobs/rocoto/wavepostsbs.sh +++ b/jobs/rocoto/wavepostsbs.sh @@ -4,7 +4,8 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_ufswm_modules.sh status=$? 
 [[ ${status} -ne 0 ]] && exit ${status}
diff --git a/jobs/rocoto/waveprep.sh b/jobs/rocoto/waveprep.sh
index 0cbafde87e..fa934167b7 100755
--- a/jobs/rocoto/waveprep.sh
+++ b/jobs/rocoto/waveprep.sh
@@ -5,7 +5,8 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 echo
 echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ==============="
-. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+. ${HOMEgfs}/ush/load_ufswm_modules.sh
 status=$?
 [[ ${status} -ne 0 ]] && exit ${status}
diff --git a/modulefiles/module-setup.csh.inc b/modulefiles/module-setup.csh.inc
index a3cd672300..7086326627 100644
--- a/modulefiles/module-setup.csh.inc
+++ b/modulefiles/module-setup.csh.inc
@@ -8,10 +8,9 @@ if ( { test -d /lfs/f1 } ) then
         source /usr/share/lmod/lmod/init/$__ms_shell
     fi
     module reset
-else if ( { test -d /mnt/lfs1 } ) then
-    # We are on NOAA Jet
+else if ( { test -d /lfs3 } ) then
     if ( ! { module help >& /dev/null } ) then
-        source /apps/lmod/lmod/init/$__ms_shell
+        source /apps/lmod/lmod/init/$__ms_shell
    endif
    module purge
else if ( { test -d /scratch1 } ) then
@@ -20,11 +19,18 @@ else if ( { test -d /scratch1 } ) then
         source /apps/lmod/lmod/init/$__ms_shell
     endif
     module purge
-else if ( { test -d /work } ) then
-    # We are on MSU Orion
-    if ( ! { module help >& /dev/null } ) then
-        source /apps/lmod/init/$__ms_shell
-    endif
+else if ( { test -d /work } ) then
+    # We are on MSU Orion or Hercules
+    if ( { test -d /apps/other } ) then
+        # Hercules
+        set init_path = "/apps/other/lmod/lmod/init/$__ms_shell"
+    else
+        # Orion
+        set init_path = "/apps/lmod/lmod/init/$__ms_shell"
+    endif
+    if ( ! { module help >& /dev/null } ) then
+        source "${init_path}"
+    endif
     module purge
 else if ( { test -d /data/prod } ) then
     # We are on SSEC S4
diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc
index e5322cbb2c..db9dabffe1 100644
--- a/modulefiles/module-setup.sh.inc
+++ b/modulefiles/module-setup.sh.inc
@@ -35,9 +35,16 @@ elif [[ -d /scratch1 ]] ; then
     fi
     module purge
 elif [[ -d /work ]] ; then
-    # We are on MSU Orion
+    # We are on MSU Orion or Hercules
+    if [[ -d /apps/other ]] ; then
+        # Hercules
+        init_path="/apps/other/lmod/lmod/init/$__ms_shell"
+    else
+        # Orion
+        init_path="/apps/lmod/lmod/init/$__ms_shell"
+    fi
     if ( !
eval module help > /dev/null 2>&1 ) ; then - source /apps/lmod/lmod/init/$__ms_shell + source "${init_path}" fi module purge elif [[ -d /glade ]] ; then diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua index 9634202bb5..cf77df520a 100644 --- a/modulefiles/module_base.hera.lua +++ b/modulefiles/module_base.hera.lua @@ -2,45 +2,49 @@ help([[ Load environment to run GFS on Hera ]]) -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack") - -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-intel", "18.0.5.274")) -load(pathJoin("hpc-impi", "2018.0.4")) - -load(pathJoin("hpss", "hpss")) -load(pathJoin("gempak", "7.4.2")) -load(pathJoin("ncl", "6.6.2")) -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("png", "1.6.35")) -load(pathJoin("cdo", "1.9.5")) -load(pathJoin("R", "3.5.0")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("nco", "4.9.1")) -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) -load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ncdiag", "1.0.0")) -load(pathJoin("crtm", "2.4.0")) -load(pathJoin("wgrib2", "2.0.8")) +spack_stack_ver=(os.getenv("spack_stack_ver") or "None") +spack_env=(os.getenv("spack_env") or "None") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("stack_intel_ver"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver"))) +load(pathJoin("python", os.getenv("python_ver"))) + +load(pathJoin("hpss", os.getenv("hpss_ver"))) +load(pathJoin("gempak", os.getenv("gempak_ver"))) +load(pathJoin("ncl", os.getenv("ncl_ver"))) +load(pathJoin("jasper", os.getenv("jasper_ver"))) +load(pathJoin("libpng", os.getenv("libpng_ver"))) +load(pathJoin("cdo", os.getenv("cdo_ver"))) +load(pathJoin("R", os.getenv("R_ver"))) + +load(pathJoin("hdf5", os.getenv("hdf5_ver"))) +load(pathJoin("netcdf-c", os.getenv("netcdf_c_ver"))) +load(pathJoin("netcdf-fortran", os.getenv("netcdf_fortran_ver"))) + +load(pathJoin("nco", os.getenv("nco_ver"))) +load(pathJoin("prod_util", os.getenv("prod_util_ver"))) +load(pathJoin("grib-util", os.getenv("grib_util_ver"))) +load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("gsi-ncdiag", os.getenv("gsi_ncdiag_ver"))) +load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("bufr", os.getenv("bufr_ver"))) +load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("py-netcdf4", os.getenv("py_netcdf4_ver"))) +load(pathJoin("py-pyyaml", os.getenv("py_pyyaml_ver"))) +load(pathJoin("py-jinja2", os.getenv("py_jinja2_ver"))) + +load(pathJoin("met", os.getenv("met_ver"))) +load(pathJoin("metplus", os.getenv("metplus_ver"))) + setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) +--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. 
(os.getenv("prepobs_run_ver") or "None"), "modulefiles")) prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) -load(pathJoin("prepobs", "1.0.1")) - -prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v1.0.0/modulefiles")) -load(pathJoin("fit2obs", "1.0.0")) - --- Temporary until official hpc-stack is updated -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-intel", "18.0.5.274")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("ufswm", "1.0.0")) -load(pathJoin("met", "9.1")) -load(pathJoin("metplus", "3.1")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) + +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.hercules.lua b/modulefiles/module_base.hercules.lua new file mode 100644 index 0000000000..d587b90c4f --- /dev/null +++ b/modulefiles/module_base.hercules.lua @@ -0,0 +1,46 @@ +help([[ +Load environment to run GFS on Hercules +]]) + +spack_stack_ver=(os.getenv("spack_stack_ver") or "None") +spack_env=(os.getenv("spack_env") or "None") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("stack_intel_ver"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver"))) +load(pathJoin("python", os.getenv("python_ver"))) + +-- TODO load NCL once the SAs remove the 'depends_on' statements within it +-- NCL is a static installation and does not depend on any libraries +-- but as is will load, among others, the system netcdf-c/4.9.0 module +--load(pathJoin("ncl", os.getenv("ncl_ver"))) +load(pathJoin("jasper", os.getenv("jasper_ver"))) +load(pathJoin("libpng", os.getenv("libpng_ver"))) +load(pathJoin("cdo", os.getenv("cdo_ver"))) + +load(pathJoin("hdf5", os.getenv("hdf5_ver"))) +load(pathJoin("netcdf-c", os.getenv("netcdf_c_ver"))) +load(pathJoin("netcdf-fortran", os.getenv("netcdf_fortran_ver"))) + +load(pathJoin("nco", os.getenv("nco_ver"))) +load(pathJoin("prod_util", os.getenv("prod_util_ver"))) +load(pathJoin("grib-util", os.getenv("grib_util_ver"))) +load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("gsi-ncdiag", os.getenv("gsi_ncdiag_ver"))) +load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("bufr", os.getenv("bufr_ver"))) +load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("py-netcdf4", os.getenv("py_netcdf4_ver"))) +load(pathJoin("py-pyyaml", os.getenv("py_pyyaml_ver"))) +load(pathJoin("py-jinja2", os.getenv("py_jinja2_ver"))) + +setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) + +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) + +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. 
(os.getenv("fit2obs_ver") or "None"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) + +whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index dbca3c4500..7a0faaaeb6 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -2,42 +2,46 @@ help([[ Load environment to run GFS on Jet ]]) -prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/role.epic/hpc-stack/libs/intel-18.0.5.274/modulefiles/stack") - -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-intel", "18.0.5.274")) -load(pathJoin("hpc-impi", "2018.4.274")) -load(pathJoin("cmake", "3.20.1")) - -load("hpss") -load(pathJoin("gempak", "7.4.2")) -load(pathJoin("ncl", "6.6.2")) -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("libpng", "1.6.35")) -load(pathJoin("cdo", "1.9.5")) -load(pathJoin("R", "4.0.2")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("nco", "4.9.1")) -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) -load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ncdiag", "1.0.0")) -load(pathJoin("crtm", "2.4.0")) -load(pathJoin("wgrib2", "2.0.8")) -setenv("WGRIB2","wgrib2") - -prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v1.0.1/modulefiles")) -load(pathJoin("prepobs", "1.0.1")) +spack_stack_ver=(os.getenv("spack_stack_ver") or "None") +spack_env=(os.getenv("spack_env") or "None") +prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("stack_intel_ver"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver"))) +load(pathJoin("python", os.getenv("python_ver"))) + +load(pathJoin("hpss", os.getenv("hpss_ver"))) +load(pathJoin("gempak", os.getenv("gempak_ver"))) +load(pathJoin("ncl", os.getenv("ncl_ver"))) +load(pathJoin("jasper", os.getenv("jasper_ver"))) +load(pathJoin("libpng", os.getenv("libpng_ver"))) +load(pathJoin("cdo", os.getenv("cdo_ver"))) +load(pathJoin("R", os.getenv("R_ver"))) + +load(pathJoin("hdf5", os.getenv("hdf5_ver"))) +load(pathJoin("netcdf-c", os.getenv("netcdf_c_ver"))) +load(pathJoin("netcdf-fortran", os.getenv("netcdf_fortran_ver"))) + +load(pathJoin("nco", os.getenv("nco_ver"))) +load(pathJoin("prod_util", os.getenv("prod_util_ver"))) +load(pathJoin("grib-util", os.getenv("grib_util_ver"))) +load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("gsi-ncdiag", os.getenv("gsi_ncdiag_ver"))) +load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("bufr", os.getenv("bufr_ver"))) +load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("py-netcdf4", os.getenv("py_netcdf4_ver"))) +load(pathJoin("py-pyyaml", os.getenv("py_pyyaml_ver"))) +load(pathJoin("py-jinja2", os.getenv("py_jinja2_ver"))) -prepend_path("MODULEPATH", "/contrib/anaconda/modulefiles") -load(pathJoin("anaconda", "5.3.1")) +setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) +--prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v" .. 
(os.getenv("prepobs_run_ver") or "None"), "modulefiles")) prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) -load(pathJoin("prepobs", "1.0.1")) -prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/Fit2Obs/v1.0.0/modulefiles")) -load(pathJoin("fit2obs", "1.0.0")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) + +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua index 0c9033c07f..3464accc89 100644 --- a/modulefiles/module_base.orion.lua +++ b/modulefiles/module_base.orion.lua @@ -2,44 +2,47 @@ help([[ Load environment to run GFS on Orion ]]) -prepend_path("MODULEPATH", "/apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/stack") - -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-intel", "2018.4")) -load(pathJoin("hpc-impi", "2018.4")) - -load(pathJoin("gempak", "7.5.1")) -load(pathJoin("ncl", "6.6.2")) -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("zlib", "1.2.11")) -load(pathJoin("png", "1.6.35")) -load(pathJoin("cdo", "1.9.5")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("nco", "4.8.1")) -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) -load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ncdiag", "1.0.0")) -load(pathJoin("crtm", "2.4.0")) -load(pathJoin("wgrib2", "2.0.8")) +spack_stack_ver=(os.getenv("spack_stack_ver") or "None") +spack_env=(os.getenv("spack_env") or "None") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. "/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("stack_intel_ver"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver"))) +load(pathJoin("python", os.getenv("python_ver"))) + +load(pathJoin("gempak", os.getenv("gempak_ver"))) +load(pathJoin("ncl", os.getenv("ncl_ver"))) +load(pathJoin("jasper", os.getenv("jasper_ver"))) +load(pathJoin("libpng", os.getenv("libpng_ver"))) +load(pathJoin("cdo", os.getenv("cdo_ver"))) + +load(pathJoin("hdf5", os.getenv("hdf5_ver"))) +load(pathJoin("netcdf-c", os.getenv("netcdf_c_ver"))) +load(pathJoin("netcdf-fortran", os.getenv("netcdf_fortran_ver"))) + +load(pathJoin("nco", os.getenv("nco_ver"))) +load(pathJoin("prod_util", os.getenv("prod_util_ver"))) +load(pathJoin("grib-util", os.getenv("grib_util_ver"))) +load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("gsi-ncdiag", os.getenv("gsi_ncdiag_ver"))) +load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("bufr", os.getenv("bufr_ver"))) +load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("py-netcdf4", os.getenv("py_netcdf4_ver"))) +load(pathJoin("py-pyyaml", os.getenv("py_pyyaml_ver"))) +load(pathJoin("py-jinja2", os.getenv("py_jinja2_ver"))) + +load(pathJoin("met", os.getenv("met_ver"))) +load(pathJoin("metplus", os.getenv("metplus_ver"))) + setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) +--prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v" .. 
(os.getenv("prepobs_run_ver") or "None"), "modulefiles")) prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) -load(pathJoin("prepobs", "1.0.1")) - -prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v1.0.0/modulefiles")) -load(pathJoin("fit2obs", "1.0.0")) - --- Temporary until official hpc-stack is updated -prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-intel", "2018.4")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("ufswm", "1.0.0")) -load(pathJoin("met", "9.1")) -load(pathJoin("metplus", "3.1")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) + +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua index 5bd0f1d6fb..51eeee0f10 100644 --- a/modulefiles/module_base.s4.lua +++ b/modulefiles/module_base.s4.lua @@ -2,36 +2,43 @@ help([[ Load environment to run GFS on S4 ]]) -load("license_intel") -prepend_path("MODULEPATH", "/data/prod/hpc-stack/modulefiles/stack") - -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-intel", "18.0.4")) -load(pathJoin("hpc-impi", "18.0.4")) - -load(pathJoin("miniconda", "3.8-s4")) -load(pathJoin("ncl", "6.4.0-precompiled")) -load(pathJoin("cdo", "1.9.8")) -load(pathJoin("jasper", "2.0.25")) -load(pathJoin("zlib", "1.2.11")) -load(pathJoin("png", "1.6.35")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("nco", "4.9.3")) -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) -load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ncdiag", "1.0.0")) -load(pathJoin("crtm", "2.4.0")) -load(pathJoin("wgrib2", "2.0.8")) +spack_stack_ver=(os.getenv("spack_stack_ver") or "None") +spack_env=(os.getenv("spack_env") or "None") +prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-" .. spack_stack_ver .. "/envs/" .. spack_env .. 
"/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("stack_intel_ver"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver"))) +load(pathJoin("python", os.getenv("python_ver"))) + +load(pathJoin("ncl", os.getenv("ncl_ver"))) +load(pathJoin("jasper", os.getenv("jasper_ver"))) +load(pathJoin("libpng", os.getenv("libpng_ver"))) +load(pathJoin("cdo", os.getenv("cdo_ver"))) + +load(pathJoin("hdf5", os.getenv("hdf5_ver"))) +load(pathJoin("netcdf-c", os.getenv("netcdf_c_ver"))) +load(pathJoin("netcdf-fortran", os.getenv("netcdf_fortran_ver"))) + +load(pathJoin("nco", os.getenv("nco_ver"))) +load(pathJoin("prod_util", os.getenv("prod_util_ver"))) +load(pathJoin("grib-util", os.getenv("grib_util_ver"))) +load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("gsi-ncdiag", os.getenv("gsi_ncdiag_ver"))) +load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("bufr", os.getenv("bufr_ver"))) +load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("py-netcdf4", os.getenv("py_netcdf4_ver"))) +load(pathJoin("py-pyyaml", os.getenv("py_pyyaml_ver"))) +load(pathJoin("py-jinja2", os.getenv("py_jinja2_ver"))) + setenv("WGRIB2","wgrib2") +setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None")) -prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/feature-GFSv17_com_reorg/modulefiles")) -load(pathJoin("prepobs", "1.0.1")) +--prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles")) +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) -prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/Fit2Obs/v1.0.0/modulefiles")) -load(pathJoin("fit2obs", "1.0.0")) +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/Fit2Obs/v" .. 
(os.getenv("fit2obs_ver") or "None"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua index c27d69901a..57d3bf51dd 100644 --- a/modulefiles/module_base.wcoss2.lua +++ b/modulefiles/module_base.wcoss2.lua @@ -2,39 +2,43 @@ help([[ Load environment to run GFS on WCOSS2 ]]) -load(pathJoin("PrgEnv-intel", "8.1.0")) -load(pathJoin("craype", "2.7.13")) -load(pathJoin("intel", "19.1.3.304")) -load(pathJoin("cray-mpich", "8.1.9")) -load(pathJoin("cray-pals", "1.0.17")) -load(pathJoin("cfp", "2.0.4")) +load(pathJoin("PrgEnv-intel", os.getenv("PrgEnv_intel_ver"))) +load(pathJoin("craype", os.getenv("craype_ver"))) +load(pathJoin("intel", os.getenv("intel_ver"))) +load(pathJoin("cray-mpich", os.getenv("cray_mpich_ver"))) +load(pathJoin("cray-pals", os.getenv("cray_pals_ver"))) +load(pathJoin("cfp", os.getenv("cfp_ver"))) setenv("USE_CFP","YES") -load(pathJoin("python", "3.8.6")) -load(pathJoin("gempak", "7.14.1")) -load(pathJoin("perl", "5.32.0")) -load(pathJoin("libjpeg", "9c")) -load(pathJoin("udunits", "2.2.28")) -load(pathJoin("gsl", "2.7")) -load(pathJoin("cdo", "1.9.8")) - -load(pathJoin("hdf5", "1.10.6")) -load(pathJoin("netcdf", "4.7.4")) - -load(pathJoin("nco", "4.7.9")) -load(pathJoin("prod_util", "2.0.9")) -load(pathJoin("grib_util", "1.2.3")) -load(pathJoin("bufr_dump", "1.0.0")) -load(pathJoin("util_shared", "1.4.0")) -load(pathJoin("g2tmpl", "1.9.1")) -load(pathJoin("ncdiag", "1.0.0")) -load(pathJoin("crtm", "2.4.0")) -load(pathJoin("wgrib2", "2.0.7")) +load(pathJoin("python", os.getenv("python_ver"))) +load(pathJoin("gempak", os.getenv("gempak_ver"))) +load(pathJoin("perl", os.getenv("perl_ver"))) +load(pathJoin("libjpeg", os.getenv("libjpeg_ver"))) +load(pathJoin("udunits", os.getenv("udunits_ver"))) +load(pathJoin("gsl", os.getenv("gsl_ver"))) +load(pathJoin("cdo", os.getenv("cdo_ver"))) +load(pathJoin("hdf5", os.getenv("hdf5_ver"))) +load(pathJoin("netcdf", os.getenv("netcdf_ver"))) + +load(pathJoin("nco", os.getenv("nco_ver"))) +load(pathJoin("prod_util", os.getenv("prod_util_ver"))) +load(pathJoin("grib_util", os.getenv("grib_util_ver"))) +load(pathJoin("bufr_dump", os.getenv("bufr_dump_ver"))) +load(pathJoin("util_shared", os.getenv("util_shared_ver"))) +load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) +load(pathJoin("ncdiag", os.getenv("ncdiag_ver"))) +load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) + +--prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) -load(pathJoin("prepobs", "1.0.1")) +load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) + +prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/Fit2Obs/v" .. 
os.getenv("fit2obs_ver"), "modulefiles")) +load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) -prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/Fit2Obs/v1.0.0/modulefiles")) -load(pathJoin("fit2obs", "1.0.0")) +append_path("MODULEPATH", pathJoin("/apps/ops/prod/nco/models/modulefiles")) +load(pathJoin("mos_shared", os.getenv("mos_shared_ver"))) whatis("Description: GFS run environment") diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua index f4b62a5fd2..1aecddf549 100644 --- a/modulefiles/module_gwci.hera.lua +++ b/modulefiles/module_gwci.hera.lua @@ -2,14 +2,14 @@ help([[ Load environment to run GFS workflow setup scripts on Hera ]]) -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-intel", "18.0.5.274")) -load(pathJoin("hpc-impi", "2018.0.4")) +load(pathJoin("stack-intel", os.getenv("2021.5.0"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1"))) -load(pathJoin("netcdf","4.7.4")) -load(pathJoin("nccmp","1.8.7.0")) +load(pathJoin("netcdf-c", os.getenv("4.9.2"))) +load(pathJoin("netcdf-fortran", os.getenv("4.6.0"))) +load(pathJoin("nccmp","1.9.0.1")) load(pathJoin("wgrib2", "2.0.8")) whatis("Description: GFS run setup CI environment") diff --git a/modulefiles/module_gwci.hercules.lua b/modulefiles/module_gwci.hercules.lua new file mode 100644 index 0000000000..9c60aed467 --- /dev/null +++ b/modulefiles/module_gwci.hercules.lua @@ -0,0 +1,15 @@ +help([[ +Load environment to run GFS workflow ci scripts on Hercules +]]) + +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") + +load(pathJoin("stack-intel", os.getenv("2021.9.0"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.9.0"))) + +load(pathJoin("netcdf-c", os.getenv("4.9.2"))) +load(pathJoin("netcdf-fortran", os.getenv("4.6.0"))) +load(pathJoin("nccmp","1.9.0.1")) +load(pathJoin("wgrib2", "3.1.1")) + +whatis("Description: GFS run ci top-level sripts environment") diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua index 971ba01c65..18851ba7d4 100644 --- a/modulefiles/module_gwci.orion.lua +++ b/modulefiles/module_gwci.orion.lua @@ -2,21 +2,14 @@ help([[ Load environment to run GFS workflow ci scripts on Orion ]]) -prepend_path("MODULEPATH", "/apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/stack") +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-intel", "2018.4")) -load(pathJoin("hpc-impi", "2018.4")) -load(pathJoin("netcdf","4.7.4")) -load(pathJoin("netcdf","4.7.4-parallel")) -load(pathJoin("nccmp"," 1.8.7.0")) -load(pathJoin("contrib","0.1")) -load(pathJoin("wgrib2","3.0.2")) +load(pathJoin("stack-intel", os.getenv("2022.0.2"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1"))) -prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-intel", "2018.4")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("gfs_workflow", "1.0.0")) +load(pathJoin("netcdf-c", os.getenv("4.9.2"))) +load(pathJoin("netcdf-fortran", os.getenv("4.6.0"))) +load(pathJoin("nccmp","1.9.0.1")) 
+load(pathJoin("wgrib2", "2.0.8")) whatis("Description: GFS run ci top-level sripts environment") diff --git a/modulefiles/module_gwsetup.hera.lua b/modulefiles/module_gwsetup.hera.lua index a07b32b6a6..c86cac7b02 100644 --- a/modulefiles/module_gwsetup.hera.lua +++ b/modulefiles/module_gwsetup.hera.lua @@ -4,10 +4,15 @@ Load environment to run GFS workflow setup scripts on Hera load(pathJoin("rocoto")) --- Temporary until official hpc-stack is updated -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("gfs_workflow", "1.0.0")) +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" +local python_ver=os.getenv("python_ver") or "3.10.8" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.hercules.lua b/modulefiles/module_gwsetup.hercules.lua new file mode 100644 index 0000000000..673928605c --- /dev/null +++ b/modulefiles/module_gwsetup.hercules.lua @@ -0,0 +1,19 @@ +help([[ +Load environment to run GFS workflow ci scripts on Hercules +]]) + +load(pathJoin("contrib","0.1")) +load(pathJoin("rocoto","1.3.5")) + +prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0" +local python_ver=os.getenv("python_ver") or "3.10.8" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") + +whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.jet.lua b/modulefiles/module_gwsetup.jet.lua new file mode 100644 index 0000000000..d08389c711 --- /dev/null +++ b/modulefiles/module_gwsetup.jet.lua @@ -0,0 +1,18 @@ +help([[ +Load environment to run GFS workflow setup scripts on Jet +]]) + +load(pathJoin("rocoto", "1.3.3")) + +prepend_path("MODULEPATH", "/mnt/lfs4/HFIP/hfv3gfs/role.epic/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" +local python_ver=os.getenv("python_ver") or "3.10.8" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") + +whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua index 37f3187fb4..93a59c8e50 100644 --- a/modulefiles/module_gwsetup.orion.lua +++ b/modulefiles/module_gwsetup.orion.lua @@ -2,16 +2,20 @@ help([[ Load environment to run GFS workflow ci scripts on Orion ]]) --- Temporary until official hpc-stack is updated - prepend_path("MODULEPATH", "/apps/modulefiles/core") load(pathJoin("contrib","0.1")) load(pathJoin("rocoto","1.3.3")) load(pathJoin("git","2.28.0")) -prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("gfs_workflow", "1.0.0")) +prepend_path("MODULEPATH", 
"/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2022.0.2" +local python_ver=os.getenv("python_ver") or "3.10.8" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") -whatis("Description: GFS run ci top-level sripts environment") +whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.s4.lua b/modulefiles/module_gwsetup.s4.lua new file mode 100644 index 0000000000..291c654bb3 --- /dev/null +++ b/modulefiles/module_gwsetup.s4.lua @@ -0,0 +1,19 @@ +help([[ +Load environment to run GFS workflow setup scripts on S4 +]]) + +load(pathJoin("rocoto","1.3.5")) +load(pathJoin("git","2.30.0")) + +prepend_path("MODULEPATH", "/data/prod/jedi/spack-stack/spack-stack-1.5.1/envs/gsi-addon/install/modulefiles/Core") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" +local python_ver=os.getenv("python_ver") or "3.10.8" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("python", python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") + +whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.wcoss2.lua b/modulefiles/module_gwsetup.wcoss2.lua new file mode 100644 index 0000000000..d4e64548b0 --- /dev/null +++ b/modulefiles/module_gwsetup.wcoss2.lua @@ -0,0 +1,10 @@ +help([[ +Load environment to run GFS workflow ci scripts on WCOSS2 +]]) + +load(pathJoin("git","2.29.0")) + +prepend_path("MODULEPATH", "/apps/ops/test/nco/modulefiles/core") +load(pathJoin("rocoto","1.3.5")) + +whatis("Description: GFS run setup environment") diff --git a/parm/config/gefs/config.aero b/parm/config/gefs/config.aero new file mode 120000 index 0000000000..9cf5cce080 --- /dev/null +++ b/parm/config/gefs/config.aero @@ -0,0 +1 @@ +../gfs/config.aero \ No newline at end of file diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn index 8a336a2650..64a38f669f 100644 --- a/parm/config/gefs/config.base.emc.dyn +++ b/parm/config/gefs/config.base.emc.dyn @@ -19,7 +19,7 @@ export PARTITION_BATCH="@PARTITION_BATCH@" export PARTITION_SERVICE="@PARTITION_SERVICE@" # Project to use in mass store: -HPSS_PROJECT="@HPSS_PROJECT@" +export HPSS_PROJECT="@HPSS_PROJECT@" # Directories relative to installation areas: export HOMEgfs=@HOMEgfs@ @@ -30,9 +30,15 @@ export UTILgfs=${HOMEgfs}/util export EXECgfs=${HOMEgfs}/exec export SCRgfs=${HOMEgfs}/scripts -export FIXcice=${HOMEgfs}/fix/cice -export FIXmom=${HOMEgfs}/fix/mom6 -export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXorog="${FIXgfs}/orog" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXugwd="${FIXgfs}/ugwd" ######################################################################## @@ -40,7 +46,7 @@ export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops export COMINsyn="@COMINsyn@" -export DMPDIR="@DMPDIR@" +export BASE_CPLIC="@BASE_CPLIC@" # USER specific paths export HOMEDIR="@HOMEDIR@" @@ -55,13 +61,11 @@ export BASE_GIT="@BASE_GIT@" export DO_BUFRSND="NO" # BUFR sounding products export DO_GEMPAK="NO" # GEMPAK products export DO_AWIPS="NO" # 
AWIPS products -export WAFSF="NO" # WAFS products -export DO_VRFY="YES" # VRFY step # NO for retrospective parallel; YES for real-time parallel # arch.sh uses REALTIME for MOS. Need to set REALTIME=YES # if want MOS written to HPSS. Should update arch.sh to -# use RUNMOS flag (currently in config.vrfy) +# use RUNMOS flag export REALTIME="YES" # Experiment mode (cycled or forecast-only) @@ -74,9 +78,7 @@ export MODE="@MODE@" # cycled/forecast-only # CLEAR #################################################### # Build paths relative to $HOMEgfs -export FIXgsi="${HOMEgfs}/fix/gsi" export HOMEpost="${HOMEgfs}" -export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" # CONVENIENT utility scripts and other environment parameters export NCP="/bin/cp -p" @@ -86,8 +88,8 @@ export VERBOSE="YES" export KEEPDATA="NO" export CHGRP_RSTPROD="@CHGRP_RSTPROD@" export CHGRP_CMD="@CHGRP_CMD@" -export NCDUMP="$NETCDF/bin/ncdump" -export NCLEN="$HOMEgfs/ush/getncdimlen" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts export BASE_ENV="${HOMEgfs}/env" @@ -101,11 +103,7 @@ export assim_freq=6 export PSLOT="@PSLOT@" export EXPDIR="@EXPDIR@/${PSLOT}" export ROTDIR="@ROTDIR@/${PSLOT}" -export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work -export DUMP_SUFFIX="" -if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then - export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel -fi + export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead export ARCDIR="${NOSCRUB}/archive/${PSLOT}" @@ -113,8 +111,8 @@ export ATARDIR="@ATARDIR@" # Commonly defined parameters in JJOBS export envir=${envir:-"prod"} -export NET="gfs" # NET is defined in the job-card (ecf) -export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +export NET="gefs" # NET is defined in the job-card (ecf) +export RUN="gefs" # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy # TODO: determine where is RUN actually used in the workflow other than here # TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be # consistent w/ EE2? @@ -130,11 +128,10 @@ export REDOUT="1>" export REDERR="2>" export SENDECF=${SENDECF:-"NO"} -export SENDCOM=${SENDCOM:-"YES"} export SENDSDM=${SENDSDM:-"NO"} export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} export SENDDBN=${SENDDBN:-"NO"} -export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} # APP settings export APP=@APP@ @@ -146,10 +143,8 @@ export DO_WAVE="NO" export DO_OCN="NO" export DO_ICE="NO" export DO_AERO="NO" -export CCPP_SUITE="@CCPP_SUITE@" export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both export DOBNDPNT_WAVE="NO" -export cplwav2atm=".false." export FRAC_GRID=".true." 
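#---------------------------------------------------------------------------
# A minimal standalone sketch of the nested-default expansion used for
# NCDUMP above: prefer the hpc-stack variable (NETCDF), fall back to the
# spack-stack one (netcdf_c_ROOT), and default to empty so "set -u" scripts
# do not abort when neither stack is loaded. The install prefix below is
# hypothetical.
unset NETCDF
netcdf_c_ROOT="/apps/netcdf-c/4.9.2"               # hypothetical prefix
NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump"
echo "${NCDUMP}"                                   # -> /apps/netcdf-c/4.9.2/bin/ncdump
#---------------------------------------------------------------------------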
# Set operational resolution @@ -157,34 +152,48 @@ export OPS_RES="C768" # Do not change # Resolution specific parameters export LEVS=128 -export CASE="@CASECTL@" +export CASE="@CASECTL@" # CASE is required in GEFS to determine ocean/ice/wave resolutions export CASE_ENS="@CASEENS@" # TODO: This should not depend on $CASE or $CASE_ENS # These are the currently available grid-combinations case "${CASE}" in - "C48") export OCNRES=500;; - "C96") export OCNRES=100;; - "C192") export OCNRES=050;; - "C384") export OCNRES=025;; - "C768") export OCNRES=025;; - *) export OCNRES=025;; + "C48") + export OCNRES=500 + export waveGRD='glo_500' + ;; + "C96") + export OCNRES=100 + export waveGRD='glo_200' + ;; + "C192") + export OCNRES=050 + export waveGRD='glo_200' + ;; + "C384") + export OCNRES=025 + export waveGRD='glo_025' + ;; + "C768") + export OCNRES=025 + export waveGRD='mx025' + ;; + *) + export OCNRES=025 + export waveGRD='glo_025' + ;; esac export ICERES=${OCNRES} -export waveGRD='glo_025' case "${APP}" in ATM) - export confignamevarfornems="atm" ;; ATMA) export DO_AERO="YES" - export confignamevarfornems="atm_aero" ;; ATMW) export DO_COUPLED="YES" export DO_WAVE="YES" export WAVE_CDUMP="both" - export confignamevarfornems="leapfrog_atm_wav" ;; NG-GODAS) export DO_ATM="NO" @@ -195,23 +204,16 @@ case "${APP}" in export DO_COUPLED="YES" export DO_OCN="YES" export DO_ICE="YES" - export CCPP_SUITE="FV3_GFS_v17_coupled_p8" - export confignamevarfornems="cpld" if [[ "${APP}" =~ A$ ]]; then export DO_AERO="YES" - export confignamevarfornems="${confignamevarfornems}_aero" fi if [[ "${APP}" =~ ^S2SW ]]; then export DO_WAVE="YES" export WAVE_CDUMP="both" export cplwav2atm=".true." - export confignamevarfornems="${confignamevarfornems}_outerwave" fi - - source ${EXPDIR}/config.defaults.s2sw - ;; *) echo "Unrecognized APP: ${1}" @@ -219,37 +221,21 @@ case "${APP}" in ;; esac -# Surface cycle update frequency -if [[ "${CDUMP}" =~ "gdas" ]] ; then - export FHCYC=1 - export FTSFS=10 -elif [[ "${CDUMP}" =~ "gfs" ]] ; then - export FHCYC=24 -fi - -# Output frequency of the forecast model (for cycling) -export FHMIN=0 -export FHMAX=9 -export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) - -# Cycle to run EnKF (set to BOTH for both gfs and gdas) -export EUPD_CYC="gdas" - # GFS cycle info export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. # GFS output and frequency export FHMIN_GFS=0 -export FHMAX_GFS_00=${FHMAX_GFS_00:-120} -export FHMAX_GFS_06=${FHMAX_GFS_06:-120} -export FHMAX_GFS_12=${FHMAX_GFS_12:-120} -export FHMAX_GFS_18=${FHMAX_GFS_18:-120} -export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var} -export FHOUT_GFS=${FHOUT_GFS:-3} -export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} -export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 if (( gfs_cyc != 0 )); then export STEP_GFS=$(( 24 / gfs_cyc )) else @@ -257,34 +243,27 @@ else fi export ILPOST=1 # gempak output frequency up to F120 +export FHMIN_ENKF=${FHMIN_GFS} +export FHMAX_ENKF=${FHMAX_GFS} +export FHOUT_ENKF=${FHOUT_GFS} + # GFS restart interval in hours -export restart_interval_gfs=0 +export restart_interval_gfs=12 +# NOTE: Do not set this to zero. 
Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_namelists_WW3.sh where it is actually used export QUILTING=".true." export OUTPUT_GRID="gaussian_grid" export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST export WRITE_NSFLIP=".true." -# IAU related parameters -export DOIAU="YES" # Enable 4DIAU for control with 3 increments -export IAUFHRS="3,6,9" -export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) -export IAU_DELTHRS=6 -export IAU_OFFSET=6 -export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble -export IAUFHRS_ENKF="3,6,9" -export IAU_DELTHRS_ENKF=6 - -# Use Jacobians in eupd and thereby remove need to run eomg -export lobsdiag_forenkf=".true." - -# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA -# export DO_WAVE="NO" -# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" -# fi +# Override normal post flat files for GEFS +export FLTFILEGFS="${PARMgfs}/post/postxconfig-NT-GEFS.txt" +export FLTFILEGFSF00="${PARMgfs}/post/postxconfig-NT-GEFS-F00.txt" # Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL -export imp_physics=@IMP_PHYSICS@ +export imp_physics=8 # Shared parameters # DA engine @@ -295,39 +274,23 @@ export DO_JEDILANDDA="NO" export DO_MERGENSST="NO" # Hybrid related -export DOHYBVAR="@DOHYBVAR@" export NMEM_ENS=@NMEM_ENS@ -export NMEM_ENS_GFS=@NMEM_ENS@ -export SMOOTH_ENKF="NO" -export l4densvar=".true." -export lwrite4danl=".true." - -# EnKF output frequency -if [[ ${DOHYBVAR} = "YES" ]]; then - export FHMIN_ENKF=3 - export FHMAX_ENKF=9 - export FHMAX_ENKF_GFS=120 - export FHOUT_ENKF_GFS=3 - if [ $l4densvar = ".true." ]; then - export FHOUT=1 - export FHOUT_ENKF=1 - else - export FHOUT_ENKF=3 - fi -fi -# if 3DVAR and IAU -if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then - export IAUFHRS="6" - export IAU_FHROT="3" - export IAU_FILTER_INCREMENTS=".true." - export IAUFHRS_ENKF="6" -fi +# set default member number memdir for control +# this will be overwritten for the perturbed members +export ENSMEM="000" +export MEMDIR="mem${ENSMEM}" -# Check if cycle is cold starting, DOIAU off, or free-forecast mode -if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then - export IAU_OFFSET=0 +export DOIAU="NO" # While we are not doing IAU, we may want to warm start w/ IAU in the future +# Check if cycle is cold starting +if [[ "${EXP_WARM_START}" = ".false." ]]; then export IAU_FHROT=0 +else + if [[ "${DOIAU}" = "YES" ]]; then + export IAU_FHROT=3 + else + export IAU_FHROT=0 + fi fi # turned on nsst in anal and/or fcst steps, and turn off rtgsst @@ -343,24 +306,8 @@ export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" # Make the aircraft prepbufr file on the fly or use the GDA version export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" -# Analysis increments to zero in CALCINCEXEC -export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" - -# Write analysis files for early cycle EnKF -export DO_CALC_INCREMENT_ENKF_GFS="YES" - -# Stratospheric increments to zero -export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" -export INCVARS_EFOLD="5" - -# Swith to generate netcdf or binary diagnostic files. If not specified, -# script default to binary diagnostic files.
Set diagnostic file -# variables here since used in both DA and vrfy jobs -export netcdf_diag=".true." -export binary_diag=".false." - # Verification options -export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp export DO_FIT2OBS="NO" # Run fit to observations package # Archiving options diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs index 95c2cb58de..9593408848 100644 --- a/parm/config/gefs/config.efcs +++ b/parm/config/gefs/config.efcs @@ -5,47 +5,35 @@ echo "BEGIN: config.efcs" -# TODO: the _ENKF counterparts need to be defined in config.base -export DO_AERO=${DO_AERO_ENKF:-"NO"} -export DO_OCN=${DO_OCN_ENKF:-"NO"} -export DO_ICE=${DO_ICE_ENKF:-"NO"} -export DO_WAVE=${DO_WAVE_ENKF:-"NO"} - -# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too -if [[ ${DO_OCN} == "YES" ]]; then - case "${CASE_ENS}" in - "C48") export OCNRES=500;; - "C96") export OCNRES=100;; - "C192") export OCNRES=050;; - "C384") export OCNRES=025;; - "C768") export OCNRES=025;; - *) export OCNRES=025;; - esac -fi -[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES -[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? +# Turn off components in ensemble via _ENKF, or use setting from deterministic +export DO_AERO=${DO_AERO_ENKF:-${DO_AERO:-"NO"}} +export DO_OCN=${DO_OCN_ENKF:-${DO_OCN:-"NO"}} +export DO_ICE=${DO_ICE_ENKF:-${DO_ICE:-"NO"}} +export DO_WAVE=${DO_WAVE_ENKF:-${DO_WAVE:-"NO"}} # Source model specific information that is resolution dependent string="--fv3 ${CASE_ENS}" -[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" -[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" -[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" -source $EXPDIR/config.ufs ${string} +# Ocean/Ice/Waves ensemble configurations are identical to deterministic member +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} # Get task specific resources -. $EXPDIR/config.resources efcs +source "${EXPDIR}/config.resources" efcs # Use serial I/O for ensemble (lustre?) export OUTPUT_FILETYPE_ATM="netcdf" export OUTPUT_FILETYPE_SFC="netcdf" # Number of enkf members per fcst job -export NMEM_EFCSGRP=2 -export NMEM_EFCSGRP_GFS=1 +export NMEM_EFCSGRP=1 export RERUN_EFCSGRP="NO" # Turn off inline UPP for EnKF forecast -export WRITE_DOPOST=".false." +export WRITE_DOPOST=".true." # Stochastic physics parameters (only for ensemble forecasts) export DO_SKEB="YES" @@ -66,32 +54,6 @@ export SPPT_LSCALE=500000. export SPPT_LOGIT=".true." export SPPT_SFCLIMIT=".true." -if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" -else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" -fi - -# FV3 model namelist parameters to over-ride -export restart_interval=${restart_interval:-6} - -# For IAU, write restarts at beginning of window also -if [ $DOIAU_ENKF = "YES" ]; then - export restart_interval="3 -1" -fi - -# wave model -export cplwav=.false. 
- -# ocean model resolution -case "${CASE_ENS}" in - "C48") export OCNRES=500;; - "C96") export OCNRES=100;; - "C192") export OCNRES=050;; - "C384") export OCNRES=025;; - "C768") export OCNRES=025;; - *) export OCNRES=025;; -esac -export ICERES=$OCNRES +export restart_interval=${restart_interval_gfs} echo "END: config.efcs" diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst new file mode 100644 index 0000000000..4c8d3be99f --- /dev/null +++ b/parm/config/gefs/config.fcst @@ -0,0 +1,268 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + + +# Get task specific resources +source "${EXPDIR}/config.resources" fcst +export domains_stack_size="16000000" + + +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### + +export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh" +#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="${HOMEgfs}/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options for stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if (( gwd_opt == 1 )); then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if (( gwd_opt == 2 )); then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=1 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".true." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".true." + export do_ugwp_v1_orog_only=".false."
+ launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi +fi + +# Sponge layer settings +export tau=0. +export rf_cutoff=10. +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".true." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + ;; + *) echo "Unknown microphysics option, ABORT!" ;; +esac + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="ufs.frac" +if [[ "${FRAC_GRID:-".true."}" == ".false." 
]]; then + export cplmode="ufs.nfrac" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +# Write more variables to output +export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table" + +# Write gfs restart files to rerun fcst from any break point +export restart_interval=${restart_interval_gfs:-12} + +# Turn off dry mass adjustment in GFS +export adjust_dry_mass=".false." + +# Write each restart file in 16 small files to save time +if [[ "${CASE}" = C768 ]]; then + export io_layout="4,4" +else + export io_layout="1,1" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/parm/config/gefs/config.ice b/parm/config/gefs/config.ice new file mode 120000 index 0000000000..39385a1cb4 --- /dev/null +++ b/parm/config/gefs/config.ice @@ -0,0 +1 @@ +../gfs/config.ice \ No newline at end of file diff --git a/parm/config/gefs/config.nsst b/parm/config/gefs/config.nsst new file mode 100644 index 0000000000..db4367b2c0 --- /dev/null +++ b/parm/config/gefs/config.nsst @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +cdate="${PDY}${cyc}" +if (( cdate < 2017072000 )); then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if (( NST_GSI > 0 )); then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/parm/config/gefs/config.ocn b/parm/config/gefs/config.ocn new file mode 120000 index 0000000000..d9ab25be5e --- /dev/null +++ b/parm/config/gefs/config.ocn @@ -0,0 +1 @@ +../gfs/config.ocn \ No newline at end of file diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 9dd5c6e737..a50418d23a 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -8,19 +8,13 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" 
echo "argument can be any one of the following:" - echo "coupled_ic aerosol_init" - echo "atmanlinit atmanlrun atmanlfinal" - echo "atmensanlinit atmensanlrun atmensanlfinal" - echo "landanlprep landanlinit landanlrun landanlfinal" - echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" - echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "stage_ic aerosol_init" + echo "sfcanl analcalc analdiag fcst fit2obs metp arch echgres" + echo "ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" echo "postsnd awips gempak" - echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" - echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" exit 1 fi @@ -49,6 +43,8 @@ elif [[ ${machine} = "S4" ]]; then fi elif [[ ${machine} = "ORION" ]]; then export npe_node_max=40 +elif [[ ${machine} = "HERCULES" ]]; then + export npe_node_max=40 fi if [[ ${step} = "prep" ]]; then @@ -169,303 +165,6 @@ elif [[ ${step} = "waveawipsgridded" ]]; then export NTASKS=${npe_waveawipsgridded} export memory_waveawipsgridded_gfs="1GB" -elif [[ "${step}" = "atmanlinit" ]]; then - - export wtime_atmanlinit="00:10:00" - export npe_atmanlinit=1 - export nth_atmanlinit=1 - npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) - export npe_node_atmanlinit - export memory_atmanlinit="3072M" - -elif [[ "${step}" = "atmanlrun" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - export wtime_atmanlrun="00:30:00" - npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanlrun - npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmanlrun_gfs - export nth_atmanlrun=1 - export nth_atmanlrun_gfs=${nth_atmanlrun} - npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) - export npe_node_atmanlrun - export is_exclusive=True - -elif [[ "${step}" = "atmanlfinal" ]]; then - - export wtime_atmanlfinal="00:30:00" - export npe_atmanlfinal=${npe_node_max} - export nth_atmanlfinal=1 - npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) - export npe_node_atmanlfinal - export is_exclusive=True - -elif [[ "${step}" = "landanlprep" || "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then - # below lines are for creating JEDI YAML - case ${CASE} in - C768) - layout_x=6 - layout_y=6 - ;; - C384) - layout_x=5 - layout_y=5 - ;; - C192 | C96 | C48) - layout_x=1 - layout_y=1 - ;; - *) - echo "FATAL ERROR: Resolution not supported for land analysis'" - exit 1 - esac - - export layout_x - export layout_y - - if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then - declare -x "wtime_${step}"="00:10:00" - declare -x "npe_${step}"=1 - declare -x "nth_${step}"=1 - temp_stepname="nth_${step}" - declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" - declare -x "memory_${step}"="3072M" - elif [[ "${step}" = "landanlrun" ]]; then - export wtime_landanlrun="00:30:00" - npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_landanlrun - export nth_landanlrun=1 - npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) - export npe_node_landanlrun - export is_exclusive=True - elif [[ "${step}" = "landanlprep" ]]; then - export wtime_landanlprep="00:30:00" - 
npe_landanlprep=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_landanlprep - export nth_landanlprep=1 - npe_node_landanlprep=$(echo "${npe_node_max} / ${nth_landanlprep}" | bc) - export npe_node_landanlprep - export is_exclusive=True - fi - -elif [[ "${step}" = "aeroanlinit" ]]; then - - # below lines are for creating JEDI YAML - case ${CASE} in - C768) - layout_x=6 - layout_y=6 - ;; - C384) - layout_x=5 - layout_y=5 - ;; - C192 | C96 | C48) - layout_x=8 - layout_y=8 - ;; - *) - echo "FATAL ERROR: Resolution not supported for aerosol analysis'" - exit 1 - esac - - export layout_x - export layout_y - - export wtime_aeroanlinit="00:10:00" - export npe_aeroanlinit=1 - export nth_aeroanlinit=1 - npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) - export npe_node_aeroanlinit - export memory_aeroanlinit="3072M" - -elif [[ "${step}" = "aeroanlrun" ]]; then - - case ${CASE} in - C768) - layout_x=6 - layout_y=6 - ;; - C384) - layout_x=5 - layout_y=5 - ;; - C192 | C96 | C48) - layout_x=8 - layout_y=8 - ;; - *) - echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" - exit 1 - esac - - export layout_x - export layout_y - - export wtime_aeroanlrun="00:30:00" - npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_aeroanlrun - npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_aeroanlrun_gfs - export nth_aeroanlrun=1 - export nth_aeroanlrun_gfs=1 - npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) - export npe_node_aeroanlrun - export is_exclusive=True - -elif [[ "${step}" = "aeroanlfinal" ]]; then - - export wtime_aeroanlfinal="00:10:00" - export npe_aeroanlfinal=1 - export nth_aeroanlfinal=1 - npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) - export npe_node_aeroanlfinal - export memory_aeroanlfinal="3072M" - -elif [[ "${step}" = "ocnanalprep" ]]; then - - export wtime_ocnanalprep="00:10:00" - export npe_ocnanalprep=1 - export nth_ocnanalprep=1 - npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) - export npe_node_ocnanalprep - export memory_ocnanalprep="24GB" - -elif [[ "${step}" = "ocnanalbmat" ]]; then - npes=16 - case ${CASE} in - C384) - npes=480 - ;; - C48) - npes=16 - ;; - *) - echo "FATAL: Resolution not supported'" - exit 1 - esac - - export wtime_ocnanalbmat="00:30:00" - export npe_ocnanalbmat=${npes} - export nth_ocnanalbmat=1 - export is_exclusive=True - npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) - export npe_node_ocnanalbmat - -elif [[ "${step}" = "ocnanalrun" ]]; then - npes=16 - case ${CASE} in - C384) - npes=480 - ;; - C48) - npes=16 - ;; - *) - echo "FATAL: Resolution not supported'" - exit 1 - esac - - export wtime_ocnanalrun="00:30:00" - export npe_ocnanalrun=${npes} - export nth_ocnanalrun=1 - export is_exclusive=True - npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) - export npe_node_ocnanalrun - -elif [[ "${step}" = "ocnanalchkpt" ]]; then - - export wtime_ocnanalchkpt="00:10:00" - export npe_ocnanalchkpt=1 - export nth_ocnanalchkpt=1 - npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) - export npe_node_ocnanalchkpt - case ${CASE} in - C384) - export memory_ocnanalchkpt="128GB" - ;; - C48) - export memory_ocnanalchkpt="32GB" - ;; - *) - echo "FATAL: Resolution not supported'" - exit 1 - esac - -elif [[ "${step}" = "ocnanalpost" ]]; then - - export wtime_ocnanalpost="00:30:00" - export npe_ocnanalpost=${npe_node_max} - export nth_ocnanalpost=1 - 
npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) - export npe_node_ocnanalpost - -elif [[ "${step}" = "ocnanalvrfy" ]]; then - - export wtime_ocnanalvrfy="00:35:00" - export npe_ocnanalvrfy=1 - export nth_ocnanalvrfy=1 - npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) - export npe_node_ocnanalvrfy - export memory_ocnanalvrfy="24GB" - -elif [[ ${step} = "anal" ]]; then - - export wtime_anal="00:50:00" - export wtime_anal_gfs="00:40:00" - export npe_anal=780 - export nth_anal=5 - export npe_anal_gfs=825 - export nth_anal_gfs=5 - if [[ "${machine}" = "WCOSS2" ]]; then - export nth_anal=8 - export nth_anal_gfs=8 - fi - if [[ ${CASE} = "C384" ]]; then - export npe_anal=160 - export npe_anal_gfs=160 - export nth_anal=10 - export nth_anal_gfs=10 - if [[ ${machine} = "S4" ]]; then - #On the S4-s4 partition, this is accomplished by increasing the task - #count to a multiple of 32 - if [[ ${PARTITION_BATCH} = "s4" ]]; then - export npe_anal=416 - export npe_anal_gfs=416 - fi - #S4 is small, so run this task with just 1 thread - export nth_anal=1 - export nth_anal_gfs=1 - export wtime_anal="02:00:00" - fi - fi - if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then - export npe_anal=84 - export npe_anal_gfs=84 - if [[ ${machine} = "S4" ]]; then - export nth_anal=4 - export nth_anal_gfs=4 - #Adjust job count for S4 - if [[ ${PARTITION_BATCH} = "s4" ]]; then - export npe_anal=88 - export npe_anal_gfs=88 - elif [[ ${PARTITION_BATCH} = "ivy" ]]; then - export npe_anal=90 - export npe_anal_gfs=90 - fi - fi - fi - npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) - export npe_node_anal - export nth_cycle=${nth_anal} - npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) - export npe_node_cycle - export is_exclusive=True - elif [[ ${step} = "analcalc" ]]; then export wtime_analcalc="00:10:00" @@ -551,38 +250,42 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then export MEDPETS MEDTHREADS echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + CHMPETS=0; CHMTHREADS=0 if [[ "${DO_AERO}" = "YES" ]]; then # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). 
(( CHMTHREADS = ATMTHREADS )) (( CHMPETS = FV3PETS )) # Do not add to NTASKS_TOT - export CHMPETS CHMTHREADS echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" fi + export CHMPETS CHMTHREADS + WAVPETS=0; WAVTHREADS=0 if [[ "${DO_WAVE}" = "YES" ]]; then (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) (( WAVTHREADS = nthreads_ww3 )) - export WAVPETS WAVTHREADS echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) fi + export WAVPETS WAVTHREADS + OCNPETS=0; OCNTHREADS=0 if [[ "${DO_OCN}" = "YES" ]]; then (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) (( OCNTHREADS = nthreads_mom6 )) - export OCNPETS OCNTHREADS echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) fi + export OCNPETS OCNTHREADS + ICEPETS=0; ICETHREADS=0 if [[ "${DO_ICE}" = "YES" ]]; then (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) (( ICETHREADS = nthreads_cice6 )) - export ICEPETS ICETHREADS echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) fi + export ICEPETS ICETHREADS echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" @@ -600,11 +303,11 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then case "${CASE}" in "C48" | "C96" | "C192") - declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}"="03:00:00" declare -x "wtime_${step}_gfs"="03:00:00" ;; "C384" | "C768" | "C1152") - declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}"="06:00:00" declare -x "wtime_${step}_gfs"="06:00:00" ;; *) @@ -629,89 +332,6 @@ elif [[ ${step} = "ocnpost" ]]; then npe_ocnpost=2 fi -elif [[ ${step} = "post" ]]; then - - export wtime_post="00:12:00" - export wtime_post_gfs="01:00:00" - export npe_post=126 - res=$(echo "${CASE}" | cut -c2-) - if (( npe_post > res )); then - export npe_post=${res} - fi - export nth_post=1 - export npe_node_post=${npe_post} - export npe_node_post_gfs=${npe_post} - export npe_node_dwn=${npe_node_max} - if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi - if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi - export is_exclusive=True - -elif [[ ${step} = "wafs" ]]; then - - export wtime_wafs="00:30:00" - export npe_wafs=1 - export npe_node_wafs=${npe_wafs} - export nth_wafs=1 - export memory_wafs="1GB" - -elif [[ ${step} = "wafsgcip" ]]; then - - export wtime_wafsgcip="00:30:00" - export npe_wafsgcip=2 - export nth_wafsgcip=1 - export npe_node_wafsgcip=1 - export memory_wafsgcip="50GB" - -elif [[ ${step} = "wafsgrib2" ]]; then - - export wtime_wafsgrib2="00:30:00" - export npe_wafsgrib2=18 - export nth_wafsgrib2=1 - npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) - export npe_node_wafsgrib2 - export memory_wafsgrib2="80GB" - -elif [[ ${step} = "wafsblending" ]]; then - - export wtime_wafsblending="00:30:00" - export npe_wafsblending=1 - export nth_wafsblending=1 - npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) - export npe_node_wafsblending - export memory_wafsblending="15GB" - -elif [[ ${step} = "wafsgrib20p25" ]]; then - - export wtime_wafsgrib20p25="00:30:00" - export npe_wafsgrib20p25=11 - export nth_wafsgrib20p25=1 - npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) - export npe_node_wafsgrib20p25 - export memory_wafsgrib20p25="80GB" - -elif [[ ${step} = "wafsblending0p25" ]]; then - - export wtime_wafsblending0p25="00:30:00" - export npe_wafsblending0p25=1 - 
export nth_wafsblending0p25=1 - npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) - export npe_node_wafsblending0p25 - export memory_wafsblending0p25="15GB" - -elif [[ ${step} = "vrfy" ]]; then - - export wtime_vrfy="03:00:00" - export wtime_vrfy_gfs="06:00:00" - export npe_vrfy=3 - export nth_vrfy=1 - export npe_node_vrfy=1 - export npe_vrfy_gfs=1 - export npe_node_vrfy_gfs=1 - if [[ ${machine} == "HERA" ]]; then - export memory_vrfy="16384M" - fi - export is_exclusive=True - elif [[ "${step}" = "fit2obs" ]]; then export wtime_fit2obs="00:20:00" @@ -767,113 +387,12 @@ elif [[ ${step} = "arch" || ${step} = "earc" ]]; then eval "export memory_${step}=50GB" fi -elif [[ ${step} = "coupled_ic" ]]; then - - export wtime_coupled_ic="00:15:00" - export npe_coupled_ic=1 - export npe_node_coupled_ic=1 - export nth_coupled_ic=1 - export is_exclusive=True - -elif [[ "${step}" = "atmensanlinit" ]]; then - - export wtime_atmensanlinit="00:10:00" - export npe_atmensanlinit=1 - export nth_atmensanlinit=1 - npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) - export npe_node_atmensanlinit - export memory_atmensanlinit="3072M" - -elif [[ "${step}" = "atmensanlrun" ]]; then - - # make below case dependent later - export layout_x=1 - export layout_y=1 - - export wtime_atmensanlrun="00:30:00" - npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanlrun - npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export npe_atmensanlrun_gfs - export nth_atmensanlrun=1 - export nth_atmensanlrun_gfs=${nth_atmensanlrun} - npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) - export npe_node_atmensanlrun - export is_exclusive=True - -elif [[ "${step}" = "atmensanlfinal" ]]; then - - export wtime_atmensanlfinal="00:30:00" - export npe_atmensanlfinal=${npe_node_max} - export nth_atmensanlfinal=1 - npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) - export npe_node_atmensanlfinal - export is_exclusive=True - -elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then +elif [[ ${step} = "stage_ic" ]]; then - export wtime_eobs="00:15:00" - export wtime_eomg="01:00:00" - if [[ ${CASE} = "C768" ]]; then - export npe_eobs=200 - elif [[ ${CASE} = "C384" ]]; then - export npe_eobs=100 - elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then - export npe_eobs=40 - fi - export npe_eomg=${npe_eobs} - export nth_eobs=2 - export nth_eomg=${nth_eobs} - npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) - export npe_node_eobs - export npe_node_eomg=${npe_node_eobs} - export is_exclusive=True - #The number of tasks and cores used must be the same for eobs - #For S4, this is accomplished by running 10 tasks/node - if [[ ${machine} = "S4" ]]; then - export npe_node_eobs=10 - fi - -elif [[ ${step} = "ediag" ]]; then - - export wtime_ediag="00:15:00" - export npe_ediag=48 - export nth_ediag=1 - npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) - export npe_node_ediag - export memory_ediag="30GB" - -elif [[ ${step} = "eupd" ]]; then - - export wtime_eupd="00:30:00" - if [[ ${CASE} = "C768" ]]; then - export npe_eupd=480 - export nth_eupd=6 - if [[ "${machine}" = "WCOSS2" ]]; then - export npe_eupd=315 - export nth_eupd=14 - fi - elif [[ ${CASE} = "C384" ]]; then - export npe_eupd=270 - export nth_eupd=2 - if [[ "${machine}" = "WCOSS2" ]]; then - export npe_eupd=315 - export nth_eupd=14 - elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export 
nth_eupd=8 - elif [[ ${machine} = "S4" ]]; then - export npe_eupd=160 - export nth_eupd=2 - fi - elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then - export npe_eupd=42 - export nth_eupd=2 - if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export nth_eupd=4 - fi - fi - npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) - export npe_node_eupd + export wtime_stage_ic="00:15:00" + export npe_stage_ic=1 + export npe_node_stage_ic=1 + export nth_stage_ic=1 export is_exclusive=True elif [[ ${step} = "ecen" ]]; then diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic new file mode 100644 index 0000000000..e2bb0af2b8 --- /dev/null +++ b/parm/config/gefs/config.stage_ic @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.stage_ic ########## + +echo "BEGIN: config.stage_ic" + +# Get task specific resources +source "${EXPDIR}/config.resources" stage_ic + +case "${CASE}" in + "C48") + export CPL_ATMIC="gefs_test" + export CPL_ICEIC="gefs_test" + export CPL_OCNIC="gefs_test" + export CPL_WAVIC="gefs_test" + ;; + *) + echo "FATAL ERROR Unrecognized resolution: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.stage_ic" diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index 562fc9228d..68b364529e 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -8,14 +8,15 @@ echo "BEGIN: config.ufs" -if [ $# -le 1 ]; then +if (( $# <= 1 )); then echo "Must specify an input resolution argument to set variables!" echo "argument can be any one of the following:" echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--gocart" exit 1 @@ -25,26 +26,34 @@ fi skip_mom6=true skip_cice6=true skip_ww3=true +skip_gocart=true skip_mediator=true # Loop through named arguments -while [[ $# -gt 0 ]]; do +while (( $# > 0 )); do key="$1" case "${key}" in "--fv3") fv3_res="$2" + shift ;; "--mom6") mom6_res="$2" skip_mom6=false + shift ;; "--cice6") cice6_res="$2" skip_cice6=false + shift ;; "--ww3") ww3_res="$2" skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false ;; *) # unknown option echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
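#---------------------------------------------------------------------------
# A minimal standalone sketch of the flag parsing used in config.ufs above,
# showing why the second unconditional shift was removed: options that take
# a value (e.g. --fv3 C96) now shift past their value inside their own case
# arm, while bare flags such as --gocart shift nothing extra, and the single
# shift after esac consumes the option name itself. parse is a stand-in for
# the config.ufs argument loop.
parse() {
  local fv3_res="" skip_gocart=true
  while (( $# > 0 )); do
    case "$1" in
      "--fv3") fv3_res="$2"; shift ;;   # value-taking flag: one extra shift
      "--gocart") skip_gocart=false ;;  # bare flag: no extra shift
      *) echo "unknown option: $1"; return 1 ;;
    esac
    shift                               # consume the option name
  done
  echo "fv3_res=${fv3_res} skip_gocart=${skip_gocart}"
}
parse --fv3 C96 --gocart   # -> fv3_res=C96 skip_gocart=false
#---------------------------------------------------------------------------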
@@ -52,7 +61,6 @@ while [[ $# -gt 0 ]]; do ;; esac shift - shift done # Mediator is required if any of the non-ATM components are used @@ -64,7 +72,7 @@ case "${machine}" in "WCOSS2") npe_node_max=128 ;; - "HERA" | "ORION") + "HERA" | "ORION" | "HERCULES" ) npe_node_max=40 ;; "JET") @@ -98,6 +106,10 @@ case "${machine}" in ;; esac ;; + *) + echo "FATAL ERROR: Unrecognized machine ${machine}" + exit 14 + ;; esac export npe_node_max @@ -112,6 +124,8 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=1 export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 @@ -126,6 +140,8 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=1 export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 @@ -140,6 +156,8 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=2 export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=2 @@ -154,10 +172,12 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=2 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=2 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 ;; "C768") export DELTIM=150 @@ -168,6 +188,8 @@ case "${fv3_res}" in export nthreads_fv3=4 export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=2 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=4 @@ -182,6 +204,8 @@ case "${fv3_res}" in export nthreads_fv3=4 export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 @@ -196,6 +220,8 @@ case "${fv3_res}" in export nthreads_fv3=4 export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 @@ 
-235,13 +261,28 @@ case ${fv3_res} in esac export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1" +model_list="atm" + # Mediator specific settings if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? fi # MOM6 specific settings if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." + model_list="${model_list}.ocean" nthreads_mom6=1 case "${mom6_res}" in "500") @@ -255,6 +296,11 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; "100") ntasks_mom6=20 @@ -267,8 +313,16 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi ;; - "50") + "050") ntasks_mom6=60 OCNTIM=3600 NX_GLB=720 @@ -279,7 +333,17 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' - ;; + eps_imesh="1.0e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' + ;; "025") ntasks_mom6=220 OCNTIM=1800 @@ -291,12 +355,23 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + TOPOEDITS="" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' ;; *) echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" exit 1 ;; esac + export nthreads_mom6 ntasks_mom6 export OCNTIM export NX_GLB NY_GLB @@ -305,10 +380,18 @@ if [[ "${skip_mom6}" == "false" ]]; then export CHLCLIM export MOM6_RIVER_RUNOFF export MOM6_RESTART_SETTING + export eps_imesh + export TOPOEDITS + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES fi # CICE6 specific settings if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" # Ensure we sourced the MOM6 section if [[ "${skip_mom6}" == "true" ]]; then echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" @@ -345,6 +428,10 @@ fi # WW3 specific settings if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" nthreads_ww3=2 case "${ww3_res}" in "gnh_10m;aoc_9km;gsh_15m") @@ -353,15 +440,20 @@ if [[ "${skip_ww3}" == "false" ]]; then "gwes_30m") ntasks_ww3=100 ;; - "mx050") - ntasks_ww3=240 + "glo_025") + ntasks_ww3=262 + ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 ;; "mx025") ntasks_ww3=80 ;; - "glo_025") - ntasks_ww3=262 - ;; *) echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" exit 1 @@ -370,4 +462,47 @@ if [[ "${skip_ww3}" == "false" ]]; then export ntasks_ww3 nthreads_ww3 fi +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# Set the name of the UFS (previously nems) configure template to use +case "${model_list}" in + atm) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + ;; + atm.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + ;; + atm.ocean.ice.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + ;; + atm.ocean.ice.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + ;; + atm.ocean.ice.wave.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + ;; + *) + echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + exit 16 + ;; +esac + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." + exit 17 +fi + +unset model_list + echo "END: config.ufs" diff --git a/parm/config/gefs/config.wave b/parm/config/gefs/config.wave new file mode 100644 index 0000000000..e04331e533 --- /dev/null +++ b/parm/config/gefs/config.wave @@ -0,0 +1,185 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="${HOMEgfs}/exec" +export FIXwave="${HOMEgfs}/fix/wave" +export PARMwave="${HOMEgfs}/parm/wave" +export USHwave="${HOMEgfs}/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD='' + export wavepostGRD='gwes_30m' + export waveuoutpGRD=${waveGRD} + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_025' + export waveuoutpGRD=${waveGRD} + ;; + "glo_200") + #Global regular lat/lon 2 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_200' + export waveuoutpGRD=${waveGRD} + ;; + "glo_500") + #Global regular lat/lon 5 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_500' + export waveuoutpGRD=${waveGRD} + ;; + *) + echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." + exit 1 + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# The start time reflects the number of hindcast hours prior to the cycle initial time +export FHMAX_WAV=${FHMAX_GFS} +export WAVHINDH=0 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) +export DTPNT_WAV=3600 +export FHINCP_WAV=$(( DTPNT_WAV / 3600 )) + +# Selected output parameters (gridded) +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" + +# Restart file config +export WAVHCYC=${assim_freq:-6} +export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +rst_dt_gfs=$(( restart_interval_gfs * 3600 )) # TODO: This calculation needs to move to parsing_namelists_WW3.sh +if [[ ${rst_dt_gfs} -gt 0 ]]; then + export DT_1_RST_WAV=0 #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + #temporarily set to zero to avoid a clash in requested restart times, + #which makes the wave model crash; a fix for the model issue is coming + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart +else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run +fi +export RSTIOFF_WAV=0 # first restart file offset relative to model start +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset; this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [[ ${RUNMEM} = -1 ]]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB="${RUNMEM: -2}" +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ ${DO_ICE} == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ ${DO_OCN} == "YES" ]]; then + 
export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/parm/config/gefs/config.waveinit b/parm/config/gefs/config.waveinit new file mode 120000 index 0000000000..769bbcf903 --- /dev/null +++ b/parm/config/gefs/config.waveinit @@ -0,0 +1 @@ +../gfs/config.waveinit \ No newline at end of file diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml index 6e7633bfe0..ce5d8aeb3d 100644 --- a/parm/config/gefs/yaml/defaults.yaml +++ b/parm/config/gefs/yaml/defaults.yaml @@ -1 +1,6 @@ -# This file intentionally left blank +base: + DO_JEDIATMVAR: "NO" + DO_JEDIATMENS: "NO" + DO_JEDIOCNVAR: "NO" + DO_JEDILANDDA: "NO" + DO_MERGENSST: "NO" diff --git a/parm/config/gfs/config.aero b/parm/config/gfs/config.aero index 1cb3bf5679..32993554b4 100644 --- a/parm/config/gfs/config.aero +++ b/parm/config/gfs/config.aero @@ -2,15 +2,16 @@ # UFS-Aerosols settings -# Directory containing GOCART configuration files. Defaults to parm/chem if unset. -AERO_CONFIG_DIR=$HOMEgfs/parm/chem +# Turn off warnings about unused variables +# shellcheck disable=SC2034 + # Path to the input data tree -case $machine in +case ${machine} in "HERA") AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" ;; - "ORION") + "ORION" | "HERCULES") AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" ;; "S4") @@ -23,18 +24,23 @@ case $machine in AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" ;; *) - echo "FATAL ERROR: Machine $machine unsupported for aerosols" + echo "FATAL ERROR: Machine ${machine} unsupported for aerosols" exit 2 ;; esac +export AERO_INPUTS_DIR -# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) -AERO_EMIS_FIRE=QFED +export AERO_DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table.aero" +export AERO_FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table.aero" +# Biomass burning emission dataset. Choose from: gbbepx, qfed, none +export AERO_EMIS_FIRE="qfed" +# Directory containing GOCART configuration files +export AERO_CONFIG_DIR="${HOMEgfs}/parm/ufs/gocart" # Aerosol convective scavenging factors (list of string array elements) # Element syntax: '<tracer name>:<factor>'. 
Use <tracer name> = * to set default factor for all aerosol tracers # Scavenging factors are set to 0 (no scavenging) if unset -aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +export fscav_aero="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" # # Number of diagnostic aerosol tracers (default: 0) -aero_diag_tracers=2 +export dnats_aero=2 diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl index 41d63f8549..32ba43b7ba 100644 --- a/parm/config/gfs/config.aeroanl +++ b/parm/config/gfs/config.aeroanl @@ -8,17 +8,23 @@ echo "BEGIN: config.aeroanl" export CASE_ANL=${CASE} export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml -export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml export STATICB_TYPE='identity' export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml -export FV3JEDI_FIX=${HOMEgfs}/fix/gdas -export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export FIXgdas=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FIXgdas}/bump/aero/${CASE_ANL}/ export BERROR_DATE="20160630.000000" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x -export crtm_VERSION="2.3.0" + +if [[ "${DOIAU}" == "YES" ]]; then + export aero_bkg_times="3,6,9" + export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_fgat_gfs_aero.yaml +else + export aero_bkg_times="6" + export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +fi echo "END: config.aeroanl" diff --git a/parm/config/gfs/config.arch b/parm/config/gfs/config.arch deleted file mode 120000 index 0c165de1ef..0000000000 --- a/parm/config/gfs/config.arch +++ /dev/null @@ -1 +0,0 @@ -config.arch_gsl \ No newline at end of file diff --git a/parm/config/gfs/config.arch b/parm/config/gfs/config.arch new file mode 100644 index 0000000000..a23bcce6ae --- /dev/null +++ b/parm/config/gfs/config.arch @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} + +echo "END: config.arch" diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index c045704fa2..abfbd80734 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -13,13 +13,9 @@ export STATICB_TYPE="gsibec" export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml export INTERP_METHOD='barycentric' -export layout_x=1 -export layout_y=1 - -export io_layout_x=1 -export io_layout_y=1 +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x -export crtm_VERSION="2.3.0" echo "END: config.atmanl" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 4d945ea717..58fd7b6e22 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -10,13 +10,9 @@ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml export INTERP_METHOD='barycentric' -export layout_x=1 -export layout_y=1 - -export io_layout_x=1 -export io_layout_y=1 +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x -export crtm_VERSION="2.3.0" echo "END: config.atmensanl" diff --git a/parm/config/gfs/config.atmos_products b/parm/config/gfs/config.atmos_products new file mode 100644 index 0000000000..d8b1d6e32b --- /dev/null +++ b/parm/config/gfs/config.atmos_products @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.atmos_products ########## +# atmosphere grib2 products specific + +echo "BEGIN: config.atmos_products" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmos_products + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +# Scripts used by this job +export INTERP_ATMOS_MASTERSH="${HOMEgfs}/ush/interp_atmos_master.sh" +export INTERP_ATMOS_SFLUXSH="${HOMEgfs}/ush/interp_atmos_sflux.sh" + +if [[ "${RUN:-}" == "gdas" ]]; then + export downset=1 + export FHOUT_PGBS=${FHOUT:-1} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="NO" # Create interpolated sflux.1p00 file +elif [[ "${RUN:-}" == "gfs" ]]; then + export downset=2 + export FHOUT_PGBS=${FHOUT_GFS:-3} # Output frequency of supplemental gfs pgb file at 1.0 and 0.5 deg + export FLXGF="YES" # Create interpolated sflux.1p00 file +fi + +# paramlist files for the different forecast hours and downsets +export paramlista="${HOMEgfs}/parm/post/global_1x1_paramlist_g2" +export paramlista_anl="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.anl" +export paramlista_f000="${HOMEgfs}/parm/post/global_1x1_paramlist_g2.f000" +export paramlistb="${HOMEgfs}/parm/post/global_master-catchup_parmlist_g2" + +echo "END: config.atmos_products" diff --git a/parm/config/gfs/config.awips b/parm/config/gfs/config.awips index 9003e9f6b0..3b78d4bb4b 100644 --- a/parm/config/gfs/config.awips +++ b/parm/config/gfs/config.awips @@ -6,10 +6,10 @@ echo "BEGIN: config.awips" # Get task specific resources -. $EXPDIR/config.resources awips +. "${EXPDIR}/config.resources" awips -export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG -export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 +export AWIPS20KM1P0DEGSH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG" +export AWIPSG2SH="${HOMEgfs}/jobs/JGFS_ATMOS_AWIPS_G2" # No. 
of concurrent awips jobs export NAWIPSGRP=42 diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn deleted file mode 120000 index 6e9cfcec1a..0000000000 --- a/parm/config/gfs/config.base.emc.dyn +++ /dev/null @@ -1 +0,0 @@ -config.base.emc.dyn_hera \ No newline at end of file diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn new file mode 100644 index 0000000000..b726c1788a --- /dev/null +++ b/parm/config/gfs/config.base.emc.dyn @@ -0,0 +1,406 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +export HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs="${HOMEgfs}/parm" +export FIXgfs="${HOMEgfs}/fix" +export USHgfs="${HOMEgfs}/ush" +export UTILgfs="${HOMEgfs}/util" +export EXECgfs="${HOMEgfs}/exec" +export SCRgfs="${HOMEgfs}/scripts" + +export FIXam="${FIXgfs}/am" +export FIXaer="${FIXgfs}/aer" +export FIXcpl="${FIXgfs}/cpl" +export FIXlut="${FIXgfs}/lut" +export FIXorog="${FIXgfs}/orog" +export FIXcice="${FIXgfs}/cice" +export FIXmom="${FIXgfs}/mom6" +export FIXreg2grb2="${FIXgfs}/reg2grb2" +export FIXugwd="${FIXgfs}/ugwd" + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" +export BASE_CPLIC="@BASE_CPLIC@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_GOES="NO" # GOES products +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export DO_NPOESS="NO" # NPOESS products +export DO_TRACKER="YES" # Hurricane track verification +export DO_GENESIS="YES" # Cyclone genesis verification +export DO_GENESIS_FSU="NO" # Cyclone genesis verification (FSU) +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring +export DO_MOS="NO" # GFS Model Output Statistics - Only supported on WCOSS2 + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT:-}/obsproc/v${obsproc_run_ver:-1.1.2}" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" +export NCLEN="${HOMEgfs}/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change # TODO: Why is this needed and where is it used? 
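# Worked example of the DUMP_SUFFIX window defined earlier in this file
# (hypothetical date, not part of this change): PDY=20191001 with cyc=00 gives
# "${PDY}${cyc}" = 2019100100, which falls inside [2019092100, 2019110700], so
# DUMP_SUFFIX="p" and that cycle reads the NCO GFS v15.3 parallel dumps.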
+ +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") + export OCNRES=500 + export waveGRD='glo_500' + ;; + "C96") + export OCNRES=500 + export waveGRD='glo_200' + ;; + "C192") + export OCNRES=050 + export waveGRD='glo_200' + ;; + "C384") + export OCNRES=025 + export waveGRD='glo_025' + ;; + "C768" | "C1152") + export OCNRES=025 + export waveGRD='mx025' + ;; + *) + echo "FATAL ERROR: Unrecognized CASE ${CASE}, ABORT!" + exit 1 + ;; +esac +export ICERES=${OCNRES} + +case "${APP}" in + ATM) + ;; + ATMA) + export DO_AERO="YES" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + fi + ;; + *) + echo "Unrecognized APP: '${APP}'" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=120 +export FHMAX_GFS_06=120 +export FHMAX_GFS_12=120 +export FHMAX_GFS_18=120 +current_fhmax_var=FHMAX_GFS_${cyc}; declare -x FHMAX_GFS=${!current_fhmax_var} + +export FHOUT_GFS=6 # Must be 6 for S2S until #1629 is addressed; 3 for ops +export FHMAX_HF_GFS=0 +export FHOUT_HF_GFS=1 +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +#JKHexport restart_interval_gfs=12 +export restart_interval_gfs=-1 ## JKH +# NOTE: Do not set this to zero. Instead set it to $FHMAX_GFS +# TODO: Remove this variable from config.base and reference from config.fcst +# TODO: rework logic in config.wave and push it to parsing_nameslist_WW3.sh where it is actually used + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="@DOIAU@" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=${IAUFHRS%%,*} +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
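# Worked example of the cycle-dependent limits above (hypothetical cyc=06 with
# gfs_cyc=4; both variables are set earlier in this file):
#   current_fhmax_var=FHMAX_GFS_06   # variable name assembled from ${cyc}
#   FHMAX_GFS=${!current_fhmax_var}  # bash indirect expansion -> 120
#   STEP_GFS=$(( 24 / gfs_cyc ))     # -> 6, i.e. one gfs forecast every 6 hours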
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson, 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [[ ${l4densvar} = ".true." ]]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 + export IAUFHRS="6" +fi + +if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in DA job +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp; not supported with spack-stack +export DO_FIT2OBS="YES" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." 
+ exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +echo "END: config.base" diff --git a/parm/config/gfs/config.cleanup b/parm/config/gfs/config.cleanup new file mode 100644 index 0000000000..1908c91bb5 --- /dev/null +++ b/parm/config/gfs/config.cleanup @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.cleanup ########## +echo "BEGIN: config.cleanup" + +# Get task specific resources +source "${EXPDIR}/config.resources" cleanup + +export CLEANUP_COM="YES" # NO=retain ROTDIR. YES default in cleanup.sh + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +# Specify the list of files to exclude from the first stage of cleanup +# Because arrays cannot be exported, list is a single string of comma- +# separated values. This string is split to form an array at runtime. +case ${RUN} in + gdas | gfs) exclude_string="*prepbufr*, *cnvstat*, *atmanl.nc" ;; + enkf*) exclude_string="*f006.ens*" ;; + *) exclude_string="" ;; +esac +export exclude_string + +echo "END: config.cleanup" \ No newline at end of file diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 6a824012c6..208b0ac096 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -7,7 +7,7 @@ echo "BEGIN: config.com" # expansion does not occur when this file is sourced. Substitution happens later # during runtime. It is recommended to use the helper function `generate_com()`, # to do this substitution, which is defined in `ush/preamble.sh`. -# +# # Syntax for generate_com(): # generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] # @@ -30,10 +30,10 @@ echo "BEGIN: config.com" # MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY # -# +# # If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` # must correspondingly be updated to match. 
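# For orientation, a sketch of how one of these COM templates is meant to expand
# at runtime (generate_com lives in ush/preamble.sh and is not shown in this
# diff, so the expansion below is illustrative, not authoritative):
#   COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
#   MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
#   # e.g. -> ${ROTDIR}/gfs.20230101/00/mem001/model_data/atmos/history (hypothetical PDY/cyc)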
-# +# if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") @@ -48,21 +48,24 @@ COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' +declare -rx COM_CONF_TMPL=${COM_BASE}'/conf' declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' -declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2' +declare -rx COM_ATMOS_GRIB_GRID_TMPL=${COM_ATMOS_GRIB_TMPL}'/${GRID}' declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_OZNMON_TMPL=${COM_BASE}'/products/atmos/oznmon' +declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' -declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' @@ -79,9 +82,9 @@ declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' -declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' -declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' +declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' diff --git a/parm/config/gfs/config.efcs b/parm/config/gfs/config.efcs index 95c2cb58de..283ec3ab7e 100644 --- a/parm/config/gfs/config.efcs +++ b/parm/config/gfs/config.efcs @@ -5,35 +5,25 @@ echo "BEGIN: config.efcs" -# TODO: the _ENKF counterparts need to be defined in config.base -export DO_AERO=${DO_AERO_ENKF:-"NO"} -export DO_OCN=${DO_OCN_ENKF:-"NO"} -export DO_ICE=${DO_ICE_ENKF:-"NO"} -export DO_WAVE=${DO_WAVE_ENKF:-"NO"} - -# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too -if [[ ${DO_OCN} == "YES" ]]; then - case "${CASE_ENS}" in - "C48") export OCNRES=500;; - "C96") export OCNRES=100;; - "C192") export OCNRES=050;; - "C384") export OCNRES=025;; - "C768") export OCNRES=025;; - *) export OCNRES=025;; - esac -fi -[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES -[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different 
resolution in the ensemble? +# Turn off components in ensemble via _ENKF, or use setting from deterministic +export DO_AERO=${DO_AERO_ENKF:-${DO_AERO:-"NO"}} +export DO_OCN=${DO_OCN_ENKF:-${DO_OCN:-"NO"}} +export DO_ICE=${DO_ICE_ENKF:-${DO_ICE:-"NO"}} +export DO_WAVE=${DO_WAVE_ENKF:-${DO_WAVE:-"NO"}} # Source model specific information that is resolution dependent string="--fv3 ${CASE_ENS}" -[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" -[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" -[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" -source $EXPDIR/config.ufs ${string} +# Ocean/Ice/Waves ensemble configurations are identical to deterministic member +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} # Get task specific resources -. $EXPDIR/config.resources efcs +. "${EXPDIR}/config.resources" efcs # Use serial I/O for ensemble (lustre?) export OUTPUT_FILETYPE_ATM="netcdf" @@ -66,32 +56,17 @@ export SPPT_LSCALE=500000. export SPPT_LOGIT=".true." export SPPT_SFCLIMIT=".true." -if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da_orig" fi -# FV3 model namelist parameters to over-ride -export restart_interval=${restart_interval:-6} - # For IAU, write restarts at beginning of window also -if [ $DOIAU_ENKF = "YES" ]; then - export restart_interval="3 -1" +if [[ "${DOIAU_ENKF:-}" = "YES" ]]; then + export restart_interval="3" +else + export restart_interval="6" fi -# wave model -export cplwav=.false. - -# ocean model resolution -case "${CASE_ENS}" in - "C48") export OCNRES=500;; - "C96") export OCNRES=100;; - "C192") export OCNRES=050;; - "C384") export OCNRES=025;; - "C768") export OCNRES=025;; - *) export OCNRES=025;; -esac -export ICERES=$OCNRES - echo "END: config.efcs" diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 3bd81e41c7..35b3fac262 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -6,80 +6,47 @@ echo "BEGIN: config.fcst" # Turn off waves if not used for this CDUMP -case $WAVE_CDUMP in - both | ${CDUMP/enkf} ) ;; # Don't change +case ${WAVE_CDUMP} in + both | "${CDUMP/enkf}" ) ;; # Don't change *) DO_WAVE="NO" ;; # Turn waves off esac # Source model specific information that is resolution dependent -string="--fv3 $CASE" -[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" -[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" -[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" -source $EXPDIR/config.ufs ${string} - -# Source component configs if necessary -for component in WAVE OCN ICE AERO; do - control="DO_${component}" - if [[ $(eval echo \$$control) == "YES" ]]; then - . 
$EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') - fi -done +string="--fv3 ${CASE}" +[[ "${DO_OCN}" == "YES" ]] && string="${string} --mom6 ${OCNRES}" +[[ "${DO_ICE}" == "YES" ]] && string="${string} --cice6 ${ICERES}" +[[ "${DO_WAVE}" == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}" +[[ "${DO_AERO}" == "YES" ]] && string="${string} --gocart" +# We are counting on $string being multiple arguments +# shellcheck disable=SC2086 +source "${EXPDIR}/config.ufs" ${string} + # Get task specific resources -. $EXPDIR/config.resources fcst +source "${EXPDIR}/config.resources" fcst export domains_stack_size="16000000" -if [[ "$DONST" = "YES" ]]; then - . $EXPDIR/config.nsst +if [[ "${DONST}" == "YES" ]]; then + source "${EXPDIR}/config.nsst" fi export esmf_profile=".false." export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE -####################################################################### -# COUPLING COMPONENTS - -# cpl defaults -export cpl=".false." -export cplflx=".false." -export cplice=".false." -export cplchm=".false." -export cplwav=".false." - -# cpl changes based on APP - -if [[ "$DO_COUPLED" = "YES" ]]; then - export cpl=".true." -fi -if [[ "$DO_AERO" = "YES" ]]; then - export cplchm=".true." -fi -if [[ "$DO_ICE" = "YES" ]]; then - export cplice=".true." - export cplflx=".true." -fi -if [[ "$DO_OCN" = "YES" ]]; then - export cplflx=".true." -fi -if [[ "$DO_WAVE" = "YES" ]]; then - export cplwav=".true." -fi - ####################################################################### -export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" -#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on -export FCSTEXECDIR="$HOMEgfs/exec" +export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.sh" +#export FORECASTSH="${HOMEgfs}/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="${HOMEgfs}/exec" export FCSTEXEC="ufs_model.x" ####################################################################### # Model configuration export TYPE="nh" export MONO="non-mono" -export range_warn=".false." ## JKH +#JKHexport range_warn=".false." ## JKH # Use stratosphere h2o physics export h2o_phys=".true." @@ -92,16 +59,17 @@ export gwd_opt=2 # --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc # do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) # do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. -if [[ "$gwd_opt" -eq 1 ]]; then +if (( gwd_opt == 1 )); then export knob_ugwp_version=0 export do_ugwp=".false." export do_tofd=".false." - export launch_level=$(echo "$LEVS/2.35" |bc) + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level fi # -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc -if [[ "$gwd_opt" -eq 2 ]]; then +if (( gwd_opt == 2 )); then #--used for UFS p7 and p8a #export knob_ugwp_version=1 @@ -117,38 +85,41 @@ if [[ "$gwd_opt" -eq 2 ]]; then #export do_ugwp_v1_orog_only=".false." #--used for UFS p8 - export knob_ugwp_version=0 + export knob_ugwp_version=1 export do_ugwp=".false." export do_tofd=".false." - export do_ugwp_v0=".true." - export do_ugwp_v1=".false." + export do_ugwp_v0=".false." + export do_ugwp_v1=".true." export do_ugwp_v0_orog_only=".false." export do_ugwp_v0_nst_only=".false." - export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ls_bl=".true." export do_gsl_drag_ss=".true." - export do_gsl_drag_tofd=".false." + export do_gsl_drag_tofd=".true." 
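# Note on the v17 defaults just above: knob_ugwp_version=1 selects the UGWPv1
# non-stationary GWD (its amplitude, knob_ugwp_tauamp, is set per resolution in
# config.ufs), while the do_gsl_drag_* flags enable the GSL orographic drag
# suite; when do_gsl_drag_ls_bl is true, the resolution-dependent cdmbgwd_gsl
# values from config.ufs replace cdmbgwd (see the block that follows).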
export do_ugwp_v1_orog_only=".false." - export launch_level=$(echo "$LEVS/2.35" |bc) + launch_level=$(echo "${LEVS}/2.35" |bc) + export launch_level + if [[ ${do_gsl_drag_ls_bl} == ".true." ]]; then + export cdmbgwd=${cdmbgwd_gsl} + fi fi # Sponge layer settings -export tau=0.0 ## JKH -export rf_cutoff=10 ## JKH -export fast_tau_w_sec=0.2 ## JKH +export tau=0. +export rf_cutoff=10. export d2_bg_k1=0.20 export d2_bg_k2=0.04 export dz_min=6 export n_sponge=42 -if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then +if (( LEVS == 128 )) && [[ "${CDUMP}" =~ "gdas" ]]; then export tau=5.0 export rf_cutoff=1.0e3 export d2_bg_k1=0.20 export d2_bg_k2=0.0 fi -# PBL/turbulence schemes +# PBL/turbulence schemes export hybedmf=".false." -if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then export satmedmf=".false." export isatmedmf=0 export shal_cnv=".false." @@ -164,7 +135,12 @@ else export isatmedmf=1 fi tbf="" -if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi +if [[ "${satmedmf}" == ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "${progsigma}" == ".true." ]]; then tbp="_progsigma" ; fi # Radiation options export IAER=1011 ; #spectral band mapping method for aerosol optical properties @@ -176,9 +152,9 @@ export isubc_sw=2 export isubc_lw=2 # RRTMGP radiation scheme -export do_RRTMGP=.false. -export doGP_cldoptics_LUT=.false. -export doGP_lwscat=.false. +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. # LSM configuration # NoahMP only export iopt_trs="2" # Convection Options: 2-SASAS, 3-GF export progsigma=".true." if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then - export progsigma=.false. export imfdeepcnv=5 export imfshalcnv=-1 ## JKH - no shallow GF elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then @@ -200,7 +175,7 @@ elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then export imfshalcnv=3 else export imfdeepcnv=2 - if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then export imfshalcnv=-1 else export imfshalcnv=2 @@ -218,68 +193,68 @@ export cal_pre=".true." export do_sat_adj=".false." export random_clds=".true." -if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr - export ncld=1 - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" - export nwat=2 - -elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 - export ncld=2 - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" - export nwat=6 - -elif [[ "$imp_physics" -eq 8 ]]; then # Thompson - export nwat=6 - - export cal_pre=".false." - export random_clds=".false." - export effr_in=".true." - export lradar=".false." - export ttendlim="-999" - export dt_inner=$((DELTIM/2)) - export sedi_semi=.true. - if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi - export decfl=10 - - export dddmp=0.1 - export d4_bg=0.12 - export ncld=2 - if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" ]] ; then - export ltaerosol=".true." - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" - else - export ltaerosol=".false." 
- export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" - #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" - fi - export hord_mt_nh_nonmono=5 - export hord_xx_nh_nonmono=5 - export vtdm4_nh_nonmono=0.02 - export nord=2 - export n_split=4 ## JKH - -elif [[ "$imp_physics" -eq 11 ]]; then # GFDL - export ncld=5 - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" - export nwat=6 - export dnats=1 - export cal_pre=".false." - export do_sat_adj=".true." - export random_clds=".false." - export lgfdlmprad=".true." - export effr_in=".true." - export reiflag=2 - - export hord_mt_nh_nonmono=5 - export hord_xx_nh_nonmono=5 - export vtdm4_nh_nonmono=0.02 - export nord=2 - export d4_bg=0.12 - export dddmp=0.1 +case ${imp_physics} in + 99) # ZhaoCarr + export ncld=1 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + ;; + 6) # WSM6 + export ncld=2 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_wsm6${tbf}${tbp}" + export nwat=6 + ;; + 8) # Thompson + export ncld=2 + export nwat=6 + + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_ugwpv1_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3_mynn" || + "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/ufs/fv3/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_thompson_noaero_tke${tbp}" + fi -else - echo "Unknown microphysics option, ABORT!" -fi + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".true." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "${sedi_semi}" == .true. ]]; then export dt_inner=${DELTIM} ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + ;; + 11) # GFDL + export ncld=5 + export FIELD_TABLE="${HOMEgfs}/parm/ufs/fv3/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + ;; + *) echo "Unknown microphysics option, ABORT!" ;; +esac # Stochastic physics export DO_SPPT=${DO_SPPT:-"NO"} @@ -289,9 +264,9 @@ export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} export DO_CA=${DO_CA:-"YES"} #coupling settings -export cplmode="nems_frac" -if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then - export cplmode="nems_orig" +export cplmode="ufs.frac" +if [[ "${FRAC_GRID:-".true."}" == ".false." ]]; then + export cplmode="ufs.nfrac" fi export psm_bc="1" @@ -317,70 +292,33 @@ export deflate_level=1 # Disable the use of coupler.res; get model start time from model_configure export USE_COUPLER_RES="NO" -if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters +if [[ "${CDUMP}" =~ "gdas" ]] ; then # GDAS cycle specific parameters # Variables used in DA cycling - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" - - # Write restart files, where $number is current model start time. - # restart_interval: $number - # number=0, writes out restart files at the end of forecast. 
- # number>0, writes out restart files at the frequency of $number and at the end of forecast. - # restart_interval: "$number -1" - # writes out restart files only once at $number forecast hour. - # restart_interval: "$number1 $number2 $number3 ..." - # writes out restart file at the specified forecast hours - export restart_interval=${restart_interval:-6} - - # For IAU, write restarts at beginning of window also - if [[ "$DOIAU" = "YES" ]]; then - export restart_interval="3 6" - fi + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table_da" - # Choose coupling with wave - if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + if [[ "${DOIAU}" == "YES" ]]; then + export restart_interval="3" + else + export restart_interval="6" + fi # Turn on dry mass adjustment in GDAS export adjust_dry_mass=".true." -elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters +elif [[ "${CDUMP}" =~ "gfs" ]] ; then # GFS cycle specific parameters # Write more variables to output - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + export DIAG_TABLE="${HOMEgfs}/parm/ufs/fv3/diag_table" # Write gfs restart files to rerun fcst from any break point - export restart_interval_gfs=${restart_interval_gfs:-0} - if [[ "$restart_interval_gfs" -le 0 ]]; then - export restart_interval="$FHMAX_GFS" - else - rst_list="" - IAU_OFFSET=${IAU_OFFSET:-0} - [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 - xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) - while [ $xfh -le $FHMAX_GFS ]; do - rst_list="$rst_list $xfh" - xfh=$((xfh+restart_interval_gfs)) - done - export restart_interval="$rst_list" - fi - - if [[ "$DO_AERO" = "YES" ]]; then - # Make sure a restart file is written at the cadence time - if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then - export restart_interval="$STEP_GFS $restart_interval" - fi - fi - - # Choose coupling with wave - if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then - export cplwav=".true." - fi + export restart_interval=${restart_interval_gfs:-12} # Turn off dry mass adjustment in GFS export adjust_dry_mass=".false." # Write each restart file in 16 small files to save time - if [[ "$CASE" = C768 ]]; then + if [[ "${CASE}" = C768 ]]; then export io_layout="4,4" else export io_layout="1,1" @@ -388,16 +326,6 @@ elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters fi -if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling - export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" - export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" - export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) - export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" - export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" - export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" - export dnats_aero="${aero_diag_tracers:-0}" -fi - # Remember config.efcs will over-ride these values for ensemble forecasts # if these variables are re-defined there. # Otherwise, the ensemble forecast will inherit from config.fcst diff --git a/parm/config/gfs/config.genesis b/parm/config/gfs/config.genesis new file mode 100644 index 0000000000..62a1bf88c0 --- /dev/null +++ b/parm/config/gfs/config.genesis @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis ########## +echo "BEGIN: config.genesis" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis + +# Get tropcy settings +. 
"${EXPDIR}/config.tropcy" + +echo "END: config.genesis" diff --git a/parm/config/gfs/config.genesis_fsu b/parm/config/gfs/config.genesis_fsu new file mode 100644 index 0000000000..13948592c4 --- /dev/null +++ b/parm/config/gfs/config.genesis_fsu @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.genesis_fsu ########## +echo "BEGIN: config.genesis_fsu" + +# Get task specific resources +. "${EXPDIR}/config.resources" genesis_fsu + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.genesis_fsu" diff --git a/parm/config/gfs/config.ice b/parm/config/gfs/config.ice index 7bc1f80966..205458020f 100644 --- a/parm/config/gfs/config.ice +++ b/parm/config/gfs/config.ice @@ -2,4 +2,8 @@ echo "BEGIN: config.ice" +# Override atm-only FV3 settings when ice model is on +export min_seaice="1.0e-6" +export use_cice_alb=".true." + echo "END: config.ice" diff --git a/parm/config/gfs/config.landanl b/parm/config/gfs/config.landanl index 89bb8a4b7b..70ebae7529 100644 --- a/parm/config/gfs/config.landanl +++ b/parm/config/gfs/config.landanl @@ -1,23 +1,34 @@ #! /usr/bin/env bash ########## config.landanl ########## -# configuration common to all land analysis tasks +# configuration common to land analysis tasks echo "BEGIN: config.landanl" -obs_list_name=gdas_land_adpsfc_only.yaml -if [[ "${cyc}" == "18" ]]; then +# Get task specific resources +. "${EXPDIR}/config.resources" landanl + +obs_list_name=gdas_land_gts_only.yaml +if [[ "${cyc}" = "18" ]]; then obs_list_name=gdas_land_prototype.yaml fi export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} -export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml -export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +# Name of the JEDI executable and its yaml template +export JEDIEXE="${HOMEgfs}/exec/fv3jedi_letkf.x" +export JEDIYAML="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml" + +# Ensemble member properties +export SNOWDEPTHVAR="snodl" +export BESTDDEV="30." # Background Error Std. Dev. for LETKFOI + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${HOMEgfs}/exec/apply_incr.exe" +export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/apply_incr_nml.j2" export io_layout_x=@IO_LAYOUT_X@ export io_layout_y=@IO_LAYOUT_Y@ -export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x - echo "END: config.landanl" diff --git a/parm/config/gfs/config.mos b/parm/config/gfs/config.mos new file mode 100644 index 0000000000..a74c7e7d21 --- /dev/null +++ b/parm/config/gfs/config.mos @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.mos ########## +echo "BEGIN: config.mos" + +# MOS package location +export HOMEgfs_mos=/lfs/h1/ops/prod/packages/gfs_mos.v${mos_ver} + +echo "END: config.mos" diff --git a/parm/config/gfs/config.mos_ext_grd_fcst b/parm/config/gfs/config.mos_ext_grd_fcst new file mode 100644 index 0000000000..db94af945f --- /dev/null +++ b/parm/config/gfs/config.mos_ext_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_fcst ########## +echo "BEGIN: config.mos_ext_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_fcst + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_fcst" diff --git a/parm/config/gfs/config.mos_ext_grd_prdgen b/parm/config/gfs/config.mos_ext_grd_prdgen new file mode 100644 index 0000000000..ade31b0c1a --- /dev/null +++ b/parm/config/gfs/config.mos_ext_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prdgen ########## +echo "BEGIN: config.mos_ext_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prdgen" diff --git a/parm/config/gfs/config.mos_ext_grd_prep b/parm/config/gfs/config.mos_ext_grd_prep new file mode 100644 index 0000000000..0ba14e2573 --- /dev/null +++ b/parm/config/gfs/config.mos_ext_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_grd_prep ########## +echo "BEGIN: config.mos_ext_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_grd_prep" diff --git a/parm/config/gfs/config.mos_ext_stn_fcst b/parm/config/gfs/config.mos_ext_stn_fcst new file mode 100644 index 0000000000..5b26d196f9 --- /dev/null +++ b/parm/config/gfs/config.mos_ext_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_fcst ########## +echo "BEGIN: config.mos_ext_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_fcst" diff --git a/parm/config/gfs/config.mos_ext_stn_prdgen b/parm/config/gfs/config.mos_ext_stn_prdgen new file mode 100644 index 0000000000..9f63eb56fd --- /dev/null +++ b/parm/config/gfs/config.mos_ext_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prdgen ########## +echo "BEGIN: config.mos_ext_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prdgen" diff --git a/parm/config/gfs/config.mos_ext_stn_prep b/parm/config/gfs/config.mos_ext_stn_prep new file mode 100644 index 0000000000..c443503f11 --- /dev/null +++ b/parm/config/gfs/config.mos_ext_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_ext_stn_prep ########## +echo "BEGIN: config.mos_ext_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_ext_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_ext_stn_prep" diff --git a/parm/config/gfs/config.mos_grd_fcst b/parm/config/gfs/config.mos_grd_fcst new file mode 100644 index 0000000000..bd0d50a04d --- /dev/null +++ b/parm/config/gfs/config.mos_grd_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_fcst ########## +echo "BEGIN: config.mos_grd_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_fcst" diff --git a/parm/config/gfs/config.mos_grd_prdgen b/parm/config/gfs/config.mos_grd_prdgen new file mode 100644 index 0000000000..dd9ce8bcd8 --- /dev/null +++ b/parm/config/gfs/config.mos_grd_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prdgen ########## +echo "BEGIN: config.mos_grd_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prdgen + +# Get MOS settings +. 
"${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prdgen" diff --git a/parm/config/gfs/config.mos_grd_prep b/parm/config/gfs/config.mos_grd_prep new file mode 100644 index 0000000000..8a3d334d0d --- /dev/null +++ b/parm/config/gfs/config.mos_grd_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_grd_prep ########## +echo "BEGIN: config.mos_grd_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_grd_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_grd_prep" diff --git a/parm/config/gfs/config.mos_stn_fcst b/parm/config/gfs/config.mos_stn_fcst new file mode 100644 index 0000000000..7cb266ea3a --- /dev/null +++ b/parm/config/gfs/config.mos_stn_fcst @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_fcst ########## +echo "BEGIN: config.mos_stn_fcst" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_fcst + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_fcst" diff --git a/parm/config/gfs/config.mos_stn_prdgen b/parm/config/gfs/config.mos_stn_prdgen new file mode 100644 index 0000000000..f92edbd0fd --- /dev/null +++ b/parm/config/gfs/config.mos_stn_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prdgen ########## +echo "BEGIN: config.mos_stn_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prdgen" diff --git a/parm/config/gfs/config.mos_stn_prep b/parm/config/gfs/config.mos_stn_prep new file mode 100644 index 0000000000..b236f42879 --- /dev/null +++ b/parm/config/gfs/config.mos_stn_prep @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_stn_prep ########## +echo "BEGIN: config.mos_stn_prep" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_stn_prep + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_stn_prep" diff --git a/parm/config/gfs/config.mos_wx_ext_prdgen b/parm/config/gfs/config.mos_wx_ext_prdgen new file mode 100644 index 0000000000..054cb950ad --- /dev/null +++ b/parm/config/gfs/config.mos_wx_ext_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_ext_prdgen ########## +echo "BEGIN: config.mos_wx_ext_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_ext_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_ext_prdgen" diff --git a/parm/config/gfs/config.mos_wx_prdgen b/parm/config/gfs/config.mos_wx_prdgen new file mode 100644 index 0000000000..d4481b65fc --- /dev/null +++ b/parm/config/gfs/config.mos_wx_prdgen @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.mos_wx_prdgen ########## +echo "BEGIN: config.mos_wx_prdgen" + +# Get task specific resources +. "${EXPDIR}/config.resources" mos_wx_prdgen + +# Get MOS settings +. "${EXPDIR}/config.mos" + +echo "END: config.mos_wx_prdgen" diff --git a/parm/config/gfs/config.npoess b/parm/config/gfs/config.npoess new file mode 100644 index 0000000000..9a388d2e6b --- /dev/null +++ b/parm/config/gfs/config.npoess @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.npoess ########## +# GFS NPOESS step specific + +echo "BEGIN: config.npoess" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" npoess + +echo "END: config.npoess" diff --git a/parm/config/gfs/config.nsst b/parm/config/gfs/config.nsst index 235c91f08b..db4367b2c0 100644 --- a/parm/config/gfs/config.nsst +++ b/parm/config/gfs/config.nsst @@ -12,7 +12,8 @@ export NST_MODEL=2 # nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, export NST_SPINUP=0 -if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then +cdate="${PDY}${cyc}" +if (( cdate < 2017072000 )); then export NST_SPINUP=1 fi @@ -28,6 +29,6 @@ export NST_GSI=3 # default 0: No NST info at all; # 2: Input NST info, used in CRTM simulation, no Tr analysis # 3: Input NST info, used in both CRTM simulation and Tr analysis export NSTINFO=0 # number of elements added in obs. data array (default = 0) -if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi +if (( NST_GSI > 0 )); then export NSTINFO=4; fi echo "END: config.nsst" diff --git a/parm/config/gfs/config.ocn b/parm/config/gfs/config.ocn index 7d14e3dd52..37f6a966aa 100644 --- a/parm/config/gfs/config.ocn +++ b/parm/config/gfs/config.ocn @@ -11,7 +11,6 @@ export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL # Templated variables in MOM_input_template export MOM6_USE_LI2016="True" # set to False for restart reproducibility export MOM6_THERMO_SPAN="False" -export MOM6_ALLOW_LANDMASK_CHANGES="False" if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then export ODA_INCUPD="True" diff --git a/parm/config/gfs/config.ocnanal b/parm/config/gfs/config.ocnanal index 36519c7f35..ec45ddd288 100644 --- a/parm/config/gfs/config.ocnanal +++ b/parm/config/gfs/config.ocnanal @@ -5,17 +5,16 @@ echo "BEGIN: config.ocnanal" -export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config" export OBS_LIST=@SOCA_OBS_LIST@ -[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml -export OBS_YAML=${OBS_LIST} -export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export OBS_YAML="${OBS_LIST}" +export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml" export SOCA_INPUT_FIX_DIR=@SOCA_INPUT_FIX_DIR@ export SOCA_VARS=tocn,socn,ssh export SABER_BLOCKS_YAML=@SABER_BLOCKS_YAML@ export SOCA_NINNER=@SOCA_NINNER@ export CASE_ANL=@CASE_ANL@ -export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin # R2D2 diff --git a/parm/config/gfs/config.ocnpost b/parm/config/gfs/config.ocnpost index 89304df7f4..851c476e6c 100644 --- a/parm/config/gfs/config.ocnpost +++ b/parm/config/gfs/config.ocnpost @@ -5,12 +5,25 @@ echo "BEGIN: config.ocnpost" # Get task specific resources -source $EXPDIR/config.resources ocnpost +source "${EXPDIR}/config.resources" ocnpost -# Convert nemsio files to grib files using post job +# Convert netcdf files to grib files using post job #------------------------------------------- +case "${OCNRES}" in + "025") export MAKE_OCN_GRIB="YES";; + "050") export MAKE_OCN_GRIB="NO";; + "100") export MAKE_OCN_GRIB="NO";; + "500") export MAKE_OCN_GRIB="NO";; + *) export MAKE_OCN_GRIB="NO";; +esac -# No. of concurrent post jobs [0 implies sequential] -export NPOSTGRP=2 +if [[ "${machine}" = "WCOSS2" ]] || [[ "${machine}" = "HERCULES" ]]; then + #Currently the conversion to netcdf uses NCL which is not on WCOSS2 or HERCULES + #This should be removed when this is updated + export MAKE_OCN_GRIB="NO" +fi + +# No. 
of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 echo "END: config.ocnpost" diff --git a/parm/config/gfs/config.prep b/parm/config/gfs/config.prep index b05b82a43e..d5ac1925f7 100644 --- a/parm/config/gfs/config.prep +++ b/parm/config/gfs/config.prep @@ -13,10 +13,7 @@ export cdate10=${PDY}${cyc} # Relocation and syndata QC export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} -[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" -export DO_RELOCATE="NO" export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" -export SENDCOM=YES export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} diff --git a/parm/config/gfs/config.prepatmiodaobs b/parm/config/gfs/config.prepatmiodaobs new file mode 100644 index 0000000000..ed9b246120 --- /dev/null +++ b/parm/config/gfs/config.prepatmiodaobs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.prepatmiodaobs ########## +# Atm Obs Prep specific + +echo "BEGIN: config.prepatmiodaobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" prepatmiodaobs + +export BUFR2IODASH="${HOMEgfs}/ush/run_bufr2ioda.py" +export IODAPARM="${HOMEgfs}/sorc/gdas.cd/parm/ioda/bufr2ioda" + +echo "END: config.prepatmiodaobs" diff --git a/parm/config/gfs/config.preplandobs b/parm/config/gfs/config.preplandobs index d69b0f7f59..20ae20b5ad 100644 --- a/parm/config/gfs/config.preplandobs +++ b/parm/config/gfs/config.preplandobs @@ -8,6 +8,8 @@ echo "BEGIN: config.preplandobs" # Get task specific resources . "${EXPDIR}/config.resources" preplandobs +export GTS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_gts.yaml" +export BUFR2IODAX="${HOMEgfs}/exec/bufr2ioda.x" export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" diff --git a/parm/config/gfs/config.prepoceanobs b/parm/config/gfs/config.prepoceanobs new file mode 100644 index 0000000000..068ecff1ad --- /dev/null +++ b/parm/config/gfs/config.prepoceanobs @@ -0,0 +1,17 @@ +#!/bin/bash + +########## config.prepoceanobs ########## + +echo "BEGIN: config.prepoceanobs" + +export OCNOBS2IODAEXEC=${HOMEgfs}/sorc/gdas.cd/build/bin/gdas_obsprovider2ioda.x + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBSPROC_YAML=@OBSPROC_CONFIG@ +export OBS_LIST=@SOCA_OBS_LIST@ +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} + +# Get task specific resources +. "${EXPDIR}/config.resources" prepoceanobs +echo "END: config.prepoceanobs" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 38efea7882..b3319ecc1b 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -8,20 +8,22 @@ if [[ $# -ne 1 ]]; then echo "Must specify an input task argument to set resource variables!" 
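# A minimal sketch (made-up values, hypothetical *_example names) of the
# per-node sizing idiom used throughout the resource blocks below: tasks per
# node are cores per node divided by threads per task, so the bc pipeline and
# plain bash integer arithmetic are interchangeable here:
nth_example=4; npe_node_max_example=128
npe_node_example=$(echo "${npe_node_max_example} / ${nth_example}" | bc)  # 32
npe_node_example=$(( npe_node_max_example / nth_example ))                # also 32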
echo "argument can be any one of the following:" - echo "coupled_ic aerosol_init" - echo "prep preplandobs" + echo "stage_ic aerosol_init" + echo "prep preplandobs prepatmiodaobs" echo "atmanlinit atmanlrun atmanlfinal" echo "atmensanlinit atmensanlrun atmensanlfinal" - echo "landanlinit landanlrun landanlfinal" + echo "landanl" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "anal sfcanl analcalc analdiag fcst echgres" + echo "upp atmos_products" + echo "tracker genesis genesis_fsu" + echo "verfozn verfrad vminmon fit2obs metp arch cleanup" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" - echo "postsnd awips gempak" - echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" - echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + echo "postsnd awips gempak npoess" + echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" exit 1 fi @@ -33,28 +35,31 @@ echo "BEGIN: config.resources" if [[ "${machine}" = "WCOSS2" ]]; then export npe_node_max=128 elif [[ "${machine}" = "JET" ]]; then - if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then - export npe_node_max=16 - elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then export npe_node_max=24 - elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then export npe_node_max=16 - elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then export npe_node_max=40 fi -elif [[ "${machine}" = "HERA" ]]; then +elif [[ ${machine} = "HERA" ]]; then export npe_node_max=40 -elif [[ "${machine}" = "S4" ]]; then - if [[ "${PARTITION_BATCH}" = "s4" ]]; then +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then export npe_node_max=32 - elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then export npe_node_max=20 fi -elif [[ "${machine}" = "ORION" ]]; then +elif [[ "${machine}" = "AWSPW" ]]; then + export PARTITION_BATCH="compute" + export npe_node_max=40 +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "HERCULES" ]]; then export npe_node_max=40 fi -if [[ "${step}" = "prep" ]]; then +if [[ ${step} = "prep" ]]; then export wtime_prep='00:30:00' export npe_prep=4 export npe_node_prep=2 @@ -73,6 +78,13 @@ elif [[ "${step}" = "preplandobs" ]]; then npe_node_preplandobs=1 export npe_node_preplandobs +elif [[ "${step}" = "prepatmiodaobs" ]]; then + export wtime_prepatmiodaobs="00:10:00" + export npe_prepatmiodaobs=1 + export nth_prepatmiodaobs=1 + npe_node_prepatmiodaobs=$(echo "${npe_node_max} / ${nth_prepatmiodaobs}" | bc) + export npe_node_prepatmiodaobs + elif [[ "${step}" = "aerosol_init" ]]; then export wtime_aerosol_init="00:05:00" export npe_aerosol_init=1 @@ -82,7 +94,7 @@ elif [[ "${step}" = "aerosol_init" ]]; then export NTASKS=${npe_aerosol_init} export memory_aerosol_init="6G" -elif [[ "${step}" = "waveinit" ]]; then +elif [[ ${step} = "waveinit" ]]; then export wtime_waveinit="00:10:00" export npe_waveinit=12 @@ -92,7 +104,7 @@ elif [[ "${step}" = "waveinit" ]]; then export NTASKS=${npe_waveinit} export memory_waveinit="2GB" -elif [[ "${step}" = "waveprep" ]]; then +elif [[ ${step} = "waveprep" ]]; 
then export wtime_waveprep="00:10:00" export npe_waveprep=5 @@ -108,7 +120,7 @@ elif [[ "${step}" = "waveprep" ]]; then export memory_waveprep="100GB" export memory_waveprep_gfs="150GB" -elif [[ "${step}" = "wavepostsbs" ]]; then +elif [[ ${step} = "wavepostsbs" ]]; then export wtime_wavepostsbs="00:20:00" export wtime_wavepostsbs_gfs="03:00:00" @@ -120,7 +132,7 @@ elif [[ "${step}" = "wavepostsbs" ]]; then export memory_wavepostsbs="10GB" export memory_wavepostsbs_gfs="10GB" -elif [[ "${step}" = "wavepostbndpnt" ]]; then +elif [[ ${step} = "wavepostbndpnt" ]]; then export wtime_wavepostbndpnt="01:00:00" export npe_wavepostbndpnt=240 @@ -130,7 +142,7 @@ elif [[ "${step}" = "wavepostbndpnt" ]]; then export NTASKS=${npe_wavepostbndpnt} export is_exclusive=True -elif [[ "${step}" = "wavepostbndpntbll" ]]; then +elif [[ ${step} = "wavepostbndpntbll" ]]; then export wtime_wavepostbndpntbll="01:00:00" export npe_wavepostbndpntbll=448 @@ -140,9 +152,9 @@ elif [[ "${step}" = "wavepostbndpntbll" ]]; then export NTASKS=${npe_wavepostbndpntbll} export is_exclusive=True -elif [[ "${step}" = "wavepostpnt" ]]; then +elif [[ ${step} = "wavepostpnt" ]]; then - export wtime_wavepostpnt="01:30:00" + export wtime_wavepostpnt="04:00:00" export npe_wavepostpnt=200 export nth_wavepostpnt=1 npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) @@ -150,7 +162,7 @@ elif [[ "${step}" = "wavepostpnt" ]]; then export NTASKS=${npe_wavepostpnt} export is_exclusive=True -elif [[ "${step}" = "wavegempak" ]]; then +elif [[ ${step} = "wavegempak" ]]; then export wtime_wavegempak="02:00:00" export npe_wavegempak=1 @@ -160,7 +172,7 @@ elif [[ "${step}" = "wavegempak" ]]; then export NTASKS=${npe_wavegempak} export memory_wavegempak="1GB" -elif [[ "${step}" = "waveawipsbulls" ]]; then +elif [[ ${step} = "waveawipsbulls" ]]; then export wtime_waveawipsbulls="00:20:00" export npe_waveawipsbulls=1 @@ -170,7 +182,7 @@ elif [[ "${step}" = "waveawipsbulls" ]]; then export NTASKS=${npe_waveawipsbulls} export is_exclusive=True -elif [[ "${step}" = "waveawipsgridded" ]]; then +elif [[ ${step} = "waveawipsgridded" ]]; then export wtime_waveawipsgridded="02:00:00" export npe_waveawipsgridded=1 @@ -182,6 +194,15 @@ elif [[ "${step}" = "waveawipsgridded" ]]; then elif [[ "${step}" = "atmanlinit" ]]; then + # make below case dependent later + export layout_x=1 + export layout_y=1 + + layout_gsib_x=$(echo "${layout_x} * 3" | bc) + export layout_gsib_x + layout_gsib_y=$(echo "${layout_y} * 2" | bc) + export layout_gsib_y + export wtime_atmanlinit="00:10:00" export npe_atmanlinit=1 export nth_atmanlinit=1 @@ -215,7 +236,7 @@ elif [[ "${step}" = "atmanlfinal" ]]; then export npe_node_atmanlfinal export is_exclusive=True -elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then +elif [[ "${step}" = "landanl" ]]; then # below lines are for creating JEDI YAML case ${CASE} in C768) @@ -238,39 +259,34 @@ elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "l export layout_x export layout_y - if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then - declare -x "wtime_${step}"="00:10:00" - declare -x "npe_${step}"=1 - declare -x "nth_${step}"=1 - temp_stepname="nth_${step}" - declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" - declare -x "memory_${step}"="3072M" - elif [[ "${step}" = "landanlrun" ]]; then - export wtime_landanlrun="00:30:00" - npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) - export 
npe_landanlrun - export nth_landanlrun=1 - npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) - export npe_node_landanlrun - export is_exclusive=True - fi + export wtime_landanl="00:15:00" + npe_landanl=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanl + export nth_landanl=1 + npe_node_landanl=$(echo "${npe_node_max} / ${nth_landanl}" | bc) + export npe_node_landanl elif [[ "${step}" = "aeroanlinit" ]]; then # below lines are for creating JEDI YAML case ${CASE} in - C768) - layout_x=6 - layout_y=6 + C768) + layout_x=8 + layout_y=8 ;; C384) - layout_x=5 - layout_y=5 + layout_x=8 + layout_y=8 ;; - C192 | C96 | C48) + C192 | C96) layout_x=8 layout_y=8 ;; + C48 ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; *) echo "FATAL ERROR: Resolution not supported for aerosol analysis'" exit 1 @@ -289,18 +305,23 @@ elif [[ "${step}" = "aeroanlinit" ]]; then elif [[ "${step}" = "aeroanlrun" ]]; then case ${CASE} in - C768) - layout_x=6 - layout_y=6 + C768) + layout_x=8 + layout_y=8 ;; C384) - layout_x=5 - layout_y=5 + layout_x=8 + layout_y=8 ;; - C192 | C96 | C48) + C192 | C96) layout_x=8 layout_y=8 ;; + C48 ) + # this case is for testing only + layout_x=1 + layout_y=1 + ;; *) echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" exit 1 @@ -338,12 +359,25 @@ elif [[ "${step}" = "ocnanalprep" ]]; then export npe_node_ocnanalprep export memory_ocnanalprep="24GB" +elif [[ "${step}" = "prepoceanobs" ]]; then + + export wtime_prepoceanobs="00:10:00" + export npe_prepoceanobs=1 + export nth_prepoceanobs=1 + npe_node_prepoceanobs=$(echo "${npe_node_max} / ${nth_prepoceanobs}" | bc) + export npe_node_prepoceanobs + export memory_prepoceanobs="24GB" + + elif [[ "${step}" = "ocnanalbmat" ]]; then npes=16 case ${CASE} in C384) npes=480 ;; + C96) + npes=16 + ;; C48) npes=16 ;; @@ -364,21 +398,27 @@ elif [[ "${step}" = "ocnanalrun" ]]; then case ${CASE} in C384) npes=480 + memory_ocnanalrun="128GB" + ;; + C96) + npes=16 ;; C48) npes=16 + memory_ocnanalrun="64GB" ;; *) echo "FATAL: Resolution not supported'" exit 1 esac - export wtime_ocnanalrun="00:30:00" + export wtime_ocnanalrun="00:15:00" export npe_ocnanalrun=${npes} - export nth_ocnanalrun=1 + export nth_ocnanalrun=2 export is_exclusive=True npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) export npe_node_ocnanalrun + export memory_ocnanalrun elif [[ "${step}" = "ocnanalchkpt" ]]; then @@ -391,6 +431,9 @@ elif [[ "${step}" = "ocnanalchkpt" ]]; then C384) export memory_ocnanalchkpt="128GB" ;; + C96) + export memory_ocnanalchkpt="32GB" + ;; C48) export memory_ocnanalchkpt="32GB" ;; @@ -416,7 +459,7 @@ elif [[ "${step}" = "ocnanalvrfy" ]]; then export npe_node_ocnanalvrfy export memory_ocnanalvrfy="24GB" -elif [[ "${step}" = "anal" ]]; then +elif [[ ${step} = "anal" ]]; then export wtime_anal="00:50:00" export wtime_anal_gfs="00:40:00" @@ -428,15 +471,15 @@ elif [[ "${step}" = "anal" ]]; then export nth_anal=8 export nth_anal_gfs=8 fi - if [[ "${CASE}" = "C384" ]]; then + if [[ ${CASE} = "C384" ]]; then export npe_anal=160 export npe_anal_gfs=160 export nth_anal=10 export nth_anal_gfs=10 - if [[ "${machine}" = "S4" ]]; then + if [[ ${machine} = "S4" ]]; then #On the S4-s4 partition, this is accomplished by increasing the task #count to a multiple of 32 - if [[ "${PARTITION_BATCH}" = "s4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then export npe_anal=416 export npe_anal_gfs=416 fi @@ -446,17 +489,17 @@ elif [[ "${step}" = "anal" ]]; then export wtime_anal="02:00:00" fi fi - 
if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_anal=84 export npe_anal_gfs=84 - if [[ "${machine}" = "S4" ]]; then + if [[ ${machine} = "S4" ]]; then export nth_anal=4 export nth_anal_gfs=4 #Adjust job count for S4 - if [[ "${PARTITION_BATCH}" = "s4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then export npe_anal=88 export npe_anal_gfs=88 - elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then export npe_anal=90 export npe_anal_gfs=90 fi @@ -469,7 +512,7 @@ elif [[ "${step}" = "anal" ]]; then export npe_node_cycle export is_exclusive=True -elif [[ "${step}" = "analcalc" ]]; then +elif [[ ${step} = "analcalc" ]]; then export wtime_analcalc="00:10:00" export npe_analcalc=127 @@ -480,8 +523,9 @@ elif [[ "${step}" = "analcalc" ]]; then npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) export npe_node_analcalc export is_exclusive=True + export memory_analcalc="48GB" -elif [[ "${step}" = "analdiag" ]]; then +elif [[ ${step} = "analdiag" ]]; then export wtime_analdiag="00:15:00" export npe_analdiag=96 # Should be at least twice npe_ediag @@ -490,7 +534,7 @@ elif [[ "${step}" = "analdiag" ]]; then export npe_node_analdiag export memory_analdiag="48GB" -elif [[ "${step}" = "sfcanl" ]]; then +elif [[ ${step} = "sfcanl" ]]; then export wtime_sfcanl="00:10:00" export npe_sfcanl=6 @@ -549,43 +593,47 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit # TODO: Update reference when moved to ufs-weather-model RTD MEDTHREADS=${nthreads_mediator:-1} - MEDPETS=${MEDPETS:-ATMPETS} + MEDPETS=${MEDPETS:-${FV3PETS}} [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 export MEDPETS MEDTHREADS echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + CHMPETS=0; CHMTHREADS=0 if [[ "${DO_AERO}" = "YES" ]]; then # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). 
(( CHMTHREADS = ATMTHREADS )) (( CHMPETS = FV3PETS )) # Do not add to NTASKS_TOT - export CHMPETS CHMTHREADS echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" fi + export CHMPETS CHMTHREADS + + WAVPETS=0; WAVTHREADS=0 if [[ "${DO_WAVE}" = "YES" ]]; then (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) (( WAVTHREADS = nthreads_ww3 )) - export WAVPETS WAVTHREADS echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) fi + export WAVPETS WAVTHREADS + + OCNPETS=0; OCNTHREADS=0 if [[ "${DO_OCN}" = "YES" ]]; then (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) (( OCNTHREADS = nthreads_mom6 )) - export OCNPETS OCNTHREADS echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) fi + export OCNPETS OCNTHREADS + + ICEPETS=0; ICETHREADS=0 if [[ "${DO_ICE}" = "YES" ]]; then (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) (( ICETHREADS = nthreads_cice6 )) - export ICEPETS ICETHREADS echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) fi + export ICEPETS ICETHREADS + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" @@ -601,15 +649,18 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then done - case ${CASE} in + case "${CASE}" in "C48" | "C96" | "C192") declare -x "wtime_${step}"="00:30:00" declare -x "wtime_${step}_gfs"="03:00:00" ;; - "C384" | "C768" | "C1152") + "C384") + declare -x "wtime_${step}"="00:20:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + "C768" | "C1152") declare -x "wtime_${step}"="01:00:00" - #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" - declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + declare -x "wtime_${step}_gfs"="06:00:00" ;; *) echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" @@ -620,104 +671,107 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then unset _CDUMP _CDUMP_LIST unset NTASKS_TOT -elif [[ "${step}" = "ocnpost" ]]; then +elif [[ ${step} = "ocnpost" ]]; then export wtime_ocnpost="00:30:00" export npe_ocnpost=1 export npe_node_ocnpost=1 export nth_ocnpost=1 export memory_ocnpost="96G" - if [[ "${machine}" == "JET" ]]; then + if [[ ${machine} == "JET" ]]; then # JET only has 88GB of requestable memory per node # so a second node is required to meet the requirement npe_ocnpost=2 fi -elif [[ "${step}" = "post" ]]; then +elif [[ "${step}" = "upp" ]]; then - export wtime_post="00:12:00" - export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller - #JKH export wtime_post_gfs="01:00:00" - #JKH export npe_post=126 - export npe_post=${npe_node_max} ## JKH - change to use 1 node for post - res=$(echo "${CASE}" | cut -c2-) - if (( npe_post > res )); then - export npe_post=${res} + case "${CASE}" in + "C48" | "C96") + export npe_upp=${CASE:1} + ;; + "C192" | "C384" | "C768") + export npe_upp=120 + ;; + *) + echo "FATAL ERROR: Resolution '${CASE}' not supported for UPP" + exit 1 + ;; + esac + export npe_node_upp=${npe_upp} + + export nth_upp=1 + + export wtime_upp="00:15:00" + if [[ "${npe_node_upp}" -gt "${npe_node_max}" ]]; then + export npe_node_upp=${npe_node_max} fi - export nth_post=1 - export npe_node_post=${npe_post} - export npe_node_post_gfs=${npe_post} - export npe_node_dwn=${npe_node_max} - if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi - if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi export is_exclusive=True -elif [[ "${step}" = "wafs" ]]; then - - export
wtime_wafs="00:30:00" - export npe_wafs=1 - export npe_node_wafs=${npe_wafs} - export nth_wafs=1 - export memory_wafs="1GB" - -elif [[ "${step}" = "wafsgcip" ]]; then - - export wtime_wafsgcip="00:30:00" - export npe_wafsgcip=2 - export nth_wafsgcip=1 - export npe_node_wafsgcip=1 - export memory_wafsgcip="50GB" - -elif [[ "${step}" = "wafsgrib2" ]]; then - - export wtime_wafsgrib2="00:30:00" - export npe_wafsgrib2=18 - export nth_wafsgrib2=1 - npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) - export npe_node_wafsgrib2 - export memory_wafsgrib2="80GB" - -elif [[ "${step}" = "wafsblending" ]]; then - - export wtime_wafsblending="00:30:00" - export npe_wafsblending=1 - export nth_wafsblending=1 - npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) - export npe_node_wafsblending - export memory_wafsblending="15GB" - -elif [[ "${step}" = "wafsgrib20p25" ]]; then - - export wtime_wafsgrib20p25="00:30:00" - export npe_wafsgrib20p25=11 - export nth_wafsgrib20p25=1 - npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) - export npe_node_wafsgrib20p25 - export memory_wafsgrib20p25="80GB" - -elif [[ "${step}" = "wafsblending0p25" ]]; then - - export wtime_wafsblending0p25="00:30:00" - export npe_wafsblending0p25=1 - export nth_wafsblending0p25=1 - npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) - export npe_node_wafsblending0p25 - export memory_wafsblending0p25="15GB" - -elif [[ "${step}" = "vrfy" ]]; then - - export wtime_vrfy="03:00:00" - export wtime_vrfy_gfs="06:00:00" - export npe_vrfy=3 - export nth_vrfy=1 - export npe_node_vrfy=1 - export npe_vrfy_gfs=1 - export npe_node_vrfy_gfs=1 - if [[ "${machine}" == "HERA" ]]; then - export memory_vrfy="16384M" - fi +elif [[ ${step} = "atmos_products" ]]; then + + export wtime_atmos_products="00:15:00" + export npe_atmos_products=24 + export nth_atmos_products=1 + export npe_node_atmos_products="${npe_atmos_products}" + export wtime_atmos_products_gfs="${wtime_atmos_products}" + export npe_atmos_products_gfs="${npe_atmos_products}" + export nth_atmos_products_gfs="${nth_atmos_products}" + export npe_node_atmos_products_gfs="${npe_node_atmos_products}" export is_exclusive=True +elif [[ ${step} = "verfozn" ]]; then + + export wtime_verfozn="00:05:00" + export npe_verfozn=1 + export nth_verfozn=1 + export npe_node_verfozn=1 + export memory_verfozn="1G" + +elif [[ ${step} = "verfrad" ]]; then + + export wtime_verfrad="00:40:00" + export npe_verfrad=1 + export nth_verfrad=1 + export npe_node_verfrad=1 + export memory_verfrad="5G" + +elif [[ ${step} = "vminmon" ]]; then + + export wtime_vminmon="00:05:00" + export npe_vminmon=1 + export nth_vminmon=1 + export npe_node_vminmon=1 + export wtime_vminmon_gfs="00:05:00" + export npe_vminmon_gfs=1 + export nth_vminmon_gfs=1 + export npe_node_vminmon_gfs=1 + export memory_vminmon="1G" + +elif [[ ${step} = "tracker" ]]; then + + export wtime_tracker="00:10:00" + export npe_tracker=1 + export nth_tracker=1 + export npe_node_tracker=1 + export memory_tracker="4G" + +elif [[ ${step} = "genesis" ]]; then + + export wtime_genesis="00:25:00" + export npe_genesis=1 + export nth_genesis=1 + export npe_node_genesis=1 + export memory_genesis="4G" + +elif [[ ${step} = "genesis_fsu" ]]; then + + export wtime_genesis_fsu="00:10:00" + export npe_genesis_fsu=1 + export nth_genesis_fsu=1 + export npe_node_genesis_fsu=1 + export memory_genesis_fsu="4G" + elif [[ "${step}" = "fit2obs" ]]; then export wtime_fit2obs="00:20:00" @@ -725,7 
+779,7 @@ elif [[ "${step}" = "fit2obs" ]]; then export nth_fit2obs=1 export npe_node_fit2obs=1 export memory_fit2obs="20G" - if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi elif [[ "${step}" = "metp" ]]; then @@ -738,7 +792,7 @@ elif [[ "${step}" = "metp" ]]; then export npe_node_metp_gfs=4 export is_exclusive=True -elif [[ "${step}" = "echgres" ]]; then +elif [[ ${step} = "echgres" ]]; then export wtime_echgres="00:10:00" export npe_echgres=3 @@ -748,7 +802,7 @@ elif [[ "${step}" = "echgres" ]]; then export memory_echgres="200GB" fi -elif [[ "${step}" = "init" ]]; then +elif [[ ${step} = "init" ]]; then export wtime_init="00:30:00" export npe_init=24 @@ -756,21 +810,21 @@ elif [[ "${step}" = "init" ]]; then export npe_node_init=6 export memory_init="70G" -elif [[ "${step}" = "init_chem" ]]; then +elif [[ ${step} = "init_chem" ]]; then export wtime_init_chem="00:30:00" export npe_init_chem=1 export npe_node_init_chem=1 export is_exclusive=True -elif [[ "${step}" = "mom6ic" ]]; then +elif [[ ${step} = "mom6ic" ]]; then export wtime_mom6ic="00:30:00" export npe_mom6ic=24 export npe_node_mom6ic=24 export is_exclusive=True -elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then eval "export wtime_${step}='06:00:00'" eval "export npe_${step}=1" @@ -781,16 +835,27 @@ elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then eval "export memory_${step}=50GB" fi -elif [[ "${step}" = "coupled_ic" ]]; then +elif [[ ${step} == "cleanup" ]]; then + export wtime_cleanup="01:00:00" + export npe_cleanup=1 + export npe_node_cleanup=1 + export nth_cleanup=1 + export memory_cleanup="4096M" + +elif [[ ${step} = "stage_ic" ]]; then - export wtime_coupled_ic="00:15:00" - export npe_coupled_ic=1 - export npe_node_coupled_ic=1 - export nth_coupled_ic=1 + export wtime_stage_ic="00:15:00" + export npe_stage_ic=1 + export npe_node_stage_ic=1 + export nth_stage_ic=1 export is_exclusive=True elif [[ "${step}" = "atmensanlinit" ]]; then + # make below case dependent later + export layout_x=1 + export layout_y=1 + export wtime_atmensanlinit="00:10:00" export npe_atmensanlinit=1 export nth_atmensanlinit=1 @@ -824,15 +889,15 @@ elif [[ "${step}" = "atmensanlfinal" ]]; then export npe_node_atmensanlfinal export is_exclusive=True -elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then export wtime_eobs="00:15:00" export wtime_eomg="01:00:00" - if [[ "${CASE}" = "C768" ]]; then + if [[ ${CASE} = "C768" ]]; then export npe_eobs=200 - elif [[ "${CASE}" = "C384" ]]; then + elif [[ ${CASE} = "C384" ]]; then export npe_eobs=100 - elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_eobs=40 fi export npe_eomg=${npe_eobs} @@ -844,11 +909,11 @@ elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then export is_exclusive=True #The number of tasks and cores used must be the same for eobs #For S4, this is accomplished by running 10 tasks/node - if [[ "${machine}" = "S4" ]]; then + if [[ ${machine} = "S4" ]]; then export npe_node_eobs=10 fi -elif [[ "${step}" = "ediag" ]]; then +elif [[ ${step} = "ediag" ]]; then export wtime_ediag="00:15:00" export npe_ediag=48 @@ -857,29 +922,27 @@ elif [[ "${step}" = "ediag" ]]; then export npe_node_ediag export 
memory_ediag="30GB" -elif [[ "${step}" = "eupd" ]]; then +elif [[ ${step} = "eupd" ]]; then export wtime_eupd="00:30:00" - if [[ "${CASE}" = "C768" ]]; then + if [[ ${CASE} = "C768" ]]; then export npe_eupd=480 export nth_eupd=6 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 fi - elif [[ "${CASE}" = "C384" ]]; then + elif [[ ${CASE} = "C384" ]]; then export npe_eupd=270 - export nth_eupd=2 + export nth_eupd=8 if [[ "${machine}" = "WCOSS2" ]]; then export npe_eupd=315 export nth_eupd=14 - elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then - export nth_eupd=8 - elif [[ "${machine}" = "S4" ]]; then + elif [[ ${machine} = "S4" ]]; then export npe_eupd=160 export nth_eupd=2 fi - elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_eupd=42 export nth_eupd=2 if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then @@ -890,13 +953,13 @@ elif [[ "${step}" = "eupd" ]]; then export npe_node_eupd export is_exclusive=True -elif [[ "${step}" = "ecen" ]]; then +elif [[ ${step} = "ecen" ]]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=4 if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi - if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) export npe_node_ecen export nth_cycle=${nth_ecen} @@ -904,9 +967,9 @@ elif [[ "${step}" = "ecen" ]]; then export npe_node_cycle export is_exclusive=True -elif [[ "${step}" = "esfc" ]]; then +elif [[ ${step} = "esfc" ]]; then - export wtime_esfc="00:06:00" + export wtime_esfc="00:08:00" export npe_esfc=80 export nth_esfc=1 npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) @@ -916,19 +979,16 @@ elif [[ "${step}" = "esfc" ]]; then export npe_node_cycle export memory_esfc="80GB" -elif [[ "${step}" = "epos" ]]; then +elif [[ ${step} = "epos" ]]; then export wtime_epos="00:15:00" export npe_epos=80 - export nth_epos=4 - if [[ "${machine}" == "HERA" ]]; then - export nth_epos=6 - fi + export nth_epos=1 npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) export npe_node_epos export is_exclusive=True -elif [[ "${step}" = "postsnd" ]]; then +elif [[ ${step} = "postsnd" ]]; then export wtime_postsnd="02:00:00" export npe_postsnd=40 @@ -943,7 +1003,7 @@ elif [[ "${step}" = "postsnd" ]]; then fi export is_exclusive=True -elif [[ "${step}" = "awips" ]]; then +elif [[ ${step} = "awips" ]]; then export wtime_awips="03:30:00" export npe_awips=1 @@ -951,7 +1011,15 @@ elif [[ "${step}" = "awips" ]]; then export nth_awips=1 export memory_awips="3GB" -elif [[ "${step}" = "gempak" ]]; then +elif [[ ${step} = "npoess" ]]; then + + export wtime_npoess="03:30:00" + export npe_npoess=1 + export npe_node_npoess=1 + export nth_npoess=1 + export memory_npoess="3GB" + +elif [[ ${step} = "gempak" ]]; then export wtime_gempak="03:00:00" export npe_gempak=2 @@ -962,6 +1030,153 @@ elif [[ "${step}" = "gempak" ]]; then export memory_gempak="4GB" export memory_gempak_gfs="2GB" +elif [[ ${step} = "mos_stn_prep" ]]; then + + export wtime_mos_stn_prep="00:10:00" + export npe_mos_stn_prep=3 + export npe_node_mos_stn_prep=3 + export nth_mos_stn_prep=1 + export memory_mos_stn_prep="5GB" + export NTASK="${npe_mos_stn_prep}" + export PTILE="${npe_node_mos_stn_prep}" + +elif 
[[ ${step} = "mos_grd_prep" ]]; then + + export wtime_mos_grd_prep="00:10:00" + export npe_mos_grd_prep=4 + export npe_node_mos_grd_prep=4 + export nth_mos_grd_prep=1 + export memory_mos_grd_prep="16GB" + export NTASK="${npe_mos_grd_prep}" + export PTILE="${npe_node_mos_grd_prep}" + +elif [[ ${step} = "mos_ext_stn_prep" ]]; then + + export wtime_mos_ext_stn_prep="00:15:00" + export npe_mos_ext_stn_prep=2 + export npe_node_mos_ext_stn_prep=2 + export nth_mos_ext_stn_prep=1 + export memory_mos_ext_stn_prep="5GB" + export NTASK="${npe_mos_ext_stn_prep}" + export PTILE="${npe_node_mos_ext_stn_prep}" + +elif [[ ${step} = "mos_ext_grd_prep" ]]; then + + export wtime_mos_ext_grd_prep="00:10:00" + export npe_mos_ext_grd_prep=7 + export npe_node_mos_ext_grd_prep=7 + export nth_mos_ext_grd_prep=1 + export memory_mos_ext_grd_prep="3GB" + export NTASK="${npe_mos_ext_grd_prep}" + export PTILE="${npe_node_mos_ext_grd_prep}" + +elif [[ ${step} = "mos_stn_fcst" ]]; then + + export wtime_mos_stn_fcst="00:10:00" + export npe_mos_stn_fcst=5 + export npe_node_mos_stn_fcst=5 + export nth_mos_stn_fcst=1 + export memory_mos_stn_fcst="40GB" + export NTASK="${npe_mos_stn_fcst}" + export PTILE="${npe_node_mos_stn_fcst}" + +elif [[ ${step} = "mos_grd_fcst" ]]; then + + export wtime_mos_grd_fcst="00:10:00" + export npe_mos_grd_fcst=7 + export npe_node_mos_grd_fcst=7 + export nth_mos_grd_fcst=1 + export memory_mos_grd_fcst="50GB" + export NTASK="${npe_mos_grd_fcst}" + export PTILE="${npe_node_mos_grd_fcst}" + +elif [[ ${step} = "mos_ext_stn_fcst" ]]; then + + export wtime_mos_ext_stn_fcst="00:20:00" + export npe_mos_ext_stn_fcst=3 + export npe_node_mos_ext_stn_fcst=3 + export nth_mos_ext_stn_fcst=1 + export memory_mos_ext_stn_fcst="50GB" + export NTASK="${npe_mos_ext_stn_fcst}" + export PTILE="${npe_node_mos_ext_stn_fcst}" + export prepost=True + +elif [[ ${step} = "mos_ext_grd_fcst" ]]; then + + export wtime_mos_ext_grd_fcst="00:10:00" + export npe_mos_ext_grd_fcst=7 + export npe_node_mos_ext_grd_fcst=7 + export nth_mos_ext_grd_fcst=1 + export memory_mos_ext_grd_fcst="50GB" + export NTASK="${npe_mos_ext_grd_fcst}" + export PTILE="${npe_node_mos_ext_grd_fcst}" + +elif [[ ${step} = "mos_stn_prdgen" ]]; then + + export wtime_mos_stn_prdgen="00:10:00" + export npe_mos_stn_prdgen=1 + export npe_node_mos_stn_prdgen=1 + export nth_mos_stn_prdgen=1 + export memory_mos_stn_prdgen="15GB" + export NTASK="${npe_mos_stn_prdgen}" + export PTILE="${npe_node_mos_stn_prdgen}" + export prepost=True + +elif [[ ${step} = "mos_grd_prdgen" ]]; then + + export wtime_mos_grd_prdgen="00:40:00" + export npe_mos_grd_prdgen=72 + export npe_node_mos_grd_prdgen=18 + export nth_mos_grd_prdgen=4 + export memory_mos_grd_prdgen="20GB" + export NTASK="${npe_mos_grd_prdgen}" + export PTILE="${npe_node_mos_grd_prdgen}" + export OMP_NUM_THREADS="${nth_mos_grd_prdgen}" + +elif [[ ${step} = "mos_ext_stn_prdgen" ]]; then + + export wtime_mos_ext_stn_prdgen="00:10:00" + export npe_mos_ext_stn_prdgen=1 + export npe_node_mos_ext_stn_prdgen=1 + export nth_mos_ext_stn_prdgen=1 + export memory_mos_ext_stn_prdgen="15GB" + export NTASK="${npe_mos_ext_stn_prdgen}" + export PTILE="${npe_node_mos_ext_stn_prdgen}" + export prepost=True + +elif [[ ${step} = "mos_ext_grd_prdgen" ]]; then + + export wtime_mos_ext_grd_prdgen="00:30:00" + export npe_mos_ext_grd_prdgen=96 + export npe_node_mos_ext_grd_prdgen=6 + export nth_mos_ext_grd_prdgen=16 + export memory_mos_ext_grd_prdgen="30GB" + export NTASK="${npe_mos_ext_grd_prdgen}" + export PTILE="${npe_node_mos_ext_grd_prdgen}" 
+ export OMP_NUM_THREADS="${nth_mos_ext_grd_prdgen}" + +elif [[ ${step} = "mos_wx_prdgen" ]]; then + + export wtime_mos_wx_prdgen="00:10:00" + export npe_mos_wx_prdgen=4 + export npe_node_mos_wx_prdgen=2 + export nth_mos_wx_prdgen=2 + export memory_mos_wx_prdgen="10GB" + export NTASK="${npe_mos_wx_prdgen}" + export PTILE="${npe_node_mos_wx_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_prdgen}" + +elif [[ ${step} = "mos_wx_ext_prdgen" ]]; then + + export wtime_mos_wx_ext_prdgen="00:10:00" + export npe_mos_wx_ext_prdgen=4 + export npe_node_mos_wx_ext_prdgen=2 + export nth_mos_wx_ext_prdgen=2 + export memory_mos_wx_ext_prdgen="10GB" + export NTASK="${npe_mos_wx_ext_prdgen}" + export PTILE="${npe_node_mos_wx_ext_prdgen}" + export OMP_NUM_THREADS="${nth_mos_wx_ext_prdgen}" + else echo "Invalid step = ${step}, ABORT!" diff --git a/parm/config/gfs/config.stage_ic b/parm/config/gfs/config.stage_ic new file mode 100644 index 0000000000..7f3956af4d --- /dev/null +++ b/parm/config/gfs/config.stage_ic @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +########## config.stage_ic ########## + +echo "BEGIN: config.stage_ic" + +# Get task specific resources +source "${EXPDIR}/config.resources" stage_ic + +case "${CASE}" in + "C48" | "C96") + export CPL_ATMIC="workflow_${CASE}_refactored" + export CPL_ICEIC="workflow_${CASE}_refactored" + export CPL_OCNIC="workflow_${CASE}_refactored" + export CPL_WAVIC="workflow_${CASE}_refactored" + ;; + "C384") + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c_refactored + export CPL_ICEIC=CPC_refactored + export CPL_OCNIC=CPC3Dvar_refactored + export CPL_WAVIC=workflow_C384_refactored + ;; + "C768") + export CPL_ATMIC=HR2_refactored + export CPL_ICEIC=HR1_refactored + export CPL_OCNIC=HR1_refactored + export CPL_WAVIC=HR1_refactored + ;; + "C1152") + export CPL_ATMIC=HR2_C1152_refactored + export CPL_ICEIC=HR3_refactored + export CPL_OCNIC=HR3_refactored + export CPL_WAVIC=HR1_refactored + ;; + *) + echo "FATAL ERROR Unrecognized resolution: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.stage_ic" diff --git a/parm/config/gfs/config.tracker b/parm/config/gfs/config.tracker new file mode 100644 index 0000000000..71fcf9196d --- /dev/null +++ b/parm/config/gfs/config.tracker @@ -0,0 +1,12 @@ +#! /usr/bin/env bash + +########## config.tracker ########## +echo "BEGIN: config.tracker" + +# Get task specific resources +. "${EXPDIR}/config.resources" tracker + +# Get tropcy settings +. "${EXPDIR}/config.tropcy" + +echo "END: config.tracker" diff --git a/parm/config/gfs/config.tropcy b/parm/config/gfs/config.tropcy new file mode 100644 index 0000000000..718abe3be5 --- /dev/null +++ b/parm/config/gfs/config.tropcy @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.tropcy ########## +echo "BEGIN: config.tropcy" + +# Tracker/genesis package location +export HOMEens_tracker=${BASE_GIT}/TC_tracker/${ens_tracker_ver} + +export SENDCOM="YES" # Needed by tracker scripts still + +export FHOUT_CYCLONE=6 +FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) +export FHMAX_CYCLONE + +echo "END: config.tropcy" diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index 0f3d381cf7..000c8b1e99 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -8,14 +8,15 @@ echo "BEGIN: config.ufs" -if [ $# -le 1 ]; then +if (( $# <= 1 )); then echo "Must specify an input resolution argument to set variables!" 
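# A condensed model of the option loop used further below in this file:
# options that take a value (e.g. --fv3 C96) do an extra shift inside their
# case branch, the new valueless --gocart flag does not, and the single shift
# at the bottom consumes the flag itself:
while (( $# > 0 )); do
  case "$1" in
    --fv3) fv3_res="$2"; shift ;;   # flag plus value
    --gocart) skip_gocart=false ;;  # flag only
    *) echo "FATAL ERROR: Unknown option: $1, ABORT!"; exit 1 ;;
  esac
  shift
done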
echo "argument can be any one of the following:" echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" echo "--mom6 500|100|025" echo "--cice6 500|100|025" - echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|glo_200|glo_500|mx025" + echo "--gocart" exit 1 @@ -25,26 +26,34 @@ fi skip_mom6=true skip_cice6=true skip_ww3=true +skip_gocart=true skip_mediator=true # Loop through named arguments -while [[ $# -gt 0 ]]; do +while (( $# > 0 )); do key="$1" case "${key}" in "--fv3") fv3_res="$2" + shift ;; "--mom6") mom6_res="$2" skip_mom6=false + shift ;; "--cice6") cice6_res="$2" skip_cice6=false + shift ;; "--ww3") ww3_res="$2" skip_ww3=false + shift + ;; + "--gocart") + skip_gocart=false ;; *) # unknown option echo "FATAL ERROR: Unknown option: ${key}, ABORT!" @@ -52,7 +61,6 @@ while [[ $# -gt 0 ]]; do ;; esac shift - shift done # Mediator is required if any of the non-ATM components are used @@ -64,7 +72,7 @@ case "${machine}" in "WCOSS2") npe_node_max=128 ;; - "HERA" | "ORION") + "HERA" | "ORION" | "HERCULES") npe_node_max=40 ;; "JET") @@ -98,6 +106,10 @@ case "${machine}" in ;; esac ;; + *) + echo "FATAL ERROR: Unrecognized machine ${machine}" + exit 14 + ;; esac export npe_node_max @@ -112,6 +124,8 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=1 export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="40.0,1.77,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 @@ -126,6 +140,8 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=1 export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="20.0,2.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=3.0e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 export WRITE_GROUP_GFS=1 @@ -140,6 +156,8 @@ case "${fv3_res}" in export nthreads_fv3=1 export nthreads_fv3_gfs=2 export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="10.0,3.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=1.5e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=1 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 export WRITE_GROUP_GFS=2 @@ -147,16 +165,18 @@ case "${fv3_res}" in ;; "C384") export DELTIM=300 - export layout_x=6 + export layout_x=8 export layout_y=8 export layout_x_gfs=8 export layout_y_gfs=8 - export nthreads_fv3=1 + export nthreads_fv3=2 export nthreads_fv3_gfs=2 export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export WRITE_GROUP=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 - export WRITE_GROUP_GFS=2 + export cdmbgwd_gsl="5.0,5.0,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.8e-3 # setting for UGWPv1 non-stationary GWD + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 ;; "C768") @@ -164,24 +184,16 @@ case "${fv3_res}" in export layout_x=8 export layout_y=12 export layout_x_gfs=12 - #JKHexport layout_y_gfs=16 - export layout_y_gfs=12 + export layout_y_gfs=16 export nthreads_fv3=4 export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export 
cdmbgwd_gsl="2.5,7.5,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.5e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=2 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 - #JKHexport WRITE_GROUP_GFS=4 - if [[ "${machine}" == "HERA" ]] ; then - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 - elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 - elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 - fi + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 #Note this should be 10 for WCOSS2 ;; "C1152") export DELTIM=120 @@ -192,10 +204,12 @@ case "${fv3_res}" in export nthreads_fv3=4 export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="1.67,8.8,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.35e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=20 # TODO: refine these numbers when a case is available ;; "C3072") export DELTIM=90 @@ -206,6 +220,8 @@ case "${fv3_res}" in export nthreads_fv3=4 export nthreads_fv3_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export cdmbgwd_gsl="0.625,14.1,1.0,1.0" # settings for GSL drag suite + export knob_ugwp_tauamp=0.13e-3 # setting for UGWPv1 non-stationary GWD export WRITE_GROUP=4 export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available export WRITE_GROUP_GFS=4 @@ -242,16 +258,35 @@ case ${fv3_res} in OUTPUT_FILETYPE_ATM="netcdf_parallel" OUTPUT_FILETYPE_SFC="netcdf_parallel" ;; + *) + echo "FATAL ERROR: Unrecognized FV3 resolution ${fv3_res}" + exit 15 + ;; esac export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." +export cplwav2atm=".false." +export CCPP_SUITE="FV3_GFS_v17_p8_ugwpv1" +model_list="atm" + # Mediator specific settings if [[ "${skip_mediator}" == "false" ]]; then + export cpl=".true." export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 + export CCPP_SUITE="FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: Does this include FV3_GFS_v17_p8? Can this be used instead of FV3_GFS_v17_p8? fi # MOM6 specific settings if [[ "${skip_mom6}" == "false" ]]; then + source "${EXPDIR}/config.ocn" + export cplflx=".true." 
+ model_list="${model_list}.ocean" nthreads_mom6=1 case "${mom6_res}" in "500") @@ -265,6 +300,11 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + eps_imesh="4.0e-1" + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" + MOM6_DIAG_MISVAL="0.0" + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; "100") ntasks_mom6=20 @@ -273,12 +313,22 @@ if [[ "${skip_mom6}" == "false" ]]; then NY_GLB=320 DT_DYNAM_MOM6='1800' DT_THERM_MOM6='3600' - FRUNOFF="" + FRUNOFF="runoff.daitren.clim.1deg.nc" CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" - MOM6_RESTART_SETTING='n' + MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + eps_imesh="2.5e-1" + TOPOEDITS="ufs.topo_edits_011818.nc" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='True' ;; - "50") + "050") ntasks_mom6=60 OCNTIM=3600 NX_GLB=720 @@ -289,7 +339,17 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" MOM6_RESTART_SETTING='n' MOM6_RIVER_RUNOFF='True' - ;; + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" + ;; "025") ntasks_mom6=220 OCNTIM=1800 @@ -301,29 +361,49 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" MOM6_RIVER_RUNOFF='True' MOM6_RESTART_SETTING="r" + eps_imesh="1.0e-1" + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" + else + MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" + fi + MOM6_ALLOW_LANDMASK_CHANGES='False' + TOPOEDITS="" ;; *) echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" exit 1 ;; esac + export nthreads_mom6 ntasks_mom6 export OCNTIM export NX_GLB NY_GLB export DT_DYNAM_MOM6 DT_THERM_MOM6 export FRUNOFF export CHLCLIM + export TOPOEDITS export MOM6_RIVER_RUNOFF export MOM6_RESTART_SETTING + export eps_imesh + export MOM6_DIAG_COORD_DEF_Z_FILE + export MOM6_DIAG_MISVAL + export MOM6_ALLOW_LANDMASK_CHANGES fi # CICE6 specific settings if [[ "${skip_cice6}" == "false" ]]; then + source "${EXPDIR}/config.ice" + export cplice=".true." + model_list="${model_list}.ice" # Ensure we sourced the MOM6 section if [[ "${skip_mom6}" == "true" ]]; then echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" exit 1 fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 case "${cice6_res}" in "500") @@ -355,6 +435,10 @@ fi # WW3 specific settings if [[ "${skip_ww3}" == "false" ]]; then + source "${EXPDIR}/config.wave" + export cplwav=".true." + export cplwav2atm=".true." 
+ model_list="${model_list}.wave" nthreads_ww3=2 case "${ww3_res}" in "gnh_10m;aoc_9km;gsh_15m") @@ -366,6 +450,14 @@ if [[ "${skip_ww3}" == "false" ]]; then "glo_025") ntasks_ww3=262 ;; + "glo_200") + ntasks_ww3=30 + nthreads_ww3=1 + ;; + "glo_500") + ntasks_ww3=12 + nthreads_ww3=1 + ;; "mx025") ntasks_ww3=80 ;; @@ -377,4 +469,47 @@ if [[ "${skip_ww3}" == "false" ]]; then export ntasks_ww3 nthreads_ww3 fi +# GOCART specific settings +if [[ "${skip_gocart}" == "false" ]]; then + source "${EXPDIR}/config.aero" + export cplchm=".true." + model_list="${model_list}.aero" +fi + +# Set the name of the UFS (previously nems) configure template to use +case "${model_list}" in + atm) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm.IN" + ;; + atm.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.atm_aero.IN" + ;; + atm.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.leapfrog_atm_wav.IN" + ;; + atm.ocean.ice) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld.IN" + ;; + atm.ocean.ice.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero.IN" + ;; + atm.ocean.ice.wave) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_outerwave.IN" + ;; + atm.ocean.ice.wave.aero) + export ufs_configure_template="${HOMEgfs}/parm/ufs/ufs.configure.cpld_aero_outerwave.IN" + ;; + *) + echo "FATAL ERROR: Unable to determine appropriate UFS configure template for ${model_list}" + exit 16 + ;; +esac + +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: ${ufs_configure_template} either doesn't exist or is not readable." + exit 17 +fi + +unset model_list + echo "END: config.ufs" diff --git a/parm/config/gfs/config.upp b/parm/config/gfs/config.upp new file mode 100644 index 0000000000..a1bd0a7d34 --- /dev/null +++ b/parm/config/gfs/config.upp @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.upp ########## +# UPP specific + +echo "BEGIN: config.upp" + +# Get task specific resources +. "${EXPDIR}/config.resources" upp + +export UPP_CONFIG="${HOMEgfs}/parm/post/upp.yaml" + +# No. of forecast hours to process in a single job +export NFHRS_PER_GROUP=3 + +echo "END: config.upp" diff --git a/parm/config/gfs/config.verfozn b/parm/config/gfs/config.verfozn new file mode 100644 index 0000000000..9eea0f25a3 --- /dev/null +++ b/parm/config/gfs/config.verfozn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.verfozn ########## +echo "BEGIN: config.verfozn" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfozn + +export DO_DATA_RPT=1 +export OZN_AREA="glb" +export OZNMON_SUFFIX=${NET} +export PARMmonitor=${PARMgfs}/monitor +export SATYPE_FILE=${PARMmonitor}/gdas_oznmon_satype.txt + +# Source the parm file +. "${PARMmonitor}/gdas_oznmon.parm" + +# Set up validation file +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${PARMmonitor}/gdas_oznmon_base.tar +fi + +echo "END: config.verfozn" diff --git a/parm/config/gfs/config.verfrad b/parm/config/gfs/config.verfrad new file mode 100644 index 0000000000..dd65020180 --- /dev/null +++ b/parm/config/gfs/config.verfrad @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.verfrad ########## +echo "BEGIN: config.verfrad" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfrad + +export PARMmonitor=${PARMgfs}/monitor +export satype_file=${PARMmonitor}/gdas_radmon_satype.txt + +# Source the parm file +. 
"${PARMmonitor}/da_mon.parm" + +# Other variables +export RAD_AREA="glb" +export MAKE_CTL=1 +export MAKE_DATA=1 +export USE_ANL=1 +export DO_DIAG_RPT=1 +export DO_DATA_RPT=1 + +export RADMON_SUFFIX=${RUN} +export CYCLE_INTERVAL=${assim_freq:-6} +export VERBOSE="YES" + +echo "END: config.verfrad" diff --git a/parm/config/gfs/config.vminmon b/parm/config/gfs/config.vminmon new file mode 100644 index 0000000000..8929c36e0e --- /dev/null +++ b/parm/config/gfs/config.vminmon @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.vminmon ########## +echo "BEGIN: config.vminmon" + +# Get task specific resources +. "${EXPDIR}/config.resources" vminmon + +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export CYCLE_INTERVAL=${assim_freq:-6} + +export PARMmonitor=${PARMgfs}/monitor +export mm_gnormfile=${PARMmonitor}/${RUN}_minmon_gnorm.txt +export mm_costfile=${PARMmonitor}/${RUN}_minmon_cost.txt + +echo "END: config.vminmon" diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave index ba7b7ad259..acb4c518ba 100644 --- a/parm/config/gfs/config.wave +++ b/parm/config/gfs/config.wave @@ -10,10 +10,10 @@ echo "BEGIN: config.wave" # System and version export wave_sys_ver=v1.0.0 -export EXECwave="$HOMEgfs/exec" -export FIXwave="$HOMEgfs/fix/wave" -export PARMwave="$HOMEgfs/parm/wave" -export USHwave="$HOMEgfs/ush" +export EXECwave="${HOMEgfs}/exec" +export FIXwave="${HOMEgfs}/fix/wave" +export PARMwave="${HOMEgfs}/parm/wave" +export USHwave="${HOMEgfs}/ush" # This config contains variables/parameters used in the fcst step # Some others are also used across the workflow in wave component scripts @@ -27,23 +27,23 @@ export CDUMPRSTwave="gdas" # Grids for wave model export waveGRD=${waveGRD:-'mx025'} -#grid dependent variable defaults +#grid dependent variable defaults export waveGRDN='1' # grid number for ww3_multi export waveGRDG='10' # grid group for ww3_multi export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients -export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export waveMULTIGRID='.false.' # .true./.false. for multi or shel export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS -export waveesmfGRD=' ' # input grid for multigrid +export waveesmfGRD=' ' # input grid for multigrid #Grid dependent variables for various grids case "${waveGRD}" in "gnh_10m;aoc_9km;gsh_15m") - #GFSv16 settings: + #GFSv16 settings: export waveGRDN='1 2 3' export waveGRDG='10 20 30' export USE_WAV_RMP='YES' export waveMULTIGRID='.true.' 
- export IOSRV='3' + export IOSRV='3' export MESH_WAV=' ' export waveesmfGRD='glox_10m' export waveuoutpGRD='points' @@ -52,81 +52,92 @@ case "${waveGRD}" in ;; "gwes_30m") #Grid used for P8 - export waveinterpGRD=' ' + export waveinterpGRD='' export wavepostGRD='gwes_30m' + export waveuoutpGRD=${waveGRD} ;; "mx025") #Grid used for HR1 (tripolar 1/4 deg) export waveinterpGRD='reg025' - export wavepostGRD=' ' + export wavepostGRD='' + export waveuoutpGRD=${waveGRD} ;; - "glo_025") - #GEFSv13 regular lat/lon 1/4 deg grid - export waveinterpGRD=' ' + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD='' export wavepostGRD='glo_025' + export waveuoutpGRD=${waveGRD} + ;; + "glo_200") + #Global regular lat/lon 2 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_200' + export waveuoutpGRD=${waveGRD} + ;; + "glo_500") + #Global regular lat/lon 5 deg grid + export waveinterpGRD='' + export wavepostGRD='glo_500' + export waveuoutpGRD=${waveGRD} + ;; *) - echo "No grid specific wave config values" - ;; + echo "FATAL ERROR: No grid specific wave config values exist for ${waveGRD}. Aborting." + exit 1 + ;; esac # Grids for input wind fields export WAVEWND_DID= export WAVEWND_FID= -# Grids for output fields (used in all steps) -export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid -export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native - # in POST will generate grib unless gribOK not set -export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) - - # The start time reflects the number of hindcast hours prior to the cycle initial time -if [ "$CDUMP" = "gdas" ]; then +if [[ "${CDUMP}" = "gdas" ]]; then export FHMAX_WAV=${FHMAX:-9} else - export FHMAX_WAV=$FHMAX_GFS + export FHMAX_WAV=${FHMAX_GFS} fi -export WAVHINDH=${WAVHINDH:-0} -export FHMIN_WAV=${FHMIN_WAV:-0} -export FHOUT_WAV=${FHOUT_WAV:-3} -export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} -export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export WAVHINDH=0 +export FHMIN_WAV=0 +export FHOUT_WAV=3 +export FHMAX_HF_WAV=120 +export FHOUT_HF_WAV=1 export FHMAX_WAV_IBP=180 if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi # gridded and point output rate -export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) -export DTPNT_WAV=${DTPNT_WAV:-3600} -export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) +export DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 )) +export DTPNT_WAV=3600 +export FHINCP_WAV=$(( DTPNT_WAV / 3600 )) # Selected output parameters (gridded) -export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} +export OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" # Restart file config -if [ "$CDUMP" = "gdas" ]; then +if [[ "${CDUMP}" = "gdas" ]]; then export WAVNCYC=4 export WAVHCYC=${assim_freq:-6} - export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days -elif [ ${gfs_cyc} -ne 0 ]; then + export FHMAX_WAV_CUR=48 # RTOFS forecasts only out to 8 days +elif [[ ${gfs_cyc} -ne 0 ]]; then export WAVHCYC=${assim_freq:-6} - export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days else export WAVHCYC=0 - export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days + export FHMAX_WAV_CUR=192 # RTOFS forecasts only out to 8 days fi # Restart timing business export RSTTYPE_WAV='T' # generate second tier of restart files -if [ "${CDUMP}" != gfs 
]; then # Setting is valid for GDAS and GEFS +if [[ "${CDUMP}" != gfs ]]; then # Setting is valid for GDAS and GEFS export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file export DT_2_RST_WAV=43200 # restart stride for checkpointing restart export RSTIOFF_WAV=0 # first restart file offset relative to model start else # This is a GFS run - rst_dt_gfs=$(( restart_interval_gfs * 3600 )) - if [ $rst_dt_gfs -gt 0 ]; then - export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) # TODO: This calculation needs to move to parsing_namelists_WW3.sh + if [[ ${rst_dt_gfs} -gt 0 ]]; then + export DT_1_RST_WAV=0 #${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + #temporarily set to zero to avoid a clash in requested restart times, + #which makes the wave model crash; a fix for the model issue will be coming + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart else rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) @@ -141,17 +152,17 @@ fi export RUNMEM=${RUNMEM:--1} # Set wave model member tags if ensemble run # -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN -if [ $RUNMEM = -1 ]; then +if [[ ${RUNMEM} = -1 ]]; then # No suffix added to model ID in case of deterministic run export waveMEMB= else # Extract member number only - export waveMEMB=$(echo $RUNMEM | grep -o '..$') + export waveMEMB="${RUNMEM: -2}" fi # Determine if wave component needs input and/or is coupled export WW3ATMINP='CPL' -if [[ $DO_ICE == "YES" ]]; then +if [[ ${DO_ICE} == "YES" ]]; then export WW3ICEINP='CPL' export WAVEICE_FID= else @@ -160,7 +171,7 @@ else fi export WAVECUR_DID=rtofs -if [[ $DO_OCN == "YES" ]]; then +if [[ ${DO_OCN} == "YES" ]]; then export WW3CURINP='CPL' export WAVECUR_FID= else diff --git a/parm/config/gfs/config.waveawipsbulls b/parm/config/gfs/config.waveawipsbulls index fd21869355..65a8d5076b 100644 --- a/parm/config/gfs/config.waveawipsbulls +++ b/parm/config/gfs/config.waveawipsbulls @@ -9,6 +9,5 @@ echo "BEGIN: config.waveawipsbulls" . $EXPDIR/config.resources waveawipsbulls export DBNROOT=/dev/null -export SENDCOM="YES" echo "END: config.waveawipsbulls" diff --git a/parm/config/gfs/config.waveawipsgridded b/parm/config/gfs/config.waveawipsgridded index 6896ec8bd2..bd7c7c11e4 100644 --- a/parm/config/gfs/config.waveawipsgridded +++ b/parm/config/gfs/config.waveawipsgridded @@ -9,6 +9,5 @@ echo "BEGIN: config.waveawipsgridded" . $EXPDIR/config.resources waveawipsgridded export DBNROOT=/dev/null -export SENDCOM="YES" echo "END: config.waveawipsgridded" diff --git a/parm/config/gfs/config.wavegempak b/parm/config/gfs/config.wavegempak index da76c364ce..bcbec91f07 100644 --- a/parm/config/gfs/config.wavegempak +++ b/parm/config/gfs/config.wavegempak @@ -8,6 +8,5 @@ echo "BEGIN: config.wavegempak" # Get task specific resources . 
$EXPDIR/config.resources wavegempak -export SENDCOM="YES" echo "END: config.wavegempak" diff --git a/parm/config/gfs/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs index f9f8c81d44..8e74aae069 100644 --- a/parm/config/gfs/config.wavepostsbs +++ b/parm/config/gfs/config.wavepostsbs @@ -17,7 +17,7 @@ export DOIBP_WAV='NO' # Input boundary points export DOFLD_WAV='YES' # Field data export DOPNT_WAV='YES' # Station data export DOGRB_WAV='YES' # Create grib2 files -if [[ -z ${waveinterpGRD} ]]; then +if [[ -n "${waveinterpGRD}" ]]; then export DOGRI_WAV='YES' # Create interpolated grids else export DOGRI_WAV='NO' # Do not create interpolated grids diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 202efc2270..c0298edb18 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -1,9 +1,19 @@ base: + DOIAU: "YES" DO_JEDIATMVAR: "NO" DO_JEDIATMENS: "NO" DO_JEDIOCNVAR: "NO" DO_JEDILANDDA: "NO" DO_MERGENSST: "NO" + +atmanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + +atmensanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + aeroanl: IO_LAYOUT_X: 1 IO_LAYOUT_Y: 1 @@ -13,13 +23,13 @@ landanl: IO_LAYOUT_Y: 1 ocnanal: - SOCA_INPUT_FIX_DIR: '/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25' - CASE_ANL: 'C48' - SOCA_OBS_LIST: '' - COMIN_OBS: '/scratch2/NCEPDEV/marineda/r2d2' - SABER_BLOCKS_YAML: '' - SOCA_NINNER: 50 - R2D2_OBS_SRC: 'gdas_marine' - R2D2_OBS_DUMP: 's2s_v1' + SOCA_INPUT_FIX_DIR: "/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca" # TODO: These need to go to glopara fix space. @guillaumevernieres will open an issue + CASE_ANL: "C48" + COMIN_OBS: "/scratch2/NCEPDEV/marineda/r2d2-v2-v3" # TODO: make platform agnostic + SOCA_OBS_LIST: "{{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml" + SOCA_NINNER: 100 + R2D2_OBS_SRC: "gdas_marine" + R2D2_OBS_DUMP: "s2s_v1" + SABER_BLOCKS_YAML: "" NICAS_RESOL: 1 NICAS_GRID_SIZE: 15000 diff --git a/parm/gdas/aero_crtm_coeff.yaml b/parm/gdas/aero_crtm_coeff.yaml new file mode 100644 index 0000000000..75b54c3741 --- /dev/null +++ b/parm/gdas/aero_crtm_coeff.yaml @@ -0,0 +1,13 @@ +mkdir: +- {{ DATA }}/crtm/ +copy: +- [{{ CRTM_FIX }}/AerosolCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/CloudCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/v.viirs-m_npp.SpcCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/v.viirs-m_npp.TauCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/v.viirs-m_j1.SpcCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/v.viirs-m_j1.TauCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin, {{ DATA }}/crtm/] +- [{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin, {{ DATA }}/crtm/] diff --git a/parm/gdas/aero_jedi_fix.yaml b/parm/gdas/aero_jedi_fix.yaml new file mode 100644 index 0000000000..85a00c3c30 --- /dev/null +++ b/parm/gdas/aero_jedi_fix.yaml @@ -0,0 +1,11 @@ +mkdir: +- !ENV ${DATA}/fv3jedi +copy: +- - !ENV ${FIXgdas}/fv3jedi/fv3files/akbk$(npz).nc4 + - !ENV ${DATA}/fv3jedi/akbk.nc4 +- - !ENV ${FIXgdas}/fv3jedi/fv3files/fmsmpp.nml + - !ENV ${DATA}/fv3jedi/fmsmpp.nml +- - !ENV ${FIXgdas}/fv3jedi/fv3files/field_table_gfdl + - !ENV ${DATA}/fv3jedi/field_table +- - !ENV $(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml + - !ENV ${DATA}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml diff --git a/parm/gdas/aeroanl_inc_vars.yaml b/parm/gdas/aeroanl_inc_vars.yaml new file mode 100644 index 
0000000000..298373d6e2 --- /dev/null +++ b/parm/gdas/aeroanl_inc_vars.yaml @@ -0,0 +1 @@ +incvars: ['dust1', 'dust2', 'dust3', 'dust4', 'dust5', 'seas1', 'seas2', 'seas3', 'seas4', 'so4', 'oc1', 'oc2', 'bc1', 'bc2'] diff --git a/parm/gdas/atm_crtm_coeff.yaml b/parm/gdas/atm_crtm_coeff.yaml new file mode 100644 index 0000000000..b943f5cbb3 --- /dev/null +++ b/parm/gdas/atm_crtm_coeff.yaml @@ -0,0 +1,178 @@ +mkdir: +- {{ DATA }}/crtm +copy: +# Emissivity files +- [{{ CRTM_FIX }}/NPOESS.VISice.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/NPOESS.VISland.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/NPOESS.VISsnow.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/NPOESS.VISwater.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/NPOESS.IRice.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/NPOESS.IRland.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/NPOESS.IRsnow.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/Nalli.IRwater.EmisCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/FASTEM6.MWwater.EmisCoeff.bin, {{ DATA }}/crtm] +# Aerosol and Cloud files +- [{{ CRTM_FIX }}/AerosolCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/CloudCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/CloudCoeff.GFDLFV3.-109z-1.bin, {{ DATA }}/crtm] +# Satellite_Sensor specific Tau and Spc coefficient files +- [{{ CRTM_FIX }}/abi_g16.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/abi_g16.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/abi_g17.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/abi_g17.TauCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/abi_g18.SpcCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/abi_g18.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ahi_himawari8.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ahi_himawari8.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ahi_himawari9.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ahi_himawari9.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/airs_aqua.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/airs_aqua.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsr2_gcom-w1.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsr2_gcom-w1.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsre_aqua.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsre_aqua.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_aqua.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_aqua.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_metop-a.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_metop-a.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_metop-b.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_metop-b.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_metop-c.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_metop-c.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_n15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_n15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_n18.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_n18.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_n19.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsua_n19.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsub_n17.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/amsub_n17.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/atms_n20.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/atms_n20.TauCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/atms_n21.SpcCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/atms_n21.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX 
}}/atms_npp.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/atms_npp.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_metop-a.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_metop-a.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_metop-b.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_metop-b.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_metop-c.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_metop-c.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_n18.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_n18.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_n19.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/avhrr3_n19.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/cris-fsr_n20.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/cris-fsr_n20.TauCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/cris-fsr_n21.SpcCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/cris-fsr_n21.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/cris-fsr_npp.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/cris-fsr_npp.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/gmi_gpm.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/gmi_gpm.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs3_n17.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs3_n17.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs4_metop-a.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs4_metop-a.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs4_metop-b.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs4_metop-b.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs4_n19.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/hirs4_n19.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/iasi_metop-a.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/iasi_metop-a.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/iasi_metop-b.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/iasi_metop-b.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/iasi_metop-c.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/iasi_metop-c.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g11.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g11.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g12.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g12.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g13.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g13.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g14.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g14.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/imgr_g15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_metop-a.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_metop-a.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_metop-b.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_metop-b.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_metop-c.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_metop-c.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_n18.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_n18.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_n19.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/mhs_n19.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/saphir_meghat.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/saphir_meghat.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m08.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m08.TauCoeff.bin, {{ DATA }}/crtm] +- 
[{{ CRTM_FIX }}/seviri_m09.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m09.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m10.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m10.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m11.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/seviri_m11.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g11.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g11.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g12.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g12.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g13.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g13.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g14.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g14.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD1_g15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g11.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g11.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g12.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g12.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g13.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g13.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g14.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g14.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD2_g15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g11.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g11.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g12.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g12.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g13.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g13.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g14.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g14.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD3_g15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g11.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g11.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g12.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g12.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g13.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g13.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g14.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g14.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/sndrD4_g15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmi_f15.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmi_f15.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f16.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f16.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f17.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f17.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f18.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f18.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f19.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f19.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f20.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/ssmis_f20.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX 
}}/viirs-m_j1.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/viirs-m_j1.TauCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/viirs-m_j2.SpcCoeff.bin, {{ DATA }}/crtm] +##- [{{ CRTM_FIX }}/viirs-m_j2.TauCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/viirs-m_npp.SpcCoeff.bin, {{ DATA }}/crtm] +- [{{ CRTM_FIX }}/viirs-m_npp.TauCoeff.bin, {{ DATA }}/crtm] +# Special Spc files +##- [{{ CRTM_FIX }}/amsua_metop-a_v2.SpcCoeff.bin, {{ DATA }}/crtm] diff --git a/parm/gdas/atm_jedi_fix.yaml b/parm/gdas/atm_jedi_fix.yaml new file mode 100644 index 0000000000..3d1ca79f33 --- /dev/null +++ b/parm/gdas/atm_jedi_fix.yaml @@ -0,0 +1,7 @@ +mkdir: +- $(DATA)/fv3jedi +copy: +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, $(DATA)/fv3jedi/fmsmpp.nml] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table] +- [$(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml, $(DATA)/fv3jedi/fv3jedi_fieldmetadata_restart.yaml] diff --git a/parm/gdas/atmanl_inc_vars.yaml b/parm/gdas/atmanl_inc_vars.yaml new file mode 100644 index 0000000000..cb6718ce9f --- /dev/null +++ b/parm/gdas/atmanl_inc_vars.yaml @@ -0,0 +1 @@ +incvars: ['ua', 'va', 't', 'sphum', 'liq_wat', 'ice_wat', 'o3mr'] diff --git a/parm/gdas/land_jedi_fix.yaml b/parm/gdas/land_jedi_fix.yaml new file mode 100644 index 0000000000..3d1ca79f33 --- /dev/null +++ b/parm/gdas/land_jedi_fix.yaml @@ -0,0 +1,7 @@ +mkdir: +- $(DATA)/fv3jedi +copy: +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, $(DATA)/fv3jedi/fmsmpp.nml] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table] +- [$(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml, $(DATA)/fv3jedi/fv3jedi_fieldmetadata_restart.yaml] diff --git a/parm/post/global_1x1_paramlist_g2 b/parm/post/global_1x1_paramlist_g2 index 553a42bc62..350a715bac 100644 --- a/parm/post/global_1x1_paramlist_g2 +++ b/parm/post/global_1x1_paramlist_g2 @@ -46,6 +46,7 @@ ALBDO:surface APCP:surface APTMP:2 m above ground var discipline=0 master_table=2 parmcat=0 parm=21:2 m above ground +AOTK:entire atmosphere CAPE:180-0 mb above ground CAPE:255-0 mb above ground CAPE:90-0 mb above ground @@ -91,6 +92,8 @@ DPT:2 m above ground DSWRF:surface FLDCP:surface FRICV:surface +FROZR:surface +FRZR:surface GFLUX:surface GRLE:50 mb GRLE:1000 mb @@ -167,6 +170,7 @@ HGT:PV=2e-06 (Km^2/kg/s) surface HGT:surface HGT:tropopause HINDEX:surface +HLCY:1000-0 m above ground HLCY:3000-0 m above ground HPBL:surface ICAHT:max wind @@ -363,6 +367,7 @@ RWMR:950 mb RWMR:975 mb RWMR:1 hybrid level SHTFL:surface +SDEN:surface SFCR:surface SOILW:0-0.1 m below ground SOILW:0.1-0.4 m below ground @@ -533,6 +538,7 @@ TMP:PV=2e-06 (Km^2/kg/s) surface TMP:surface TMP:tropopause TOZNE:entire atmosphere (considered as a single layer) +TSNOWP:surface UFLX:surface UGRD:0.995 sigma level UGRD:1000 mb diff --git a/parm/post/global_1x1_paramlist_g2.f000 b/parm/post/global_1x1_paramlist_g2.f000 index 615022063b..b1dcb09ccd 100644 --- a/parm/post/global_1x1_paramlist_g2.f000 +++ b/parm/post/global_1x1_paramlist_g2.f000 @@ -159,6 +159,7 @@ HGT:PV=2e-06 (Km^2/kg/s) surface HGT:surface HGT:tropopause HINDEX:surface +HLCY:1000-0 m above ground HLCY:3000-0 m above ground HPBL:surface ICAHT:max wind diff --git a/parm/post/itag.jinja b/parm/post/itag.jinja new file mode 100644 index 0000000000..07aa41f8da --- /dev/null +++ 
b/parm/post/itag.jinja @@ -0,0 +1,16 @@ +&model_inputs + filename = "{{ atmos_filename }}", + ioform = "{{ ioform }}", + grib = "{{ grib_version }}", + datestr = "{{ valid_datetime | strftime('%Y-%m-%d_%H:%M:%S') }}", + modelname = "{{ NET | upper }}", + submodelname = "NONE", + filenameflux = "{{ flux_filename }}", + filenameflat = "postxconfig-NT.txt" +/ +&nampgb + kpo = {{ po | length }}, + po = {{ po | join(', ') }}, + rdaod = {{ rdaod | to_f90bool }} +/ + diff --git a/parm/post/upp.yaml b/parm/post/upp.yaml new file mode 100644 index 0000000000..651f3c12a8 --- /dev/null +++ b/parm/post/upp.yaml @@ -0,0 +1,90 @@ +upp: + config: + grib_version: "grib2" + ioform: "netcdfpara" + po: [1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,40.,30.,20.,15.,10.,7.,5.,3.,2.,1.,0.7,0.4,0.2,0.1,0.07,0.04,0.02,0.01] + rdaod: False + fix_data: + mkdir: + - "{{ DATA }}" + copy: + - ["{{ 'g2tmpl_ROOT' | getenv }}/share/params_grib2_tbl_new", "{{ DATA }}/params_grib2_tbl_new"] + - ["{{ HOMEgfs }}/parm/post/nam_micro_lookup.dat", "{{ DATA }}/eta_micro_lookup.dat"] + - ["{{ HOMEgfs }}/exec/upp.x", "{{ DATA }}/"] + - ["{{ HOMEgfs }}/parm/post/itag.jinja", "{{ DATA }}/"] + +analysis: + config: + rdaod: True + data_in: + copy: + - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-ANL.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ COM_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmanl.nc", "{{ DATA }}/{{ atmos_filename }}"] + - ["{{ COM_ATMOS_ANALYSIS }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcanl.nc", "{{ DATA }}/{{ flux_filename }}"] + data_out: + copy: + - ["{{ DATA }}/GFSPRS.GrbF00", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.master.grb2anl"] + - ["{{ DATA }}/GFSPRS.GrbF00.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.master.grb2ianl"] + +forecast: + config: + rdaod: False + data_in: + copy: + {% if forecast_hour == 0 %} + - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-F00-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"] + {% else %} + - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-TWO.txt", "{{ DATA }}/postxconfig-NT.txt"] + {% endif %} + - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"] + - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"] + data_out: + copy: + - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.masterf{{ '%03d' % forecast_hour }}.grb2"] + - ["{{ DATA }}/GFSFLX.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfluxf{{ '%03d' % forecast_hour }}.grb2"] + - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.masterf{{ '%03d' % forecast_hour }}.grb2.idx"] + - ["{{ DATA }}/GFSFLX.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfluxf{{ '%03d' % forecast_hour }}.grb2.idx"] + +goes: + config: + rdaod: True + data_in: + copy: + {% set crtm_coefficients = [ + 'amsre_aqua', + 'imgr_g11', 'imgr_g12', 'imgr_g13', 'imgr_g15', + 'imgr_mt1r', 'imgr_mt2', 'imgr_insat3d', + 'seviri_m10', + 'ssmi_f13', 
'ssmi_f14', 'ssmi_f15', 'ssmis_f16', 'ssmis_f17', 'ssmis_f18', 'ssmis_f19', 'ssmis_f20', + 'tmi_trmm', + 'abi_gr', + 'ahi_himawari8' + ] %} + {% for coefficient in crtm_coefficients %} + - ["{{ 'CRTM_FIX' | getenv }}/{{ coefficient }}.TauCoeff.bin", "{{ DATA }}/"] + - ["{{ 'CRTM_FIX' | getenv }}/{{ coefficient }}.SpcCoeff.bin", "{{ DATA }}/"] + {% endfor %} + {% for coefficient in ['v.seviri_m10'] %} + - ["{{ 'CRTM_FIX' | getenv }}/{{ coefficient }}.SpcCoeff.bin", "{{ DATA }}/"] + {% endfor %} + {% set emission_coefficients = [ + 'FASTEM4.MWwater', 'FASTEM5.MWwater', 'FASTEM6.MWwater', + 'IGBP.IRland', 'IGBP.VISland', + 'Nalli.IRwater', + 'NPOESS.IRice', 'NPOESS.IRland', 'NPOESS.IRsnow', + 'NPOESS.VISice', 'NPOESS.VISland', 'NPOESS.VISsnow', 'NPOESS.VISwater', + 'USGS.IRland', 'USGS.VISland', + 'WuSmith.IRwater' + ] %} + {% for coefficient in emission_coefficients %} + - ["{{ 'CRTM_FIX' | getenv }}/{{ coefficient }}.EmisCoeff.bin", "{{ DATA }}/"] + {% endfor %} + - ["{{ 'CRTM_FIX' | getenv }}/AerosolCoeff.bin", "{{ DATA }}/"] + - ["{{ 'CRTM_FIX' | getenv }}/CloudCoeff.bin", "{{ DATA }}/"] + - ["{{ HOMEgfs }}/parm/post/postxconfig-NT-GFS-GOES.txt", "{{ DATA }}/postxconfig-NT.txt"] + - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.atmf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ atmos_filename }}"] + - ["{{ COM_ATMOS_HISTORY }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.sfcf{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/{{ flux_filename }}"] + data_out: + copy: + - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.goesmasterf{{ '%03d' % forecast_hour }}.grb2"] + - ["{{ DATA }}/GFSPRS.GrbF{{ '%02d' % forecast_hour }}.idx", "{{ COM_ATMOS_MASTER }}/{{ RUN }}.t{{ current_cycle | strftime('%H') }}z.goesmasterf{{ '%03d' % forecast_hour }}.grb2.idx"] diff --git a/parm/product/transfer_gfs_1.list b/parm/product/transfer_gfs_1.list index 5067ae9d98..2fda891ca3 100644 --- a/parm/product/transfer_gfs_1.list +++ b/parm/product/transfer_gfs_1.list @@ -30,7 +30,6 @@ com/gfs/_ENVIR_/gfs._PDY_/ - /gfs.t??z.*flx* - /gfs.t??z.*atmfc* - /gfs_grb211.t??z.pgrsfnf* -- /wafsgfs??.t??z.grisfnf* - /gfs.t??z.master.* - /gfs*sfcf* @@ -43,7 +42,6 @@ com/gfs/_ENVIR_/gfs._PDYm1_/ - /gfs.t??z.*flx* - /gfs.t??z.*atmfc* - /gfs_grb211.t??z.pgrsfnf* -- /wafsgfs??.t??z.grisfnf* - /gfs.t??z.master.* - /gfs*sfcf* diff --git a/parm/product/transfer_gfs_7.list b/parm/product/transfer_gfs_7.list index 8ef7fbfa22..b21756140c 100644 --- a/parm/product/transfer_gfs_7.list +++ b/parm/product/transfer_gfs_7.list @@ -27,7 +27,6 @@ com/gfs/_ENVIR_/gfs._PDY_/ + /gfs.t??z.flxgrbf* + /gfs.t??z.flxgrbif* + /gfs.t??z.goessimpgrb2.1p00.f* -+ /wafsgfs??.t??z.grib* #+ /gfs.t??z.flxf*.nemsio - * B 444000 @@ -37,7 +36,6 @@ com/gfs/_ENVIR_/gfs._PDYm1_/ + /gfs.t??z.flxgrbf* + /gfs.t??z.flxgrbif* + /gfs.t??z.goessimpgrb2.1p00.f* -+ /wafsgfs??.t??z.grib* #+ /gfs.t??z.flxf*.nemsio - * B 444000 diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml index cc82f7a253..7d901fe17b 100644 --- a/parm/ufs/fix/gfs/atmos.fixed_files.yaml +++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml @@ -1,85 +1,85 @@ copy: # Atmosphere mosaic file linked as the grid_spec file (atm only) - - [$(FIX_orog)/$(atm_res)/$(atm_res)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc] + - [$(FIXorog)/$(CASE)/$(CASE)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc] # Atmosphere grid tile files - - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile1.nc, 
$(DATA)/INPUT/] - - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile2.nc, $(DATA)/INPUT/] - - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile3.nc, $(DATA)/INPUT/] - - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile4.nc, $(DATA)/INPUT/] - - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile5.nc, $(DATA)/INPUT/] - - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile6.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile1.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile2.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile3.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile4.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile5.nc, $(DATA)/INPUT/] + - [$(FIXorog)/$(CASE)/$(CASE)_grid.tile6.nc, $(DATA)/INPUT/] - # oro_data_ls and oro_data_ss files from FIX_ugwd - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc] - - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc] + # oro_data_ls and oro_data_ss files from FIXugwd + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc] + - [$(FIXugwd)/$(CASE)/$(CASE)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc] # GWD?? 
- - [$(FIX_ugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] + - [$(FIXugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] # CO2 climatology - - [$(FIX_am)/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] - - [$(FIX_am)/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt] - - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt] + - [$(FIXam)/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] + - [$(FIXam)/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt] + - [$(FIXam)/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt] - # FIX_am files - - [$(FIX_am)/global_climaeropac_global.txt, $(DATA)/aerosol.dat] - - [$(FIX_am)/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, 
$(DATA)/global_o3prdlos.f77] - - [$(FIX_am)/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77] - - [$(FIX_am)/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb] - - [$(FIX_am)/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb] - - [$(FIX_am)/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb] - - [$(FIX_am)/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb] - - [$(FIX_am)/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb] - - [$(FIX_am)/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt] - - [$(FIX_am)/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt] - - [$(FIX_am)/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb] - - [$(FIX_am)/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb] + # FIXam files + - [$(FIXam)/global_climaeropac_global.txt, $(DATA)/aerosol.dat] + - [$(FIXam)/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77] + - [$(FIXam)/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77] + - [$(FIXam)/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb] + - [$(FIXam)/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb] + - [$(FIXam)/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb] + - [$(FIXam)/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb] + - [$(FIXam)/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb] + - [$(FIXam)/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt] + - [$(FIXam)/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt] + - [$(FIXam)/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb] + - [$(FIXam)/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb] # MERRA2 Aerosol Climatology - - [$(FIX_aer)/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc] - - [$(FIX_aer)/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc] + - 
[$(FIXaer)/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc] + - [$(FIXaer)/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc] # Optical depth - - [$(FIX_lut)/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat] - - [$(FIX_lut)/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat] - - [$(FIX_lut)/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat] - - [$(FIX_lut)/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat] - - [$(FIX_lut)/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat] + - [$(FIXlut)/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat] + - [$(FIXlut)/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat] + - [$(FIXlut)/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat] + - [$(FIXlut)/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat] + - [$(FIXlut)/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat] - # fd_nems.yaml file - - [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_nems.yaml, $(DATA)/] + # fd_ufs.yaml file + - [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml, $(DATA)/] diff --git a/parm/ufs/fix/gfs/land.fixed_files.yaml b/parm/ufs/fix/gfs/land.fixed_files.yaml index ab93ff27a6..bb2d060963 100644 --- a/parm/ufs/fix/gfs/land.fixed_files.yaml +++ b/parm/ufs/fix/gfs/land.fixed_files.yaml @@ -1,58 +1,58 @@ copy: - # Files from FIX_orog/C??.mx??_frac/fix_sfc - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile6.nc, $(DATA)/] + # Files from FIXorog/C??/sfc + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).facsf.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).maximum_snow_albedo.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile1.nc, $(DATA)/] - - 
[$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).slope_type.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).snowfree_albedo.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).soil_type.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile4.nc, $(DATA)/] - - 
[$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).substrate_temperature.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_greenness.tile6.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile1.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile2.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile3.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile4.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile5.nc, $(DATA)/] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile6.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile1.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile2.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile3.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile4.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile5.nc, $(DATA)/] + - [$(FIXorog)/$(CASE)/sfc/$(CASE).mx$(OCNRES).vegetation_type.tile6.nc, $(DATA)/] diff --git a/parm/ufs/fix/gfs/ocean.fixed_files.yaml b/parm/ufs/fix/gfs/ocean.fixed_files.yaml index 801f070c49..1ca8ce7a68 100644 --- a/parm/ufs/fix/gfs/ocean.fixed_files.yaml +++ b/parm/ufs/fix/gfs/ocean.fixed_files.yaml @@ -1,10 +1,9 @@ copy: # Orography data tile files - # The following are for "frac_grid = .true." 
- - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc] - - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc] + - [$(FIXorog)/$(CASE)/$(CASE).mx$(OCNRES)_oro_data.tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc] diff --git a/parm/parm_fv3diag/data_table b/parm/ufs/fv3/data_table similarity index 100% rename from parm/parm_fv3diag/data_table rename to parm/ufs/fv3/data_table diff --git a/parm/parm_fv3diag/diag_table b/parm/ufs/fv3/diag_table similarity index 99% rename from parm/parm_fv3diag/diag_table rename to parm/ufs/fv3/diag_table index 37421f8a4f..b972b3470c 100644 --- a/parm/parm_fv3diag/diag_table +++ b/parm/ufs/fv3/diag_table @@ -34,6 +34,7 @@ "ocean_model", "ePBL_h_ML", "ePBL", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 "ocean_model", "MLD_003", "MLD_003", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 "ocean_model", "MLD_0125", "MLD_0125", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "tob", "tob", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 # Z-Space Fields Provided for CMIP6 (CMOR Names): "ocean_model_z", "uo", "uo", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 @@ -98,6 +99,7 @@ "gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "refl_10cm", "refl_10cm", "fv3_history", "all", .false., "none", 2 "gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2 @@ -265,7 +267,6 @@ "gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2 "gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2 - #============================================================================================= # #====> This file can be used with diag_manager/v2.0a (or higher) <==== diff --git a/parm/parm_fv3diag/diag_table.aero b/parm/ufs/fv3/diag_table.aero similarity index 100% rename from parm/parm_fv3diag/diag_table.aero rename to parm/ufs/fv3/diag_table.aero diff --git a/parm/parm_fv3diag/diag_table_aod b/parm/ufs/fv3/diag_table_aod similarity index 100% rename from parm/parm_fv3diag/diag_table_aod rename to parm/ufs/fv3/diag_table_aod diff --git a/parm/parm_fv3diag/diag_table_da b/parm/ufs/fv3/diag_table_da similarity index 95% rename from parm/parm_fv3diag/diag_table_da rename to parm/ufs/fv3/diag_table_da index cdcc36ee57..40824caee9 100644 --- 
a/parm/parm_fv3diag/diag_table_da +++ b/parm/ufs/fv3/diag_table_da @@ -2,15 +2,15 @@ "fv3_history2d", 0, "hours", 1, "hours", "time" "ocn_da%4yr%2mo%2dy%2hr", 1, "hours", 1, "hours", "time", 1, "hours" -"ocean_model", "geolon", "geolon", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "geolon", "geolon", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 "gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 diff --git a/parm/parm_fv3diag/diag_table_da_gsl b/parm/ufs/fv3/diag_table_da_gsl similarity index 100% rename from parm/parm_fv3diag/diag_table_da_gsl rename to parm/ufs/fv3/diag_table_da_gsl diff --git a/parm/ufs/fv3/diag_table_da_orig b/parm/ufs/fv3/diag_table_da_orig new file mode 100644 index 0000000000..59268883cb --- /dev/null +++ b/parm/ufs/fv3/diag_table_da_orig @@ -0,0 +1,80 @@ + +"fv3_history", 0, "hours", 1, "hours", "time" +"fv3_history2d", 0, "hours", 1, "hours", "time" + +"gfs_dyn", "ucomp", "ucomp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "vcomp", "vcomp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "sphum", "sphum", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "temp", "temp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "liq_wat", "liq_wat", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "delp", "delp", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "pfhy", "hypres", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "pfnh", "nhpres", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "w", "vvel", "fv3_history", "all", .false., "none", 2 + +"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "u10m", "u10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "v10m", "v10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "cnvprcp", "cnvprcp", "fv3_history2d", "all", .false., "none", 2 + +"gfs_sfc", "hgtsfc", "hgtsfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., 
"none", 2 +"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "q2m", "q2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "t2m", "t2m", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tsfc", "tsfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "stype", "stype", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slmsksfc", "slmsk", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "vfracsfc", "vfrac", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "zorlsfc", "zorl", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "uustar", "uustar", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt1", "soilt1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt2", "soilt2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt3", "soilt3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilt4", "soilt4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw1", "soilw1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw2", "soilw2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw3", "soilw3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "soilw4", "soilw4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_1", "slc_1", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_2", "slc_2", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_3", "slc_3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slc_4", "slc_4", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "slope", "slope", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "canopy", "canopy", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "fice", "fice", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "hice", "hice", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "snowd", "snowd", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2 +"gfs_sfc" "tref" "tref" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "z_c" "z_c" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "c_0" "c_0" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "c_d" "c_d" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "w_0" "w_0" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "w_d" "w_d" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xt" "xt" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xz" "xz" "fv3_history2d" "all" .false. 
"none" 2 +"gfs_sfc" "dt_cool" "dt_cool" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xs" "xs" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xu" "xu" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xv" "xv" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xtts" "xtts" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "xzts" "xzts" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "d_conv" "d_conv" "fv3_history2d" "all" .false. "none" 2 +"gfs_sfc" "qrain" "qrain" "fv3_history2d" "all" .false. "none" 2 diff --git a/parm/parm_fv3diag/diag_table_gsl b/parm/ufs/fv3/diag_table_gsl similarity index 100% rename from parm/parm_fv3diag/diag_table_gsl rename to parm/ufs/fv3/diag_table_gsl diff --git a/parm/parm_fv3diag/diag_table_gsl_ruc b/parm/ufs/fv3/diag_table_gsl_ruc similarity index 100% rename from parm/parm_fv3diag/diag_table_gsl_ruc rename to parm/ufs/fv3/diag_table_gsl_ruc diff --git a/parm/parm_fv3diag/field_table b/parm/ufs/fv3/field_table similarity index 100% rename from parm/parm_fv3diag/field_table rename to parm/ufs/fv3/field_table diff --git a/parm/parm_fv3diag/field_table.aero b/parm/ufs/fv3/field_table.aero similarity index 100% rename from parm/parm_fv3diag/field_table.aero rename to parm/ufs/fv3/field_table.aero diff --git a/parm/parm_fv3diag/field_table_gfdl b/parm/ufs/fv3/field_table_gfdl similarity index 100% rename from parm/parm_fv3diag/field_table_gfdl rename to parm/ufs/fv3/field_table_gfdl diff --git a/parm/parm_fv3diag/field_table_gfdl_progsigma b/parm/ufs/fv3/field_table_gfdl_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_gfdl_progsigma rename to parm/ufs/fv3/field_table_gfdl_progsigma diff --git a/parm/parm_fv3diag/field_table_gfdl_satmedmf b/parm/ufs/fv3/field_table_gfdl_satmedmf similarity index 100% rename from parm/parm_fv3diag/field_table_gfdl_satmedmf rename to parm/ufs/fv3/field_table_gfdl_satmedmf diff --git a/parm/parm_fv3diag/field_table_gfdl_satmedmf_progsigma b/parm/ufs/fv3/field_table_gfdl_satmedmf_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_gfdl_satmedmf_progsigma rename to parm/ufs/fv3/field_table_gfdl_satmedmf_progsigma diff --git a/parm/parm_fv3diag/field_table_thompson b/parm/ufs/fv3/field_table_thompson similarity index 100% rename from parm/parm_fv3diag/field_table_thompson rename to parm/ufs/fv3/field_table_thompson diff --git a/parm/parm_fv3diag/field_table_thompson_aero_tke b/parm/ufs/fv3/field_table_thompson_aero_tke similarity index 100% rename from parm/parm_fv3diag/field_table_thompson_aero_tke rename to parm/ufs/fv3/field_table_thompson_aero_tke diff --git a/parm/parm_fv3diag/field_table_thompson_aero_tke_progsigma b/parm/ufs/fv3/field_table_thompson_aero_tke_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_thompson_aero_tke_progsigma rename to parm/ufs/fv3/field_table_thompson_aero_tke_progsigma diff --git a/parm/parm_fv3diag/field_table_thompson_noaero_tke b/parm/ufs/fv3/field_table_thompson_noaero_tke similarity index 100% rename from parm/parm_fv3diag/field_table_thompson_noaero_tke rename to parm/ufs/fv3/field_table_thompson_noaero_tke diff --git a/parm/parm_fv3diag/field_table_thompson_noaero_tke_progsigma b/parm/ufs/fv3/field_table_thompson_noaero_tke_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_thompson_noaero_tke_progsigma rename to parm/ufs/fv3/field_table_thompson_noaero_tke_progsigma diff --git a/parm/parm_fv3diag/field_table_thompson_satmedmf b/parm/ufs/fv3/field_table_thompson_satmedmf similarity index 
100% rename from parm/parm_fv3diag/field_table_thompson_satmedmf rename to parm/ufs/fv3/field_table_thompson_satmedmf diff --git a/parm/parm_fv3diag/field_table_wsm6 b/parm/ufs/fv3/field_table_wsm6 similarity index 100% rename from parm/parm_fv3diag/field_table_wsm6 rename to parm/ufs/fv3/field_table_wsm6 diff --git a/parm/parm_fv3diag/field_table_wsm6_progsigma b/parm/ufs/fv3/field_table_wsm6_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_wsm6_progsigma rename to parm/ufs/fv3/field_table_wsm6_progsigma diff --git a/parm/parm_fv3diag/field_table_wsm6_satmedmf b/parm/ufs/fv3/field_table_wsm6_satmedmf similarity index 100% rename from parm/parm_fv3diag/field_table_wsm6_satmedmf rename to parm/ufs/fv3/field_table_wsm6_satmedmf diff --git a/parm/parm_fv3diag/field_table_wsm6_satmedmf_progsigma b/parm/ufs/fv3/field_table_wsm6_satmedmf_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_wsm6_satmedmf_progsigma rename to parm/ufs/fv3/field_table_wsm6_satmedmf_progsigma diff --git a/parm/parm_fv3diag/field_table_zhaocarr b/parm/ufs/fv3/field_table_zhaocarr similarity index 100% rename from parm/parm_fv3diag/field_table_zhaocarr rename to parm/ufs/fv3/field_table_zhaocarr diff --git a/parm/parm_fv3diag/field_table_zhaocarr_progsigma b/parm/ufs/fv3/field_table_zhaocarr_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_zhaocarr_progsigma rename to parm/ufs/fv3/field_table_zhaocarr_progsigma diff --git a/parm/parm_fv3diag/field_table_zhaocarr_satmedmf b/parm/ufs/fv3/field_table_zhaocarr_satmedmf similarity index 100% rename from parm/parm_fv3diag/field_table_zhaocarr_satmedmf rename to parm/ufs/fv3/field_table_zhaocarr_satmedmf diff --git a/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma b/parm/ufs/fv3/field_table_zhaocarr_satmedmf_progsigma similarity index 100% rename from parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma rename to parm/ufs/fv3/field_table_zhaocarr_satmedmf_progsigma diff --git a/parm/parm_fv3diag/variable_table.txt b/parm/ufs/fv3/variable_table.txt similarity index 100% rename from parm/parm_fv3diag/variable_table.txt rename to parm/ufs/fv3/variable_table.txt diff --git a/parm/parm_fv3diag/variable_table_da.txt b/parm/ufs/fv3/variable_table_da.txt similarity index 100% rename from parm/parm_fv3diag/variable_table_da.txt rename to parm/ufs/fv3/variable_table_da.txt diff --git a/parm/parm_fv3diag/variable_table_da_nonsst.txt b/parm/ufs/fv3/variable_table_da_nonsst.txt similarity index 100% rename from parm/parm_fv3diag/variable_table_da_nonsst.txt rename to parm/ufs/fv3/variable_table_da_nonsst.txt diff --git a/parm/ufs/gocart/AERO.rc b/parm/ufs/gocart/AERO.rc new file mode 100644 index 0000000000..ff40fba2aa --- /dev/null +++ b/parm/ufs/gocart/AERO.rc @@ -0,0 +1,10 @@ +NX: 4 +NY: 24 + +# Atmospheric Model Configuration Parameters +# ------------------------------------------ +IOSERVER_NODES: 0 + +DYCORE: NONE + +NUM_BANDS: 30 diff --git a/parm/ufs/gocart/AERO_HISTORY.rc b/parm/ufs/gocart/AERO_HISTORY.rc new file mode 100644 index 0000000000..4c7df15b2a --- /dev/null +++ b/parm/ufs/gocart/AERO_HISTORY.rc @@ -0,0 +1,452 @@ +####################################################################### +# Create History List for Output +####################################################################### + +VERSION: 1 +EXPID: gocart +EXPDSC: GOCART2g_diagnostics_at_c360 +EXPSRC: GEOSgcm-v10.16.0 +Allow_Overwrite: .true. 
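For context on the HISTORY resource file being added here: only collections named in the COLLECTIONS list are written at run time, while the commented-out entries stay defined but inactive, and collection attributes such as frequency, duration, and ref_time are HHMMSS-encoded integers. Below is a minimal decoding sketch of that convention as I read MAPL HISTORY; the helper name is illustrative only and not part of this PR.

from datetime import timedelta

def hhmmss_to_timedelta(token: str) -> timedelta:
    # Decode a MAPL HISTORY HHMMSS time token, e.g. "120000" -> 12 hours.
    token = token.zfill(6)
    return timedelta(hours=int(token[:2]), minutes=int(token[2:4]), seconds=int(token[4:6]))

# The inst_aod collection in this file uses frequency 060000 and duration 010000:
assert hhmmss_to_timedelta("060000") == timedelta(hours=6)
assert hhmmss_to_timedelta("010000") == timedelta(hours=1)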
+ +COLLECTIONS: 'inst_aod' +# 'inst_du_ss' +# 'inst_ca' +# 'inst_ni' +# 'inst_su' +# 'inst_du_bin' +# 'inst_ss_bin' +# 'inst_ca_bin' +# 'inst_ni_bin' +# 'inst_su_bin' +# 'inst_2d' +# 'inst_3d' +# 'inst_aod' +# 'tavg_du_ss' +# 'tavg_du_bin' +# 'tavg_2d_rad' +# 'tavg_3d_rad' + :: + +################################################## +# The GRID_LABELS section must be after the main # +# list of COLLECTIONS for scripting purposes. # +################################################## + +GRID_LABELS: PC720x361-DC +:: + + +PC720x361-DC.GRID_TYPE: LatLon +PC720x361-DC.IM_WORLD: 720 +PC720x361-DC.JM_WORLD: 361 +PC720x361-DC.POLE: PC +PC720x361-DC.DATELINE: DC +PC720x361-DC.LM: 72 + +# --------------------- +# Aerosols/Constituents +# --------------------- +# +#### Instantaneous (hourly) output + +# +# 3d aerosols +# + inst_du_ss.format: 'CFIO' , + inst_du_ss.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', + inst_du_ss.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_du_ss.mode: 'instantaneous', + inst_du_ss.grid_label: PC720x361-DC , + inst_du_ss.splitField: 1, + inst_du_ss.frequency: 120000 , + inst_du_ss.duration: 010000 , + inst_du_ss.ref_time: 000000 , + inst_du_ss.nbits: 10, + inst_du_ss.fields: 'DU' , 'DU' , + 'SS' , 'SS' , + :: + + tavg_du_ss.format: 'CFIO' , + tavg_du_ss.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', + tavg_du_ss.template: '%y4%m2%d2_%h2%n2z.nc4', + tavg_du_ss.mode: 'time-averaged', + tavg_du_ss.grid_label: PC720x361-DC , + tavg_du_ss.splitField: 1, + tavg_du_ss.frequency: 120000 , + tavg_du_ss.duration: 010000 , + tavg_du_ss.ref_time: 000000 , + tavg_du_ss.nbits: 10, + tavg_du_ss.fields: 'DU' , 'DU' , + 'SS' , 'SS' , + :: + + inst_ca.format: 'CFIO' , + inst_ca.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', + inst_ca.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_ca.mode: 'instantaneous', + inst_ca.grid_label: PC720x361-DC , + inst_ca.frequency: 120000 , + inst_ca.duration: 010000 , + inst_ca.ref_time: 000000 , + inst_ca.nbits: 10, + inst_ca.fields: 'CAphilicCA.bc' , 'CA.bc' , + 'CAphobicCA.bc' , 'CA.bc' , + 'CAphilicCA.oc' , 'CA.oc' , + 'CAphobicCA.oc' , 'CA.oc' , + :: + + inst_ni.format: 'CFIO' , + inst_ni.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', + inst_ni.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_ni.mode: 'instantaneous', + inst_ni.grid_label: PC720x361-DC , + inst_ni.frequency: 120000 , + inst_ni.duration: 010000 , + inst_ni.ref_time: 000000 , + inst_ni.nbits: 10, + inst_ni.fields: 'NH3' , 'NI' , + 'NH4a' , 'NI' , + 'NO3an1' , 'NI' , + 'NO3an2' , 'NI' , + 'NO3an3' , 'NI' , + :: + + inst_su.format: 'CFIO' , + inst_su.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', + inst_su.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_su.mode: 'instantaneous', + inst_su.grid_label: PC720x361-DC , + inst_su.frequency: 120000 , + inst_su.duration: 010000 , + inst_su.ref_time: 000000 , + inst_su.nbits: 10, + inst_su.fields: 'DMS' , 'SU' , + 'SO2' , 'SU' , + 'SO4' , 'SU' , + 'MSA' , 'SU' , + :: +# +# Binned aerosols +# + + inst_du_bin.format: 'CFIO' , + inst_du_bin.descr: '2d,Hourly,Instantaneous' + inst_du_bin.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_du_bin.mode: 'instantaneous' + inst_du_bin.grid_label: PC720x361-DC , + inst_du_bin.splitField: 1, + inst_du_bin.frequency: 010000 , + inst_du_bin.duration: 010000 , + inst_du_bin.ref_time: 000000 , + inst_du_bin.nbits: 10, + inst_du_bin.fields: 'DUEM' , 'DU' , + 'DUSD' , 'DU' , + 'DUDP' , 'DU' , + 'DUWT' , 'DU' , + 'DUSV' , 'DU' , + :: + + 
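The binned collections in this file set splitField: 1. My understanding of MAPL HISTORY, stated as an assumption rather than something this diff defines, is that a split field is written as one output variable per bin of a binned export, suffixed with a three-digit bin index. A sketch of that naming, with an invented helper name:

def split_field_names(field: str, nbins: int) -> list[str]:
    # Per-bin output names for a split binned export, e.g. dust with 5 bins.
    return [f"{field}{b:03d}" for b in range(1, nbins + 1)]

# DU2G_instance_DU.rc later in this patch sets nbins: 5 for dust:
assert split_field_names("DU", 5) == ["DU001", "DU002", "DU003", "DU004", "DU005"]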
tavg_du_bin.format: 'CFIO' , + tavg_du_bin.descr: '2d,Hourly,Instantaneous' + tavg_du_bin.template: '%y4%m2%d2_%h2%n2z.nc4', + tavg_du_bin.mode: 'time-averaged' + tavg_du_bin.grid_label: PC720x361-DC , + tavg_du_bin.splitField: 1, + tavg_du_bin.frequency: 030000 , + tavg_du_bin.duration: 010000 , + tavg_du_bin.ref_time: 000000 , + tavg_du_bin.nbits: 10, + tavg_du_bin.fields: 'DUEM' , 'DU' , + 'DUSD' , 'DU' , + 'DUDP' , 'DU' , + 'DUWT' , 'DU' , + 'DUSV' , 'DU' , + :: + + inst_ss_bin.format: 'CFIO' , + inst_ss_bin.descr: '2d,Hourly,Instantaneous' + inst_ss_bin.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_ss_bin.mode: 'instantaneous' + inst_ss_bin.grid_label: PC720x361-DC , + inst_ss_bin.splitField: 1, + inst_ss_bin.frequency: 060000 , + inst_ss_bin.duration: 010000 , + inst_ss_bin.ref_time: 000000 , + inst_ss_bin.nbits: 10, + inst_ss_bin.fields: 'SSEM' , 'SS' , + 'SSSD' , 'SS' , + 'SSDP' , 'SS' , + 'SSWT' , 'SS' , + 'SSSV' , 'SS' , + :: + + inst_ca_bin.format: 'CFIO' , + inst_ca_bin.descr: '3d,Hourly,Instantaneous,Model-Level' + inst_ca_bin.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_ca_bin.mode: 'instantaneous' + inst_ca_bin.grid_label: PC720x361-DC , + inst_ca_bin.splitField: 1, + inst_ca_bin.frequency: 120000 , + inst_ca_bin.duration: 010000 , + inst_ca_bin.ref_time: 000000 , + inst_ca_bin.nbits: 10, + inst_ca_bin.fields: 'CAEMCA.bc' , 'CA.bc' , + 'CAEMCA.oc' , 'CA.oc' , + 'CASDCA.bc' , 'CA.bc' , + 'CASDCA.oc' , 'CA.oc' , + 'CADPCA.bc' , 'CA.bc' , + 'CADPCA.oc' , 'CA.oc' , + 'CAWTCA.bc' , 'CA.bc' , + 'CAWTCA.oc' , 'CA.oc' , + 'CASVCA.bc' , 'CA.bc' , + 'CASVCA.oc' , 'CA.oc' , + :: + + inst_ni_bin.format: 'CFIO' , + inst_ni_bin.descr: '3d,Hourly,Instantaneous,Model-Level' + inst_ni_bin.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_ni_bin.mode: 'instantaneous', + inst_ni_bin.grid_label: PC720x361-DC , + inst_ni_bin.splitField: 1, + inst_ni_bin.frequency: 120000 , + inst_ni_bin.duration: 010000 , + inst_ni_bin.ref_time: 000000 , + inst_ni_bin.nbits: 10, + inst_ni_bin.fields: 'NIHT' , 'NI' , + 'NISD' , 'NI' , + 'NIDP' , 'NI' , + 'NIWT' , 'NI' , + 'NISV' , 'NI' , + :: + + inst_su_bin.format: 'CFIO' , + inst_su_bin.descr: '3d,Hourly,Instantaneous,Model-Level' + inst_su_bin.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_su_bin.mode: 'instantaneous', + inst_su_bin.grid_label: PC720x361-DC , + inst_su_bin.splitField: 1, + inst_su_bin.frequency: 120000 , + inst_su_bin.duration: 010000 , + inst_su_bin.ref_time: 000000 , + inst_su_bin.nbits: 10, + inst_su_bin.fields: 'SUEM' , 'SU', + 'SUSD' , 'SU', + 'SUDP' , 'SU', + 'SUWT' , 'SU', + 'SUSV' , 'SU', + :: + +# +# Other 2d diagnostics +# + inst_2d.format: 'CFIO' , + inst_2d.descr: '3d,Hourly,Instantaneous' + inst_2d.template: '%y4%m2%d2_%h2%n2z.nc4', + inst_2d.archive: '%c/Y%y4', + inst_2d.mode: 'instantaneous' + inst_2d.frequency: 030000, + inst_2d.duration: 030000, + inst_2d.ref_time: 000000, + inst_2d.grid_label: PC720x361-DC + inst_2d.fields: 'DUSMASS' , 'DU' , + 'DUCMASS' , 'DU' , + 'DUSMASS25' , 'DU' , + 'DUCMASS25' , 'DU' , + 'DUAERIDX' , 'DU' , + 'DUFLUXU' , 'DU' , + 'DUFLUXV' , 'DU' , + 'DUANGSTR' , 'DU' , + 'SSSMASS' , 'SS' , + 'SSCMASS' , 'SS' , + 'SSSMASS25' , 'SS' , + 'SSCMASS25' , 'SS' , + 'SSAERIDX' , 'SS' , + 'SSANGSTR' , 'SS' , + 'SSFLUXU' , 'SS' , + 'SSFLUXV' , 'SS' , + 'CAEMANCA.bc' , 'CA.bc' , + 'CAEMANCA.oc' , 'CA.oc' , + 'CAEMBBCA.bc' , 'CA.bc' , + 'CAEMBBCA.oc' , 'CA.oc' , + 'CAEMBFCA.bc' , 'CA.bc' , + 'CAEMBFCA.oc' , 'CA.oc' , + 'CAEMBGCA.bc' , 'CA.bc' , + 'CAEMBGCA.oc' , 'CA.oc' , + 'CAHYPHILCA.bc' , 'CA.bc' , + 'CAHYPHILCA.oc' , 'CA.oc' , + 
'CAPSOACA.bc' , 'CA.bc' , + 'CAPSOACA.oc' , 'CA.oc' , + 'CASMASSCA.bc' , 'CA.bc' , + 'CASMASSCA.oc' , 'CA.oc' , + 'CACMASSCA.bc' , 'CA.bc' , + 'CACMASSCA.oc' , 'CA.oc' , + 'CAANGSTRCA.bc' , 'CA.bc' , + 'CAANGSTRCA.oc' , 'CA.oc' , + 'CAFLUXUCA.bc' , 'CA.bc' , + 'CAFLUXUCA.oc' , 'CA.oc' , + 'CAFLUXVCA.bc' , 'CA.bc' , + 'CAFLUXVCA.oc' , 'CA.oc' , + 'CAAERIDXCA.bc' , 'CA.bc' , + 'CAAERIDXCA.oc' , 'CA.oc' , + 'NIPNO3AQ' , 'NI' , + 'NIPNH4AQ' , 'NI' , + 'NIPNH3AQ' , 'NI' , + 'NH3EM' , 'NI' , + 'NH3DP' , 'NI' , + 'NH3WT' , 'NI' , + 'NH3SV' , 'NI' , + 'NH4SD' , 'NI' , + 'NH4DP' , 'NI' , + 'NH4WT' , 'NI' , + 'NH4SV' , 'NI' , + 'HNO3SMASS' , 'NI' , + 'NH3SMASS' , 'NI' , + 'NH4SMASS' , 'NI' , + 'NISMASS' , 'NI' , + 'NISMASS25' , 'NI' , + 'HNO3CMASS' , 'NI' , + 'NH3CMASS' , 'NI' , + 'NH4CMASS' , 'NI' , + 'NICMASS' , 'NI' , + 'NICMASS25' , 'NI' , + 'NIANGSTR' , 'NI' , + 'NIFLUXU' , 'NI' , + 'NIFLUXV' , 'NI' , + 'SUPSO2' , 'SU' , + 'SUPSO4' , 'SU' , + 'SUPSO4G' , 'SU' , + 'SUPSO4AQ' , 'SU' , + 'SUPSO4WT' , 'SU' , + 'SUPMSA' , 'SU' , + 'SO2SMASS' , 'SU' , + 'SO2CMASS' , 'SU' , + 'SO4SMASS' , 'SU' , + 'SO4CMASS' , 'SU' , + 'DMSSMASS' , 'SU' , + 'DMSCMASS' , 'SU' , + 'MSASMASS' , 'SU' , + 'MSACMASS' , 'SU' , + 'SUANGSTR' , 'SU' , + 'SUFLUXU' , 'SU' , + 'SUFLUXV' , 'SU' , + 'SO4EMAN' , 'SU' , + 'SO2EMAN' , 'SU' , + 'SO2EMBB' , 'SU' , + 'SO2EMVN' , 'SU' , + 'SO2EMVE' , 'SU' , + :: + +# +# 3d diagnostics +# + inst_3d.format: 'CFIO' , + inst_3d.template: '%y4%m2%d2_%h2%n2z.nc4' , + inst_3d.archive: '%c/Y%y4' , + inst_3d.mode: 'instantaneous' + inst_3d.frequency: 060000, + inst_3d.duration: 010000, + inst_3d.ref_time: 000000, + inst_3d.grid_label: PC720x361-DC + inst_3d.fields: 'DUMASS' , 'DU', + 'DUMASS25' , 'DU', + 'DUCONC' , 'DU', + 'SSMASS' , 'SS', + 'SSMASS25' , 'SS', + 'SSCONC' , 'SS', + 'CAMASSCA.bc' , 'CA.bc' , + 'CACONCCA.bc' , 'CA.bc' , + 'CAMASSCA.oc' , 'CA.oc' , + 'CACONCCA.oc' , 'CA.oc' , + 'SO4MASS' , 'SU', + 'SO4SAREA' , 'SU', + 'SO4SNUM' , 'SU', + 'SUCONC' , 'SU', + 'PSO2' , 'SU', + 'PMSA' , 'SU', + 'PSO4' , 'SU', + 'PSO4G' , 'SU', + 'PSO4WET' , 'SU', + 'PSO4AQ' , 'SU', + 'DMS' , 'SU', + 'SO2' , 'SU', + 'SO4' , 'SU', + 'MSA' , 'SU', + :: + + +# +# Radiation-related diagnostics +# + inst_aod.format: 'CFIO' , + inst_aod.template: '%y4%m2%d2_%h2%n2z.nc4' , + inst_aod.archive: '%c/Y%y4' , + inst_aod.mode: 'instantaneous' + inst_aod.frequency: 060000, + inst_aod.duration: 010000, + inst_aod.ref_time: 000000, + inst_aod.grid_label: PC720x361-DC + inst_aod.fields: 'CA.bcEXTTAU' , 'CA.bc' , 'AOD_BC', + 'CA.ocEXTTAU' , 'CA.oc' , 'AOD_OC', + 'DUEXTTAU' , 'DU' , 'AOD_DU', + 'SSEXTTAU' , 'SS' , 'AOD_SS', + 'SUEXTTAU' , 'SU' , 'AOD_SU', + 'TOTEXTTAU' , 'GOCART2G' , 'AOD' , + :: + + + tavg_2d_rad.format: 'CFIO' , + tavg_2d_rad.template: '%y4%m2%d2_%h2%n2z.nc4', + tavg_2d_rad.archive: '%c/Y%y4', + tavg_2d_rad.mode: 'time-averaged', + tavg_2d_rad.frequency: 120000, + tavg_2d_rad.duration: 120000, + tavg_2d_rad.ref_time: 000000, + tavg_2d_rad.grid_label: PC720x361-DC + tavg_2d_rad.fields: 'CA.bcEXTTAU' , 'CA.bc' , + 'CA.ocEXTTAU' , 'CA.oc' , + 'CASCATAUCA.bc' , 'CA.bc' , + 'CASCATAUCA.oc' , 'CA.oc' , + 'DUEXTTAU' , 'DU' , + 'DUSCATAU' , 'DU' , + 'DUEXTT25' , 'DU' , + 'DUSCAT25' , 'DU' , + 'DUEXTTFM' , 'DU' , + 'DUSCATFM' , 'DU' , + 'NIEXTTFM' , 'NI' , + 'NISCATFM' , 'NI' , + 'NIEXTT25' , 'NI' , + 'NISCAT25' , 'NI' , + 'NIEXTTAU' , 'NI' , + 'NISCATAU' , 'NI' , + 'SSEXTTAU' , 'SS' , + 'SSSCATAU' , 'SS' , + 'SSEXTT25' , 'SS' , + 'SSSCAT25' , 'SS' , + 'SSEXTTFM' , 'SS' , + 'SSSCATFM' , 'SS' , + 'SUEXTTAU' , 'SU' , + 
'SUSCATAU' , 'SU' , + :: + + tavg_3d_rad.format: 'CFIO' , + tavg_3d_rad.template: '%y4%m2%d2_%h2%n2z.nc4', + tavg_3d_rad.archive: '%c/Y%y4', + tavg_3d_rad.mode: 'time-averaged', + tavg_3d_rad.frequency: 120000, + tavg_3d_rad.duration: 120000, + tavg_3d_rad.ref_time: 000000, + tavg_3d_rad.grid_label: PC720x361-DC + tavg_3d_rad.splitField: 1, + tavg_3d_rad.fields: 'CAEXTCOEFCA.bc' , 'CA.bc' , + 'CAEXTCOEFCA.oc' , 'CA.oc' , + 'CASCACOEFCA.bc' , 'CA.bc' , + 'CASCACOEFCA.oc' , 'CA.oc' , + 'DUEXTCOEF' , 'DU' , + 'DUSCACOEF' , 'DU' , + 'NIEXTCOEF' , 'NI' , + 'NISCACOEF' , 'NI' , + 'SSEXTCOEF' , 'SS' , + 'SSSCACOEF' , 'SS' , + 'SUEXTCOEF' , 'SU' , + 'SUSCACOEF' , 'SU' , + :: diff --git a/parm/ufs/gocart/AGCM.rc b/parm/ufs/gocart/AGCM.rc new file mode 100644 index 0000000000..9539b49655 --- /dev/null +++ b/parm/ufs/gocart/AGCM.rc @@ -0,0 +1,5 @@ +# ------------------------------------------ +# Atmospheric Model Configuration Parameters +# ------------------------------------------ +# +# This file must be present but empty for UFS Aerosols diff --git a/parm/ufs/gocart/CA2G_instance_CA.bc.rc b/parm/ufs/gocart/CA2G_instance_CA.bc.rc new file mode 100644 index 0000000000..f6411e2c9d --- /dev/null +++ b/parm/ufs/gocart/CA2G_instance_CA.bc.rc @@ -0,0 +1,41 @@ +# +# Resource file for Black Carbon parameters. +# + +nbins: 2 + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_BC.v1_3.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_BC.v1_3.nc + +# Aircraft emission factor: convert input unit to kg C +aircraft_fuel_emission_factor: 1.0000 + +# Heights [m] of LTO, CDS and CRS aviation emissions layers +aviation_vertical_layers: 0.0 100.0 9.0e3 10.0e3 + +# Initially hydrophobic portion +hydrophobic_fraction: 0.8 + +# Scavenging efficiency per bin [km-1] (NOT USED UNLESS RAS IS CALLED) +fscav: 0.0 0.4 + +# Dry particle density [kg m-3] +particle_density: 1800 1800 + +# Molecular weight of species [kg mole-1] +molecular_weight: 0.18 0.18 + +# Number of particles per kg mass +fnum: 1.50e19 1.50e19 + +# Number median radius [um] +particle_radius_microns: 0.35 0.35 + +rhFlag: 0 + +# Sigma of lognormal number distribution +sigma: 2.0 2.0 + +pressure_lid_in_hPa: 0.01 + +point_emissions_srcfilen: /dev/null diff --git a/parm/ufs/gocart/CA2G_instance_CA.br.rc b/parm/ufs/gocart/CA2G_instance_CA.br.rc new file mode 100644 index 0000000000..41360831f3 --- /dev/null +++ b/parm/ufs/gocart/CA2G_instance_CA.br.rc @@ -0,0 +1,48 @@ +# +# Resource file for BR parameters. 
+# + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_BRC.v1_5.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_BRC.v1_5.nc + +# Aircraft emission factor: convert input unit to kg C +aircraft_fuel_emission_factor: 1.0000 + +# Heights [m] of LTO, CDS and CRS aviation emissions layers +aviation_vertical_layers: 0.0 100.0 9.0e3 10.0e3 + +# Fraction of biogenic VOCs emissions for SOA production +monoterpenes_emission_fraction: 0.0 +isoprene_emission_fraction: 0.0 + +# Ratio of POM/BRC -> convert source masses from carbon to POM +pom_ca_ratio: 1.8 + +# Initially hydrophobic portion +hydrophobic_fraction: 0.5 + +# Scavenging efficiency per bin [km-1] (NOT USED UNLESS RAS IS CALLED) +fscav: 0.0 0.4 + +# particle radius +particle_radius_microns: 0.35 0.35 + +rhFlag: 0 + +# Dry particle density [kg m-3] +particle_density: 1800 1800 + +# Molecular weight of species [kg mole-1] +molecular_weight: 0.18 0.18 + +# Number of particles per kg mass +fnum: 9.76e17 9.76e17 + +# Sigma of lognormal number distribution +sigma: 2.20 2.20 + +nbins: 2 + +pressure_lid_in_hPa: 0.01 + +point_emissions_srcfilen: /dev/null diff --git a/parm/ufs/gocart/CA2G_instance_CA.oc.rc b/parm/ufs/gocart/CA2G_instance_CA.oc.rc new file mode 100644 index 0000000000..6e3f5ef978 --- /dev/null +++ b/parm/ufs/gocart/CA2G_instance_CA.oc.rc @@ -0,0 +1,48 @@ +# +# Resource file for Organic Carbon parameters. +# + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_OC.v1_3.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_OC.v1_3.nc + +# Aircraft emission factor: convert input unit to kg C +aircraft_fuel_emission_factor: 1.0000 + +# Heights [m] of LTO, CDS and CRS aviation emissions layers +aviation_vertical_layers: 0.0 100.0 9.0e3 10.0e3 + +# Fraction of biogenic VOCs emissions for SOA production +monoterpenes_emission_fraction: 0.05 +isoprene_emission_fraction: 0.03 + +# Ratio of POM/OC -> convert source masses from carbon to POM +pom_ca_ratio: 1.8 + +# particle radius +particle_radius_microns: 0.35 0.35 + +rhFlag: 0 + +# Initially hydrophobic portion +hydrophobic_fraction: 0.5 + +# Scavenging efficiency per bin [km-1] (NOT USED UNLESS RAS IS CALLED) +fscav: 0.0 0.4 + +# Dry particle density [kg m-3] +particle_density: 1800 1800 + +# Molecular weight of species [kg mole-1] +molecular_weight: 0.18 0.18 + +# Number of particles per kg mass +fnum: 9.76e17 9.76e17 + +# Sigma of lognormal number distribution +sigma: 2.20 2.20 + +pressure_lid_in_hPa: 0.01 + +nbins: 2 + +point_emissions_srcfilen: /dev/null diff --git a/parm/ufs/gocart/CAP.rc b/parm/ufs/gocart/CAP.rc new file mode 100644 index 0000000000..2b8e71975b --- /dev/null +++ b/parm/ufs/gocart/CAP.rc @@ -0,0 +1,82 @@ +MAPLROOT_COMPNAME: AERO + ROOT_NAME: AERO +ROOT_CF: AERO.rc +HIST_CF: AERO_HISTORY.rc +EXTDATA_CF: AERO_ExtData.rc + +REPORT_THROUGHPUT: .false. + +USE_SHMEM: 0 + +GridType: Cubed-Sphere + +MAPL_ENABLE_TIMERS: NO +MAPL_ENABLE_MEMUTILS: NO +PRINTSPEC: 0 # (0: OFF, 1: IMPORT & EXPORT, 2: IMPORT, 3: EXPORT) + + +# Meteorological fields imported from atmospheric model +# ----------------------------------------------------- +CAP_IMPORTS: + FROCEAN + FRACI + FRSNOW + LWI + U10M + V10M + USTAR + TS + DZ + FRLAKE + AREA + ZPBL + SH + Z0H + CN_PRCP + NCN_PRCP + AIRDENS + DELP + T + RH2 + ZLE + PLE + PFL_LSAN + PFI_LSAN + U + V + WET1 + SLC + FCLD +:: + + +# Prognostic Tracers Table +# GOCARTname,GOCARTcomp. 
AtmTracerName #--------------------------------------- +CAP_EXPORTS: + SS,SS seas* + DU,DU dust* + DMS,SU dms + MSA,SU msa + SO2,SU so2 + SO4,SU so4 + CA.bcphobic,CA.bc bc1 + CA.bcphilic,CA.bc bc2 + CA.ocphobic,CA.oc oc1 + CA.ocphilic,CA.oc oc2 +:: +# NH3,NI nh3 +# NH4a,NI nh4a +# NO3an1,NI no3an1 +# NO3an2,NI no3an2 +# NO3an3,NI no3an3 +# :: + + +# Diagnostic Tracers Table (only PM10 & PM25 available) +# InternalName AtmTracerName +#--------------------------------------- +CAP_DIAGNOSTICS: + PM10 pm10 + PM25 pm25 +:: diff --git a/parm/ufs/gocart/DU2G_instance_DU.rc b/parm/ufs/gocart/DU2G_instance_DU.rc new file mode 100644 index 0000000000..6c30cdf06b --- /dev/null +++ b/parm/ufs/gocart/DU2G_instance_DU.rc @@ -0,0 +1,48 @@ +# +# Resource file for Dust parameters. +# + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_DU.v15_3.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_DU.v15_3.nc + +particle_radius_microns: 0.73 1.4 2.4 4.5 8.0 + +radius_lower: 0.1 1.0 1.8 3.0 6.0 + +radius_upper: 1.0 1.8 3.0 6.0 10.0 + +source_fraction: 1.0 1.0 1.0 1.0 1.0 + +# units [kg/m-3] +particle_density: 2500. 2650. 2650. 2650. 2650. + +# Resolution dependent tuning constant for emissions (a,b,c,d,e,f) (Ginoux, K14) +Ch_DU: 0.2 0.2 0.07 0.07 0.07 0.056 + +# Scavenging efficiency per bin [km-1] +fscav: 0.2 0.2 0.2 0.2 0.2 # + +# Molecular weight of species [kg mole-1] +molecular_weight: 0.1 0.1 0.1 0.1 0.1 + +# Number of particles per kg mass +fnum: 2.45e14 3.28e13 6.52e12 9.89e11 1.76e11 + +rhFlag: 0 + +# Maring settling velocity correction +maringFlag: .true. + +nbins: 5 + +pressure_lid_in_hPa: 0.01 + +# Emissions methods +emission_scheme: fengsha # choose among: fengsha, ginoux, k14 + +# FENGSHA settings +alpha: 0.1 +gamma: 1.0 +soil_moisture_factor: 1 +soil_drylimit_factor: 1 +vertical_to_horizontal_flux_ratio_limit: 2.e-04 diff --git a/parm/ufs/gocart/ExtData.gbbepx b/parm/ufs/gocart/ExtData.gbbepx new file mode 100644 index 0000000000..3bd516c772 --- /dev/null +++ b/parm/ufs/gocart/ExtData.gbbepx @@ -0,0 +1,8 @@ +#====== BIOMASS BURNING EMISSIONS ======================================= + +# GBBEPx +#-------------------------------------------------------------------------------------------------------------------------------- +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 SO2 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 OC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 BC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +# EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 NH3 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc diff --git a/parm/ufs/gocart/ExtData.none b/parm/ufs/gocart/ExtData.none new file mode 100644 index 0000000000..15ad023eb8 --- /dev/null +++ b/parm/ufs/gocart/ExtData.none @@ -0,0 +1,8 @@ +#====== BIOMASS BURNING EMISSIONS ======================================= + +# NONE +#-------------------------------------------------------------------------------------------------------------------------------- +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass /dev/null +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass /dev/null +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass /dev/null +EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none none biomass /dev/null diff --git a/parm/ufs/gocart/ExtData.other b/parm/ufs/gocart/ExtData.other new file mode 100644 index
0000000000..789576305e --- /dev/null +++ b/parm/ufs/gocart/ExtData.other @@ -0,0 +1,147 @@ +# -------------|-------|-------|--------|----------------------|--------|--------|-------------|----------| +# Import | | | Regrid | Refresh | OffSet | Scale | Variable On | File | +# Name | Units | Clim | Method | Time Template | Factor | Factor | File | Template | +# -------------|-------|-------|--------|----------------------|--------|--------|-------------|----------| + +#====== Atmospheric Parameters ======================================= +TROPP 'Pa' Y N - 0.0 1.0 TROPP /dev/null:10000. + +#====== Dust Imports ================================================= +# FENGSHA input files. Note: regridding should be N or E - Use files with _FillValue != NaN +DU_CLAY '1' Y E - none none clayfrac ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_SAND '1' Y E - none none sandfrac ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_SILT '1' Y E - none none siltfrac /dev/null +DU_SSM '1' Y E - none none sep ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_UTHRES '1' Y E - none none uthres ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc + +#====== Sulfate Sources ================================================= +# Anthropogenic (BF & FF) emissions -- allowed to input as two layers +SU_ANTHROL1 NA N Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +SU_ANTHROL2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_elev ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc + +# Ship emissions +SU_SHIPSO2 NA N Y %y4-%m2-%d2t12:00:00 none none SO2_ship ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +SU_SHIPSO4 NA N Y %y4-%m2-%d2t12:00:00 none none SO4_ship ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc + +# Aircraft fuel consumption +SU_AIRCRAFT NA Y Y %y4-%m2-%d2t12:00:00 none none none /dev/null + +# DMS concentration +SU_DMSO NA Y Y %y4-%m2-%d2t12:00:00 none none conc ExtData/MERRA2/sfc/DMSclim_sfcconcentration.x360_y181_t12.Lana2011.nc4 + +# Aviation emissions during the three phases of flight +SU_AVIATION_LTO NA Y Y %y4-%m2-%d2t12:00:00 none none so2_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_so2.aviation_lto.x3600_y1800_t12.2010.nc4 +SU_AVIATION_CDS NA Y Y %y4-%m2-%d2t12:00:00 none none so2_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_so2.aviation_cds.x3600_y1800_t12.2010.nc4 +SU_AVIATION_CRS NA Y Y %y4-%m2-%d2t12:00:00 none none so2_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_so2.aviation_crs.x3600_y1800_t12.2010.nc4 + +# H2O2, OH and NO3 mixing ratios +# -------------------------------------------------------------- +# If using 64 levels please replace this section with the correct values (ie replace 127 with 64) + +SU_H2O2 NA N Y %y4-%m2-%d2t12:00:00 none none h2o2 ExtData/PIESA/L127/A2_ACCMIP_gmic_MERRA_oh_h2o2_no3.x144_y91_z127_t14.%y4.nc +SU_OH NA N Y %y4-%m2-%d2t12:00:00 none none oh ExtData/PIESA/L127/A2_ACCMIP_gmic_MERRA_oh_h2o2_no3.x144_y91_z127_t14.%y4.nc +SU_NO3 NA N Y %y4-%m2-%d2t12:00:00 none none no3 ExtData/PIESA/L127/A2_ACCMIP_gmic_MERRA_oh_h2o2_no3.x144_y91_z127_t14.%y4.nc +#--------------------------------------------------------------- + +# Production of SO2 from OCS oxidation +pSO2_OCS NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null + +#SU_regionMask NA N v - none none REGION_MASK 
/scratch1/NCEPDEV/nems/Raffaele.Montuoro/data/NASA/ExtData/PIESA/sfc/ARCTAS.region_mask.x540_y361.2008.nc + +#=========== Carbonaceous aerosol sources =========================================== +# ORGANIC CARBON +# --------------- + +# # VOCs - OFFLINE MEGAN BIOG +OC_ISOPRENE NA N Y %y4-%m2-%d2t12:00:00 none none isoprene ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc +OC_LIMO NA N Y %y4-%m2-%d2t12:00:00 none none limo ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc +OC_MTPA NA N Y %y4-%m2-%d2t12:00:00 none none mtpa ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc +OC_MTPO NA N Y %y4-%m2-%d2t12:00:00 none none mtpo ExtData/nexus/MEGAN_OFFLINE_BVOC/v2019-10/%y4/MEGAN.OFFLINE.BIOVOC.%y4.emis.%y4%m2%d2.nc + +# Biofuel Source -- Included in AeroCom anthropogenic emissions +OC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null + +# Anthropogenic (BF & FF) emissions -- allowed to input as two layers +OC_ANTEOC1 NA N Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +OC_ANTEOC2 NA N Y %y4-%m2-%d2t12:00:00 none none OC_elev ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc + +# EDGAR based ship emissions +OC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none OC_ship ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc + +# Aircraft fuel consumption +OC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null + +# Aviation emissions during the three phases of flight +OC_AVIATION_LTO NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_oc.aviation_lto.x3600_y1800_t12.2010.nc4 +OC_AVIATION_CDS NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_oc.aviation_cds.x3600_y1800_t12.2010.nc4 +OC_AVIATION_CRS NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_oc.aviation_crs.x3600_y1800_t12.2010.nc4 + +# SOA production +pSOA_ANTHRO_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null + +#============================================================================================================ +# BLACK CARBON +# ------------ + +# Biofuel Source -- Included in AeroCom anthropogenic emissions +BC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null + +# Anthropogenic (BF & FF) emissions -- allowed to input as two layers +BC_ANTEBC1 NA N Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +BC_ANTEBC2 NA N Y %y4-%m2-%d2t12:00:00 none none BC_elev ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc + +# EDGAR based ship emissions +BC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none BC_ship ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc + +# Aircraft fuel consumption +BC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none bc_aviation /dev/null + +# Aviation emissions during the LTO, CDS and CRS phases of flight +BC_AVIATION_LTO NA Y Y %y4-%m2-%d2t12:00:00 none none bc_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_bc.aviation_lto.x3600_y1800_t12.2010.nc4 +BC_AVIATION_CDS NA Y Y %y4-%m2-%d2t12:00:00 none none bc_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_bc.aviation_cds.x3600_y1800_t12.2010.nc4 +BC_AVIATION_CRS NA Y Y %y4-%m2-%d2t12:00:00 none none bc_aviation ExtData/PIESA/sfc/HTAP/v2.2/htap-v2.2.emis_bc.aviation_crs.x3600_y1800_t12.2010.nc4 +
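Each uncommented row in the ExtData table above follows the nine-column layout given in the header comment at the top of this file: import name, units, clim flag, regrid method, refresh time template, offset factor, scale factor, variable on file, and file template. A hedged parsing sketch for the plain whitespace-delimited rows follows; note it assumes single-token units, so quoted units such as 'kg m-2 s-1' in the commented nitrate rows would need real tokenization.

# Column names follow the header comment in ExtData.other.
EXTDATA_COLUMNS = [
    "import_name", "units", "clim", "regrid_method", "refresh_template",
    "offset_factor", "scale_factor", "file_variable", "file_template",
]

def parse_extdata_row(row: str) -> dict[str, str]:
    # Naive split; adequate for the unquoted rows in this file.
    tokens = row.split()
    if len(tokens) != len(EXTDATA_COLUMNS):
        raise ValueError(f"expected {len(EXTDATA_COLUMNS)} fields, got {len(tokens)}")
    return dict(zip(EXTDATA_COLUMNS, tokens))

row = ("BC_SHIP NA N Y %y4-%m2-%d2t12:00:00 none none BC_ship "
       "ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc")
assert parse_extdata_row(row)["file_variable"] == "BC_ship"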
+#============================================================================================================ +# BROWN CARBON +# ------------ +# Biomass burning -- QFED-v2.x +BRC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass /dev/null + +# Terpene emission +BRC_TERPENE NA Y Y %y4-%m2-%d2t12:00:00 none none terpene /dev/null + +# Biofuel Source -- Included in AeroCom anthropogenic emissions +BRC_BIOFUEL NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null + +# Anthropogenic (BF & FF) emissions -- allowed to input as two layers +BRC_ANTEBRC1 NA Y Y %y4-%m2-%d2t12:00:00 none none anteoc1 /dev/null +BRC_ANTEBRC2 NA Y Y %y4-%m2-%d2t12:00:00 none none anteoc2 /dev/null + +# EDGAR based ship emissions +BRC_SHIP NA Y Y %y4-%m2-%d2t12:00:00 none none oc_ship /dev/null + +# Aircraft fuel consumption +BRC_AIRCRAFT NA N Y %y4-%m2-%d2t12:00:00 none none none /dev/null + +# Aviation emissions during the three phases of flight +BRC_AVIATION_LTO NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null +BRC_AVIATION_CDS NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null +BRC_AVIATION_CRS NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null + +# SOA production +pSOA_BIOB_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null + +# # ======= Nitrate Sources ======== +# EMI_NH3_AG 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_ag ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_EN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_en /dev/null +# EMI_NH3_IN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_in ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_RE 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_re ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_TR 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_tr ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_OC 'kg m-2 s-1' Y Y %y4-%m2-%d2T12:00:00 none none emiss_ocn ExtData/PIESA/sfc/GEIA.emis_NH3.ocean.x576_y361.t12.20080715_12z.nc4 + +# # -------------------------------------------------------------- +# # If using 64 levels please replace this section with the correct values (ie replace 127 with 64) +# NITRATE_HNO3 'mol mol-1' Y N %y4-%m2-%d2T12:00:00 none 0.20 hno3 ExtData/PIESA/L127/GMI.vmr_HNO3.x144_y91.t12.2006.nc4 +# # -------------------------------------------------------------- +# NI_regionMask NA Y V - none none REGION_MASK ExtData/PIESA/sfc/ARCTAS.region_mask.x540_y361.2008.nc diff --git a/parm/ufs/gocart/ExtData.qfed b/parm/ufs/gocart/ExtData.qfed new file mode 100644 index 0000000000..b3a721211e --- /dev/null +++ b/parm/ufs/gocart/ExtData.qfed @@ -0,0 +1,8 @@ +#====== BIOMASS BURNING EMISSIONS ======================================= + +# QFED +#-------------------------------------------------------------------------------------------------------------------------------- +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_so2.006.%y4%m2%d2.nc4 +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_oc.006.%y4%m2%d2.nc4 +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_bc.006.%y4%m2%d2.nc4 +# EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_nh3.006.%y4%m2%d2.nc4 diff --git a/parm/ufs/gocart/GOCART2G_GridComp.rc b/parm/ufs/gocart/GOCART2G_GridComp.rc new file mode 100644 index 0000000000..2dd63a6d17 --- /dev/null +++ 
b/parm/ufs/gocart/GOCART2G_GridComp.rc @@ -0,0 +1,43 @@ +# +# !RESOURCE: GOCART2G_GridComp.rc --- GOCART2G resource file +# +# DESCRIPTION: +# The GOCART2G resource file is used to control basic +# properties of the GOCART2G Grid Components. Instances are +# defined here. Default is the data component. +# +# Only the FIRST entry in the ACTIVE_INSTANCES_XX is given as +# the AERO_PROVIDER. +# +# !REVISION HISTORY: +# +# 11Oct2019 E.Sherman GOCART2G resource file has been created +#-------------------------------------------------------------------- + + + # &Label Active Constituents + +# Include the constituent in the simulation? +# ---------------------------------------------------- +ACTIVE_INSTANCES_DU: DU # DU.data +PASSIVE_INSTANCES_DU: + +ACTIVE_INSTANCES_SS: SS # SS.data +PASSIVE_INSTANCES_SS: + +ACTIVE_INSTANCES_SU: SU # SU.data +PASSIVE_INSTANCES_SU: + +ACTIVE_INSTANCES_CA: CA.oc CA.bc # CA.oc.data CA.bc.data +PASSIVE_INSTANCES_CA: + +ACTIVE_INSTANCES_NI: # NI # NI.data +PASSIVE_INSTANCES_NI: + +# Set optics parameters +# --------------------- +aerosol_monochromatic_optics_wavelength_in_nm_from_LUT: 470 550 670 870 +wavelengths_for_profile_aop_in_nm: 470 550 670 870 # must be included in LUT +wavelengths_for_vertically_integrated_aop_in_nm: 470 550 670 870 # must be included in LUT + +use_threads: .TRUE. diff --git a/parm/ufs/gocart/NI2G_instance_NI.rc b/parm/ufs/gocart/NI2G_instance_NI.rc new file mode 100644 index 0000000000..73db601073 --- /dev/null +++ b/parm/ufs/gocart/NI2G_instance_NI.rc @@ -0,0 +1,33 @@ +# +# Resource file for Nitrate parameters. +# + +nbins: 5 + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_NI.v2_5.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_NI.v2_5.nc + +# Scavenging efficiency per bin [km-1] +fscav: 0.0 0.4 0.4 0.4 0.4 + +# Dry particle radius [um], used for settling +particle_radius_microns: 0.0 0.2695 0.2695 2.1 7.57 + +# Dry particle density [kg m-3] +particle_density: 1000 1769 1725 2200 2650 + +# Molecular weight of species [kg mole-1] +molecular_weight: 0.18 0.18 0.18 0.18 0.18 + +# Number of particles per kg mass +fnum: 1.50e19 1.50e19 1.50e19 1.50e19 1.50e19 + +# Number median radius [um] +particle_radius_number: 0.0118 0.0118 0.0118 0.0118 0.0118 + +# Sigma of lognormal number distribution +sigma: 2.0 2.0 2.0 2.0 2.0 + +pressure_lid_in_hPa: 0.01 + +rhFlag: 0 diff --git a/parm/ufs/gocart/SS2G_instance_SS.rc b/parm/ufs/gocart/SS2G_instance_SS.rc new file mode 100644 index 0000000000..d8faa3efa8 --- /dev/null +++ b/parm/ufs/gocart/SS2G_instance_SS.rc @@ -0,0 +1,43 @@ +# +# Resource file for Sea Salt parameters +# + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_SS.v3_3.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_SS.v3_3.nc + +particle_radius_microns: 0.079 0.316 1.119 2.818 7.772 + +radius_lower: 0.03 0.1 0.5 1.5 5.0 + +radius_upper: 0.1 0.5 1.5 5.0 10.0 + +particle_density: 2200. 2200. 2200. 2200. 2200. + +# Scavenging efficiency per bin [km-1] +fscav: 0.4 0.4 0.4 0.4 0.4 + +# Emissions methods and scaling +emission_scheme: 3 # 1 for Gong 2003, 2 for ... +emission_scale: 1.0 1.0 1.0 1.0 1.0 1.0 # a global scaling factor for C96 +sstEmisFlag: 2 # Apply a correction to emissions based on SST (see code) +hoppelFlag: .false. # Apply Hoppel correction (set non-zero, see Fan and Toon 2011) +weibullFlag: .false.
# Apply Weibull distribution (set non-zero, see Fan and Toon 2011) + +# Method of applying relative humidity to particle radius +rhFlag: 2 # RH swelling of Seasalt (1 for Fitzgerald 1975, + # 2 for Gerber 1985 method) + +# Molecular weight of species [kg mole-1] +molecular_weight: 0.058 0.058 0.058 0.058 0.058 + +# Number of particles per kg mass +fnum: 3.017e17 1.085e16 1.207e14 9.391e12 2.922e11 + +# Number median radius [um] +particle_radius_number: 0.066 0.176 0.885 2.061 6.901 + +nbins: 5 + +pressure_lid_in_hPa: 0.01 + + diff --git a/parm/ufs/gocart/SU2G_instance_SU.rc b/parm/ufs/gocart/SU2G_instance_SU.rc new file mode 100644 index 0000000000..e365827760 --- /dev/null +++ b/parm/ufs/gocart/SU2G_instance_SU.rc @@ -0,0 +1,53 @@ +# +# Resource file for Sulfur parameters. +# + +aerosol_radBands_optics_file: ExtData/optics/opticsBands_SU.v1_3.RRTMG.nc +aerosol_monochromatic_optics_file: ExtData/monochromatic/optics_SU.v1_3.nc + +nbins: 4 + +# Volcanic pointwise sources +volcano_srcfilen: ExtData/nexus/VOLCANO/v2021-09/%y4/%m2/so2_volcanic_emissions_Carns.%y4%m2%d2.rc + +# Heights [m] of LTO, CDS and CRS aviation emissions layers +aviation_vertical_layers: 0.0 100.0 9.0e3 10.0e3 + +# Fraction of anthropogenic emissions that are SO4 +so4_anthropogenic_fraction: 0.03 + +# Aircraft emission factor: go from kg fuel to kg SO2 +aircraft_fuel_emission_factor: 0.0008 + +# Scavenging efficiency per bin [km-1] (NOT USED UNLESS RAS IS CALLED) +fscav: 0.0 0.0 0.4 0.4 + +# Dry particle radius [um], used for settling +particle_radius_microns: 0.0 0.0 0.35 0.0 + +# Type of settling to use (see Chem_SettlingMod) +rhFlag: 4 + +# Dry particle density [kg m-3] +particle_density: -1 -1 1700 -1 + +pressure_lid_in_hPa: 0.01 + +# Molecular weight of species [kg mole-1] +molecular_weight: -1 -1 0.132 -1 + +# Number of particles per kg mass +fnum: -1 -1 9.01e16 -1 + +# Number median radius [um] +particle_radius_number: -1 -1 0.0695 -1 + +# Sigma of lognormal number distribution +sigma: -1 -1 2.03 -1 + +# OH H2O2 NO3 from GMI Combined Stratosphere Troposphere (Lower case yes to enable) +# ------------------------------------------------------------------------------------- +export_H2O2: no +using_GMI_OH: .false. +using_GMI_NO3: .false. +using_GMI_H2O2: .false. diff --git a/parm/ufs/gocart/gocart_tracer.list b/parm/ufs/gocart/gocart_tracer.list new file mode 100644 index 0000000000..8742aff67c --- /dev/null +++ b/parm/ufs/gocart/gocart_tracer.list @@ -0,0 +1,20 @@ +so2 +so4 +dms +msa +bc1 +bc2 +oc1 +oc2 +dust1 +dust2 +dust3 +dust4 +dust5 +seas1 +seas2 +seas3 +seas4 +seas5 +pm25 +pm10 diff --git a/parm/ufs/mom6/MOM_input_template_025 b/parm/ufs/mom6/MOM_input_template_025 new file mode 100644 index 0000000000..df56a3f486 --- /dev/null +++ b/parm/ufs/mom6/MOM_input_template_025 @@ -0,0 +1,902 @@ +! This input file provides the adjustable run-time parameters for version 6 of the Modular Ocean Model (MOM6). +! Where appropriate, parameters are usually given in MKS units. + +! This particular file is for the example in ice_ocean_SIS2/OM4_025. + +! This MOM_input file typically contains only the non-default values that are needed to reproduce this example. +! A full list of parameters for this example can be found in the corresponding MOM_parameter_doc.all file +! which is generated by the model at run-time. +! === module MOM_domains === +TRIPOLAR_N = True ! [Boolean] default = False + ! Use tripolar connectivity at the northern edge of the domain. With + ! TRIPOLAR_N, NIGLOBAL must be even. +NIGLOBAL = @[NX_GLB] ! + !
The total number of thickness grid points in the x-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NJGLOBAL = @[NY_GLB] ! + ! The total number of thickness grid points in the y-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NIHALO = 4 ! default = 4 + ! The number of halo points on each side in the x-direction. With + ! STATIC_MEMORY_ this is set as NIHALO_ in MOM_memory.h at compile time; without + ! STATIC_MEMORY_ the default is NIHALO_ in MOM_memory.h (if defined) or 2. +NJHALO = 4 ! default = 4 + ! The number of halo points on each side in the y-direction. With + ! STATIC_MEMORY_ this is set as NJHALO_ in MOM_memory.h at compile time; without + ! STATIC_MEMORY_ the default is NJHALO_ in MOM_memory.h (if defined) or 2. +! LAYOUT = 32, 18 ! + ! The processor layout that was actually used. +! IO_LAYOUT = 1, 1 ! default = 1 + ! The processor layout to be used, or 0,0 to automatically set the io_layout to + ! be the same as the layout. + +! === module MOM === +USE_REGRIDDING = True ! [Boolean] default = False + ! If True, use the ALE algorithm (regridding/remapping). If False, use the + ! layered isopycnal algorithm. +THICKNESSDIFFUSE = True ! [Boolean] default = False + ! If true, interface heights are diffused with a coefficient of KHTH. +THICKNESSDIFFUSE_FIRST = True ! [Boolean] default = False + ! If true, do thickness diffusion before dynamics. This is only used if + ! THICKNESSDIFFUSE is true. +DT = @[DT_DYNAM_MOM6] ! [s] + ! The (baroclinic) dynamics time step. The time-step that is actually used will + ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode + ! or the coupling timestep in coupled mode.) +DT_THERM = @[DT_THERM_MOM6] ! [s] default = 1800.0 + ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be + ! an integer multiple of DT and less than the forcing or coupling time-step, + ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer + ! multiple of the coupling timestep. By default DT_THERM is set to DT. +THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN] ! [Boolean] default = False + ! If true, the MOM will take thermodynamic and tracer timesteps that can be + ! longer than the coupling timestep. The actual thermodynamic timestep that is + ! used in this case is the largest integer multiple of the coupling timestep + ! that is less than or equal to DT_THERM. +HFREEZE = 20.0 ! [m] default = -1.0 + ! If HFREEZE > 0, melt potential will be computed. The actual depth + ! over which melt potential is computed will be min(HFREEZE, OBLD) + ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default) + ! melt potential will not be computed. +USE_PSURF_IN_EOS = False ! [Boolean] default = False + ! If true, always include the surface pressure contributions in equation of + ! state calculations. +FRAZIL = True ! [Boolean] default = False + ! If true, water freezes if it gets too cold, and the accumulated heat deficit + ! is returned in the surface state. FRAZIL is only used if + ! ENABLE_THERMODYNAMICS is true. +DO_GEOTHERMAL = True ! [Boolean] default = False + ! If true, apply geothermal heating. +BOUND_SALINITY = True ! [Boolean] default = False + ! If true, limit salinity to being positive. (The sea-ice model may ask for more + ! salt than is available and drive the salinity negative otherwise.) +MIN_SALINITY = 0.01 ! [PPT] default = 0.01 + ! 
The minimum value of salinity when BOUND_SALINITY=True. The default is 0.01 + ! for backward compatibility but ideally should be 0. +C_P = 3992.0 ! [J kg-1 K-1] default = 3991.86795711963 + ! The heat capacity of sea water, approximated as a constant. This is only used + ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10 + ! definition of conservative temperature. +CHECK_BAD_SURFACE_VALS = True ! [Boolean] default = False + ! If true, check the surface state for ridiculous values. +BAD_VAL_SSH_MAX = 50.0 ! [m] default = 20.0 + ! The value of SSH above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SSS_MAX = 75.0 ! [PPT] default = 45.0 + ! The value of SSS above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SST_MAX = 55.0 ! [deg C] default = 45.0 + ! The value of SST above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SST_MIN = -3.0 ! [deg C] default = -2.1 + ! The value of SST below which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +DEFAULT_2018_ANSWERS = True ! [Boolean] default = True + ! This sets the default value for the various _2018_ANSWERS parameters. +WRITE_GEOM = 2 ! default = 1 + ! If =0, never write the geometry and vertical grid files. If =1, write the + ! geometry and vertical grid files only for a new simulation. If =2, always + ! write the geometry and vertical grid files. Other values are invalid. +SAVE_INITIAL_CONDS = False ! [Boolean] default = False + ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE. + +! === module MOM_hor_index === +! Sets the horizontal array index types. + +! === module MOM_fixed_initialization === +INPUTDIR = "INPUT" ! default = "." + ! The directory in which input files are found. + +! === module MOM_grid_init === +GRID_CONFIG = "mosaic" ! + ! A character string that determines the method for defining the horizontal + ! grid. Current options are: + ! mosaic - read the grid from a mosaic (supergrid) + ! file set by GRID_FILE. + ! cartesian - use a (flat) Cartesian grid. + ! spherical - use a simple spherical grid. + ! mercator - use a Mercator spherical grid. +GRID_FILE = "ocean_hgrid.nc" ! + ! Name of the file from which to read horizontal grid data. +GRID_ROTATION_ANGLE_BUGS = False ! [Boolean] default = True + ! If true, use an older algorithm to calculate the sine and + ! cosines needed to rotate between grid-oriented directions and + ! true north and east. Differences arise at the tripolar fold. +USE_TRIPOLAR_GEOLONB_BUG = False ! [Boolean] default = True + ! If true, use older code that incorrectly sets the longitude in some points + ! along the tripolar fold to be off by 360 degrees. +TOPO_CONFIG = "file" ! + ! This specifies how bathymetry is specified: + ! file - read bathymetric information from the file + ! specified by (TOPO_FILE). + ! flat - flat bottom set to MAXIMUM_DEPTH. + ! bowl - an analytically specified bowl-shaped basin + ! ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH. + ! spoon - a similar shape to 'bowl', but with a vertical + ! wall at the southern face. + ! halfpipe - a zonally uniform channel with a half-sine + ! profile in the meridional direction. + ! benchmark - use the benchmark test case topography. + ! Neverland - use the Neverland test case topography. + ! DOME - use a slope and channel configuration for the + ! DOME sill-overflow test case. + ! ISOMIP - use a slope and channel configuration for the + !
ISOMIP test case. + ! DOME2D - use a shelf and slope configuration for the + ! DOME2D gravity current/overflow test case. + ! Kelvin - flat but with rotated land mask. + ! seamount - Gaussian bump for spontaneous motion test case. + ! dumbbell - Sloshing channel with reservoirs on both ends. + ! shelfwave - exponential slope for shelfwave test case. + ! Phillips - ACC-like idealized topography used in the Phillips config. + ! dense - Denmark Strait-like dense water formation and overflow. + ! USER - call a user modified routine. +TOPO_FILE = "ocean_topog.nc" ! default = "topog.nc" + ! The file from which the bathymetry is read. +TOPO_EDITS_FILE = "All_edits.nc" ! default = "" + ! The file from which to read a list of i,j,z topography overrides. +ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES] ! default = "False" + ! If true, allow topography overrides to change ocean points to land +MAXIMUM_DEPTH = 6500.0 ! [m] + ! The maximum depth of the ocean. +MINIMUM_DEPTH = 9.5 ! [m] default = 0.0 + ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is + ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is + ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than + ! MASKING_DEPTH are rounded to MINIMUM_DEPTH. + +! === module MOM_open_boundary === +! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply, +! if any. +MASKING_DEPTH = 0.0 ! [m] default = -9999.0 + ! The depth below which to mask points as land points, for which all fluxes are + ! zeroed out. MASKING_DEPTH is ignored if negative. +CHANNEL_CONFIG = "list" ! default = "none" + ! A parameter that determines which set of channels are + ! restricted to specific widths. Options are: + ! none - All channels have the grid width. + ! global_1deg - Sets 16 specific channels appropriate + ! for a 1-degree model, as used in CM2G. + ! list - Read the channel locations and widths from a + ! text file, like MOM_channel_list in the MOM_SIS + ! test case. + ! file - Read open face widths everywhere from a + ! NetCDF file on the model grid. +CHANNEL_LIST_FILE = "MOM_channels_global_025" ! default = "MOM_channel_list" + ! The file from which the list of narrowed channels is read. + +! === module MOM_verticalGrid === +! Parameters providing information about the vertical grid. +NK = 75 ! [nondim] + ! The number of model layers. + +! === module MOM_tracer_registry === + +! === module MOM_EOS === +DTFREEZE_DP = -7.75E-08 ! [deg C Pa-1] default = 0.0 + ! When TFREEZE_FORM=LINEAR, this is the derivative of the freezing potential + ! temperature with pressure. + +! === module MOM_restart === +PARALLEL_RESTARTFILES = True ! [Boolean] default = False + ! If true, each processor writes its own restart file, otherwise a single + ! restart file is generated + +! === module MOM_tracer_flow_control === +USE_IDEAL_AGE_TRACER = False ! [Boolean] default = False + ! If true, use the ideal_age_example tracer package. + +! === module ideal_age_example === + +! === module MOM_coord_initialization === +COORD_CONFIG = "file" ! + ! This specifies how layers are to be defined: + ! ALE or none - used to avoid defining layers in ALE mode + ! file - read coordinate information from the file + ! specified by (COORD_FILE). + ! BFB - Custom coords for buoyancy-forced basin case + ! based on SST_S, T_BOT and DRHO_DT. + ! linear - linear based on interfaces not layers + ! layer_ref - linear based on layer densities + ! 
ts_ref - use reference temperature and salinity + ! ts_range - use range of temperature and salinity + ! (T_REF and S_REF) to determine surface density + ! and GINT calculate internal densities. + ! gprime - use reference density (RHO_0) for surface + ! density and GINT calculate internal densities. + ! ts_profile - use temperature and salinity profiles + ! (read from COORD_FILE) to set layer densities. + ! USER - call a user modified routine. +COORD_FILE = "layer_coord.nc" ! + ! The file from which the coordinate densities are read. +REMAP_UV_USING_OLD_ALG = True ! [Boolean] default = True + ! If true, uses the old remapping-via-a-delta-z method for remapping u and v. If + ! false, uses the new method that remaps between grids described by an old and + ! new thickness. +REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER" + ! Coordinate mode for vertical regridding. Choose among the following + ! possibilities: LAYER - Isopycnal or stacked shallow water layers + ! ZSTAR, Z* - stretched geopotential z* + ! SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf + ! SIGMA - terrain following coordinates + ! RHO - continuous isopycnal + ! HYCOM1 - HyCOM-like hybrid coordinate + ! SLIGHT - stretched coordinates above continuous isopycnal + ! ADAPTIVE - optimize for smooth neutral density surfaces +BOUNDARY_EXTRAPOLATION = True ! [Boolean] default = False + ! When defined, a proper high-order reconstruction scheme is used within + ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM + ! reconstruction will also be used within boundary cells. +ALE_COORDINATE_CONFIG = "HYBRID:hycom1_75_800m.nc,sigma2,FNC1:2,4000,4.5,.01" ! default = "UNIFORM" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter ALE_RESOLUTION + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz +!ALE_RESOLUTION = 7*2.0, 2*2.01, 2.02, 2.03, 2.05, 2.08, 2.11, 2.15, 2.21, 2.2800000000000002, 2.37, 2.48, 2.61, 2.77, 2.95, 3.17, 3.4299999999999997, 3.74, 4.09, 4.49, 4.95, 5.48, 6.07, 6.74, 7.5, 8.34, 9.280000000000001, 10.33, 11.49, 12.77, 14.19, 15.74, 17.450000000000003, 19.31, 21.35, 23.56, 25.97, 28.580000000000002, 31.41, 34.47, 37.77, 41.32, 45.14, 49.25, 53.65, 58.370000000000005, 63.42, 68.81, 74.56, 80.68, 87.21000000000001, 94.14, 101.51, 109.33, 117.62, 126.4, 135.68, 145.5, 155.87, 166.81, 178.35, 190.51, 203.31, 216.78, 230.93, 245.8, 261.42, 277.83 ! [m] + ! The distribution of vertical resolution for the target + ! grid used for Eulerian-like coordinates. For example, + ! in z-coordinate mode, the parameter is a list of level + ! thicknesses (in m). In sigma-coordinate mode, the list + ! is of non-dimensional fractions of the water column. 
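The commented-out ALE_RESOLUTION vector above uses the Fortran-namelist-style count*value shorthand (e.g. 7*2.0 stands for seven consecutive layers of 2.0 m). A minimal Python sketch, illustrative only and not part of the template, of how such a vector can be expanded and sanity-checked against the layer count NK = 75:

def expand_mom_vector(text):
    """Expand a MOM_input vector like '7*2.0, 2*2.01, 2.02' into floats."""
    values = []
    for token in text.split(","):
        token = token.strip()
        if "*" in token:  # 'count*value' shorthand repeats value count times
            count, value = token.split("*")
            values.extend([float(value)] * int(count))
        else:
            values.append(float(token))
    return values

# Hypothetical usage with the first few entries of the vector above; the full
# 75-entry list should satisfy len(dz) == NK.
dz = expand_mom_vector("7*2.0, 2*2.01, 2.02, 2.03")
assert len(dz) == 11 and dz[0] == 2.0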
+!TARGET_DENSITIES = 1010.0, 1014.3034, 1017.8088, 1020.843, 1023.5566, 1025.813, 1027.0275, 1027.9114, 1028.6422, 1029.2795, 1029.852, 1030.3762, 1030.8626, 1031.3183, 1031.7486, 1032.1572, 1032.5471, 1032.9207, 1033.2798, 1033.6261, 1033.9608, 1034.2519, 1034.4817, 1034.6774, 1034.8508, 1035.0082, 1035.1533, 1035.2886, 1035.4159, 1035.5364, 1035.6511, 1035.7608, 1035.8661, 1035.9675, 1036.0645, 1036.1554, 1036.2411, 1036.3223, 1036.3998, 1036.4739, 1036.5451, 1036.6137, 1036.68, 1036.7441, 1036.8062, 1036.8526, 1036.8874, 1036.9164, 1036.9418, 1036.9647, 1036.9857, 1037.0052, 1037.0236, 1037.0409, 1037.0574, 1037.0738, 1037.0902, 1037.1066, 1037.123, 1037.1394, 1037.1558, 1037.1722, 1037.1887, 1037.206, 1037.2241, 1037.2435, 1037.2642, 1037.2866, 1037.3112, 1037.3389, 1037.3713, 1037.4118, 1037.475, 1037.6332, 1037.8104, 1038.0 ! [m] + ! HYBRID target densities for interfaces +REGRID_COMPRESSIBILITY_FRACTION = 0.01 ! [nondim] default = 0.0 + ! When interpolating potential density profiles we can add some artificial + ! compressibility solely to make homogeneous regions appear stratified. +MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" ! default = "NONE" + ! Determines how to specify the maximum interface depths. + ! Valid options are: + ! NONE - there are no maximum interface depths + ! PARAM - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 12.75, 23.25, 36.49, 52.480000000000004, 71.22, 92.71000000000001, 116.94000000000001, 143.92000000000002, 173.65, 206.13, 241.36, 279.33000000000004, 320.05000000000007, 363.5200000000001, 409.7400000000001, 458.7000000000001, 510.4100000000001, 564.8700000000001, 622.0800000000002, 682.0300000000002, 744.7300000000002, 810.1800000000003, 878.3800000000003, 949.3300000000004, 1023.0200000000004, 1099.4600000000005, 1178.6500000000005, 1260.5900000000006, 1345.2700000000007, 1432.7000000000007, 1522.8800000000008, 1615.8100000000009, 1711.490000000001, 1809.910000000001, 1911.080000000001, 2015.0000000000011, 2121.670000000001, 2231.080000000001, 2343.2400000000007, 2458.1500000000005, 2575.8100000000004, 2696.2200000000003, 2819.3700000000003, 2945.2700000000004, 3073.9200000000005, 3205.3200000000006, 3339.4600000000005, 3476.3500000000004, 3615.9900000000002, 3758.38, 3903.52, 4051.4, 4202.03, 4355.41, 4511.54, 4670.41, 4832.03, 4996.4, 5163.5199999999995, 5333.379999999999, 5505.989999999999, 5681.3499999999985, 5859.459999999998, 6040.319999999998, 6223.919999999998, 6410.269999999999, 6599.369999999999, 6791.219999999999, 6985.8099999999995, 7183.15, 7383.24, 7586.08, 7791.67, 8000.0 + ! The list of maximum depths for each interface. +MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE" + ! Determines how to specify the maximum layer thicknesses. + ! Valid options are: + ! NONE - there are no maximum layer thicknesses + ! PARAM - use the vector-parameter MAX_LAYER_THICKNESS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! 
FNC1:string - FNC1:dz_min,H_total,power,precision +!MAX_LAYER_THICKNESS = 400.0, 409.63, 410.32, 410.75, 411.07, 411.32, 411.52, 411.7, 411.86, 412.0, 412.13, 412.24, 412.35, 412.45, 412.54, 412.63, 412.71, 412.79, 412.86, 412.93, 413.0, 413.06, 413.12, 413.18, 413.24, 413.29, 413.34, 413.39, 413.44, 413.49, 413.54, 413.58, 413.62, 413.67, 413.71, 413.75, 413.78, 413.82, 413.86, 413.9, 413.93, 413.97, 414.0, 414.03, 414.06, 414.1, 414.13, 414.16, 414.19, 414.22, 414.24, 414.27, 414.3, 414.33, 414.35, 414.38, 414.41, 414.43, 414.46, 414.48, 414.51, 414.53, 414.55, 414.58, 414.6, 414.62, 414.65, 414.67, 414.69, 414.71, 414.73, 414.75, 414.77, 414.79, 414.83 ! [m] + ! The list of maximum thickness for each layer. +REMAPPING_SCHEME = "PPM_H4" ! default = "PLM" + ! This sets the reconstruction scheme used for vertical remapping for all + ! variables. It can be one of the following schemes: PCM (1st-order + ! accurate) + ! PLM (2nd-order accurate) + ! PPM_H4 (3rd-order accurate) + ! PPM_IH4 (3rd-order accurate) + ! PQM_IH4IH3 (4th-order accurate) + ! PQM_IH6IH5 (5th-order accurate) + +! === module MOM_grid === +! Parameters providing information about the lateral grid. + +! === module MOM_state_initialization === +INIT_LAYERS_FROM_Z_FILE = True ! [Boolean] default = False + ! If true, initialize the layer thicknesses, temperatures, and salinities from a + ! Z-space file on a latitude-longitude grid. + +! === module MOM_initialize_layers_from_Z === +TEMP_SALT_Z_INIT_FILE = "MOM6_IC_TS.nc" ! default = "temp_salt_z.nc" + ! The name of the z-space input file used to initialize + ! temperatures (T) and salinities (S). If T and S are not + ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE + ! must be set. +Z_INIT_FILE_PTEMP_VAR = "temp" ! default = "ptemp" + ! The name of the potential temperature variable in + ! TEMP_Z_INIT_FILE. +Z_INIT_FILE_SALT_VAR = "salt" ! default = "salt" + ! The name of the salinity variable in + ! SALT_Z_INIT_FILE. + +Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False + ! If True, then remap straight to model coordinate from file. +Z_INIT_REMAP_OLD_ALG = True ! [Boolean] default = True + ! If false, uses the preferred remapping algorithm for initialization. If true, + ! use an older, less robust algorithm for remapping. + +! === module MOM_diag_mediator === +!Jiande NUM_DIAG_COORDS = 2 ! default = 1 +NUM_DIAG_COORDS = 1 + ! The number of diagnostic vertical coordinates to use. + ! For each coordinate, an entry in DIAG_COORDS must be provided. +!Jiande DIAG_COORDS = "z Z ZSTAR", "rho2 RHO2 RHO" ! +DIAG_COORDS = "z Z ZSTAR" + ! A list of string tuples associating diag_table modules to + ! a coordinate definition used for diagnostics. Each string + ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] +!DIAG_COORD_DEF_RHO2 = "FILE:diag_rho2.nc,interfaces=rho2" ! default = "WOA09" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter DIAG_COORD_RES_RHO2 + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! 
by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz + +! === module MOM_MEKE === +USE_MEKE = True ! [Boolean] default = False + ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy + ! kinetic energy budget. +MEKE_GMCOEFF = 1.0 ! [nondim] default = -1.0 + ! The efficiency of the conversion of potential energy into MEKE by the + ! thickness mixing parameterization. If MEKE_GMCOEFF is negative, this + ! conversion is not used or calculated. +MEKE_BGSRC = 1.0E-13 ! [W kg-1] default = 0.0 + ! A background energy source for MEKE. +MEKE_KHMEKE_FAC = 1.0 ! [nondim] default = 0.0 + ! A factor that maps MEKE%Kh to Kh for MEKE itself. +MEKE_ALPHA_RHINES = 0.15 ! [nondim] default = 0.05 + ! If positive, is a coefficient weighting the Rhines scale in the expression for + ! mixing length used in MEKE-derived diffusivity. +MEKE_ALPHA_EADY = 0.15 ! [nondim] default = 0.05 + ! If positive, is a coefficient weighting the Eady length scale in the + ! expression for mixing length used in MEKE-derived diffusivity. + +! === module MOM_lateral_mixing_coeffs === +USE_VARIABLE_MIXING = True ! [Boolean] default = False + ! If true, the variable mixing code will be called. This allows diagnostics to + ! be created even if the scheme is not used. If KHTR_SLOPE_CFF>0 or + ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter + ! file. +RESOLN_SCALED_KH = True ! [Boolean] default = False + ! If true, the Laplacian lateral viscosity is scaled away when the first + ! baroclinic deformation radius is well resolved. +RESOLN_SCALED_KHTH = True ! [Boolean] default = False + ! If true, the interface depth diffusivity is scaled away when the first + ! baroclinic deformation radius is well resolved. +KHTR_SLOPE_CFF = 0.25 ! [nondim] default = 0.0 + ! The nondimensional coefficient in the Visbeck formula for the epipycnal tracer + ! diffusivity +USE_STORED_SLOPES = True ! [Boolean] default = False + ! If true, the isopycnal slopes are calculated once and stored for re-use. This + ! uses more memory but avoids calling the equation of state more times than + ! should be necessary. +INTERPOLATE_RES_FN = False ! [Boolean] default = True + ! If true, interpolate the resolution function to the velocity points from the + ! thickness points; otherwise interpolate the wave speed and calculate the + ! resolution function independently at each point. +GILL_EQUATORIAL_LD = True ! [Boolean] default = False + ! If true, uses Gill's definition of the baroclinic equatorial deformation + ! radius, otherwise, if false, use Pedlosky's definition. These definitions + ! differ by a factor of 2 in front of the beta term in the denominator. Gill's + ! is the more appropriate definition. +INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True + ! If true, use a more robust estimate of the first mode wave speed as the + ! starting point for iterations. + +! === module MOM_set_visc === +CHANNEL_DRAG = True ! [Boolean] default = False + ! If true, the bottom drag is exerted directly on each layer proportional to the + ! fraction of the bottom it overlies. +PRANDTL_TURB = 1.25 ! [nondim] default = 1.0 + ! The turbulent Prandtl number applied to shear instability. +HBBL = 10.0 ! [m] + ! The thickness of a bottom boundary layer with a viscosity of KVBBL if + ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom + ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but + ! LINEAR_DRAG is not. +DRAG_BG_VEL = 0.1 ! 
[m s-1] default = 0.0 + ! DRAG_BG_VEL is either the assumed bottom velocity (with LINEAR_DRAG) or an + ! unresolved velocity that is combined with the resolved velocity to estimate + ! the velocity magnitude. DRAG_BG_VEL is only used when BOTTOMDRAGLAW is + ! defined. +BBL_USE_EOS = True ! [Boolean] default = False + ! If true, use the equation of state in determining the properties of the bottom + ! boundary layer. Otherwise use the layer target potential densities. +BBL_THICK_MIN = 0.1 ! [m] default = 0.0 + ! The minimum bottom boundary layer thickness that can be used with + ! BOTTOMDRAGLAW. This might be Kv/(cdrag*drag_bg_vel) to give Kv as the minimum + ! near-bottom viscosity. +KV = 1.0E-04 ! [m2 s-1] + ! The background kinematic viscosity in the interior. The molecular value, ~1e-6 + ! m2 s-1, may be used. +KV_BBL_MIN = 0.0 ! [m2 s-1] default = 1.0E-04 + ! The minimum viscosities in the bottom boundary layer. +KV_TBL_MIN = 0.0 ! [m2 s-1] default = 1.0E-04 + ! The minimum viscosities in the top boundary layer. + +! === module MOM_thickness_diffuse === +KHTH_MAX_CFL = 0.1 ! [nondimensional] default = 0.8 + ! The maximum value of the local diffusive CFL ratio that is permitted for the + ! thickness diffusivity. 1.0 is the marginally unstable value in a pure layered + ! model, but much smaller numbers (e.g. 0.1) seem to work better for ALE-based + ! models. +USE_GM_WORK_BUG = True ! [Boolean] default = True + ! If true, compute the top-layer work tendency on the u-grid with the incorrect + ! sign, for legacy reproducibility. + +! === module MOM_continuity === + +! === module MOM_continuity_PPM === +ETA_TOLERANCE = 1.0E-06 ! [m] default = 3.75E-09 + ! The tolerance for the differences between the barotropic and baroclinic + ! estimates of the sea surface height due to the fluxes through each face. The + ! total tolerance for SSH is 4 times this value. The default is + ! 0.5*NK*ANGSTROM, and this should not be set less than about + ! 10^-15*MAXIMUM_DEPTH. +ETA_TOLERANCE_AUX = 0.001 ! [m] default = 1.0E-06 + ! The tolerance for free-surface height discrepancies between the barotropic + ! solution and the sum of the layer thicknesses when calculating the auxiliary + ! corrected velocities. By default, this is the same as ETA_TOLERANCE, but can + ! be made larger for efficiency. + +! === module MOM_CoriolisAdv === +CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY" + ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid + ! values are: + ! SADOURNY75_ENERGY - Sadourny, 1975; energy cons. + ! ARAKAWA_HSU90 - Arakawa & Hsu, 1990 + ! SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons. + ! ARAKAWA_LAMB81 - Arakawa & Lamb, 1981; En. + Enst. + ! ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with + ! Arakawa & Hsu and Sadourny energy +BOUND_CORIOLIS = True ! [Boolean] default = False + ! If true, the Coriolis terms at u-points are bounded by the four estimates of + ! (f+rv)v from the four neighboring v-points, and similarly at v-points. This + ! option would have no effect on the SADOURNY Coriolis scheme if it were + ! possible to use centered difference thickness fluxes. + +! === module MOM_PressureForce === + +! === module MOM_PressureForce_AFV === +MASS_WEIGHT_IN_PRESSURE_GRADIENT = True ! [Boolean] default = False + ! If true, use mass weighting when interpolating T/S for integrals near the + ! bathymetry in AFV pressure gradient calculations. + +! === module MOM_hor_visc === +LAPLACIAN = True ! [Boolean] default = False + ! 
If true, use a Laplacian horizontal viscosity. +AH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the cube of the grid spacing to + ! calculate the biharmonic viscosity. The final viscosity is the largest of this + ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH. +SMAGORINSKY_AH = True ! [Boolean] default = False + ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity. +SMAG_BI_CONST = 0.06 ! [nondim] default = 0.0 + ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06. +USE_LAND_MASK_FOR_HVISC = False ! [Boolean] default = False + ! If true, use the land mask for the computation of thicknesses at velocity + ! locations. This eliminates the dependence on arbitrary values over land or + ! outside of the domain. Default is False in order to maintain answers with + ! legacy experiments but should be changed to True for new experiments. + +! === module MOM_vert_friction === +HMIX_FIXED = 0.5 ! [m] + ! The prescribed depth over which the near-surface viscosity and diffusivity are + ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. +MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 + ! The maximum velocity allowed before the velocity components are truncated. + +! === module MOM_PointAccel === +U_TRUNC_FILE = "U_velocity_truncations" ! default = "" + ! The absolute path to a file into which the accelerations leading to zonal + ! velocity truncations are written. Undefine this for efficiency if this + ! diagnostic is not needed. +V_TRUNC_FILE = "V_velocity_truncations" ! default = "" + ! The absolute path to a file into which the accelerations leading to meridional + ! velocity truncations are written. Undefine this for efficiency if this + ! diagnostic is not needed. + +! === module MOM_barotropic === +BOUND_BT_CORRECTION = True ! [Boolean] default = False + ! If true, the corrective pseudo mass-fluxes into the barotropic solver are + ! limited to values that require less than maxCFL_BT_cont to be accommodated. +BT_PROJECT_VELOCITY = True ! [Boolean] default = False + ! If true, step the barotropic velocity first and project out the velocity + ! tendency by 1+BEBT when calculating the transport. The default (false) is to + ! use a predictor continuity step to find the pressure field, and then to do a + ! corrector continuity step using a weighted average of the old and new + ! velocities, with weights of (1-BEBT) and BEBT. +DYNAMIC_SURFACE_PRESSURE = True ! [Boolean] default = False + ! If true, add a dynamic pressure due to a viscous ice shelf, for instance. +BEBT = 0.2 ! [nondim] default = 0.1 + ! BEBT determines whether the barotropic time stepping uses the forward-backward + ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range + ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1 + ! (for a backward Euler treatment). In practice, BEBT must be greater than about + ! 0.05. +DTBT = -0.9 ! [s or nondim] default = -0.98 + ! The barotropic time step, in s. DTBT is only used with the split explicit time + ! stepping. To set the time step automatically based on the maximum stable value + ! use 0, or a negative value gives the fraction of the stable value. Setting + ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will + !
actually be used is an integer fraction of DT, rounding down. +BT_USE_OLD_CORIOLIS_BRACKET_BUG = True ! [Boolean] default = False + ! If True, use an order of operations that is not bitwise rotationally symmetric + ! in the meridional Coriolis term of the barotropic solver. + +! === module MOM_mixed_layer_restrat === +MIXEDLAYER_RESTRAT = True ! [Boolean] default = False + ! If true, a density-gradient dependent re-stratifying flow is imposed in the + ! mixed layer. Can be used in ALE mode without restriction but in layer mode can + ! only be used if BULKMIXEDLAYER is true. +FOX_KEMPER_ML_RESTRAT_COEF = 1.0 ! [nondim] default = 0.0 + ! A nondimensional coefficient that is proportional to the ratio of the + ! deformation radius to the dominant lengthscale of the submesoscale mixed layer + ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic + ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of + ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al. + ! (2010) +MLE_FRONT_LENGTH = 500.0 ! [m] default = 0.0 + ! If non-zero, is the frontal-length scale used to calculate the upscaling of + ! buoyancy gradients that is otherwise represented by the parameter + ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended + ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0. +MLE_USE_PBL_MLD = True ! [Boolean] default = False + ! If true, the MLE parameterization will use the mixed-layer depth provided by + ! the active PBL parameterization. If false, MLE will estimate a MLD based on a + ! density difference with the surface using the parameter MLE_DENSITY_DIFF. +MLE_MLD_DECAY_TIME = 2.592E+06 ! [s] default = 0.0 + ! The time-scale for a running-mean filter applied to the mixed-layer depth used + ! in the MLE restratification parameterization. When the MLD deepens below the + ! current running-mean the running-mean is instantaneously set to the current + ! MLD. + +! === module MOM_diabatic_driver === +! The following parameters are used for diabatic processes. +ENERGETICS_SFC_PBL = True ! [Boolean] default = False + ! If true, use an implied energetics planetary boundary layer scheme to + ! determine the diffusivity and viscosity in the surface boundary layer. +EPBL_IS_ADDITIVE = False ! [Boolean] default = True + ! If true, the diffusivity from ePBL is added to all other diffusivities. + ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used. + +! === module MOM_CVMix_KPP === +! This is the MOM wrapper to CVMix:KPP +! See http://cvmix.github.io/ + +! === module MOM_tidal_mixing === +! Vertical Tidal Mixing Parameterization +INT_TIDE_DISSIPATION = True ! [Boolean] default = False + ! If true, use an internal tidal dissipation scheme to drive diapycnal mixing, + ! along the lines of St. Laurent et al. (2002) and Simmons et al. (2004). +INT_TIDE_PROFILE = "POLZIN_09" ! default = "STLAURENT_02" + ! INT_TIDE_PROFILE selects the vertical profile of energy dissipation with + ! INT_TIDE_DISSIPATION. Valid values are: + ! STLAURENT_02 - Use the St. Laurent et al exponential + ! decay profile. + ! POLZIN_09 - Use the Polzin WKB-stretched algebraic + ! decay profile. +INT_TIDE_DECAY_SCALE = 300.3003003003003 ! [m] default = 500.0 + ! The decay scale away from the bottom for tidal TKE with the new coding when + ! INT_TIDE_DISSIPATION is used. +KAPPA_ITIDES = 6.28319E-04 ! [m-1] default = 6.283185307179586E-04 + ! A topographic wavenumber used with INT_TIDE_DISSIPATION. The default is 2pi/10 + ! 
km, as in St.Laurent et al. 2002. +KAPPA_H2_FACTOR = 0.84 ! [nondim] default = 1.0 + ! A scaling factor for the roughness amplitude with INT_TIDE_DISSIPATION. +TKE_ITIDE_MAX = 0.1 ! [W m-2] default = 1000.0 + ! The maximum internal tide energy source available to mix above the bottom + ! boundary layer with INT_TIDE_DISSIPATION. +READ_TIDEAMP = True ! [Boolean] default = False + ! If true, read a file (given by TIDEAMP_FILE) containing the tidal amplitude + ! with INT_TIDE_DISSIPATION. +TIDEAMP_FILE = "tidal_amplitude.v20140616.nc" ! default = "tideamp.nc" + ! The path to the file containing the spatially varying tidal amplitudes with + ! INT_TIDE_DISSIPATION. +H2_FILE = "ocean_topog.nc" ! + ! The path to the file containing the sub-grid-scale topographic roughness + ! amplitude with INT_TIDE_DISSIPATION. + +! === module MOM_CVMix_conv === +! Parameterization of enhanced mixing due to convection via CVMix + +! === module MOM_geothermal === +GEOTHERMAL_SCALE = 1.0 ! [W m-2 or various] default = 0.0 + ! The constant geothermal heat flux, a rescaling factor for the heat flux read + ! from GEOTHERMAL_FILE, or 0 to disable the geothermal heating. +GEOTHERMAL_FILE = "geothermal_davies2013_v1.nc" ! default = "" + ! The file from which the geothermal heating is to be read, or blank to use a + ! constant heating rate. +GEOTHERMAL_VARNAME = "geothermal_hf" ! default = "geo_heat" + ! The name of the geothermal heating variable in GEOTHERMAL_FILE. + +! === module MOM_set_diffusivity === +BBL_MIXING_AS_MAX = False ! [Boolean] default = True + ! If true, take the maximum of the diffusivity from the BBL mixing and the other + ! diffusivities. Otherwise, diffusivity from the BBL_mixing is simply added. +USE_LOTW_BBL_DIFFUSIVITY = True ! [Boolean] default = False + ! If true, uses a simple, imprecise but non-coordinate dependent, model of BBL + ! mixing diffusivity based on Law of the Wall. Otherwise, uses the original BBL + ! scheme. +SIMPLE_TKE_TO_KD = True ! [Boolean] default = False + ! If true, uses a simple estimate of Kd/TKE that will work for arbitrary + ! vertical coordinates. If false, calculates Kd/TKE and bounds based on exact + ! energetics for an isopycnal layer-formulation. + +! === module MOM_bkgnd_mixing === +! Adding static vertical background mixing coefficients +KD = 1.5E-05 ! [m2 s-1] + ! The background diapycnal diffusivity of density in the interior. Zero or the + ! molecular value, ~1e-7 m2 s-1, may be used. +KD_MIN = 2.0E-06 ! [m2 s-1] default = 1.5E-07 + ! The minimum diapycnal diffusivity. +HENYEY_IGW_BACKGROUND = True ! [Boolean] default = False + ! If true, use a latitude-dependent scaling for the near surface background + ! diffusivity, as described in Harrison & Hallberg, JPO 2008. +KD_MAX = 0.1 ! [m2 s-1] default = -1.0 + ! The maximum permitted increment for the diapycnal diffusivity from TKE-based + ! parameterizations, or a negative value for no limit. + +! === module MOM_kappa_shear === +! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008 +USE_JACKSON_PARAM = True ! [Boolean] default = False + ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing + ! parameterization. +MAX_RINO_IT = 25 ! [nondim] default = 50 + ! The maximum number of iterations that may be used to estimate the Richardson + ! number driven mixing. +VERTEX_SHEAR = False ! [Boolean] default = False + ! If true, do the calculations of the shear-driven mixing + ! at the cell vertices (i.e., the vorticity points). +KAPPA_SHEAR_ITER_BUG = True ! 
[Boolean] default = True + ! If true, use an older, dimensionally inconsistent estimate of the derivative + ! of diffusivity with energy in the Newton's method iteration. The bug causes + ! undercorrections when dz > 1 m. +KAPPA_SHEAR_ALL_LAYER_TKE_BUG = True ! [Boolean] default = True + ! If true, report back the latest estimate of TKE instead of the time average + ! TKE when there is mass in all layers. Otherwise always report the time + ! averaged TKE, as is currently done when there are some massless layers. + +! === module MOM_CVMix_shear === +! Parameterization of shear-driven turbulence via CVMix (various options) + +! === module MOM_CVMix_ddiff === +! Parameterization of mixing due to double diffusion processes via CVMix + +! === module MOM_diabatic_aux === +! The following parameters are used for auxiliary diabatic processes. +PRESSURE_DEPENDENT_FRAZIL = False ! [Boolean] default = False + ! If true, use a pressure dependent freezing temperature when making frazil. The + ! default is false, which will be faster but is inappropriate with ice-shelf + ! cavities. +VAR_PEN_SW = True ! [Boolean] default = False + ! If true, use one of the CHL_A schemes specified by OPACITY_SCHEME to determine + ! the e-folding depth of incoming short wave radiation. +CHL_FILE = @[CHLCLIM] ! + ! CHL_FILE is the file containing chl_a concentrations in the variable CHL_A. It + ! is used when VAR_PEN_SW and CHL_FROM_FILE are true. +CHL_VARNAME = "chlor_a" ! default = "CHL_A" + ! Name of CHL_A variable in CHL_FILE. + +! === module MOM_energetic_PBL === +ML_OMEGA_FRAC = 0.001 ! [nondim] default = 0.0 + ! When setting the decay scale for turbulence, use this fraction of the absolute + ! rotation rate blended with the local value of f, as sqrt((1-of)*f^2 + + ! of*4*omega^2). +TKE_DECAY = 0.01 ! [nondim] default = 2.5 + ! TKE_DECAY relates the vertical rate of decay of the TKE available for + ! mechanical entrainment to the natural Ekman depth. +EPBL_MSTAR_SCHEME = "OM4" ! default = "CONSTANT" + ! EPBL_MSTAR_SCHEME selects the method for setting mstar. Valid values are: + ! CONSTANT - Use a fixed mstar given by MSTAR + ! OM4 - Use L_Ekman/L_Obukhov in the stabilizing limit, as in OM4 + ! REICHL_H18 - Use the scheme documented in Reichl & Hallberg, 2018. +MSTAR_CAP = 10.0 ! [nondim] default = -1.0 + ! If this value is positive, it sets the maximum value of mstar allowed in ePBL. + ! (This is not used if EPBL_MSTAR_SCHEME = CONSTANT). +MSTAR2_COEF1 = 0.29 ! [nondim] default = 0.3 + ! Coefficient in computing mstar when rotation and stabilizing effects are both + ! important (used if EPBL_MSTAR_SCHEME = OM4). +MSTAR2_COEF2 = 0.152 ! [nondim] default = 0.085 + ! Coefficient in computing mstar when only rotation limits the total mixing + ! (used if EPBL_MSTAR_SCHEME = OM4) +NSTAR = 0.06 ! [nondim] default = 0.2 + ! The portion of the buoyant potential energy imparted by surface fluxes that is + ! available to drive entrainment at the base of mixed layer when that energy is + ! positive. +EPBL_MLD_BISECTION = True ! [Boolean] default = False + ! If true, use bisection with the iterative determination of the self-consistent + ! mixed layer depth. Otherwise use the false position after a maximum and + ! minimum bound have been evaluated and the returned value or bisection before + ! this. +MSTAR_CONV_ADJ = 0.667 ! [nondim] default = 0.0 + ! Coefficient used for reducing mstar during convection due to reduction of + ! stable density gradient. +USE_MLD_ITERATION = True ! [Boolean] default = False + ! A logical that specifies whether or not to use the distance to the bottom of + ! the actively turbulent boundary layer to help set the EPBL length scale.
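The ML_OMEGA_FRAC entry in the MOM_energetic_PBL block above gives its blending formula explicitly: the decay scale for turbulence uses an effective rotation rate sqrt((1-of)*f^2 + of*4*omega^2). A short Python sketch of that expression (illustrative only; the constant OMEGA is the standard value of Earth's rotation rate, not a template parameter):

import math

OMEGA = 7.2921e-5      # [s-1] Earth's rotation rate
ML_OMEGA_FRAC = 0.001  # [nondim] value set in this template

def effective_f(f_local, of=ML_OMEGA_FRAC, omega=OMEGA):
    """Blend the local Coriolis parameter f with the absolute rotation rate."""
    return math.sqrt((1.0 - of) * f_local**2 + of * 4.0 * omega**2)

# At the equator f = 0, but the blend keeps a small nonzero decay rate:
print(effective_f(0.0))  # ~4.6e-6 s-1, i.e. 2*omega*sqrt(ML_OMEGA_FRAC)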
+EPBL_TRANSITION_SCALE = 0.01 ! [nondim] default = 0.1 + ! A scale for the mixing length in the transition layer at the edge of the + ! boundary layer as a fraction of the boundary layer thickness. +MIX_LEN_EXPONENT = 1.0 ! [nondim] default = 2.0 + ! The exponent applied to the ratio of the distance to the MLD and the MLD depth + ! which determines the shape of the mixing length. This is only used if + ! USE_MLD_ITERATION is True. +USE_LA_LI2016 = @[MOM6_USE_LI2016] ! [nondim] default = False + ! A logical to use the Li et al. 2016 (submitted) formula to determine the + ! Langmuir number. +USE_WAVES = @[MOM6_USE_WAVES] ! [Boolean] default = False + ! If true, enables surface wave modules. +WAVE_METHOD = "SURFACE_BANDS" ! default = "EMPTY" + ! Choice of wave method, valid options include: + ! TEST_PROFILE - Prescribed from surface Stokes drift + ! and a decay wavelength. + ! SURFACE_BANDS - Computed from multiple surface values + ! and decay wavelengths. + ! DHH85 - Uses Donelan et al. 1985 empirical + ! wave spectrum with prescribed values. + ! LF17 - Infers Stokes drift profile from wind + ! speed following Li and Fox-Kemper 2017. +SURFBAND_SOURCE = "COUPLER" ! default = "EMPTY" + ! Choice of SURFACE_BANDS data mode, valid options include: + ! DATAOVERRIDE - Read from NetCDF using FMS DataOverride. + ! COUPLER - Look for variables from coupler pass + ! INPUT - Testing with fixed values. +STK_BAND_COUPLER = 3 ! default = 1 + ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has + ! to be consistent with the number of Stokes drift bands in WW3, or the model + ! will fail. +SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 ! [rad/m] default = 0.12566 + ! Central wavenumbers for surface Stokes drift bands. +EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE" + ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence. + ! Valid values are: + ! NONE - Do not do any extra mixing due to Langmuir turbulence + ! RESCALE - Use a multiplicative rescaling of mstar to account for Langmuir + ! turbulence + ! ADDITIVE - Add a Langmuir turbulence contribution to mstar to other + ! contributions +LT_ENHANCE_COEF = 0.044 ! [nondim] default = 0.447 + ! Coefficient for Langmuir enhancement of mstar +LT_ENHANCE_EXP = -1.5 ! [nondim] default = -1.33 + ! Exponent for Langmuir enhancement of mstar +LT_MOD_LAC1 = 0.0 ! [nondim] default = -0.87 + ! Coefficient for modification of Langmuir number due to MLD approaching Ekman + ! depth. +LT_MOD_LAC4 = 0.0 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! stable Obukhov depth. +LT_MOD_LAC5 = 0.22 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! unstable Obukhov depth. + +! === module MOM_regularize_layers === + +! === module MOM_opacity === +PEN_SW_NBANDS = 3 ! default = 1 + ! The number of bands of penetrating shortwave radiation. + +! === module MOM_tracer_advect === +TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM" + ! The horizontal transport scheme for tracers: + ! PLM - Piecewise Linear Method + ! PPM:H3 - Piecewise Parabolic Method (Huynh 3rd order) + ! PPM - Piecewise Parabolic Method (Colella-Woodward) +
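The SURFBAND_WAVENUMBERS entry above sets the central wavenumbers k (in rad/m) of the STK_BAND_COUPLER = 3 Stokes drift bands passed from WW3. As a quick check, the corresponding wavelengths follow from lambda = 2*pi/k; a small Python sketch (illustrative only):

import math

surfband_wavenumbers = [0.04, 0.11, 0.3305]  # [rad/m], as set above
for k in surfband_wavenumbers:
    print(f"k = {k:7.4f} rad/m -> wavelength ~ {2.0 * math.pi / k:6.1f} m")
# Roughly 157 m, 57 m, and 19 m: long, intermediate, and short wave bands.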
+! === module MOM_tracer_hor_diff === +CHECK_DIFFUSIVE_CFL = True ! [Boolean] default = False + ! If true, use enough iterations of the diffusion to ensure that the diffusive + ! equivalent of the CFL limit is not violated. If false, always use the greater + ! of 1 or MAX_TR_DIFFUSION_CFL iterations. + +! === module MOM_neutral_diffusion === +! This module implements neutral diffusion of tracers + +! === module MOM_lateral_boundary_diffusion === +! This module implements lateral diffusion of tracers near boundaries + +! === module MOM_sum_output === +MAXTRUNC = 100000 ! [truncations save_interval-1] default = 0 + ! The run will be stopped, and the day set to a very large value if the velocity + ! is truncated more than MAXTRUNC times between energy saves. Set MAXTRUNC to 0 + ! to stop if there is any truncation of velocities. +ENERGYSAVEDAYS = 1.00 ! [days] default = 1.0 + ! The interval in units of TIMEUNIT between saves of the energies of the run and + ! other globally summed diagnostics. + +! === module ocean_model_init === + +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = true ! +ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default = 3.0 + +! === module MOM_surface_forcing === +OCEAN_SURFACE_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the surface velocity field that is + ! returned to the coupler. Valid values include + ! 'A', 'B', or 'C'. + +MAX_P_SURF = 0.0 ! [Pa] default = -1.0 + ! The maximum surface pressure that can be exerted by the atmosphere and + ! floating sea-ice or ice shelves. This is needed because the FMS coupling + ! structure does not limit the water that can be frozen out of the ocean and the + ! ice-ocean heat fluxes are treated explicitly. No limit is applied if a + ! negative value is used. +WIND_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the input wind stress field. Valid + ! values are 'A', 'B', or 'C'. +CD_TIDES = 0.0018 ! [nondim] default = 1.0E-04 + ! The drag coefficient that applies to the tides. +GUST_CONST = 0.0 ! [Pa] default = 0.02 + ! The background gustiness in the winds. +FIX_USTAR_GUSTLESS_BUG = False ! [Boolean] default = False + ! If true, correct a bug in the time-averaging of the gustless wind friction + ! velocity. +USE_RIGID_SEA_ICE = True ! [Boolean] default = False + ! If true, sea-ice is rigid enough to exert a nonhydrostatic pressure that + ! resists vertical motion. +SEA_ICE_RIGID_MASS = 100.0 ! [kg m-2] default = 1000.0 + ! The mass of sea-ice per unit area at which the sea-ice starts to exhibit + ! rigidity. +LIQUID_RUNOFF_FROM_DATA = @[MOM6_RIVER_RUNOFF] ! [Boolean] default = False + ! If true, allows liquid river runoff to be specified via + ! the data_table using the component name 'OCN'.
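Several entries in these templates, e.g. @[ODA_INCUPD] and @[ODA_INCUPD_NHOURS] above, are placeholders that the workflow substitutes before MOM6 parses the file. A minimal Python sketch of that substitution step, assuming an atparse-style @[NAME] convention; the value used below is a hypothetical example, not a workflow default:

import re

def fill_template(text, values):
    """Replace every @[NAME] token with its value from the mapping."""
    return re.sub(r"@\[(\w+)\]", lambda m: str(values[m.group(1)]), text)

line = "ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False"
print(fill_template(line, {"ODA_INCUPD": "True"}))
# -> ODA_INCUPD = True ! [Boolean] default = False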
+! === module ocean_stochastics === +DO_SPPT = @[DO_OCN_SPPT] ! [Boolean] default = False + ! If true, perturb the diabatic tendencies in MOM_diabatic_driver +PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False + ! If true, perturb the KE dissipation and destruction in MOM_energetic_PBL +! === module MOM_restart === +RESTART_CHECKSUMS_REQUIRED = False +! === module MOM_file_parser === diff --git a/parm/ufs/mom6/MOM_input_template_050 b/parm/ufs/mom6/MOM_input_template_050 new file mode 100644 index 0000000000..4c39198c02 --- /dev/null +++ b/parm/ufs/mom6/MOM_input_template_050 @@ -0,0 +1,947 @@ +! This input file provides the adjustable run-time parameters for version 6 of the Modular Ocean Model (MOM6). +! Where appropriate, parameters are usually given in MKS units. + +! This particular file is for the example in ice_ocean_SIS2/OM4_05. + +! This MOM_input file typically contains only the non-default values that are needed to reproduce this example. +! A full list of parameters for this example can be found in the corresponding MOM_parameter_doc.all file +! which is generated by the model at run-time. +! === module MOM_domains === +TRIPOLAR_N = True ! [Boolean] default = False + ! Use tripolar connectivity at the northern edge of the domain. With + ! TRIPOLAR_N, NIGLOBAL must be even. +NIGLOBAL = @[NX_GLB] ! + ! The total number of thickness grid points in the x-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NJGLOBAL = @[NY_GLB] ! + ! The total number of thickness grid points in the y-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NIHALO = 4 ! default = 4 + ! The number of halo points on each side in the x-direction. With + ! STATIC_MEMORY_ this is set as NIHALO_ in MOM_memory.h at compile time; without + ! STATIC_MEMORY_ the default is NIHALO_ in MOM_memory.h (if defined) or 2. +NJHALO = 4 ! default = 4 + ! The number of halo points on each side in the y-direction. With + ! STATIC_MEMORY_ this is set as NJHALO_ in MOM_memory.h at compile time; without + ! STATIC_MEMORY_ the default is NJHALO_ in MOM_memory.h (if defined) or 2. +! LAYOUT = 21, 20 ! + ! The processor layout that was actually used. +! IO_LAYOUT = 1, 1 ! default = 1 + ! The processor layout to be used, or 0,0 to automatically set the io_layout to + ! be the same as the layout. + +! === module MOM === +USE_REGRIDDING = True ! [Boolean] default = False + ! If True, use the ALE algorithm (regridding/remapping). If False, use the + ! layered isopycnal algorithm. +THICKNESSDIFFUSE = True ! [Boolean] default = False + ! If true, interface heights are diffused with a coefficient of KHTH. +THICKNESSDIFFUSE_FIRST = True ! [Boolean] default = False + ! If true, do thickness diffusion before dynamics. This is only used if + ! THICKNESSDIFFUSE is true. +DT = @[DT_DYNAM_MOM6] ! [s] + ! The (baroclinic) dynamics time step. The time-step that is actually used will + ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode + ! or the coupling timestep in coupled mode.) +DT_THERM = @[DT_THERM_MOM6] ! [s] default = 1800.0 + ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be + ! an integer multiple of DT and less than the forcing or coupling time-step, + ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer + ! multiple of the coupling timestep. By default DT_THERM is set to DT. +THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN] ! [Boolean] default = False + !
If true, the MOM will take thermodynamic and tracer timesteps that can be + ! longer than the coupling timestep. The actual thermodynamic timestep that is + ! used in this case is the largest integer multiple of the coupling timestep + ! that is less than or equal to DT_THERM. +HFREEZE = 20.0 ! [m] default = -1.0 + ! If HFREEZE > 0, melt potential will be computed. The actual depth + ! over which melt potential is computed will be min(HFREEZE, OBLD) + ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default) + ! melt potential will not be computed. +USE_PSURF_IN_EOS = False ! [Boolean] default = False + ! If true, always include the surface pressure contributions in equation of + ! state calculations. +FRAZIL = True ! [Boolean] default = False + ! If true, water freezes if it gets too cold, and the accumulated heat deficit + ! is returned in the surface state. FRAZIL is only used if + ! ENABLE_THERMODYNAMICS is true. +DO_GEOTHERMAL = True ! [Boolean] default = False + ! If true, apply geothermal heating. +BOUND_SALINITY = True ! [Boolean] default = False + ! If true, limit salinity to being positive. (The sea-ice model may ask for more + ! salt than is available and drive the salinity negative otherwise.) +MIN_SALINITY = 0.01 ! [PPT] default = 0.01 + ! The minimum value of salinity when BOUND_SALINITY=True. The default is 0.01 + ! for backward compatibility but ideally should be 0. +C_P = 3992.0 ! [J kg-1 K-1] default = 3991.86795711963 + ! The heat capacity of sea water, approximated as a constant. This is only used + ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10 + ! definition of conservative temperature. +CHECK_BAD_SURFACE_VALS = True ! [Boolean] default = False + ! If true, check the surface state for ridiculous values. +BAD_VAL_SSH_MAX = 50.0 ! [m] default = 20.0 + ! The value of SSH above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SSS_MAX = 75.0 ! [PPT] default = 45.0 + ! The value of SSS above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SST_MAX = 55.0 ! [deg C] default = 45.0 + ! The value of SST above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SST_MIN = -3.0 ! [deg C] default = -2.1 + ! The value of SST below which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +DEFAULT_2018_ANSWERS = True ! [Boolean] default = True + ! This sets the default value for the various _2018_ANSWERS parameters. +WRITE_GEOM = 2 ! default = 1 + ! If =0, never write the geometry and vertical grid files. If =1, write the + ! geometry and vertical grid files only for a new simulation. If =2, always + ! write the geometry and vertical grid files. Other values are invalid. +SAVE_INITIAL_CONDS = False ! [Boolean] default = False + ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE. + +! === module MOM_hor_index === +! Sets the horizontal array index types. + +! === module MOM_fixed_initialization === +INPUTDIR = "INPUT" ! default = "." + ! The directory in which input files are found. + +! === module MOM_grid_init === +GRID_CONFIG = "mosaic" ! + ! A character string that determines the method for defining the horizontal + ! grid. Current options are: + ! mosaic - read the grid from a mosaic (supergrid) + ! file set by GRID_FILE. + ! cartesian - use a (flat) Cartesian grid. + ! spherical - use a simple spherical grid. + ! mercator - use a Mercator spherical grid. +GRID_FILE = "ocean_hgrid.nc" ! 
+ ! Name of the file from which to read horizontal grid data. +GRID_ROTATION_ANGLE_BUGS = False ! [Boolean] default = True + ! If true, use an older algorithm to calculate the sines and + ! cosines needed to rotate between grid-oriented directions and + ! true north and east. Differences arise at the tripolar fold. +USE_TRIPOLAR_GEOLONB_BUG = False ! [Boolean] default = True + ! If true, use older code that incorrectly sets the longitude in some points + ! along the tripolar fold to be off by 360 degrees. +TOPO_CONFIG = "file" ! + ! This specifies how bathymetry is specified: + ! file - read bathymetric information from the file + ! specified by (TOPO_FILE). + ! flat - flat bottom set to MAXIMUM_DEPTH. + ! bowl - an analytically specified bowl-shaped basin + ! ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH. + ! spoon - a similar shape to 'bowl', but with a vertical + ! wall at the southern face. + ! halfpipe - a zonally uniform channel with a half-sine + ! profile in the meridional direction. + ! benchmark - use the benchmark test case topography. + ! Neverland - use the Neverland test case topography. + ! DOME - use a slope and channel configuration for the + ! DOME sill-overflow test case. + ! ISOMIP - use a slope and channel configuration for the + ! ISOMIP test case. + ! DOME2D - use a shelf and slope configuration for the + ! DOME2D gravity current/overflow test case. + ! Kelvin - flat but with rotated land mask. + ! seamount - Gaussian bump for spontaneous motion test case. + ! dumbbell - Sloshing channel with reservoirs on both ends. + ! shelfwave - exponential slope for shelfwave test case. + ! Phillips - ACC-like idealized topography used in the Phillips config. + ! dense - Denmark Strait-like dense water formation and overflow. + ! USER - call a user modified routine. +TOPO_FILE = "ocean_topog.nc" ! default = "topog.nc" + ! The file from which the bathymetry is read. +ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES] ! default = "False" + ! If true, allow topography overrides to change ocean points to land +MAXIMUM_DEPTH = 6500.0 ! [m] + ! The maximum depth of the ocean. +MINIMUM_DEPTH = 9.5 ! [m] default = 0.0 + ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is + ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is + ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than + ! MASKING_DEPTH are rounded to MINIMUM_DEPTH. + +! === module MOM_open_boundary === +! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply, +! if any. +MASKING_DEPTH = 0.0 ! [m] default = -9999.0 + ! The depth below which to mask points as land points, for which all fluxes are + ! zeroed out. MASKING_DEPTH is ignored if negative. +CHANNEL_CONFIG = "list" ! default = "none" + ! A parameter that determines which set of channels are + ! restricted to specific widths. Options are: + ! none - All channels have the grid width. + ! global_1deg - Sets 16 specific channels appropriate + ! for a 1-degree model, as used in CM2G. + ! list - Read the channel locations and widths from a + ! text file, like MOM_channel_list in the MOM_SIS + ! test case. + ! file - Read open face widths everywhere from a + ! NetCDF file on the model grid. +CHANNEL_LIST_FILE = "MOM_channels_global_025" ! default = "MOM_channel_list" + ! The file from which the list of narrowed channels is read. + +! === module MOM_verticalGrid === +! Parameters providing information about the vertical grid. +NK = 75 ! [nondim] + !
The number of model layers. + +! === module MOM_tracer_registry === + +! === module MOM_EOS === +DTFREEZE_DP = -7.75E-08 ! [deg C Pa-1] default = 0.0 + ! When TFREEZE_FORM=LINEAR, this is the derivative of the freezing potential + ! temperature with pressure. + +! === module MOM_restart === +PARALLEL_RESTARTFILES = True ! [Boolean] default = False + ! If true, each processor writes its own restart file, otherwise a single + ! restart file is generated + +! === module MOM_tracer_flow_control === +USE_IDEAL_AGE_TRACER = False ! [Boolean] default = False + ! If true, use the ideal_age_example tracer package. + +! === module ideal_age_example === + +! === module MOM_coord_initialization === +COORD_CONFIG = "file" ! + ! This specifies how layers are to be defined: + ! ALE or none - used to avoid defining layers in ALE mode + ! file - read coordinate information from the file + ! specified by (COORD_FILE). + ! BFB - Custom coords for buoyancy-forced basin case + ! based on SST_S, T_BOT and DRHO_DT. + ! linear - linear based on interfaces not layers + ! layer_ref - linear based on layer densities + ! ts_ref - use reference temperature and salinity + ! ts_range - use range of temperature and salinity + ! (T_REF and S_REF) to determine surface density + ! and GINT calculate internal densities. + ! gprime - use reference density (RHO_0) for surface + ! density and GINT calculate internal densities. + ! ts_profile - use temperature and salinity profiles + ! (read from COORD_FILE) to set layer densities. + ! USER - call a user modified routine. +COORD_FILE = "layer_coord.nc" ! + ! The file from which the coordinate densities are read. +REMAP_UV_USING_OLD_ALG = True ! [Boolean] default = True + ! If true, uses the old remapping-via-a-delta-z method for remapping u and v. If + ! false, uses the new method that remaps between grids described by an old and + ! new thickness. +REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER" + ! Coordinate mode for vertical regridding. Choose among the following + ! possibilities: LAYER - Isopycnal or stacked shallow water layers + ! ZSTAR, Z* - stretched geopotential z* + ! SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf + ! SIGMA - terrain following coordinates + ! RHO - continuous isopycnal + ! HYCOM1 - HyCOM-like hybrid coordinate + ! SLIGHT - stretched coordinates above continuous isopycnal + ! ADAPTIVE - optimize for smooth neutral density surfaces +BOUNDARY_EXTRAPOLATION = True ! [Boolean] default = False + ! When defined, a proper high-order reconstruction scheme is used within + ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM + ! reconstruction will also be used within boundary cells. +ALE_COORDINATE_CONFIG = "HYBRID:hycom1_75_800m.nc,sigma2,FNC1:2,4000,4.5,.01" ! default = "UNIFORM" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter ALE_RESOLUTION + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! 
HYBRID:vgrid.nc,sigma2,dz +!ALE_RESOLUTION = 7*2.0, 2*2.01, 2.02, 2.03, 2.05, 2.08, 2.11, 2.15, 2.21, 2.2800000000000002, 2.37, 2.48, 2.61, 2.77, 2.95, 3.17, 3.4299999999999997, 3.74, 4.09, 4.49, 4.95, 5.48, 6.07, 6.74, 7.5, 8.34, 9.280000000000001, 10.33, 11.49, 12.77, 14.19, 15.74, 17.450000000000003, 19.31, 21.35, 23.56, 25.97, 28.580000000000002, 31.41, 34.47, 37.77, 41.32, 45.14, 49.25, 53.65, 58.370000000000005, 63.42, 68.81, 74.56, 80.68, 87.21000000000001, 94.14, 101.51, 109.33, 117.62, 126.4, 135.68, 145.5, 155.87, 166.81, 178.35, 190.51, 203.31, 216.78, 230.93, 245.8, 261.42, 277.83 ! [m] + ! The distribution of vertical resolution for the target + ! grid used for Eulerian-like coordinates. For example, + ! in z-coordinate mode, the parameter is a list of level + ! thicknesses (in m). In sigma-coordinate mode, the list + ! is of non-dimensional fractions of the water column. +!TARGET_DENSITIES = 1010.0, 1014.3034, 1017.8088, 1020.843, 1023.5566, 1025.813, 1027.0275, 1027.9114, 1028.6422, 1029.2795, 1029.852, 1030.3762, 1030.8626, 1031.3183, 1031.7486, 1032.1572, 1032.5471, 1032.9207, 1033.2798, 1033.6261, 1033.9608, 1034.2519, 1034.4817, 1034.6774, 1034.8508, 1035.0082, 1035.1533, 1035.2886, 1035.4159, 1035.5364, 1035.6511, 1035.7608, 1035.8661, 1035.9675, 1036.0645, 1036.1554, 1036.2411, 1036.3223, 1036.3998, 1036.4739, 1036.5451, 1036.6137, 1036.68, 1036.7441, 1036.8062, 1036.8526, 1036.8874, 1036.9164, 1036.9418, 1036.9647, 1036.9857, 1037.0052, 1037.0236, 1037.0409, 1037.0574, 1037.0738, 1037.0902, 1037.1066, 1037.123, 1037.1394, 1037.1558, 1037.1722, 1037.1887, 1037.206, 1037.2241, 1037.2435, 1037.2642, 1037.2866, 1037.3112, 1037.3389, 1037.3713, 1037.4118, 1037.475, 1037.6332, 1037.8104, 1038.0 ! [m] + ! HYBRID target densities for interfaces +REGRID_COMPRESSIBILITY_FRACTION = 0.01 ! [nondim] default = 0.0 + ! When interpolating potential density profiles we can add some artificial + ! compressibility solely to make homogeneous regions appear stratified. +MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" ! default = "NONE" + ! Determines how to specify the maximum interface depths. + ! Valid options are: + ! NONE - there are no maximum interface depths + ! PARAM - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! 
FNC1:string - FNC1:dz_min,H_total,power,precision +!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 12.75, 23.25, 36.49, 52.480000000000004, 71.22, 92.71000000000001, 116.94000000000001, 143.92000000000002, 173.65, 206.13, 241.36, 279.33000000000004, 320.05000000000007, 363.5200000000001, 409.7400000000001, 458.7000000000001, 510.4100000000001, 564.8700000000001, 622.0800000000002, 682.0300000000002, 744.7300000000002, 810.1800000000003, 878.3800000000003, 949.3300000000004, 1023.0200000000004, 1099.4600000000005, 1178.6500000000005, 1260.5900000000006, 1345.2700000000007, 1432.7000000000007, 1522.8800000000008, 1615.8100000000009, 1711.490000000001, 1809.910000000001, 1911.080000000001, 2015.0000000000011, 2121.670000000001, 2231.080000000001, 2343.2400000000007, 2458.1500000000005, 2575.8100000000004, 2696.2200000000003, 2819.3700000000003, 2945.2700000000004, 3073.9200000000005, 3205.3200000000006, 3339.4600000000005, 3476.3500000000004, 3615.9900000000002, 3758.38, 3903.52, 4051.4, 4202.03, 4355.41, 4511.54, 4670.41, 4832.03, 4996.4, 5163.5199999999995, 5333.379999999999, 5505.989999999999, 5681.3499999999985, 5859.459999999998, 6040.319999999998, 6223.919999999998, 6410.269999999999, 6599.369999999999, 6791.219999999999, 6985.8099999999995, 7183.15, 7383.24, 7586.08, 7791.67, 8000.0 + ! The list of maximum depths for each interface. +MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE" + ! Determines how to specify the maximum layer thicknesses. + ! Valid options are: + ! NONE - there are no maximum layer thicknesses + ! PARAM - use the vector-parameter MAX_LAYER_THICKNESS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAX_LAYER_THICKNESS = 400.0, 409.63, 410.32, 410.75, 411.07, 411.32, 411.52, 411.7, 411.86, 412.0, 412.13, 412.24, 412.35, 412.45, 412.54, 412.63, 412.71, 412.79, 412.86, 412.93, 413.0, 413.06, 413.12, 413.18, 413.24, 413.29, 413.34, 413.39, 413.44, 413.49, 413.54, 413.58, 413.62, 413.67, 413.71, 413.75, 413.78, 413.82, 413.86, 413.9, 413.93, 413.97, 414.0, 414.03, 414.06, 414.1, 414.13, 414.16, 414.19, 414.22, 414.24, 414.27, 414.3, 414.33, 414.35, 414.38, 414.41, 414.43, 414.46, 414.48, 414.51, 414.53, 414.55, 414.58, 414.6, 414.62, 414.65, 414.67, 414.69, 414.71, 414.73, 414.75, 414.77, 414.79, 414.83 ! [m] + ! The list of maximum thickness for each layer. +REMAPPING_SCHEME = "PPM_H4" ! default = "PLM" + ! This sets the reconstruction scheme used for vertical remapping for all + ! variables. It can be one of the following schemes: PCM (1st-order + ! accurate) + ! PLM (2nd-order accurate) + ! PPM_H4 (3rd-order accurate) + ! PPM_IH4 (3rd-order accurate) + ! PQM_IH4IH3 (4th-order accurate) + ! PQM_IH6IH5 (5th-order accurate) + +! === module MOM_grid === +! Parameters providing information about the lateral grid. + +! === module MOM_state_initialization === +INIT_LAYERS_FROM_Z_FILE = True ! [Boolean] default = False + ! If true, initialize the layer thicknesses, temperatures, and salinities from a + ! Z-space file on a latitude-longitude grid. + +! === module MOM_initialize_layers_from_Z === +TEMP_SALT_Z_INIT_FILE = "MOM6_IC_TS.nc" ! default = "temp_salt_z.nc" + ! The name of the z-space input file used to initialize + ! temperatures (T) and salinities (S). If T and S are not + ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE + ! must be set. +Z_INIT_FILE_PTEMP_VAR = "temp" ! default = "ptemp" + ! 
The name of the potential temperature variable in + ! TEMP_Z_INIT_FILE. +Z_INIT_FILE_SALT_VAR = "salt" ! default = "salt" + ! The name of the salinity variable in + ! SALT_Z_INIT_FILE. + +Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False + ! If True, then remap straight to model coordinate from file. +Z_INIT_REMAP_OLD_ALG = True ! [Boolean] default = True + ! If false, uses the preferred remapping algorithm for initialization. If true, + ! use an older, less robust algorithm for remapping. + +! === module MOM_diag_mediator === +!Jiande NUM_DIAG_COORDS = 2 ! default = 1 +NUM_DIAG_COORDS = 1 ! default = 1 + ! The number of diagnostic vertical coordinates to use. + ! For each coordinate, an entry in DIAG_COORDS must be provided. +!Jiande DIAG_COORDS = "z Z ZSTAR", "rho2 RHO2 RHO" ! +DIAG_COORDS = "z Z ZSTAR" + ! A list of string tuples associating diag_table modules to + ! a coordinate definition used for diagnostics. Each string + ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". +DIAG_COORD_DEF_Z="FILE:interpolate_zgrid_40L.nc,interfaces=zw" +DIAG_MISVAL = -1e34 +!DIAG_COORD_DEF_RHO2 = "RFNC1:35,999.5,1028,1028.5,8.,1038.,0.0078125" ! default = "WOA09" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter DIAG_COORD_RES_RHO2 + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz + +! === module MOM_MEKE === +USE_MEKE = True ! [Boolean] default = False + ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy + ! kinetic energy budget. +MEKE_GMCOEFF = 1.0 ! [nondim] default = -1.0 + ! The efficiency of the conversion of potential energy into MEKE by the + ! thickness mixing parameterization. If MEKE_GMCOEFF is negative, this + ! conversion is not used or calculated. +MEKE_BGSRC = 1.0E-13 ! [W kg-1] default = 0.0 + ! A background energy source for MEKE. +MEKE_KHTH_FAC = 0.5 ! [nondim] default = 0.0 + ! A factor that maps MEKE%Kh to KhTh. +MEKE_KHTR_FAC = 0.5 ! [nondim] default = 0.0 + ! A factor that maps MEKE%Kh to KhTr. +MEKE_KHMEKE_FAC = 1.0 ! [nondim] default = 0.0 + ! A factor that maps MEKE%Kh to Kh for MEKE itself. +MEKE_VISCOSITY_COEFF_KU = 1.0 ! [nondim] default = 0.0 + ! If non-zero, is the scaling coefficient in the expression for viscosity used to + ! parameterize harmonic lateral momentum mixing by unresolved eddies represented + ! by MEKE. Can be negative to represent backscatter from the unresolved eddies. +MEKE_ALPHA_RHINES = 0.15 ! [nondim] default = 0.05 + ! If positive, is a coefficient weighting the Rhines scale in the expression for + ! mixing length used in MEKE-derived diffusivity. +MEKE_ALPHA_EADY = 0.15 ! [nondim] default = 0.05 + ! If positive, is a coefficient weighting the Eady length scale in the + ! expression for mixing length used in MEKE-derived diffusivity. + +! === module MOM_lateral_mixing_coeffs === +USE_VARIABLE_MIXING = True ! [Boolean] default = False + ! If true, the variable mixing code will be called. This allows diagnostics to + ! be created even if the scheme is not used. 
If KHTR_SLOPE_CFF>0 or + ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter + ! file. +RESOLN_SCALED_KH = True ! [Boolean] default = False + ! If true, the Laplacian lateral viscosity is scaled away when the first + ! baroclinic deformation radius is well resolved. +RESOLN_SCALED_KHTH = True ! [Boolean] default = False + ! If true, the interface depth diffusivity is scaled away when the first + ! baroclinic deformation radius is well resolved. +KHTH_USE_EBT_STRUCT = True ! [Boolean] default = False + ! If true, uses the equivalent barotropic structure as the vertical structure of + ! thickness diffusivity. +KHTR_SLOPE_CFF = 0.25 ! [nondim] default = 0.0 + ! The nondimensional coefficient in the Visbeck formula for the epipycnal tracer + ! diffusivity +USE_STORED_SLOPES = True ! [Boolean] default = False + ! If true, the isopycnal slopes are calculated once and stored for re-use. This + ! uses more memory but avoids calling the equation of state more times than + ! should be necessary. +KH_RES_FN_POWER = 100 ! [nondim] default = 2 + ! The power of dx/Ld in the Kh resolution function. Any positive integer may be + ! used, although even integers are more efficient to calculate. Setting this + ! greater than 100 results in a step-function being used. +INTERPOLATE_RES_FN = False ! [Boolean] default = True + ! If true, interpolate the resolution function to the velocity points from the + ! thickness points; otherwise interpolate the wave speed and calculate the + ! resolution function independently at each point. +GILL_EQUATORIAL_LD = True ! [Boolean] default = False + ! If true, uses Gill's definition of the baroclinic equatorial deformation + ! radius, otherwise, if false, use Pedlosky's definition. These definitions + ! differ by a factor of 2 in front of the beta term in the denominator. Gill's + ! is the more appropriate definition. +INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True + ! If true, use a more robust estimate of the first mode wave speed as the + ! starting point for iterations. + +! === module MOM_set_visc === +CHANNEL_DRAG = True ! [Boolean] default = False + ! If true, the bottom drag is exerted directly on each layer proportional to the + ! fraction of the bottom it overlies. +PRANDTL_TURB = 1.25 ! [nondim] default = 1.0 + ! The turbulent Prandtl number applied to shear instability. +HBBL = 10.0 ! [m] + ! The thickness of a bottom boundary layer with a viscosity of KVBBL if + ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom + ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but + ! LINEAR_DRAG is not. +DRAG_BG_VEL = 0.1 ! [m s-1] default = 0.0 + ! DRAG_BG_VEL is either the assumed bottom velocity (with LINEAR_DRAG) or an + ! unresolved velocity that is combined with the resolved velocity to estimate + ! the velocity magnitude. DRAG_BG_VEL is only used when BOTTOMDRAGLAW is + ! defined. +BBL_USE_EOS = True ! [Boolean] default = False + ! If true, use the equation of state in determining the properties of the bottom + ! boundary layer. Otherwise use the layer target potential densities. +BBL_THICK_MIN = 0.1 ! [m] default = 0.0 + ! The minimum bottom boundary layer thickness that can be used with + ! BOTTOMDRAGLAW. This might be Kv/(cdrag*drag_bg_vel) to give Kv as the minimum + ! near-bottom viscosity. +KV = 1.0E-04 ! [m2 s-1] + ! The background kinematic viscosity in the interior. The molecular value, ~1e-6 + ! m2 s-1, may be used. +KV_BBL_MIN = 0.0 ! [m2 s-1] default = 1.0E-04 + ! 
The minimum viscosities in the bottom boundary layer. +KV_TBL_MIN = 0.0 ! [m2 s-1] default = 1.0E-04 + ! The minimum viscosities in the top boundary layer. + +! === module MOM_thickness_diffuse === +KHTH_MAX_CFL = 0.1 ! [nondimensional] default = 0.8 + ! The maximum value of the local diffusive CFL ratio that is permitted for the + ! thickness diffusivity. 1.0 is the marginally unstable value in a pure layered + ! model, but much smaller numbers (e.g. 0.1) seem to work better for ALE-based + ! models. +KHTH_USE_FGNV_STREAMFUNCTION = True ! [Boolean] default = False + ! If true, use the streamfunction formulation of Ferrari et al., 2010, which + ! effectively emphasizes graver vertical modes by smoothing in the vertical. +FGNV_FILTER_SCALE = 0.1 ! [nondim] default = 1.0 + ! A coefficient scaling the vertical smoothing term in the Ferrari et al., 2010, + ! streamfunction formulation. +USE_GM_WORK_BUG = True ! [Boolean] default = True + ! If true, compute the top-layer work tendency on the u-grid with the incorrect + ! sign, for legacy reproducibility. + +! === module MOM_continuity === + +! === module MOM_continuity_PPM === +ETA_TOLERANCE = 1.0E-06 ! [m] default = 3.75E-09 + ! The tolerance for the differences between the barotropic and baroclinic + ! estimates of the sea surface height due to the fluxes through each face. The + ! total tolerance for SSH is 4 times this value. The default is + ! 0.5*NK*ANGSTROM, and this should not be set less than about + ! 10^-15*MAXIMUM_DEPTH. +ETA_TOLERANCE_AUX = 0.001 ! [m] default = 1.0E-06 + ! The tolerance for free-surface height discrepancies between the barotropic + ! solution and the sum of the layer thicknesses when calculating the auxiliary + ! corrected velocities. By default, this is the same as ETA_TOLERANCE, but can + ! be made larger for efficiency. + +! === module MOM_CoriolisAdv === +CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY" + ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid + ! values are: + ! SADOURNY75_ENERGY - Sadourny, 1975; energy cons. + ! ARAKAWA_HSU90 - Arakawa & Hsu, 1990 + ! SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons. + ! ARAKAWA_LAMB81 - Arakawa & Lamb, 1981; En. + Enst. + ! ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with + ! Arakawa & Hsu and Sadourny energy +BOUND_CORIOLIS = True ! [Boolean] default = False + ! If true, the Coriolis terms at u-points are bounded by the four estimates of + ! (f+rv)v from the four neighboring v-points, and similarly at v-points. This + ! option would have no effect on the SADOURNY Coriolis scheme if it were + ! possible to use centered difference thickness fluxes. + +! === module MOM_PressureForce === + +! === module MOM_PressureForce_AFV === +MASS_WEIGHT_IN_PRESSURE_GRADIENT = True ! [Boolean] default = False + ! If true, use mass weighting when interpolating T/S for integrals near the + ! bathymetry in AFV pressure gradient calculations. + +! === module MOM_hor_visc === +LAPLACIAN = True ! [Boolean] default = False + ! If true, use a Laplacian horizontal viscosity. +KH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the grid spacing to calculate the + ! Laplacian viscosity. The final viscosity is the largest of this scaled + ! viscosity, the Smagorinsky and Leith viscosities, and KH. +KH_SIN_LAT = 2000.0 ! [m2 s-1] default = 0.0 + ! The amplitude of a latitudinally-dependent background viscosity of the form + ! KH_SIN_LAT*(SIN(LAT)**KH_PWR_OF_SINE). +SMAGORINSKY_KH = True ! 
[Boolean] default = False + ! If true, use a Smagorinsky nonlinear eddy viscosity. +SMAG_LAP_CONST = 0.15 ! [nondim] default = 0.0 + ! The nondimensional Laplacian Smagorinsky constant, often 0.15. +AH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the cube of the grid spacing to + ! calculate the biharmonic viscosity. The final viscosity is the largest of this + ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH. +SMAGORINSKY_AH = True ! [Boolean] default = False + ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity. +SMAG_BI_CONST = 0.06 ! [nondim] default = 0.0 + ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06. +USE_LAND_MASK_FOR_HVISC = False ! [Boolean] default = False + ! If true, use the land mask for the computation of thicknesses at velocity + ! locations. This eliminates the dependence on arbitrary values over land or + ! outside of the domain. Default is False in order to maintain answers with + ! legacy experiments but should be changed to True for new experiments. + +! === module MOM_vert_friction === +HMIX_FIXED = 0.5 ! [m] + ! The prescribed depth over which the near-surface viscosity and diffusivity are + ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. +MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 + ! The maximum velocity allowed before the velocity components are truncated. + +! === module MOM_PointAccel === +U_TRUNC_FILE = "U_velocity_truncations" ! default = "" + ! The absolute path to a file into which the accelerations leading to zonal + ! velocity truncations are written. Undefine this for efficiency if this + ! diagnostic is not needed. +V_TRUNC_FILE = "V_velocity_truncations" ! default = "" + ! The absolute path to a file into which the accelerations leading to meridional + ! velocity truncations are written. Undefine this for efficiency if this + ! diagnostic is not needed. + +! === module MOM_barotropic === +BOUND_BT_CORRECTION = True ! [Boolean] default = False + ! If true, the corrective pseudo mass-fluxes into the barotropic solver are + ! limited to values that require less than maxCFL_BT_cont to be accommodated. +BT_PROJECT_VELOCITY = True ! [Boolean] default = False + ! If true, step the barotropic velocity first and project out the velocity + ! tendency by 1+BEBT when calculating the transport. The default (false) is to + ! use a predictor continuity step to find the pressure field, and then to do a + ! corrector continuity step using a weighted average of the old and new + ! velocities, with weights of (1-BEBT) and BEBT. +DYNAMIC_SURFACE_PRESSURE = True ! [Boolean] default = False + ! If true, add a dynamic pressure due to a viscous ice shelf, for instance. +BEBT = 0.2 ! [nondim] default = 0.1 + ! BEBT determines whether the barotropic time stepping uses the forward-backward + ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range + ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1 + ! (for a backward Euler treatment). In practice, BEBT must be greater than about + ! 0.05. +DTBT = -0.9 ! [s or nondim] default = -0.98 + ! The barotropic time step, in s. DTBT is only used with the split explicit time + ! stepping. To set the time step automatically based on the maximum stable value + ! 
use 0, or a negative value gives the fraction of the stable value. Setting + ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will + ! actually be used is an integer fraction of DT, rounding down. +BT_USE_OLD_CORIOLIS_BRACKET_BUG = True ! [Boolean] default = False + ! If True, use an order of operations that is not bitwise rotationally symmetric + ! in the meridional Coriolis term of the barotropic solver. + +! === module MOM_mixed_layer_restrat === +MIXEDLAYER_RESTRAT = True ! [Boolean] default = False + ! If true, a density-gradient dependent re-stratifying flow is imposed in the + ! mixed layer. Can be used in ALE mode without restriction but in layer mode can + ! only be used if BULKMIXEDLAYER is true. +FOX_KEMPER_ML_RESTRAT_COEF = 1.0 ! [nondim] default = 0.0 + ! A nondimensional coefficient that is proportional to the ratio of the + ! deformation radius to the dominant lengthscale of the submesoscale mixed layer + ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic + ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of + ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al. + ! (2010) +MLE_FRONT_LENGTH = 200.0 ! [m] default = 0.0 + ! If non-zero, is the frontal-length scale used to calculate the upscaling of + ! buoyancy gradients that is otherwise represented by the parameter + ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended + ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0. +MLE_USE_PBL_MLD = True ! [Boolean] default = False + ! If true, the MLE parameterization will use the mixed-layer depth provided by + ! the active PBL parameterization. If false, MLE will estimate a MLD based on a + ! density difference with the surface using the parameter MLE_DENSITY_DIFF. +MLE_MLD_DECAY_TIME = 2.592E+06 ! [s] default = 0.0 + ! The time-scale for a running-mean filter applied to the mixed-layer depth used + ! in the MLE restratification parameterization. When the MLD deepens below the + ! current running-mean the running-mean is instantaneously set to the current + ! MLD. + +! === module MOM_diabatic_driver === +! The following parameters are used for diabatic processes. +ENERGETICS_SFC_PBL = True ! [Boolean] default = False + ! If true, use an implied energetics planetary boundary layer scheme to + ! determine the diffusivity and viscosity in the surface boundary layer. +EPBL_IS_ADDITIVE = False ! [Boolean] default = True + ! If true, the diffusivity from ePBL is added to all other diffusivities. + ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used. + +! === module MOM_CVMix_KPP === +! This is the MOM wrapper to CVMix:KPP +! See http://cvmix.github.io/ + +! === module MOM_tidal_mixing === +! Vertical Tidal Mixing Parameterization +INT_TIDE_DISSIPATION = True ! [Boolean] default = False + ! If true, use an internal tidal dissipation scheme to drive diapycnal mixing, + ! along the lines of St. Laurent et al. (2002) and Simmons et al. (2004). +INT_TIDE_PROFILE = "POLZIN_09" ! default = "STLAURENT_02" + ! INT_TIDE_PROFILE selects the vertical profile of energy dissipation with + ! INT_TIDE_DISSIPATION. Valid values are: + ! STLAURENT_02 - Use the St. Laurent et al exponential + ! decay profile. + ! POLZIN_09 - Use the Polzin WKB-stretched algebraic + ! decay profile. +INT_TIDE_DECAY_SCALE = 300.3003003003003 ! [m] default = 500.0 + ! The decay scale away from the bottom for tidal TKE with the new coding when + ! INT_TIDE_DISSIPATION is used. 
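[Editor's note: two reading aids for the templates in this patch, expressed as Python sketches rather than MOM6 source.]

First, the "FNC1:dz_min,H_total,power,precision" strings accepted by MAXIMUM_INT_DEPTH_CONFIG and MAX_LAYER_THICKNESS_CONFIG above describe a generated profile rather than an explicit list. The sketch below is a plausible reconstruction of that generator, inferred from the commented-out MAXIMUM_INT_DEPTHS values in this file; the function name is illustrative and the rounding is simplified:

    # Plausible reconstruction of "FNC1:dz_min,H_total,power,precision" (not MOM6 code).
    # nk is the layer count (NK = 75 in these templates); precision .01 ~ 2 decimals.
    def fnc1_thicknesses(dz_min, h_total, power, nk, ndigits=2):
        # Power-law weights; the first weight is zero, so dz(1) == dz_min exactly.
        w = [(k / (nk - 1)) ** power for k in range(nk)]
        scale = (h_total - nk * dz_min) / sum(w)
        # MOM6 also nudges the rounded values so they sum exactly to H_total; omitted here.
        return [round(dz_min + scale * wk, ndigits) for wk in w]

    # MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" with 75 layers:
    depths = [0.0]
    for dz in fnc1_thicknesses(5.0, 8000.0, 1.0, 75):
        depths.append(round(depths[-1] + dz, 2))
    # depths begins 0.0, 5.0, 12.75, 23.25, 36.49, ... matching the list above.

Second, tokens of the form @[NAME] (for example CHL_FILE = @[CHLCLIM] below, or DT = @[DT_DYNAM_MOM6] in the template that follows) are not valid MOM6 input; they are placeholders the workflow fills in when it stages a run directory. A minimal illustration of that substitution, with a hypothetical helper (the workflow's actual templating tool may differ):

    import re

    def fill_at_tokens(text, values):
        # Replace each @[NAME] with values["NAME"]; leave unknown tokens intact
        # so unset settings remain visible in the rendered file.
        return re.sub(r"@\[(\w+)\]",
                      lambda m: str(values.get(m.group(1), m.group(0))),
                      text)

    print(fill_at_tokens("DT = @[DT_DYNAM_MOM6] ! [s]", {"DT_DYNAM_MOM6": 1800}))
    # -> DT = 1800 ! [s]
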
+KAPPA_ITIDES = 6.28319E-04 ! [m-1] default = 6.283185307179586E-04 + ! A topographic wavenumber used with INT_TIDE_DISSIPATION. The default is 2pi/10 + ! km, as in St.Laurent et al. 2002. +KAPPA_H2_FACTOR = 0.84 ! [nondim] default = 1.0 + ! A scaling factor for the roughness amplitude with INT_TIDE_DISSIPATION. +TKE_ITIDE_MAX = 0.1 ! [W m-2] default = 1000.0 + ! The maximum internal tide energy source available to mix above the bottom + ! boundary layer with INT_TIDE_DISSIPATION. +READ_TIDEAMP = True ! [Boolean] default = False + ! If true, read a file (given by TIDEAMP_FILE) containing the tidal amplitude + ! with INT_TIDE_DISSIPATION. +TIDEAMP_FILE = "tidal_amplitude.nc" ! default = "tideamp.nc" + ! The path to the file containing the spatially varying tidal amplitudes with + ! INT_TIDE_DISSIPATION. +H2_FILE = "ocean_topog.nc" ! + ! The path to the file containing the sub-grid-scale topographic roughness + ! amplitude with INT_TIDE_DISSIPATION. + +! === module MOM_CVMix_conv === +! Parameterization of enhanced mixing due to convection via CVMix + +! === module MOM_geothermal === +GEOTHERMAL_SCALE = 1.0 ! [W m-2 or various] default = 0.0 + ! The constant geothermal heat flux, a rescaling factor for the heat flux read + ! from GEOTHERMAL_FILE, or 0 to disable the geothermal heating. +GEOTHERMAL_FILE = "geothermal_davies2013_v1.nc" ! default = "" + ! The file from which the geothermal heating is to be read, or blank to use a + ! constant heating rate. +GEOTHERMAL_VARNAME = "geothermal_hf" ! default = "geo_heat" + ! The name of the geothermal heating variable in GEOTHERMAL_FILE. + +! === module MOM_set_diffusivity === +BBL_MIXING_AS_MAX = False ! [Boolean] default = True + ! If true, take the maximum of the diffusivity from the BBL mixing and the other + ! diffusivities. Otherwise, diffusivity from the BBL_mixing is simply added. +USE_LOTW_BBL_DIFFUSIVITY = True ! [Boolean] default = False + ! If true, uses a simple, imprecise but non-coordinate dependent, model of BBL + ! mixing diffusivity based on Law of the Wall. Otherwise, uses the original BBL + ! scheme. +SIMPLE_TKE_TO_KD = True ! [Boolean] default = False + ! If true, uses a simple estimate of Kd/TKE that will work for arbitrary + ! vertical coordinates. If false, calculates Kd/TKE and bounds based on exact + ! energetics for an isopycnal layer-formulation. + +! === module MOM_bkgnd_mixing === +! Adding static vertical background mixing coefficients +KD = 1.5E-05 ! [m2 s-1] + ! The background diapycnal diffusivity of density in the interior. Zero or the + ! molecular value, ~1e-7 m2 s-1, may be used. +KD_MIN = 2.0E-06 ! [m2 s-1] default = 1.5E-07 + ! The minimum diapycnal diffusivity. +HENYEY_IGW_BACKGROUND = True ! [Boolean] default = False + ! If true, use a latitude-dependent scaling for the near surface background + ! diffusivity, as described in Harrison & Hallberg, JPO 2008. +KD_MAX = 0.1 ! [m2 s-1] default = -1.0 + ! The maximum permitted increment for the diapycnal diffusivity from TKE-based + ! parameterizations, or a negative value for no limit. + +! === module MOM_kappa_shear === +! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008 +USE_JACKSON_PARAM = True ! [Boolean] default = False + ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing + ! parameterization. +MAX_RINO_IT = 25 ! [nondim] default = 50 + ! The maximum number of iterations that may be used to estimate the Richardson + ! number driven mixing. +VERTEX_SHEAR = False ! [Boolean] default = False + ! 
If true, do the calculations of the shear-driven mixing + ! at the cell vertices (i.e., the vorticity points). +KAPPA_SHEAR_ITER_BUG = True ! [Boolean] default = True + ! If true, use an older, dimensionally inconsistent estimate of the derivative + ! of diffusivity with energy in the Newton's method iteration. The bug causes + ! undercorrections when dz > 1 m. +KAPPA_SHEAR_ALL_LAYER_TKE_BUG = True ! [Boolean] default = True + ! If true, report back the latest estimate of TKE instead of the time average + ! TKE when there is mass in all layers. Otherwise always report the time + ! averaged TKE, as is currently done when there are some massless layers. + +! === module MOM_CVMix_shear === +! Parameterization of shear-driven turbulence via CVMix (various options) + +! === module MOM_CVMix_ddiff === +! Parameterization of mixing due to double diffusion processes via CVMix + +! === module MOM_diabatic_aux === +! The following parameters are used for auxiliary diabatic processes. +PRESSURE_DEPENDENT_FRAZIL = False ! [Boolean] default = False + ! If true, use a pressure dependent freezing temperature when making frazil. The + ! default is false, which will be faster but is inappropriate with ice-shelf + ! cavities. +VAR_PEN_SW = True ! [Boolean] default = False + ! If true, use one of the CHL_A schemes specified by OPACITY_SCHEME to determine + ! the e-folding depth of incoming short wave radiation. +CHL_FILE = @[CHLCLIM] ! + ! CHL_FILE is the file containing chl_a concentrations in the variable CHL_A. It + ! is used when VAR_PEN_SW and CHL_FROM_FILE are true. +CHL_VARNAME = "chlor_a" ! default = "CHL_A" + ! Name of CHL_A variable in CHL_FILE. + +! === module MOM_energetic_PBL === +ML_OMEGA_FRAC = 0.001 ! [nondim] default = 0.0 + ! When setting the decay scale for turbulence, use this fraction of the absolute + ! rotation rate blended with the local value of f, as sqrt((1-of)*f^2 + + ! of*4*omega^2). +TKE_DECAY = 0.01 ! [nondim] default = 2.5 + ! TKE_DECAY relates the vertical rate of decay of the TKE available for + ! mechanical entrainment to the natural Ekman depth. +EPBL_MSTAR_SCHEME = "OM4" ! default = "CONSTANT" + ! EPBL_MSTAR_SCHEME selects the method for setting mstar. Valid values are: + ! CONSTANT - Use a fixed mstar given by MSTAR + ! OM4 - Use L_Ekman/L_Obukhov in the stabilizing limit, as in OM4 + ! REICHL_H18 - Use the scheme documented in Reichl & Hallberg, 2018. +MSTAR_CAP = 10.0 ! [nondim] default = -1.0 + ! If this value is positive, it sets the maximum value of mstar allowed in ePBL. + ! (This is not used if EPBL_MSTAR_SCHEME = CONSTANT). +MSTAR2_COEF1 = 0.29 ! [nondim] default = 0.3 + ! Coefficient in computing mstar when rotation and stabilizing effects are both + ! important (used if EPBL_MSTAR_SCHEME = OM4). +MSTAR2_COEF2 = 0.152 ! [nondim] default = 0.085 + ! Coefficient in computing mstar when only rotation limits the total mixing + ! (used if EPBL_MSTAR_SCHEME = OM4) +EPBL_MLD_BISECTION = True ! [Boolean] default = False + ! If true, use bisection with the iterative determination of the self-consistent + ! mixed layer depth. Otherwise use the false position after a maximum and + ! minimum bound have been evaluated and the returned value or bisection before + ! this. +NSTAR = 0.06 ! [nondim] default = 0.2 + ! The portion of the buoyant potential energy imparted by surface fluxes that is + ! available to drive entrainment at the base of mixed layer when that energy is + ! positive. +MSTAR_CONV_ADJ = 0.667 ! [nondim] default = 0.0 + ! 
Coefficient used for reducing mstar during convection due to reduction of + ! stable density gradient. +USE_MLD_ITERATION = True ! [Boolean] default = False + ! A logical that specifies whether or not to use the distance to the bottom of + ! the actively turbulent boundary layer to help set the EPBL length scale. +EPBL_TRANSITION_SCALE = 0.01 ! [nondim] default = 0.1 + ! A scale for the mixing length in the transition layer at the edge of the + ! boundary layer as a fraction of the boundary layer thickness. +MIX_LEN_EXPONENT = 1.0 ! [nondim] default = 2.0 + ! The exponent applied to the ratio of the distance to the MLD and the MLD depth + ! which determines the shape of the mixing length. This is only used if + ! USE_MLD_ITERATION is True. +USE_LA_LI2016 = @[MOM6_USE_LI2016] ! [nondim] default = False + ! A logical to use the Li et al. 2016 (submitted) formula to determine the + ! Langmuir number. +USE_WAVES = @[MOM6_USE_WAVES] ! [Boolean] default = False + ! If true, enables surface wave modules. +WAVE_METHOD = "SURFACE_BANDS" ! default = "EMPTY" + ! Choice of wave method, valid options include: + ! TEST_PROFILE - Prescribed from surface Stokes drift + ! and a decay wavelength. + ! SURFACE_BANDS - Computed from multiple surface values + ! and decay wavelengths. + ! DHH85 - Uses Donelan et al. 1985 empirical + ! wave spectrum with prescribed values. + ! LF17 - Infers Stokes drift profile from wind + ! speed following Li and Fox-Kemper 2017. +SURFBAND_SOURCE = "COUPLER" ! default = "EMPTY" + ! Choice of SURFACE_BANDS data mode, valid options include: + ! DATAOVERRIDE - Read from NetCDF using FMS DataOverride. + ! COUPLER - Look for variables from coupler pass + ! INPUT - Testing with fixed values. +STK_BAND_COUPLER = 3 ! default = 1 + ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has + ! to be consistent with the number of Stokes drift bands in WW3, or the model + ! will fail. +SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 ! [rad/m] default = 0.12566 + ! Central wavenumbers for surface Stokes drift bands. +EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE" + ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence. + ! Valid values are: + ! NONE - Do not do any extra mixing due to Langmuir turbulence + ! RESCALE - Use a multiplicative rescaling of mstar to account for Langmuir + ! turbulence + ! ADDITIVE - Add a Langmuir turbulence contribution to mstar to other + ! contributions +LT_ENHANCE_COEF = 0.044 ! [nondim] default = 0.447 + ! Coefficient for Langmuir enhancement of mstar +LT_ENHANCE_EXP = -1.5 ! [nondim] default = -1.33 + ! Exponent for Langmuir enhancement of mstar +LT_MOD_LAC1 = 0.0 ! [nondim] default = -0.87 + ! Coefficient for modification of Langmuir number due to MLD approaching Ekman + ! depth. +LT_MOD_LAC4 = 0.0 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! stable Obukhov depth. +LT_MOD_LAC5 = 0.22 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! unstable Obukhov depth. + +! === module MOM_regularize_layers === + +! === module MOM_opacity === +PEN_SW_NBANDS = 3 ! default = 1 + ! The number of bands of penetrating shortwave radiation. + +! === module MOM_tracer_advect === +TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM" + ! The horizontal transport scheme for tracers: + ! PLM - Piecewise Linear Method + ! PPM:H3 - Piecewise Parabolic Method (Huynh 3rd order) + ! 
PPM - Piecewise Parabolic Method (Colella-Woodward) + +! === module MOM_tracer_hor_diff === +KHTR = 50.0 ! [m2 s-1] default = 0.0 + ! The background along-isopycnal tracer diffusivity. +CHECK_DIFFUSIVE_CFL = True ! [Boolean] default = False + ! If true, use enough iterations of the diffusion to ensure that the diffusive + ! equivalent of the CFL limit is not violated. If false, always use the greater + ! of 1 or MAX_TR_DIFFUSION_CFL iteration. +MAX_TR_DIFFUSION_CFL = 2.0 ! [nondim] default = -1.0 + ! If positive, locally limit the along-isopycnal tracer diffusivity to keep the + ! diffusive CFL locally at or below this value. The number of diffusive + ! iterations is often this value or the next greater integer. + +! === module MOM_neutral_diffusion === +! This module implements neutral diffusion of tracers +USE_NEUTRAL_DIFFUSION = True ! [Boolean] default = False + ! If true, enables the neutral diffusion module. + +! === module ocean_model_init === +RESTART_CHECKSUMS_REQUIRED = False + +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = false ! +!ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +!ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 + +! === module MOM_lateral_boundary_diffusion === +! This module implements lateral diffusion of tracers near boundaries + +! === module MOM_sum_output === +MAXTRUNC = 100000 ! [truncations save_interval-1] default = 0 + ! The run will be stopped, and the day set to a very large value if the velocity + ! is truncated more than MAXTRUNC times between energy saves. Set MAXTRUNC to 0 + ! to stop if there is any truncation of velocities. +ENERGYSAVEDAYS = 1.0 ! [days] default = 1.0 + ! The interval in units of TIMEUNIT between saves of the energies of the run and + ! other globally summed diagnostics. +ENERGYSAVEDAYS_GEOMETRIC = 0.25 ! [days] default = 0.0 + ! The starting interval in units of TIMEUNIT for the first call to save the + ! energies of the run and other globally summed diagnostics. The interval + ! increases by a factor of 2 after each call to write_energy. + +! === module ocean_model_init === + +! === module MOM_surface_forcing === +OCEAN_SURFACE_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the surface velocity field that is + ! returned to the coupler. Valid values include + ! 'A', 'B', or 'C'. + +MAX_P_SURF = 0.0 ! [Pa] default = -1.0 + ! The maximum surface pressure that can be exerted by the atmosphere and + ! floating sea-ice or ice shelves. This is needed because the FMS coupling + ! structure does not limit the water that can be frozen out of the ocean and the + ! ice-ocean heat fluxes are treated explicitly. No limit is applied if a + ! negative value is used. +WIND_STAGGER = "A" ! default = "C" + ! 
A case-insensitive character string to indicate the + ! staggering of the input wind stress field. Valid + ! values are 'A', 'B', or 'C'. +CD_TIDES = 0.0018 ! [nondim] default = 1.0E-04 + ! The drag coefficient that applies to the tides. +GUST_CONST = 0.0 ! [Pa] default = 0.02 + ! The background gustiness in the winds. +FIX_USTAR_GUSTLESS_BUG = False ! [Boolean] default = False + ! If true, correct a bug in the time-averaging of the gustless wind friction + ! velocity. +USE_RIGID_SEA_ICE = True ! [Boolean] default = False + ! If true, sea-ice is rigid enough to exert a nonhydrostatic pressure that + ! resists vertical motion. +SEA_ICE_RIGID_MASS = 100.0 ! [kg m-2] default = 1000.0 + ! The mass of sea-ice per unit area at which the sea-ice starts to exhibit + ! rigidity. +LIQUID_RUNOFF_FROM_DATA = @[MOM6_RIVER_RUNOFF] ! [Boolean] default = False + ! If true, allows liquid river runoff to be specified via + ! the data_table using the component name 'OCN'. +! === module ocean_stochastics === +DO_SPPT = @[DO_OCN_SPPT] ! [Boolean] default = False + ! If true, perturb the diabatic tendencies in MOM_diabatic_driver +PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False + ! If true, perturb the KE dissipation and destruction in MOM_energetic_PBL +! === module MOM_restart === + +! === module MOM_file_parser === diff --git a/parm/ufs/mom6/MOM_input_template_100 b/parm/ufs/mom6/MOM_input_template_100 new file mode 100644 index 0000000000..f26d6e4bfb --- /dev/null +++ b/parm/ufs/mom6/MOM_input_template_100 @@ -0,0 +1,866 @@ +! This file was written by the model and records all non-layout or debugging parameters used at run-time. +! === module MOM === + +! === module MOM_unit_scaling === +! Parameters for doing unit scaling of variables. +USE_REGRIDDING = True ! [Boolean] default = False + ! If True, use the ALE algorithm (regridding/remapping). If False, use the + ! layered isopycnal algorithm. +THICKNESSDIFFUSE = True ! [Boolean] default = False + ! If true, interface heights are diffused with a coefficient of KHTH. +THICKNESSDIFFUSE_FIRST = True ! [Boolean] default = False + ! If true, do thickness diffusion before dynamics. This is only used if + ! THICKNESSDIFFUSE is true. +DT = @[DT_DYNAM_MOM6] ! [s] + ! The (baroclinic) dynamics time step. The time-step that is actually used will + ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode + ! or the coupling timestep in coupled mode.) +DT_THERM = @[DT_THERM_MOM6] ! [s] default = 1800.0 + ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be + ! an integer multiple of DT and less than the forcing or coupling time-step, + ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer + ! multiple of the coupling timestep. By default DT_THERM is set to DT. +THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN] ! [Boolean] default = False + ! If true, the MOM will take thermodynamic and tracer timesteps that can be + ! longer than the coupling timestep. The actual thermodynamic timestep that is + ! used in this case is the largest integer multiple of the coupling timestep + ! that is less than or equal to DT_THERM. +HFREEZE = 20.0 ! [m] default = -1.0 + ! If HFREEZE > 0, melt potential will be computed. The actual depth + ! over which melt potential is computed will be min(HFREEZE, OBLD) + ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default) + ! melt potential will not be computed. +DTBT_RESET_PERIOD = -1.0 ! [s] default = 7200.0 + ! 
The period between recalculations of DTBT (if DTBT <= 0). If DTBT_RESET_PERIOD + ! is negative, DTBT is set based only on information available at + ! initialization. If 0, DTBT will be set every dynamics time step. The default + ! is set by DT_THERM. This is only used if SPLIT is true. +FRAZIL = True ! [Boolean] default = False + ! If true, water freezes if it gets too cold, and the accumulated heat deficit + ! is returned in the surface state. FRAZIL is only used if + ! ENABLE_THERMODYNAMICS is true. +BOUND_SALINITY = True ! [Boolean] default = False + ! If true, limit salinity to being positive. (The sea-ice model may ask for more + ! salt than is available and drive the salinity negative otherwise.) +MIN_SALINITY = 0.01 ! [PPT] default = 0.0 + ! The minimum value of salinity when BOUND_SALINITY=True. +C_P = 3925.0 ! [J kg-1 K-1] default = 3991.86795711963 + ! The heat capacity of sea water, approximated as a constant. This is only used + ! if ENABLE_THERMODYNAMICS is true. The default value is from the TEOS-10 + ! definition of conservative temperature. +USE_PSURF_IN_EOS = False ! [Boolean] default = True + ! If true, always include the surface pressure contributions in equation of + ! state calculations. +CHECK_BAD_SURFACE_VALS = True ! [Boolean] default = False + ! If true, check the surface state for ridiculous values. +BAD_VAL_SSH_MAX = 50.0 ! [m] default = 20.0 + ! The value of SSH above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SSS_MAX = 75.0 ! [PPT] default = 45.0 + ! The value of SSS above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SST_MAX = 55.0 ! [deg C] default = 45.0 + ! The value of SST above which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +BAD_VAL_SST_MIN = -3.0 ! [deg C] default = -2.1 + ! The value of SST below which a bad value message is triggered, if + ! CHECK_BAD_SURFACE_VALS is true. +DEFAULT_2018_ANSWERS = True ! [Boolean] default = False + ! This sets the default value for the various _2018_ANSWERS parameters. +WRITE_GEOM = 2 ! default = 1 + ! If =0, never write the geometry and vertical grid files. If =1, write the + ! geometry and vertical grid files only for a new simulation. If =2, always + ! write the geometry and vertical grid files. Other values are invalid. +SAVE_INITIAL_CONDS = False ! [Boolean] default = False + ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE. + +! === module MOM_domains === +TRIPOLAR_N = True ! [Boolean] default = False + ! Use tripolar connectivity at the northern edge of the domain. With + ! TRIPOLAR_N, NIGLOBAL must be even. +NIGLOBAL = @[NX_GLB] ! + ! The total number of thickness grid points in the x-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NJGLOBAL = @[NY_GLB] ! + ! The total number of thickness grid points in the y-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. + +! === module MOM_hor_index === +! Sets the horizontal array index types. + +! === module MOM_fixed_initialization === +INPUTDIR = "INPUT" ! default = "." + ! The directory in which input files are found. + +! === module MOM_grid_init === +GRID_CONFIG = "mosaic" ! + ! A character string that determines the method for defining the horizontal + ! grid. Current options are: + ! mosaic - read the grid from a mosaic (supergrid) + ! file set by GRID_FILE. + ! cartesian - use a (flat) Cartesian grid. + ! 
spherical - use a simple spherical grid. + ! mercator - use a Mercator spherical grid. +GRID_FILE = "ocean_hgrid.nc" ! + ! Name of the file from which to read horizontal grid data. +GRID_ROTATION_ANGLE_BUGS = False ! [Boolean] default = True + ! If true, use an older algorithm to calculate the sine and + ! cosines needed to rotate between grid-oriented directions and + ! true north and east. Differences arise at the tripolar fold. +USE_TRIPOLAR_GEOLONB_BUG = False ! [Boolean] default = True + ! If true, use older code that incorrectly sets the longitude in some points + ! along the tripolar fold to be off by 360 degrees. +TOPO_CONFIG = "file" ! + ! This specifies how bathymetry is specified: + ! file - read bathymetric information from the file + ! specified by (TOPO_FILE). + ! flat - flat bottom set to MAXIMUM_DEPTH. + ! bowl - an analytically specified bowl-shaped basin + ! ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH. + ! spoon - a similar shape to 'bowl', but with a vertical + ! wall at the southern face. + ! halfpipe - a zonally uniform channel with a half-sine + ! profile in the meridional direction. + ! bbuilder - build topography from list of functions. + ! benchmark - use the benchmark test case topography. + ! Neverworld - use the Neverworld test case topography. + ! DOME - use a slope and channel configuration for the + ! DOME sill-overflow test case. + ! ISOMIP - use a slope and channel configuration for the + ! ISOMIP test case. + ! DOME2D - use a shelf and slope configuration for the + ! DOME2D gravity current/overflow test case. + ! Kelvin - flat but with rotated land mask. + ! seamount - Gaussian bump for spontaneous motion test case. + ! dumbbell - Sloshing channel with reservoirs on both ends. + ! shelfwave - exponential slope for shelfwave test case. + ! Phillips - ACC-like idealized topography used in the Phillips config. + ! dense - Denmark Strait-like dense water formation and overflow. + ! USER - call a user modified routine. +TOPO_EDITS_FILE = "@[TOPOEDITS]" ! default = "" + ! The file from which to read a list of i,j,z topography overrides. +ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES] ! default = "False" + ! If true, allow topography overrides to change ocean points to land. +MAXIMUM_DEPTH = 6500.0 ! [m] + ! The maximum depth of the ocean. +MINIMUM_DEPTH = 9.5 ! [m] default = 0.0 + ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is + ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is + ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than + ! MASKING_DEPTH are rounded to MINIMUM_DEPTH. + +! === module MOM_open_boundary === +! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply, +! if any. +MASKING_DEPTH = 0.0 ! [m] default = -9999.0 + ! The depth below which to mask points as land points, for which all fluxes are + ! zeroed out. MASKING_DEPTH is ignored if negative. +CHANNEL_CONFIG = "list" ! default = "none" + ! A parameter that determines which set of channels are + ! restricted to specific widths. Options are: + ! none - All channels have the grid width. + ! global_1deg - Sets 16 specific channels appropriate + ! for a 1-degree model, as used in CM2G. + ! list - Read the channel locations and widths from a + ! text file, like MOM_channel_list in the MOM_SIS + ! test case. + ! file - Read open face widths everywhere from a + ! NetCDF file on the model grid. +CHANNEL_LIST_FILE = "MOM_channels_SPEAR" ! 
default = "MOM_channel_list" + ! The file from which the list of narrowed channels is read. + +! === module MOM_verticalGrid === +! Parameters providing information about the vertical grid. +NK = 75 ! [nondim] + ! The number of model layers. + +! === module MOM_tracer_registry === + +! === module MOM_EOS === +TFREEZE_FORM = "MILLERO_78" ! default = "LINEAR" + ! TFREEZE_FORM determines which expression should be used for the freezing + ! point. Currently, the valid choices are "LINEAR", "MILLERO_78", "TEOS10" + +! === module MOM_restart === +PARALLEL_RESTARTFILES = True ! [Boolean] default = False + ! If true, each processor writes its own restart file, otherwise a single + ! restart file is generated + +! === module MOM_tracer_flow_control === +USE_IDEAL_AGE_TRACER = False ! [Boolean] default = False + ! If true, use the ideal_age_example tracer package. + +! === module ideal_age_example === + +! === module MOM_coord_initialization === +COORD_CONFIG = "file" ! default = "none" + ! This specifies how layers are to be defined: + ! ALE or none - used to avoid defining layers in ALE mode + ! file - read coordinate information from the file + ! specified by (COORD_FILE). + ! BFB - Custom coords for buoyancy-forced basin case + ! based on SST_S, T_BOT and DRHO_DT. + ! linear - linear based on interfaces not layers + ! layer_ref - linear based on layer densities + ! ts_ref - use reference temperature and salinity + ! ts_range - use range of temperature and salinity + ! (T_REF and S_REF) to determine surface density + ! and GINT calculate internal densities. + ! gprime - use reference density (RHO_0) for surface + ! density and GINT calculate internal densities. + ! ts_profile - use temperature and salinity profiles + ! (read from COORD_FILE) to set layer densities. + ! USER - call a user modified routine. +COORD_FILE = "layer_coord.nc" ! + ! The file from which the coordinate densities are read. +REMAP_UV_USING_OLD_ALG = True ! [Boolean] default = False + ! If true, uses the old remapping-via-a-delta-z method for remapping u and v. If + ! false, uses the new method that remaps between grids described by an old and + ! new thickness. +REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER" + ! Coordinate mode for vertical regridding. Choose among the following + ! possibilities: LAYER - Isopycnal or stacked shallow water layers + ! ZSTAR, Z* - stretched geopotential z* + ! SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf + ! SIGMA - terrain following coordinates + ! RHO - continuous isopycnal + ! HYCOM1 - HyCOM-like hybrid coordinate + ! SLIGHT - stretched coordinates above continuous isopycnal + ! ADAPTIVE - optimize for smooth neutral density surfaces +BOUNDARY_EXTRAPOLATION = True ! [Boolean] default = False + ! When defined, a proper high-order reconstruction scheme is used within + ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM + ! reconstruction will also be used within boundary cells. +ALE_COORDINATE_CONFIG = "HYBRID:hycom1_75_800m.nc,sigma2,FNC1:2,4000,4.5,.01" ! default = "UNIFORM" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter ALE_RESOLUTION + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! 
HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz +!ALE_RESOLUTION = 7*2.0, 2*2.01, 2.02, 2.03, 2.05, 2.08, 2.11, 2.15, 2.21, 2.2800000000000002, 2.37, 2.48, 2.61, 2.77, 2.95, 3.17, 3.4299999999999997, 3.74, 4.09, 4.49, 4.95, 5.48, 6.07, 6.74, 7.5, 8.34, 9.280000000000001, 10.33, 11.49, 12.77, 14.19, 15.74, 17.450000000000003, 19.31, 21.35, 23.56, 25.97, 28.580000000000002, 31.41, 34.47, 37.77, 41.32, 45.14, 49.25, 53.65, 58.370000000000005, 63.42, 68.81, 74.56, 80.68, 87.21000000000001, 94.14, 101.51, 109.33, 117.62, 126.4, 135.68, 145.5, 155.87, 166.81, 178.35, 190.51, 203.31, 216.78, 230.93, 245.8, 261.42, 277.83 ! [m] + ! The distribution of vertical resolution for the target + ! grid used for Eulerian-like coordinates. For example, + ! in z-coordinate mode, the parameter is a list of level + ! thicknesses (in m). In sigma-coordinate mode, the list + ! is of non-dimensional fractions of the water column. +!TARGET_DENSITIES = 1010.0, 1014.3034, 1017.8088, 1020.843, 1023.5566, 1025.813, 1027.0275, 1027.9114, 1028.6422, 1029.2795, 1029.852, 1030.3762, 1030.8626, 1031.3183, 1031.7486, 1032.1572, 1032.5471, 1032.9207, 1033.2798, 1033.6261, 1033.9608, 1034.2519, 1034.4817, 1034.6774, 1034.8508, 1035.0082, 1035.1533, 1035.2886, 1035.4159, 1035.5364, 1035.6511, 1035.7608, 1035.8661, 1035.9675, 1036.0645, 1036.1554, 1036.2411, 1036.3223, 1036.3998, 1036.4739, 1036.5451, 1036.6137, 1036.68, 1036.7441, 1036.8062, 1036.8526, 1036.8874, 1036.9164, 1036.9418, 1036.9647, 1036.9857, 1037.0052, 1037.0236, 1037.0409, 1037.0574, 1037.0738, 1037.0902, 1037.1066, 1037.123, 1037.1394, 1037.1558, 1037.1722, 1037.1887, 1037.206, 1037.2241, 1037.2435, 1037.2642, 1037.2866, 1037.3112, 1037.3389, 1037.3713, 1037.4118, 1037.475, 1037.6332, 1037.8104, 1038.0 ! [m] + ! HYBRID target densities for interfaces +MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.01" ! default = "NONE" + ! Determines how to specify the maximum interface depths. + ! Valid options are: + ! NONE - there are no maximum interface depths + ! PARAM - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! 
FNC1:string - FNC1:dz_min,H_total,power,precision +!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 12.75, 23.25, 36.49, 52.480000000000004, 71.22, 92.71000000000001, 116.94000000000001, 143.92000000000002, 173.65, 206.13, 241.36, 279.33000000000004, 320.05000000000007, 363.5200000000001, 409.7400000000001, 458.7000000000001, 510.4100000000001, 564.8700000000001, 622.0800000000002, 682.0300000000002, 744.7300000000002, 810.1800000000003, 878.3800000000003, 949.3300000000004, 1023.0200000000004, 1099.4600000000005, 1178.6500000000005, 1260.5900000000006, 1345.2700000000007, 1432.7000000000007, 1522.8800000000008, 1615.8100000000009, 1711.490000000001, 1809.910000000001, 1911.080000000001, 2015.0000000000011, 2121.670000000001, 2231.080000000001, 2343.2400000000007, 2458.1500000000005, 2575.8100000000004, 2696.2200000000003, 2819.3700000000003, 2945.2700000000004, 3073.9200000000005, 3205.3200000000006, 3339.4600000000005, 3476.3500000000004, 3615.9900000000002, 3758.38, 3903.52, 4051.4, 4202.03, 4355.41, 4511.54, 4670.41, 4832.03, 4996.4, 5163.5199999999995, 5333.379999999999, 5505.989999999999, 5681.3499999999985, 5859.459999999998, 6040.319999999998, 6223.919999999998, 6410.269999999999, 6599.369999999999, 6791.219999999999, 6985.8099999999995, 7183.15, 7383.24, 7586.08, 7791.67, 8000.0 + ! The list of maximum depths for each interface. +MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE" + ! Determines how to specify the maximum layer thicknesses. + ! Valid options are: + ! NONE - there are no maximum layer thicknesses + ! PARAM - use the vector-parameter MAX_LAYER_THICKNESS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAX_LAYER_THICKNESS = 400.0, 409.63, 410.32, 410.75, 411.07, 411.32, 411.52, 411.7, 411.86, 412.0, 412.13, 412.24, 412.35, 412.45, 412.54, 412.63, 412.71, 412.79, 412.86, 412.93, 413.0, 413.06, 413.12, 413.18, 413.24, 413.29, 413.34, 413.39, 413.44, 413.49, 413.54, 413.58, 413.62, 413.67, 413.71, 413.75, 413.78, 413.82, 413.86, 413.9, 413.93, 413.97, 414.0, 414.03, 414.06, 414.1, 414.13, 414.16, 414.19, 414.22, 414.24, 414.27, 414.3, 414.33, 414.35, 414.38, 414.41, 414.43, 414.46, 414.48, 414.51, 414.53, 414.55, 414.58, 414.6, 414.62, 414.65, 414.67, 414.69, 414.71, 414.73, 414.75, 414.77, 414.79, 414.83 ! [m] + ! The list of maximum thickness for each layer. +REMAPPING_SCHEME = "PPM_H4" ! default = "PLM" + ! This sets the reconstruction scheme used for vertical remapping for all + ! variables. It can be one of the following schemes: PCM (1st-order + ! accurate) + ! PLM (2nd-order accurate) + ! PPM_H4 (3rd-order accurate) + ! PPM_IH4 (3rd-order accurate) + ! PQM_IH4IH3 (4th-order accurate) + ! PQM_IH6IH5 (5th-order accurate) + +! === module MOM_grid === +! Parameters providing information about the lateral grid. + +! === module MOM_state_initialization === +INIT_LAYERS_FROM_Z_FILE = True ! [Boolean] default = False + ! If true, initialize the layer thicknesses, temperatures, and salinities from a + ! Z-space file on a latitude-longitude grid. + +! === module MOM_initialize_layers_from_Z === +TEMP_SALT_Z_INIT_FILE = "MOM6_IC_TS.nc" ! default = "temp_salt_z.nc" + ! The name of the z-space input file used to initialize + ! temperatures (T) and salinities (S). If T and S are not + ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE + ! must be set. +Z_INIT_FILE_PTEMP_VAR = "temp" ! default = "ptemp" + ! 
The name of the potential temperature variable in + ! TEMP_Z_INIT_FILE. +Z_INIT_FILE_SALT_VAR = "salt" ! default = "salt" + ! The name of the salinity variable in + ! SALT_Z_INIT_FILE. +Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False + ! If True, then remap straight to model coordinate from file. +Z_INIT_REMAP_OLD_ALG = True ! [Boolean] default = False + ! If false, uses the preferred remapping algorithm for initialization. If true, + ! use an older, less robust algorithm for remapping. + +! === module MOM_diag_mediator === +!Jiande NUM_DIAG_COORDS = 2 ! default = 1 +NUM_DIAG_COORDS = 1 + ! The number of diagnostic vertical coordinates to use. + ! For each coordinate, an entry in DIAG_COORDS must be provided. +!Jiande DIAG_COORDS = "z Z ZSTAR", "rho2 RHO2 RHO" ! +DIAG_COORDS = "z Z ZSTAR" + ! A list of string tuples associating diag_table modules to + ! a coordinate definition used for diagnostics. Each string + ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] +!AVAILABLE_DIAGS_FILE = "available_diags.002160" ! default = "available_diags.000000" + ! A file into which to write a list of all available ocean diagnostics that can + ! be included in a diag_table. +!DIAG_COORD_DEF_Z = "FILE:vgrid_75_2m.nc,dz" ! default = "WOA09" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter DIAG_COORD_RES_Z + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz +!DIAG_COORD_DEF_RHO2 = "RFNC1:35,999.5,1028,1028.5,8.,1038.,0.0078125" ! default = "WOA09" + ! Determines how to specify the coordinate resolution. Valid options are: + ! PARAM - use the vector-parameter DIAG_COORD_RES_RHO2 + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz + +! === module MOM_MEKE === +USE_MEKE = True ! [Boolean] default = False + ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy + ! kinetic energy budget. +MEKE_GMCOEFF = 1.0 ! [nondim] default = -1.0 + ! The efficiency of the conversion of potential energy into MEKE by the + ! thickness mixing parameterization. If MEKE_GMCOEFF is negative, this + ! conversion is not used or calculated. +MEKE_BGSRC = 1.0E-13 ! [W kg-1] default = 0.0 + ! A background energy source for MEKE. +MEKE_KHTH_FAC = 0.8 ! [nondim] default = 0.0 + ! A factor that maps MEKE%Kh to KhTh. +MEKE_KHTR_FAC = 0.8 ! [nondim] default = 0.0 + ! A factor that maps MEKE%Kh to KhTr. +MEKE_ALPHA_RHINES = 0.05 ! [nondim] default = 0.0 + ! 
If positive, is a coefficient weighting the Rhines scale in the expression for + ! mixing length used in MEKE-derived diffusivity. +MEKE_ALPHA_EADY = 0.05 ! [nondim] default = 0.0 + ! If positive, is a coefficient weighting the Eady length scale in the + ! expression for mixing length used in MEKE-derived diffusivity. + +! === module MOM_lateral_mixing_coeffs === +USE_VARIABLE_MIXING = True ! [Boolean] default = False + ! If true, the variable mixing code will be called. This allows diagnostics to + ! be created even if the scheme is not used. If KHTR_SLOPE_CFF>0 or + ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter + ! file. +RESOLN_SCALED_KH = True ! [Boolean] default = False + ! If true, the Laplacian lateral viscosity is scaled away when the first + ! baroclinic deformation radius is well resolved. +RESOLN_SCALED_KHTH = True ! [Boolean] default = False + ! If true, the interface depth diffusivity is scaled away when the first + ! baroclinic deformation radius is well resolved. +KHTR_SLOPE_CFF = 0.25 ! [nondim] default = 0.0 + ! The nondimensional coefficient in the Visbeck formula for the epipycnal tracer + ! diffusivity +USE_STORED_SLOPES = True ! [Boolean] default = False + ! If true, the isopycnal slopes are calculated once and stored for re-use. This + ! uses more memory but avoids calling the equation of state more times than + ! should be necessary. +KH_RES_FN_POWER = 100 ! [nondim] default = 2 + ! The power of dx/Ld in the Kh resolution function. Any positive integer may be + ! used, although even integers are more efficient to calculate. Setting this + ! greater than 100 results in a step-function being used. +VISC_RES_FN_POWER = 2 ! [nondim] default = 100 + ! The power of dx/Ld in the Kh resolution function. Any positive integer may be + ! used, although even integers are more efficient to calculate. Setting this + ! greater than 100 results in a step-function being used. This function affects + ! lateral viscosity, Kh, and not KhTh. +INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True + ! If true, use a more robust estimate of the first mode wave speed as the + ! starting point for iterations. + +! === module MOM_set_visc === +CHANNEL_DRAG = True ! [Boolean] default = False + ! If true, the bottom drag is exerted directly on each layer proportional to the + ! fraction of the bottom it overlies. +HBBL = 10.0 ! [m] + ! The thickness of a bottom boundary layer with a viscosity of KVBBL if + ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom + ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but + ! LINEAR_DRAG is not. +DRAG_BG_VEL = 0.1 ! [m s-1] default = 0.0 + ! DRAG_BG_VEL is either the assumed bottom velocity (with LINEAR_DRAG) or an + ! unresolved velocity that is combined with the resolved velocity to estimate + ! the velocity magnitude. DRAG_BG_VEL is only used when BOTTOMDRAGLAW is + ! defined. +BBL_USE_EOS = True ! [Boolean] default = False + ! If true, use the equation of state in determining the properties of the bottom + ! boundary layer. Otherwise use the layer target potential densities. +BBL_THICK_MIN = 0.1 ! [m] default = 0.0 + ! The minimum bottom boundary layer thickness that can be used with + ! BOTTOMDRAGLAW. This might be Kv/(cdrag*drag_bg_vel) to give Kv as the minimum + ! near-bottom viscosity. +KV = 1.0E-04 ! [m2 s-1] + ! The background kinematic viscosity in the interior. The molecular value, ~1e-6 + ! m2 s-1, may be used. +KV_BBL_MIN = 0.0 ! 
[m2 s-1] default = 1.0E-04 + ! The minimum viscosities in the bottom boundary layer. +KV_TBL_MIN = 0.0 ! [m2 s-1] default = 1.0E-04 + ! The minimum viscosities in the top boundary layer. + +! === module MOM_thickness_diffuse === +USE_GM_WORK_BUG = True ! [Boolean] default = False + ! If true, compute the top-layer work tendency on the u-grid with the incorrect + ! sign, for legacy reproducibility. + +! === module MOM_dynamics_split_RK2 === + +! === module MOM_continuity === + +! === module MOM_continuity_PPM === +ETA_TOLERANCE = 1.0E-06 ! [m] default = 3.75E-09 + ! The tolerance for the differences between the barotropic and baroclinic + ! estimates of the sea surface height due to the fluxes through each face. The + ! total tolerance for SSH is 4 times this value. The default is + ! 0.5*NK*ANGSTROM, and this should not be set less than about + ! 10^-15*MAXIMUM_DEPTH. +ETA_TOLERANCE_AUX = 0.001 ! [m] default = 1.0E-06 + ! The tolerance for free-surface height discrepancies between the barotropic + ! solution and the sum of the layer thicknesses when calculating the auxiliary + ! corrected velocities. By default, this is the same as ETA_TOLERANCE, but can + ! be made larger for efficiency. + +! === module MOM_CoriolisAdv === +CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY" + ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid + ! values are: + ! SADOURNY75_ENERGY - Sadourny, 1975; energy cons. + ! ARAKAWA_HSU90 - Arakawa & Hsu, 1990 + ! SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons. + ! ARAKAWA_LAMB81 - Arakawa & Lamb, 1981; En. + Enst. + ! ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with + ! Arakawa & Hsu and Sadourny energy +BOUND_CORIOLIS = True ! [Boolean] default = False + ! If true, the Coriolis terms at u-points are bounded by the four estimates of + ! (f+rv)v from the four neighboring v-points, and similarly at v-points. This + ! option would have no effect on the SADOURNY Coriolis scheme if it were + ! possible to use centered difference thickness fluxes. + +! === module MOM_PressureForce === + +! === module MOM_PressureForce_AFV === +MASS_WEIGHT_IN_PRESSURE_GRADIENT = True ! [Boolean] default = False + ! If true, use mass weighting when interpolating T/S for integrals near the + ! bathymetry in AFV pressure gradient calculations. + +! === module MOM_hor_visc === +LAPLACIAN = True ! [Boolean] default = False + ! If true, use a Laplacian horizontal viscosity. +SMAGORINSKY_KH = True ! [Boolean] default = False + ! If true, use a Smagorinsky nonlinear eddy viscosity. +SMAG_LAP_CONST = 0.15 ! [nondim] default = 0.0 + ! The nondimensional Laplacian Smagorinsky constant, often 0.15. +AH_VEL_SCALE = 0.05 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the cube of the grid spacing to + ! calculate the biharmonic viscosity. The final viscosity is the largest of this + ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH. +SMAGORINSKY_AH = True ! [Boolean] default = False + ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity. +SMAG_BI_CONST = 0.06 ! [nondim] default = 0.0 + ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06. +USE_KH_BG_2D = True ! [Boolean] default = False + ! If true, read a file containing 2-d background harmonic viscosities. The final + ! viscosity is the maximum of the other terms and this background value. + +! === module MOM_vert_friction === +HMIX_FIXED = 0.5 ! [m] + ! 
The prescribed depth over which the near-surface viscosity and diffusivity are + ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. +MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 + ! The maximum velocity allowed before the velocity components are truncated. + +! === module MOM_barotropic === +BOUND_BT_CORRECTION = True ! [Boolean] default = False + ! If true, the corrective pseudo mass-fluxes into the barotropic solver are + ! limited to values that require less than maxCFL_BT_cont to be accommodated. +BT_PROJECT_VELOCITY = True ! [Boolean] default = False + ! If true, step the barotropic velocity first and project out the velocity + ! tendency by 1+BEBT when calculating the transport. The default (false) is to + ! use a predictor continuity step to find the pressure field, and then to do a + ! corrector continuity step using a weighted average of the old and new + ! velocities, with weights of (1-BEBT) and BEBT. +BT_STRONG_DRAG = True ! [Boolean] default = False + ! If true, use a stronger estimate of the retarding effects of strong bottom + ! drag, by making it implicit with the barotropic time-step instead of implicit + ! with the baroclinic time-step and dividing by the number of barotropic steps. +BEBT = 0.2 ! [nondim] default = 0.1 + ! BEBT determines whether the barotropic time stepping uses the forward-backward + ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range + ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1 + ! (for a backward Euler treatment). In practice, BEBT must be greater than about + ! 0.05. +DTBT = -0.9 ! [s or nondim] default = -0.98 + ! The barotropic time step, in s. DTBT is only used with the split explicit time + ! stepping. To set the time step automatically based on the maximum stable value + ! use 0, or a negative value gives the fraction of the stable value. Setting + ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will + ! actually be used is an integer fraction of DT, rounding down. + +! === module MOM_mixed_layer_restrat === +MIXEDLAYER_RESTRAT = True ! [Boolean] default = False + ! If true, a density-gradient dependent re-stratifying flow is imposed in the + ! mixed layer. Can be used in ALE mode without restriction but in layer mode can + ! only be used if BULKMIXEDLAYER is true. +FOX_KEMPER_ML_RESTRAT_COEF = 60.0 ! [nondim] default = 0.0 + ! A nondimensional coefficient that is proportional to the ratio of the + ! deformation radius to the dominant lengthscale of the submesoscale mixed layer + ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic + ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of + ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al. + ! (2010) +MLE_USE_PBL_MLD = True ! [Boolean] default = False + ! If true, the MLE parameterization will use the mixed-layer depth provided by + ! the active PBL parameterization. If false, MLE will estimate a MLD based on a + ! density difference with the surface using the parameter MLE_DENSITY_DIFF. +MLE_MLD_DECAY_TIME = 2.592E+06 ! [s] default = 0.0 + ! The time-scale for a running-mean filter applied to the mixed-layer depth used + ! in the MLE restratification parameterization. When the MLD deepens below the + !
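For orientation, the DTBT rule above can be made concrete: zero means automatic, a negative value means that fraction of the diagnosed stable step, and the step actually used is an integer fraction of DT. A minimal Python sketch of that documented rule, where dtbt_stable stands in for the model's internally diagnosed maximum stable barotropic step (it is not set in this file):

    import math

    def effective_dtbt(dt, dtbt_param, dtbt_stable):
        # Sketch of the documented DTBT rule, not MOM6 source code.
        if dtbt_param == 0.0:
            dtbt_param = -0.98          # 0 behaves like the default -0.98
        target = -dtbt_param * dtbt_stable if dtbt_param < 0 else dtbt_param
        nsplit = max(1, math.ceil(dt / target))  # barotropic substeps per DT
        return dt / nsplit              # the integer fraction of DT actually used

    print(effective_dtbt(1800.0, -0.9, 47.0))    # -> 1800/43, about 41.9 s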
current running-mean the running-mean is instantaneously set to the current + ! MLD. + +! === module MOM_diabatic_driver === +! The following parameters are used for diabatic processes. +ENERGETICS_SFC_PBL = True ! [Boolean] default = False + ! If true, use an implied energetics planetary boundary layer scheme to + ! determine the diffusivity and viscosity in the surface boundary layer. +EPBL_IS_ADDITIVE = False ! [Boolean] default = True + ! If true, the diffusivity from ePBL is added to all other diffusivities. + ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used. +KD_MIN_TR = 2.0E-06 ! [m2 s-1] default = 2.0E-06 + ! A minimal diffusivity that should always be applied to tracers, especially in + ! massless layers near the bottom. The default is 0.1*KD. + +! === module MOM_CVMix_KPP === +! This is the MOM wrapper to CVMix:KPP +! See http://cvmix.github.io/ + +! === module MOM_tidal_mixing === +! Vertical Tidal Mixing Parameterization +INT_TIDE_DISSIPATION = True ! [Boolean] default = False + ! If true, use an internal tidal dissipation scheme to drive diapycnal mixing, + ! along the lines of St. Laurent et al. (2002) and Simmons et al. (2004). +INT_TIDE_PROFILE = "POLZIN_09" ! default = "STLAURENT_02" + ! INT_TIDE_PROFILE selects the vertical profile of energy dissipation with + ! INT_TIDE_DISSIPATION. Valid values are: + ! STLAURENT_02 - Use the St. Laurent et al exponential + ! decay profile. + ! POLZIN_09 - Use the Polzin WKB-stretched algebraic + ! decay profile. +KAPPA_ITIDES = 6.28319E-04 ! [m-1] default = 6.283185307179586E-04 + ! A topographic wavenumber used with INT_TIDE_DISSIPATION. The default is 2pi/10 + ! km, as in St.Laurent et al. 2002. +KAPPA_H2_FACTOR = 0.84 ! [nondim] default = 1.0 + ! A scaling factor for the roughness amplitude with INT_TIDE_DISSIPATION. +TKE_ITIDE_MAX = 0.1 ! [W m-2] default = 1000.0 + ! The maximum internal tide energy source available to mix above the bottom + ! boundary layer with INT_TIDE_DISSIPATION. +READ_TIDEAMP = True ! [Boolean] default = False + ! If true, read a file (given by TIDEAMP_FILE) containing the tidal amplitude + ! with INT_TIDE_DISSIPATION. +TIDEAMP_FILE = "tidal_amplitude.nc" ! default = "tideamp.nc" + ! The path to the file containing the spatially varying tidal amplitudes with + ! INT_TIDE_DISSIPATION. +H2_FILE = "topog.nc" ! + ! The path to the file containing the sub-grid-scale topographic roughness + ! amplitude with INT_TIDE_DISSIPATION. + +! === module MOM_CVMix_conv === +! Parameterization of enhanced mixing due to convection via CVMix + +! === module MOM_set_diffusivity === +BBL_MIXING_AS_MAX = False ! [Boolean] default = True + ! If true, take the maximum of the diffusivity from the BBL mixing and the other + ! diffusivities. Otherwise, diffusivity from the BBL_mixing is simply added. +USE_LOTW_BBL_DIFFUSIVITY = True ! [Boolean] default = False + ! If true, uses a simple, imprecise but non-coordinate dependent, model of BBL + ! mixing diffusivity based on Law of the Wall. Otherwise, uses the original BBL + ! scheme. +SIMPLE_TKE_TO_KD = True ! [Boolean] default = False + ! If true, uses a simple estimate of Kd/TKE that will work for arbitrary + ! vertical coordinates. If false, calculates Kd/TKE and bounds based on exact + ! energetics for an isopycnal layer-formulation. + +! === module MOM_bkgnd_mixing === +! Adding static vertical background mixing coefficients +KD = 2.0E-05 ! [m2 s-1] default = 0.0 + ! The background diapycnal diffusivity of density in the interior. Zero or the + ! 
molecular value, ~1e-7 m2 s-1, may be used. +KD_MIN = 2.0E-06 ! [m2 s-1] default = 2.0E-07 + ! The minimum diapycnal diffusivity. +HENYEY_IGW_BACKGROUND = True ! [Boolean] default = False + ! If true, use a latitude-dependent scaling for the near surface background + ! diffusivity, as described in Harrison & Hallberg, JPO 2008. +KD_MAX = 0.1 ! [m2 s-1] default = -1.0 + ! The maximum permitted increment for the diapycnal diffusivity from TKE-based + ! parameterizations, or a negative value for no limit. + +! === module MOM_kappa_shear === +! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008 +USE_JACKSON_PARAM = True ! [Boolean] default = False + ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing + ! parameterization. +MAX_RINO_IT = 25 ! [nondim] default = 50 + ! The maximum number of iterations that may be used to estimate the Richardson + ! number driven mixing. +VERTEX_SHEAR = False ! [Boolean] default = False + ! If true, do the calculations of the shear-driven mixing + ! at the cell vertices (i.e., the vorticity points). +KD_TRUNC_KAPPA_SHEAR = 2.0E-07 ! [m2 s-1] default = 2.0E-07 + ! The value of shear-driven diffusivity that is considered negligible and is + ! rounded down to 0. The default is 1% of KD_KAPPA_SHEAR_0. +KAPPA_SHEAR_ITER_BUG = True ! [Boolean] default = False + ! If true, use an older, dimensionally inconsistent estimate of the derivative + ! of diffusivity with energy in the Newton's method iteration. The bug causes + ! undercorrections when dz > 1 m. +KAPPA_SHEAR_ALL_LAYER_TKE_BUG = True ! [Boolean] default = False + ! If true, report back the latest estimate of TKE instead of the time average + ! TKE when there is mass in all layers. Otherwise always report the time + ! averaged TKE, as is currently done when there are some massless layers. + +! === module MOM_CVMix_shear === +! Parameterization of shear-driven turbulence via CVMix (various options) + +! === module MOM_CVMix_ddiff === +! Parameterization of mixing due to double diffusion processes via CVMix + +! === module MOM_diabatic_aux === +! The following parameters are used for auxiliary diabatic processes. +PRESSURE_DEPENDENT_FRAZIL = False ! [Boolean] default = False + ! If true, use a pressure dependent freezing temperature when making frazil. The + ! default is false, which will be faster but is inappropriate with ice-shelf + ! cavities. +VAR_PEN_SW = True ! [Boolean] default = False + ! If true, use one of the CHL_A schemes specified by OPACITY_SCHEME to determine + ! the e-folding depth of incoming short wave radiation. +CHL_FILE = @[CHLCLIM] ! + ! CHL_FILE is the file containing chl_a concentrations in the variable CHL_A. It + ! is used when VAR_PEN_SW and CHL_FROM_FILE are true. + +! === module MOM_energetic_PBL === +ML_OMEGA_FRAC = 0.001 ! [nondim] default = 0.0 + ! When setting the decay scale for turbulence, use this fraction of the absolute + ! rotation rate blended with the local value of f, as sqrt((1-of)*f^2 + + ! of*4*omega^2). +TKE_DECAY = 0.01 ! [nondim] default = 2.5 + ! TKE_DECAY relates the vertical rate of decay of the TKE available for + ! mechanical entrainment to the natural Ekman depth. +EPBL_MSTAR_SCHEME = "OM4" ! default = "CONSTANT" + ! EPBL_MSTAR_SCHEME selects the method for setting mstar. Valid values are: + ! CONSTANT - Use a fixed mstar given by MSTAR + ! OM4 - Use L_Ekman/L_Obukhov in the stabilizing limit, as in OM4 + ! REICHL_H18 - Use the scheme documented in Reichl & Hallberg, 2018. +MSTAR_CAP = 10.0 !
[nondim] default = -1.0 + ! If this value is positive, it sets the maximum value of mstar allowed in ePBL. + ! (This is not used if EPBL_MSTAR_SCHEME = CONSTANT). +MSTAR2_COEF1 = 0.29 ! [nondim] default = 0.3 + ! Coefficient in computing mstar when rotation and stabilizing effects are both + ! important (used if EPBL_MSTAR_SCHEME = OM4). +MSTAR2_COEF2 = 0.152 ! [nondim] default = 0.085 + ! Coefficient in computing mstar when only rotation limits the total mixing + ! (used if EPBL_MSTAR_SCHEME = OM4) +NSTAR = 0.06 ! [nondim] default = 0.2 + ! The portion of the buoyant potential energy imparted by surface fluxes that is + ! available to drive entrainment at the base of mixed layer when that energy is + ! positive. +MSTAR_CONV_ADJ = 0.667 ! [nondim] default = 0.0 + ! Coefficient used for reducing mstar during convection due to reduction of + ! stable density gradient. +USE_MLD_ITERATION = False ! [Boolean] default = True + ! A logical that specifies whether or not to use the distance to the bottom of + ! the actively turbulent boundary layer to help set the EPBL length scale. +EPBL_TRANSITION_SCALE = 0.01 ! [nondim] default = 0.1 + ! A scale for the mixing length in the transition layer at the edge of the + ! boundary layer as a fraction of the boundary layer thickness. +MIX_LEN_EXPONENT = 1.0 ! [nondim] default = 2.0 + ! The exponent applied to the ratio of the distance to the MLD and the MLD depth + ! which determines the shape of the mixing length. This is only used if + ! USE_MLD_ITERATION is True. +USE_LA_LI2016 = @[MOM6_USE_LI2016] ! [nondim] default = False + ! A logical to use the Li et al. 2016 (submitted) formula to determine the + ! Langmuir number. +USE_WAVES = @[MOM6_USE_WAVES] ! [Boolean] default = False + ! If true, enables surface wave modules. +WAVE_METHOD = "SURFACE_BANDS" ! default = "EMPTY" + ! Choice of wave method, valid options include: + ! TEST_PROFILE - Prescribed from surface Stokes drift + ! and a decay wavelength. + ! SURFACE_BANDS - Computed from multiple surface values + ! and decay wavelengths. + ! DHH85 - Uses Donelan et al. 1985 empirical + ! wave spectrum with prescribed values. + ! LF17 - Infers Stokes drift profile from wind + ! speed following Li and Fox-Kemper 2017. +SURFBAND_SOURCE = "COUPLER" ! default = "EMPTY" + ! Choice of SURFACE_BANDS data mode, valid options include: + ! DATAOVERRIDE - Read from NetCDF using FMS DataOverride. + ! COUPLER - Look for variables from coupler pass + ! INPUT - Testing with fixed values. +STK_BAND_COUPLER = 3 ! default = 1 + ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has + ! to be consistent with the number of Stokes drift bands in WW3, or the model + ! will fail. +SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 ! [rad/m] default = 0.12566 + ! Central wavenumbers for surface Stokes drift bands. +EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE" + ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence. + ! Valid values are: + ! NONE - Do not do any extra mixing due to Langmuir turbulence + ! RESCALE - Use a multiplicative rescaling of mstar to account for Langmuir + ! turbulence + ! ADDITIVE - Add a Langmuir turbulence contribution to mstar to other + ! contributions +LT_ENHANCE_COEF = 0.044 ! [nondim] default = 0.447 + ! Coefficient for Langmuir enhancement of mstar +LT_ENHANCE_EXP = -1.5 ! [nondim] default = -1.33 + ! Exponent for Langmuir enhancement of mstar +LT_MOD_LAC1 = 0.0 ! [nondim] default = -0.87 + !
Coefficient for modification of Langmuir number due to MLD approaching Ekman + ! depth. +LT_MOD_LAC4 = 0.0 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! stable Obukhov depth. +LT_MOD_LAC5 = 0.22 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! unstable Obukhov depth. + +! === module MOM_regularize_layers === + +! === module MOM_opacity === +PEN_SW_NBANDS = 3 ! default = 1 + ! The number of bands of penetrating shortwave radiation. + +! === module MOM_tracer_advect === +TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM" + ! The horizontal transport scheme for tracers: + ! PLM - Piecewise Linear Method + ! PPM:H3 - Piecewise Parabolic Method (Huynh 3rd order) + ! PPM - Piecewise Parabolic Method (Colella-Woodward) + +! === module MOM_tracer_hor_diff === +CHECK_DIFFUSIVE_CFL = True ! [Boolean] default = False + ! If true, use enough iterations of the diffusion to ensure that the diffusive + ! equivalent of the CFL limit is not violated. If false, always use the greater + ! of 1 or MAX_TR_DIFFUSION_CFL iteration. + +! === module MOM_neutral_diffusion === +! This module implements neutral diffusion of tracers +USE_NEUTRAL_DIFFUSION = True ! [Boolean] default = False + ! If true, enables the neutral diffusion module. + +! === module MOM_lateral_boundary_diffusion === +! This module implements lateral diffusion of tracers near boundaries + +! === module MOM_sum_output === +CALCULATE_APE = False ! [Boolean] default = True + ! If true, calculate the available potential energy of the interfaces. Setting + ! this to false reduces the memory footprint of high-PE-count models + ! dramatically. +MAXTRUNC = 100000 ! [truncations save_interval-1] default = 0 + ! The run will be stopped, and the day set to a very large value if the velocity + ! is truncated more than MAXTRUNC times between energy saves. Set MAXTRUNC to 0 + ! to stop if there is any truncation of velocities. +ENERGYSAVEDAYS = 0.25 ! [days] default = 1.0 + ! The interval in units of TIMEUNIT between saves of the energies of the run and + ! other globally summed diagnostics. + +! === module ocean_model_init === + +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = true ! +ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 + +! === module MOM_surface_forcing === +OCEAN_SURFACE_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the surface velocity field that is + ! returned to the coupler. Valid values include + ! 'A', 'B', or 'C'. + +MAX_P_SURF = 0.0 ! [Pa] default = -1.0 + ! The maximum surface pressure that can be exerted by the atmosphere and + !
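The ODA_* names above must match the variables in the increment file; a hedged netCDF4 sketch of a file with the expected layout (the grid sizes here are hypothetical, and in practice the DA system writes this file, not the user):

    import numpy as np
    from netCDF4 import Dataset

    nz, ny, nx = 75, 320, 360                      # hypothetical grid sizes
    with Dataset("mom6_increment.nc", "w") as nc:
        for name, size in (("zl", nz), ("lat", ny), ("lon", nx)):
            nc.createDimension(name, size)
        # Names must match ODA_TEMPINC_VAR, ODA_SALTINC_VAR, ODA_THK_VAR,
        # ODA_UINC_VAR and ODA_VINC_VAR as set above.
        for var in ("Temp", "Salt", "h", "u", "v"):
            v = nc.createVariable(var, "f4", ("zl", "lat", "lon"))
            v[:] = np.zeros((nz, ny, nx), "f4")    # placeholder increments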
floating sea-ice or ice shelves. This is needed because the FMS coupling + ! structure does not limit the water that can be frozen out of the ocean and the + ! ice-ocean heat fluxes are treated explicitly. No limit is applied if a + ! negative value is used. +WIND_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the input wind stress field. Valid + ! values are 'A', 'B', or 'C'. +CD_TIDES = 0.0018 ! [nondim] default = 1.0E-04 + ! The drag coefficient that applies to the tides. +GUST_CONST = 0.02 ! [Pa] default = 0.0 + ! The background gustiness in the winds. +FIX_USTAR_GUSTLESS_BUG = False ! [Boolean] default = True + ! If true, correct a bug in the time-averaging of the gustless wind friction + ! velocity. +! === module ocean_stochastics === +DO_SPPT = @[DO_OCN_SPPT] ! [Boolean] default = False + ! If true, perturb the diabatic tendencies in MOM_diabatic_driver. +PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False + ! If true, perturb the KE dissipation and destruction in MOM_energetic_PBL. + +! === module MOM_restart === + +! === module MOM_file_parser === diff --git a/parm/ufs/mom6/MOM_input_template_500 b/parm/ufs/mom6/MOM_input_template_500 new file mode 100644 index 0000000000..dde805d247 --- /dev/null +++ b/parm/ufs/mom6/MOM_input_template_500 @@ -0,0 +1,592 @@ +! This file was written by the model and records the non-default parameters used at run-time. +! === module MOM === + +! === module MOM_unit_scaling === +! Parameters for doing unit scaling of variables. +USE_REGRIDDING = True ! [Boolean] default = False + ! If True, use the ALE algorithm (regridding/remapping). If False, use the + ! layered isopycnal algorithm. +THICKNESSDIFFUSE = True ! [Boolean] default = False + ! If true, interface heights are diffused with a coefficient of KHTH. +THICKNESSDIFFUSE_FIRST = True ! [Boolean] default = False + ! If true, do thickness diffusion before dynamics. This is only used if + ! THICKNESSDIFFUSE is true. +DT = @[DT_DYNAM_MOM6] ! [s] + ! The (baroclinic) dynamics time step. The time-step that is actually used will + ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode + ! or the coupling timestep in coupled mode.) +DT_THERM = @[DT_THERM_MOM6] ! [s] default = 1800.0 + ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be + ! an integer multiple of DT and less than the forcing or coupling time-step, + ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer + ! multiple of the coupling timestep. By default DT_THERM is set to DT. +THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN] ! [Boolean] default = False + ! If true, the MOM will take thermodynamic and tracer timesteps that can be + ! longer than the coupling timestep. The actual thermodynamic timestep that is + ! used in this case is the largest integer multiple of the coupling timestep + ! that is less than or equal to DT_THERM. +HFREEZE = 20.0 ! [m] default = -1.0 + ! If HFREEZE > 0, melt potential will be computed. The actual depth + ! over which melt potential is computed will be min(HFREEZE, OBLD) + ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default) + ! melt potential will not be computed. +FRAZIL = True ! [Boolean] default = False + ! If true, water freezes if it gets too cold, and the accumulated heat deficit + ! is returned in the surface state. FRAZIL is only used if + ! ENABLE_THERMODYNAMICS is true. +BOUND_SALINITY = True ! [Boolean] default = False + !
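Throughout these templates, @[NAME] tokens (DO_OCN_SPPT, PERT_EPBL, and so on) are placeholders that the workflow fills in before MOM6 reads the file. A minimal Python stand-in for that substitution step, illustrative only since the workflow uses its own shell tooling:

    import os
    import re

    def fill_template(text):
        # Replace each @[NAME] token with the value of environment variable NAME.
        return re.sub(r"@\[(\w+)\]", lambda m: os.environ[m.group(1)], text)

    os.environ["DO_OCN_SPPT"] = "False"
    print(fill_template("DO_SPPT = @[DO_OCN_SPPT]"))   # -> DO_SPPT = False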
If true, limit salinity to being positive. (The sea-ice model may ask for more + ! salt than is available and drive the salinity negative otherwise.) + +! === module MOM_domains === +TRIPOLAR_N = True ! [Boolean] default = False + ! Use tripolar connectivity at the northern edge of the domain. With + ! TRIPOLAR_N, NIGLOBAL must be even. +NIGLOBAL = @[NX_GLB] ! + ! The total number of thickness grid points in the x-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NJGLOBAL = @[NY_GLB] ! + ! The total number of thickness grid points in the y-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. + +! === module MOM_hor_index === +! Sets the horizontal array index types. + +! === module MOM_fixed_initialization === +INPUTDIR = "INPUT" ! default = "." + ! The directory in which input files are found. + +! === module MOM_grid_init === +GRID_CONFIG = "mosaic" ! + ! A character string that determines the method for defining the horizontal + ! grid. Current options are: + ! mosaic - read the grid from a mosaic (supergrid) + ! file set by GRID_FILE. + ! cartesian - use a (flat) Cartesian grid. + ! spherical - use a simple spherical grid. + ! mercator - use a Mercator spherical grid. +GRID_FILE = "ocean_hgrid.nc" ! + ! Name of the file from which to read horizontal grid data. +GRID_ROTATION_ANGLE_BUGS = False ! [Boolean] default = True + ! If true, use an older algorithm to calculate the sine and + ! cosines needed to rotate between grid-oriented directions and + ! true north and east. Differences arise at the tripolar fold. +USE_TRIPOLAR_GEOLONB_BUG = False ! [Boolean] default = True + ! If true, use older code that incorrectly sets the longitude in some points + ! along the tripolar fold to be off by 360 degrees. +TOPO_CONFIG = "file" ! + ! This specifies how bathymetry is specified: + ! file - read bathymetric information from the file + ! specified by (TOPO_FILE). + ! flat - flat bottom set to MAXIMUM_DEPTH. + ! bowl - an analytically specified bowl-shaped basin + ! ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH. + ! spoon - a similar shape to 'bowl', but with a vertical + ! wall at the southern face. + ! halfpipe - a zonally uniform channel with a half-sine + ! profile in the meridional direction. + ! bbuilder - build topography from list of functions. + ! benchmark - use the benchmark test case topography. + ! Neverworld - use the Neverworld test case topography. + ! DOME - use a slope and channel configuration for the + ! DOME sill-overflow test case. + ! ISOMIP - use a slope and channel configuration for the + ! ISOMIP test case. + ! DOME2D - use a shelf and slope configuration for the + ! DOME2D gravity current/overflow test case. + ! Kelvin - flat but with rotated land mask. + ! seamount - Gaussian bump for spontaneous motion test case. + ! dumbbell - Sloshing channel with reservoirs on both ends. + ! shelfwave - exponential slope for shelfwave test case. + ! Phillips - ACC-like idealized topography used in the Phillips config. + ! dense - Denmark Strait-like dense water formation and overflow. + ! USER - call a user modified routine. +TOPO_FILE = "ocean_topog.nc" ! default = "topog.nc" + ! The file from which the bathymetry is read. +!MAXIMUM_DEPTH = 5801.341919389728 ! [m] + ! The (diagnosed) maximum depth of the ocean. +MINIMUM_DEPTH = 10.0 ! [m] default = 0.0 + ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is + !
assumed to be land and all fluxes are masked out. If MASKING_DEPTH is + ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than + ! MASKING_DEPTH are rounded to MINIMUM_DEPTH. + +! === module MOM_open_boundary === +! Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply, +! if any. +MASKING_DEPTH = 0.0 ! [m] default = -9999.0 + ! The depth below which to mask points as land points, for which all fluxes are + ! zeroed out. MASKING_DEPTH is ignored if negative. + +! === module MOM_verticalGrid === +! Parameters providing information about the vertical grid. +NK = 25 ! [nondim] + ! The number of model layers. + +! === module MOM_tracer_registry === + +! === module MOM_EOS === +TFREEZE_FORM = "MILLERO_78" ! default = "LINEAR" + ! TFREEZE_FORM determines which expression should be used for the freezing + ! point. Currently, the valid choices are "LINEAR", "MILLERO_78", "TEOS10" + +! === module MOM_restart === +RESTART_CHECKSUMS_REQUIRED = False +! === module MOM_tracer_flow_control === + +! === module MOM_coord_initialization === +COORD_CONFIG = "file" ! default = "none" + ! This specifies how layers are to be defined: + ! ALE or none - used to avoid defining layers in ALE mode + ! file - read coordinate information from the file + ! specified by (COORD_FILE). + ! BFB - Custom coords for buoyancy-forced basin case + ! based on SST_S, T_BOT and DRHO_DT. + ! linear - linear based on interfaces not layers + ! layer_ref - linear based on layer densities + ! ts_ref - use reference temperature and salinity + ! ts_range - use range of temperature and salinity + ! (T_REF and S_REF) to determine surface density + ! and GINT to calculate internal densities. + ! gprime - use reference density (RHO_0) for surface + ! density and GINT to calculate internal densities. + ! ts_profile - use temperature and salinity profiles + ! (read from COORD_FILE) to set layer densities. + ! USER - call a user modified routine. +COORD_FILE = "layer_coord25.nc" ! + ! The file from which the coordinate densities are read. +REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER" + ! Coordinate mode for vertical regridding. Choose among the following + ! possibilities: LAYER - Isopycnal or stacked shallow water layers + ! ZSTAR, Z* - stretched geopotential z* + ! SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf + ! SIGMA - terrain following coordinates + ! RHO - continuous isopycnal + ! HYCOM1 - HyCOM-like hybrid coordinate + ! SLIGHT - stretched coordinates above continuous isopycnal + ! ADAPTIVE - optimize for smooth neutral density surfaces +BOUNDARY_EXTRAPOLATION = True ! [Boolean] default = False + ! When defined, a proper high-order reconstruction scheme is used within + ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM + ! reconstruction will also be used within boundary cells. +ALE_COORDINATE_CONFIG = "HYBRID:hycom1_25.nc,sigma2,FNC1:5,4000,4.5,.01" ! default = "UNIFORM" + ! Determines how to specify the coordinate + ! resolution. Valid options are: + ! PARAM - use the vector-parameter ALE_RESOLUTION + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + !
the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! HYBRID:vgrid.nc,sigma2,dz +!ALE_RESOLUTION = 2*5.0, 5.01, 5.07, 5.25, 5.68, 6.55, 8.1, 10.66, 14.620000000000001, 20.450000000000003, 28.73, 40.1, 55.32, 75.23, 100.8, 133.09, 173.26, 222.62, 282.56, 354.62, 440.47, 541.87, 660.76, 799.1800000000001 ! [m] + ! The distribution of vertical resolution for the target + ! grid used for Eulerian-like coordinates. For example, + ! in z-coordinate mode, the parameter is a list of level + ! thicknesses (in m). In sigma-coordinate mode, the list + ! is of non-dimensional fractions of the water column. +!TARGET_DENSITIES = 1010.0, 1020.843017578125, 1027.0274658203125, 1029.279541015625, 1030.862548828125, 1032.1572265625, 1033.27978515625, 1034.251953125, 1034.850830078125, 1035.28857421875, 1035.651123046875, 1035.967529296875, 1036.2410888671875, 1036.473876953125, 1036.6800537109375, 1036.8525390625, 1036.9417724609375, 1037.0052490234375, 1037.057373046875, 1037.1065673828125, 1037.15576171875, 1037.2060546875, 1037.26416015625, 1037.3388671875, 1037.4749755859375, 1038.0 ! [m] + ! HYBRID target densities for interfaces +REGRID_COMPRESSIBILITY_FRACTION = 0.01 ! [not defined] default = 0.0 + ! When interpolating potential density profiles we can add + ! some artificial compressibility solely to make homogeneous + ! regions appear stratified. +MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.125" ! default = "NONE" + ! Determines how to specify the maximum interface depths. + ! Valid options are: + ! NONE - there are no maximum interface depths + ! PARAM - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 36.25, 93.75, 177.5, 287.5, 423.75, 586.25, 775.0, 990.0, 1231.25, 1498.75, 1792.5, 2112.5, 2458.75, 2831.25, 3230.0, 3655.0, 4106.25, 4583.75, 5087.5, 5617.5, 6173.75, 6756.25, 7365.0, 8000.0 ! [m] + ! The list of maximum depths for each interface. +MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE" + ! Determines how to specify the maximum layer thicknesses. + ! Valid options are: + ! NONE - there are no maximum layer thicknesses + ! PARAM - use the vector-parameter MAX_LAYER_THICKNESS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAX_LAYER_THICKNESS = 400.0, 1094.2, 1144.02, 1174.81, 1197.42, 1215.4099999999999, 1230.42, 1243.3200000000002, 1254.65, 1264.78, 1273.94, 1282.31, 1290.02, 1297.17, 1303.85, 1310.1, 1316.0, 1321.5700000000002, 1326.85, 1331.87, 1336.67, 1341.25, 1345.6399999999999, 1349.85, 1353.88 ! [m] + ! The list of maximum thickness for each layer. +REMAPPING_SCHEME = "PPM_H4" ! default = "PLM" + ! This sets the reconstruction scheme used for vertical remapping for all + ! variables. It can be one of the following schemes: PCM (1st-order + ! accurate) + ! PLM (2nd-order accurate) + ! PPM_H4 (3rd-order accurate) + ! PPM_IH4 (3rd-order accurate) + ! PQM_IH4IH3 (4th-order accurate) + ! PQM_IH6IH5 (5th-order accurate) + +! === module MOM_grid === +! Parameters providing information about the lateral grid. + +! === module MOM_state_initialization === +INIT_LAYERS_FROM_Z_FILE = True ! [Boolean] default = False + !
If true, initialize the layer thicknesses, temperatures, and salinities from a + ! Z-space file on a latitude-longitude grid. + +! === module MOM_initialize_layers_from_Z === +TEMP_SALT_Z_INIT_FILE = "" ! default = "temp_salt_z.nc" + ! The name of the z-space input file used to initialize + ! temperatures (T) and salinities (S). If T and S are not + ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE + ! must be set. +TEMP_Z_INIT_FILE = "woa18_decav_t00_01.nc" ! default = "" + ! The name of the z-space input file used to initialize + ! temperatures, only. +SALT_Z_INIT_FILE = "woa18_decav_s00_01.nc" ! default = "" + ! The name of the z-space input file used to initialize + ! salinities, only. +Z_INIT_FILE_PTEMP_VAR = "t_an" ! default = "ptemp" + ! The name of the potential temperature variable in + ! TEMP_Z_INIT_FILE. +Z_INIT_FILE_SALT_VAR = "s_an" ! default = "salt" + ! The name of the salinity variable in + ! SALT_Z_INIT_FILE. +Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False + ! If True, then remap straight to model coordinate from file. + +! === module MOM_diag_mediator === +NUM_DIAG_COORDS = 1 + ! The number of diagnostic vertical coordinates to use. + ! For each coordinate, an entry in DIAG_COORDS must be provided. +DIAG_COORDS = "z Z ZSTAR" + ! A list of string tuples associating diag_table modules to + ! a coordinate definition used for diagnostics. Each string + ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] + +! === module MOM_MEKE === +USE_MEKE = True ! [Boolean] default = False + ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy + ! kinetic energy budget. + +! === module MOM_lateral_mixing_coeffs === +USE_VARIABLE_MIXING = True ! [Boolean] default = False + ! If true, the variable mixing code will be called. This allows diagnostics to + ! be created even if the scheme is not used. If KHTR_SLOPE_CFF>0 or + ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter + ! file. +! === module MOM_set_visc === +CHANNEL_DRAG = True ! [Boolean] default = False + ! If true, the bottom drag is exerted directly on each layer proportional to the + ! fraction of the bottom it overlies. +HBBL = 10.0 ! [m] + ! The thickness of a bottom boundary layer with a viscosity of KVBBL if + ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom + ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but + ! LINEAR_DRAG is not. +KV = 1.0E-04 ! [m2 s-1] + ! The background kinematic viscosity in the interior. The molecular value, ~1e-6 + ! m2 s-1, may be used. + +! === module MOM_continuity === + +! === module MOM_continuity_PPM === + +! === module MOM_CoriolisAdv === +CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY" + ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid + ! values are: + ! SADOURNY75_ENERGY - Sadourny, 1975; energy cons. + ! ARAKAWA_HSU90 - Arakawa & Hsu, 1990 + ! SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons. + ! ARAKAWA_LAMB81 - Arakawa & Lamb, 1981; En. + Enst. + ! ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with + ! Arakawa & Hsu and Sadourny energy +BOUND_CORIOLIS = True ! [Boolean] default = False + ! If true, the Coriolis terms at u-points are bounded by the four estimates of + ! (f+rv)v from the four neighboring v-points, and similarly at v-points. This + !
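Because TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE name both the files and (via Z_INIT_FILE_PTEMP_VAR and Z_INIT_FILE_SALT_VAR) the variables read from them, a mismatch only surfaces at run time. A small pre-flight check, assuming the files have been staged under INPUTDIR:

    from netCDF4 import Dataset

    # (file, variable) pairs taken from the parameters above
    checks = [("INPUT/woa18_decav_t00_01.nc", "t_an"),
              ("INPUT/woa18_decav_s00_01.nc", "s_an")]
    for path, var in checks:
        with Dataset(path) as nc:
            if var not in nc.variables:
                raise KeyError(f"{path} is missing expected variable {var!r}")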
option would have no effect on the SADOURNY Coriolis scheme if it were + ! possible to use centered difference thickness fluxes. + +! === module MOM_PressureForce === + +! === module MOM_PressureForce_AFV === +MASS_WEIGHT_IN_PRESSURE_GRADIENT = True ! [Boolean] default = False + ! If true, use mass weighting when interpolating T/S for integrals near the + ! bathymetry in AFV pressure gradient calculations. + +! === module MOM_hor_visc === +LAPLACIAN = True ! [Boolean] default = False + ! If true, use a Laplacian horizontal viscosity. +KH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the grid spacing to calculate the + ! Laplacian viscosity. The final viscosity is the largest of this scaled + ! viscosity, the Smagorinsky and Leith viscosities, and KH. +KH_SIN_LAT = 2000.0 ! [m2 s-1] default = 0.0 + ! The amplitude of a latitudinally-dependent background viscosity of the form + ! KH_SIN_LAT*(SIN(LAT)**KH_PWR_OF_SINE). +SMAGORINSKY_KH = True ! [Boolean] default = False + ! If true, use a Smagorinsky nonlinear eddy viscosity. +SMAG_LAP_CONST = 0.15 ! [nondim] default = 0.0 + ! The nondimensional Laplacian Smagorinsky constant, often 0.15. +AH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the cube of the grid spacing to + ! calculate the biharmonic viscosity. The final viscosity is the largest of this + ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH. +SMAGORINSKY_AH = True ! [Boolean] default = False + ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity. +SMAG_BI_CONST = 0.06 ! [nondim] default = 0.0 + ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06. +USE_LAND_MASK_FOR_HVISC = True ! [Boolean] default = False + ! If true, use the land mask for the computation of thicknesses at velocity + ! locations. This eliminates the dependence on arbitrary values over land or + ! outside of the domain. + +! === module MOM_vert_friction === +HMIX_FIXED = 0.5 ! [m] + ! The prescribed depth over which the near-surface viscosity and diffusivity are + ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. +MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 + ! The maximum velocity allowed before the velocity components are truncated. + +! === module MOM_barotropic === +BOUND_BT_CORRECTION = True ! [Boolean] default = False + ! If true, the corrective pseudo mass-fluxes into the barotropic solver are + ! limited to values that require less than maxCFL_BT_cont to be accommodated. +BT_PROJECT_VELOCITY = True ! [Boolean] default = False + ! If true, step the barotropic velocity first and project out the velocity + ! tendency by 1+BEBT when calculating the transport. The default (false) is to + ! use a predictor continuity step to find the pressure field, and then to do a + ! corrector continuity step using a weighted average of the old and new + ! velocities, with weights of (1-BEBT) and BEBT. +DYNAMIC_SURFACE_PRESSURE = False ! [Boolean] default = False + ! If true, add a dynamic pressure due to a viscous ice shelf, for instance. +BEBT = 0.2 ! [nondim] default = 0.1 + ! BEBT determines whether the barotropic time stepping uses the forward-backward + ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range + !
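The *_VEL_SCALE comments above define simple scale-dependent viscosities: the Laplacian value grows like dx and the biharmonic value like dx cubed, and MOM6 then takes the largest of the candidate viscosities. A sketch of just those two relations, with dx an assumed local grid spacing:

    def scaled_viscosities(dx, kh_vel_scale=0.01, ah_vel_scale=0.01):
        # Kh ~ U*dx (Laplacian) and Ah ~ U*dx**3 (biharmonic), per the comments.
        return kh_vel_scale * dx, ah_vel_scale * dx**3

    kh, ah = scaled_viscosities(dx=50.0e3)         # a nominal 50 km grid cell
    print(f"Kh = {kh:.3e} m2 s-1, Ah = {ah:.3e} m4 s-1")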
from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1 + ! (for a backward Euler treatment). In practice, BEBT must be greater than about + ! 0.05. +DTBT = -0.9 ! [s or nondim] default = -0.98 + ! The barotropic time step, in s. DTBT is only used with the split explicit time + ! stepping. To set the time step automatically based on the maximum stable value + ! use 0, or a negative value gives the fraction of the stable value. Setting + ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will + ! actually be used is an integer fraction of DT, rounding down. + +! === module MOM_mixed_layer_restrat === +MIXEDLAYER_RESTRAT = False ! [Boolean] default = False + ! If true, a density-gradient dependent re-stratifying flow is imposed in the + ! mixed layer. Can be used in ALE mode without restriction but in layer mode can + ! only be used if BULKMIXEDLAYER is true. +FOX_KEMPER_ML_RESTRAT_COEF = 60.0 ! [nondim] default = 0.0 + ! A nondimensional coefficient that is proportional to the ratio of the + ! deformation radius to the dominant lengthscale of the submesoscale mixed layer + ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic + ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of + ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al. + ! (2010) +MLE_FRONT_LENGTH = 200.0 ! [m] default = 0.0 + ! If non-zero, is the frontal-length scale used to calculate the upscaling of + ! buoyancy gradients that is otherwise represented by the parameter + ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended + ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0. +MLE_USE_PBL_MLD = True ! [Boolean] default = False + ! If true, the MLE parameterization will use the mixed-layer depth provided by + ! the active PBL parameterization. If false, MLE will estimate a MLD based on a + ! density difference with the surface using the parameter MLE_DENSITY_DIFF. +MLE_MLD_DECAY_TIME = 2.592E+06 ! [s] default = 0.0 + ! The time-scale for a running-mean filter applied to the mixed-layer depth used + ! in the MLE restratification parameterization. When the MLD deepens below the + ! current running-mean the running-mean is instantaneously set to the current + ! MLD. + +! === module MOM_diabatic_driver === +! The following parameters are used for diabatic processes. +ENERGETICS_SFC_PBL = True ! [Boolean] default = False + ! If true, use an implied energetics planetary boundary layer scheme to + ! determine the diffusivity and viscosity in the surface boundary layer. +EPBL_IS_ADDITIVE = False ! [Boolean] default = True + ! If true, the diffusivity from ePBL is added to all other diffusivities. + ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used. + +! === module MOM_CVMix_KPP === +! This is the MOM wrapper to CVMix:KPP +! See http://cvmix.github.io/ + +! === module MOM_tidal_mixing === +! Vertical Tidal Mixing Parameterization + +! === module MOM_CVMix_conv === +! Parameterization of enhanced mixing due to convection via CVMix + +! === module MOM_set_diffusivity === + +! === module MOM_bkgnd_mixing === +! Adding static vertical background mixing coefficients +KD = 1.5E-05 ! [m2 s-1] default = 0.0 + ! The background diapycnal diffusivity of density in the interior. Zero or the + ! molecular value, ~1e-7 m2 s-1, may be used. +KD_MIN = 2.0E-06 ! [m2 s-1] default = 2.0E-07 + ! The minimum diapycnal diffusivity. +HENYEY_IGW_BACKGROUND = True ! [Boolean] default = False + !
If true, use a latitude-dependent scaling for the near surface background + ! diffusivity, as described in Harrison & Hallberg, JPO 2008. + +! === module MOM_kappa_shear === +! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008 +USE_JACKSON_PARAM = True ! [Boolean] default = False + ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing + ! parameterization. +MAX_RINO_IT = 25 ! [nondim] default = 50 + ! The maximum number of iterations that may be used to estimate the Richardson + ! number driven mixing. + +! === module MOM_CVMix_shear === +! Parameterization of shear-driven turbulence via CVMix (various options) + +! === module MOM_CVMix_ddiff === +! Parameterization of mixing due to double diffusion processes via CVMix + +! === module MOM_diabatic_aux === +! The following parameters are used for auxiliary diabatic processes. + +! === module MOM_energetic_PBL === +EPBL_USTAR_MIN = 1.45842E-18 ! [m s-1] + ! The (tiny) minimum friction velocity used within the ePBL code, derived from + ! OMEGA and ANGSTROM. +USE_LA_LI2016 = @[MOM6_USE_LI2016] ! [nondim] default = False + ! A logical to use the Li et al. 2016 (submitted) formula to determine the + ! Langmuir number. +USE_WAVES = @[MOM6_USE_WAVES] ! [Boolean] default = False + ! If true, enables surface wave modules. +WAVE_METHOD = "SURFACE_BANDS" ! default = "EMPTY" + ! Choice of wave method, valid options include: + ! TEST_PROFILE - Prescribed from surface Stokes drift + ! and a decay wavelength. + ! SURFACE_BANDS - Computed from multiple surface values + ! and decay wavelengths. + ! DHH85 - Uses Donelan et al. 1985 empirical + ! wave spectrum with prescribed values. + ! LF17 - Infers Stokes drift profile from wind + ! speed following Li and Fox-Kemper 2017. +SURFBAND_SOURCE = "COUPLER" ! default = "EMPTY" + ! Choice of SURFACE_BANDS data mode, valid options include: + ! DATAOVERRIDE - Read from NetCDF using FMS DataOverride. + ! COUPLER - Look for variables from coupler pass + ! INPUT - Testing with fixed values. +STK_BAND_COUPLER = 3 ! default = 1 + ! STK_BAND_COUPLER is the number of Stokes drift bands in the coupler. This has + ! to be consistent with the number of Stokes drift bands in WW3, or the model + ! will fail. +SURFBAND_WAVENUMBERS = 0.04, 0.11, 0.3305 ! [rad/m] default = 0.12566 + ! Central wavenumbers for surface Stokes drift bands. +EPBL_LANGMUIR_SCHEME = "ADDITIVE" ! default = "NONE" + ! EPBL_LANGMUIR_SCHEME selects the method for including Langmuir turbulence. + ! Valid values are: + ! NONE - Do not do any extra mixing due to Langmuir turbulence + ! RESCALE - Use a multiplicative rescaling of mstar to account for Langmuir + ! turbulence + ! ADDITIVE - Add a Langmuir turbulence contribution to mstar to other + ! contributions +LT_ENHANCE_COEF = 0.044 ! [nondim] default = 0.447 + ! Coefficient for Langmuir enhancement of mstar +LT_ENHANCE_EXP = -1.5 ! [nondim] default = -1.33 + ! Exponent for Langmuir enhancement of mstar +LT_MOD_LAC1 = 0.0 ! [nondim] default = -0.87 + ! Coefficient for modification of Langmuir number due to MLD approaching Ekman + ! depth. +LT_MOD_LAC4 = 0.0 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! stable Obukhov depth. +LT_MOD_LAC5 = 0.22 ! [nondim] default = 0.95 + ! Coefficient for modification of Langmuir number due to ratio of Ekman to + ! unstable Obukhov depth. + +! === module MOM_regularize_layers === + +! === module MOM_opacity === + +!
=== module MOM_tracer_advect === +TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM" + ! The horizontal transport scheme for tracers: + ! PLM - Piecewise Linear Method + ! PPM:H3 - Piecewise Parabolic Method (Huynh 3rd order) + ! PPM - Piecewise Parabolic Method (Colella-Woodward) + +! === module MOM_tracer_hor_diff === +KHTR = 50.0 ! [m2 s-1] default = 0.0 + ! The background along-isopycnal tracer diffusivity. +CHECK_DIFFUSIVE_CFL = True ! [Boolean] default = False + ! If true, use enough iterations of the diffusion to ensure that the diffusive + ! equivalent of the CFL limit is not violated. If false, always use the greater + ! of 1 or MAX_TR_DIFFUSION_CFL iteration. +MAX_TR_DIFFUSION_CFL = 2.0 ! [nondim] default = -1.0 + ! If positive, locally limit the along-isopycnal tracer diffusivity to keep the + ! diffusive CFL locally at or below this value. The number of diffusive + ! iterations is often this value or the next greater integer. + +! === module MOM_neutral_diffusion === +! This module implements neutral diffusion of tracers +USE_NEUTRAL_DIFFUSION = True ! [Boolean] default = False + ! If true, enables the neutral diffusion module. + +! === module MOM_sum_output === +MAXTRUNC = 1000 ! [truncations save_interval-1] default = 0 + ! The run will be stopped, and the day set to a very large value if the velocity + ! is truncated more than MAXTRUNC times between energy saves. Set MAXTRUNC to 0 + ! to stop if there is any truncation of velocities. + +! === module ocean_model_init === + +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = true ! +ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 + +! === module MOM_surface_forcing === +OCEAN_SURFACE_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the surface velocity field that is + ! returned to the coupler. Valid values include + ! 'A', 'B', or 'C'. + +MAX_P_SURF = 0.0 ! [Pa] default = -1.0 + ! The maximum surface pressure that can be exerted by the atmosphere and + ! floating sea-ice or ice shelves. This is needed because the FMS coupling + ! structure does not limit the water that can be frozen out of the ocean and the + ! ice-ocean heat fluxes are treated explicitly. No limit is applied if a + ! negative value is used. +WIND_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the input wind stress field. Valid + ! values are 'A', 'B', or 'C'. +! === module MOM_restart === + +!
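CHECK_DIFFUSIVE_CFL and MAX_TR_DIFFUSION_CFL above trade a fixed iteration count for a locally limited one; per the comments, the iteration count is "often this value or the next greater integer". A sketch of that count, with local_cfl standing in for the diagnosed diffusive CFL number:

    import math

    def diffusion_iterations(local_cfl, max_tr_diffusion_cfl=2.0,
                             check_diffusive_cfl=True):
        # Sub-iterations needed to keep each pass at or below the per-iteration
        # CFL target; a sketch of the documented behaviour, not MOM6 code.
        if check_diffusive_cfl:
            return max(1, math.ceil(local_cfl / max_tr_diffusion_cfl))
        return max(1, math.ceil(max_tr_diffusion_cfl))   # fixed count otherwise

    print(diffusion_iterations(7.3))   # -> 4 iterations for a local CFL of 7.3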
=== module MOM_file_parser === diff --git a/parm/ufs/ufs.configure.atm.IN b/parm/ufs/ufs.configure.atm.IN new file mode 100644 index 0000000000..3457d8cf53 --- /dev/null +++ b/parm/ufs/ufs.configure.atm.IN @@ -0,0 +1,22 @@ +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +EARTH_component_list: ATM +EARTH_attributes:: + Verbosity = 0 +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + Diagnostic = 0 +:: + +# Run Sequence # +runSeq:: + ATM +:: diff --git a/parm/ufs/ufs.configure.atm_aero.IN b/parm/ufs/ufs.configure.atm_aero.IN new file mode 100644 index 0000000000..629cc156ce --- /dev/null +++ b/parm/ufs/ufs.configure.atm_aero.IN @@ -0,0 +1,40 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: ATM CHM +EARTH_attributes:: + Verbosity = 0 +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 +:: + +# CHM # +CHM_model: @[chm_model] +CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] +CHM_attributes:: + Verbosity = 0 +:: + +# Run Sequence # +runSeq:: + @@[coupling_interval_fast_sec] + ATM phase1 + ATM -> CHM + CHM + CHM -> ATM + ATM phase2 + @ +:: diff --git a/parm/ufs/ufs.configure.blocked_atm_wav.IN b/parm/ufs/ufs.configure.blocked_atm_wav.IN new file mode 100644 index 0000000000..b68aa2e735 --- /dev/null +++ b/parm/ufs/ufs.configure.blocked_atm_wav.IN @@ -0,0 +1,41 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: ATM WAV +EARTH_attributes:: + Verbosity = max +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = max + DumpFields = true +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = max +:: + + + +# Run Sequence # +runSeq:: + @@[coupling_interval_sec] + ATM -> WAV + ATM + WAV + @ +:: diff --git a/parm/ufs/ufs.configure.cpld.IN b/parm/ufs/ufs.configure.cpld.IN new file mode 100644 index 0000000000..e473fb2a03 --- /dev/null +++ b/parm/ufs/ufs.configure.cpld.IN @@ -0,0 +1,122 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM OCN ICE +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + 
ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + eps_imesh = @[eps_imesh] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED med_phases_prep_ocn_avg + MED -> OCN :remapMethod=redist + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + ATM + ICE + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + MED med_phases_ocnalb_run + MED med_phases_prep_ocn_accum + @ + OCN -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] + pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 +:: diff --git a/parm/ufs/ufs.configure.cpld_aero.IN b/parm/ufs/ufs.configure.cpld_aero.IN new file mode 100644 index 0000000000..d90d377006 --- /dev/null +++ b/parm/ufs/ufs.configure.cpld_aero.IN @@ -0,0 +1,134 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM CHM OCN ICE +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# CHM # +CHM_model: @[chm_model] +CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] +CHM_attributes:: + Verbosity = 0 +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + eps_imesh = @[eps_imesh] + stop_n = 
@[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED med_phases_prep_ocn_avg + MED -> OCN :remapMethod=redist + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + ATM phase1 + ATM -> CHM + CHM + CHM -> ATM + ATM phase2 + ICE + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + MED med_phases_ocnalb_run + MED med_phases_prep_ocn_accum + @ + OCN -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] + pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 +:: diff --git a/parm/ufs/ufs.configure.cpld_aero_outerwave.IN b/parm/ufs/ufs.configure.cpld_aero_outerwave.IN new file mode 100644 index 0000000000..23e7751112 --- /dev/null +++ b/parm/ufs/ufs.configure.cpld_aero_outerwave.IN @@ -0,0 +1,151 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM CHM OCN ICE WAV +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# CHM # +CHM_model: @[chm_model] +CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] +CHM_attributes:: + Verbosity = 0 +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + eps_imesh = @[eps_imesh] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = 0 + OverwriteSlice = false + mesh_wav = @[MESH_WAV] +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED 
med_phases_prep_wav_avg + MED med_phases_prep_ocn_avg + MED -> WAV :remapMethod=redist + MED -> OCN :remapMethod=redist + WAV + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + ATM phase1 + ATM -> CHM + CHM + CHM -> ATM + ATM phase2 + ICE + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + MED med_phases_ocnalb_run + MED med_phases_prep_ocn_accum + MED med_phases_prep_wav_accum + @ + OCN -> MED :remapMethod=redist + WAV -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_post_wav + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + WAV_model = @[wav_model] + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] + pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 +:: diff --git a/parm/ufs/ufs.configure.cpld_aero_wave.IN b/parm/ufs/ufs.configure.cpld_aero_wave.IN new file mode 100644 index 0000000000..ab0f6a9f8d --- /dev/null +++ b/parm/ufs/ufs.configure.cpld_aero_wave.IN @@ -0,0 +1,151 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM CHM OCN ICE WAV +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# CHM # +CHM_model: @[chm_model] +CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] +CHM_attributes:: + Verbosity = 0 +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + eps_imesh = @[eps_imesh] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = 0 + OverwriteSlice = false + mesh_wav = @[MESH_WAV] +:: + +# CMEPS warm run sequence +runSeq:: 
+@@[coupling_interval_slow_sec] + MED med_phases_prep_ocn_avg + MED -> OCN :remapMethod=redist + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED med_phases_prep_wav_accum + MED med_phases_prep_wav_avg + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + MED -> WAV :remapMethod=redist + ATM phase1 + ATM -> CHM + CHM + CHM -> ATM + ATM phase2 + ICE + WAV + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + WAV -> MED :remapMethod=redist + MED med_phases_post_wav + MED med_phases_ocnalb_run + MED med_phases_prep_ocn_accum + @ + OCN -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + WAV_model = @[wav_model] + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] + pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 +:: diff --git a/parm/ufs/ufs.configure.cpld_outerwave.IN b/parm/ufs/ufs.configure.cpld_outerwave.IN new file mode 100644 index 0000000000..9a45d5ff9a --- /dev/null +++ b/parm/ufs/ufs.configure.cpld_outerwave.IN @@ -0,0 +1,139 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM OCN ICE WAV +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + eps_imesh = @[eps_imesh] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = 0 + OverwriteSlice = false + mesh_wav = @[MESH_WAV] +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED med_phases_prep_wav_avg + MED med_phases_prep_ocn_avg + MED -> WAV :remapMethod=redist + MED -> OCN 
:remapMethod=redist + WAV + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + ATM + ICE + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + MED med_phases_ocnalb_run + MED med_phases_prep_ocn_accum + MED med_phases_prep_wav_accum + @ + OCN -> MED :remapMethod=redist + WAV -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_post_wav + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + WAV_model = @[wav_model] + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] + pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 +:: diff --git a/parm/ufs/ufs.configure.cpld_wave.IN b/parm/ufs/ufs.configure.cpld_wave.IN new file mode 100644 index 0000000000..37a462a5d4 --- /dev/null +++ b/parm/ufs/ufs.configure.cpld_wave.IN @@ -0,0 +1,139 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM OCN ICE WAV +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + eps_imesh = @[eps_imesh] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = 0 + OverwriteSlice = false + mesh_wav = @[MESH_WAV] +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED med_phases_prep_ocn_avg + MED -> OCN :remapMethod=redist + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED med_phases_prep_wav_accum + MED med_phases_prep_wav_avg + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + MED -> WAV :remapMethod=redist 
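In all of the coupled ufs.configure templates above, @@[coupling_interval_slow_sec] opens the slow (ocean) coupling loop, the nested @@[coupling_interval_fast_sec] opens the fast loop for the atmosphere and ice (plus chemistry in the *_aero variants), and each bare @ closes a loop; the *_outerwave variants advance WAV in the slow loop, while the *_wave variants (continuing below) run it in the fast loop. Every @[NAME] token is substituted from the run-time environment before the model reads the file. A minimal stand-in for that substitution step, for illustration only (the workflow's real template parser may behave differently):

    #!/usr/bin/env bash
    # Sketch: fill @[VAR] tokens in a ufs.configure.*.IN template from shell
    # variables of the same name. An @@[VAR] token keeps its extra leading
    # '@' after substitution, which is exactly what opens a nested CMEPS
    # coupling loop (e.g. "@@[coupling_interval_slow_sec]" -> "@3600").
    set -euo pipefail

    render_template() {
      local line var
      while IFS= read -r line; do
        while [[ ${line} =~ @\[([A-Za-z_][A-Za-z_0-9]*)\] ]]; do
          var="${BASH_REMATCH[1]}"
          line="${line//@\[${var}\]/${!var:?${var} is unset}}"
        done
        printf '%s\n' "${line}"
      done
    }

    # Example values; a real forecast job exports many more (PET bounds,
    # mesh paths, restart cadence, ...).
    export coupling_interval_slow_sec=3600
    export coupling_interval_fast_sec=720
    render_template < ufs.configure.cpld_wave.IN > ufs.configure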
+ ATM + ICE + WAV + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + WAV -> MED :remapMethod=redist + MED med_phases_post_wav + MED med_phases_ocnalb_run + MED med_phases_prep_ocn_accum + @ + OCN -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + WAV_model = @[wav_model] + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] + pio_rearranger = box + ocean_albedo_limit = @[ocean_albedo_limit] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 + orb_eccen = 1.e36 + orb_iyear = 2000 + orb_iyear_align = 2000 + orb_mode = fixed_year + orb_mvelp = 1.e36 + orb_obliq = 1.e36 +:: diff --git a/parm/ufs/ufs.configure.leapfrog_atm_wav.IN b/parm/ufs/ufs.configure.leapfrog_atm_wav.IN new file mode 100644 index 0000000000..ec22c9478c --- /dev/null +++ b/parm/ufs/ufs.configure.leapfrog_atm_wav.IN @@ -0,0 +1,41 @@ +############################################# +#### UFS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: ATM WAV +EARTH_attributes:: + Verbosity = max +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = max + DumpFields = true +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = max +:: + + + +# Run Sequence # +runSeq:: + @@[coupling_interval_slow_sec] + ATM + ATM -> WAV + WAV + @ +:: diff --git a/parm/wave/wave_gefs.buoys b/parm/wave/wave_gefs.buoys new file mode 100644 index 0000000000..6a56303b87 --- /dev/null +++ b/parm/wave/wave_gefs.buoys @@ -0,0 +1,300 @@ +$ +$ Global output point data file for global wave ensembles +$ +$ Retained only deep water buoys or so from general buoy file +$ taken from multi_1/2 +$ +$ Key to data in file: +$ +$ LON Longitude, east positive +$ LAT Latitude +$ NAME Output point name C*10, no blanks in name allowed +$ AH Anemometer height, dummy value for none-data points +$ TYPE Buoy type indicator, used for plotting and postprocessing +$ DAT Data point +$ XDT Former data point +$ BPT Boundary data for external models. 
+$ VBY 'Virtual buoy' +$ SOURCE Source of data point +$ ENCAN Environment Canada +$ GOMOOS Gulf of Maine OOS +$ IDT Irish Department of Transportation +$ METFR Meteo France +$ NCEP Boundary and other data points +$ NDBC National Data Buoy Center +$ PRIV Private and incidental data sources +$ SCRIPPS Scripps +$ UKMO UK Met Office +$ PDES Puertos del Estado +$ SHOM Service Hydrographique et Oceanographique de la Marine +$ OCNOR Fugro Oceanor +$ WHOI Woods Hole Oceanographic Institution +$ SKOREA South Korea +$ MVEW Ministerie van Verkeer en Waterstaat +$ CORMP Coastal Ocean Research and Monitoring Program +$ DIMAR Direccion General Maritima (Colombia) +$ BP British Petroleum +$ SCALE Scale indicator for plotting of locations on map +$ Point will only be plotted if SCALE <= DX in our +$ GrADS scripts, DX is width of plot in longitude +$ +$ DEPTH Depth in meters +$ +$ Notes: +$ +$ - The '$' at the first position identifies comments for WAVEWATCH III +$ input. +$ - The first three data columns are used by the forecast code, the others +$ are used by postprocessing scripts. +$ +$ LON LAT NAME AH TYPE SOURCE SCALE DEPTH +$ --------------------------------------------------------------- + -85.078 -19.425 '32012 ' -1 DAT WHOI 360 4534 + -90.000 -55.000 '34002 ' 6.2 DAT OCOBSI 360 4800 + -72.522 34.502 '41001 ' 4 DAT NDBC 360 4556 + -74.930 31.892 '41002 ' 4 DAT NDBC 360 3920 + -79.099 32.501 '41004 ' 4 DAT NDBC 360 39 + -80.868 31.400 '41008 ' 5 DAT NDBC 360 14.9 + -80.185 28.508 '41009 ' 4 DAT NDBC 360 42 + -78.485 28.878 '41010 ' 4 DAT NDBC 360 890 + -77.743 33.436 '41013 ' 4 DAT NDBC 360 23.5 + -78.477 33.837 '41024 ' 2.95 DAT CORMP 360 10 + -75.363 35.025 '41025 ' 4 DAT NDBC 360 59.4 + -79.624 32.803 '41029 ' 2.95 DAT CORMP 360 10 + -80.406 32.279 '41033 ' 2.95 DAT CORMP 360 10 + -77.362 33.988 '41037 ' 2.95 DAT CORMP 360 30 + -77.715 34.141 '41038 ' 2.95 DAT CORMP 360 18 + -53.045 14.554 '41040 ' 4 DAT NDBC 360 5112 + -46.093 14.311 '41041 ' 4 DAT NDBC 360 3595 + -64.830 21.124 '41043 ' 4 DAT NDBC 360 5271 + -58.630 21.582 '41044 ' 4 DAT NDBC 360 5419 + -68.384 23.822 '41046 ' 4 DAT NDBC 360 5549 + -71.494 27.514 '41047 ' 4 DAT NDBC 360 5321 + -69.585 31.838 '41048 ' 4 DAT NDBC 360 5340 + -62.938 27.490 '41049 ' 4 DAT NDBC 360 5459 + -65.004 18.257 '41051 ' 4 DAT CARICOOS 360 32 + -64.763 18.249 '41052 ' 4 DAT CARICOOS 360 44 + -65.464 18.261 '41056 ' 4 DAT CARICOOS 360 30 + -65.157 18.476 '41058 ' 4 DAT CARICOOS 360 40 + -51.017 14.824 '41060 ' -1 DAT WHOI 360 5021 + -75.095 35.778 '41062 ' 3.5 DAT UNC 360 36 + -75.941 34.782 '41063 ' 3.5 DAT UNC 360 30 + -76.949 34.207 '41064 ' 2.95 DAT CORMP 360 30 + -78.015 33.721 '41108 ' -1 DAT SIO 360 12.8 + -77.717 34.141 '41110 ' -1 DAT CORMP 360 17 + -81.292 30.709 '41112 ' -1 DAT SIO 360 15.54 + -80.534 28.400 '41113 ' -1 DAT SIO 360 9.8 + -80.188 28.523 '41116 ' -1 DAT SIO 360 41 + -81.080 30.000 '41117 ' -1 DAT USACE 360 24.4 + -80.590 28.609 '41118 ' -1 DAT SIO 360 7.3 + -78.483 33.842 '41119 ' -1 DAT CORMP 360 13 + -76.946 34.213 '41159 ' -1 DAT CORMP 360 30.5 + -89.650 25.961 '42001 ' 3.6 DAT NDBC 360 3334 + -93.758 26.091 '42002 ' 5 DAT NDBC 360 3125.1 + -85.615 25.925 '42003 ' 4 DAT NDBC 360 3265 + -87.551 30.064 '42012 ' 4 DAT NDBC 360 25.9 + -95.350 27.906 '42019 ' 5 DAT NDBC 360 82.2 + -96.693 26.968 '42020 ' 4 DAT NDBC 360 84.1 + -83.741 27.505 '42022 ' 3.1 DAT COMPS 360 50 + -83.086 26.010 '42023 ' 3.1 DAT COMPS 360 50 + -83.475 25.171 '42026 ' 3.2 DAT COMPS 360 70 + -94.413 29.232 '42035 ' 4 DAT NDBC 360 16.2 + -84.516 28.501 '42036 ' 4 
DAT NDBC 360 49.7 + -86.008 28.788 '42039 ' 4 DAT NDBC 360 270 + -88.226 29.208 '42040 ' 4 DAT NDBC 360 183 + -94.899 28.982 '42043 ' 4 DAT TAMUNI 360 19 + -96.500 26.217 '42045 ' 4 DAT TAMUNI 360 62 + -94.037 27.890 '42046 ' 4 DAT TAMUNI 360 105 + -93.597 27.897 '42047 ' 4 DAT TAMUNI 360 89 + -93.941 22.124 '42055 ' 4 DAT NDBC 360 3624 + -84.946 19.812 '42056 ' 4 DAT NDBC 360 4565 + -81.422 16.908 '42057 ' 4 DAT NDBC 360 377 + -74.560 14.775 '42058 ' 4 DAT NDBC 360 4150 + -67.483 15.252 '42059 ' 4 DAT NDBC 360 4784 + -63.354 16.413 '42060 ' 4 DAT NDBC 360 1507 + -81.251 24.500 '42078 ' -1 DAT CDIP 360 170 + -81.242 24.535 '42079 ' -1 DAT CDIP 360 99 + -66.532 17.869 '42085 ' 4 DAT CARICOOS 360 18.9 + -60.848 11.185 '42087 ' 3.35 DAT ICON 360 1266 + -60.521 11.301 '42088 ' 3.35 DAT ICON 360 1266 + -80.061 19.699 '42089 ' 3.35 DAT ICON 360 5726 + -69.580 18.432 '42090 ' 3.35 DAT ICON 360 1188 + -81.967 24.407 '42095 ' -1 DAT USACE 360 100 + -83.650 25.700 '42097 ' -1 DAT USACE 360 81 + -84.275 27.345 '42099 ' -1 DAT SIO 360 93.9 + -90.459 26.689 '42360 ' 3 DAT MURPHY 360 2539.9 + -92.490 27.550 '42361 ' 122 DAT SHELL 360 871.7 + -90.648 27.795 '42362 ' 122 DAT ENVEN 360 910 + -89.220 28.160 '42363 ' 122 DAT SHELL 360 894 + -88.090 29.060 '42364 ' 122 DAT SHELL 360 980.2 + -90.283 27.207 '42369 ' 60.4 DAT BP 360 1371.9 + -88.289 28.521 '42375 ' 61 DAT BP 360 1920.2 + -94.898 26.129 '42390 ' -1 DAT SHELL 360 2382.6 + -90.027 27.196 '42392 ' -1 DAT BP 360 2156.16 + -89.240 28.157 '42394 ' 100 DAT SHELL 360 923.5 + -90.792 26.404 '42395 ' 3 DAT SHELL 360 2941.9 + -125.032 10.051 '43010 ' -1 DAT WHOI 360 4769 + -69.128 43.201 '44005 ' 5 DAT NDBC 360 180.7 + -70.141 43.525 '44007 ' 5 DAT NDBC 360 26.5 + -69.248 40.504 '44008 ' 4 DAT NDBC 360 74.7 + -74.702 38.457 '44009 ' 5 DAT NDBC 360 30 + -66.588 41.070 '44011 ' 4 DAT NDBC 360 88.4 + -70.651 42.346 '44013 ' 5 DAT NDBC 360 64 + -74.840 36.606 '44014 ' 5 DAT NDBC 360 47 + -72.049 40.693 '44017 ' 5 DAT NDBC 360 48 + -70.143 42.206 '44018 ' 5 DAT NDBC 360 41.5 + -70.279 41.493 '44020 ' 4 DAT NDBC 360 14.3 + -73.728 40.883 '44022 ' 3.5 DAT UCT 360 22.5 + -65.912 42.327 '44024 ' 4 DAT NRCOOS 360 225 + -73.164 40.251 '44025 ' 5 DAT NDBC 360 36.3 + -67.300 44.283 '44027 ' 5 DAT NDBC 360 185.3 + -70.566 42.523 '44029 ' 4 DAT NRCOOS 360 65 + -70.426 43.179 '44030 ' 4 DAT NRCOOS 360 62 + -69.355 43.715 '44032 ' 4 DAT NRCOOS 360 100 + -68.996 44.055 '44033 ' 4 DAT NRCOOS 360 110 + -68.112 44.103 '44034 ' 4 DAT NRCOOS 360 100 + -67.876 43.497 '44037 ' 4 DAT NRCOOS 360 285 + -73.580 40.956 '44040 ' 3.5 DAT UCT 360 18.3 + -75.715 36.199 '44056 ' -1 DAT USACE 360 17.4 + -73.703 40.369 '44065 ' 4 DAT NDBC 360 25 + -72.644 39.618 '44066 ' 4 DAT NDBC 360 78 + -73.087 40.699 '44069 ' 3 DAT SBROOKU 360 46 + -70.540 43.020 '44073 ' 2.6 DAT UNH 360 131 + -75.421 36.001 '44086 ' -1 DAT SIO 360 21 + -74.838 36.612 '44088 ' -1 DAT SIO 360 47 + -75.334 37.757 '44089 ' -1 DAT USACE 360 16.9 + -70.329 41.840 '44090 ' -1 DAT USGS 360 26 + -73.769 39.778 '44091 ' -1 DAT USACE 360 25.6 + -75.492 36.872 '44093 ' -1 DAT SIO 360 26.82 + -75.330 35.750 '44095 ' -1 DAT SIO 360 18.3 + -75.809 37.023 '44096 ' -1 DAT SIO 360 11.9 + -71.127 40.969 '44097 ' -1 DAT SIO 360 48.16 + -70.168 42.798 '44098 ' -1 DAT SIO 360 76.5 + -75.720 36.914 '44099 ' -1 DAT SIO 360 18.3 + -75.594 36.260 '44100 ' -1 DAT SIO 360 25.9 + -62.000 42.260 '44137 ' 5 DAT ENCAN 360 4000 + -57.100 44.240 '44139 ' 5 DAT ENCAN 360 1500 + -64.020 42.500 '44150 ' 5 DAT ENCAN 360 1300 + -63.400 44.500 '44258 ' 5 DAT 
ENCAN 360 58 + -147.949 56.232 '46001 ' 5 DAT NDBC 360 4054 + -130.537 42.612 '46002 ' 4 DAT NDBC 360 3413 + -136.100 50.930 '46004 ' 5 DAT ENCAN 360 3600 + -131.079 46.134 '46005 ' 4 DAT NDBC 360 2852 + -137.397 40.782 '46006 ' 4 DAT NDBC 360 4378 + -121.019 34.956 '46011 ' 5 DAT NDBC 360 464.8 + -160.000 57.700 '46021 ' -1 DAT NDBC 360 59 + -120.967 34.714 '46023 ' 10 DAT NDBC 360 384.1 + -119.200 33.000 '46024 ' -1 DAT NDBC 360 1213 + -119.053 33.763 '46025 ' 4 DAT NDBC 360 888 + -121.857 35.703 '46028 ' 5 DAT NDBC 360 1048 + -177.708 57.018 '46035 ' 5 DAT NDBC 360 3687 + -122.398 36.785 '46042 ' 5 DAT NDBC 360 1645.9 + -119.506 32.404 '46047 ' 4 DAT NDBC 360 1394 + -120.477 34.265 '46054 ' 4 DAT NDBC 360 469.4 + -129.951 38.094 '46059 ' 4 DAT NDBC 360 4628 + -146.805 60.584 '46060 ' 5 DAT NDBC 360 445 + -154.987 52.765 '46066 ' 5 DAT NDBC 360 4460 + -120.213 33.677 '46069 ' 4 DAT NDBC 360 977.8 + 175.153 55.082 '46070 ' 5 DAT NDBC 360 3835 + 179.012 51.125 '46071 ' 5 DAT NDBC 360 1681 + -172.088 51.672 '46072 ' 5 DAT NDBC 360 3572 + -172.001 55.031 '46073 ' 5 DAT NDBC 360 3051.5 + -160.817 53.983 '46075 ' 5 DAT NDBC 360 2392.7 + -147.990 59.502 '46076 ' 5 DAT NDBC 360 195.1 + -152.582 55.556 '46078 ' 5 DAT NDBC 360 5380 + -150.042 57.947 '46080 ' 5 DAT NDBC 360 254.5 + -143.372 59.681 '46082 ' 5 DAT NDBC 360 300 + -137.997 58.300 '46083 ' 5 DAT NDBC 360 136 + -136.102 56.622 '46084 ' 5 DAT NDBC 360 1158 + -142.882 55.883 '46085 ' 4 DAT NDBC 360 3721 + -118.052 32.499 '46086 ' 4 DAT NDBC 360 1844.7 + -125.771 45.925 '46089 ' 5 DAT NDBC 360 2293.3 + -122.029 36.751 '46092 ' 4 DAT MBARI 360 1000 + -122.351 36.723 '46114 ' -1 DAT SIO 360 1463 + -131.220 51.830 '46147 ' 5 DAT ENCAN 360 2000 + -131.100 53.620 '46183 ' 5 DAT ENCAN 360 65 + -138.850 53.910 '46184 ' 5 DAT ENCAN 360 3200 + -129.790 52.420 '46185 ' 5 DAT ENCAN 360 230 + -128.770 51.380 '46204 ' -1 DAT ENCAN 360 222 + -134.320 54.190 '46205 ' 5 DAT ENCAN 360 2675 + -126.000 48.840 '46206 ' 5 DAT ENCAN 360 72 + -129.920 50.870 '46207 ' 5 DAT ENCAN 360 2125 + -132.690 52.520 '46208 ' 5 DAT ENCAN 360 2950 + -123.472 37.950 '46214 ' -1 DAT SIO 360 550 + -120.783 34.454 '46218 ' -1 DAT SIO 360 548.6 + -119.882 33.225 '46219 ' -1 DAT SIO 360 274.3 + -118.634 33.855 '46221 ' -1 DAT SIO 360 370 + -118.317 33.618 '46222 ' -1 DAT SIO 360 457 + -117.391 32.933 '46225 ' -1 DAT SIO 360 549 + -117.370 32.747 '46231 ' -1 DAT SIO 360 201.17 + -117.421 32.530 '46232 ' -1 DAT SIO 360 1143 + -117.169 32.570 '46235 ' -1 DAT SIO 360 21 + -122.096 36.343 '46239 ' -1 DAT SIO 360 369 + -145.200 50.033 '46246 ' -1 DAT SIO 360 4252 + -119.559 33.761 '46251 ' -1 DAT SIO 360 1920 + -118.181 33.576 '46253 ' -1 DAT SIO 360 66 + -117.267 32.868 '46254 ' -1 DAT SIO 360 38.71 + -119.651 33.400 '46255 ' -1 DAT SIO 360 105 + -118.201 33.700 '46256 ' -1 DAT SIO 360 23.25 + -120.766 34.439 '46257 ' -1 DAT SIO 360 576.07 + -117.501 32.752 '46258 ' -1 DAT SIO 360 588.6 + -121.497 34.767 '46259 ' -1 DAT USACE 360 646.8 + -119.004 33.704 '46262 ' -1 DAT SIO 360 905 + -151.695 57.479 '46264 ' -1 DAT SIO 360 86 + -165.475 64.473 '46265 ' -1 DAT SIO 360 18.2 + -153.781 23.535 '51000 ' 5 DAT NDBC 360 4811 + -162.000 24.453 '51001 ' 4 DAT NDBC 360 4895 + -157.742 17.043 '51002 ' 4 DAT NDBC 360 4948 + -160.662 19.172 '51003 ' 4 DAT NDBC 360 4943.3 + -152.364 17.604 '51004 ' 4 DAT NDBC 360 4998 + -162.075 24.361 '51101 ' 4 DAT NDBC 360 4849 + -158.117 21.671 '51201 ' -1 DAT SIO 360 200 + -157.678 21.415 '51202 ' -1 DAT SIO 360 89 + -157.010 20.788 '51203 ' -1 DAT SIO 360 201 + 
-158.124 21.281 '51204 ' -1 DAT SIO 360 300 + -156.425 21.018 '51205 ' -1 DAT SIO 360 200 + -154.970 19.780 '51206 ' -1 DAT SIO 360 345 + -157.752 21.477 '51207 ' -1 DAT SIO 360 81 + -159.574 22.285 '51208 ' -1 DAT SIO 360 200 + -170.493 -14.264 '51209 ' -1 DAT SIO 360 82 + -157.756 21.477 '51210 ' -1 DAT SIO 360 80 + -157.959 21.297 '51211 ' -1 DAT SIO 360 35 + -158.149 21.323 '51212 ' -1 DAT SIO 360 280 + -157.003 20.750 '51213 ' -1 DAT SIO 360 235 + 144.788 13.354 '52200 ' -1 DAT SIO 360 200 + 171.392 7.083 '52201 ' -1 DAT SIO 360 540 + 144.812 13.684 '52202 ' -1 DAT SIO 360 510 + 145.662 15.268 '52211 ' -1 DAT SIO 360 487.68 + 134.667 7.630 '52212 ' -1 DAT SIO 360 97 + -5.000 45.230 '62001 ' 3 DAT UKMO 360 4556 + -12.430 48.720 '62029 ' 3 DAT UKMO 360 3575 + -4.400 50.000 '62050 ' -1 DAT UKMO 360 53 + -2.900 49.900 '62103 ' 14 DAT UKMO 360 18 + -12.570 55.420 '62105 ' 3 DAT UKMO 360 2899 + -6.100 50.102 '62107 ' 14 DAT UKMO 360 61 + 0.000 58.300 '62114 ' -1 DAT PRIVATE 360 142 + 0.700 54.000 '62127 ' -1 DAT PRIVATE 360 39 + 1.700 53.400 '62144 ' -1 DAT PRIVATE 360 16 + 2.800 53.102 '62145 ' -1 DAT PRIVATE 360 26 + 2.100 57.200 '62146 ' -1 DAT PRIVATE 360 98 + 1.500 53.600 '62148 ' -1 DAT PRIVATE 360 16 + 1.100 53.700 '62149 ' -1 DAT PRIVATE 360 16 + -8.470 47.550 '62163 ' 3 DAT UKMO 360 4000 + 1.100 54.000 '62165 ' -1 DAT PRIVATE 360 41 + 0.000 50.400 '62305 ' 14 DAT UKMO 360 39 + 1.500 59.500 '63110 ' -1 DAT PRIVATE 360 117 + 1.000 61.100 '63112 ' -1 DAT PRIVATE 360 156 + 1.708 61.000 '63113 ' -1 DAT PRIVATE 360 156 + 1.300 61.600 '63115 ' -1 DAT PRIVATE 360 156 + 1.100 61.400 '63117 ' -1 DAT PRIVATE 360 156 + -11.420 59.070 '64045 ' 3 DAT UKMO 360 1935 + -4.167 60.483 '64046 ' 3 DAT UKMO 360 230 +$ +$ --------------------------------------------------------------- +$ +$ End of list +$ +$ --------------------------------------------------------------- +$ + 0.00 0.00 'STOPSTRING' 999. 
XXX NCEP 0 +$ diff --git a/parm/wave/ww3_grib2.glo_200.inp.tmpl b/parm/wave/ww3_grib2.glo_200.inp.tmpl new file mode 100755 index 0000000000..ddfabdb13d --- /dev/null +++ b/parm/wave/ww3_grib2.glo_200.inp.tmpl @@ -0,0 +1,9 @@ +$ WAVEWATCH-III gridded output input file +$ ---------------------------------------- +TIME DT NT +N +FLAGS +$ +TIME 7 MODNR GRIDNR 0 0 +$ +$ end of input file diff --git a/parm/wave/ww3_grib2.glo_500.inp.tmpl b/parm/wave/ww3_grib2.glo_500.inp.tmpl new file mode 100755 index 0000000000..ddfabdb13d --- /dev/null +++ b/parm/wave/ww3_grib2.glo_500.inp.tmpl @@ -0,0 +1,9 @@ +$ WAVEWATCH-III gridded output input file +$ ---------------------------------------- +TIME DT NT +N +FLAGS +$ +TIME 7 MODNR GRIDNR 0 0 +$ +$ end of input file diff --git a/parm/wave/ww3_multi.inp.tmpl b/parm/wave/ww3_multi.inp.tmpl new file mode 100755 index 0000000000..50b59544cb --- /dev/null +++ b/parm/wave/ww3_multi.inp.tmpl @@ -0,0 +1,38 @@ +$ WAVEWATCH III multi-grid input file +$ ------------------------------------ + NMGRIDS NFGRIDS FUNIPNT IOSRV FPNTPROC FGRDPROC +$ +CPLILINE +WINDLINE +ICELINE +CURRLINE +UNIPOINTS +GRIDLINE +$ + RUN_BEG RUN_END +$ + FLAGMASKCOMP FLAGMASKOUT +$ + OUT_BEG DTFLD OUT_END GOFILETYPE + N + OUTPARS +$ + OUT_BEG DTPNT OUT_END POFILETYPE +BUOY_FILE +$ + OUT_BEG 0 OUT_END +$ +$ Keep next two lines formatting as is to allow proper parsing + RST_BEG DTRST RST_END RSTTYPE +RST_2_BEG DT_2_RST RST_2_END +$ + OUT_BEG 0 OUT_END +$ + OUT_BEG 0 OUT_END +$ + 'the_end' 0 +$ + 'STP' +$ +$ End of input file + diff --git a/parm/wave/ww3_shel.inp.tmpl b/parm/wave/ww3_shel.inp.tmpl new file mode 100644 index 0000000000..0b9b335e1b --- /dev/null +++ b/parm/wave/ww3_shel.inp.tmpl @@ -0,0 +1,42 @@ +$ -------------------------------------------------------------------- $ +$ WAVEWATCH III shel input file $ +$ -------------------------------------------------------------------- $ +$ Include ice and mud parameters only if IC1/2/3/4 used : + F F Water levels + CURRLINE + WINDLINE + ICELINE + F F Atmospheric momentum + F F Air density + F Assimilation data : Mean parameters + F Assimilation data : 1-D spectra + F Assimilation data : 2-D spectra +$ + RUN_BEG + RUN_END +$ +$ IOSTYP + IOSRV +$ + OUT_BEG DTFLD OUT_END GOFILETYPE + N + OUTPARS +$ + OUT_BEG DTPNT OUT_END POFILETYPE +BUOY_FILE +$ + OUT_BEG 0 OUT_END +$ +$ Keep next two lines formatting as is to allow proper parsing + RST_BEG DTRST RST_END RSTTYPE +RST_2_BEG DT_2_RST RST_2_END +$ + OUT_BEG 0 OUT_END +$ + OUT_BEG 0 OUT_END +$ + 'the_end' 0 +$ + 'STP' +$ +$ End of input file diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh index 25d034ef47..d48d58947e 100755 --- a/scripts/exgdas_atmos_chgres_forenkf.sh +++ b/scripts/exgdas_atmos_chgres_forenkf.sh @@ -21,7 +21,7 @@ source "$HOMEgfs/ush/preamble.sh" # Directories. 
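Each active record in the wave_gefs.buoys list above is LON, LAT, a quoted ten-character NAME, then AH, TYPE, SOURCE, SCALE and DEPTH, with the quoted 'STOPSTRING' entry terminating the list. Because the padded NAME field contains blanks, plain whitespace splitting miscounts the columns; splitting on the quote character is more reliable. A small sanity-check sketch along those lines (illustrative; not part of this change set):

    #!/usr/bin/env bash
    # Sketch: count points per SOURCE in a WW3 point list and confirm the
    # STOPSTRING terminator is present. Field layout per the file header.
    set -euo pipefail
    buoy_file=${1:-parm/wave/wave_gefs.buoys}

    awk -F"'" '
      /^\$/ { next }                             # $ introduces a comment line
      $2 == "STOPSTRING" { seen_stop = 1; next } # end-of-list sentinel
      NF == 3 {                                  # $1=LON LAT, $2=NAME, $3=rest
        nrec++
        n = split($3, rest, " ")                 # AH TYPE SOURCE SCALE DEPTH
        if (n == 5) { src[rest[3]]++ } else { bad++ }
      }
      END {
        for (s in src) printf("%-10s %4d points\n", s, src[s])
        printf("total=%d malformed=%d STOPSTRING %s\n",
               nrec, bad, seen_stop ? "found" : "MISSING")
        exit (bad > 0 || !seen_stop)
      }
    ' "${buoy_file}"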
pwd=$(pwd) -export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} +export FIXam=${FIXam:-$HOMEgfs/fix/am} # Base variables CDATE=${CDATE:-"2001010100"} @@ -59,7 +59,7 @@ SENDECF=${SENDECF:-"NO"} SENDDBN=${SENDDBN:-"NO"} # level info file -SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} +SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS}.txt} # forecast files APREFIX=${APREFIX:-""} @@ -129,7 +129,7 @@ if [ $DO_CALC_ANALYSIS == "YES" ]; then $NLN $ATMF09ENS fcst.ensres.09 fi export OMP_NUM_THREADS=$NTHREADS_CHGRES - SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS_ENKF}.txt} + SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS_ENKF}.txt} if [ $USE_CFP = "YES" ]; then [[ -f $DATA/mp_chgres.sh ]] && rm $DATA/mp_chgres.sh diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh index 725cb0223f..94a23f2a85 100755 --- a/scripts/exgdas_atmos_nawips.sh +++ b/scripts/exgdas_atmos_nawips.sh @@ -135,21 +135,19 @@ while [ $fhcnt -le $fend ] ; do EOF export err=$?;err_chk - if [ $SENDCOM = "YES" ] ; then - cp "${GEMGRD}" "${destination}/.${GEMGRD}" - export err=$? - if [[ ${err} -ne 0 ]] ; then - echo " File ${GEMGRD} does not exist." - exit "${err}" - fi - - mv "${destination}/.${GEMGRD}" "${destination}/${GEMGRD}" - if [[ ${SENDDBN} = "YES" ]] ; then - "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ - "${destination}/${GEMGRD}" - else - echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" - fi + cp "${GEMGRD}" "${destination}/.${GEMGRD}" + export err=$? + if [[ ${err} -ne 0 ]] ; then + echo " File ${GEMGRD} does not exist." + exit "${err}" + fi + + mv "${destination}/.${GEMGRD}" "${destination}/${GEMGRD}" + if [[ ${SENDDBN} = "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" + else + echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" fi if [ $fhcnt -ge 240 ] ; then diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh index aa686284be..1810fdef5d 100755 --- a/scripts/exgdas_atmos_verfozn.sh +++ b/scripts/exgdas_atmos_verfozn.sh @@ -1,79 +1,44 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ -# exgdas_vrfyozn.sh +# exgdas_atmos_verfozn.sh # # This script runs the data extract/validation portion of the Ozone Monitor -# (OznMon) DA package. +# (OznMon) DA package. # ################################################################################ err=0 -#------------------------------------------------------------------------------- -# Set environment -# -export RUN_ENVIR=${RUN_ENVIR:-nco} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export envir=${envir:-prod} - -# Other variables -export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} -export PDATE=${PDY}${cyc} -export DO_DATA_RPT=${DO_DATA_RPT:-1} -export NCP=${NCP:-/bin/cp} - - -#----------------------------------------------------------------- -# ensure work and TANK dirs exist, verify oznstat is available -# -export OZN_WORK_DIR=${OZN_WORK_DIR:-$(pwd)} - -if [[ ! -d ${OZN_WORK_DIR} ]]; then - mkdir $OZN_WORK_DIR -fi -cd $OZN_WORK_DIR - -if [[ ! -d ${TANKverf_ozn} ]]; then - mkdir -p $TANKverf_ozn -fi - -if [[ -s ${oznstat} ]]; then - echo ${oznstat} is available -fi - - - data_available=0 if [[ -s ${oznstat} ]]; then - data_available=1 + data_available=1 #------------------------------------------------------------------ - # Copy data files file to local data directory. 
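One pattern worth keeping in mind from the exgdas_atmos_nawips.sh hunk above: the GEMPAK grid is copied to a hidden ".${GEMGRD}" name first and only then renamed to its final name, so nothing watching ${destination} (including consumers of the subsequent dbn_alert) can observe a partially written file; the rename is atomic within a single filesystem. The same idea distilled into a helper (a sketch, not code from this change):

    # Publish a product file without exposing partial writes to consumers.
    publish_atomically() {
      local src=$1 dest_dir=$2
      local base
      base=$(basename "${src}")
      cp "${src}" "${dest_dir}/.${base}"               # hidden while in flight
      mv "${dest_dir}/.${base}" "${dest_dir}/${base}"  # atomic rename
    }

    publish_atomically "${GEMGRD}" "${destination}"    # names as in the script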
- # Untar oznstat file. + # Copy data files file to local data directory. + # Untar oznstat file. #------------------------------------------------------------------ - $NCP $oznstat ./oznstat.$PDATE + ${NCP} "${oznstat}" "./oznstat.${PDY}${cyc}" - tar -xvf oznstat.$PDATE - rm oznstat.$PDATE + tar -xvf "oznstat.${PDY}${cyc}" + rm "oznstat.${PDY}${cyc}" netcdf=0 count=$(ls diag* | grep ".nc4" | wc -l) - if [ $count -gt 0 ] ; then + if [ "${count}" -gt 0 ] ; then netcdf=1 for filenc4 in $(ls diag*nc4.gz); do - file=$(echo $filenc4 | cut -d'.' -f1-2).gz - mv $filenc4 $file + file=$(echo "${filenc4}" | cut -d'.' -f1-2).gz + mv "${filenc4}" "${file}" done fi - + export OZNMON_NETCDF=${netcdf} - ${HOMEoznmon}/ush/ozn_xtrct.sh + "${USHgfs}/ozn_xtrct.sh" err=$? else diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh index 5306fbbdba..50320ffba1 100755 --- a/scripts/exgdas_atmos_verfrad.sh +++ b/scripts/exgdas_atmos_verfrad.sh @@ -1,17 +1,17 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block # . . -# Script name: exgdas_vrfyrad.sh +# Script name: exgdas_atmos_verfrad.sh # Script description: Runs data extract/validation for global radiance diag data # # Author: Ed Safford Org: NP23 Date: 2012-01-18 # -# Abstract: This script runs the data extract/validation portion of the -# RadMon package. +# Abstract: This script runs the data extract/validation portion of the +# RadMon package. # # Condition codes # 0 - no problem encountered @@ -19,110 +19,72 @@ source "$HOMEgfs/ush/preamble.sh" # ################################################################################ -export VERBOSE=${VERBOSE:-YES} - -export RUN_ENVIR=${RUN_ENVIR:-nco} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export envir=${envir:-prod} - -# Filenames -biascr=${biascr:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias} -radstat=${radstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat} -satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} - -# Other variables -export RAD_AREA=${RAD_AREA:-glb} -export MAKE_CTL=${MAKE_CTL:-1} -export MAKE_DATA=${MAKE_DATA:-1} -export USE_ANL=${USE_ANL:-1} -export PDATE=${PDY}${cyc} -export DO_DIAG_RPT=${DO_DIAG_RPT:-1} -export DO_DATA_RPT=${DO_DATA_RPT:-1} -export NCP=${NCP:-/bin/cp} - -########################################################################### -# ensure TANK dir exists, verify radstat and biascr are available -# -if [[ ! -d ${TANKverf_rad} ]]; then - mkdir -p $TANKverf_rad -fi - -if [[ "$VERBOSE" = "YES" ]]; then - if [[ -s ${radstat} ]]; then - echo ${radstat} is available - fi - if [[ -s ${biascr} ]]; then - echo ${biascr} is available - fi -fi -##################################################################### - data_available=0 + if [[ -s ${radstat} && -s ${biascr} ]]; then - data_available=1 + data_available=1 #------------------------------------------------------------------ - # Copy data files file to local data directory. - # Untar radstat file. + # Copy data files file to local data directory. + # Untar radstat file. 
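In the exgdas_atmos_verfozn.sh hunk above, the rename loop strips the ".nc4" marker so that ozn_xtrct.sh sees diag_<type>_ges.<cycle>.gz regardless of diag format. The echo | cut -d'.' -f1-2 pipeline can also be expressed as a parameter expansion with no subshell; a sketch that behaves identically for names of that form:

    # Equivalent rename without echo/cut, assuming diag_<type>_ges.<cycle>.nc4.gz names.
    for filenc4 in diag_*.nc4.gz; do
      [[ -e ${filenc4} ]] || continue        # skip if the glob matched nothing
      mv "${filenc4}" "${filenc4%.nc4.gz}.gz"
    done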
#------------------------------------------------------------------ - $NCP $biascr ./biascr.$PDATE - $NCP $radstat ./radstat.$PDATE + ${NCP} "${biascr}" "./biascr.${PDY}${cyc}" + ${NCP} "${radstat}" "./radstat.${PDY}${cyc}" - tar -xvf radstat.$PDATE - rm radstat.$PDATE + tar -xvf "radstat.${PDY}${cyc}" + rm "radstat.${PDY}${cyc}" #------------------------------------------------------------------ # SATYPE is the list of expected satellite/instrument sources - # in the radstat file. It should be stored in the $TANKverf - # directory. If it isn't there then use the $FIXgdas copy. In all + # in the radstat file. It should be stored in the $TANKverf + # directory. If it isn't there then use the $FIXgdas copy. In all # cases write it back out to the radmon.$PDY directory. Add any # new sources to the list before writing back out. #------------------------------------------------------------------ radstat_satype=$(ls d*ges* | awk -F_ '{ print $2 "_" $3 }') - if [[ "$VERBOSE" = "YES" ]]; then - echo $radstat_satype + if [[ "${VERBOSE}" = "YES" ]]; then + echo "${radstat_satype}" fi - echo satype_file = $satype_file - + echo satype_file = "${satype_file}" + #------------------------------------------------------------------ - # Get previous cycle's date, and look for the satype_file. Using - # the previous cycle will get us the previous day's directory if + # Get previous cycle's date, and look for the satype_file. Using + # the previous cycle will get us the previous day's directory if # the cycle being processed is 00z. #------------------------------------------------------------------ - if [[ $cyc = "00" ]]; then + if [[ ${cyc} = "00" ]]; then use_tankdir=${TANKverf_radM1} else use_tankdir=${TANKverf_rad} fi - echo satype_file = $satype_file - export SATYPE=$(cat ${satype_file}) - + echo satype_file = "${satype_file}" + export SATYPE=$(cat "${satype_file}") + #------------------------------------------------------------- - # Update the SATYPE if any new sat/instrument was - # found in $radstat_satype. Write the SATYPE contents back + # Update the SATYPE if any new sat/instrument was + # found in $radstat_satype. Write the SATYPE contents back # to $TANKverf/radmon.$PDY. #------------------------------------------------------------- satype_changes=0 - new_satype=$SATYPE + new_satype=${SATYPE} for type in ${radstat_satype}; do - test=$(echo $SATYPE | grep $type | wc -l) + type_count=$(echo "${SATYPE}" | grep "${type}" | wc -l) - if [[ $test -eq 0 ]]; then - if [[ "$VERBOSE" = "YES" ]]; then - echo "Found $type in radstat file but not in SATYPE list. Adding it now." + if (( type_count == 0 )); then + if [[ "${VERBOSE}" = "YES" ]]; then + echo "Found ${type} in radstat file but not in SATYPE list. Adding it now." fi satype_changes=1 - new_satype="$new_satype $type" + new_satype="${new_satype} ${type}" fi done - + #------------------------------------------------------------------ # Rename the diag files and uncompress #------------------------------------------------------------------ @@ -130,43 +92,43 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then for type in ${SATYPE}; do - if [[ netcdf -eq 0 && -e diag_${type}_ges.${PDATE}.nc4.${Z} ]]; then + if (( netcdf == 0 )) && [[ -e "diag_${type}_ges.${PDY}${cyc}.nc4.${Z}" ]]; then netcdf=1 fi - - if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_ges.${PDATE}*.${Z}" | wc -l) -gt 0 ]]; then - mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z} - ${UNCOMPRESS} ./${type}.${Z} + + if [[ $(find . 
-maxdepth 1 -type f -name "diag_${type}_ges.${PDY}${cyc}*.${Z}" | wc -l) -gt 0 ]]; then + mv "diag_${type}_ges.${PDY}${cyc}"*".${Z}" "${type}.${Z}" + ${UNCOMPRESS} "./${type}.${Z}" else - echo "WARNING: diag_${type}_ges.${PDATE}*.${Z} not available, skipping" + echo "WARNING: diag_${type}_ges.${PDY}${cyc}*.${Z} not available, skipping" fi - - if [[ $USE_ANL -eq 1 ]]; then - if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_anl.${PDATE}*.${Z}" | wc -l) -gt 0 ]]; then - mv diag_${type}_anl.${PDATE}*.${Z} ${type}_anl.${Z} - ${UNCOMPRESS} ./${type}_anl.${Z} + + if [[ ${USE_ANL} -eq 1 ]]; then + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_anl.${PDY}${cyc}*.${Z}" | wc -l) -gt 0 ]]; then + mv "diag_${type}_anl.${PDY}${cyc}"*".${Z}" "${type}_anl.${Z}" + ${UNCOMPRESS} "./${type}_anl.${Z}" else - echo "WARNING: diag_${type}_anl.${PDATE}*.${Z} not available, skipping" + echo "WARNING: diag_${type}_anl.${PDY}${cyc}*.${Z} not available, skipping" fi fi done - export RADMON_NETCDF=$netcdf + export RADMON_NETCDF=${netcdf} #------------------------------------------------------------------ - # Run the child sccripts. + # Run the child scripts. #------------------------------------------------------------------ - ${USHradmon}/radmon_verf_angle.sh ${PDATE} + "${USHgfs}/radmon_verf_angle.sh" rc_angle=$? - ${USHradmon}/radmon_verf_bcoef.sh ${PDATE} + "${USHgfs}/radmon_verf_bcoef.sh" rc_bcoef=$? - ${USHradmon}/radmon_verf_bcor.sh "${PDATE}" + "${USHgfs}/radmon_verf_bcor.sh" rc_bcor=$? - ${USHradmon}/radmon_verf_time.sh "${PDATE}" + "${USHgfs}/radmon_verf_time.sh" rc_time=$? #-------------------------------------- @@ -175,7 +137,7 @@ if [[ -s ${radstat} && -s ${biascr} ]]; then if [[ ${CLEAN_TANKVERF:-0} -eq 1 ]]; then "${USHradmon}/clean_tankdir.sh" glb 60 rc_clean_tankdir=$? - echo "rc_clean_tankdir = $rc_clean_tankdir" + echo "rc_clean_tankdir = ${rc_clean_tankdir}" fi fi @@ -188,23 +150,23 @@ fi err=0 if [[ ${data_available} -ne 1 ]]; then err=1 -elif [[ $rc_angle -ne 0 ]]; then - err=$rc_angle -elif [[ $rc_bcoef -ne 0 ]]; then - err=$rc_bcoef -elif [[ $rc_bcor -ne 0 ]]; then - err=$rc_bcor -elif [[ $rc_time -ne 0 ]]; then - err=$rc_time +elif [[ ${rc_angle} -ne 0 ]]; then + err=${rc_angle} +elif [[ ${rc_bcoef} -ne 0 ]]; then + err=${rc_bcoef} +elif [[ ${rc_bcor} -ne 0 ]]; then + err=${rc_bcor} +elif [[ ${rc_time} -ne 0 ]]; then + err=${rc_time} fi ##################################################################### # Restrict select sensors and satellites export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} rlist="saphir" -for rtype in $rlist; do - if compgen -G "$TANKverf_rad/*${rtype}*" > /dev/null; then - ${CHGRP_CMD} "${TANKverf_rad}"/*${rtype}* +for rtype in ${rlist}; do + if compgen -G "${TANKverf_rad}/"*"${rtype}"* > /dev/null; then + ${CHGRP_CMD} "${TANKverf_rad}/"*"${rtype}"* fi done diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh index 8f1928042f..199b5609a2 100755 --- a/scripts/exgdas_enkf_earc.sh +++ b/scripts/exgdas_enkf_earc.sh @@ -8,8 +8,6 @@ source "${HOMEgfs}/ush/preamble.sh" export n=$((10#${ENSGRP})) export CDUMP_ENKF="${EUPD_CYC:-"gdas"}" -export ARCH_LIST="${COM_TOP}/earc${ENSGRP}" - # ICS are restarts and always lag INC by $assim_freq hours. 
EARCINC_CYC=${ARCH_CYC} EARCICS_CYC=$((ARCH_CYC-assim_freq)) @@ -17,10 +15,6 @@ if [ "${EARCICS_CYC}" -lt 0 ]; then EARCICS_CYC=$((EARCICS_CYC+24)) fi -[[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" -mkdir -p "${ARCH_LIST}" -cd "${ARCH_LIST}" || exit 2 - "${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" status=$? if [ "${status}" -ne 0 ]; then @@ -66,7 +60,7 @@ if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; if [ "${PDY}${cyc}" -gt "${SDATE}" ]; then # Don't run for first half cycle - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_grp${n}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_grp${n}.txt") status=$? if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_grp${ENSGRP}.tar failed" @@ -74,7 +68,7 @@ if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; fi if [ "${SAVEWARMICA}" = "YES" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restarta_grp${n}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_restarta_grp${n}.txt") status=$? if [ "${status}" -ne 0 ]; then echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restarta_grp${ENSGRP}.tar failed" @@ -83,7 +77,7 @@ if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; fi if [ "${SAVEWARMICB}" = "YES" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restartb_grp${n}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${DATA}/${RUN}_restartb_grp${n}.txt") status=$? if [ "${status}" -ne 0 ]; then echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restartb_grp${ENSGRP}.tar failed" @@ -112,12 +106,45 @@ if [ "${ENSGRP}" -eq 0 ]; then fi set +e - ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" $(cat "${ARCH_LIST}/${RUN}.txt") + # Check if the tarball will have rstprod in it + has_rstprod="NO" + while IFS= read -r file; do + if [[ -f ${file} ]]; then + group=$( stat -c "%G" "${file}" ) + if [[ "${group}" == "rstprod" ]]; then + has_rstprod="YES" + break + fi + fi + done < "${DATA}/${RUN}.txt" + + # Create the tarball + tar_fl=${ATARDIR}/${PDY}${cyc}/${RUN}.tar + ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${RUN}.txt") status=$? - ${HSICMD} chgrp rstprod "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" - ${HSICMD} chmod 640 "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" + if [[ "${status}" -ne 0 ]]; then + echo "FATAL ERROR: Tarring of ${tar_fl} failed" + exit "${status}" + fi + + # If rstprod was found, change the group of the tarball + if [[ "${has_rstprod}" == "YES" ]]; then + ${HSICMD} chgrp rstprod "${tar_fl}" + stat_chgrp=$? + ${HSICMD} chmod 640 "${tar_fl}" + stat_chgrp=$((stat_chgrp+$?)) + if [[ "${stat_chgrp}" -gt 0 ]]; then + echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!" + echo "Attempting to delete ${tar_fl}" + ${HSICMD} rm "${tar_fl}" + echo "Please verify that ${tar_fl} was deleted!" 
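The EARCICS_CYC bookkeeping at the top of exgdas_enkf_earc.sh subtracts one assimilation window from ARCH_CYC and folds negative hours back into 0-23. The two-step form above is fine; for reference, the same wrap-around can be written as a single expression (a sketch, assuming assim_freq <= 24 as in operations):

    # Hour-of-day arithmetic for the IC archive cycle, folded into one line.
    EARCICS_CYC=$(( (ARCH_CYC - assim_freq + 24) % 24 ))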
+ exit "${stat_chgrp}" + fi + fi + + # For safety, test if the htar/tar command failed only after changing groups if (( status != 0 && ${PDY}${cyc} >= firstday )); then - echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}.tar failed" + echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed" exit "${status}" fi set_strict @@ -133,172 +160,4 @@ if [ "${ENSGRP}" -eq 0 ]; then "gsistat.${RUN}.${PDY}${cyc}.ensmean" fi - -if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then - exit 0 -fi - -############################################################### -# ENSGRP 0 also does clean-up -############################################################### -if [[ "${ENSGRP}" -eq 0 ]]; then - function remove_files() { - # TODO: move this to a new location - local directory=$1 - shift - if [[ ! -d ${directory} ]]; then - echo "No directory ${directory} to remove files from, skiping" - return - fi - local exclude_list="" - if (($# > 0)); then - exclude_list=$* - fi - local file_list - declare -a file_list - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - readarray -t file_list < <(find -L "${directory}" -type f) - if (( ${#file_list[@]} == 0 )); then return; fi - for exclude in ${exclude_list}; do - echo "Excluding ${exclude}" - declare -a file_list_old=("${file_list[@]}") - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") - if (( ${#file_list[@]} == 0 )); then return; fi - done - - for file in "${file_list[@]}"; do - rm -f "${file}" - done - # Remove directory if empty - rmdir "${directory}" || true - } - - # Start start and end dates to remove - GDATEEND=$(${NDATE} -"${RMOLDEND_ENKF:-24}" "${PDY}${cyc}") - GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") - - while [ "${GDATE}" -le "${GDATEEND}" ]; do - - gPDY="${GDATE:0:8}" - gcyc="${GDATE:8:2}" - - if [[ -d ${COM_TOP} ]]; then - rocotolog="${EXPDIR}/logs/${GDATE}.log" - if [[ -f "${rocotolog}" ]]; then - set +e - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") - rc=$? 
- set_strict - if [ "${rc}" -eq 0 ]; then - case ${CDUMP} in - gdas) nmem="${NMEM_ENS}";; - gfs) nmem="${NMEM_ENS_GFS}";; - *) - echo "FATAL ERROR: Unknown CDUMP ${CDUMP} during cleanup" - exit 10 - ;; - esac - - readarray memlist< <(seq --format="mem%03g" 1 "${nmem}") - memlist+=("ensstat") - - for mem in "${memlist[@]}"; do - # Atmos - exclude_list="f006.ens" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Wave - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Ocean - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') - for template in ${templates}; do - YMEMDIR="${mem}" MD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Ice - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Aerosols (GOCART) - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - - # Mediator - exclude_list="" - # Suppress warnings about chained commands suppressing exit codes - # shellcheck disable=SC2312 - templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') - for template in ${templates}; do - MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" - remove_files "${directory}" "${exclude_list[@]}" - done - done - fi - fi - fi - - # Remove any empty directories - YMD=${gPDY} HH=${gcyc} generate_com target_dir:COM_TOP_TMPL - target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" - if [[ -d ${target_dir} ]]; then - find "${target_dir}" -empty -type d -delete - fi - - # Advance to next cycle - GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") - done -fi - -# Remove enkf*.$rPDY for the older of GDATE or RDATE -GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") -fhmax=${FHMAX_GFS} -RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") -if [ "${GDATE}" -lt "${RDATE}" ]; then - RDATE=${GDATE} -fi -rPDY=$(echo "${RDATE}" | cut -c1-8) -clist="enkfgdas enkfgfs" -for ctype in ${clist}; do - COMIN="${ROTDIR}/${ctype}.${rPDY}" - [[ -d ${COMIN} ]] && rm -rf "${COMIN}" -done - -############################################################### - - exit 0 diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh index de603cba3f..c20d1dec78 100755 --- a/scripts/exgdas_enkf_ecen.sh +++ b/scripts/exgdas_enkf_ecen.sh @@ -24,7 
+24,6 @@ pwd=$(pwd) # Base variables CDATE=${CDATE:-"2010010100"} -CDUMP=${CDUMP:-"gdas"} DONST=${DONST:-"NO"} export CASE=${CASE:-384} ntiles=${ntiles:-6} @@ -60,7 +59,7 @@ FHMIN=${FHMIN_ECEN:-3} FHMAX=${FHMAX_ECEN:-9} FHOUT=${FHOUT_ECEN:-3} FHSFC=${FHSFC_ECEN:-$FHMIN} -if [ $CDUMP = "enkfgfs" ]; then +if [ $RUN = "enkfgfs" ]; then DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"} else DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} @@ -77,8 +76,8 @@ CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} -export FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} -export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} +export FIXorog=${FIXorog:-$HOMEgfs/fix/orog} +export FIXam=${FIXam:-$HOMEgfs/fix/am} export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} export FHOUR=${FHOUR:-0} export DELTSFC=${DELTSFC:-6} @@ -242,7 +241,7 @@ if [ $RECENTER_ENKF = "YES" ]; then $NLN $ATMANL_GSI atmanl_gsi $NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres - SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} + SIGLEVEL=${SIGLEVEL:-${FIXam}/global_hyblev.l${LEVS}.txt} $NLN $CHGRESNC chgres.x chgresnml=chgres_nc_gauss.nml nmltitle=chgres diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh index bc126d5906..fd6136ddd2 100755 --- a/scripts/exgdas_enkf_fcst.sh +++ b/scripts/exgdas_enkf_fcst.sh @@ -18,33 +18,13 @@ #### ################################################################################ -source "$HOMEgfs/ush/preamble.sh" - -# Directories. -export FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} -export FIX_AM=${FIX_AM:-$FIX_DIR/am} - -# Utilities -export NCP=${NCP:-"/bin/cp -p"} -export NMV=${NMV:-"/bin/mv"} -export NLN=${NLN:-"/bin/ln -sf"} - -# Scripts. -FORECASTSH=${FORECASTSH:-$HOMEgfs/scripts/exglobal_forecast.sh} +source "${HOMEgfs}/ush/preamble.sh" # Enemble group, begin and end ENSGRP=${ENSGRP:-1} ENSBEG=${ENSBEG:-1} ENSEND=${ENSEND:-1} -# Model builds -export FCSTEXECDIR=${FCSTEXECDIR:-$HOMEgfs/sorc/fv3gfs.fd/BUILD/bin} -export FCSTEXEC=${FCSTEXEC:-fv3gfs.x} - -# Get DA specific diag table. -export PARM_FV3DIAG=${PARM_FV3DIAG:-$HOMEgfs/parm/parm_fv3diag} -export DIAG_TABLE=${DIAG_TABLE_ENKF:-${DIAG_TABLE:-$PARM_FV3DIAG/diag_table_da}} - # Re-run failed members, or entire group RERUN_EFCSGRP=${RERUN_EFCSGRP:-"YES"} @@ -52,24 +32,20 @@ RERUN_EFCSGRP=${RERUN_EFCSGRP:-"YES"} RECENTER_ENKF=${RECENTER_ENKF:-"YES"} export PREFIX_ATMINC=${PREFIX_ATMINC:-""} -# Ops related stuff -SENDECF=${SENDECF:-"NO"} -SENDDBN=${SENDDBN:-"NO"} - ################################################################################ # Preprocessing -cd $DATA || exit 99 -DATATOP=$DATA +cd "${DATA}" || exit 99 +DATATOP=${DATA} ################################################################################ # Set output data EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}" -if [ -f $EFCSGRP ]; then - if [ $RERUN_EFCSGRP = "YES" ]; then - rm -f $EFCSGRP +if [[ -f ${EFCSGRP} ]]; then + if [[ ${RERUN_EFCSGRP} = "YES" ]]; then + rm -f "${EFCSGRP}" else - echo "RERUN_EFCSGRP = $RERUN_EFCSGRP, will re-run FAILED members only!" - $NMV $EFCSGRP ${EFCSGRP}.fail + echo "RERUN_EFCSGRP = ${RERUN_EFCSGRP}, will re-run FAILED members only!" 
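The rerun bookkeeping below leans on the one-line-per-member status log: on a rerun, the previous group log is parked as ${EFCSGRP}.fail, and a member is skipped only if that file already records it as PASS. The same check condensed into a predicate (a sketch of the logic, not a replacement for it):

    # True if this member already passed in a previous attempt of the group.
    member_passed() {
      local mem=$1 faillog=$2
      [[ -f ${faillog} ]] && grep -q "MEMBER ${mem} : PASS" "${faillog}"
    }

    if member_passed "${ENSMEM}" "${EFCSGRP}.fail"; then
      skip_mem="YES"
    fi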
+ ${NMV} "${EFCSGRP}" "${EFCSGRP}.fail" fi fi @@ -105,8 +81,6 @@ if [[ ${RUN} == "enkfgfs" ]]; then export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX_ENKF:-${FHMAX}}} fi -export restart_interval=${restart_interval_ENKF:-${restart_interval:-6}} - # gfs_physics_nml export FHSWR=${FHSWR_ENKF:-${FHSWR:-3600.}} export FHLWR=${FHLWR_ENKF:-${FHLWR:-3600.}} @@ -121,7 +95,7 @@ export FHZER=${FHZER_ENKF:-${FHZER:-6}} export FHCYC=${FHCYC_ENKF:-${FHCYC:-6}} # Set PREFIX_ATMINC to r when recentering on -if [ $RECENTER_ENKF = "YES" ]; then +if [[ ${RECENTER_ENKF} = "YES" ]]; then export PREFIX_ATMINC="r" fi @@ -134,44 +108,47 @@ declare -x gcyc="${GDATE:8:2}" ################################################################################ # Run forecast for ensemble member rc=0 -for imem in $(seq $ENSBEG $ENSEND); do +for imem in $(seq "${ENSBEG}" "${ENSEND}"); do - cd $DATATOP + cd "${DATATOP}" - cmem=$(printf %03i $imem) - memchar="mem${cmem}" + ENSMEM=$(printf %03i "${imem}") + export ENSMEM + memchar="mem${ENSMEM}" - echo "Processing MEMBER: $cmem" + echo "Processing MEMBER: ${ENSMEM}" ra=0 skip_mem="NO" - if [ -f ${EFCSGRP}.fail ]; then - memstat=$(cat ${EFCSGRP}.fail | grep "MEMBER $cmem" | grep "PASS" | wc -l) - [[ $memstat -eq 1 ]] && skip_mem="YES" + if [[ -f ${EFCSGRP}.fail ]]; then + set +e + memstat=$(grep "MEMBER ${ENSMEM}" "${EFCSGRP}.fail" | grep -c "PASS") + set_strict + [[ ${memstat} -eq 1 ]] && skip_mem="YES" fi # Construct COM variables from templates (see config.com) # Can't make these read-only because we are looping over members MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ - COM_ATMOS_HISTORY COM_ATMOS_MASTER + COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_CONF - RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL if [[ ${DO_WAVE} == "YES" ]]; then MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY - RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL fi if [[ ${DO_OCN} == "YES" ]]; then MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_MED_RESTART COM_OCEAN_RESTART \ COM_OCEAN_INPUT COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS - RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL + MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL fi if [[ ${DO_ICE} == "YES" ]]; then MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART - RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL + MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL fi if [[ ${DO_AERO} == "YES" ]]; then @@ -179,70 +156,70 @@ for imem in $(seq $ENSBEG $ENSEND); do fi - if [ $skip_mem = "NO" ]; then + if [[ ${skip_mem} = "NO" ]]; then ra=0 - export MEMBER=$imem + export MEMBER=${imem} export DATA="${DATATOP}/${memchar}" - if [ -d $DATA ]; then rm -rf $DATA; fi - mkdir -p $DATA - $FORECASTSH + if [[ -d ${DATA} ]]; then rm -rf "${DATA}"; fi + mkdir -p "${DATA}" + 
${FORECASTSH} ra=$? # Notify a member forecast failed and abort - if [ $ra -ne 0 ]; then - err_exit "FATAL ERROR: forecast of member $cmem FAILED. Aborting job" + if [[ ${ra} -ne 0 ]]; then + err_exit "FATAL ERROR: forecast of member ${ENSMEM} FAILED. Aborting job" fi rc=$((rc+ra)) fi - if [ $SENDDBN = YES ]; then - fhr=$FHOUT - while [ $fhr -le $FHMAX ]; do - FH3=$(printf %03i $fhr) - if [ $(expr $fhr % 3) -eq 0 ]; then + if [[ ${SENDDBN} = YES ]]; then + fhr=${FHOUT} + while [[ ${fhr} -le ${FHMAX} ]]; do + FH3=$(printf %03i "${fhr}") + if (( fhr % 3 == 0 )); then "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" fi fhr=$((fhr+FHOUT)) done fi - cd $DATATOP + cd "${DATATOP}" - if [ -s $EFCSGRP ]; then - $NCP $EFCSGRP log_old + if [[ -s ${EFCSGRP} ]]; then + ${NCP} "${EFCSGRP}" log_old fi [[ -f log ]] && rm log [[ -f log_new ]] && rm log_new - if [ $ra -ne 0 ]; then - echo "MEMBER $cmem : FAIL" > log + if [[ ${ra} -ne 0 ]]; then + echo "MEMBER ${ENSMEM} : FAIL" > log else - echo "MEMBER $cmem : PASS" > log + echo "MEMBER ${ENSMEM} : PASS" > log fi - if [ -s log_old ] ; then + if [[ -s log_old ]] ; then cat log_old log > log_new else cat log > log_new fi - $NCP log_new $EFCSGRP + ${NCP} log_new "${EFCSGRP}" done ################################################################################ # Echo status of ensemble group -cd $DATATOP -echo "Status of ensemble members in group $ENSGRP:" -cat $EFCSGRP -[[ -f ${EFCSGRP}.fail ]] && rm ${EFCSGRP}.fail +cd "${DATATOP}" +echo "Status of ensemble members in group ${ENSGRP}:" +cat "${EFCSGRP}" +[[ -f ${EFCSGRP}.fail ]] && rm "${EFCSGRP}".fail ################################################################################ # If any members failed, error out -export err=$rc; err_chk +export err=${rc}; err_chk ################################################################################ # Postprocessing -exit $err +exit "${err}" diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 5bbe7a460f..81d68fb9fe 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -54,8 +54,8 @@ CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} -export FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} -export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} +export FIXorog=${FIXorog:-$HOMEgfs/fix/orog} +export FIXam=${FIXam:-$HOMEgfs/fix/am} export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} export FHOUR=${FHOUR:-0} export DELTSFC=${DELTSFC:-6} @@ -152,8 +152,8 @@ if [ $DOIAU = "YES" ]; then "${DATA}/fnbgsi.${cmem}" ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done @@ -188,8 +188,8 @@ if [ $DOSFCANL_ENKF = "YES" ]; then "${DATA}/fnbgsi.${cmem}" ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" 
"${DATA}/fngrid.${cmem}" + ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index 2bb749e226..1f11026ac4 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -41,7 +41,6 @@ ENKFEXEC=${ENKFEXEC:-$HOMEgfs/exec/enkf.x} # Cycling and forecast hour specific parameters CDATE=${CDATE:-"2001010100"} -CDUMP=${CDUMP:-"gdas"} # Filenames. GPREFIX=${GPREFIX:-""} @@ -82,7 +81,7 @@ cnvw_option=${cnvw_option:-".false."} netcdf_diag=${netcdf_diag:-".true."} modelspace_vloc=${modelspace_vloc:-".false."} # if true, 'vlocal_eig.dat' is needed IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} -if [ $CDUMP = "enkfgfs" ]; then +if [ $RUN = "enkfgfs" ]; then DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"} else DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} @@ -100,7 +99,7 @@ use_gfs_nemsio=".false." paranc=${paranc:-".true."} WRITE_INCR_ZERO="incvars_to_zero= $INCREMENTS_TO_ZERO," if [ $DO_CALC_INCREMENT = "YES" ]; then - write_fv3_incr=".false." + write_fv3_incr=".false." else write_fv3_incr=".true." fi @@ -256,7 +255,7 @@ cat > enkf.nml << EOFnml &nam_enkf datestring="${PDY}${cyc}",datapath="$DATA/", analpertwtnh=${analpertwt},analpertwtsh=${analpertwt},analpertwttr=${analpertwt}, - covinflatemax=1.e2,covinflatemin=1,pseudo_rh=.true.,iassim_order=0, + covinflatemax=1.e2,covinflatemin=1,pseudo_rh=.false.,iassim_order=0, corrlengthnh=${corrlength},corrlengthsh=${corrlength},corrlengthtr=${corrlength}, lnsigcutoffnh=${lnsigcutoff},lnsigcutoffsh=${lnsigcutoff},lnsigcutofftr=${lnsigcutoff}, lnsigcutoffpsnh=${lnsigcutoff},lnsigcutoffpssh=${lnsigcutoff},lnsigcutoffpstr=${lnsigcutoff}, @@ -270,7 +269,7 @@ cat > enkf.nml << EOFnml use_gfs_nemsio=${use_gfs_nemsio},use_gfs_ncio=${use_gfs_ncio},imp_physics=$imp_physics,lupp=$lupp, univaroz=.false.,adp_anglebc=.true.,angord=4,use_edges=.false.,emiss_bc=.true., letkf_flag=${letkf_flag},nobsl_max=${nobsl_max},denkf=${denkf},getkf=${getkf}., - nhr_anal=${IAUFHRS_ENKF},nhr_state=${IAUFHRS_ENKF},use_qsatensmean=.true., + nhr_anal=${IAUFHRS_ENKF},nhr_state=${IAUFHRS_ENKF}, lobsdiag_forenkf=$lobsdiag_forenkf, write_spread_diag=$write_spread_diag, modelspace_vloc=$modelspace_vloc, diff --git a/scripts/exgfs_aero_init_aerosol.py b/scripts/exgfs_aero_init_aerosol.py index db5e462f64..1c81880ca9 100755 --- a/scripts/exgfs_aero_init_aerosol.py +++ b/scripts/exgfs_aero_init_aerosol.py @@ -41,14 +41,14 @@ from functools import partial # Constants -atm_base_pattern = "{rot_dir}/{cdump}.%Y%m%d/%H/atmos/INPUT" # Location of atmosphere ICs +atm_base_pattern = "{rot_dir}/{cdump}.%Y%m%d/%H/model_data/atmos/input" # Location of atmosphere ICs atm_file_pattern = "{path}/gfs_data.{tile}.nc" # Atm IC file names atm_ctrl_pattern = "{path}/gfs_ctrl.nc" # Atm IC control file name -restart_base_pattern = "{rot_dir}/{cdump}.%Y%m%d/%H/atmos/RERUN_RESTART" # Location of restart files (time of previous run) +restart_base_pattern = "{rot_dir}/{cdump}.%Y%m%d/%H/model_data/atmos/restart" # Location of restart files (time of previous run) restart_file_pattern = "{file_base}/{timestamp}fv_core.res.{tile}.nc" # Name of restart data files (time when restart is valid) tracer_file_pattern = "{file_base}/{timestamp}fv_tracer.res.{tile}.nc" # Name of restart tracer files (time when restart is valid) dycore_file_pattern = "{file_base}/{timestamp}fv_core.res.nc" # Name of restart dycore file (time when restart is valid) -tracer_list_file_pattern = 
"{parm_gfs}/chem/gocart_tracer.list" # Text list of tracer names to copy +tracer_list_file_pattern = "{parm_gfs}/ufs/gocart/gocart_tracer.list" # Text list of tracer names to copy merge_script_pattern = "{ush_gfs}/merge_fv3_aerosol_tile.py" n_tiles = 6 max_lookback = 4 # Maximum number of past cycles to look for for tracer data diff --git a/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/scripts/exgfs_atmos_awips_20km_1p0deg.sh index 0f9868a506..7546f3cabe 100755 --- a/scripts/exgfs_atmos_awips_20km_1p0deg.sh +++ b/scripts/exgfs_atmos_awips_20km_1p0deg.sh @@ -37,6 +37,9 @@ fi cd "${DATA}" || exit 2 +# "Import" functions used in this script +source "${HOMEgfs}/ush/product_functions.sh" + ############################################### # Wait for the availability of the pgrb file ############################################### @@ -51,6 +54,7 @@ while (( icnt < 1000 )); do if (( icnt >= 180 )); then msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" err_exit "${msg}" + exit 5 fi done @@ -79,8 +83,6 @@ export opt25=":(APCP|ACPCP|PRATE|CPRAT):" export opt26=' -set_grib_max_bits 25 -fi -if ' export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" export opt28=' -new_grid_interpolation budget -fi ' -export TRIMRH=${TRIMRH:-$USHgfs/trim_rh.sh} -export SCALEDEC=${SCALDEC:-$USHgfs/scale_dec.sh} ############################################################### # Process GFS GRIB AWIP PRODUCTS IN GRIB2 # @@ -109,43 +111,28 @@ export err=$?; err_chk cat temp_gfs gfs_pwat_levels_10.grb > tmp_masterfile for GRID in conus ak prico pac 003; do - # shellcheck disable=SC2086 case ${GRID} in conus) - # Grid 20km_conus - CONUS - 20 km Quadruple Resolution (Lambert Conformal) - # export grid_20km_conus="30 6 0 0 0 0 0 0 369 257 12190000 226541000 8 25000000 265000000 20318000 20318000 0 64 25000000 25000000 0 0" - # $COPYGB2 -g "$grid_20km_conus" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridconus="lambert:265.0:25.0:25.0 226.541:369:20318.0 12.19:257:20318.0" + gridconus="lambert:265.0:25.0:25.0 226.541:369:20318.0 12.19:257:20318.0" + # shellcheck disable=SC2086,SC2248 ${WGRIB2} tmp_masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ ${opt27} ${opt28} -new_grid ${gridconus} "awps_file_f${fcsthrs}_${GRID}" ;; ak) - # Grid 20km_ak - Alaska - Double Resolution (Polar Stereographic) - # Redefined grid 217 for Alaska region - # export grid_20km_ak="20 6 0 0 0 0 0 0 277 213 30000000 187000000 8 60000000 225000000 22500000 22500000 0 64" - # $COPYGB2 -g "$grid_20km_ak" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridak="nps:210.0:60.0 170.0:277:22500 35.0:225:22500" + gridak="nps:210.0:60.0 170.0:277:22500 35.0:225:22500" + # shellcheck disable=SC2086,SC2248 ${WGRIB2} tmp_masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ ${opt27} ${opt28} -new_grid ${gridak} "awps_file_f${fcsthrs}_${GRID}" ;; prico) - # Grid 20km_prico - 0.25 degree Lat/Lon grid for Puerto Rico (20km) - # export grid_20km_prico="0 6 0 0 0 0 0 0 275 205 0 0 50750000 271750000 48 -250000 340250000 250000 250000 0" - # $COPYGB2 -g "$grid_20km_prico" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridprico="latlon 271.75:275:0.25 50.75:205:-0.25" + gridprico="latlon 271.75:275:0.25 50.75:205:-0.25" + # shellcheck disable=SC2086,SC2248 ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ ${opt27} ${opt28} -new_grid ${gridprico} "awps_file_f${fcsthrs}_${GRID}" ;; pac) - # Grid 20km_pac - 20 km Mercator grid for Pacific 
Region - # export grid_20km_pac="10 6 0 0 0 0 0 0 837 692 -45000000 110000000 48 20000000 65720000 270000000 64 0 20000000 20000000" - # NEW export grid_20km_pac="10 6 0 0 0 0 0 0 837 725 -45000000 110000000 48 20000000 65734500 270000000 64 0 20000000 20000000" - # $COPYGB2 -g "$grid_20km_pac" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridpac="mercator:20.0 110.0:837:20000:270.0 -45.0:725:20000:65.7345" + gridpac="mercator:20.0 110.0:837:20000:270.0 -45.0:725:20000:65.7345" + # shellcheck disable=SC2086,SC2248 ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ ${opt27} ${opt28} -new_grid ${gridpac} "awps_file_f${fcsthrs}_${GRID}" ;; @@ -153,7 +140,8 @@ for GRID in conus ak prico pac 003; do ###################################################################### # Process GFS GRIB AWIP 1.0 DEGREE (GRID 003) PRODUCTS IN GRIB2 # ###################################################################### - export grid003="latlon 0:360:1.0 90:181:-1.0" + grid003="latlon 0:360:1.0 90:181:-1.0" + # shellcheck disable=SC2086,SC2248 ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ ${opt27} ${opt28} -new_grid ${grid003} "awps_file_f${fcsthrs}_${GRID}" ;; @@ -162,9 +150,8 @@ for GRID in conus ak prico pac 003; do exit 2 ;; esac - # shellcheck disable= - ${TRIMRH} "awps_file_f${fcsthrs}_${GRID}" - ${SCALEDEC} "awps_file_f${fcsthrs}_${GRID}" + trim_rh "awps_file_f${fcsthrs}_${GRID}" + scale_dec "awps_file_f${fcsthrs}_${GRID}" ${GRB2INDEX} "awps_file_f${fcsthrs}_${GRID}" "awps_file_fi${fcsthrs}_${GRID}" ########################################################################### @@ -187,7 +174,7 @@ for GRID in conus ak prico pac 003; do export pgm; prep_step startmsg - if [[ ${GRID} = "003" && $(( fcsthrs % 6 )) == 0 ]]; then + if [[ ${GRID} = "003" && $(( 10#${fcsthrs} % 6 )) == 0 ]]; then export FORT11="awps_file_f${fcsthrs}_${GRID}" export FORT31="awps_file_fi${fcsthrs}_${GRID}" export FORT51="grib2.awpgfs${fcsthrs}.${GRID}" @@ -201,26 +188,25 @@ for GRID in conus ak prico pac 003; do ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile export err=$?; err_chk - echo " error from tocgrib2=",$err + # TODO: Should this be fatal? + echo "error from tocgrib2=${err}" - if [[ ${SENDCOM} == "YES" ]]; then - ############################## - # Post Files to ${COM_ATMOS_WMO} - ############################## + ############################## + # Post Files to ${COM_ATMOS_WMO} + ############################## - mv "grib2.awpgfs${fcsthrs}.${GRID}" \ + mv "grib2.awpgfs${fcsthrs}.${GRID}" \ "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" - ############################## - # Distribute Data - ############################## + ############################## + # Distribute Data + ############################## - if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]]; then - "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ - "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" - else - echo "File ${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} not posted to db_net." - fi + if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" + else + echo "File ${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} not posted to db_net." 
fi elif [[ ${GRID} != "003" ]]; then export FORT11="awps_file_f${fcsthrs}_${GRID}" @@ -237,25 +223,22 @@ for GRID in conus ak prico pac 003; do ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile export err=$?; err_chk || exit "${err}" - if [[ ${SENDCOM} = "YES" ]]; then - - ############################## - # Post Files to ${COM_ATMOS_WMO} - ############################## + ############################## + # Post Files to ${COM_ATMOS_WMO} + ############################## - mv "grib2.awpgfs_20km_${GRID}_f${fcsthrs}" \ - "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" + mv "grib2.awpgfs_20km_${GRID}_f${fcsthrs}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" - ############################## - # Distribute Data - ############################## + ############################## + # Distribute Data + ############################## - if [[ "${SENDDBN}" = 'YES' || "${SENDAWIP}" = 'YES' ]]; then - "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ - "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" - else - echo "File ${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name} not posted to db_net." - fi + if [[ "${SENDDBN}" = 'YES' || "${SENDAWIP}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" + else + echo "File ${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name} not posted to db_net." fi fi echo "Awip Processing ${fcsthrs} hour completed normally" diff --git a/scripts/exgfs_atmos_fbwind.sh b/scripts/exgfs_atmos_fbwind.sh index e7d0ff3d82..735a906bff 100755 --- a/scripts/exgfs_atmos_fbwind.sh +++ b/scripts/exgfs_atmos_fbwind.sh @@ -71,10 +71,8 @@ startmsg $EXECgfs/fbwndgfs < $PARMproduct/fbwnd_pacific.stnlist >> $pgmout 2> errfile export err=$?; err_chk -if test "$SENDCOM" = 'YES' -then - cp tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name -fi + +cp tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name if test "$SENDDBN" = 'YES' then diff --git a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh index 64562daeed..2dd7fa886a 100755 --- a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh +++ b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh @@ -95,14 +95,12 @@ export filesize=$( ls -l rdbfmsua.out | awk '{print $5}' ) if [ $filesize -gt 40 ] then -if [ $SENDCOM = "YES" ]; then - cp rdbfmsua.out $COMOUT/${RUN}.${cycle}.msupperair - cp sonde.idsms.tbl $COMOUT/${RUN}.${cycle}.msupperairtble - if [ $SENDDBN = "YES" ]; then - $DBNROOT/bin/dbn_alert DATA MSUPPER_AIR $job $COMOUT/${RUN}.${cycle}.msupperair - $DBNROOT/bin/dbn_alert DATA MSUPPER_AIRTBL $job $COMOUT/${RUN}.${cycle}.msupperairtble - fi -fi + cp rdbfmsua.out $COMOUT/${RUN}.${cycle}.msupperair + cp sonde.idsms.tbl $COMOUT/${RUN}.${cycle}.msupperairtble + if [ $SENDDBN = "YES" ]; then + $DBNROOT/bin/dbn_alert DATA MSUPPER_AIR $job $COMOUT/${RUN}.${cycle}.msupperair + $DBNROOT/bin/dbn_alert DATA MSUPPER_AIRTBL $job $COMOUT/${RUN}.${cycle}.msupperairtble + fi fi diff --git a/scripts/exgfs_atmos_goes_nawips.sh b/scripts/exgfs_atmos_goes_nawips.sh index 76ae067280..583593fef8 100755 --- a/scripts/exgfs_atmos_goes_nawips.sh +++ b/scripts/exgfs_atmos_goes_nawips.sh @@ -103,15 +103,13 @@ EOF $GEMEXE/gpend - if [ $SENDCOM = "YES" ] ; then - cp $GEMGRD $COMOUT/.$GEMGRD - mv $COMOUT/.$GEMGRD $COMOUT/$GEMGRD - if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD 
- else - echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" - fi + cp $GEMGRD $COMOUT/.$GEMGRD + mv $COMOUT/.$GEMGRD $COMOUT/$GEMGRD + if [ $SENDDBN = "YES" ] ; then + $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ + $COMOUT/$GEMGRD + else + echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" fi let fhcnt=fhcnt+finc diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh index 4009a8e66a..a43c279ae6 100755 --- a/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -40,13 +40,52 @@ export opt26=' -set_grib_max_bits 25 -fi -if ' export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" export opt28=' -new_grid_interpolation budget -fi ' -SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) +#################################### +# Specify Timeout Behavior of Post +# +# SLEEP_TIME - Amount of time to wait for +# a restart file before exiting +# SLEEP_INT - Amount of time to wait between +# checking for restart files +#################################### +export SLEEP_TIME=${SLEEP_TIME:-900} +export SLEEP_INT=${SLEEP_INT:-5} + +SLEEP_LOOP_MAX=$(( SLEEP_TIME / SLEEP_INT )) + +# TODO: Does this section do anything? I retained it for clarity of +# changes/updates, but it does not appear to do anything. + +#################################### +# Check if this is a restart +#################################### +if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb2" ]]; then + modelrecvy=$(cat < "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb") + recvy_cyc="${modelrecvy:8:2}" + recvy_shour="${modelrecvy:10:13}" + + if [[ ${RERUN} == "NO" ]]; then + NEW_SHOUR=$(( recvy_shour + FHINC )) + if (( NEW_SHOUR >= SHOUR )); then + export SHOUR="${NEW_SHOUR}" + fi + if (( recvy_shour >= FHOUR )); then + echo "Forecast Pgrb Generation Already Completed to ${FHOUR}" + else + echo "Starting: PDY=${PDY} cycle=t${recvy_cyc}z SHOUR=${SHOUR}" + fi + fi +fi ############################################################################## # Specify Forecast Hour Range F000 - F024 for GFS_NPOESS_PGRB2_0P5DEG ############################################################################## export SHOUR=000 export FHOUR=024 +export FHINC=003 +if [[ "${FHOUR}" -gt "${FHMAX_GFS}" ]]; then + export FHOUR="${FHMAX_GFS}" +fi ############################################################ # Loop Through the Post Forecast Files @@ -90,18 +129,16 @@ for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do ${WGRIB2} tmpfile | grep -F -f ${paramlist} | ${WGRIB2} -i -grib pgb2file tmpfile export err=$?; err_chk - if [[ ${SENDCOM} == "YES" ]]; then - cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" + cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" - if [[ ${SENDDBN} == "YES" ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGBNPOESS "${job}" \ - "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" - else - msg="File ${RUN}.${cycle}.pgrb2f${fhr3}.npoess not posted to db_net." - postmsg "${msg}" || echo "${msg}" - fi - echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.halfdeg.npoess" + if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGBNPOESS "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" + else + msg="File ${RUN}.${cycle}.pgrb2f${fhr3}.npoess not posted to db_net." 
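# A minimal sketch of the wait loop implied by SLEEP_TIME/SLEEP_INT above;
# target_file is a hypothetical stand-in for the file being polled.
#   SLEEP_LOOP_MAX=$(( SLEEP_TIME / SLEEP_INT ))
#   icnt=0
#   until [[ -s "${target_file}" ]]; do
#     icnt=$((icnt+1))
#     if (( icnt >= SLEEP_LOOP_MAX )); then
#       err_exit "FATAL ERROR: ${target_file} still missing after ${SLEEP_TIME}s"
#     fi
#     sleep "${SLEEP_INT}"
#   done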
+ postmsg "${msg}" || echo "${msg}" fi + echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.halfdeg.npoess" rm tmpfile pgb2file done @@ -111,6 +148,10 @@ done ################################################################ export SHOUR=000 export FHOUR=180 +export FHINC=003 +if [[ "${FHOUR}" -gt "${FHMAX_GFS}" ]]; then + export FHOUR="${FHMAX_GFS}" +fi ################################# # Process GFS PGRB2_SPECIAL_POST @@ -161,23 +202,20 @@ for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do ${WGRIB2} pgb2file -s > pgb2ifile - if [[ ${SENDCOM} == "YES" ]]; then - - cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}" - cp pgb2ifile "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}.idx" - cp pgb2file2 "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr3}.grd221" - - if [[ ${SENDDBN} == "YES" ]]; then - "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25 "${job}" \ - "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25_WIDX "${job}" \ - "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}.idx" - "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMGRD221_PGB2 "${job}" \ - "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr}.grd221" - fi - - echo "${PDY}${cyc}${fhr}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb" + cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}" + cp pgb2ifile "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}.idx" + cp pgb2file2 "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr3}.grd221" + + if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25 "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25_WIDX "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMGRD221_PGB2 "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr}.grd221" fi + + echo "${PDY}${cyc}${fhr}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb" rm pgb2file2 pgb2ifile if [[ ${SENDECF} == "YES" ]]; then diff --git a/scripts/exgfs_atmos_grib_awips.sh b/scripts/exgfs_atmos_grib_awips.sh index f10508626f..037b4ce191 100755 --- a/scripts/exgfs_atmos_grib_awips.sh +++ b/scripts/exgfs_atmos_grib_awips.sh @@ -37,13 +37,10 @@ if (( num != 1 )); then exit 16 fi -cd "${DATA}" || exit 2 +cd "${DATA}/awips_g1" || exit 2 -fcsthrs=$(printf "%03d" "${fcsthrs}") - -export SCALEDEC=${SCALDEC:-${USHgfs}/scale_dec.sh} - -cd ${DATA}/awips_g1 || exit 2 +# "Import" functions used in this script +source "${HOMEgfs}/ush/product_functions.sh" ############################################### # Wait for the availability of the pgrb file @@ -59,6 +56,7 @@ while (( icnt < 1000 )); do if (( icnt >= 180 )); then msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" err_exit "${msg}" + exit 5 fi done @@ -79,7 +77,7 @@ cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b" cat tmpfile2 tmpfile2b > tmpfile ${WGRIB2} tmpfile | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \ ${WGRIB2} -i -grib masterfile tmpfile -${SCALEDEC} masterfile +scale_dec masterfile ${CNVGRIB} -g21 masterfile masterfile.grib1 ln -s masterfile.grib1 fort.11 @@ -114,23 +112,21 @@ export err=$?; err_chk # Post Files to ${COM_ATMOS_WMO} ############################## -if [[ "${SENDCOM}" = 'YES' ]]; then - cp "xtrn.awpgfs${fcsthrs}.${GRID}" 
"${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" +cp "xtrn.awpgfs${fcsthrs}.${GRID}" "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" - ############################## - # Distribute Data - ############################## +############################## +# Distribute Data +############################## - if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]] ; then - "${DBNROOT}/bin/dbn_alert" "${DBNALERT_TYPE}" "${NET}" "${job}" \ - "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" - else - echo "File ${output_grb}.${job_name} not posted to db_net." - fi +if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]] ; then + "${DBNROOT}/bin/dbn_alert" "${DBNALERT_TYPE}" "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" +else + echo "File xtrn.awpgfs${fcsthrs}.${GRID}.${job_name} not posted to db_net." fi if [[ -e "${pgmout}" ]] ; then - cat ${pgmout} + cat "${pgmout}" fi ############################################################################### diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh index 07b0ca8b3f..ebb509d392 100755 --- a/scripts/exgfs_atmos_nawips.sh +++ b/scripts/exgfs_atmos_nawips.sh @@ -10,25 +10,28 @@ # echo " data on the CCS is properly protected." ##################################################################### -source "$HOMEgfs/ush/preamble.sh" "${2}" +source "${HOMEgfs}/ush/preamble.sh" "${2}" #### If EMC GFS PARA runs hourly file are not available, The ILPOST #### will set to 3 hour in EMC GFS PARA. #### Note: ILPOST default set to 1 export ILPOST=${ILPOST:-1} -cd $DATA +cd "${DATA}" || exit 1 RUN2=$1 fend=$2 DBN_ALERT_TYPE=$3 -destination=${4} +destination=$4 -DATA_RUN=$DATA/$RUN2 -mkdir -p $DATA_RUN -cd $DATA_RUN +DATA_RUN="${DATA}/${RUN2}" +mkdir -p "${DATA_RUN}" +cd "${DATA_RUN}" || exit 1 + +# "Import" functions used in this script +source "${HOMEgfs}/ush/product_functions.sh" # -NAGRIB=$GEMEXE/nagrib2 +NAGRIB="${GEMEXE}/nagrib2" # cpyfil=gds @@ -42,19 +45,19 @@ output=T pdsext=no maxtries=360 -fhcnt=$fstart -while [ $fhcnt -le $fend ] ; do +fhcnt=${fstart} +while (( fhcnt <= fend )) ; do if mkdir "lock.${fhcnt}" ; then - cd lock.$fhcnt - cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl - cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl - cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl - cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl + cd "lock.${fhcnt}" || exit 1 + cp "${FIXgempak}/g2varswmo2.tbl" "g2varswmo2.tbl" + cp "${FIXgempak}/g2vcrdwmo2.tbl" "g2vcrdwmo2.tbl" + cp "${FIXgempak}/g2varsncep1.tbl" "g2varsncep1.tbl" + cp "${FIXgempak}/g2vcrdncep1.tbl" "g2vcrdncep1.tbl" fhr=$(printf "%03d" "${fhcnt}") - GEMGRD=${RUN2}_${PDY}${cyc}f${fhr} + GEMGRD="${RUN2}_${PDY}${cyc}f${fhr}" # Set type of Interpolation for WGRIB2 export opt1=' -set_grib_type same -new_grid_winds earth ' @@ -67,7 +70,6 @@ while [ $fhcnt -le $fend ] ; do export opt26=' -set_grib_max_bits 25 -fi -if ' export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" export opt28=' -new_grid_interpolation budget -fi ' - export TRIMRH=$HOMEgfs/ush/trim_rh.sh case ${RUN2} in # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km? 
@@ -80,99 +82,92 @@ while [ $fhcnt -le $fend ] ; do GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}.idx" icnt=1 - while [ $icnt -lt 1000 ]; do - if [ -r $GRIBIN_chk ] ; then + while (( icnt < 1000 )); do + if [[ -r "${GRIBIN_chk}" ]] ; then + # File available, wait 5 seconds then proceed sleep 5 break else + # File not available yet, wait 10 seconds and try again echo "The process is waiting ... ${GRIBIN_chk} file to proceed." sleep 10 - let "icnt=icnt+1" + icnt=$((icnt+1)) fi - if [ $icnt -ge $maxtries ]; then - echo "ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." + if (( icnt >= maxtries )); then + echo "FATAL ERROR: after 1 hour of waiting for ${GRIBIN_chk} file at F${fhr} to end." export err=7 ; err_chk - exit $err + exit "${err}" fi done - case $RUN2 in + case "${RUN2}" in gfs35_pac) - # $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 0 75125000 130000000 48 17000000 260000000 312000 312000 0" -x $GRIBIN grib$fhr - # NEW define gfs35_pac="0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" - # $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -x $GRIBIN grib$fhr - export gfs35_pac='latlon 130.0:416:0.312 75.125:186:-0.312' - $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_pac} grib$fhr - $TRIMRH grib$fhr + # shellcheck disable=SC2086,SC2248 + "${WGRIB2}" "${GRIBIN}" ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${gfs35_pac} "grib${fhr}" + trim_rh "grib${fhr}" ;; gfs35_atl) - # $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 0 75125000 230000000 48 -500000 20000000 312000 312000 0" -x $GRIBIN grib$fhr - # NEW define gfs35_atl="0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" - # $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -x $GRIBIN grib$fhr - export gfs35_atl='latlon 230.0:480:0.312 75.125:242:-0.312' - $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_atl} grib$fhr - $TRIMRH grib$fhr + # shellcheck disable=SC2086,SC2248 + "${WGRIB2}" "${GRIBIN}" ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${gfs35_atl} "grib${fhr}" + trim_rh "grib${fhr}" ;; gfs40) - # $COPYGB2 -g "30 6 0 0 0 0 0 0 185 129 12190000 226541000 8 25000000 265000000 40635000 40635000 0 64 25000000 25000000 0 0" -x $GRIBIN grib$fhr - export gfs40='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0' - $WGRIB2 $GRIBIN $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs40} grib$fhr - $TRIMRH grib$fhr + # shellcheck disable=SC2086,SC2248 + "${WGRIB2}" "${GRIBIN}" ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${gfs40} "grib${fhr}" + trim_rh "grib${fhr}" ;; *) - cp $GRIBIN grib$fhr + cp "${GRIBIN}" "grib${fhr}" esac - export pgm="nagrib2 F$fhr" + export pgm="nagrib2 F${fhr}" startmsg - $NAGRIB << EOF - GBFILE = grib$fhr - INDXFL = - GDOUTF = $GEMGRD - PROJ = $proj - GRDAREA = $grdarea - KXKY = $kxky - MAXGRD = $maxgrd - CPYFIL = $cpyfil - GAREA = $garea - OUTPUT = $output - GBTBLS = $gbtbls - GBDIAG = - PDSEXT = $pdsext - l - r + ${NAGRIB} << EOF +GBFILE = grib${fhr} +INDXFL = +GDOUTF = ${GEMGRD} +PROJ = ${proj} +GRDAREA = ${grdarea} +KXKY = ${kxky} +MAXGRD = ${maxgrd} +CPYFIL = ${cpyfil} +GAREA = ${garea} +OUTPUT = ${output} +GBTBLS = ${gbtbls} +GBDIAG = +PDSEXT = 
${pdsext} +l +r EOF export err=$?;err_chk - if [[ ${SENDCOM} == "YES" ]] ; then - cpfs "${GEMGRD}" "${destination}/${GEMGRD}" - if [[ ${SENDDBN} == "YES" ]] ; then - "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ - "${destination}/${GEMGRD}" - fi + cpfs "${GEMGRD}" "${destination}/${GEMGRD}" + if [[ ${SENDDBN} == "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" fi - cd $DATA_RUN + cd "${DATA_RUN}" || exit 1 else - if [ $fhcnt -ge 240 ] ; then - if [ $fhcnt -lt 276 -a $RUN2 = "gfs_0p50" ] ; then - let fhcnt=fhcnt+6 + if (( fhcnt >= 240 )) ; then + if (( fhcnt < 276 )) && [[ "${RUN2}" = "gfs_0p50" ]] ; then + fhcnt=$((fhcnt+6)) else - let fhcnt=fhcnt+12 + fhcnt=$((fhcnt+12)) fi - elif [ $fhcnt -lt 120 -a $RUN2 = "gfs_0p25" ] ; then + elif ((fhcnt < 120)) && [[ "${RUN2}" = "gfs_0p25" ]] ; then #### let fhcnt=fhcnt+1 - let fhcnt=fhcnt+$ILPOST + fhcnt=$((fhcnt + ILPOST)) else fhcnt=$((ILPOST > finc ? fhcnt+ILPOST : fhcnt+finc )) fi fi done -$GEMEXE/gpend +"${GEMEXE}/gpend" ##################################################################### diff --git a/scripts/exgfs_atmos_postsnd.sh b/scripts/exgfs_atmos_postsnd.sh index 668234c357..368f001ed0 100755 --- a/scripts/exgfs_atmos_postsnd.sh +++ b/scripts/exgfs_atmos_postsnd.sh @@ -71,7 +71,7 @@ export FINT=$NINT1 ic=0 while [ $ic -lt 1000 ]; do - if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.logf${FEND}.${logfm}" ]]; then + if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${FEND}.${logfm}" ]]; then sleep 10 ic=$(expr $ic + 1) else diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh index 2be224d1da..ce903a2284 100755 --- a/scripts/exgfs_wave_init.sh +++ b/scripts/exgfs_wave_init.sh @@ -203,6 +203,17 @@ source "${HOMEgfs}/ush/preamble.sh" fi done +# Copy to other members if needed +if (( NMEM_ENS > 0 )); then + for mem in $(seq -f "%03g" 1 "${NMEM_ENS}"); do + MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} generate_com COM_WAVE_PREP_MEM:COM_WAVE_PREP_TMPL + mkdir -p "${COM_WAVE_PREP_MEM}" + for grdID in ${grdALL}; do + ${NLN} "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "${COM_WAVE_PREP_MEM}/" + done + done +fi + # --------------------------------------------------------------------------- # # 2. Ending diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh index 09d23ec685..63690ff1b0 100755 --- a/scripts/exgfs_wave_nawips.sh +++ b/scripts/exgfs_wave_nawips.sh @@ -11,7 +11,7 @@ # March-2020 Roberto.Padilla@noaa.gov ##################################################################### -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" #export grids=${grids:-'glo_30m at_10m ep_10m wc_10m ao_9km'} #Interpolated grids export grids=${grids:-'glo_30m'} #Native grids @@ -23,15 +23,15 @@ export FHOUT_WAV=${FHOUT_WAV:-6} export FHOUT_HF_WAV=${FHOUT_HF_WAV:-3} export maxtries=${maxtries:-720} export cycle=${cycle:-t${cyc}z} -export GEMwave=${GEMwave:-$HOMEgfs/gempak} -export FIXwave=${FIXwave:-HOMEgfs/fix} +export GEMwave=${GEMwave:-${HOMEgfs}/gempak} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/wave} export DATA=${DATA:-${DATAROOT:?}/${jobid}} -if [ ! -d $DATA ];then - mkdir -p $DATA +if [ ! -d ${DATA} ];then + mkdir -p ${DATA} fi -cd $DATA -cp $GEMwave/fix/g2varswmo2.tbl . +cd ${DATA} +cp ${GEMwave}/fix/g2varswmo2.tbl . 
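# The bracing and quoting applied throughout this refactor guard against word
# splitting and globbing; a minimal illustration with a hypothetical path:
#   DATA='/tmp/dir with spaces'
#   cd "${DATA}" || exit 1   # quoted: the expansion stays a single word
#   # cd ${DATA}             # unquoted: splits into three words and fails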
cpyfil=gds garea=dset @@ -46,11 +46,11 @@ g2tbls=g2varswmo2.tbl NAGRIB=nagrib2 maxtries=15 -fhcnt=$fstart -while [ $fhcnt -le $FHMAX_WAV ]; do - fhr=$(printf "%03d" $fhcnt) - for grid in $grids;do - case $grid in +fhcnt=${fstart} +while [ ${fhcnt} -le ${FHMAX_WAV} ]; do + fhr=$(printf "%03d" ${fhcnt}) + for grid in ${grids};do + case ${grid} in ao_9km) grdIDin='arctic.9km' #grdIDout='gfswaveao9km' ;; grdIDout='gfswavearc' ;; @@ -72,36 +72,36 @@ while [ $fhcnt -le $FHMAX_WAV ]; do grdIDout= ;; esac GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdIDin}.f${fhr}.grib2" - GRIBIN_chk=$GRIBIN.idx + GRIBIN_chk=${GRIBIN}.idx icnt=1 - while [ $icnt -lt 1000 ]; do - if [ -r $GRIBIN_chk ] ; then + while [ ${icnt} -lt 1000 ]; do + if [ -r ${GRIBIN_chk} ] ; then break else let "icnt=icnt+1" sleep 20 fi - if [ $icnt -ge $maxtries ]; then - msg="ABORTING after 5 minutes of waiting for $GRIBIN." + if [ ${icnt} -ge ${maxtries} ]; then + msg="ABORTING after 5 minutes of waiting for ${GRIBIN}." echo ' ' echo '**************************** ' echo '*** ERROR : NO GRIB FILE *** ' echo '**************************** ' echo ' ' - echo $msg + echo ${msg} set_trace - echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog + echo "${RUNwave} ${grdID} ${fhr} prdgen ${date} ${cycle} : GRIB file missing." >> ${wavelog} err=1;export err;${errchk} || exit ${err} fi done #if [ "$grdIDin" = "global.0p25" && "$grid" = "glo_30m" ]; then - if [ "$grdIDin" = "global.0p25" ]; then - $WGRIB2 -lola 0:720:0.5 -90:361:0.5 gribfile.$grdIDout.f${fhr} grib \ - $GRIBIN 1> out 2>&1 + if [ "${grdIDin}" = "global.0p25" ]; then + ${WGRIB2} -lola 0:720:0.5 -90:361:0.5 gribfile.${grdIDout}.f${fhr} grib \ + ${GRIBIN} 1> out 2>&1 OK=$? - if [ "$OK" != '0' ]; then + if [ "${OK}" != '0' ]; then msg="ABNORMAL EXIT: ERROR IN interpolation the global grid" #set +x echo ' ' @@ -109,68 +109,66 @@ while [ $fhcnt -le $FHMAX_WAV ]; do echo '*** FATAL ERROR : ERROR IN making gribfile.$grdID.f${fhr}*** ' echo '************************************************************* ' echo ' ' - echo $msg + echo ${msg} #set_trace - echo "$RUNwave $grdID prdgen $date $cycle : error in grbindex." >> $wavelog + echo "${RUNwave} ${grdID} prdgen ${date} ${cycle} : error in grbindex." >> ${wavelog} err=2;export err;err_chk else #cp $GRIBIN gribfile.$grdID.f${fhr} - GRIBIN=gribfile.$grdIDout.f${fhr} + GRIBIN=gribfile.${grdIDout}.f${fhr} fi fi - echo $GRIBIN + echo ${GRIBIN} GEMGRD=${grdIDout}_${PDY}${cyc}f${fhr} - cp $GRIBIN grib_$grid + cp ${GRIBIN} grib_${grid} startmsg - $NAGRIB <<-EOF - GBFILE = grib_$grid + ${NAGRIB} <<-EOF + GBFILE = grib_${grid} INDXFL = - GDOUTF = $GEMGRD - PROJ = $proj - GRDAREA = $grdarea - KXKY = $kxky - MAXGRD = $maxgrd - CPYFIL = $cpyfil - GAREA = $garea - OUTPUT = $output - GBTBLS = $gbtbls - G2TBLS = $g2tbls + GDOUTF = ${GEMGRD} + PROJ = ${proj} + GRDAREA = ${grdarea} + KXKY = ${kxky} + MAXGRD = ${maxgrd} + CPYFIL = ${cpyfil} + GAREA = ${garea} + OUTPUT = ${output} + GBTBLS = ${gbtbls} + G2TBLS = ${g2tbls} GBDIAG = - PDSEXT = $pdsext + PDSEXT = ${pdsext} l r EOF - export err=$?;pgm=$NAGRIB;err_chk + export err=$?;pgm=${NAGRIB};err_chk ##################################################### # GEMPAK DOES NOT ALWAYS HAVE A NON ZERO RETURN CODE # WHEN IT CAN NOT PRODUCE THE DESIRED GRID. CHECK # FOR THIS CASE HERE. 
##################################################### - ls -l $GEMGRD + ls -l ${GEMGRD} export err=$?;export pgm="GEMPAK CHECK FILE";err_chk - if [ "$NAGRIB" = "nagrib2" ] ; then + if [ "${NAGRIB}" = "nagrib2" ] ; then gpend fi - if [ $SENDCOM = "YES" ] ; then - cpfs "${GEMGRD}" "${COM_WAVE_GEMPAK}/${GEMGRD}" - if [ $SENDDBN = "YES" ] ; then + cpfs "${GEMGRD}" "${COM_WAVE_GEMPAK}/${GEMGRD}" + if [ ${SENDDBN} = "YES" ] ; then "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" "${COM_WAVE_GEMPAK}/${GEMGRD}" - else + else echo "##### DBN_ALERT is: MODEL ${DBN_ALERT_TYPE} ${job} ${COM_WAVE_GEMPAK}/${GEMGRD}#####" - fi fi - rm grib_$grid + rm grib_${grid} done - if [ $fhcnt -ge $FHMAX_HF_WAV ]; then - inc=$FHOUT_WAV + if [ ${fhcnt} -ge ${FHMAX_HF_WAV} ]; then + inc=${FHOUT_WAV} else - inc=$FHOUT_HF_WAV + inc=${FHOUT_HF_WAV} fi let fhcnt=fhcnt+inc done diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index 54350180fe..af362b1c45 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -287,6 +287,8 @@ source "$HOMEgfs/ush/preamble.sh" glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255 ; MODNR=11 ;; + glo_500) GRDNAME='global' ; GRDRES=5p00 ; GRIDNR=255 ; MODNR=11 ;; glo_30mxt) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=11 ;; glo_30m) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=11 ;; at_10m) GRDNAME='atlocn' ; GRDRES=0p16 ; GRIDNR=255 ; MODNR=11 ;; @@ -319,6 +321,8 @@ source "$HOMEgfs/ush/preamble.sh" glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + glo_200) GRDNAME='global' ; GRDRES=2p00 ; GRIDNR=255 ; MODNR=11 ;; + glo_500) GRDNAME='global' ; GRDRES=5p00 ; GRIDNR=255 ; MODNR=11 ;; gwes_30m) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=10 ;; esac echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdnow} diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh index e75df8dfd1..2e6cb2071b 100755 --- a/scripts/exgfs_wave_prdgen_bulls.sh +++ b/scripts/exgfs_wave_prdgen_bulls.sh @@ -202,18 +202,16 @@ source "$HOMEgfs/ush/preamble.sh" # 3. Send output files to the proper destination set_trace -if [ "$SENDCOM" = YES ]; then - cp "awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" - if [ "$SENDDBN_NTC" = YES ]; then +cp "awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" +if [ "$SENDDBN_NTC" = YES ]; then make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" "${DATA}/awipsbull.${cycle}.${RUNwave}" \ - "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" - else + "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" +else if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then - echo "Making NTC bulletin for parallel environment, but do not alert." - (export SENDDBN=NO; make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" \ - "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}") + echo "Making NTC bulletin for parallel environment, but do not alert." 
+ (export SENDDBN=NO; make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" \ + "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}") fi - fi fi # --------------------------------------------------------------------------- # diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh index de7f2c4974..b0cbc124ce 100755 --- a/scripts/exgfs_wave_prdgen_gridded.sh +++ b/scripts/exgfs_wave_prdgen_gridded.sh @@ -31,7 +31,7 @@ source "$HOMEgfs/ush/preamble.sh" export FHOUT_WAV=${FHOUT_WAV:-6} #from 72 to 180 inc=6 export FHOUT_HF_WAV=${FHOUT_HF_WAV:-3} export maxtries=720 - export FIXwave=${FIXwave:-$HOMEgfs/fix} + export FIXwave=${FIXwave:-$HOMEgfs/fix/wave} export PARMwave=${PARMwave:-$HOMEgfs/parm/parm_wave} export USHwave=${USHwave:-$HOMEgfs/ush} export cyc=${cyc:-00} @@ -233,15 +233,13 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m} #set +x echo " Get awips GRIB bulletins out ..." #set_trace - if [ "$SENDCOM" = 'YES' ] - then - #set +x - echo " Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}" - echo " in ${COM_WAVE_WMO}" - #set_trace - cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}" - #set +x - fi + #set +x + echo " Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}" + echo " in ${COM_WAVE_WMO}" + #set_trace + cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}" + #set +x + if [ "$SENDDBN" = 'YES' ] then diff --git a/scripts/exglobal_aero_analysis_finalize.py b/scripts/exglobal_aero_analysis_finalize.py index 7342bf8357..e9464b47e5 100755 --- a/scripts/exglobal_aero_analysis_finalize.py +++ b/scripts/exglobal_aero_analysis_finalize.py @@ -6,8 +6,7 @@ # for a global aerosol variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.aero_analysis import AerosolAnalysis diff --git a/scripts/exglobal_aero_analysis_initialize.py b/scripts/exglobal_aero_analysis_initialize.py index 6c4135fc2d..3a57dc8401 100755 --- a/scripts/exglobal_aero_analysis_initialize.py +++ b/scripts/exglobal_aero_analysis_initialize.py @@ -7,8 +7,7 @@ # for a global aerosol variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.aero_analysis import AerosolAnalysis # Initialize root logger diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_run.py index 887700f476..85f4b963a4 100755 --- a/scripts/exglobal_aero_analysis_run.py +++ b/scripts/exglobal_aero_analysis_run.py @@ -5,8 +5,7 @@ # which executes the global aerosol variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.aero_analysis import AerosolAnalysis # Initialize root logger diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh deleted file mode 120000 index 5c0d685ab0..0000000000 --- a/scripts/exglobal_archive.sh +++ /dev/null @@ -1 +0,0 @@ -exglobal_archive_gsl.sh \ No newline at end of file diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh new file mode 100755 index 0000000000..18217f4efc --- /dev/null +++ b/scripts/exglobal_archive.sh @@ -0,0 +1,315 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# ICS are restarts and always lag INC by $assim_freq hours +ARCHINC_CYC=${ARCH_CYC} +ARCHICS_CYC=$((ARCH_CYC-assim_freq)) +if [ "${ARCHICS_CYC}" -lt 0 ]; then + ARCHICS_CYC=$((ARCHICS_CYC+24)) +fi + +# CURRENT CYCLE +APREFIX="${RUN}.t${cyc}z." + +# Realtime parallels run GFS MOS on 1 day delay +# If realtime parallel, back up CDATE_MOS one day +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +CDATE_MOS=${PDY}${cyc} +if [ "${REALTIME}" = "YES" ]; then + CDATE_MOS=$(${NDATE} -24 "${PDY}${cyc}") +fi +PDY_MOS="${CDATE_MOS:0:8}" + +############################################################### +# Archive online for verification and diagnostics +############################################################### +source "${HOMEgfs}/ush/file_utils.sh" + +[[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" +nb_copy "${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat" "${ARCDIR}/gsistat.${RUN}.${PDY}${cyc}" +nb_copy "${COM_CHEM_ANALYSIS}/${APREFIX}aerostat" "${ARCDIR}/aerostat.${RUN}.${PDY}${cyc}" +nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.anl" "${ARCDIR}/pgbanl.${RUN}.${PDY}${cyc}.grib2" + +# Archive 1 degree forecast GRIB2 files for verification +if [[ "${RUN}" == "gfs" ]]; then + fhmax=${FHMAX_GFS} + fhr=0 + while [ "${fhr}" -le "${fhmax}" ]; do + fhr2=$(printf %02i "${fhr}") + fhr3=$(printf %03i "${fhr}") + nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr3}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" + fhr=$((10#${fhr} + 10#${FHOUT_GFS} )) + done +fi +if [[ "${RUN}" == "gdas" ]]; then + flist="000 003 006 009" + for fhr in ${flist}; do + fname="${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr}" + # TODO Shouldn't the archived files also use three-digit tags? + fhr2=$(printf %02i $((10#${fhr}))) + nb_copy "${fname}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" + done +fi + +if [[ -s "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" ]]; then + # shellcheck disable=2153 + PSLOT4=${PSLOT:0:4} + # shellcheck disable= + PSLOT4=${PSLOT4^^} + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avnop.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" +fi + +if [[ "${RUN}" == "gdas" ]] && [[ -s "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" ]]; then + # shellcheck disable=2153 + PSLOT4=${PSLOT:0:4} + # shellcheck disable= + PSLOT4=${PSLOT4^^} + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdasp.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" +fi + +if [ "${RUN}" = "gfs" ]; then + nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.altg.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.altg.${PDY}${cyc}" "${ARCDIR}/." 
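# A minimal sketch of the ATCF model-ID substitution above: the first four
# characters of the experiment name, uppercased, replace the operational AVNO
# tag. PSLOT and the file names here are hypothetical.
#   PSLOT="retro1a"
#   PSLOT4=${PSLOT:0:4}   # -> "retr"
#   PSLOT4=${PSLOT4^^}    # -> "RETR" (bash 4+ case conversion)
#   sed "s:AVNO:${PSLOT4}:g" < track.in > track.out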
+ + mkdir -p "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" + blist="epac natl" + for basin in ${blist}; do + if [[ -f ${basin} ]]; then + cp -rp "${COM_ATMOS_TRACK}/${basin}" "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" + fi + done +fi + +# Archive required gaussian gfs forecast files for Fit2Obs +if [[ "${RUN}" == "gfs" ]] && [[ "${FITSARC}" = "YES" ]]; then + VFYARC=${VFYARC:-${ROTDIR}/vrfyarch} + [[ ! -d ${VFYARC} ]] && mkdir -p "${VFYARC}" + mkdir -p "${VFYARC}/${RUN}.${PDY}/${cyc}" + prefix="${RUN}.t${cyc}z" + fhmax=${FHMAX_FITS:-${FHMAX_GFS}} + fhr=0 + while [[ ${fhr} -le ${fhmax} ]]; do + fhr3=$(printf %03i "${fhr}") + sfcfile="${COM_ATMOS_HISTORY}/${prefix}.sfcf${fhr3}.nc" + sigfile="${COM_ATMOS_HISTORY}/${prefix}.atmf${fhr3}.nc" + nb_copy "${sfcfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" + nb_copy "${sigfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" + (( fhr = 10#${fhr} + 6 )) + done +fi + + +############################################################### +# Archive data either to HPSS or locally +if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then +############################################################### + + # --set the archiving command and create local directories, if necessary + TARCMD="htar" + HSICMD="hsi" + if [[ ${LOCALARCH} = "YES" ]]; then + TARCMD="tar" + HSICMD='' + [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]] && mkdir -p "${ATARDIR}/${PDY}${cyc}" + [[ ! -d "${ATARDIR}/${CDATE_MOS}" ]] && [[ -d "${ROTDIR}/gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]] && mkdir -p "${ATARDIR}/${CDATE_MOS}" + fi + + #--determine when to save ICs for warm start and forecast-only runs + SAVEWARMICA="NO" + SAVEWARMICB="NO" + SAVEFCSTIC="NO" + firstday=$(${NDATE} +24 "${SDATE}") + mm="${PDY:2:2}" + dd="${PDY:4:2}" + # TODO: This math yields multiple dates sharing the same nday + nday=$(( (10#${mm}-1)*30+10#${dd} )) + mod=$((nday % ARCH_WARMICFREQ)) + if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi + if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi + if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi + if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi + + if [[ "${ARCHICS_CYC}" -eq 18 ]]; then + nday1=$((nday+1)) + mod1=$((nday1 % ARCH_WARMICFREQ)) + if [[ "${mod1}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi + if [[ "${mod1}" -ne 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="NO" ; fi + if [[ "${PDY}${cyc}" -eq "${SDATE}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi + fi + + mod=$((nday % ARCH_FCSTICFREQ)) + if [[ "${mod}" -eq 0 ]] || [[ "${PDY}${cyc}" -eq "${firstday}" ]]; then SAVEFCSTIC="YES" ; fi + + cd "${DATA}" || exit 2 + + "${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" + status=$? + if [ "${status}" -ne 0 ]; then + echo "${HOMEgfs}/ush/hpssarch_gen.sh ${RUN} failed, ABORT!" 
+ exit "${status}" + fi + + cd "${ROTDIR}" || exit 2 + + if [[ "${RUN}" = "gfs" ]]; then + + targrp_list="gfsa gfsb" + + if [ "${ARCH_GAUSSIAN:-"NO"}" = "YES" ]; then + targrp_list="${targrp_list} gfs_flux gfs_netcdfb gfs_pgrb2b" + if [ "${MODE}" = "cycled" ]; then + targrp_list="${targrp_list} gfs_netcdfa" + fi + fi + + if [ "${DO_WAVE}" = "YES" ]; then + targrp_list="${targrp_list} gfswave" + fi + + if [ "${DO_OCN}" = "YES" ]; then + targrp_list="${targrp_list} ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily gfs_flux_1p00" + fi + + if [ "${DO_ICE}" = "YES" ]; then + targrp_list="${targrp_list} ice" + fi + + # Aerosols + if [ "${DO_AERO}" = "YES" ]; then + for targrp in chem; do + # TODO: Why is this tar being done here instead of being added to the list? + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${DATA}/${targrp}.txt") + status=$? + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "HTAR ${PDY}${cyc} ${targrp}.tar failed" + exit "${status}" + fi + done + fi + + #for restarts + if [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gfs_restarta" + fi + + #for downstream products + if [ "${DO_BUFRSND}" = "YES" ]; then + targrp_list="${targrp_list} gfs_downstream" + fi + + #--save mdl gfsmos output from all cycles in the 18Z archive directory + if [[ -d "gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]]; then + set +e + # TODO: Why is this tar being done here instead of being added to the list? + ${TARCMD} -P -cvf "${ATARDIR}/${CDATE_MOS}/gfsmos.tar" "./gfsmos.${PDY_MOS}" + status=$? + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "${TARCMD^^} ${PDY}${cyc} gfsmos.tar failed" + exit "${status}" + fi + set_strict + fi + elif [[ "${RUN}" = "gdas" ]]; then + + targrp_list="gdas" + + #gdaswave + if [ "${DO_WAVE}" = "YES" ]; then + targrp_list="${targrp_list} gdaswave" + fi + + #gdasocean + if [ "${DO_OCN}" = "YES" ]; then + targrp_list="${targrp_list} gdasocean gdasocean_analysis" + fi + + #gdasice + if [ "${DO_ICE}" = "YES" ]; then + targrp_list="${targrp_list} gdasice" + fi + + if [ "${SAVEWARMICA}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gdas_restarta" + if [ "${DO_WAVE}" = "YES" ]; then targrp_list="${targrp_list} gdaswave_restart"; fi + if [ "${DO_OCN}" = "YES" ]; then targrp_list="${targrp_list} gdasocean_restart"; fi + if [ "${DO_ICE}" = "YES" ]; then targrp_list="${targrp_list} gdasice_restart"; fi + fi + + if [ "${SAVEWARMICB}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gdas_restartb" + fi + fi + + # Turn on extended globbing options + shopt -s extglob + for targrp in ${targrp_list}; do + set +e + + # Test whether gdas.tar or gdas_restarta.tar will have rstprod data + has_rstprod="NO" + case ${targrp} in + 'gdas'|'gdas_restarta') + # Test for rstprod in each archived file + while IFS= read -r file; do + if [[ -f ${file} ]]; then + group=$( stat -c "%G" "${file}" ) + if [[ "${group}" == "rstprod" ]]; then + has_rstprod="YES" + break + fi + fi + done < "${DATA}/${targrp}.txt" + + ;; + *) ;; + esac + + # Create the tarball + tar_fl="${ATARDIR}/${PDY}${cyc}/${targrp}.tar" + ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${targrp}.txt") + status=$? + + # Change group to rstprod if it was found even if htar/tar failed in case of partial creation + if [[ "${has_rstprod}" == "YES" ]]; then + ${HSICMD} chgrp rstprod "${tar_fl}" + stat_chgrp=$? 
+ ${HSICMD} chmod 640 "${tar_fl}" + stat_chgrp=$((stat_chgrp+$?)) + if [ "${stat_chgrp}" -gt 0 ]; then + echo "FATAL ERROR: Unable to properly restrict ${tar_fl}!" + echo "Attempting to delete ${tar_fl}" + ${HSICMD} rm "${tar_fl}" + echo "Please verify that ${tar_fl} was deleted!" + exit "${stat_chgrp}" + fi + fi + + # For safety, test if the htar/tar command failed after changing groups + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "FATAL ERROR: ${TARCMD} ${tar_fl} failed" + exit "${status}" + fi + set_strict + done + # Turn extended globbing back off + shopt -u extglob + +############################################################### +fi ##end of HPSS archive +############################################################### + +exit 0 diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py index e51bf082b5..3f4313631c 100755 --- a/scripts/exglobal_atm_analysis_finalize.py +++ b/scripts/exglobal_atm_analysis_finalize.py @@ -6,8 +6,7 @@ # for a global atm variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.atm_analysis import AtmAnalysis diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py index e0077f3323..1793b24b0b 100755 --- a/scripts/exglobal_atm_analysis_initialize.py +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -7,8 +7,7 @@ # for a global atm variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.atm_analysis import AtmAnalysis # Initialize root logger diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_run.py index 6b29a56976..8adbe4a267 100755 --- a/scripts/exglobal_atm_analysis_run.py +++ b/scripts/exglobal_atm_analysis_run.py @@ -5,8 +5,7 @@ # which executes the global atm variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.atm_analysis import AtmAnalysis # Initialize root logger diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py index 7bac671aee..b49cb3c413 100755 --- a/scripts/exglobal_atmens_analysis_finalize.py +++ b/scripts/exglobal_atmens_analysis_finalize.py @@ -6,8 +6,7 @@ # for a global atm local ensemble analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.atmens_analysis import AtmEnsAnalysis diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py index 1461e0b441..1d578b44f2 100755 --- a/scripts/exglobal_atmens_analysis_initialize.py +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -7,8 +7,7 @@ # for a global atm local ensemble analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.atmens_analysis import AtmEnsAnalysis # Initialize root logger diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py index dda4f7a11d..b2eb9fb2e4 100755 --- a/scripts/exglobal_atmens_analysis_run.py +++ 
b/scripts/exglobal_atmens_analysis_run.py @@ -5,8 +5,7 @@ # which executes the global atm local ensemble analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, cast_strdict_as_dtypedict from pygfs.task.atmens_analysis import AtmEnsAnalysis # Initialize root logger diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh index f81f7f0a33..cb3c6467a1 100755 --- a/scripts/exglobal_atmos_analysis.sh +++ b/scripts/exglobal_atmos_analysis.sh @@ -289,7 +289,6 @@ else fi # GSI Fix files -RTMFIX=${CRTM_FIX} BERROR=${BERROR:-${FIXgsi}/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77} SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt} SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt} @@ -408,22 +407,22 @@ fi # CRTM Spectral and Transmittance coefficients mkdir -p crtm_coeffs for file in $(awk '{if($1!~"!"){print $1}}' satinfo | sort | uniq); do - ${NLN} ${RTMFIX}/${file}.SpcCoeff.bin ./crtm_coeffs/${file}.SpcCoeff.bin - ${NLN} ${RTMFIX}/${file}.TauCoeff.bin ./crtm_coeffs/${file}.TauCoeff.bin + ${NLN} ${CRTM_FIX}/${file}.SpcCoeff.bin ./crtm_coeffs/${file}.SpcCoeff.bin + ${NLN} ${CRTM_FIX}/${file}.TauCoeff.bin ./crtm_coeffs/${file}.TauCoeff.bin done -${NLN} ${RTMFIX}/amsua_metop-a_v2.SpcCoeff.bin ./crtm_coeffs/amsua_metop-a_v2.SpcCoeff.bin - -${NLN} ${RTMFIX}/Nalli.IRwater.EmisCoeff.bin ./crtm_coeffs/Nalli.IRwater.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.IRice.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRice.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.IRland.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRland.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.IRsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRsnow.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.VISice.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISice.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.VISland.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISland.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.VISsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISsnow.EmisCoeff.bin -${NLN} ${RTMFIX}/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin -${NLN} ${RTMFIX}/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin -${NLN} ${RTMFIX}/AerosolCoeff.bin ./crtm_coeffs/AerosolCoeff.bin -${NLN} ${RTMFIX}/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin +${NLN} ${CRTM_FIX}/amsua_metop-a_v2.SpcCoeff.bin ./crtm_coeffs/amsua_metop-a_v2.SpcCoeff.bin + +${NLN} ${CRTM_FIX}/Nalli.IRwater.EmisCoeff.bin ./crtm_coeffs/Nalli.IRwater.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.IRice.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRice.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.IRland.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRland.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.IRsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRsnow.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.VISice.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISice.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.VISland.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISland.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.VISsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISsnow.EmisCoeff.bin +${NLN} ${CRTM_FIX}/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin +${NLN} ${CRTM_FIX}/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin +${NLN} ${CRTM_FIX}/AerosolCoeff.bin ./crtm_coeffs/AerosolCoeff.bin +${NLN} ${CRTM_FIX}/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin ############################################################## # Observational data @@ -434,18 +433,13 @@ ${NLN} ${OSCATBF} oscatbufr ${NLN} ${RAPIDSCATBF} rapidscatbufr ${NLN} ${GSNDBF} 
gsndrbufr ${NLN} ${GSNDBF1} gsnd1bufr -${NLN} ${B1HRS2} hirs2bufr ${NLN} ${B1MSU} msubufr -${NLN} ${B1HRS3} hirs3bufr -${NLN} ${B1HRS4} hirs4bufr ${NLN} ${B1AMUA} amsuabufr ${NLN} ${B1AMUB} amsubbufr ${NLN} ${B1MHS} mhsbufr -${NLN} ${ESHRS3} hirs3bufrears ${NLN} ${ESAMUA} amsuabufrears ${NLN} ${ESAMUB} amsubbufrears #$NLN $ESMHS mhsbufrears -${NLN} ${HRS3DB} hirs3bufr_db ${NLN} ${AMUADB} amsuabufr_db ${NLN} ${AMUBDB} amsubbufr_db #$NLN $MHSDB mhsbufr_db @@ -781,8 +775,6 @@ OBS_INPUT:: sbuvbufr sbuv2 n16 sbuv8_n16 0.0 0 0 sbuvbufr sbuv2 n17 sbuv8_n17 0.0 0 0 sbuvbufr sbuv2 n18 sbuv8_n18 0.0 0 0 - hirs3bufr hirs3 n17 hirs3_n17 0.0 1 0 - hirs4bufr hirs4 metop-a hirs4_metop-a 0.0 1 1 gimgrbufr goes_img g11 imgr_g11 0.0 1 0 gimgrbufr goes_img g12 imgr_g12 0.0 1 0 airsbufr airs aqua airs_aqua 0.0 1 1 @@ -816,7 +808,6 @@ OBS_INPUT:: gomebufr gome metop-a gome_metop-a 0.0 2 0 omibufr omi aura omi_aura 0.0 2 0 sbuvbufr sbuv2 n19 sbuv8_n19 0.0 0 0 - hirs4bufr hirs4 n19 hirs4_n19 0.0 1 1 amsuabufr amsua n19 amsua_n19 0.0 1 1 mhsbufr mhs n19 mhs_n19 0.0 1 1 tcvitl tcp null tcp 0.0 0 0 @@ -824,7 +815,6 @@ OBS_INPUT:: seviribufr seviri m09 seviri_m09 0.0 1 0 seviribufr seviri m10 seviri_m10 0.0 1 0 seviribufr seviri m11 seviri_m11 0.0 1 0 - hirs4bufr hirs4 metop-b hirs4_metop-b 0.0 1 1 amsuabufr amsua metop-b amsua_metop-b 0.0 1 1 mhsbufr mhs metop-b mhs_metop-b 0.0 1 1 iasibufr iasi metop-b iasi_metop-b 0.0 1 1 @@ -867,8 +857,6 @@ OBS_INPUT:: sstviirs viirs-m npp viirs-m_npp 0.0 4 0 sstviirs viirs-m j1 viirs-m_j1 0.0 4 0 ahibufr ahi himawari9 ahi_himawari9 0.0 1 0 - atmsbufr atms n21 atms_n21 0.0 1 1 - crisfsbufr cris-fsr n21 cris-fsr_n21 0.0 1 0 sstviirs viirs-m j2 viirs-m_j2 0.0 4 0 ompsnpbufr ompsnp n21 ompsnp_n21 0.0 0 0 ompstcbufr ompstc8 n21 ompstc8_n21 0.0 2 0 diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh index b353d3c52b..a2086aa927 100755 --- a/scripts/exglobal_atmos_analysis_calc.sh +++ b/scripts/exglobal_atmos_analysis_calc.sh @@ -23,7 +23,7 @@ source "$HOMEgfs/ush/preamble.sh" # Directories. pwd=$(pwd) -export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} +export FIXam=${FIXam:-$HOMEgfs/fix/am} # Base variables CDUMP=${CDUMP:-"gdas"} diff --git a/scripts/exglobal_atmos_products.sh b/scripts/exglobal_atmos_products.sh new file mode 100755 index 0000000000..d2c0ed7466 --- /dev/null +++ b/scripts/exglobal_atmos_products.sh @@ -0,0 +1,253 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +# Programs used +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} + +# Scripts used +INTERP_ATMOS_MASTERSH=${INTERP_ATMOS_MASTERSH:-"${HOMEgfs}/ush/interp_atmos_master.sh"} +INTERP_ATMOS_SFLUXSH=${INTERP_ATMOS_SFLUXSH:-"${HOMEgfs}/ush/interp_atmos_sflux.sh"} + +# Variables used in this job +downset=${downset:-1} # No. of groups of pressure grib2 products to create +npe_atmos_products=${npe_atmos_products:-8} # no. of processors available to process each group + +cd "${DATA}" || exit 1 + +# Set paramlist files based on FORECAST_HOUR (-1, 0, 3, 6, etc.) 
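+# A worked example of the logic below (illustrative values; FHOUT_PGBS is
+# assumed to be 6 here):
+#   FORECAST_HOUR=-1 -> fhr3="anl",  analysis paramlist, FLXGF="NO", PGBS="YES"
+#   FORECAST_HOUR=0  -> fhr3="f000", f000 paramlist,     PGBS="YES"
+#   FORECAST_HOUR=3  -> fhr3="f003", PGBS left unset (forced to "NO" further down)
+#   FORECAST_HOUR=6  -> fhr3="f006", PGBS="YES" since 6 % FHOUT_PGBS == 0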
+# Determine if supplemental products (PGBS) (1-degree and 1/2-degree) should be generated
+if (( FORECAST_HOUR <= 0 )); then
+  if (( FORECAST_HOUR < 0 )); then
+    fhr3="anl"
+    paramlista="${paramlista_anl}"
+    FLXGF="NO"
+  elif (( FORECAST_HOUR == 0 )); then
+    fhr3=$(printf "f%03d" "${FORECAST_HOUR}")
+    paramlista="${paramlista_f000}"
+  fi
+  PGBS="YES"
+else
+  fhr3=$(printf "f%03d" "${FORECAST_HOUR}")
+  if (( FORECAST_HOUR%FHOUT_PGBS == 0 )); then
+    PGBS="YES"
+  fi
+fi
+
+#-----------------------------------------------------
+# Section creating pressure grib2 interpolated products
+
+# Files needed by ${INTERP_ATMOS_MASTERSH}
+MASTER_FILE="${COM_ATMOS_MASTER}/${PREFIX}master.grb2${fhr3}"
+
+# Get inventory from ${MASTER_FILE} that matches patterns from ${paramlista}
+# Extract this inventory from ${MASTER_FILE} into a smaller tmpfile or tmpfileb based on paramlista or paramlistb
+# shellcheck disable=SC2312
+${WGRIB2} "${MASTER_FILE}" | grep -F -f "${paramlista}" | ${WGRIB2} -i -grib "tmpfile_${fhr3}" "${MASTER_FILE}"
+export err=$?; err_chk
+# Do the same as above for ${paramlistb}
+if (( downset == 2 )); then
+  # shellcheck disable=SC2312
+  ${WGRIB2} "${MASTER_FILE}" | grep -F -f "${paramlistb}" | ${WGRIB2} -i -grib "tmpfileb_${fhr3}" "${MASTER_FILE}"
+  export err=$?; err_chk
+fi
+
+# Determine grids once and save them as a string and an array for processing
+grid_string="0p25"
+if [[ "${PGBS:-}" == "YES" ]]; then
+  grid_string="${grid_string}:0p50:1p00"
+else
+  echo "Supplemental product generation is disabled for fhr = ${fhr3}"
+  PGBS="NO"  # Can't generate supplemental products if PGBS is not YES
+fi
+# Also transform the ${grid_string} into an array for processing
+IFS=':' read -ra grids <<< "${grid_string}"
+
+for (( nset=1 ; nset <= downset ; nset++ )); do
+
+  echo "Begin processing nset = ${nset}"
+
+  # Number of processors available to process $nset
+  nproc=${npe_atmos_products}
+
+  # Each set represents a group of files
+  if (( nset == 1 )); then
+    grp=""  # TODO: this should be "a" when we eventually rename the pressure grib2 files per EE2 convention
+  elif (( nset == 2 )); then
+    grp="b"
+  fi
+
+  # process grib2 chunkfiles to interpolate using MPMD
+  tmpfile="tmpfile${grp}_${fhr3}"
+
+  # shellcheck disable=SC2312
+  ncount=$(${WGRIB2} "${tmpfile}" | wc -l)
+  if (( nproc > ncount )); then
+    echo "WARNING: Total no. of available processors '${nproc}' exceeds no. of records '${ncount}' in ${tmpfile}"
+    echo "Reduce nproc to ${ncount} (or less) to not waste resources"
+  fi
+  inv=$(( ncount / nproc ))
+  rm -f "${DATA}/poescript"
+
+  last=0
+  for (( iproc = 1 ; iproc <= nproc ; iproc++ )); do
+    first=$((last + 1))
+    last=$((last + inv))
+    if (( last > ncount )); then (( last = ncount )); fi
+
+    # if the final record is a u-component, add the next record (the v-component);
+    # if the final record is land, add the next record (icec)
+    # grep returns 1 if no match is found, so temporarily turn off exit on non-zero rc
+    set +e
+    # shellcheck disable=SC2312
+    ${WGRIB2} -d "${last}" "${tmpfile}" | grep -E -i "ugrd|ustm|uflx|u-gwd|land"
+    rc=$?
+    set_strict
+    if (( rc == 0 )); then  # Matched the grep
+      last=$(( last + 1 ))
+    fi
+    if (( iproc == nproc )); then
+      last=${ncount}
+    fi
+
+    # Break tmpfile into processor specific chunks in preparation for MPMD
+    ${WGRIB2} "${tmpfile}" -for "${first}":"${last}" -grib "${tmpfile}_${iproc}"
+    export err=$?; err_chk
+    input_file="${tmpfile}_${iproc}"
+    output_file_prefix="pgb2${grp}file_${fhr3}_${iproc}"
+    echo "${INTERP_ATMOS_MASTERSH} ${input_file} ${output_file_prefix} ${grid_string}" >> "${DATA}/poescript"
+
+    # if at the final record and the final processor has not been reached, write
+    # echoes to poescript for the remaining processors
+    if (( last == ncount )); then
+      for (( pproc = iproc+1 ; pproc < nproc ; pproc++ )); do
+        echo "/bin/echo ${pproc}" >> "${DATA}/poescript"
+      done
+      break
+    fi
+  done  # for (( iproc = 1 ; iproc <= nproc ; iproc++ )); do
+
+  # Run with MPMD or serial
+  if [[ "${USE_CFP:-}" = "YES" ]]; then
+    "${HOMEgfs}/ush/run_mpmd.sh" "${DATA}/poescript"
+    export err=$?
+  else
+    chmod 755 "${DATA}/poescript"
+    bash +x "${DATA}/poescript" > mpmd.out 2>&1
+    export err=$?
+  fi
+  err_chk
+
+  # We are in a loop over downset, save output from mpmd into nset specific output
+  cat mpmd.out  # so we capture output into the main logfile
+  mv mpmd.out "mpmd_${nset}.out"
+
+  # Concatenate grib files from each processor into a single one
+  # and clean up as you go
+  echo "Concatenating processor-specific grib2 files into a single product file"
+  for (( iproc = 1 ; iproc <= nproc ; iproc++ )); do
+    for grid in "${grids[@]}"; do
+      cat "pgb2${grp}file_${fhr3}_${iproc}_${grid}" >> "pgb2${grp}file_${fhr3}_${grid}"
+      rm "pgb2${grp}file_${fhr3}_${iproc}_${grid}"
+    done
+    # There is no further use of the processor specific tmpfile; delete it
+    rm "${tmpfile}_${iproc}"
+  done
+
+  # Move to COM and index the product grib files
+  for grid in "${grids[@]}"; do
+    prod_dir="COM_ATMOS_GRIB_${grid}"
+    ${NCP} "pgb2${grp}file_${fhr3}_${grid}" "${!prod_dir}/${PREFIX}pgrb2${grp}.${grid}.${fhr3}"
+    ${WGRIB2} -s "pgb2${grp}file_${fhr3}_${grid}" > "${!prod_dir}/${PREFIX}pgrb2${grp}.${grid}.${fhr3}.idx"
+  done
+
+  echo "Finished processing nset = ${nset}"
+
+done  # for (( nset=1 ; nset <= downset ; nset++ ))
+
+#---------------------------------------------------------------
+
+# Section creating sflux grib2 interpolated products
+# Create 1-degree sflux grib2 output
+# move to COM and index it
+if [[ "${FLXGF:-}" == "YES" ]]; then
+
+  # Files needed by ${INTERP_ATMOS_SFLUXSH}
+  FLUX_FILE="${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2"
+
+  input_file="${FLUX_FILE}"
+  output_file_prefix="sflux_${fhr3}"
+  grid_string="1p00"
+  "${INTERP_ATMOS_SFLUXSH}" "${input_file}" "${output_file_prefix}" "${grid_string}"
+  export err=$?; err_chk
+
+  # Move to COM and index the product sflux file
+  IFS=':' read -ra grids <<< "${grid_string}"
+  for grid in "${grids[@]}"; do
+    prod_dir="COM_ATMOS_GRIB_${grid}"
+    ${NCP} "sflux_${fhr3}_${grid}" "${!prod_dir}/${PREFIX}flux.${grid}.${fhr3}"
+    ${WGRIB2} -s "sflux_${fhr3}_${grid}" > "${!prod_dir}/${PREFIX}flux.${grid}.${fhr3}.idx"
+  done
+fi
+
+#---------------------------------------------------------------
+
+# Start sending DBN alerts
+# Everything below this line is for sending files to DBN (SENDDBN=YES)
+if [[ "${SENDDBN:-}" == "YES" ]]; then
+  "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_0P25" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.${fhr3}"
+  "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_0P25_WIDX" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.${fhr3}.idx"
+  if [[ "${RUN}" == "gfs" ]]; then
+    "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P25" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.${fhr3}"
+    "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P25_WIDX" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.${fhr3}.idx"
+    if [[ -s "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.${fhr3}" ]]; then
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_0P5" "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.${fhr3}"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_0P5_WIDX" "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.${fhr3}.idx"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P5" "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.${fhr3}"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_0P5_WIDX" "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.${fhr3}.idx"
+    fi
+    if [[ -s "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}" ]]; then
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_1P0" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2_1P0_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}.idx"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_1P0" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.${fhr3}"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB2B_1P0_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.${fhr3}.idx"
+    fi
+  fi
+
+  if [[ "${fhr3}" == "anl" ]]; then
+
+    "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_MSC_sfcanl" "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}sfc${fhr3}.nc"
+    "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SA" "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}atm${fhr3}.nc"
+
+    "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGA_GB2" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}"
+    "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGA_GB2_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}.idx"
+
+  else  # forecast hours f000, f003, f006, etc.
+
+    if [[ "${RUN}" == "gdas" ]]; then
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB_GB2" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}"
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_PGB_GB2_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.${fhr3}.idx"
+      if (( FORECAST_HOUR % 3 == 0 )); then
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}atm${fhr3}.nc"
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_BF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}sfc${fhr3}.nc"
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SGB_GB2" "${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2"
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SGB_GB2_WIDX" "${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2.idx"
+      fi
+    elif [[ "${RUN}" == "gfs" ]]; then
+
+      "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}atm${fhr3}.nc"
+      if (( FORECAST_HOUR > 0 && FORECAST_HOUR <= 84 )) || (( FORECAST_HOUR == 120 )); then
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_BF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}sfc${fhr3}.nc"
+      fi
+
+      if [[ -s "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2" ]]; then
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SGB_GB2" "${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2"
+        "${DBNROOT}/bin/dbn_alert" MODEL "${RUN^^}_SGB_GB2_WIDX" "${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrb${fhr3}.grib2.idx"
+      fi
+    fi  # end if RUN=gfs
+
+  fi  # end if fhr3=anl
+
+fi  # end if SENDDBN=YES
+
+exit 0
diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh
index f173886a07..2997ac0d25 100755
--- a/scripts/exglobal_atmos_sfcanl.sh
+++ b/scripts/exglobal_atmos_sfcanl.sh
@@ -55,8 +55,8 @@ export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-'-2.'}
 export CYCLVARS=${CYCLVARS:-""}
 export FHOUR=${FHOUR:-0}
 export DELTSFC=${DELTSFC:-6}
-export FIXgsm=${FIXgsm:-${HOMEgfs}/fix/am}
-export FIXfv3=${FIXfv3:-${HOMEgfs}/fix/orog}
+export FIXam=${FIXam:-${HOMEgfs}/fix/am}
+export FIXorog=${FIXorog:-${HOMEgfs}/fix/orog}
 
 # FV3 specific info (required for global_cycle)
 export CASE=${CASE:-"C384"}
@@ -176,8 +176,8 @@ if [[ ${DOIAU} = "YES" ]]; then
             "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc"
         ${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}"
         ${NLN} "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}"
-        ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
-        ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
+        ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
+        ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
     done
 
     export APRUNCY=${APRUN_CYCLE}
@@ -199,8 +199,8 @@ for n in $(seq 1 ${ntiles}); do
     fi
     ${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}"
     ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}"
-    ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
-    ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
+    ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}"
+    ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}"
 done
 
 export APRUNCY=${APRUN_CYCLE}
diff --git a/scripts/exglobal_atmos_upp.py b/scripts/exglobal_atmos_upp.py
new file mode 100755
index 0000000000..6cdbc1bc51
--- /dev/null
+++ b/scripts/exglobal_atmos_upp.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+
+import os
+
+from wxflow import AttrDict, Logger, logit, cast_strdict_as_dtypedict
+from pygfs.task.upp import UPP
+
+# initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+@logit(logger)
+def main():
+
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the UPP object
+    upp = UPP(config)
+
+    # Pull out all the configuration keys needed to run the rest of UPP steps
+    keys = ['HOMEgfs', 'DATA', 'current_cycle', 'RUN', 'NET',
+            'COM_ATMOS_ANALYSIS', 'COM_ATMOS_HISTORY', 'COM_ATMOS_MASTER',
+            'upp_run',
+            'APRUN_UPP',
+            'forecast_hour', 'valid_datetime',
+            'atmos_filename', 'flux_filename']
+    upp_dict = AttrDict()
+    for key in keys:
+        upp_dict[key] = upp.task_config[key]
+
+    # Get the fully parsed upp.yaml file for the current cycle
+    upp_yaml = upp.task_config.upp_yaml
+
+    # Initialize the DATA/ directory; copy static data
+    upp.initialize(upp_yaml)
+
+    # Configure DATA/ directory for execution; prepare namelist etc.
+    upp.configure(upp_dict, upp_yaml)
+
+    # Run the UPP and index the master grib2 files
+    upp.execute(upp_dict.DATA, upp_dict.APRUN_UPP, upp_dict.forecast_hour)
+
+    # Copy processed output from execute
+    upp.finalize(upp_dict.upp_run, upp_yaml)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/exglobal_atmos_vminmon.sh b/scripts/exglobal_atmos_vminmon.sh
new file mode 100755
index 0000000000..a4453dcf1a
--- /dev/null
+++ b/scripts/exglobal_atmos_vminmon.sh
@@ -0,0 +1,76 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+################################################################################
+####  UNIX Script Documentation Block
+#                      .                                             .
+# Script name:         exglobal_atmos_vminmon.sh
+# Script description:  Runs data extract/validation for GSI normalization diag data
+#
+# Author:        Ed Safford       Org: NP23         Date: 2015-04-10
+#
+# Abstract: This script runs the data extract/validation portion of the
+#           MinMon package.
+#
+# Condition codes
+#       0 - no problem encountered
+#      >0 - some problem encountered
+#
+################################################################################
+
+data_available=0
+
+if [[ -s ${gsistat} ]]; then
+
+   data_available=1
+
+   #-----------------------------------------------------------------------
+   #  Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory
+   #  It's ok if it doesn't exist; we'll create a new one if needed.
+   #
+   #  Note:  The logic below is to accommodate two different data storage
+   #  methods.  Some parallels (and formerly ops) dump all MinMon data for
+   #  a given day in the same directory (if condition).  Ops now separates
+   #  data into ${cyc} subdirectories (elif condition).
+   #-----------------------------------------------------------------------
+   if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then
+      ${NCP} "${M_TANKverf}/gnorm_data.txt" gnorm_data.txt
+   elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then
+      ${NCP} "${M_TANKverfM1}/gnorm_data.txt" gnorm_data.txt
+   fi
+
+
+   #------------------------------------------------------------------
+   #   Run the child scripts.
+   #------------------------------------------------------------------
+   "${USHgfs}/minmon_xtrct_costs.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+   rc_costs=$?
+   echo "rc_costs = ${rc_costs}"
+
+   "${USHgfs}/minmon_xtrct_gnorms.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+   rc_gnorms=$?
+   echo "rc_gnorms = ${rc_gnorms}"
+
+   "${USHgfs}/minmon_xtrct_reduct.pl" "${MINMON_SUFFIX}" "${PDY}" "${cyc}" "${gsistat}" dummy
+   rc_reduct=$?
+   echo "rc_reduct = ${rc_reduct}"
+
+fi
+
+#####################################################################
+# Postprocessing
+
+err=0
+if [[ ${data_available} -ne 1 ]]; then
+   err=1
+elif [[ ${rc_costs} -ne 0 ]]; then
+   err=${rc_costs}
+elif [[ ${rc_gnorms} -ne 0 ]]; then
+   err=${rc_gnorms}
+elif [[ ${rc_reduct} -ne 0 ]]; then
+   err=${rc_reduct}
+fi
+
+exit "${err}"
+
diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh
new file mode 100755
index 0000000000..5d7c0a9788
--- /dev/null
+++ b/scripts/exglobal_cleanup.sh
@@ -0,0 +1,106 @@
+#! /usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+###############################################################
+# Clean up previous cycles; various depths
+# PRIOR CYCLE: Leave the prior cycle alone
+# shellcheck disable=SC2153
+GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${assim_freq} hours")
+# PREVIOUS to the PRIOR CYCLE
+GDATE=$(date --utc +%Y%m%d%H -d "${GDATE:0:8} ${GDATE:8:2} -${assim_freq} hours")
+
+# Remove the TMPDIR directory
+# TODO Only prepbufr is currently using this directory, and all jobs should be
+# cleaning up after themselves anyway
+COMIN="${DATAROOT}/${GDATE}"
+[[ -d ${COMIN} ]] && rm -rf "${COMIN}"
+
+if [[ "${CLEANUP_COM:-YES}" == NO ]] ; then
+  exit 0
+fi
+
+# Step back every assim_freq hours and remove old rotating directories
+# for successful cycles (defaults from 24h to 120h).
+# Retain files needed by Fit2Obs
+last_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDEND:-24} hours" )
+first_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours")
+last_rtofs=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDRTOFS:-48} hours")
+function remove_files() {
+  local directory=$1
+  shift
+  if [[ ! -d ${directory} ]]; then
+    echo "No directory ${directory} to remove files from, skipping"
+    return
+  fi
+  local find_exclude_string=""
+  for exclude in "$@"; do
+    find_exclude_string+=" -name ${exclude} -or"
+  done
+  # Chop off any trailing -or
+  find_exclude_string="${find_exclude_string[*]/%-or}"
+  # Remove all regular files that do not match
+  # shellcheck disable=SC2086
+  find "${directory}" -type f -not \( ${find_exclude_string} \) -delete
+  # Remove all symlinks that do not match
+  # shellcheck disable=SC2086
+  find "${directory}" -type l -not \( ${find_exclude_string} \) -delete
+  # Remove any empty directories
+  find "${directory}" -type d -empty -delete
+}
+
+for (( current_date=first_date; current_date <= last_date; \
+  current_date=$(date --utc +%Y%m%d%H -d "${current_date:0:8} ${current_date:8:2} +${assim_freq} hours") )); do
+  current_PDY="${current_date:0:8}"
+  current_cyc="${current_date:8:2}"
+  rtofs_dir="${ROTDIR}/rtofs.${current_PDY}"
+  rocotolog="${EXPDIR}/logs/${current_date}.log"
+  if [[ -f "${rocotolog}" ]]; then
+    # TODO: This needs to be revamped to not look at the rocoto log.
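+    # For reference, the final line of a successfully completed cycle's rocoto
+    # log is expected to contain "This cycle is complete: Success", which is
+    # what the tail/regex test below keys on.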
+ # shellcheck disable=SC2312 + if [[ $(tail -n 1 "${rocotolog}") =~ "This cycle is complete: Success" ]]; then + YMD="${current_PDY}" HH="${current_cyc}" generate_com COM_TOP + if [[ -d "${COM_TOP}" ]]; then + IFS=", " read -r -a exclude_list <<< "${exclude_string:-}" + remove_files "${COM_TOP}" "${exclude_list[@]:-}" + fi + if [[ -d "${rtofs_dir}" ]] && (( current_date < last_rtofs )); then rm -rf "${rtofs_dir}" ; fi + fi + fi + + # Remove mdl gfsmos directory + if [[ "${RUN}" == "gfs" ]]; then + mos_dir="${ROTDIR}/gfsmos.${current_PDY}" + if [[ -d "${mos_dir}" ]] && (( current_date < CDATE_MOS )); then rm -rf "${mos_dir}" ; fi + fi +done + +# Remove archived gaussian files used for Fit2Obs in $VFYARC that are +# $FHMAX_FITS plus a delta before $CDATE. Touch existing archived +# gaussian files to prevent the files from being removed by automatic +# scrubber present on some machines. + +if [[ "${RUN}" == "gfs" ]]; then + fhmax=$((FHMAX_FITS + 36)) + RDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${fhmax} hours") + verify_dir="${ROTDIR}/vrfyarch/${RUN}.${RDATE:0:8}" + [[ -d ${verify_dir} ]] && rm -rf "${verify_dir}" + + touch_date=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${FHMAX_FITS} hours") + while (( touch_date < "${PDY}${cyc}" )); do + touch_PDY="${touch_date:0:8}" + touch_cyc="${touch_date:8:2}" + touch_dir="${ROTDIR}/vrfyarch/${RUN}.${touch_PDY}/${touch_cyc}" + [[ -d ${touch_dir} ]] && touch "${touch_dir}"/* + touch_date=$(date --utc +%Y%m%d%H -d "${touch_PDY} ${touch_cyc} +6 hours") + done +fi + +# Remove $RUN.$rPDY for the older of GDATE or RDATE +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${RMOLDSTD:-120} hours") +RDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} -${FHMAX_GFS} hours") +if (( GDATE < RDATE )); then + RDATE=${GDATE} +fi +deletion_target="${ROTDIR}/${RUN}.${RDATE:0:8}" +if [[ -d ${deletion_target} ]]; then rm -rf "${deletion_target}"; fi diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index 3aa1093fad..3836643afc 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -35,7 +35,7 @@ export NMV=${NMV:-"/bin/mv"} export NLN=${NLN:-"/bin/ln -sf"} export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} -export CATEXEC=${CATEXEC:-$ncdiag_ROOT/bin/ncdiag_cat_serial.x} +export CATEXEC=${CATEXEC:-${ncdiag_ROOT:-${gsi_ncdiag_ROOT}}/bin/ncdiag_cat_serial.x} COMPRESS=${COMPRESS:-gzip} UNCOMPRESS=${UNCOMPRESS:-gunzip} APRUNCFP=${APRUNCFP:-""} @@ -103,7 +103,7 @@ if [ $GENDIAG = "YES" ] ; then diagtype[0]="conv conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_tcp conv_uv conv_spd" diagtype[1]="pcp_ssmi_dmsp pcp_tmi_trmm" diagtype[2]="sbuv2_n16 sbuv2_n17 sbuv2_n18 sbuv2_n19 gome_metop-a gome_metop-b omi_aura mls30_aura ompsnp_npp ompstc8_npp ompstc8_n20 ompsnp_n20 ompstc8_n21 ompsnp_n21 ompslp_npp gome_metop-c" - diagtype[3]="hirs2_n14 msu_n14 sndr_g08 sndr_g11 sndr_g12 sndr_g13 sndr_g08_prep sndr_g11_prep sndr_g12_prep sndr_g13_prep sndrd1_g11 sndrd2_g11 sndrd3_g11 sndrd4_g11 sndrd1_g12 sndrd2_g12 sndrd3_g12 sndrd4_g12 sndrd1_g13 sndrd2_g13 sndrd3_g13 sndrd4_g13 sndrd1_g14 sndrd2_g14 sndrd3_g14 sndrd4_g14 sndrd1_g15 sndrd2_g15 sndrd3_g15 sndrd4_g15 hirs3_n15 hirs3_n16 hirs3_n17 amsua_n15 amsua_n16 amsua_n17 amsub_n15 amsub_n16 amsub_n17 hsb_aqua airs_aqua amsua_aqua imgr_g08 imgr_g11 imgr_g12 imgr_g14 imgr_g15 ssmi_f13 ssmi_f15 hirs4_n18 hirs4_metop-a amsua_n18 amsua_metop-a mhs_n18 mhs_metop-a amsre_low_aqua amsre_mid_aqua amsre_hig_aqua ssmis_f16 ssmis_f17 ssmis_f18 ssmis_f19 
ssmis_f20 iasi_metop-a hirs4_n19 amsua_n19 mhs_n19 seviri_m08 seviri_m09 seviri_m10 seviri_m11 cris_npp cris-fsr_npp cris-fsr_n20 atms_npp atms_n20 hirs4_metop-b amsua_metop-b mhs_metop-b iasi_metop-b avhrr_metop-b avhrr_n18 avhrr_n19 avhrr_metop-a amsr2_gcom-w1 gmi_gpm saphir_meghat ahi_himawari8 abi_g16 abi_g17 amsua_metop-c mhs_metop-c iasi_metop-c avhrr_metop-c viirs-m_npp viirs-m_j1 abi_g18 ahi_himawari9 viirs-m_j2 cris-fsr_n21 atms_n21" + diagtype[3]="msu_n14 sndr_g08 sndr_g11 sndr_g12 sndr_g13 sndr_g08_prep sndr_g11_prep sndr_g12_prep sndr_g13_prep sndrd1_g11 sndrd2_g11 sndrd3_g11 sndrd4_g11 sndrd1_g12 sndrd2_g12 sndrd3_g12 sndrd4_g12 sndrd1_g13 sndrd2_g13 sndrd3_g13 sndrd4_g13 sndrd1_g14 sndrd2_g14 sndrd3_g14 sndrd4_g14 sndrd1_g15 sndrd2_g15 sndrd3_g15 sndrd4_g15 amsua_n15 amsua_n16 amsua_n17 amsub_n15 amsub_n16 amsub_n17 hsb_aqua airs_aqua amsua_aqua imgr_g08 imgr_g11 imgr_g12 imgr_g14 imgr_g15 ssmi_f13 ssmi_f15 amsua_n18 amsua_metop-a mhs_n18 mhs_metop-a amsre_low_aqua amsre_mid_aqua amsre_hig_aqua ssmis_f16 ssmis_f17 ssmis_f18 ssmis_f19 ssmis_f20 iasi_metop-a amsua_n19 mhs_n19 seviri_m08 seviri_m09 seviri_m10 seviri_m11 cris_npp cris-fsr_npp cris-fsr_n20 atms_npp atms_n20 amsua_metop-b mhs_metop-b iasi_metop-b avhrr_metop-b avhrr_n18 avhrr_n19 avhrr_metop-a amsr2_gcom-w1 gmi_gpm saphir_meghat ahi_himawari8 abi_g16 abi_g17 amsua_metop-c mhs_metop-c iasi_metop-c avhrr_metop-c viirs-m_npp viirs-m_j1 abi_g18 ahi_himawari9 viirs-m_j2 cris-fsr_n21 atms_n21" diaglist[0]=listcnv diaglist[1]=listpcp @@ -184,9 +184,9 @@ EOFdiag if [ $count -gt 1 ]; then if [ $USE_CFP = "YES" ]; then echo "$nm $DATA/diag.sh $lrun_subdirs $binary_diag $type $loop $string $CDATE $DIAG_COMPRESS $DIAG_SUFFIX" | tee -a $DATA/mp_diag.sh - if [ ${CFP_MP:-"NO"} = "YES" ]; then - nm=$((nm+1)) - fi + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi else if [ $binary_diag = ".true." ]; then cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} @@ -199,7 +199,7 @@ EOFdiag elif [ $count -eq 1 ]; then cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} if [ $DIAG_COMPRESS = "YES" ]; then - $COMPRESS diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + $COMPRESS diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} fi echo "diag_${type}_${string}.${CDATE}*" >> ${diaglist[n]} numfile[n]=$(expr ${numfile[n]} + 1) diff --git a/scripts/exglobal_forecast.py b/scripts/exglobal_forecast.py index 2b21934bfa..c2ea9495fc 100755 --- a/scripts/exglobal_forecast.py +++ b/scripts/exglobal_forecast.py @@ -2,9 +2,7 @@ import os -from pygw.logger import Logger, logit -from pygw.yaml_file import save_as_yaml -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow import Logger, logit, save_as_yaml, cast_strdict_as_dtypedict from pygfs.task.gfs_forecast import GFSForecast # initialize root logger diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh index d86691d5ec..c629173879 100755 --- a/scripts/exglobal_forecast.sh +++ b/scripts/exglobal_forecast.sh @@ -2,7 +2,7 @@ ################################################################################ ## UNIX Script Documentation Block -## Script name: exglobal_fcst_nemsfv3gfs.sh +## Script name: exglobal_forecast.sh ## Script description: Runs a global FV3GFS model forecast ## ## Author: Fanglin Yang Organization: NCEP/EMC Date: 2016-11-15 @@ -38,19 +38,19 @@ ## Restart files: ## ## Fix files: -## 1. computing grid, $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc -## 2. 
orography data, $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc
-## 3. mosaic data, $FIXfv3/$CASE/${CASE}_mosaic.nc
-## 4. Global O3 data, $FIX_AM/${O3FORC}
-## 5. Global H2O data, $FIX_AM/${H2OFORC}
-## 6. Global solar constant data, $FIX_AM/global_solarconstant_noaa_an.txt
-## 7. Global surface emissivity, $FIX_AM/global_sfc_emissivity_idx.txt
-## 8. Global CO2 historical data, $FIX_AM/global_co2historicaldata_glob.txt
-## 8. Global CO2 monthly data, $FIX_AM/co2monthlycyc.txt
-## 10. Additional global CO2 data, $FIX_AM/fix_co2_proj/global_co2historicaldata
+## 1. computing grid, $FIXorog/$CASE/${CASE}_grid.tile${n}.nc
+## 2. orography data, $FIXorog/$CASE/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc
+## 3. mosaic data, $FIXorog/$CASE/${CASE}_mosaic.nc
+## 4. Global O3 data, $FIXam/${O3FORC}
+## 5. Global H2O data, $FIXam/${H2OFORC}
+## 6. Global solar constant data, $FIXam/global_solarconstant_noaa_an.txt
+## 7. Global surface emissivity, $FIXam/global_sfc_emissivity_idx.txt
+## 8. Global CO2 historical data, $FIXam/global_co2historicaldata_glob.txt
+## 9. Global CO2 monthly data, $FIXam/co2monthlycyc.txt
+## 10. Additional global CO2 data, $FIXam/fix_co2_proj/global_co2historicaldata
 ## 11. Climatological aerosol global distribution
-##     $FIX_AM/global_climaeropac_global.txt
-## 12. Monthly volcanic forcing $FIX_AM/global_volcanic_aerosols_YYYY-YYYY.txt
+##     $FIXam/global_climaeropac_global.txt
+## 12. Monthly volcanic forcing $FIXam/global_volcanic_aerosols_YYYY-YYYY.txt
 ##
 ## Data output (location, name)
 ## If quilting=true and output grid is gaussian grid:
@@ -70,7 +70,7 @@
 ##
 ## Namelist input, in RUNDIR,
 ## 1. diag_table
-## 2. nems.configure
+## 2. ufs.configure
 ## 3. model_configure
 ## 4. input.nml
 #######################
@@ -79,20 +79,12 @@
 
 source "${HOMEgfs}/ush/preamble.sh"
 
-SCRIPTDIR="${HOMEgfs}/ush"
-echo "MAIN: environment loaded for $machine platform,Current Script locates in $SCRIPTDIR."
-
 # include all subroutines. Executions later.
-source $SCRIPTDIR/cplvalidate.sh  # validation of cpl*
-source $SCRIPTDIR/forecast_predet.sh  # include functions for variable definition
-source $SCRIPTDIR/forecast_det.sh  # include functions for run type determination
-source $SCRIPTDIR/forecast_postdet.sh  # include functions for variables after run type determination
-source $SCRIPTDIR/nems_configure.sh  # include functions for nems_configure processing
-source $SCRIPTDIR/parsing_model_configure_FV3.sh
-source $SCRIPTDIR/parsing_model_configure_DATM.sh
-
-# Compset string. For nems.configure.* template selection.
Default ATM only -confignamevarfornems=${confignamevarfornems:-'atm'} +source "${HOMEgfs}/ush/forecast_predet.sh" # include functions for variable definition +source "${HOMEgfs}/ush/forecast_det.sh" # include functions for run type determination +source "${HOMEgfs}/ush/forecast_postdet.sh" # include functions for variables after run type determination +source "${HOMEgfs}/ush/ufs_configure.sh" # include functions for ufs.configure processing +source "${HOMEgfs}/ush/parsing_model_configure_FV3.sh" # Coupling control switches, for coupling purpose, off by default cpl=${cpl:-.false.} @@ -105,95 +97,69 @@ OCNTIM=${OCNTIM:-1800} DELTIM=${DELTIM:-450} ICETIM=${DELTIM} -CPL_SLOW=${CPL_SLOW:-$OCNTIM} -CPL_FAST=${CPL_FAST:-$ICETIM} - -echo "MAIN: $confignamevarfornems selected" -echo "MAIN: Forecast script started for $confignamevarfornems on $machine" - -echo "MAIN: Validating $confignamevarfornems with cpl switches" -cplvalidate -echo "MAIN: $confignamevarfornems validated, continue" -# Validate the consistency between $confignamevarfornems and $CPL switches - -echo "MAIN: Loading variables before determination of run type" +CPL_SLOW=${CPL_SLOW:-${OCNTIM}} +CPL_FAST=${CPL_FAST:-${ICETIM}} +echo "MAIN: Loading common variables before determination of run type" common_predet -echo $RUN -case $RUN in - 'data') DATM_predet;; - *gfs | *gdas | 'gefs') FV3_GFS_predet;; -esac -[[ $cplflx = .true. ]] && MOM6_predet -[[ $cplwav = .true. ]] && WW3_predet -[[ $cplice = .true. ]] && CICE_predet - -case $RUN in - *gfs | *gdas | 'gefs') FV3_GFS_det;; -esac #no run type determination for data atmosphere -[[ $cplflx = .true. ]] && MOM6_det -[[ $cplwav = .true. ]] && WW3_det -[[ $cplice = .true. ]] && CICE_det - +echo "MAIN: Loading variables before determination of run type" +FV3_predet +[[ ${cplflx} = .true. ]] && MOM6_predet +[[ ${cplwav} = .true. ]] && WW3_predet +[[ ${cplice} = .true. ]] && CICE_predet +echo "MAIN: Variables before determination of run type loaded" + +echo "MAIN: Determining run type" +FV3_det +[[ ${cplflx} = .true. ]] && MOM6_det +[[ ${cplwav} = .true. ]] && WW3_det +[[ ${cplice} = .true. ]] && CICE_det echo "MAIN: RUN Type Determined" echo "MAIN: Post-determination set up of run type" -echo $RUN -case $RUN in - 'data') DATM_postdet;; - *gfs | *gdas | 'gefs') FV3_GFS_postdet;; -esac #no post determination set up for data atmosphere -[[ $cplflx = .true. ]] && MOM6_postdet -[[ $cplwav = .true. ]] && WW3_postdet -[[ $cplice = .true. ]] && CICE_postdet -[[ $cplchm = .true. ]] && GOCART_postdet +FV3_postdet +[[ ${cplflx} = .true. ]] && MOM6_postdet +[[ ${cplwav} = .true. ]] && WW3_postdet +[[ ${cplice} = .true. ]] && CICE_postdet +[[ ${cplchm} = .true. ]] && GOCART_postdet echo "MAIN: Post-determination set up of run type finished" -echo "MAIN: Writing name lists and model configuration" -case $RUN in - 'data') DATM_nml;; - *gfs | *gdas | 'gefs') FV3_GFS_nml;; -esac -[[ $cplflx = .true. ]] && MOM6_nml -[[ $cplwav = .true. ]] && WW3_nml -[[ $cplice = .true. ]] && CICE_nml -[[ $cplchm = .true. ]] && GOCART_rc - -case $RUN in - 'data') DATM_model_configure;; - *gfs | *gdas | 'gefs') FV3_model_configure;; -esac +echo "MAIN: Writing namelists and model configuration" +FV3_nml +[[ ${cplflx} = .true. ]] && MOM6_nml +[[ ${cplwav} = .true. ]] && WW3_nml +[[ ${cplice} = .true. ]] && CICE_nml +[[ ${cplchm} = .true. 
]] && GOCART_rc
+FV3_model_configure
 echo "MAIN: Name lists and model configuration written"
 
-echo "MAIN: Writing NEMS Configure file"
-writing_nems_configure
-echo "MAIN: NEMS configured"
+echo "MAIN: Writing UFS Configure file"
+writing_ufs_configure
+echo "MAIN: UFS configured"
 
 #------------------------------------------------------------------
 # run the executable
 
-if [ $esmf_profile ]; then
+if [[ "${esmf_profile:-}" = ".true." ]]; then
   export ESMF_RUNTIME_PROFILE=ON
   export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY
 fi
 
-$NCP $FCSTEXECDIR/$FCSTEXEC $DATA/.
-$APRUN_UFS $DATA/$FCSTEXEC 1>&1 2>&2
+${NCP} "${FCSTEXECDIR}/${FCSTEXEC}" "${DATA}/"
+${APRUN_UFS} "${DATA}/${FCSTEXEC}" 1>&1 2>&2
 
 export ERR=$?
-export err=$ERR
-$ERRSCRIPT || exit $err
-
-case $RUN in
-  'data') data_out_Data_ATM;;
-  *gfs | *gdas | 'gefs') data_out_GFS;;
-esac
-[[ $cplflx = .true. ]] && MOM6_out
-[[ $cplwav = .true. ]] && WW3_out
-[[ $cplice = .true. ]] && CICE_out
-[[ $esmf_profile = .true. ]] && CPL_out
+export err=${ERR}
+${ERRSCRIPT} || exit "${err}"
+
+FV3_out
+[[ ${cplflx} = .true. ]] && MOM6_out
+[[ ${cplwav} = .true. ]] && WW3_out
+[[ ${cplice} = .true. ]] && CICE_out
+[[ ${cplchm} = .true. ]] && GOCART_out
+[[ ${esmf_profile:-} = .true. ]] && CPL_out
 echo "MAIN: Output copied to COMROT"
 
 #------------------------------------------------------------------
 
-exit $err
+exit "${err}"
diff --git a/scripts/exglobal_land_analysis.py b/scripts/exglobal_land_analysis.py
new file mode 100755
index 0000000000..70141475b0
--- /dev/null
+++ b/scripts/exglobal_land_analysis.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# exglobal_land_analysis.py
+# This script creates a LandAnalysis class
+# and runs the initialize, execute and finalize methods
+# for a global Land Snow Depth analysis
+import os
+
+from wxflow import Logger, cast_strdict_as_dtypedict
+from pygfs.task.land_analysis import LandAnalysis
+
+# Initialize root logger
+logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True)
+
+
+if __name__ == '__main__':
+
+    # Take configuration from environment and cast it as python dictionary
+    config = cast_strdict_as_dtypedict(os.environ)
+
+    # Instantiate the land analysis task
+    anl = LandAnalysis(config)
+    anl.initialize()
+    anl.execute()
+    anl.finalize()
diff --git a/scripts/exglobal_prep_land_obs.py b/scripts/exglobal_prep_land_obs.py
index 69d15b95c8..3594771c8a 100755
--- a/scripts/exglobal_prep_land_obs.py
+++ b/scripts/exglobal_prep_land_obs.py
@@ -1,12 +1,11 @@
 #!/usr/bin/env python3
 # exglobal_land_analysis_prepare.py
 # This script creates a LandAnalysis object
-# and runs the prepare_IMS method
-# which perform the pre-processing for IMS data
+# and runs the prepare_GTS and prepare_IMS methods
+# which perform the pre-processing for GTS and IMS data
 import os
 
-from pygw.logger import Logger
-from pygw.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.land_analysis import LandAnalysis
 
@@ -21,4 +20,6 @@
 
     # Instantiate the land prepare task
     LandAnl = LandAnalysis(config)
-    LandAnl.prepare_IMS()
+    LandAnl.prepare_GTS()
+    if f"{LandAnl.runtime_config.cyc}" == '18':
+        LandAnl.prepare_IMS()
diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh
new file mode 100755
index 0000000000..58b37f3114
--- /dev/null
+++ b/scripts/exglobal_stage_ic.sh
@@ -0,0 +1,164 @@
+#!/usr/bin/env bash
+
+source "${HOMEgfs}/ush/preamble.sh"
+
+# Locally scoped variables and functions
+# shellcheck disable=SC2153
+GDATE=$(date --utc -d
"${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H) +gPDY="${GDATE:0:8}" +gcyc="${GDATE:8:2}" + +MEMDIR_ARRAY=() +if [[ "${RUN:-}" = "gefs" ]]; then + # Populate the member_dirs array based on the value of NMEM_ENS + for ((ii = 0; ii <= "${NMEM_ENS:-0}"; ii++)); do + MEMDIR_ARRAY+=("mem$(printf "%03d" "${ii}")") + done +else + MEMDIR_ARRAY+=("") +fi + +# Initialize return code +err=0 + +error_message() { + echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})" +} + +############################################################### +for MEMDIR in "${MEMDIR_ARRAY[@]}"; do + + # Stage atmosphere initial conditions to ROTDIR + if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then + # Stage the FV3 restarts to ROTDIR (warm start) + RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + [[ ! -d "${COM_ATMOS_RESTART_PREV}" ]] && mkdir -p "${COM_ATMOS_RESTART_PREV}" + for ftype in coupler.res fv_core.res.nc; do + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${PDY}.${cyc}0000.${ftype}" + tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + done + for ftype in ca_data fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do + for ((tt = 1; tt <= 6; tt++)); do + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${PDY}.${cyc}0000.${ftype}.tile${tt}.nc" + tgt="${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.${ftype}.tile${tt}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + done + done + else + # Stage the FV3 cold-start initial conditions to ROTDIR + YMD=${PDY} HH=${cyc} generate_com COM_ATMOS_INPUT + [[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}" + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/gfs_ctrl.nc" + tgt="${COM_ATMOS_INPUT}/gfs_ctrl.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + for ftype in gfs_data sfc_data; do + for ((tt = 1; tt <= 6; tt++)); do + src="${BASE_CPLIC}/${CPL_ATMIC:-}/${PDY}${cyc}/${MEMDIR}/atmos/${ftype}.tile${tt}.nc" + tgt="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + done + done + fi + + # Stage ocean initial conditions to ROTDIR (warm start) + if [[ "${DO_OCN:-}" = "YES" ]]; then + RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL + [[ ! -d "${COM_OCEAN_RESTART_PREV}" ]] && mkdir -p "${COM_OCEAN_RESTART_PREV}" + src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res.nc" + tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" + ${NCP} "${src}" "${tgt}" + rc=$? + ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + case "${OCNRES}" in + "500" | "100") + # Nothing more to do for these resolutions + ;; + "025" ) + for nn in $(seq 1 3); do + src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? 
+          ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+          err=$((err + rc))
+        done
+        ;;
+      *)
+        echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}"
+        rc=1
+        err=$((err + rc))
+        ;;
+    esac
+
+    # TODO: Do mediator restarts exist in an ATMW configuration?
+    # TODO: No mediator is presumably involved in an ATMA configuration
+    if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then
+      # Stage the mediator restarts to ROTDIR (warm start/restart the coupled model)
+      RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_MED_RESTART_PREV:COM_MED_RESTART_TMPL
+      [[ ! -d "${COM_MED_RESTART_PREV}" ]] && mkdir -p "${COM_MED_RESTART_PREV}"
+      src="${BASE_CPLIC}/${CPL_MEDIC:-}/${PDY}${cyc}/${MEMDIR}/med/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
+      tgt="${COM_MED_RESTART_PREV}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc"
+      if [[ -f "${src}" ]]; then
+        ${NCP} "${src}" "${tgt}"
+        rc=$?
+        ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+        err=$((err + rc))
+      else
+        echo "WARNING: No mediator restarts available with warm_start=${EXP_WARM_START}"
+      fi
+    fi
+
+  fi
+
+  # Stage ice initial conditions to ROTDIR (warm start)
+  if [[ "${DO_ICE:-}" = "YES" ]]; then
+    RUN=${rCDUMP} YMD=${gPDY} HH=${gcyc} generate_com COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL
+    [[ ! -d "${COM_ICE_RESTART_PREV}" ]] && mkdir -p "${COM_ICE_RESTART_PREV}"
+    src="${BASE_CPLIC}/${CPL_ICEIC:-}/${PDY}${cyc}/${MEMDIR}/ice/${PDY}.${cyc}0000.cice_model.res.nc"
+    tgt="${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc"
+    ${NCP} "${src}" "${tgt}"
+    rc=$?
+    ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+    err=$((err + rc))
+  fi
+
+  # Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc)
+  if [[ "${DO_WAVE:-}" = "YES" ]]; then
+    YMD=${PDY} HH=${cyc} generate_com COM_WAVE_RESTART
+    [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}"
+    for grdID in ${waveGRD}; do  # TODO: check if this is a bash array; if so adjust
+      src="${BASE_CPLIC}/${CPL_WAVIC:-}/${PDY}${cyc}/${MEMDIR}/wave/${PDY}.${cyc}0000.restart.${grdID}"
+      tgt="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}"
+      ${NCP} "${src}" "${tgt}"
+      rc=$?
+      ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+      err=$((err + rc))
+    done
+  fi
+
+done  # for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
+
+###############################################################
+# Check for errors and exit if any of the above failed
+if [[ "${err}" -ne 0 ]]; then
+  echo "FATAL ERROR: Unable to copy ICs from ${BASE_CPLIC} to ${ROTDIR}; ABORT!"
+  exit "${err}"
+fi
+
+##############################################################
+# Exit cleanly
+exit "${err}"
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index eaba2485d3..ccc088acd9 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -16,13 +16,17 @@ function _usage() {
 Builds all of the global-workflow components by calling the individual build
 scripts in sequence.
-Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-v]
+Usage: ${BASH_SOURCE[0]} [-a UFS_app][-g][-h][-j n][-u][-v]
   -a UFS_app:
     Build a specific UFS app instead of the default
-  -c build_config:
-    Selectively build based on the provided config instead of the default config
+  -g:
+    Build GSI
   -h:
-    print this help message and exit
+    Print this help message and exit
+  -j:
+    Specify maximum number of build jobs (n)
+  -u:
+    Build UFS-DA
   -v:
     Execute all build scripts with -v option to turn on verbose where supported
 EOF
@@ -33,25 +37,27 @@ script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
 cd "${script_dir}" || exit 1
 
 _build_ufs_opt=""
-_ops_opt=""
+_build_ufsda="NO"
+_build_gsi="NO"
 _verbose_opt=""
-_partial_opt=""
+_build_job_max=20
 # Reset option counter in case this script is sourced
 OPTIND=1
 
-while getopts ":a:c:hov" option; do
+while getopts ":a:ghj:uv" option; do
   case "${option}" in
     a) _build_ufs_opt+="-a ${OPTARG} ";;
-    c) _partial_opt+="-c ${OPTARG} ";;
+    g) _build_gsi="YES" ;;
    h) _usage;;
-    o) _ops_opt+="-o";;
+    j) _build_job_max="${OPTARG}";;
+    u) _build_ufsda="YES" ;;
    v) _verbose_opt="-v";;
    :)
      echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
-      usage
+      _usage
      ;;
    *)
      echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
-      usage
+      _usage
      ;;
  esac
done
@@ -82,210 +88,170 @@ if [[ -z "${MACHINE_ID}" ]]; then
 fi
 
 #------------------------------------
-# INCLUDE PARTIAL BUILD
+# SOURCE BUILD VERSION FILES
 #------------------------------------
-# Turn off some shellcheck warnings because we want to have
-# variables with multiple arguments.
-# shellcheck disable=SC2086,SC2248
-source ./partial_build.sh ${_verbose_opt} ${_partial_opt}
-# shellcheck disable=
+# TODO: Commented out until components aligned for build
+#source ../versions/build.ver
 
 #------------------------------------
 # Exception Handling Init
 #------------------------------------
 # Disable shellcheck warning about single quotes not being substituted.
 # shellcheck disable=SC2016
-ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
+ERRSCRIPT=${ERRSCRIPT:-'eval [[ $errs = 0 ]]'}
 # shellcheck disable=
-err=0
+errs=0
 
-#------------------------------------
-# build gfs_utils
-#------------------------------------
-if [[ ${Build_gfs_utils} == 'true' ]]; then
-  echo " .... Building gfs_utils .... "
-  # shellcheck disable=SC2086,SC2248
-  ./build_gfs_utils.sh ${_verbose_opt} > "${logs_dir}/build_gfs_utils.log" 2>&1
-  # shellcheck disable=
-  rc=$?
-  if (( rc != 0 )) ; then
-    echo "Fatal error in building gfs_utils."
-    echo "The log file is in ${logs_dir}/build_gfs_utils.log"
-  fi
-  err=$((err + rc))
-fi
+declare -A build_jobs
+declare -A build_opts
 
 #------------------------------------
-# build WW3 pre & post execs
+# Check which builds to do and assign # of build jobs
 #------------------------------------
-if [[ ${Build_ww3_prepost} == "true" ]]; then
-  echo " .... Building WW3 pre and post execs .... "
-  # shellcheck disable=SC2086,SC2248
-  ./build_ww3prepost.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ww3_prepost.log" 2>&1
-  # shellcheck disable=
-  rc=$?
-  if (( rc != 0 )) ; then
-    echo "Fatal error in building WW3 pre/post processing."
-    echo "The log file is in ${logs_dir}/build_ww3_prepost.log"
-  fi
-  err=$((err + rc))
-fi
 
-#------------------------------------
-# build forecast model
-#------------------------------------
-if [[ ${Build_ufs_model} == 'true' ]]; then
-  echo " .... Building forecast model ....
" - # shellcheck disable=SC2086,SC2248 - ./build_ufs.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ufs.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building UFS model." - echo "The log file is in ${logs_dir}/build_ufs.log" - fi - err=$((err + rc)) -fi +# Mandatory builds, unless otherwise specified, for the UFS +big_jobs=0 +build_jobs["ufs"]=8 +big_jobs=$((big_jobs+1)) +build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" -#------------------------------------ -# build GSI and EnKF - optional checkout -#------------------------------------ -if [[ -d gsi_enkf.fd ]]; then - if [[ ${Build_gsi_enkf} == 'true' ]]; then - echo " .... Building gsi and enkf .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_enkf.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_enkf.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi_enkf." - echo "The log file is in ${logs_dir}/build_gsi_enkf.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi and enkf .... " -fi +build_jobs["upp"]=6 # The UPP is hardcoded to use 6 cores +build_opts["upp"]="" -#------------------------------------ -# build gsi utilities -#------------------------------------ -if [[ -d gsi_utils.fd ]]; then - if [[ ${Build_gsi_utils} == 'true' ]]; then - echo " .... Building gsi utilities .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_utils.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi utilities." - echo "The log file is in ${logs_dir}/build_gsi_utils.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi utilities .... " -fi +build_jobs["ufs_utils"]=3 +build_opts["ufs_utils"]="${_verbose_opt}" -#------------------------------------ -# build gdas - optional checkout -#------------------------------------ -if [[ -d gdas.cd ]]; then - if [[ ${Build_gdas} == 'true' ]]; then - echo " .... Building GDASApp .... " - # shellcheck disable=SC2086,SC2248 - ./build_gdas.sh ${_verbose_opt} > "${logs_dir}/build_gdas.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building GDASApp." - echo "The log file is in ${logs_dir}/build_gdas.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building GDASApp .... " -fi +build_jobs["gfs_utils"]=1 +build_opts["gfs_utils"]="${_verbose_opt}" -#------------------------------------ -# build gsi monitor -#------------------------------------ -if [[ -d gsi_monitor.fd ]]; then - if [[ ${Build_gsi_monitor} == 'true' ]]; then - echo " .... Building gsi monitor .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_monitor.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_monitor.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi monitor." - echo "The log file is in ${logs_dir}/build_gsi_monitor.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi monitor .... " -fi +build_jobs["ww3prepost"]=3 +build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" -#------------------------------------ -# build UPP -#------------------------------------ -if [[ ${Build_upp} == 'true' ]]; then - echo " .... Building UPP .... " - # shellcheck disable=SC2086,SC2248 - ./build_upp.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_upp.log" 2>&1 - # shellcheck disable= - rc=$? 
-  if (( rc != 0 )) ; then
-    echo "Fatal error in building UPP."
-    echo "The log file is in ${logs_dir}/build_upp.log"
-  fi
-  err=$((err + rc))
+# Optional DA builds
+if [[ "${_build_ufsda}" == "YES" ]]; then
+  build_jobs["gdas"]=8
+  big_jobs=$((big_jobs+1))
+  build_opts["gdas"]="${_verbose_opt}"
 fi
-
-#------------------------------------
-# build ufs_utils
-#------------------------------------
-if [[ ${Build_ufs_utils} == 'true' ]]; then
-  echo " .... Building ufs_utils .... "
-  # shellcheck disable=SC2086,SC2248
-  ./build_ufs_utils.sh ${_verbose_opt} > "${logs_dir}/build_ufs_utils.log" 2>&1
-  # shellcheck disable=
-  rc=$?
-  if (( rc != 0 )) ; then
-    echo "Fatal error in building ufs_utils."
-    echo "The log file is in ${logs_dir}/build_ufs_utils.log"
-  fi
-  err=$((err + rc))
+if [[ "${_build_gsi}" == "YES" ]]; then
+  build_jobs["gsi_enkf"]=8
+  big_jobs=$((big_jobs+1))
+  build_opts["gsi_enkf"]="${_verbose_opt}"
+fi
+if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then
+  build_jobs["gsi_utils"]=2
+  build_opts["gsi_utils"]="${_verbose_opt}"
+  build_jobs["gsi_monitor"]=1
+  build_opts["gsi_monitor"]="${_verbose_opt}"
 fi

-#------------------------------------
-# build gfs_wafs - optional checkout
-#------------------------------------
-if [[ -d gfs_wafs.fd ]]; then
-  if [[ ${Build_gfs_wafs} == 'true' ]]; then
-    echo " .... Building gfs_wafs .... "
-    # shellcheck disable=SC2086,SC2248
-    ./build_gfs_wafs.sh ${_verbose_opt} > "${logs_dir}/build_gfs_wafs.log" 2>&1
-    # shellcheck disable=
-    rc=$?
-    if (( rc != 0 )) ; then
-      echo "Fatal error in building gfs_wafs."
-      echo "The log file is in ${logs_dir}/build_gfs_wafs.log"
-    fi
-    err=$((err + rc))
-  fi
+# Go through all builds and adjust CPU counts down if necessary
+requested_cpus=0
+build_list=""
+for build in "${!build_jobs[@]}"; do
+  if [[ -z "${build_list}" ]]; then
+    build_list="${build}"
+  else
+    build_list="${build_list}, ${build}"
+  fi
+  if [[ ${build_jobs[${build}]} -gt ${_build_job_max} ]]; then
+    build_jobs[${build}]=${_build_job_max}
+  fi
+  requested_cpus=$(( requested_cpus + build_jobs[${build}] ))
+done
+
+echo "Building ${build_list}"
+
+# Go through all builds and adjust CPU counts up if possible
+if [[ ${requested_cpus} -lt ${_build_job_max} && ${big_jobs} -gt 0 ]]; then
+  # Add cores to the gdas, ufs, and gsi build jobs
+  extra_cores=$(( _build_job_max - requested_cpus ))
+  extra_cores=$(( extra_cores / big_jobs ))
+  for build in "${!build_jobs[@]}"; do
+    if [[ "${build}" == "gdas" || "${build}" == "ufs" || "${build}" == "gsi_enkf" ]]; then
+      build_jobs[${build}]=$(( build_jobs[${build}] + extra_cores ))
+    fi
+  done
 fi

+procs_in_use=0
+declare -A build_ids
+
+builds_started=0
+# Now start looping through all of the jobs until everything is done
+while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
+  for build in "${!build_jobs[@]}"; do
+    # Has the job started?
+    if [[ -n "${build_jobs[${build}]+0}" && -z "${build_ids[${build}]+0}" ]]; then
+      # Do we have enough processors to run it?
+      if [[ ${_build_job_max} -ge $(( build_jobs[build] + procs_in_use )) ]]; then
+        if [[ "${build}" != "upp" ]]; then
+          "./build_${build}.sh" -j "${build_jobs[${build}]}" "${build_opts[${build}]:-}" > \
+            "${logs_dir}/build_${build}.log" 2>&1 &
+        else
+          "./build_${build}.sh" "${build_opts[${build}]}" > \
+            "${logs_dir}/build_${build}.log" 2>&1 &
+        fi
+        build_ids["${build}"]=$!
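+        # "$!" holds the PID of the build script just launched in the
+        # background; the polling loops below run "ps -p" on these saved
+        # PIDs to detect completion and free processor slots.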
+ echo "Starting build_${build}.sh" + procs_in_use=$(( procs_in_use + build_jobs[${build}] )) + fi + fi + done + + # Check if all builds have completed + # Also recalculate how many processors are in use to account for completed builds + builds_started=0 + procs_in_use=0 + for build in "${!build_jobs[@]}"; do + # Has the build started? + if [[ -n "${build_ids[${build}]+0}" ]]; then + builds_started=$(( builds_started + 1)) + # Calculate how many processors are in use + # Is the build still running? + if ps -p "${build_ids[${build}]}" > /dev/null; then + procs_in_use=$(( procs_in_use + build_jobs["${build}"] )) + fi + fi + done + + sleep 5s +done + +# Wait for all jobs to complete and check return statuses +while [[ ${#build_jobs[@]} -gt 0 ]]; do + for build in "${!build_jobs[@]}"; do + # Test if each job is complete and if so, notify and remove from the array + if [[ -n "${build_ids[${build}]+0}" ]]; then + if ! ps -p "${build_ids[${build}]}" > /dev/null; then + wait "${build_ids[${build}]}" + build_stat=$? + errs=$((errs+build_stat)) + if [[ ${build_stat} == 0 ]]; then + echo "build_${build}.sh completed successfully!" + else + echo "build_${build}.sh failed with status ${build_stat}!" + fi + + # Remove the completed build from the list of PIDs + unset 'build_ids[${build}]' + unset 'build_jobs[${build}]' + fi + fi + done + + sleep 5s +done + #------------------------------------ # Exception Handling #------------------------------------ -if (( err != 0 )); then +if (( errs != 0 )); then cat << EOF BUILD ERROR: One or more components failed to build Check the associated build log(s) for details. EOF - ${ERRSCRIPT} || exit "${err}" + ${ERRSCRIPT} || exit "${errs}" fi echo;echo " .... Build system finished .... " diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh index 39cf5ac9a7..b1a17c33dd 100755 --- a/sorc/build_gdas.sh +++ b/sorc/build_gdas.sh @@ -2,9 +2,10 @@ set -eux OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; + j) export BUILD_JOBS=${OPTARG};; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh index 2a7a611239..09bd4a9656 100755 --- a/sorc/build_gfs_utils.sh +++ b/sorc/build_gfs_utils.sh @@ -5,11 +5,13 @@ function usage() { cat << EOF Builds the GFS utility programs. -Usage: ${BASH_SOURCE[0]} [-d][-h][-v] +Usage: ${BASH_SOURCE[0]} [-d][-h][-j n][-v] -d: Build with debug options -h: Print this help message and exit + -j: + Build with n build jobs -v: Turn on verbose output EOF @@ -19,10 +21,11 @@ EOF cwd=$(pwd) OPTIND=1 -while getopts ":dvh" option; do +while getopts ":j:dvh" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; v) export BUILD_VERBOSE="YES";; + j) export BUILD_JOBS="${OPTARG}";; h) usage ;; @@ -40,6 +43,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ "${cwd}/gfs_utils.fd/ush/build.sh" exit diff --git a/sorc/build_gfs_wafs.sh b/sorc/build_gfs_wafs.sh deleted file mode 100755 index cbbf6ec950..0000000000 --- a/sorc/build_gfs_wafs.sh +++ /dev/null @@ -1,15 +0,0 @@ -#! /usr/bin/env bash -set -eux - -script_dir=$(dirname "${BASH_SOURCE[0]}") -cd "${script_dir}" || exit 1 - -# Check final exec folder exists -if [[ ! 
-d "../exec" ]]; then - mkdir ../exec -fi - -cd gfs_wafs.fd/sorc -./build_wafs_cmake.sh - -exit diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh index 671c3d6205..9ba278e3ec 100755 --- a/sorc/build_gsi_enkf.sh +++ b/sorc/build_gsi_enkf.sh @@ -2,10 +2,10 @@ set -eux OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -21,6 +21,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ GSI_MODE=GFS \ ENKF_MODE=GFS \ REGRESSION_TESTS=NO \ diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh index ec3645e52f..3de1262aac 100755 --- a/sorc/build_gsi_monitor.sh +++ b/sorc/build_gsi_monitor.sh @@ -4,10 +4,10 @@ set -eux cwd=$(pwd) OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -23,6 +23,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ "${cwd}/gsi_monitor.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh index bcbc110cf6..81eab0f628 100755 --- a/sorc/build_gsi_utils.sh +++ b/sorc/build_gsi_utils.sh @@ -4,10 +4,10 @@ set -eux cwd=$(pwd) OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; # TODO - unused; remove? + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -23,6 +23,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ "${cwd}/gsi_utils.fd/ush/build.sh" diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index d5bcf95d0c..52ec43bd66 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -5,16 +5,13 @@ cwd=$(pwd) # Default settings APP="S2SWA" -CCPP_SUITES="FV3_GFS_v17_p8,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_thompson,FV3_GFS_v17_p8_c3,FV3_GFS_v17_p8_c3_mynn" +CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1,FV3_GFS_v17_p8_ugwpv1_mynn" # TODO: does the g-w need to build with all these CCPP_SUITES? 
-export RT_COMPILER="intel" -source "${cwd}/ufs_model.fd/tests/detect_machine.sh" -source "${cwd}/ufs_model.fd/tests/module-setup.sh" - -while getopts ":da:v" option; do +while getopts ":da:j:v" option; do case "${option}" in d) BUILD_TYPE="DEBUG";; - a) APP="${OPTARG}" ;; + a) APP="${OPTARG}";; + j) BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -27,17 +24,39 @@ done cd "${cwd}/ufs_model.fd" +source "./tests/detect_machine.sh" +source "./tests/module-setup.sh" + MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}" [[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON" COMPILE_NR=0 CLEAN_BEFORE=YES CLEAN_AFTER=NO -./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${RT_COMPILER}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" -## for UFS versions before 31May23 -## ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" -mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x -mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua -cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua +if [[ "${MACHINE_ID}" != "noaacloud" ]]; then + ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" + mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x + mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua + cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua +else + + if [[ "${PW_CSP:-}" == "aws" ]]; then + set +x + # TODO: This will need to be addressed further when the EPIC stacks are available/supported. + module use /contrib/spack-stack/envs/ufswm/install/modulefiles/Core + module load stack-intel + module load stack-intel-oneapi-mpi + module load ufs-weather-model-env/1.0.0 + # TODO: It is still uncertain why this is the only module that is + # missing; check the spack build as this needed to be added manually. + module load w3emc/2.9.2 # TODO: This has similar issues for the EPIC stack. + module list + set -x + fi + + export CMAKE_FLAGS="${MAKE_OPT}" + BUILD_JOBS=${BUILD_JOBS:-8} ./build.sh + mv "${cwd}/ufs_model.fd/build/ufs_model" "${cwd}/ufs_model.fd/tests/ufs_model.x" +fi exit 0 diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh index 5e2edf0737..e78ca3c180 100755 --- a/sorc/build_ufs_utils.sh +++ b/sorc/build_ufs_utils.sh @@ -1,10 +1,30 @@ #! 
/usr/bin/env bash
 set -eux

+OPTIND=1
+while getopts ":j:v" option; do
+  case "${option}" in
+    j) export BUILD_JOBS="${OPTARG}";;
+    v) export BUILD_VERBOSE="YES";;
+    :)
+      echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
+      exit 1
+      ;;
+    *)
+      echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
+      exit 1
+      ;;
+  esac
+done
+shift $((OPTIND-1))
+
 script_dir=$(dirname "${BASH_SOURCE[0]}")
 cd "${script_dir}/ufs_utils.fd" || exit 1

-CMAKE_OPTS="-DGFS=ON" ./build_all.sh
+CMAKE_OPTS="-DGFS=ON" \
+BUILD_JOBS=${BUILD_JOBS:-8} \
+BUILD_VERBOSE=${BUILD_VERBOSE:-} \
+./build_all.sh

 exit
diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh
index 67460487a6..a55e96ebc8 100755
--- a/sorc/build_upp.sh
+++ b/sorc/build_upp.sh
@@ -6,18 +6,15 @@ cd "${script_dir}" || exit 1

 OPTIND=1
 _opts=""
-while getopts ":dov" option; do
+while getopts ":dv" option; do
   case "${option}" in
-    d) export BUILD_TYPE="DEBUG";;
-    o) _opts+="-g ";;
+    d) _opts+="-d ";;
     v) _opts+="-v ";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
-      usage
       ;;
     *)
       echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
-      usage
       ;;
   esac
 done
diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh
index bf78e7b2ac..919afaacb3 100755
--- a/sorc/build_ww3prepost.sh
+++ b/sorc/build_ww3prepost.sh
@@ -4,16 +4,13 @@ set -x
 script_dir=$(dirname "${BASH_SOURCE[0]}")
 cd "${script_dir}" || exit 1

-export RT_COMPILER="intel"
-source "${script_dir}/ufs_model.fd/tests/detect_machine.sh"
-source "${script_dir}/ufs_model.fd/tests/module-setup.sh"
-
 # Default settings
 APP="S2SWA"

-while getopts "a:v" option; do
+while getopts ":j:a:v" option; do
   case "${option}" in
-    a) APP="${OPTARG}" ;;
+    a) APP="${OPTARG}";;
+    j) BUILD_JOBS="${OPTARG}";;
     v) export BUILD_VERBOSE="YES";;
     :)
       echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
@@ -27,12 +24,12 @@ while getopts "a:v" option; do
 done

-# Determine which switch to use 
+# Determine which switch to use
-if [[ "${APP}" == "ATMW" ]]; then 
+if [[ "${APP}" == "ATMW" ]]; then
   ww3switch="model/esmf/switch"
-else 
+else
   ww3switch="model/bin/switch_meshcap"
-fi 
+fi

 # Check final exec folder exists
@@ -44,11 +41,13 @@ finalexecdir="$( pwd -P )/../exec"

 #Determine machine and load modules
 set +x
+source "${script_dir}/ufs_model.fd/tests/detect_machine.sh"
+source "${script_dir}/ufs_model.fd/tests/module-setup.sh"
 module use "${script_dir}/ufs_model.fd/modulefiles"
-module load "ufs_${MACHINE_ID}"
+module load "ufs_${MACHINE_ID}.intel"
 set -x

-#Set WW3 directory, switch, prep and post exes 
+#Set WW3 directory, switch, prep and post exes
 cd ufs_model.fd/WW3 || exit 1
 WW3_DIR=$( pwd -P )
 export WW3_DIR
@@ -58,11 +57,12 @@ export SWITCHFILE="${WW3_DIR}/${ww3switch}"
 prep_exes="ww3_grid ww3_prep ww3_prnc ww3_grid"
 post_exes="ww3_outp ww3_outf ww3_outp ww3_gint ww3_ounf ww3_ounp ww3_grib"

-#create build directory: 
+#create build directory:
 path_build="${WW3_DIR}/build_SHRD"
+[[ -d "${path_build}" ]] && rm -rf "${path_build}"
 mkdir -p "${path_build}" || exit 1
 cd "${path_build}" || exit 1
-echo "Forcing a SHRD build" 
+echo "Forcing a SHRD build"

 cat "${SWITCHFILE}" > "${path_build}/tempswitch"
@@ -77,26 +77,26 @@ sed -e "s/DIST/SHRD/g"\
   "${path_build}/tempswitch" > "${path_build}/switch"
 rm "${path_build}/tempswitch"

-echo "Switch file is ${path_build}/switch with switches:" 
+echo "Switch file is ${path_build}/switch with switches:"
 cat "${path_build}/switch"

-#Build executables: 
-cmake "${WW3_DIR}" -DSWITCH="${path_build}/switch" -DCMAKE_INSTALL_PREFIX=install
+#Build executables:
+cmake 
"${WW3_DIR}" -DSWITCH="${path_build}/switch" -DCMAKE_INSTALL_PREFIX=install rc=$? if (( rc != 0 )); then echo "Fatal error in cmake." exit "${rc}" fi -make -j 8 +make -j "${BUILD_JOBS:-8}" rc=$? if (( rc != 0 )); then echo "Fatal error in make." exit "${rc}" fi -make install +make install if (( rc != 0 )); then echo "Fatal error in make install." - exit "${rc}" + exit "${rc}" fi # Copy to top-level exe directory @@ -110,8 +110,8 @@ for prog in ${prep_exes} ${post_exes}; do done #clean-up build directory: -echo "executables are in ${finalexecdir}" -echo "cleaning up ${path_build}" +echo "executables are in ${finalexecdir}" +echo "cleaning up ${path_build}" rm -rf "${path_build}" exit 0 diff --git a/sorc/checkout.sh b/sorc/checkout.sh deleted file mode 100755 index 8f915bfc83..0000000000 --- a/sorc/checkout.sh +++ /dev/null @@ -1,210 +0,0 @@ -#! /usr/bin/env bash - -set +x -set -u - -function usage() { - cat << EOF -Clones and checks out external components necessary for - global workflow. If the directory already exists, skip - cloning and just check out the requested version (unless - -c option is used). - -Usage: ${BASH_SOURCE[0]} [-c][-h][-m ufs_hash][-o] - -c: - Create a fresh clone (delete existing directories) - -h: - Print this help message and exit - -m ufs_hash: - Check out this UFS hash instead of the default - -o: - Check out operational-only code (GTG and WAFS) - -g: - Check out GSI for GSI-based DA - -u: - Check out GDASApp for UFS-based DA -EOF - exit 1 -} - -function checkout() { - # - # Clone or fetch repo, then checkout specific hash and update submodules - # - # Environment variables: - # topdir [default: $(pwd)]: parent directory to your checkout - # logdir [default: $(pwd)]: where you want logfiles written - # CLEAN [default: NO]: whether to delete existing directories and create a fresh clone - # - # Usage: checkout - # - # Arguments - # dir: Directory for the clone - # remote: URL of the remote repository - # version: Commit to check out; should always be a speciifc commit (hash or tag), not a branch - # - # Returns - # Exit code of last failed command, or 0 if successful - # - - dir="$1" - remote="$2" - version="$3" - recursive=${4:-"YES"} - - name=$(echo "${dir}" | cut -d '.' -f 1) - echo "Performing checkout of ${name}" - - logfile="${logdir:-$(pwd)}/checkout_${name}.log" - - if [[ -f "${logfile}" ]]; then - rm "${logfile}" - fi - - cd "${topdir}" || exit 1 - if [[ -d "${dir}" && ${CLEAN} == "YES" ]]; then - echo "|-- Removing existing clone in ${dir}" - rm -Rf "${dir}" - fi - if [[ ! -d "${dir}" ]]; then - echo "|-- Cloning from ${remote} into ${dir}" - git clone "${remote}" "${dir}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while cloning ${name}" - echo - return "${status}" - fi - cd "${dir}" || exit 1 - else - # Fetch any updates from server - cd "${dir}" || exit 1 - echo "|-- Fetching updates from ${remote}" - git fetch - fi - echo "|-- Checking out ${version}" - git checkout "${version}" >> "${logfile}" 2>&1 - status=$? - if ((status > 0)); then - echo " WARNING: Error while checking out ${version} in ${name}" - echo - return "${status}" - fi - if [[ "${recursive}" == "YES" ]]; then - echo "|-- Updating submodules (if any)" - git submodule update --init --recursive >> "${logfile}" 2>&1 - status=$? 
- if ((status > 0)); then - echo " WARNING: Error while updating submodules of ${name}" - echo - return "${status}" - fi - fi - echo - return 0 -} - -# Set defaults for variables toggled by options -export CLEAN="NO" -checkout_gsi="NO" -checkout_gdas="NO" -checkout_gtg="NO" -checkout_wafs="NO" - -# Parse command line arguments -while getopts ":chgum:o" option; do - case ${option} in - c) - echo "Received -c flag, will delete any existing directories and start clean" - export CLEAN="YES" - ;; - g) - echo "Received -g flag for optional checkout of GSI-based DA" - checkout_gsi="YES" - ;; - h) usage;; - u) - echo "Received -u flag for optional checkout of UFS-based DA" - checkout_gdas="YES" - ;; - o) - echo "Received -o flag for optional checkout of operational-only codes" - checkout_gtg="YES" - checkout_wafs="YES" - ;; - m) - echo "Received -m flag with argument, will check out ufs-weather-model hash ${OPTARG} instead of default" - ufs_model_hash=${OPTARG} - ;; - :) - echo "option -${OPTARG} needs an argument" - usage - ;; - *) - echo "invalid option -${OPTARG}, exiting..." - usage - ;; - esac -done -shift $((OPTIND-1)) - -topdir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) -export topdir -export logdir="${topdir}/logs" -mkdir -p "${logdir}" - -# The checkout version should always be a speciifc commit (hash or tag), not a branch -errs=0 -checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "8965258" ; errs=$((errs + $?)) -checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" ; errs=$((errs + $?)) - -## 26jun23 ufs -## checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-ed7fc88}" ; errs=$((errs + $?)) -## -## Joe's 22sep23 fork: 10jul23 ufs-community + 12jul23 Joe/Anders changes + 18aug23 Joe fixes + 28aug23 Haiqin/Joe changes + 01sep23 Haiqin changes -## + 08sep23 Haiqin changes + 22sep23 Joe changes + 03oct23 Joe changes -checkout "ufs_model.fd" "https://github.com/joeolson42/ufs-weather-model" "${ufs_model_hash:-HFIP2023}" ; errs=$((errs + $?)) - -checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) - -if [[ ${checkout_gsi} == "YES" ]]; then - checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "113e307" "NO"; errs=$((errs + $?)) -fi - -if [[ ${checkout_gdas} == "YES" ]]; then - checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "81675c9"; errs=$((errs + $?)) -fi - -if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then - checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) - checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?)) -fi - -if [[ ${checkout_wafs} == "YES" ]]; then - checkout "gfs_wafs.fd" "https://github.com/NOAA-EMC/EMC_gfs_wafs.git" "014a0b8"; errs=$((errs + $?)) -fi - -if [[ ${checkout_gtg} == "YES" ]]; then - ################################################################################ - # checkout_gtg - ## yes: The gtg code at NCAR private repository is available for ops. GFS only. - # Only approved persons/groups have access permission. - ## no: No need to check out gtg code for general GFS users. 
- ################################################################################ - - echo "Checking out GTG extension for UPP" - cd "${topdir}/ufs_model.fd/FV3/upp" || exit 1 - logfile="${logdir}/checkout_gtg.log" - git -c submodule."post_gtg.fd".update=checkout submodule update --init --recursive >> "${logfile}" 2>&1 - status=$? - if (( status > 0 )); then - echo "WARNING: Error while checking out GTG" - errs=$((errs + status)) - fi -fi - -if (( errs > 0 )); then - echo "WARNING: One or more errors encountered during checkout process, please check logs before building" -fi -echo -exit "${errs}" diff --git a/sorc/gdas.cd b/sorc/gdas.cd new file mode 160000 index 0000000000..f44a6d500d --- /dev/null +++ b/sorc/gdas.cd @@ -0,0 +1 @@ +Subproject commit f44a6d500dda2aba491e4fa12c0bee428ddb7b80 diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd new file mode 160000 index 0000000000..7d3b08e87c --- /dev/null +++ b/sorc/gfs_utils.fd @@ -0,0 +1 @@ +Subproject commit 7d3b08e87c07cfa54079442d245ac7e9ab1cd9f4 diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd new file mode 160000 index 0000000000..c94bc72ff4 --- /dev/null +++ b/sorc/gsi_enkf.fd @@ -0,0 +1 @@ +Subproject commit c94bc72ff410b48c325abbfe92c9fcb601d89aed diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd new file mode 160000 index 0000000000..ae256c0d69 --- /dev/null +++ b/sorc/gsi_monitor.fd @@ -0,0 +1 @@ +Subproject commit ae256c0d69df3232ee9dd3e81b176bf2c3cda312 diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd new file mode 160000 index 0000000000..f371890b9f --- /dev/null +++ b/sorc/gsi_utils.fd @@ -0,0 +1 @@ +Subproject commit f371890b9fcb42312da5f6228d87b5a4829e7e3a diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 0ca0ba3415..3311eb9c3d 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -2,7 +2,8 @@ #--make symbolic links for EMC installation and hardcopies for NCO delivery -trap 'echo "${BASH_SOURCE[0]} encounted an error at line ${LINENO} (rc=$?)"' ERR +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null 2>&1 && pwd )" +TRACE=NO source "${HOMEgfs}/ush/preamble.sh" function usage() { cat << EOF @@ -18,8 +19,6 @@ EOF exit 1 } -set -eu - RUN_ENVIR="emc" # Reset option counter in case this script is sourced @@ -42,16 +41,25 @@ while getopts ":ho" option; do done shift $((OPTIND-1)) -script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) -top_dir=$(cd "$(dirname "${script_dir}")" &> /dev/null && pwd) -cd "${script_dir}" +# LINK is always ln, LINK_OR_COPY can be ln or cp depending on RUN_ENVIR being emc or nco, respectively +LINK="ln -fs" +if [[ "${RUN_ENVIR}" == "nco" ]]; then + LINK_OR_COPY="cp -rp" +else + LINK_OR_COPY="ln -fs" +fi -export COMPILER="intel" # shellcheck disable=SC1091 -source gfs_utils.fd/ush/detect_machine.sh # (sets MACHINE_ID) +COMPILER="intel" source "${HOMEgfs}/sorc/gfs_utils.fd/ush/detect_machine.sh" # (sets MACHINE_ID) # shellcheck disable= machine=$(echo "${MACHINE_ID}" | cut -d. 
-f1) +#------------------------------ +#--Set up build.ver and run.ver +#------------------------------ +${LINK_OR_COPY} "${HOMEgfs}/versions/build.${machine}.ver" "${HOMEgfs}/versions/build.ver" +${LINK_OR_COPY} "${HOMEgfs}/versions/run.${machine}.ver" "${HOMEgfs}/versions/run.ver" + #------------------------------ #--model fix fields #------------------------------ @@ -59,6 +67,7 @@ case "${machine}" in "wcoss2") FIX_DIR="/lfs/h2/emc/global/noscrub/emc.global/FIX/fix" ;; "hera") FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix" ;; "orion") FIX_DIR="/work/noaa/global/glopara/fix" ;; + "hercules") FIX_DIR="/work/noaa/global/glopara/fix" ;; "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;; "s4") FIX_DIR="/data/prod/glopara/fix" ;; *) @@ -68,22 +77,26 @@ case "${machine}" in esac # Source fix version file -source "${top_dir}/versions/fix.ver" - -LINK="ln -fs" -SLINK="ln -fs" -if [[ "${RUN_ENVIR}" == "nco" ]]; then - LINK="cp -rp" -fi - -# Link post -[[ -d upp.fd ]] && rm -rf upp.fd -${LINK} ufs_model.fd/FV3/upp upp.fd - +source "${HOMEgfs}/versions/fix.ver" + +# Link wxflow in ush/python, workflow and ci/scripts +# TODO: This will be unnecessary when wxflow is part of the virtualenv +cd "${HOMEgfs}/ush/python" || exit 1 +[[ -s "wxflow" ]] && rm -f wxflow +${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . +cd "${HOMEgfs}/workflow" || exit 1 +[[ -s "wxflow" ]] && rm -f wxflow +${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . +cd "${HOMEgfs}/ci/scripts" || exit 1 +[[ -s "wxflow" ]] && rm -f wxflow +${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . + + +# Link fix directories if [[ -n "${FIX_DIR}" ]]; then - if [[ ! -d "${top_dir}/fix" ]]; then mkdir "${top_dir}/fix" || exit 1; fi + if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi fi -cd "${top_dir}/fix" || exit 1 +cd "${HOMEgfs}/fix" || exit 1 for dir in aer \ am \ chem \ @@ -99,185 +112,179 @@ for dir in aer \ ugwd \ verif \ wave - do - if [[ -d "${dir}" ]]; then - [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${dir}" - rm -rf "${dir}" - fi - fix_ver="${dir}_ver" - ${LINK} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}" +do + if [[ -d "${dir}" ]]; then + [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${dir}" + rm -rf "${dir}" + fi + fix_ver="${dir}_ver" + ${LINK_OR_COPY} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}" done -if [[ -d "${script_dir}/ufs_utils.fd" ]]; then - cd "${script_dir}/ufs_utils.fd/fix" || exit 1 - ./link_fixdirs.sh "${RUN_ENVIR}" "${machine}" 2> /dev/null -fi - - #--------------------------------------- #--add files from external repositories #--------------------------------------- -cd "${top_dir}/parm/post" || exit 1 - for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ - postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ - postxconfig-NT-GFS-GOES.txt postxconfig-NT-GFS-TWO.txt postxconfig-NT-GFS-WAFS-ANL.txt postxconfig-NT-GFS-WAFS.txt \ - postxconfig-NT-GFS.txt postxconfig-NT-gefs-aerosol.txt postxconfig-NT-gefs-chem.txt params_grib2_tbl_new \ - post_tag_gfs128 post_tag_gfs65 gtg.config.gfs gtg_imprintings.txt nam_micro_lookup.dat \ - AEROSOL_LUTS.dat optics_luts_DUST.dat optics_luts_SALT.dat optics_luts_SOOT.dat optics_luts_SUSO.dat optics_luts_WASO.dat \ - ; do - ${LINK} "${script_dir}/upp.fd/parm/${file}" . - done - -cd "${top_dir}/scripts" || exit 8 - ${LINK} "${script_dir}/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" . 
-cd "${top_dir}/ush" || exit 8 - for file in emcsfc_ice_blend.sh fv3gfs_driver_grid.sh fv3gfs_make_orog.sh global_cycle_driver.sh \ - emcsfc_snow.sh fv3gfs_filter_topo.sh global_cycle.sh fv3gfs_make_grid.sh ; do - ${LINK} "${script_dir}/ufs_utils.fd/ush/${file}" . - done - for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do - ${LINK} "${script_dir}/gfs_utils.fd/ush/${file}" . - done - -#----------------------------------- -#--add gfs_wafs link if checked out -if [[ -d "${script_dir}/gfs_wafs.fd" ]]; then -#----------------------------------- - cd "${top_dir}/jobs" || exit 1 - ${LINK} "${script_dir}/gfs_wafs.fd/jobs"/* . - cd "${top_dir}/parm" || exit 1 - [[ -d wafs ]] && rm -rf wafs - ${LINK} "${script_dir}/gfs_wafs.fd/parm/wafs" wafs - cd "${top_dir}/scripts" || exit 1 - ${LINK} "${script_dir}/gfs_wafs.fd/scripts"/* . - cd "${top_dir}/ush" || exit 1 - ${LINK} "${script_dir}/gfs_wafs.fd/ush"/* . - cd "${top_dir}/fix" || exit 1 - [[ -d wafs ]] && rm -rf wafs - ${LINK} "${script_dir}/gfs_wafs.fd/fix"/* . -fi +#--copy/link NoahMp table form ccpp-physics repository +cd "${HOMEgfs}/parm/ufs" || exit 1 +${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/FV3/ccpp/physics/physics/noahmptable.tbl" . + +cd "${HOMEgfs}/parm/post" || exit 1 +for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ + postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ + postxconfig-NT-GFS-GOES.txt postxconfig-NT-GFS-TWO.txt \ + postxconfig-NT-GFS.txt postxconfig-NT-gefs-aerosol.txt postxconfig-NT-gefs-chem.txt params_grib2_tbl_new \ + post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat \ + AEROSOL_LUTS.dat optics_luts_DUST.dat optics_luts_SALT.dat optics_luts_SOOT.dat optics_luts_SUSO.dat optics_luts_WASO.dat +do + ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/parm/${file}" . +done + +cd "${HOMEgfs}/scripts" || exit 8 +${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" . +cd "${HOMEgfs}/ush" || exit 8 +for file in emcsfc_ice_blend.sh global_cycle_driver.sh emcsfc_snow.sh global_cycle.sh; do + ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/ush/${file}" . +done +for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/ush/${file}" . +done + +# TODO: Link these ufs.configure templates from ufs-weather-model +#cd "${HOMEgfs}/parm/ufs" || exit 1 +#declare -a ufs_configure_files=("ufs.configure.atm.IN" \ +# "ufs.configure.atm_aero.IN" \ +# "ufs.configure.atmw.IN" \ +# "ufs.configure.blocked_atm_wav_2way.IN" \ +# "ufs.configure.blocked_atm_wav.IN" \ +# "ufs.configure.cpld_agrid.IN" \ +# "ufs.configure.cpld_esmfthreads.IN" \ +# "ufs.configure.cpld.IN" \ +# "ufs.configure.cpld_noaero.IN" \ +# "ufs.configure.cpld_noaero_nowave.IN" \ +# "ufs.configure.cpld_noaero_outwav.IN" \ +# "ufs.configure.leapfrog_atm_wav.IN") +#for file in "${ufs_configure_files[@]}"; do +# [[ -s "${file}" ]] && rm -f "${file}" +# ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/${file}" . +#done + +cd "${HOMEgfs}/ush" || exit 1 +[[ -s "atparse.bash" ]] && rm -f "atparse.bash" +${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/atparse.bash" . #------------------------------ #--add GDASApp fix directory #------------------------------ -if [[ -d "${script_dir}/gdas.cd" ]]; then - cd "${top_dir}/fix" || exit 1 - [[ ! 
-d gdas ]] && mkdir -p gdas - cd gdas || exit 1 - for gdas_sub in crtm fv3jedi gsibec; do - if [[ -d "${gdas_sub}" ]]; then - rm -rf "${gdas_sub}" - fi - fix_ver="gdas_${gdas_sub}_ver" - ${LINK} "${FIX_DIR}/gdas/${gdas_sub}/${!fix_ver}" "${gdas_sub}" - done +if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then + cd "${HOMEgfs}/fix" || exit 1 + [[ ! -d gdas ]] && mkdir -p gdas + cd gdas || exit 1 + for gdas_sub in fv3jedi gsibec; do + if [[ -d "${gdas_sub}" ]]; then + rm -rf "${gdas_sub}" + fi + fix_ver="gdas_${gdas_sub}_ver" + ${LINK_OR_COPY} "${FIX_DIR}/gdas/${gdas_sub}/${!fix_ver}" "${gdas_sub}" + done fi #------------------------------ #--add GDASApp files #------------------------------ -if [[ -d "${script_dir}/gdas.cd" ]]; then - cd "${top_dir}/ush" || exit 1 - ${LINK} "${script_dir}/gdas.cd/ush/ufsda" . - ${LINK} "${script_dir}/gdas.cd/ush/jediinc2fv3.py" . - ${LINK} "${script_dir}/gdas.cd/build/bin/imsfv3_scf2ioda.py" . - ${LINK} "${script_dir}/gdas.cd/ush/land/letkf_create_ens.py" . +if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then + cd "${HOMEgfs}/ush" || exit 1 + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ufsda" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/jediinc2fv3.py" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/run_bufr2ioda.py" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/build/bin/imsfv3_scf2ioda.py" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/scripts/exglobal_prep_ocean_obs.py" . fi #------------------------------ #--add DA Monitor file (NOTE: ensure to use correct version) #------------------------------ -if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then - - cd "${top_dir}/fix" || exit 1 - [[ ! -d gdas ]] && ( mkdir -p gdas || exit 1 ) - cd gdas || exit 1 - ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" . - cd "${top_dir}/parm" || exit 1 - [[ -d mon ]] && rm -rf mon - mkdir -p mon - cd mon || exit 1 - ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm - # ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" . - # ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" . - ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" . - # ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" . +if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd" ]]; then + + cd "${HOMEgfs}/parm" || exit 1 + [[ -d monitor ]] && rm -rf monitor + mkdir -p monitor + cd monitor || exit 1 + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" . 
+ ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/fix/gfs_minmon_cost.txt" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/fix/gfs_minmon_gnorm.txt" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm + # ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" . + # ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" . + # ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" . fi #------------------------------ #--link executables #------------------------------ -if [[ ! -d "${top_dir}/exec" ]]; then mkdir "${top_dir}/exec" || exit 1 ; fi -cd "${top_dir}/exec" || exit 1 +if [[ ! -d "${HOMEgfs}/exec" ]]; then mkdir "${HOMEgfs}/exec" || exit 1 ; fi +cd "${HOMEgfs}/exec" || exit 1 -for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x regrid_nemsio.x supvit.x syndat_getjtbul.x \ - syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x enkf_chgres_recenter.x overgridid.x \ - mkgfsawps.x enkf_chgres_recenter_nc.x fv3nc2nemsio.x tave.x vint.x reg2grb2.x ; do - [[ -s "${utilexe}" ]] && rm -f "${utilexe}" - ${LINK} "${script_dir}/gfs_utils.fd/install/bin/${utilexe}" . +for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul.x \ + syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x \ + mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x reg2grb2.x +do + [[ -s "${utilexe}" ]] && rm -f "${utilexe}" + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gfs_utils.fd/install/bin/${utilexe}" . done [[ -s "ufs_model.x" ]] && rm -f ufs_model.x -${LINK} "${script_dir}/ufs_model.fd/tests/ufs_model.x" . +${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/ufs_model.x" . [[ -s "upp.x" ]] && rm -f upp.x -${LINK} "${script_dir}/upp.fd/exec/upp.x" . - -if [[ -d "${script_dir}/gfs_wafs.fd" ]]; then - for wafsexe in \ - wafs_awc_wafavn.x wafs_blending.x wafs_blending_0p25.x \ - wafs_cnvgrib2.x wafs_gcip.x wafs_grib2_0p25.x \ - wafs_makewafs.x wafs_setmissing.x; do - [[ -s ${wafsexe} ]] && rm -f "${wafsexe}" - ${LINK} "${script_dir}/gfs_wafs.fd/exec/${wafsexe}" . - done -fi +${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/exec/upp.x" . -for ufs_utilsexe in \ - emcsfc_ice_blend emcsfc_snow2mdl global_cycle ; do +for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do [[ -s "${ufs_utilsexe}" ]] && rm -f "${ufs_utilsexe}" - ${LINK} "${script_dir}/ufs_utils.fd/exec/${ufs_utilsexe}" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/exec/${ufs_utilsexe}" . 
done # GSI -if [[ -d "${script_dir}/gsi_enkf.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_enkf.fd/install" ]]; then for gsiexe in enkf.x gsi.x; do [[ -s "${gsiexe}" ]] && rm -f "${gsiexe}" - ${LINK} "${script_dir}/gsi_enkf.fd/install/bin/${gsiexe}" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_enkf.fd/install/bin/${gsiexe}" . done fi # GSI Utils -if [[ -d "${script_dir}/gsi_utils.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_utils.fd/install" ]]; then for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \ - interp_inc.x recentersigp.x;do + interp_inc.x recentersigp.x + do [[ -s "${exe}" ]] && rm -f "${exe}" - ${LINK} "${script_dir}/gsi_utils.fd/install/bin/${exe}" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_utils.fd/install/bin/${exe}" . done fi # GSI Monitor -if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gsi_monitor.fd/install" ]]; then for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ - radmon_bcoef.x radmon_bcor.x radmon_time.x; do + radmon_bcoef.x radmon_bcor.x radmon_time.x + do [[ -s "${exe}" ]] && rm -f "${exe}" - ${LINK} "${script_dir}/gsi_monitor.fd/install/bin/${exe}" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gsi_monitor.fd/install/bin/${exe}" . done fi # GDASApp -if [[ -d "${script_dir}/gdas.cd" ]]; then +if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ "fv3jedi_diffstates.x" \ "fv3jedi_ensvariance.x" \ @@ -300,134 +307,102 @@ if [[ -d "${script_dir}/gdas.cd" ]]; then "soca_setcorscales.x" \ "soca_gridgen.x" \ "soca_var.x" \ + "bufr2ioda.x" \ "calcfIMS.exe" \ "apply_incr.exe" ) for gdasexe in "${JEDI_EXE[@]}"; do [[ -s "${gdasexe}" ]] && rm -f "${gdasexe}" - ${LINK} "${script_dir}/gdas.cd/build/bin/${gdasexe}" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/build/bin/${gdasexe}" . done fi #------------------------------ #--link source code directories #------------------------------ -cd "${script_dir}" || exit 8 - - if [[ -d gsi_enkf.fd ]]; then - [[ -d gsi.fd ]] && rm -rf gsi.fd - ${SLINK} gsi_enkf.fd/src/gsi gsi.fd - - [[ -d enkf.fd ]] && rm -rf enkf.fd - ${SLINK} gsi_enkf.fd/src/enkf enkf.fd - fi +cd "${HOMEgfs}/sorc" || exit 8 +if [[ -d ufs_model.fd ]]; then + [[ -d upp.fd ]] && rm -rf upp.fd + ${LINK} ufs_model.fd/FV3/upp upp.fd +fi - if [[ -d gsi_utils.fd ]]; then - [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd - ${SLINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd +if [[ -d gsi_enkf.fd ]]; then + [[ -d gsi.fd ]] && rm -rf gsi.fd + ${LINK} gsi_enkf.fd/src/gsi gsi.fd - [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd + [[ -d enkf.fd ]] && rm -rf enkf.fd + ${LINK} gsi_enkf.fd/src/enkf enkf.fd +fi - [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd +if [[ -d gsi_utils.fd ]]; then + [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd + ${LINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd . - [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd + [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd . 
- [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd + [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd . - [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd + [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd . - [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd + [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd . - [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd - ${SLINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd - fi + [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd . - if [[ -d gsi_monitor.fd ]] ; then - [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd - ${SLINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd + [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd . - [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd - ${SLINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd + [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd + ${LINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd . +fi - [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd - ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd +if [[ -d gsi_monitor.fd ]] ; then + [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd + ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd . - [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd - ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd + [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd + ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd . 
- [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd - ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd + [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd - [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd - ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd - fi + [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd - [[ -d gfs_ncep_post.fd ]] && rm -rf gfs_ncep_post.fd - ${SLINK} upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd - - for prog in fregrid make_hgrid make_solo_mosaic ; do - [[ -d "${prog}.fd" ]] && rm -rf "${prog}.fd" - ${SLINK} "ufs_utils.fd/sorc/fre-nctools.fd/tools/${prog}" "${prog}.fd" - done - for prog in global_cycle.fd \ - emcsfc_ice_blend.fd \ - emcsfc_snow2mdl.fd ;do - [[ -d "${prog}" ]] && rm -rf "${prog}" - ${SLINK} "ufs_utils.fd/sorc/${prog}" "${prog}" - done - - for prog in enkf_chgres_recenter.fd \ - enkf_chgres_recenter_nc.fd \ - fbwndgfs.fd \ - fv3nc2nemsio.fd \ - gaussian_sfcanl.fd \ - gfs_bufr.fd \ - mkgfsawps.fd \ - overgridid.fd \ - rdbfmsua.fd \ - reg2grb2.fd \ - regrid_nemsio.fd \ - supvit.fd \ - syndat_getjtbul.fd \ - syndat_maksynrc.fd \ - syndat_qctropcy.fd \ - tave.fd \ - tocsbufr.fd \ - vint.fd \ - webtitle.fd - do - if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi - ${LINK} "gfs_utils.fd/src/${prog}" . - done - - if [[ -d "${script_dir}/gfs_wafs.fd" ]]; then - ${SLINK} gfs_wafs.fd/sorc/wafs_awc_wafavn.fd wafs_awc_wafavn.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_blending.fd wafs_blending.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_blending_0p25.fd wafs_blending_0p25.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_cnvgrib2.fd wafs_cnvgrib2.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_gcip.fd wafs_gcip.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_grib2_0p25.fd wafs_grib2_0p25.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_makewafs.fd wafs_makewafs.fd - ${SLINK} gfs_wafs.fd/sorc/wafs_setmissing.fd wafs_setmissing.fd - fi + [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd -#------------------------------ -# copy $HOMEgfs/parm/config/config.base.nco.static as config.base for operations -# config.base in the $HOMEgfs/parm/config has no use in development -cd "${top_dir}/parm/config" || exit 1 -[[ -s "config.base" ]] && rm -f config.base -if [[ "${RUN_ENVIR}" == "nco" ]] ; then - cp -p config.base.nco.static config.base - cp -p config.fv3.nco.static config.fv3 - cp -p config.resources.nco.static config.resources + [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd fi -#------------------------------ -echo "${BASH_SOURCE[0]} completed successfully" +for prog in global_cycle.fd emcsfc_ice_blend.fd emcsfc_snow2mdl.fd ;do + [[ -d "${prog}" ]] && rm -rf "${prog}" + ${LINK} "ufs_utils.fd/sorc/${prog}" "${prog}" +done + +for prog in enkf_chgres_recenter_nc.fd \ + fbwndgfs.fd \ + gaussian_sfcanl.fd \ + gfs_bufr.fd \ + mkgfsawps.fd \ + overgridid.fd \ + rdbfmsua.fd \ + reg2grb2.fd \ + supvit.fd \ + syndat_getjtbul.fd \ + syndat_maksynrc.fd \ + syndat_qctropcy.fd \ + tave.fd \ + tocsbufr.fd \ + vint.fd \ + webtitle.fd +do + if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi + ${LINK_OR_COPY} "gfs_utils.fd/src/${prog}" . 
+done exit 0 diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh deleted file mode 100755 index f212ae4cb4..0000000000 --- a/sorc/partial_build.sh +++ /dev/null @@ -1,201 +0,0 @@ -#! /usr/bin/env bash -# -# define the array of the name of build program -# -declare -a Build_prg=("Build_ufs_model" \ - "Build_ww3_prepost" \ - "Build_gsi_enkf" \ - "Build_gsi_utils" \ - "Build_gsi_monitor" \ - "Build_ww3_prepost" \ - "Build_gdas" \ - "Build_upp" \ - "Build_ufs_utils" \ - "Build_gfs_wafs" \ - "Build_gfs_utils") - -# -# function parse_cfg: read config file and retrieve the values -# -parse_cfg() { - declare -i n - declare -i num_args - declare -i total_args - declare -a all_prg - total_args=$# - num_args=$1 - (( num_args == 0 )) && return 0 - config=$2 - [[ ${config,,} == "--verbose" ]] && config=$3 - all_prg=() - for (( n = num_args + 2; n <= total_args; n++ )); do - all_prg+=( "${!n}" ) - done - - if [[ ${config^^} == ALL ]]; then - # - # set all values to true - # - for var in "${Build_prg[@]}"; do - eval "${var}=true" - done - elif [[ ${config} == config=* ]]; then - # - # process config file - # - cfg_file=${config#config=} - ${verbose} && echo "INFO: settings in config file: ${cfg_file}" - while read -r cline; do - # remove leading white space - clean_line="${cline#"${cline%%[![:space:]]*}"}" - { [[ -z "${clean_line}" ]] || [[ "${clean_line:0:1}" == "#" ]]; } || { - ${verbose} && echo "${clean_line}" - first9=${clean_line:0:9} - [[ ${first9,,} == "building " ]] && { - # No shellcheck, this can't be replaced by a native bash substitute - # because it uses a regex - # shellcheck disable=SC2001 - short_prg=$(sed -e 's/.*(\(.*\)).*/\1/' <<< "${clean_line}") - # shellcheck disable= - # remove trailing white space - clean_line="${cline%"${cline##*[![:space:]]}"}" - build_action=true - last5=${clean_line: -5} - [[ ${last5,,} == ". yes" ]] && build_action=true - last4=${clean_line: -4} - [[ ${last4,,} == ". 
no" ]] && build_action=false - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=${build_action}" - break - } - done - ${found} || { - echo "*** Unrecognized line in config file \"${cfg_file}\":" 2>&1 - echo "${cline}" 2>&1 - exit 3 - } - } - } - done < "${cfg_file}" - elif [[ ${config} == select=* ]]; then - # - # set all values to (default) false - # - for var in "${Build_prg[@]}"; do - eval "${var}=false" - done - # - # read command line partial build setting - # - del="" - sel_prg=${config#select=} - for separator in " " "," ";" ":" "/" "|"; do - [[ "${sel_prg/${separator}}" == "${sel_prg}" ]] || { - del=${separator} - sel_prg=${sel_prg//${del}/ } - } - done - if [[ ${del} == "" ]]; then - { - short_prg=${sel_prg} - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=true" - break - } - done - ${found} || { - echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 - exit 4 - } - } || { - for short_prg in ${sel_prg}; do - found=false - for prg in "${all_prg[@]}"; do - [[ ${prg} == "Build_${short_prg}" ]] && { - found=true - eval "${prg}=true" - break - } - done - ${found} || { - echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 - exit 5 - } - done - } - fi - else - echo "*** Unrecognized command line option \"${config}\"" 2>&1 - exit 6 - fi -} - - -usage() { - cat << EOF 2>&1 -Usage: ${BASH_SOURCE[0]} [-c config_file][-h][-v] - -h: - Print this help message and exit - -v: - Turn on verbose mode - -c config_file: - Override default config file to determine whether to build each program [default: gfs_build.cfg] -EOF -} - - -# -# read command line arguments; processing config file -# -declare -a parse_argv=() -verbose=false -config_file="gfs_build.cfg" -# Reset option counter for when this script is sourced -OPTIND=1 -while getopts ":c:h:v" option; do - case "${option}" in - c) config_file="${OPTARG}";; - h) usage;; - v) - verbose=true - parse_argv+=( "--verbose" ) - ;; - :) - echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage - ;; - *) - echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage - ;; - esac -done - -shift $((OPTIND-1)) - -parse_argv+=( "config=${config_file}" ) - -# -# call arguments retriever/config parser -# -parse_cfg ${#parse_argv[@]} "${parse_argv[@]}" "${Build_prg[@]}" - -# -# print values of build array -# -${verbose} && { - echo "INFO: partial build settings:" - for var in "${Build_prg[@]}"; do - echo -n " ${var}: " - "${!var}" && echo True || echo False - done -} - -echo "=== end of partial build setting ===" > /dev/null - diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd new file mode 160000 index 0000000000..991d6527da --- /dev/null +++ b/sorc/ufs_model.fd @@ -0,0 +1 @@ +Subproject commit 991d6527da22d11016df035998ec1352d0449875 diff --git a/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_c3_mynn.xml b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_c3_mynn.xml index 61badf90c0..8b70b34a0c 100644 --- a/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_c3_mynn.xml +++ b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_c3_mynn.xml @@ -64,7 +64,6 @@ unified_ugwp_post GFS_GWD_generic_post GFS_suite_stateout_update - ozphys_2015 h2ophys get_phi_fv3 GFS_suite_interstitial_3 @@ -93,7 +92,7 @@ GFS_stochastics - phys_tend + GFS_physics_post diff --git a/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml 
b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml index db415a6cf4..118fec5615 100644 --- a/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml +++ b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml @@ -61,7 +61,6 @@ unified_ugwp_post GFS_GWD_generic_post GFS_suite_stateout_update - ozphys_2015 h2ophys get_phi_fv3 GFS_suite_interstitial_3 @@ -88,7 +87,7 @@ GFS_stochastics - phys_tend + GFS_physics_post diff --git a/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_ugwpv1_mynn.xml b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_ugwpv1_mynn.xml new file mode 100644 index 0000000000..e16c60e065 --- /dev/null +++ b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_ugwpv1_mynn.xml @@ -0,0 +1,97 @@ + + + + + + + GFS_time_vary_pre + GFS_rrtmg_setup + GFS_rad_time_vary + GFS_phys_time_vary + + + + + GFS_suite_interstitial_rad_reset + sgscloud_radpre + GFS_rrtmg_pre + GFS_radiation_surface + rad_sw_pre + rrtmg_sw + rrtmg_sw_post + rrtmg_lw_pre + rrtmg_lw + sgscloud_radpost + rrtmg_lw_post + GFS_rrtmg_post + + + + + GFS_suite_interstitial_phys_reset + GFS_suite_stateout_reset + get_prs_fv3 + GFS_suite_interstitial_1 + GFS_surface_generic_pre + GFS_surface_composites_pre + dcyc2t3 + GFS_surface_composites_inter + GFS_suite_interstitial_2 + + + + sfc_diff + GFS_surface_loop_control_part1 + sfc_nst_pre + sfc_nst + sfc_nst_post + noahmpdrv + sfc_sice + GFS_surface_loop_control_part2 + + + + GFS_surface_composites_post + sfc_diag + sfc_diag_post + GFS_surface_generic_post + + + + mynnedmf_wrapper + GFS_GWD_generic_pre + ugwpv1_gsldrag + ugwpv1_gsldrag_post + GFS_GWD_generic_post + GFS_suite_stateout_update + h2ophys + get_phi_fv3 + GFS_suite_interstitial_3 + GFS_DCNV_generic_pre + samfdeepcnv + GFS_DCNV_generic_post + + + + GFS_suite_interstitial_4 + cnvc90 + GFS_MP_generic_pre + mp_thompson_pre + + + mp_thompson + + + mp_thompson_post + GFS_MP_generic_post + maximum_hourly_diagnostics + + + + + GFS_stochastics + GFS_physics_post + + + + diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd new file mode 160000 index 0000000000..ce385cedfa --- /dev/null +++ b/sorc/ufs_utils.fd @@ -0,0 +1 @@ +Subproject commit ce385cedfa9abd46b0905e8d6486b0339a9e4267 diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh index 67fe1b3d83..e6815c180c 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh @@ -41,7 +41,7 @@ source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/sfc" mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh index 9b2ac08efb..76608fe07e 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh @@ -37,7 +37,7 @@ source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/sfc" 
mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh index 6a9ef4ec7a..0c9fe2a952 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh @@ -27,7 +27,7 @@ source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/sfc" mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh index a5f0eeb41d..2fa7475ba1 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh @@ -43,7 +43,7 @@ source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/sfc" mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh index 34312b3210..50df282567 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh @@ -51,7 +51,7 @@ source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/sfc" mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh index a30257e0ce..5195924e13 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh @@ -61,7 +61,7 @@ source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/sfc" mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" diff --git a/sorc/verif-global.fd b/sorc/verif-global.fd new file mode 160000 index 0000000000..c267780a12 --- /dev/null +++ b/sorc/verif-global.fd @@ -0,0 +1 @@ +Subproject commit c267780a1255fa7db052c745cf9c78b7dc6a2695 diff --git a/sorc/wxflow 
b/sorc/wxflow new file mode 160000 index 0000000000..528f5abb49 --- /dev/null +++ b/sorc/wxflow @@ -0,0 +1 @@ +Subproject commit 528f5abb49e80751f83ebd6eb0a87bc70012bb24 diff --git a/test/diff_UFS_rundir.sh b/test/diff_UFS_rundir.sh index fac2242a65..a305497ef3 100755 --- a/test/diff_UFS_rundir.sh +++ b/test/diff_UFS_rundir.sh @@ -69,8 +69,8 @@ temp_file=".diff.nc" coord_file="${coord_file:-./coordinates.lst}" # Input files -files="data_table diag_table fd_nems.yaml field_table ice_in input.nml med_modelio.nml \ - model_configure nems.configure pio_in ww3_multi.inp ww3_shel.inp" +files="data_table diag_table fd_ufs.yaml field_table ice_in input.nml med_modelio.nml \ + model_configure ufs.configure pio_in ww3_multi.inp ww3_shel.inp" for file in $files; do echo "=== ${file} ===" diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py index a325ec35b3..cf2dc8dc89 100755 --- a/ush/calcanl_gfs.py +++ b/ush/calcanl_gfs.py @@ -346,7 +346,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, ComOut = os.getenv('COM_ATMOS_ANALYSIS', './') APrefix = os.getenv('APREFIX', '') NThreads = os.getenv('NTHREADS_CHGRES', 1) - FixDir = os.getenv('FIXgsm', './') + FixDir = os.getenv('FIXam', './') atmges_ens_mean = os.getenv('ATMGES_ENSMEAN', './atmges_ensmean') RunDir = os.getenv('DATA', './') ExecCMD = os.getenv('APRUN_CALCANL', '') diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 647722b7a3..01ae66a02d 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -26,6 +26,8 @@ case $(hostname -f) in Orion-login-[1-4].HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion1-4 + [Hh]ercules-login-[1-4].[Hh][Pp][Cc].[Mm]s[Ss]tate.[Ee]du) MACHINE_ID=hercules ;; ### hercules1-4 + cheyenne[1-6].cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 cheyenne[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 chadmin[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 @@ -38,6 +40,13 @@ case $(hostname -f) in *) MACHINE_ID=UNKNOWN ;; # Unknown platform esac +if [[ ${MACHINE_ID} == "UNKNOWN" ]]; then + case ${PW_CSP:-} in + "aws" | "google" | "azure") MACHINE_ID=noaacloud ;; + *) PW_CSP="UNKNOWN" + esac +fi + # Overwrite auto-detect with MACHINE if set MACHINE_ID=${MACHINE:-${MACHINE_ID}} @@ -57,8 +66,13 @@ elif [[ -d /scratch1 ]] ; then # We are on NOAA Hera MACHINE_ID=hera elif [[ -d /work ]] ; then - # We are on MSU Orion - MACHINE_ID=orion + # We are on MSU Orion or Hercules + if [[ -d /apps/other ]] ; then + # We are on Hercules + MACHINE_ID=hercules + else + MACHINE_ID=orion + fi elif [[ -d /glade ]] ; then # We are on NCAR Yellowstone MACHINE_ID=cheyenne diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index 06329e0762..d0dc325460 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -11,7 +11,8 @@ # For all non-environment variables # Cycling and forecast hour specific parameters -FV3_GFS_det(){ +FV3_det(){ + echo "SUB ${FUNCNAME[0]}: Run type determination for FV3" #------------------------------------------------------- # warm start? warm_start=${EXP_WARM_START:-".false."} @@ -20,55 +21,88 @@ # Determine if this is a warm start or cold start if [[ -f "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000.coupler.res" ]]; then - export warm_start=".true." + warm_start=".true." fi # turn IAU off for cold start DOIAU_coldstart=${DOIAU_coldstart:-"NO"} - if [ "${DOIAU}" = "YES" -a "${warm_start}" = ".false." ] || [ "${DOIAU_coldstart}" = "YES" -a "${warm_start}" = ".true."
]; then - export DOIAU="NO" - echo "turning off IAU since warm_start = ${warm_start}" + if [ "${DOIAU}" = "YES" -a "${warm_start}" = ".false." ] || [ "${DOIAU_coldstart}" = "YES" -a "${warm_start}" = ".true." ]; then + echo "turning off IAU since this is a cold-start" + DOIAU="NO" DOIAU_coldstart="YES" + # Ignore "not used" warning + # shellcheck disable=SC2034 IAU_OFFSET=0 - sCDATE=${CDATE} - sPDY=${PDY} - scyc=${cyc} + sCDATE=${current_cycle} + sPDY=${current_cycle:0:8} + scyc=${current_cycle:8:2} tPDY=${sPDY} - tcyc=${cyc} + tcyc=${scyc} fi #------------------------------------------------------- - # determine if restart IC exists to continue from a previous forecast + # determine if restart IC exists to continue from a previous forecast run attempt + RERUN=${RERUN:-"NO"} - filecount=$(find "${COM_ATMOS_RESTART:-/dev/null}" -type f | wc -l) - if [[ ( ${CDUMP} = "gfs" || ( ${RUN} = "gefs" && ${CDATE_RST} = "" )) && ${rst_invt1} -gt 0 && ${FHMAX} -gt ${rst_invt1} && ${filecount} -gt 10 ]]; then - reverse=$(echo "${restart_interval[@]} " | tac -s ' ') - for xfh in ${reverse} ; do - yfh=$((xfh-(IAU_OFFSET/2))) - SDATE=$(${NDATE} ${yfh} "${CDATE}") - PDYS=$(echo "${SDATE}" | cut -c1-8) - cycs=$(echo "${SDATE}" | cut -c9-10) - flag1=${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res - flag2=${COM_ATMOS_RESTART}/coupler.res - - #make sure that the wave restart files also exist if cplwav=true - waverstok=".true." + # Get a list of all YYYYMMDD.HH0000.coupler.res files from the atmos restart directory + mapfile -t file_array < <(find "${COM_ATMOS_RESTART:-/dev/null}" -name "????????.??0000.coupler.res") + if [[ ( "${RUN}" = "gfs" || "${RUN}" = "gefs" ) \ + && "${#file_array[@]}" -gt 0 ]]; then + + # Look in reverse order of file_array to determine available restart times + for ((ii=${#file_array[@]}-1; ii>=0; ii--)); do + + local filepath="${file_array[ii]}" + local filename + filename=$(basename "${filepath}") # Strip path from YYYYMMDD.HH0000.coupler.res + PDYS=${filename:0:8} # match YYYYMMDD of YYYYMMDD.HH0000.coupler.res + cycs=${filename:9:2} # match HH of YYYYMMDD.HH0000.coupler.res + + # Assume all is well; all restarts are available + local fv3_rst_ok="YES" + local mom6_rst_ok="YES" + local cice6_rst_ok="YES" + local cmeps_rst_ok="YES" + local ww3_rst_ok="YES" + + # Check for availability of FV3 restarts + if [[ -f "${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res" ]]; then + mv "${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res" "${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res.old" ## JKH + else + local fv3_rst_ok="NO" + fi + + # Check for availability of MOM6 restarts # TODO + # Check for availability of CICE6 restarts # TODO + # Check for availability of CMEPS restarts # TODO + + # Check for availability of WW3 restarts if [[ "${cplwav}" = ".true." ]]; then - for wavGRD in ${waveGRD} ; do - if [[ ! -f "${COM_WAVE_RESTART}/${PDYS}.${cycs}0000.restart.${wavGRD}" ]]; then - waverstok=".false." + for ww3_grid in ${waveGRD} ; do + if [[ ! -f "${COM_WAVE_RESTART}/${PDYS}.${cycs}0000.restart.${ww3_grid}" ]]; then + local ww3_rst_ok="NO" fi done fi - if [[ -s "${flag1}" ]] && [[ ${waverstok} = ".true." 
]]; then - CDATE_RST=${SDATE} - [[ ${RERUN} = "YES" ]] && break - mv "${flag1}" "${flag1}.old" - if [[ -s "${flag2}" ]]; then mv "${flag2}" "${flag2}.old" ;fi + # Collective check + if [[ "${fv3_rst_ok}" = "YES" ]] \ + && [[ "${mom6_rst_ok}" = "YES" ]] \ + && [[ "${cice6_rst_ok}" = "YES" ]] \ + && [[ "${cmeps_rst_ok}" = "YES" ]] \ + && [[ "${ww3_rst_ok}" = "YES" ]]; then + + if [[ -f "${COM_ATMOS_RESTART}/coupler.res" ]]; then + mv "${COM_ATMOS_RESTART}/coupler.res" "${COM_ATMOS_RESTART}/coupler.res.old" + fi + + SDATE="${PDYS}${cycs}" + CDATE_RST="${SDATE}" RERUN="YES" - [[ ${xfh} = ${rst_invt1} ]] && RERUN="NO" + echo "Restarts have been found for CDATE_RST=${CDATE_RST}, returning with 'RERUN=YES'" + break fi + done fi #------------------------------------------------------- diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index adce9f696c..f47755f854 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -11,59 +11,43 @@ ## for execution. ##### -DATM_postdet(){ - ###################################################################### - # Link DATM inputs (ie forcing files) # - ###################################################################### - - #TODO: This should be some loop through CDATE-> CDATE+ FORECAST length - #and get input from either CFSR or GEFS or Whatever... - #Currently assumes you only need the month of DATM input for IC date - #DATMINPUTDIR should be machine specific - - # DATM forcing file name convention is ${DATM_FILENAME_BASE}.$YYYYMMDDHH.nc - echo "Link DATM forcing files" - DATMINPUTDIR="/scratch2/NCEPDEV/marineda/DATM_INPUT/CFSR/${SYEAR}${SMONTH}" - $NLN -sf ${DATMINPUTDIR}/${DATM_FILENAME_BASE}*.nc $DATA/DATM_INPUT/ -} - -FV3_GFS_postdet(){ - echo "SUB ${FUNCNAME[0]}: $RERUN and $warm_start determined for $RUN" +FV3_postdet(){ + echo "SUB ${FUNCNAME[0]}: Entering for RUN = ${RUN}" - echo $warm_start - echo $RERUN + echo "warm_start = ${warm_start}" + echo "RERUN = ${RERUN}" #------------------------------------------------------- - if [ $warm_start = ".true." -o $RERUN = "YES" ]; then + if [[ "${warm_start}" = ".true." ]] || [[ "${RERUN}" = "YES" ]]; then #------------------------------------------------------- #............................. - if [ $RERUN = "NO" ]; then + if [[ ${RERUN} = "NO" ]]; then #............................. # Link all restart files from previous cycle for file in "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000."*.nc; do - file2=$(echo $(basename $file)) - file2=$(echo $file2 | cut -d. -f3-) # remove the date from file - fsuf=$(echo $file2 | cut -d. -f1) - $NLN $file $DATA/INPUT/$file2 + file2=$(echo $(basename "${file}")) + file2=$(echo "${file2}" | cut -d. -f3-) # remove the date from file + fsuf=$(echo "${file2}" | cut -d. -f1) + ${NLN} "${file}" "${DATA}/INPUT/${file2}" done # Replace sfc_data with sfcanl_data restart files from current cycle (if found) - if [ "${MODE}" = "cycled" ] && [ "${CCPP_SUITE}" = "FV3_GFS_v16" ]; then # TODO: remove if statement when global_cycle can handle NOAHMP + if [[ "${MODE}" = "cycled" ]] && [[ "${CCPP_SUITE}" = "FV3_GFS_v16" ]]; then # TODO: remove if statement when global_cycle can handle NOAHMP for file in "${COM_ATMOS_RESTART}/${sPDY}.${scyc}0000."*.nc; do - file2=$(echo $(basename $file)) - file2=$(echo $file2 | cut -d. -f3-) # remove the date from file - fsufanl=$(echo $file2 | cut -d. 
-f1) - file2=$(echo $file2 | sed -e "s/sfcanl_data/sfc_data/g") - rm -f $DATA/INPUT/$file2 - $NLN $file $DATA/INPUT/$file2 + file2=$(basename "${file}") + file2=$(echo "${file2}" | cut -d. -f3-) # remove the date from file + fsufanl=$(echo "${file2}" | cut -d. -f1) + file2=$(echo "${file2}" | sed -e "s/sfcanl_data/sfc_data/g") + rm -f "${DATA}/INPUT/${file2}" + ${NLN} "${file}" "${DATA}/INPUT/${file2}" done fi # Need a coupler.res when doing IAU - if [ $DOIAU = "YES" ]; then - rm -f $DATA/INPUT/coupler.res - cat >> $DATA/INPUT/coupler.res << EOF + if [[ ${DOIAU} = "YES" ]]; then + rm -f "${DATA}/INPUT/coupler.res" + cat >> "${DATA}/INPUT/coupler.res" << EOF 2 (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4) ${gPDY:0:4} ${gPDY:4:2} ${gPDY:6:2} ${gcyc} 0 0 Model start time: year, month, day, hour, minute, second ${sPDY:0:4} ${sPDY:4:2} ${sPDY:6:2} ${scyc} 0 0 Current model time: year, month, day, hour, minute, second @@ -71,28 +55,28 @@ EOF fi # Link increments - if [ $DOIAU = "YES" ]; then - for i in $(echo $IAUFHRS | sed "s/,/ /g" | rev); do - incfhr=$(printf %03i $i) - if [ $incfhr = "006" ]; then + if [[ ${DOIAU} = "YES" ]]; then + for i in $(echo "${IAUFHRS}" | sed "s/,/ /g" | rev); do + incfhr=$(printf %03i "${i}") + if [[ ${incfhr} = "006" ]]; then increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc" else increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atmi${incfhr}.nc" fi - if [ ! -f $increment_file ]; then - echo "ERROR: DOIAU = $DOIAU, but missing increment file for fhr $incfhr at $increment_file" + if [[ ! -f ${increment_file} ]]; then + echo "ERROR: DOIAU = ${DOIAU}, but missing increment file for fhr ${incfhr} at ${increment_file}" echo "Abort!" exit 1 fi - $NLN $increment_file $DATA/INPUT/fv_increment$i.nc - IAU_INC_FILES="'fv_increment$i.nc',${IAU_INC_FILES:-}" + ${NLN} "${increment_file}" "${DATA}/INPUT/fv_increment${i}.nc" + IAU_INC_FILES="'fv_increment${i}.nc',${IAU_INC_FILES:-}" done read_increment=".false." res_latlon_dynamics="" else increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc" - if [ -f $increment_file ]; then - $NLN $increment_file $DATA/INPUT/fv3_increment.nc + if [[ -f ${increment_file} ]]; then + ${NLN} "${increment_file}" "${DATA}/INPUT/fv3_increment.nc" read_increment=".true." res_latlon_dynamics="fv3_increment.nc" fi @@ -101,53 +85,47 @@ EOF #............................. else ##RERUN export warm_start=".true." - PDYT=$(echo $CDATE_RST | cut -c1-8) - cyct=$(echo $CDATE_RST | cut -c9-10) + PDYT="${CDATE_RST:0:8}" + cyct="${CDATE_RST:8:2}" for file in "${COM_ATMOS_RESTART}/${PDYT}.${cyct}0000."*; do - file2=$(echo $(basename $file)) - file2=$(echo $file2 | cut -d. -f3-) - $NLN $file $DATA/INPUT/$file2 + file2=$(basename "${file}") + file2=$(echo "${file2}" | cut -d. -f3-) + ${NLN} "${file}" "${DATA}/INPUT/${file2}" done - hour_rst=$($NHOUR $CDATE_RST $CDATE) + local hour_rst=$(nhour "${CDATE_RST}" "${current_cycle}") IAU_FHROT=$((IAU_OFFSET+hour_rst)) - if [ $DOIAU = "YES" ]; then + if [[ ${DOIAU} = "YES" ]]; then IAUFHRS=-1 + # Ignore "not used" warning + # shellcheck disable=SC2034 IAU_DELTHRS=0 IAU_INC_FILES="''" fi - - rst_list_rerun="" - xfh=$restart_interval_gfs - while [ $xfh -le $FHMAX_GFS ]; do - rst_list_rerun="$rst_list_rerun $xfh" - xfh=$((xfh+restart_interval_gfs)) - done - restart_interval="$rst_list_rerun" fi #............................. 
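# [Illustrative sketch, not part of the patch] The rerun branch above replaces
# the prod-util NDATE/NHOUR arithmetic on CDATE with bash substring slicing of
# current_cycle plus GNU date, the same idiom used throughout this refactor.
# A minimal equivalent under that assumption; nhour_sketch is a hypothetical
# stand-in for the nhour utility the branch calls:
nhour_sketch() {
  # Hours from cycle ${2} to cycle ${1}, both in YYYYMMDDHH form
  local edate=${1} sdate=${2}
  local esec ssec
  esec=$(date --utc -d "${edate:0:8} ${edate:8:2}" +%s)
  ssec=$(date --utc -d "${sdate:0:8} ${sdate:8:2}" +%s)
  echo $(( (esec - ssec) / 3600 ))
}
# Example: nhour_sketch 2021032712 2021032700 prints 12, so a rerun restarting
# from 2021032712 would get IAU_FHROT=$((IAU_OFFSET+12)).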
else ## cold start for file in "${COM_ATMOS_INPUT}/"*.nc; do - file2=$(echo $(basename $file)) - fsuf=$(echo $file2 | cut -c1-3) - if [ $fsuf = "gfs" -o $fsuf = "sfc" ]; then - $NLN $file $DATA/INPUT/$file2 + file2=$(basename "${file}") + fsuf="${file2:0:3}" + if [[ "${fsuf}" = "gfs" ]] || [[ "${fsuf}" = "sfc" ]]; then + ${NLN} "${file}" "${DATA}/INPUT/${file2}" fi done fi - nfiles=$(ls -1 $DATA/INPUT/* | wc -l) - if [ $nfiles -le 0 ]; then - echo SUB ${FUNCNAME[0]}: Initial conditions must exist in $DATA/INPUT, ABORT! + nfiles=$(ls -1 "${DATA}/INPUT/"* | wc -l) + if [[ ${nfiles} -le 0 ]]; then + echo SUB "${FUNCNAME[0]}": Initial conditions must exist in "${DATA}/INPUT", ABORT! exit 1 fi # If doing IAU, change forecast hours - if [[ "$DOIAU" = "YES" ]]; then + if [[ "${DOIAU}" = "YES" ]]; then FHMAX=$((FHMAX+6)) - if [ $FHMAX_HF -gt 0 ]; then + if [[ ${FHMAX_HF} -gt 0 ]]; then FHMAX_HF=$((FHMAX_HF+6)) fi fi @@ -155,29 +133,27 @@ EOF #-------------------------------------------------------------------------- # Grid and orography data - if [ $cplflx = ".false." ] ; then - $NLN $FIXfv3/$CASE/${CASE}_mosaic.nc $DATA/INPUT/grid_spec.nc + FIXsfc=${FIXsfc:-"${FIXorog}/${CASE}/sfc"} + + if [[ ${cplflx} = ".false." ]] ; then + ${NLN} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/grid_spec.nc" else - $NLN $FIXfv3/$CASE/${CASE}_mosaic.nc $DATA/INPUT/${CASE}_mosaic.nc + ${NLN} "${FIXorog}/${CASE}/${CASE}_mosaic.nc" "${DATA}/INPUT/${CASE}_mosaic.nc" fi - OROFIX=${OROFIX:-"${FIX_DIR}/orog/${CASE}.mx${OCNRES}_frac"} - FIX_SFC=${FIX_SFC:-"${OROFIX}/fix_sfc"} - for n in $(seq 1 $ntiles); do - $NLN ${OROFIX}/oro_${CASE}.mx${OCNRES}.tile${n}.nc $DATA/INPUT/oro_data.tile${n}.nc - $NLN ${OROFIX}/${CASE}_grid.tile${n}.nc $DATA/INPUT/${CASE}_grid.tile${n}.nc + for n in $(seq 1 "${ntiles}"); do + ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/INPUT/oro_data.tile${n}.nc" + ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/INPUT/${CASE}_grid.tile${n}.nc" done - export CCPP_SUITE=${CCPP_SUITE:-"FV3_GFS_v16"} - _suite_file=$HOMEgfs/sorc/ufs_model.fd/FV3/ccpp/suites/suite_${CCPP_SUITE}.xml - - if [ ! -f ${_suite_file} ]; then + _suite_file="${HOMEgfs}/sorc/ufs_model.fd/FV3/ccpp/suites/suite_${CCPP_SUITE}.xml" + if [[ ! -f ${_suite_file} ]]; then echo "FATAL: CCPP Suite file ${_suite_file} does not exist!" exit 2 fi # Scan suite file to determine whether it uses Noah-MP - if [ $(grep noahmpdrv ${_suite_file} | wc -l ) -gt 0 ]; then + if [[ $(grep noahmpdrv "${_suite_file}" | wc -l ) -gt 0 ]]; then lsm="2" lheatstrg=".false." landice=".false." @@ -215,12 +191,20 @@ EOF IEMS=${IEMS:-1} fi + # NoahMP table + local noahmptablefile="${HOMEgfs}/parm/ufs/noahmptable.tbl" + if [[ ! 
-f ${noahmptablefile} ]]; then + echo "FATAL ERROR: missing noahmp table file ${noahmptablefile}" + exit 1 + else + ${NLN} "${noahmptablefile}" "${DATA}/noahmptable.tbl" + fi + # Files for GWD - OROFIX_ugwd=${OROFIX_ugwd:-"${FIX_DIR}/ugwd"} - $NLN ${OROFIX_ugwd}/ugwp_limb_tau.nc $DATA/ugwp_limb_tau.nc - for n in $(seq 1 $ntiles); do - $NLN ${OROFIX_ugwd}/$CASE/${CASE}_oro_data_ls.tile${n}.nc $DATA/INPUT/oro_data_ls.tile${n}.nc - $NLN ${OROFIX_ugwd}/$CASE/${CASE}_oro_data_ss.tile${n}.nc $DATA/INPUT/oro_data_ss.tile${n}.nc + ${NLN} "${FIXugwd}/ugwp_limb_tau.nc" "${DATA}/ugwp_limb_tau.nc" + for n in $(seq 1 "${ntiles}"); do + ${NLN} "${FIXugwd}/${CASE}/${CASE}_oro_data_ls.tile${n}.nc" "${DATA}/INPUT/oro_data_ls.tile${n}.nc" + ${NLN} "${FIXugwd}/${CASE}/${CASE}_oro_data_ss.tile${n}.nc" "${DATA}/INPUT/oro_data_ss.tile${n}.nc" done # GFS standard input data @@ -229,7 +213,7 @@ EOF IAER=${IAER:-1011} ICO2=${ICO2:-2} - if [ ${new_o3forc:-YES} = YES ]; then + if [[ ${new_o3forc:-YES} = YES ]]; then O3FORC=ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77 else O3FORC=global_o3prdlos.f77 @@ -238,65 +222,63 @@ EOF #### # Copy CCN_ACTIVATE.BIN for Thompson microphysics # Thompson microphysics used when CCPP_SUITE set to FV3_GSD_v0 or FV3_GSD_noah - # imp_physics should be 8 + # imp_physics should be 8: #### - if [ $imp_physics -eq 8 ]; then - $NLN $FIX_AM/CCN_ACTIVATE.BIN $DATA/CCN_ACTIVATE.BIN - $NLN $FIX_AM/freezeH2O.dat $DATA/freezeH2O.dat - $NLN $FIX_AM/qr_acr_qgV2.dat $DATA/qr_acr_qgV2.dat - $NLN $FIX_AM/qr_acr_qsV2.dat $DATA/qr_acr_qsV2.dat + if [[ ${imp_physics} -eq 8 ]]; then + ${NLN} "${FIXam}/CCN_ACTIVATE.BIN" "${DATA}/CCN_ACTIVATE.BIN" + ${NLN} "${FIXam}/freezeH2O.dat" "${DATA}/freezeH2O.dat" + ${NLN} "${FIXam}/qr_acr_qgV2.dat" "${DATA}/qr_acr_qgV2.dat" + ${NLN} "${FIXam}/qr_acr_qsV2.dat" "${DATA}/qr_acr_qsV2.dat" fi - $NLN $FIX_AM/${O3FORC} $DATA/global_o3prdlos.f77 - $NLN $FIX_AM/${H2OFORC} $DATA/global_h2oprdlos.f77 - $NLN $FIX_AM/global_solarconstant_noaa_an.txt $DATA/solarconstant_noaa_an.txt - $NLN $FIX_AM/global_sfc_emissivity_idx.txt $DATA/sfc_emissivity_idx.txt + ${NLN} "${FIXam}/${O3FORC}" "${DATA}/global_o3prdlos.f77" + ${NLN} "${FIXam}/${H2OFORC}" "${DATA}/global_h2oprdlos.f77" + ${NLN} "${FIXam}/global_solarconstant_noaa_an.txt" "${DATA}/solarconstant_noaa_an.txt" + ${NLN} "${FIXam}/global_sfc_emissivity_idx.txt" "${DATA}/sfc_emissivity_idx.txt" ## merra2 aerosol climo - if [ $IAER -eq "1011" ]; then - FIX_AER="${FIX_DIR}/aer" + if [[ ${IAER} -eq "1011" ]]; then for month in $(seq 1 12); do - MM=$(printf %02d $month) - $NLN "${FIX_AER}/merra2.aerclim.2003-2014.m${MM}.nc" "aeroclim.m${MM}.nc" + MM=$(printf %02d "${month}") + ${NLN} "${FIXaer}/merra2.aerclim.2003-2014.m${MM}.nc" "aeroclim.m${MM}.nc" done - FIX_LUT="${FIX_DIR}/lut" - $NLN $FIX_LUT/optics_BC.v1_3.dat $DATA/optics_BC.dat - $NLN $FIX_LUT/optics_OC.v1_3.dat $DATA/optics_OC.dat - $NLN $FIX_LUT/optics_DU.v15_3.dat $DATA/optics_DU.dat - $NLN $FIX_LUT/optics_SS.v3_3.dat $DATA/optics_SS.dat - $NLN $FIX_LUT/optics_SU.v1_3.dat $DATA/optics_SU.dat + ${NLN} "${FIXlut}/optics_BC.v1_3.dat" "${DATA}/optics_BC.dat" + ${NLN} "${FIXlut}/optics_OC.v1_3.dat" "${DATA}/optics_OC.dat" + ${NLN} "${FIXlut}/optics_DU.v15_3.dat" "${DATA}/optics_DU.dat" + ${NLN} "${FIXlut}/optics_SS.v3_3.dat" "${DATA}/optics_SS.dat" + ${NLN} "${FIXlut}/optics_SU.v1_3.dat" "${DATA}/optics_SU.dat" fi - $NLN $FIX_AM/global_co2historicaldata_glob.txt $DATA/co2historicaldata_glob.txt - $NLN $FIX_AM/co2monthlycyc.txt $DATA/co2monthlycyc.txt - if [ $ICO2 -gt 0 ]; then - for 
file in $(ls $FIX_AM/fix_co2_proj/global_co2historicaldata*) ; do - $NLN $file $DATA/$(echo $(basename $file) | sed -e "s/global_//g") + ${NLN} "${FIXam}/global_co2historicaldata_glob.txt" "${DATA}/co2historicaldata_glob.txt" + ${NLN} "${FIXam}/co2monthlycyc.txt" "${DATA}/co2monthlycyc.txt" + if [[ ${ICO2} -gt 0 ]]; then + for file in $(ls "${FIXam}/fix_co2_proj/global_co2historicaldata"*) ; do + ${NLN} "${file}" "${DATA}/$(basename "${file//global_}")" done fi - $NLN $FIX_AM/global_climaeropac_global.txt $DATA/aerosol.dat - if [ $IAER -gt 0 ] ; then - for file in $(ls $FIX_AM/global_volcanic_aerosols*) ; do - $NLN $file $DATA/$(echo $(basename $file) | sed -e "s/global_//g") + ${NLN} "${FIXam}/global_climaeropac_global.txt" "${DATA}/aerosol.dat" + if [[ ${IAER} -gt 0 ]] ; then + for file in $(ls "${FIXam}/global_volcanic_aerosols"*) ; do + ${NLN} "${file}" "${DATA}/$(basename "${file//global_}")" done fi # inline post fix files - if [ $WRITE_DOPOST = ".true." ]; then - $NLN $PARM_POST/post_tag_gfs${LEVS} $DATA/itag - $NLN ${FLTFILEGFS:-$PARM_POST/postxconfig-NT-GFS-TWO.txt} $DATA/postxconfig-NT.txt - $NLN ${FLTFILEGFSF00:-$PARM_POST/postxconfig-NT-GFS-F00-TWO.txt} $DATA/postxconfig-NT_FH00.txt - $NLN ${POSTGRB2TBL:-$PARM_POST/params_grib2_tbl_new} $DATA/params_grib2_tbl_new + if [[ ${WRITE_DOPOST} = ".true." ]]; then + ${NLN} "${PARM_POST}/post_tag_gfs${LEVS}" "${DATA}/itag" + ${NLN} "${FLTFILEGFS:-${PARM_POST}/postxconfig-NT-GFS-TWO.txt}" "${DATA}/postxconfig-NT.txt" + ${NLN} "${FLTFILEGFSF00:-${PARM_POST}/postxconfig-NT-GFS-F00-TWO.txt}" "${DATA}/postxconfig-NT_FH00.txt" + ${NLN} "${POSTGRB2TBL:-${PARM_POST}/params_grib2_tbl_new}" "${DATA}/params_grib2_tbl_new" fi #------------------------------------------------------------------ # changeable parameters # dycore definitions - res=$(echo $CASE |cut -c2-5) + res="${CASE:1}" resp=$((res+1)) - npx=$resp - npy=$resp + npx=${resp} + npy=${resp} npz=$((LEVS-1)) io_layout="1,1" #ncols=$(( (${npx}-1)*(${npy}-1)*3/2 )) @@ -306,35 +288,36 @@ EOF LONB_CASE=$((4*res)) LATB_CASE=$((2*res)) - JCAP=${JCAP:-$JCAP_CASE} - LONB=${LONB:-$LONB_CASE} - LATB=${LATB:-$LATB_CASE} + JCAP=${JCAP:-${JCAP_CASE}} + LONB=${LONB:-${LONB_CASE}} + LATB=${LATB:-${LATB_CASE}} - LONB_IMO=${LONB_IMO:-$LONB_CASE} - LATB_JMO=${LATB_JMO:-$LATB_CASE} + LONB_IMO=${LONB_IMO:-${LONB_CASE}} + LATB_JMO=${LATB_JMO:-${LATB_CASE}} # Fix files - FNGLAC=${FNGLAC:-"$FIX_AM/global_glacier.2x2.grb"} - FNMXIC=${FNMXIC:-"$FIX_AM/global_maxice.2x2.grb"} - FNTSFC=${FNTSFC:-"$FIX_AM/RTGSST.1982.2012.monthly.clim.grb"} - FNSNOC=${FNSNOC:-"$FIX_AM/global_snoclim.1.875.grb"} + FNGLAC=${FNGLAC:-"${FIXam}/global_glacier.2x2.grb"} + FNMXIC=${FNMXIC:-"${FIXam}/global_maxice.2x2.grb"} + FNTSFC=${FNTSFC:-"${FIXam}/RTGSST.1982.2012.monthly.clim.grb"} + FNSNOC=${FNSNOC:-"${FIXam}/global_snoclim.1.875.grb"} FNZORC=${FNZORC:-"igbp"} - FNAISC=${FNAISC:-"$FIX_AM/IMS-NIC.blended.ice.monthly.clim.grb"} - FNALBC2=${FNALBC2:-"${FIX_SFC}/${CASE}.facsf.tileX.nc"} - FNTG3C=${FNTG3C:-"${FIX_SFC}/${CASE}.substrate_temperature.tileX.nc"} - FNVEGC=${FNVEGC:-"${FIX_SFC}/${CASE}.vegetation_greenness.tileX.nc"} - FNMSKH=${FNMSKH:-"$FIX_AM/global_slmask.t1534.3072.1536.grb"} - FNVMNC=${FNVMNC:-"${FIX_SFC}/${CASE}.vegetation_greenness.tileX.nc"} - FNVMXC=${FNVMXC:-"${FIX_SFC}/${CASE}.vegetation_greenness.tileX.nc"} - FNSLPC=${FNSLPC:-"${FIX_SFC}/${CASE}.slope_type.tileX.nc"} - FNALBC=${FNALBC:-"${FIX_SFC}/${CASE}.snowfree_albedo.tileX.nc"} - FNVETC=${FNVETC:-"${FIX_SFC}/${CASE}.vegetation_type.tileX.nc"} - 
FNSOTC=${FNSOTC:-"${FIX_SFC}/${CASE}.soil_type.tileX.nc"} - FNABSC=${FNABSC:-"${FIX_SFC}/${CASE}.maximum_snow_albedo.tileX.nc"} - FNSMCC=${FNSMCC:-"$FIX_AM/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"} + FNAISC=${FNAISC:-"${FIXam}/IMS-NIC.blended.ice.monthly.clim.grb"} + FNALBC2=${FNALBC2:-"${FIXsfc}/${CASE}.mx${OCNRES}.facsf.tileX.nc"} + FNTG3C=${FNTG3C:-"${FIXsfc}/${CASE}.mx${OCNRES}.substrate_temperature.tileX.nc"} + FNVEGC=${FNVEGC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} + FNMSKH=${FNMSKH:-"${FIXam}/global_slmask.t1534.3072.1536.grb"} + FNVMNC=${FNVMNC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} + FNVMXC=${FNVMXC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_greenness.tileX.nc"} + FNSLPC=${FNSLPC:-"${FIXsfc}/${CASE}.mx${OCNRES}.slope_type.tileX.nc"} + FNALBC=${FNALBC:-"${FIXsfc}/${CASE}.mx${OCNRES}.snowfree_albedo.tileX.nc"} + FNVETC=${FNVETC:-"${FIXsfc}/${CASE}.mx${OCNRES}.vegetation_type.tileX.nc"} + FNSOTC=${FNSOTC:-"${FIXsfc}/${CASE}.mx${OCNRES}.soil_type.tileX.nc"} + FNSOCC=${FNSOCC:-"${FIXsfc}/${CASE}.mx${OCNRES}.soil_color.tileX.nc"} + FNABSC=${FNABSC:-"${FIXsfc}/${CASE}.mx${OCNRES}.maximum_snow_albedo.tileX.nc"} + FNSMCC=${FNSMCC:-"${FIXam}/global_soilmgldas.statsgo.t${JCAP}.${LONB}.${LATB}.grb"} # If the appropriate resolution fix file is not present, use the highest resolution available (T1534) - [[ ! -f $FNSMCC ]] && FNSMCC="$FIX_AM/global_soilmgldas.statsgo.t1534.3072.1536.grb" + [[ ! -f ${FNSMCC} ]] && FNSMCC="${FIXam}/global_soilmgldas.statsgo.t1534.3072.1536.grb" # NSST Options # nstf_name contains the NSST related parameters @@ -349,7 +332,7 @@ EOF NST_RESV=${NST_RESV-0} ZSEA1=${ZSEA1:-0} ZSEA2=${ZSEA2:-0} - nstf_name=${nstf_name:-"$NST_MODEL,$NST_SPINUP,$NST_RESV,$ZSEA1,$ZSEA2"} + nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"} nst_anl=${nst_anl:-".false."} # blocking factor used for threading and general physics performance @@ -363,7 +346,7 @@ EOF # >0 means new adiabatic pre-conditioning # <0 means older adiabatic pre-conditioning na_init=${na_init:-1} - [[ $warm_start = ".true." ]] && na_init=0 + [[ ${warm_start} = ".true." ]] && na_init=0 # variables for controlling initialization of NCEP/NGGPS ICs filtered_terrain=${filtered_terrain:-".true."} @@ -375,11 +358,11 @@ EOF chksum_debug=${chksum_debug:-".false."} print_freq=${print_freq:-6} - if [ ${TYPE} = "nh" ]; then # non-hydrostatic options + if [[ ${TYPE} = "nh" ]]; then # non-hydrostatic options hydrostatic=".false." phys_hydrostatic=".false." # enable heating in hydrostatic balance in non-hydrostatic simulation use_hydro_pressure=".false." # use hydrostatic pressure for physics - if [ $warm_start = ".true." ]; then + if [[ ${warm_start} = ".true." ]]; then make_nh=".false." # restarts contain non-hydrostatic state else make_nh=".true." # re-initialize non-hydrostatic state @@ -397,12 +380,12 @@ EOF # time step parameters in FV3 k_split=${k_split:-2} - n_split=${n_split:-6} + n_split=${n_split:-5} - if [ $(echo $MONO | cut -c-4) = "mono" ]; then # monotonic options + if [[ "${MONO:0:4}" = "mono" ]]; then # monotonic options d_con=${d_con_mono:-"0."} do_vort_damp=".false." - if [ ${TYPE} = "nh" ]; then # non-hydrostatic + if [[ ${TYPE} = "nh" ]]; then # non-hydrostatic hord_mt=${hord_mt_nh_mono:-"10"} hord_xx=${hord_xx_nh_mono:-"10"} else # hydrostatic @@ -413,7 +396,7 @@ EOF else # non-monotonic options d_con=${d_con_nonmono:-"1."} do_vort_damp=".true." 
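# [Illustrative sketch, not part of the patch] The recurring cleanup in these
# hunks swaps forked "echo | cut" pipelines for native bash parameter
# expansion. The *_example variables below are hypothetical, not workflow
# settings; both forms yield identical strings:
MONO_example="monotonic"
CDATE_example="2021032700"
prefix_old=$(echo "${MONO_example}" | cut -c-4)   # forks a subshell and two processes
prefix_new="${MONO_example:0:4}"                  # pure bash, same value: "mono"
PDY_example="${CDATE_example:0:8}"                # "20210327", replaces cut -c1-8
cyc_example="${CDATE_example:8:2}"                # "00", replaces cut -c9-10
[[ "${prefix_old}" == "${prefix_new}" ]] && echo "identical results"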
- if [ ${TYPE} = "nh" ]; then # non-hydrostatic + if [[ ${TYPE} = "nh" ]]; then # non-hydrostatic hord_mt=${hord_mt_nh_nonmono:-"5"} hord_xx=${hord_xx_nh_nonmono:-"5"} else # hydrostatic @@ -422,18 +405,18 @@ EOF fi fi - if [ $(echo $MONO | cut -c-4) != "mono" -a $TYPE = "nh" ]; then + if [[ "${MONO:0:4}" != "mono" ]] && [[ "${TYPE}" = "nh" ]]; then vtdm4=${vtdm4_nh_nonmono:-"0.06"} else vtdm4=${vtdm4:-"0.05"} fi - if [ $warm_start = ".true." ]; then # warm start from restart file + if [[ ${warm_start} = ".true." ]]; then # warm start from restart file nggps_ic=".false." ncep_ic=".false." external_ic=".false." mountain=".true." - if [ $read_increment = ".true." ]; then # add increment on the fly to the restarts + if [[ ${read_increment} = ".true." ]]; then # add increment on the fly to the restarts res_latlon_dynamics="fv3_increment.nc" else res_latlon_dynamics='""' @@ -449,259 +432,217 @@ EOF fi # Stochastic Physics Options - if [ ${SET_STP_SEED:-"YES"} = "YES" ]; then - ISEED_SKEB=$((CDATE*1000 + MEMBER*10 + 1)) - ISEED_SHUM=$((CDATE*1000 + MEMBER*10 + 2)) - ISEED_SPPT=$((CDATE*1000 + MEMBER*10 + 3)) - ISEED_CA=$(( (CDATE*1000 + MEMBER*10 + 4) % 2147483647 )) - ISEED_LNDP=$(( (CDATE*1000 + MEMBER*10 + 5) % 2147483647 )) + if [[ ${SET_STP_SEED:-"YES"} = "YES" ]]; then + ISEED_SKEB=$((current_cycle*1000 + MEMBER*10 + 1)) + ISEED_SHUM=$((current_cycle*1000 + MEMBER*10 + 2)) + ISEED_SPPT=$((current_cycle*1000 + MEMBER*10 + 3)) + ISEED_CA=$(( (current_cycle*1000 + MEMBER*10 + 4) % 2147483647 )) + ISEED_LNDP=$(( (current_cycle*1000 + MEMBER*10 + 5) % 2147483647 )) else ISEED=${ISEED:-0} fi - if [ $DO_SKEB = "YES" ]; then + if [[ ${DO_SKEB} = "YES" ]]; then do_skeb=".true." fi - if [ $DO_SPPT = "YES" ]; then + if [[ ${DO_SPPT} = "YES" ]]; then do_sppt=".true." fi - if [ $DO_SHUM = "YES" ]; then + if [[ ${DO_SHUM} = "YES" ]]; then do_shum=".true." fi - if [ $DO_LAND_PERT = "YES" ]; then + if [[ ${DO_LAND_PERT} = "YES" ]]; then lndp_type=${lndp_type:-2} LNDP_TAU=${LNDP_TAU:-21600} LNDP_SCALE=${LNDP_SCALE:-500000} - ISEED_LNDP=${ISEED_LNDP:-$ISEED} + ISEED_LNDP=${ISEED_LNDP:-${ISEED}} lndp_var_list=${lndp_var_list:-"'smc', 'vgf',"} lndp_prt_list=${lndp_prt_list:-"0.2,0.1"} - n_var_lndp=$(echo "$lndp_var_list" | wc -w) + n_var_lndp=$(echo "${lndp_var_list}" | wc -w) fi - JCAP_STP=${JCAP_STP:-$JCAP_CASE} - LONB_STP=${LONB_STP:-$LONB_CASE} - LATB_STP=${LATB_STP:-$LATB_CASE} - cd $DATA - if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p ${COM_ATMOS_HISTORY}; fi - if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -p ${COM_ATMOS_MASTER}; fi - if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then - fhr=$FHMIN - for fhr in $OUTPUT_FH; do - FH3=$(printf %03i $fhr) - FH2=$(printf %02i $fhr) - atmi=atmf${FH3}.nc - sfci=sfcf${FH3}.nc - logi=log.atm.f${FH3} - pgbi=GFSPRS.GrbF${FH2} - flxi=GFSFLX.GrbF${FH2} - atmo=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc - sfco=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc - logo=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt - pgbo=${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3} - flxo=${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2 - eval $NLN $atmo $atmi - eval $NLN $sfco $sfci - eval $NLN $logo $logi - if [ $WRITE_DOPOST = ".true." ]; then - eval $NLN $pgbo $pgbi - eval $NLN $flxo $flxi + JCAP_STP=${JCAP_STP:-${JCAP_CASE}} + LONB_STP=${LONB_STP:-${LONB_CASE}} + LATB_STP=${LATB_STP:-${LATB_CASE}} + cd "${DATA}" || exit 1 + if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p "${COM_ATMOS_HISTORY}"; fi + if [[ ! 
-d ${COM_ATMOS_MASTER} ]]; then mkdir -p "${COM_ATMOS_MASTER}"; fi + if [[ "${QUILTING}" = ".true." ]] && [[ "${OUTPUT_GRID}" = "gaussian_grid" ]]; then + for fhr in ${FV3_OUTPUT_FH}; do + local FH3=$(printf %03i "${fhr}") + local FH2=$(printf %02i "${fhr}") + ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc" + ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc" + ${NLN} "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}" + if [[ ${WRITE_DOPOST} = ".true." ]]; then + ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}" + ${NLN} "${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}" fi done - else - for n in $(seq 1 $ntiles); do - eval $NLN nggps2d.tile${n}.nc ${COM_ATMOS_HISTORY}/nggps2d.tile${n}.nc - eval $NLN nggps3d.tile${n}.nc ${COM_ATMOS_HISTORY}/nggps3d.tile${n}.nc - eval $NLN grid_spec.tile${n}.nc ${COM_ATMOS_HISTORY}/grid_spec.tile${n}.nc - eval $NLN atmos_static.tile${n}.nc ${COM_ATMOS_HISTORY}/atmos_static.tile${n}.nc - eval $NLN atmos_4xdaily.tile${n}.nc ${COM_ATMOS_HISTORY}/atmos_4xdaily.tile${n}.nc + else # TODO: Is this even valid anymore? + for n in $(seq 1 "${ntiles}"); do + ${NLN} "nggps2d.tile${n}.nc" "${COM_ATMOS_HISTORY}/nggps2d.tile${n}.nc" + ${NLN} "nggps3d.tile${n}.nc" "${COM_ATMOS_HISTORY}/nggps3d.tile${n}.nc" + ${NLN} "grid_spec.tile${n}.nc" "${COM_ATMOS_HISTORY}/grid_spec.tile${n}.nc" + ${NLN} "atmos_static.tile${n}.nc" "${COM_ATMOS_HISTORY}/atmos_static.tile${n}.nc" + ${NLN} "atmos_4xdaily.tile${n}.nc" "${COM_ATMOS_HISTORY}/atmos_4xdaily.tile${n}.nc" done fi } -FV3_GFS_nml(){ +FV3_nml(){ # namelist output for a certain component - echo SUB ${FUNCNAME[0]}: Creating name lists and model configure file for FV3 + echo "SUB ${FUNCNAME[0]}: Creating name lists and model configure file for FV3" # Call child scripts in current script directory - source $SCRIPTDIR/parsing_namelists_FV3.sh + source "${HOMEgfs}/ush/parsing_namelists_FV3.sh" FV3_namelists - echo SUB ${FUNCNAME[0]}: FV3 name lists and model configure file created + echo "SUB ${FUNCNAME[0]}: FV3 name lists and model configure file created" } -DATM_nml(){ - source $SCRIPTDIR/parsing_namelists_DATM.sh - DATM_namelists - echo SUB ${FUNCNAME[0]}: DATM name lists and model configure file created -} - -data_out_GFS() { - # data in take for FV3GFS - # Arguments: None - # - #------------------------------------------------------------------ - # make symbolic links to write forecast files directly in memdir +FV3_out() { echo "SUB ${FUNCNAME[0]}: copying output data for FV3" - #------------------------------------------------------------------ - if [ $SEND = "YES" ]; then - # Copy model restart files - if [[ ${RUN} =~ "gdas" ]] && (( rst_invt1 > 0 )); then - cd $DATA/RESTART - mkdir -p "${COM_ATMOS_RESTART}" - for rst_int in $restart_interval ; do - if [ $rst_int -ge 0 ]; then - RDATE=$($NDATE +$rst_int $CDATE) - rPDY=$(echo $RDATE | cut -c1-8) - rcyc=$(echo $RDATE | cut -c9-10) - for file in "${rPDY}.${rcyc}0000."* ; do - ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" - done - fi + # Copy FV3 restart files + if [[ ${RUN} =~ "gdas" ]]; then + cd "${DATA}/RESTART" + mkdir -p "${COM_ATMOS_RESTART}" + local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) + while [[ ${idate} -le ${forecast_end_cycle} ]]; do + for file in "${idate:0:8}.${idate:8:2}0000."*; do + ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" done - if [ $DOIAU = "YES" ] || 
[ $DOIAU_coldstart = "YES" ]; then - # if IAU is on, save restart at start of IAU window - rst_iau=$(( ${IAU_OFFSET} - (${IAU_DELTHRS}/2) )) - if [ $rst_iau -lt 0 ];then - rst_iau=$(( (${IAU_DELTHRS}) - ${IAU_OFFSET} )) - fi - RDATE=$($NDATE +$rst_iau $CDATE) - rPDY=$(echo $RDATE | cut -c1-8) - rcyc=$(echo $RDATE | cut -c9-10) - for file in "${rPDY}.${rcyc}0000."* ; do - ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" - done - fi - elif [[ ${RUN} =~ "gfs" ]]; then - ${NCP} "${DATA}/input.nml" "${COM_ATMOS_HISTORY}/input.nml" - fi + local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H) + done + else + # No need to copy FV3 restart files when RUN=gfs or gefs + ${NCP} "${DATA}/input.nml" "${COM_CONF}/ufs.input.nml" + ${NCP} "${DATA}/model_configure" "${COM_CONF}/ufs.model_configure" + ${NCP} "${DATA}/ufs.configure" "${COM_CONF}/ufs.ufs.configure" + ${NCP} "${DATA}/diag_table" "${COM_CONF}/ufs.diag_table" fi - echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied" } - WW3_postdet() { echo "SUB ${FUNCNAME[0]}: Linking input data for WW3" COMPONENTwave=${COMPONENTwave:-${RUN}wave} #Link mod_def files for wave grids - if [ $waveMULTIGRID = ".true." ]; then - array=($WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD) - echo "Wave Grids: $WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD" - grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') + if [[ ${waveMULTIGRID} = ".true." ]]; then + local array=(${WAVECUR_FID} ${WAVEICE_FID} ${WAVEWND_FID} ${waveuoutpGRD} ${waveGRD} ${waveesmfGRD}) + echo "Wave Grids: ${WAVECUR_FID} ${WAVEICE_FID} ${WAVEWND_FID} ${waveuoutpGRD} ${waveGRD} ${waveesmfGRD}" + local grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') for wavGRD in ${grdALL}; do ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${wavGRD}" "${DATA}/mod_def.${wavGRD}" done - else - #if shel, only 1 waveGRD which is linked to mod_def.ww3 + else + #if shel, only 1 waveGRD which is linked to mod_def.ww3 ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${waveGRD}" "${DATA}/mod_def.ww3" fi #if wave mesh is not the same as the ocn/ice mesh, link it in the file - comparemesh=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} - if [ "$MESH_WAV" = "$comparemesh" ]; then + local comparemesh=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} + if [[ "${MESH_WAV}" = "${comparemesh}" ]]; then echo "Wave is on same mesh as ocean/ice" else - $NLN -sf $FIXwave/$MESH_WAV $DATA/ + ${NLN} "${FIXwave}/${MESH_WAV}" "${DATA}/" fi export wavprfx=${RUNwave}${WAV_MEMBER:-} #Copy initial condition files: - for wavGRD in $waveGRD ; do - if [ $warm_start = ".true." -o $RERUN = "YES" ]; then - if [ $RERUN = "NO" ]; then - waverstfile=${COM_WAVE_RESTART_PREV}/${sPDY}.${scyc}0000.restart.${wavGRD} - else - waverstfile=${COM_WAVE_RESTART}/${PDYT}.${cyct}0000.restart.${wavGRD} + for wavGRD in ${waveGRD} ; do + if [[ "${warm_start}" = ".true." ]] || [[ "${RERUN}" = "YES" ]]; then + if [[ ${RERUN} = "NO" ]]; then + local waverstfile="${COM_WAVE_RESTART_PREV}/${sPDY}.${scyc}0000.restart.${wavGRD}" + else + local waverstfile="${COM_WAVE_RESTART}/${PDYT}.${cyct}0000.restart.${wavGRD}" fi - else - waverstfile=${COM_WAVE_RESTART}/${sPDY}.${scyc}0000.restart.${wavGRD} + else + local waverstfile="${COM_WAVE_RESTART}/${sPDY}.${scyc}0000.restart.${wavGRD}" fi - if [ ! -f ${waverstfile} ]; then - if [ $RERUN = "NO" ]; then + if [[ !
-f ${waverstfile} ]]; then + if [[ ${RERUN} = "NO" ]]; then echo "WARNING: NON-FATAL ERROR wave IC is missing, will start from rest" else echo "ERROR: Wave IC is missing in RERUN, exiting." exit 1 fi else - if [ $waveMULTIGRID = ".true." ]; then - $NLN ${waverstfile} $DATA/restart.${wavGRD} + if [[ ${waveMULTIGRID} = ".true." ]]; then + ${NLN} "${waverstfile}" "${DATA}/restart.${wavGRD}" else - $NLN ${waverstfile} $DATA/restart.ww3 + ${NLN} "${waverstfile}" "${DATA}/restart.ww3" fi fi done - if [ $waveMULTIGRID = ".true." ]; then - for wavGRD in $waveGRD ; do + if [[ ${waveMULTIGRID} = ".true." ]]; then + for wavGRD in ${waveGRD} ; do ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${wavGRD}.${PDY}${cyc}" "log.${wavGRD}" done - else + else ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3" fi - if [ "$WW3ICEINP" = "YES" ]; then - wavicefile="${COM_WAVE_PREP}/${RUNwave}.${WAVEICE_FID}.${cycle}.ice" - if [ ! -f $wavicefile ]; then + if [[ "${WW3ICEINP}" = "YES" ]]; then + local wavicefile="${COM_WAVE_PREP}/${RUNwave}.${WAVEICE_FID}.${cycle}.ice" + if [[ ! -f ${wavicefile} ]]; then echo "ERROR: WW3ICEINP = ${WW3ICEINP}, but missing ice file" echo "Abort!" exit 1 fi - $NLN ${wavicefile} $DATA/ice.${WAVEICE_FID} + ${NLN} "${wavicefile}" "${DATA}/ice.${WAVEICE_FID}" fi - if [ "$WW3CURINP" = "YES" ]; then - wavcurfile="${COM_WAVE_PREP}/${RUNwave}.${WAVECUR_FID}.${cycle}.cur" - if [ ! -f $wavcurfile ]; then + if [[ "${WW3CURINP}" = "YES" ]]; then + local wavcurfile="${COM_WAVE_PREP}/${RUNwave}.${WAVECUR_FID}.${cycle}.cur" + if [[ ! -f ${wavcurfile} ]]; then echo "ERROR: WW3CURINP = ${WW3CURINP}, but missing current file" echo "Abort!" exit 1 fi - $NLN $wavcurfile $DATA/current.${WAVECUR_FID} + ${NLN} "${wavcurfile}" "${DATA}/current.${WAVECUR_FID}" fi if [[ ! -d ${COM_WAVE_HISTORY} ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi # Link output files - cd $DATA - if [ $waveMULTIGRID = ".true." ]; then + cd "${DATA}" + if [[ ${waveMULTIGRID} = ".true." ]]; then ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3" fi # Loop for gridded output (uses FHINC) - fhr=$FHMIN_WAV - while [ $fhr -le $FHMAX_WAV ]; do - YMDH=$($NDATE $fhr $CDATE) - YMD=$(echo $YMDH | cut -c1-8) - HMS="$(echo $YMDH | cut -c9-10)0000" - if [ $waveMULTIGRID = ".true." ]; then + local fhr vdate FHINC wavGRD + fhr=${FHMIN_WAV} + while [[ ${fhr} -le ${FHMAX_WAV} ]]; do + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + if [[ ${waveMULTIGRID} = ".true." 
]]; then for wavGRD in ${waveGRD} ; do - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${wavGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_grd.${wavGRD}" + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${wavGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_grd.${wavGRD}" done - else - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_grd.ww3" + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_grd.ww3" fi - FHINC=$FHOUT_WAV - if [ $FHMAX_HF_WAV -gt 0 -a $FHOUT_HF_WAV -gt 0 -a $fhr -lt $FHMAX_HF_WAV ]; then - FHINC=$FHOUT_HF_WAV + FHINC=${FHOUT_WAV} + if (( FHMAX_HF_WAV > 0 && FHOUT_HF_WAV > 0 && fhr < FHMAX_HF_WAV )); then + FHINC=${FHOUT_HF_WAV} fi fhr=$((fhr+FHINC)) done # Loop for point output (uses DTPNT) - fhr=$FHMIN_WAV - while [ $fhr -le $FHMAX_WAV ]; do - YMDH=$($NDATE $fhr $CDATE) - YMD=$(echo $YMDH | cut -c1-8) - HMS="$(echo $YMDH | cut -c9-10)0000" - if [ $waveMULTIGRID = ".true." ]; then - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_pnt.${waveuoutpGRD}" - else - ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_pnt.ww3" + fhr=${FHMIN_WAV} + while [[ ${fhr} -le ${FHMAX_WAV} ]]; do + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + if [[ ${waveMULTIGRID} = ".true." ]]; then + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_pnt.${waveuoutpGRD}" + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${vdate:0:8}.${vdate:8:2}0000" "${DATA}/${vdate:0:8}.${vdate:8:2}0000.out_pnt.ww3" fi - FHINC=$FHINCP_WAV + FHINC=${FHINCP_WAV} fhr=$((fhr+FHINC)) done } @@ -709,17 +650,17 @@ WW3_postdet() { WW3_nml() { echo "SUB ${FUNCNAME[0]}: Copying input files for WW3" WAV_MOD_TAG=${RUN}wave${waveMEMB} - if [ "${USE_WAV_RMP:-YES}" = "YES" ]; then - if (( $( ls -1 $FIXwave/rmp_src_to_dst_conserv_* 2> /dev/null | wc -l) > 0 )); then - for file in $(ls $FIXwave/rmp_src_to_dst_conserv_*) ; do - $NLN $file $DATA/ + if [[ "${USE_WAV_RMP:-YES}" = "YES" ]]; then + if (( $( ls -1 "${FIXwave}/rmp_src_to_dst_conserv_"* 2> /dev/null | wc -l) > 0 )); then + for file in $(ls "${FIXwave}/rmp_src_to_dst_conserv_"*) ; do + ${NLN} "${file}" "${DATA}/" done else echo 'FATAL ERROR : No rmp precomputed nc files found for wave model' exit 4 fi fi - source $SCRIPTDIR/parsing_namelists_WW3.sh + source "${HOMEgfs}/ush/parsing_namelists_WW3.sh" WW3_namelists } @@ -730,7 +671,7 @@ WW3_out() { CPL_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for general cpl fields" - if [ $esmf_profile = ".true." ]; then + if [[ "${esmf_profile:-}" = ".true." 
]]; then ${NCP} "${DATA}/ESMF_Profile.summary" "${COM_ATMOS_HISTORY}/ESMF_Profile.summary" fi } @@ -739,12 +680,12 @@ MOM6_postdet() { echo "SUB ${FUNCNAME[0]}: MOM6 after run type determination" # Copy MOM6 ICs - ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" - case $OCNRES in + ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" + case ${OCNRES} in "025") for nn in $(seq 1 4); do - if [[ -f "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" ]]; then - ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" + if [[ -f "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" ]]; then + ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" fi done ;; @@ -760,19 +701,19 @@ MOM6_postdet() { fi # Copy MOM6 fixed files - $NCP -pf $FIXmom/$OCNRES/* $DATA/INPUT/ + ${NCP} "${FIXmom}/${OCNRES}/"* "${DATA}/INPUT/" # Copy coupled grid_spec - spec_file="$FIX_DIR/cpl/a${CASE}o${OCNRES}/grid_spec.nc" - if [ -s $spec_file ]; then - $NCP -pf $spec_file $DATA/INPUT/ + spec_file="${FIXcpl}/a${CASE}o${OCNRES}/grid_spec.nc" + if [[ -s ${spec_file} ]]; then + ${NCP} "${spec_file}" "${DATA}/INPUT/" else - echo "FATAL ERROR: grid_spec file '$spec_file' does not exist" + echo "FATAL ERROR: grid_spec file '${spec_file}' does not exist" exit 3 fi # Copy mediator restart files to RUNDIR # TODO: mediator should have its own CMEPS_postdet() function - if [[ $warm_start = ".true." ]]; then + if [[ ${warm_start} = ".true." ]]; then local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc" if [[ -f "${mediator_file}" ]]; then ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc" @@ -794,10 +735,10 @@ MOM6_postdet() { # If using stochastic parameterizations, create a seed that does not exceed the # largest signed integer - if [ $DO_OCN_SPPT = "YES" -o $DO_OCN_PERT_EPBL = "YES" ]; then - if [ ${SET_STP_SEED:-"YES"} = "YES" ]; then - ISEED_OCNSPPT=$(( (CDATE*1000 + MEMBER*10 + 6) % 2147483647 )) - ISEED_EPBL=$(( (CDATE*1000 + MEMBER*10 + 7) % 2147483647 )) + if [[ "${DO_OCN_SPPT}" = "YES" ]] || [[ "${DO_OCN_PERT_EPBL}" = "YES" ]]; then + if [[ ${SET_STP_SEED:-"YES"} = "YES" ]]; then + ISEED_OCNSPPT=$(( (current_cycle*1000 + MEMBER*10 + 6) % 2147483647 )) + ISEED_EPBL=$(( (current_cycle*1000 + MEMBER*10 + 7) % 2147483647 )) else ISEED=${ISEED:-0} fi @@ -807,77 +748,62 @@ MOM6_postdet() { [[ ! -d ${COM_OCEAN_HISTORY} ]] && mkdir -p "${COM_OCEAN_HISTORY}" # Link output files - if [[ "${RUN}" =~ "gfs" ]]; then + if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then # Link output files for RUN = gfs # TODO: get requirements on what files need to be written out and what these dates here are and what they mean - export ENSMEM=${ENSMEM:-01} - export IDATE=$CDATE - fhrlst=${OUTPUT_FH} - if [[ ! -d ${COM_OCEAN_HISTORY} ]]; then mkdir -p ${COM_OCEAN_HISTORY}; fi + if [[ !
-d ${COM_OCEAN_HISTORY} ]]; then mkdir -p "${COM_OCEAN_HISTORY}"; fi - for fhr in $fhrlst; do - if [ $fhr = 'anl' ]; then # Looking at OUTPUT_FH, this is never true, TODO: remove this block - continue - fi - if [ -z ${last_fhr:-} ]; then - last_fhr=$fhr + # Looping over FV3 output hours + # TODO: Need to define MOM6_OUTPUT_FH and control at some point for issue #1629 + for fhr in ${FV3_OUTPUT_FH}; do + if [[ -z ${last_fhr:-} ]]; then + local last_fhr=${fhr} continue fi (( interval = fhr - last_fhr )) (( midpoint = last_fhr + interval/2 )) - VDATE=$($NDATE $fhr $IDATE) - YYYY=$(echo $VDATE | cut -c1-4) - MM=$(echo $VDATE | cut -c5-6) - DD=$(echo $VDATE | cut -c7-8) - HH=$(echo $VDATE | cut -c9-10) - SS=$((10#$HH*3600)) - - VDATE_MID=$($NDATE $midpoint $IDATE) - YYYY_MID=$(echo $VDATE_MID | cut -c1-4) - MM_MID=$(echo $VDATE_MID | cut -c5-6) - DD_MID=$(echo $VDATE_MID | cut -c7-8) - HH_MID=$(echo $VDATE_MID | cut -c9-10) - SS_MID=$((10#$HH_MID*3600)) - - source_file="ocn_${YYYY_MID}_${MM_MID}_${DD_MID}_${HH_MID}.nc" - dest_file="ocn${VDATE}.${ENSMEM}.${IDATE}.nc" - ${NLN} ${COM_OCEAN_HISTORY}/${dest_file} ${DATA}/${source_file} - - source_file="ocn_daily_${YYYY}_${MM}_${DD}.nc" - dest_file=${source_file} - if [ ! -a "${DATA}/${source_file}" ]; then - $NLN ${COM_OCEAN_HISTORY}/${dest_file} ${DATA}/${source_file} + + local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + local vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H) + + + # Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM + local source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc" + local dest_file="ocn${vdate}.${ENSMEM}.${current_cycle}.nc" + ${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}" + + local source_file="ocn_daily_${vdate:0:4}_${vdate:4:2}_${vdate:6:2}.nc" + local dest_file=${source_file} + if [[ ! 
-a "${DATA}/${source_file}" ]]; then + ${NLN} "${COM_OCEAN_HISTORY}/${dest_file}" "${DATA}/${source_file}" fi - last_fhr=$fhr + local last_fhr=${fhr} done elif [[ "${RUN}" =~ "gdas" ]]; then # Link output files for RUN = gdas # Save MOM6 backgrounds - for fhr in ${OUTPUT_FH}; do - local idatestr=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${fhr} hours" +%Y_%m_%d_%H) + for fhr in ${FV3_OUTPUT_FH}; do + local idatestr=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y_%m_%d_%H) local fhr3=$(printf %03i "${fhr}") - $NLN "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc" + ${NLN} "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc" done fi mkdir -p "${COM_OCEAN_RESTART}" - # end point restart does not have a timestamp, calculate - local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) - # Link ocean restarts from DATA to COM # Coarser than 1/2 degree has a single MOM restart - $NLN "${COM_OCEAN_RESTART}/${rdate:0:8}.${rdate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" + ${NLN} "${COM_OCEAN_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" # 1/4 degree resolution has 4 additional restarts case ${OCNRES} in "025") for nn in $(seq 1 4); do - $NLN "${COM_OCEAN_RESTART}/${rdate:0:8}.${rdate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" + ${NLN} "${COM_OCEAN_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" done ;; *) @@ -885,19 +811,18 @@ MOM6_postdet() { esac # Loop over restart_interval frequency and link restarts from DATA to COM - local res_int=$(echo $restart_interval | cut -d' ' -f1) # If this is a list, get the frequency. # This is bound to break w/ IAU - local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) - while [[ $idate -lt $rdate ]]; do + local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) + while [[ ${idate} -lt ${forecast_end_cycle} ]]; do local idatestr=$(date +%Y-%m-%d-%H -d "${idate:0:8} ${idate:8:2}") - $NLN "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" + ${NLN} "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" case ${OCNRES} in "025") for nn in $(seq 1 4); do - $NLN "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" + ${NLN} "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" done ;; esac - local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) + local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H) done # TODO: mediator should have its own CMEPS_postdet() function @@ -907,12 +832,12 @@ MOM6_postdet() { # Instead of linking, copy the mediator files after the model finishes #local COMOUTmed="${ROTDIR}/${RUN}.${PDY}/${cyc}/med" #mkdir -p "${COMOUTmed}/RESTART" - #local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) - #while [[ $idate -le $rdate ]]; do + #local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) + #while [[ ${idate} -le ${forecast_end_cycle} ]]; do # local seconds=$(to_seconds ${idate:8:2}0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds # local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" - # $NLN 
"${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" - # local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) + # ${NLN} "${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" + # local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H) #done echo "SUB ${FUNCNAME[0]}: MOM6 input data linked/copied" @@ -921,7 +846,7 @@ MOM6_postdet() { MOM6_nml() { echo "SUB ${FUNCNAME[0]}: Creating name list for MOM6" - source $SCRIPTDIR/parsing_namelists_MOM6.sh + source "${HOMEgfs}/ush/parsing_namelists_MOM6.sh" MOM6_namelists } @@ -930,53 +855,39 @@ MOM6_out() { # Copy MOM_input from DATA to COM_OCEAN_INPUT after the forecast is run (and successfull) if [[ ! -d ${COM_OCEAN_INPUT} ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi - ${NCP} "${DATA}/INPUT/MOM_input" "${COM_OCEAN_INPUT}/" + ${NCP} "${DATA}/INPUT/MOM_input" "${COM_CONF}/ufs.MOM_input" # TODO: mediator should have its own CMEPS_out() function # Copy mediator restarts from DATA to COM # Linking mediator restarts to COM causes the model to fail with a message. # See MOM6_postdet() function for error message mkdir -p "${COM_MED_RESTART}" - local res_int=$(echo $restart_interval | cut -d' ' -f1) # If this is a list, get the frequency. # This is bound to break w/ IAU - local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) - local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) - while [[ $idate -le $rdate ]]; do - local seconds=$(to_seconds ${idate:8:2}0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds + local idate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) + while [[ ${idate} -le ${forecast_end_cycle} ]]; do + local seconds=$(to_seconds "${idate:8:2}"0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" local mediator_file="${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" if [[ -f ${mediator_file} ]]; then - $NCP "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" + ${NCP} "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" else echo "Mediator restart ${mediator_file} not found." 
fi - local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) + local idate=$(date --utc -d "${idate:0:8} ${idate:8:2} + ${restart_interval} hours" +%Y%m%d%H) done } CICE_postdet() { echo "SUB ${FUNCNAME[0]}: CICE after run type determination" - # TODO: move configuration settings to config.ice - - # TODO: These need to be calculated in the parsing_namelists_CICE.sh script CICE_namelists() function and set as local - year=$(echo $CDATE|cut -c 1-4) - month=$(echo $CDATE|cut -c 5-6) - day=$(echo $CDATE|cut -c 7-8) - sec=$(echo $CDATE|cut -c 9-10) - stepsperhr=$((3600/$ICETIM)) - nhours=$($NHOUR $CDATE ${year}010100) - steps=$((nhours*stepsperhr)) - npt=$((FHMAX*$stepsperhr)) # Need this in order for dump_last to work - # TODO: These settings should be elevated to config.ice histfreq_n=${histfreq_n:-6} - dumpfreq_n=${dumpfreq_n:-1000} # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by nems.configure + dumpfreq_n=${dumpfreq_n:-1000} # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by ufs.configure dumpfreq=${dumpfreq:-"y"} # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years" if [[ "${RUN}" =~ "gdas" ]]; then - cice_hist_avg=".false." # DA needs instantaneous - elif [[ "${RUN}" =~ "gfs" ]]; then - cice_hist_avg=".true." # P8 wants averaged over histfreq_n + cice_hist_avg=".false., .false., .false., .false., .false." # DA needs instantaneous + else + cice_hist_avg=".true., .true., .true., .true., .true." # P8 wants averaged over histfreq_n fi FRAZIL_FWSALT=${FRAZIL_FWSALT:-".true."} @@ -989,104 +900,91 @@ CICE_postdet() { # -- if false, re-initialize level ponds to zero (if runtype=initial or continue) restart_pond_lvl=${restart_pond_lvl:-".false."} - ICERES=${ICERES:-"025"} # TODO: similar to MOM_out, lift this higher - ice_grid_file=${ice_grid_file:-"grid_cice_NEMS_mx${ICERES}.nc"} ice_kmt_file=${ice_kmt_file:-"kmtu_cice_NEMS_mx${ICERES}.nc"} export MESH_OCN_ICE=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} - # Copy/link CICE IC to DATA - if [[ "${warm_start}" = ".true." ]]; then - cice_ana="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model_anl.res.nc" - if [[ -e ${cice_ana} ]]; then - ${NLN} "${cice_ana}" "${DATA}/cice_model.res.nc" - else - ${NLN} "${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" - fi - else # cold start are typically SIS2 restarts obtained from somewhere else e.g. CPC - $NLN "${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + # Copy CICE ICs + echo "Link CICE ICs" + cice_restart_file="${COM_ICE_RESTART_PREV}/${sPDY}.${scyc}0000.cice_model.res.nc" + if [[ ! -f "${cice_restart_file}" ]]; then + echo "FATAL ERROR: CICE restart file not found at '${cice_restart_file}', ABORT!" 
+ exit 112 + else + ${NLN} "${cice_restart_file}" "${DATA}/cice_model.res.nc" fi - # TODO: add a check for the restarts to exist, if not, exit eloquently rm -f "${DATA}/ice.restart_file" - touch "${DATA}/ice.restart_file" - echo "${DATA}/cice_model.res.nc" >> "${DATA}/ice.restart_file" + echo "${DATA}/cice_model.res.nc" > "${DATA}/ice.restart_file" echo "Link CICE fixed files" - $NLN -sf $FIXcice/$ICERES/${ice_grid_file} $DATA/ - $NLN -sf $FIXcice/$ICERES/${ice_kmt_file} $DATA/ - $NLN -sf $FIXcice/$ICERES/$MESH_OCN_ICE $DATA/ + ${NLN} "${FIXcice}/${ICERES}/${ice_grid_file}" "${DATA}/" + ${NLN} "${FIXcice}/${ICERES}/${ice_kmt_file}" "${DATA}/" + ${NLN} "${FIXcice}/${ICERES}/${MESH_OCN_ICE}" "${DATA}/" # Link CICE output files - if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi - mkdir -p ${COM_ICE_RESTART} + if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi + mkdir -p "${COM_ICE_RESTART}" - if [[ "${RUN}" =~ "gfs" ]]; then + if [[ "${RUN}" =~ "gfs" || "${RUN}" =~ "gefs" ]]; then # Link output files for RUN = gfs # TODO: make these forecast output files consistent w/ GFS output # TODO: Work w/ NB to determine appropriate naming convention for these files - export ENSMEM=${ENSMEM:-01} - export IDATE=$CDATE - - fhrlst=$OUTPUT_FH - # TODO: consult w/ NB on how to improve on this. Gather requirements and more information on what these files are and how they are used to properly catalog them - for fhr in $fhrlst; do - if [ $fhr = 'anl' ]; then # Looking at OUTPUT_FH, this is never true. TODO: remove this block - continue - fi - VDATE=$($NDATE $fhr $IDATE) - YYYY=$(echo $VDATE | cut -c1-4) - MM=$(echo $VDATE | cut -c5-6) - DD=$(echo $VDATE | cut -c7-8) - HH=$(echo $VDATE | cut -c9-10) - SS=$((10#$HH*3600)) - - if [[ 10#$fhr -eq 0 ]]; then - ${NLN} "${COM_ICE_HISTORY}/iceic${VDATE}.${ENSMEM}.${IDATE}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc" + local vdate seconds vdatestr fhr last_fhr + for fhr in ${FV3_OUTPUT_FH}; do + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds + vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}" + + if [[ 10#${fhr} -eq 0 ]]; then + ${NLN} "${COM_ICE_HISTORY}/iceic${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" else (( interval = fhr - last_fhr )) # Umm.. isn't this histfreq_n? 
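# A worked example (hypothetical cycle and forecast hour, not from the patch) of the
# valid-date string assembled in this loop; to_seconds comes from forecast_predet.sh
# and GNU date's "--utc -d" form is assumed, as elsewhere in this file:
#   current_cycle=2021032512; fhr=6
#   vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)  # -> 2021032518
#   seconds=$(to_seconds "${vdate:8:2}0000")                                                    # -> 64800 (18 h as zero-padded seconds)
#   vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}"                                # -> 2021-03-25-64800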
- ${NLN} "${COM_ICE_HISTORY}/ice${VDATE}.${ENSMEM}.${IDATE}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" $interval)h.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc" + ${NLN} "${COM_ICE_HISTORY}/ice${vdate}.${ENSMEM}.${current_cycle}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" "${interval}")h.${vdatestr}.nc" fi - last_fhr=$fhr + last_fhr=${fhr} done elif [[ "${RUN}" =~ "gdas" ]]; then # Link CICE generated initial condition file from DATA/CICE_OUTPUT to COMOUTice # This can be thought of as the f000 output from the CICE model - local seconds=$(to_seconds ${CDATE:8:2}0000) # convert HHMMSS to seconds - $NLN "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}-${seconds}.nc" + local seconds vdatestr + seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds + vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}" + ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" # Link instantaneous CICE forecast output files from DATA/CICE_OUTPUT to COMOUTice - local fhr="${FHOUT}" + local vdate vdatestr seconds fhr fhr3 + fhr="${FHOUT}" while [[ "${fhr}" -le "${FHMAX}" ]]; do - local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${fhr} hours" +%Y%m%d%H) - local seconds=$(to_seconds ${idate:8:2}0000) # convert HHMMSS to seconds - local fhr3=$(printf %03i ${fhr}) - $NLN "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}.nc" - local fhr=$((fhr + FHOUT)) + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds + vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}" + fhr3=$(printf %03i "${fhr}") + ${NLN} "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${vdatestr}.nc" + fhr=$((fhr + FHOUT)) done fi # Link CICE restarts from CICE_RESTART to COMOUTice/RESTART # Loop over restart_interval and link restarts from DATA to COM - local res_int=$(echo ${restart_interval} | cut -d' ' -f1) # If this is a list, get the frequency. 
# This is bound to break w/ IAU - local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) - local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) - while [[ ${idate} -le ${rdate} ]]; do - local seconds=$(to_seconds ${idate:8:2}0000) # convert HHMMSS to seconds - local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" - $NLN "${COM_ICE_RESTART}/${idate:0:8}.${idate:8:2}0000.cice_model.res.nc" "${DATA}/CICE_RESTART/cice_model.res.${idatestr}.nc" - local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) + local vdate vdatestr seconds + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${restart_interval} hours" +%Y%m%d%H) + while [[ ${vdate} -le ${forecast_end_cycle} ]]; do + seconds=$(to_seconds "${vdate:8:2}0000") # convert HHMMSS to seconds + vdatestr="${vdate:0:4}-${vdate:4:2}-${vdate:6:2}-${seconds}" + ${NLN} "${COM_ICE_RESTART}/${vdate:0:8}.${vdate:8:2}0000.cice_model.res.nc" "${DATA}/CICE_RESTART/cice_model.res.${vdatestr}.nc" + vdate=$(date --utc -d "${vdate:0:8} ${vdate:8:2} + ${restart_interval} hours" +%Y%m%d%H) done } CICE_nml() { echo "SUB ${FUNCNAME[0]}: Creating name list for CICE" - source $SCRIPTDIR/parsing_namelists_CICE.sh + source "${HOMEgfs}/ush/parsing_namelists_CICE.sh" CICE_namelists } @@ -1095,7 +993,7 @@ CICE_out() { # Copy ice_in namelist from DATA to COMOUTice after the forecast is run (and successful) if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi - ${NCP} "${DATA}/ice_in" "${COM_ICE_INPUT}/ice_in" + ${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in" } GOCART_rc() { @@ -1104,27 +1002,27 @@ GOCART_rc() { # this variable is platform-dependent and should be set via a YAML file # link directory containing GOCART input dataset, if provided - if [ ! -z "${AERO_INPUTS_DIR}" ]; then - $NLN -sf ${AERO_INPUTS_DIR} $DATA/ExtData + if [[ ! -z "${AERO_INPUTS_DIR}" ]]; then + ${NLN} "${AERO_INPUTS_DIR}" "${DATA}/ExtData" status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit "${status}" fi # copying GOCART configuration files - if [ ! -z "${AERO_CONFIG_DIR}" ]; then - $NCP ${AERO_CONFIG_DIR}/*.rc $DATA + if [[ ! -z "${AERO_CONFIG_DIR}" ]]; then + ${NCP} "${AERO_CONFIG_DIR}"/*.rc "${DATA}" status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit "${status}" # attempt to generate ExtData configuration file if not provided - if [ ! -f $DATA/AERO_ExtData.rc ]; then + if [[ ! -f "${DATA}/AERO_ExtData.rc" ]]; then { \ echo "PrimaryExports%%" ; \ - cat ${AERO_CONFIG_DIR}/ExtData.other ; \ - cat ${AERO_CONFIG_DIR}/ExtData.${AERO_EMIS_FIRE:-none} ; \ + cat "${AERO_CONFIG_DIR}/ExtData.other" ; \ + cat "${AERO_CONFIG_DIR}/ExtData.${AERO_EMIS_FIRE:-none}" ; \ echo "%%" ; \ - } > $DATA/AERO_ExtData.rc + } > "${DATA}/AERO_ExtData.rc" status=$? - if (( status != 0 )); then exit $status; fi + if (( status != 0 )); then exit "${status}"; fi fi fi } @@ -1134,25 +1032,33 @@ GOCART_postdet() { if [[ !
-d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi - fhrlst=$OUTPUT_FH - for fhr in $fhrlst; do - if [ $fhr = 'anl' ]; then - continue - fi - VDATE=$($NDATE $fhr $CDATE) - YYYY=$(echo $VDATE | cut -c1-4) - MM=$(echo $VDATE | cut -c5-6) - DD=$(echo $VDATE | cut -c7-8) - HH=$(echo $VDATE | cut -c9-10) - SS=$((10#$HH*3600)) - - # + for fhr in ${FV3_OUTPUT_FH}; do + local vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + # Temporarily delete existing files due to noclobber in GOCART - # - if [[ -e "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" ]]; then - rm "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" + if [[ -e "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" ]]; then + rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" fi - ${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" "${DATA}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" + #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end + #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ + # "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" done } + +GOCART_out() { + echo "SUB ${FUNCNAME[0]}: Copying output data for GOCART" + + # Copy gocart.inst_aod after the forecast is run (and successfull) + # TO DO: this should be linked but there were issues where gocart was crashing if it was linked + local fhr + local vdate + for fhr in ${FV3_OUTPUT_FH}; do + if (( fhr == 0 )); then continue; fi + vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) + ${NCP} "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ + "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" + done + + +} diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 334eacedef..9bb565919a 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -1,7 +1,7 @@ #! 
/usr/bin/env bash ##### -## "forecast_def.sh" +## "forecast_predet.sh" ## This script sets the values of all variables ## ## This is the child script of ex-global forecast, @@ -18,48 +18,82 @@ to_seconds() { local mm=${hhmmss:2:2} local ss=${hhmmss:4:2} local seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss})) - local padded_seconds=$(printf "%05d" ${seconds}) - echo ${padded_seconds} + local padded_seconds=$(printf "%05d" "${seconds}") + echo "${padded_seconds}" } middle_date(){ # Function to calculate mid-point date in YYYYMMDDHH between two dates also in YYYYMMDDHH local date1=${1:?} local date2=${2:?} - local date1s=$(date -d "${date1:0:8} ${date1:8:2}" +%s) - local date2s=$(date -d "${date2:0:8} ${date2:8:2}" +%s) + local date1s=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) + local date2s=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) local dtsecsby2=$(( $((date2s - date1s)) / 2 )) - local mid_date=$(date -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) - echo ${mid_date:0:10} + local mid_date=$(date --utc -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) + echo "${mid_date:0:10}" +} + +nhour(){ + # Function to calculate hours between two dates (This replicates prod-util NHOUR) + local date1=${1:?} + local date2=${2:?} + # Convert dates to UNIX timestamps + seconds1=$(date --utc -d "${date1:0:8} ${date1:8:2}:00:00" +%s) + seconds2=$(date --utc -d "${date2:0:8} ${date2:8:2}:00:00" +%s) + hours=$(( $((seconds1 - seconds2)) / 3600 )) # Calculate the difference in seconds and convert to hours + echo "${hours}" } common_predet(){ - echo "SUB ${FUNCNAME[0]}: Defining variables for shared through models" + echo "SUB ${FUNCNAME[0]}: Defining variables shared across model components" + # Ignore "not used" warning + # shellcheck disable=SC2034 pwd=$(pwd) - machine=${machine:-"WCOSS2"} - machine=$(echo $machine | tr '[a-z]' '[A-Z]') + CDUMP=${CDUMP:-gdas} CASE=${CASE:-C768} CDATE=${CDATE:-2017032500} - DATA=${DATA:-$pwd/fv3tmp$$} # temporary running directory - ROTDIR=${ROTDIR:-$pwd} # rotating archive directory -} + ENSMEM=${ENSMEM:-000} -DATM_predet(){ - SYEAR=$(echo $CDATE | cut -c1-4) - SMONTH=$(echo $CDATE | cut -c5-6) - SDAY=$(echo $CDATE | cut -c7-8) - SHOUR=$(echo $CDATE | cut -c9-10) - # directory set up - if [ ! -d $DATA ]; then mkdir -p $DATA; fi - if [ ! -d $DATA/DATM_INPUT ]; then mkdir -p $DATA/DATM_INPUT; fi - FHMAX=${FHMAX:-9} - # Go to Run Directory (DATA) - cd $DATA + FCSTEXECDIR=${FCSTEXECDIR:-${HOMEgfs}/exec} + FCSTEXEC=${FCSTEXEC:-ufs_model.x} + + # Directories.
+ FIXgfs=${FIXgfs:-${HOMEgfs}/fix} + + # Model specific stuff + PARM_POST=${PARM_POST:-${HOMEgfs}/parm/post} + + # Define significant cycles + current_cycle=${CDATE} + previous_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${assim_freq} hours" +%Y%m%d%H) + # ignore errors that variable isn't used + # shellcheck disable=SC2034 + next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H) + forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) + + # IAU options + IAU_OFFSET=${IAU_OFFSET:-0} + DOIAU=${DOIAU:-"NO"} + if [[ "${DOIAU}" = "YES" ]]; then + sCDATE=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - 3 hours" +%Y%m%d%H) + sPDY="${sCDATE:0:8}" + scyc="${sCDATE:8:2}" + tPDY=${previous_cycle:0:8} + tcyc=${previous_cycle:8:2} + else + sCDATE=${current_cycle} + sPDY=${current_cycle:0:8} + scyc=${current_cycle:8:2} + tPDY=${sPDY} + tcyc=${scyc} + fi + + mkdir -p "${COM_CONF}" + cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) } -FV3_GFS_predet(){ - echo "SUB ${FUNCNAME[0]}: Defining variables for FV3GFS" - CDUMP=${CDUMP:-gdas} +FV3_predet(){ + echo "SUB ${FUNCNAME[0]}: Defining variables for FV3" FHMIN=${FHMIN:-0} FHMAX=${FHMAX:-9} FHOUT=${FHOUT:-3} @@ -68,39 +102,30 @@ FV3_GFS_predet(){ FHMAX_HF=${FHMAX_HF:-0} FHOUT_HF=${FHOUT_HF:-1} NSOUT=${NSOUT:-"-1"} - FDIAG=$FHOUT - if [ $FHMAX_HF -gt 0 -a $FHOUT_HF -gt 0 ]; then FDIAG=$FHOUT_HF; fi + FDIAG=${FHOUT} + if (( FHMAX_HF > 0 && FHOUT_HF > 0 )); then FDIAG=${FHOUT_HF}; fi WRITE_DOPOST=${WRITE_DOPOST:-".false."} - restart_interval=${restart_interval:-0} - rst_invt1=$(echo $restart_interval |cut -d " " -f 1) + restart_interval=${restart_interval:-${FHMAX}} + # restart_interval = 0 implies write restart at the END of the forecast i.e. at FHMAX + if [[ ${restart_interval} -eq 0 ]]; then + restart_interval=${FHMAX} + fi - # Convert output settings into an explicit list - OUTPUT_FH="" - FHMIN_LF=$FHMIN + # Convert output settings into an explicit list for FV3 + # NOTE: FV3_OUTPUT_FH is also currently used in other components + # TODO: Have a separate control for other components to address issue #1629 + FV3_OUTPUT_FH="" + local fhr=${FHMIN} if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then for (( fh = FHMIN; fh < FHMAX_HF; fh = fh + FHOUT_HF )); do - OUTPUT_FH="$OUTPUT_FH $fh" + FV3_OUTPUT_FH="${FV3_OUTPUT_FH} ${fh}" done - FHMIN_LF=$FHMAX_HF + fhr=${FHMAX_HF} fi - for (( fh = FHMIN_LF; fh <= FHMAX; fh = fh + FHOUT )); do - OUTPUT_FH="$OUTPUT_FH $fh" + for (( fh = fhr; fh <= FHMAX; fh = fh + FHOUT )); do + FV3_OUTPUT_FH="${FV3_OUTPUT_FH} ${fh}" done - PDY=$(echo $CDATE | cut -c1-8) - cyc=$(echo $CDATE | cut -c9-10) - - # Directories.
- pwd=$(pwd) - HOMEgfs=${HOMEgfs:-${PACKAGEROOT:-$pwd}} - FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} - FIX_AM=${FIX_AM:-$FIX_DIR/am} - FIX_AER=${FIX_AER:-$FIX_DIR/aer} - FIX_LUT=${FIX_LUT:-$FIX_DIR/lut} - FIXfv3=${FIXfv3:-$FIX_DIR/orog} - DATA=${DATA:-$pwd/fv3tmp$$} # temporary running directory - ROTDIR=${ROTDIR:-$pwd} # rotating archive directory - DMPDIR=${DMPDIR:-$pwd} # global dumps for seaice, snow and sst analysis # Model resolution specific parameters DELTIM=${DELTIM:-225} @@ -108,33 +133,17 @@ FV3_GFS_predet(){ layout_y=${layout_y:-16} LEVS=${LEVS:-65} - # Utilities - NCP=${NCP:-"/bin/cp -p"} - NLN=${NLN:-"/bin/ln -sf"} - NMV=${NMV:-"/bin/mv"} - SEND=${SEND:-"YES"} #move final result to rotating directory - ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} - KEEPDATA=${KEEPDATA:-"NO"} - # Other options MEMBER=${MEMBER:-"-1"} # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER ENS_NUM=${ENS_NUM:-1} # Single executable runs multiple members (e.g. GEFS) PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment # IAU options - DOIAU=${DOIAU:-"NO"} IAUFHRS=${IAUFHRS:-0} IAU_DELTHRS=${IAU_DELTHRS:-0} - IAU_OFFSET=${IAU_OFFSET:-0} - - # Model specific stuff - FCSTEXECDIR=${FCSTEXECDIR:-$HOMEgfs/exec} - FCSTEXEC=${FCSTEXEC:-ufs_model.x} - PARM_FV3DIAG=${PARM_FV3DIAG:-$HOMEgfs/parm/parm_fv3diag} - PARM_POST=${PARM_POST:-$HOMEgfs/parm/post} # Model config options - ntiles=${ntiles:-6} + ntiles=6 TYPE=${TYPE:-"nh"} # choices: nh, hydro MONO=${MONO:-"non-mono"} # choices: mono, non-mono @@ -144,25 +153,17 @@ FV3_GFS_predet(){ WRITE_NEMSIOFLIP=${WRITE_NEMSIOFLIP:-".true."} WRITE_FSYNCFLAG=${WRITE_FSYNCFLAG:-".true."} - rCDUMP=${rCDUMP:-$CDUMP} + rCDUMP=${rCDUMP:-${CDUMP}} - #------------------------------------------------------- - if [ ! -d $ROTDIR ]; then mkdir -p $ROTDIR; fi - mkdata=NO - if [ ! -d $DATA ]; then - mkdata=YES - mkdir -p $DATA ; - fi - cd $DATA || exit 8 - mkdir -p $DATA/INPUT + mkdir -p "${DATA}/INPUT" #------------------------------------------------------------------ # changeable parameters # dycore definitions - res=$(echo $CASE |cut -c2-5) + res="${CASE:1}" resp=$((res+1)) - npx=$resp - npy=$resp + npx=${resp} + npy=${resp} npz=$((LEVS-1)) io_layout="1,1" #ncols=$(( (${npx}-1)*(${npy}-1)*3/2 )) @@ -172,12 +173,12 @@ FV3_GFS_predet(){ LONB_CASE=$((4*res)) LATB_CASE=$((2*res)) - JCAP=${JCAP:-$JCAP_CASE} - LONB=${LONB:-$LONB_CASE} - LATB=${LATB:-$LATB_CASE} + JCAP=${JCAP:-${JCAP_CASE}} + LONB=${LONB:-${LONB_CASE}} + LATB=${LATB:-${LATB_CASE}} - LONB_IMO=${LONB_IMO:-$LONB_CASE} - LATB_JMO=${LATB_JMO:-$LATB_CASE} + LONB_IMO=${LONB_IMO:-${LONB_CASE}} + LATB_JMO=${LATB_JMO:-${LATB_CASE}} # NSST Options # nstf_name contains the NSST related parameters @@ -192,7 +193,7 @@ FV3_GFS_predet(){ NST_RESV=${NST_RESV-0} ZSEA1=${ZSEA1:-0} ZSEA2=${ZSEA2:-0} - nstf_name=${nstf_name:-"$NST_MODEL,$NST_SPINUP,$NST_RESV,$ZSEA1,$ZSEA2"} + nstf_name=${nstf_name:-"${NST_MODEL},${NST_SPINUP},${NST_RESV},${ZSEA1},${ZSEA2}"} nst_anl=${nst_anl:-".false."} @@ -213,58 +214,41 @@ FV3_GFS_predet(){ print_freq=${print_freq:-6} #------------------------------------------------------- - if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]] && (( rst_invt1 > 0 )); then + if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]]; then if [[ ! 
-d ${COM_ATMOS_RESTART} ]]; then mkdir -p "${COM_ATMOS_RESTART}" ; fi ${NLN} "${COM_ATMOS_RESTART}" RESTART # The final restart written at the end doesn't include the valid date # Create links that keep the same name pattern for these files - VDATE=$($NDATE +$FHMAX_GFS $CDATE) - vPDY=$(echo $VDATE | cut -c1-8) - vcyc=$(echo $VDATE | cut -c9-10) files="coupler.res fv_core.res.nc" - for tile in {1..6}; do + for n in $(seq 1 "${ntiles}"); do for base in ca_data fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do - files="${files} ${base}.tile${tile}.nc" + files="${files} ${base}.tile${n}.nc" done done for file in ${files}; do - ${NLN} "${COM_ATMOS_RESTART}/${file}" "${COM_ATMOS_RESTART}/${vPDY}.${vcyc}0000.${file}" + ${NLN} "${file}" "${COM_ATMOS_RESTART}/${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${file}" done else - mkdir -p $DATA/RESTART - fi - - if [[ "$DOIAU" = "YES" ]]; then - sCDATE=$($NDATE -3 $CDATE) - sPDY=$(echo $sCDATE | cut -c1-8) - scyc=$(echo $sCDATE | cut -c9-10) - tPDY=${gPDY} - tcyc=${gcyc} - else - sCDATE=$CDATE - sPDY=$PDY - scyc=$cyc - tPDY=$sPDY - tcyc=$cyc + mkdir -p "${DATA}/RESTART" fi echo "SUB ${FUNCNAME[0]}: pre-determination variables set" } WW3_predet(){ - echo "SUB ${FUNCNAME[0]}: Defining variables for WW3" + echo "SUB ${FUNCNAME[0]}: WW3 before run type determination" if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi ${NLN} "${COM_WAVE_RESTART}" "restart_wave" } CICE_predet(){ echo "SUB ${FUNCNAME[0]}: CICE before run type determination" - if [ ! -d $DATA/CICE_OUTPUT ]; then mkdir -p $DATA/CICE_OUTPUT; fi - if [ ! -d $DATA/CICE_RESTART ]; then mkdir -p $DATA/CICE_RESTART; fi + if [[ ! -d "${DATA}/CICE_OUTPUT" ]]; then mkdir -p "${DATA}/CICE_OUTPUT"; fi + if [[ ! -d "${DATA}/CICE_RESTART" ]]; then mkdir -p "${DATA}/CICE_RESTART"; fi } MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" - if [ ! -d $DATA/MOM6_OUTPUT ]; then mkdir -p $DATA/MOM6_OUTPUT; fi - if [ ! -d $DATA/MOM6_RESTART ]; then mkdir -p $DATA/MOM6_RESTART; fi + if [[ ! -d "${DATA}/MOM6_OUTPUT" ]]; then mkdir -p "${DATA}/MOM6_OUTPUT"; fi + if [[ ! -d "${DATA}/MOM6_RESTART" ]]; then mkdir -p "${DATA}/MOM6_RESTART"; fi } diff --git a/ush/fv3gfs_remap.sh b/ush/fv3gfs_remap.sh index 430e96c868..7986add331 100755 --- a/ush/fv3gfs_remap.sh +++ b/ush/fv3gfs_remap.sh @@ -14,8 +14,8 @@ export GG=${master_grid:-"0p25deg"} # 1deg 0p5deg 0p25deg 0p125deg pwd=$(pwd) export DATA=${DATA:-$pwd} export HOMEgfs=${HOMEgfs:-$PACKAGEROOT} -export FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} -export FIXfv3=${FIXfv3:-$FIX_DIR/orog} +export FIXgfs=${FIXgfs:-$HOMEgfs/fix} +export FIXorog=${FIXorog:-$FIXgfs/orog} export REMAPEXE=${REMAPEXE:-$HOMEgfs/exec/fregrid_parallel} export IPD4=${IPD4:-"YES"} @@ -25,8 +25,8 @@ export CDUMP=${CDUMP:-gfs} export PREFIX=${PREFIX:-${CDUMP}${TCYC}} #-------------------------------------------------- -export grid_loc=$FIXfv3/$CASE/${CASE}_mosaic.nc -export weight_file=$FIXfv3/$CASE/remap_weights_${CASE}_${GG}.nc +export grid_loc=${FIXorog}/${CASE}/${CASE}_mosaic.nc +export weight_file=${FIXorog}/${CASE}/remap_weights_${CASE}_${GG}.nc export APRUN_REMAP=${APRUN_REMAP:-${APRUN:-""}} export NTHREADS_REMAP=${NTHREADS_REMAP:-${NTHREADS:-1}} diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh index 1e68ca0186..1a0441a06f 100755 --- a/ush/gaussian_sfcanl.sh +++ b/ush/gaussian_sfcanl.sh @@ -29,7 +29,7 @@ # $BASEDIR/gfs_ver.v15.0.0} # FIXam Directory for the global fixed climatology files. 
# Defaults to $HOMEgfs/fix/am -# FIXfv3 Directory for the model grid and orography netcdf +# FIXorog Directory for the model grid and orography netcdf # files. Defaults to $HOMEgfs/fix/orog # FIXWGTS Weight file to use for interpolation # EXECgfs Directory of the program executable. Defaults to @@ -83,9 +83,9 @@ # # programs : $GAUSFCANLEXE # -# fixed data : $FIXfv3/${CASE}/${CASE}_oro_data.tile*.nc -# $FIXWGTS -# $FIXam/global_hyblev.l65.txt +# fixed data : ${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile*.nc +# ${FIXWGTS} +# ${FIXam}/global_hyblev.l65.txt # # input data : ${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile*.nc # @@ -126,9 +126,9 @@ gfs_ver=${gfs_ver:-v16.3.0} BASEDIR=${BASEDIR:-${PACKAGEROOT:-/lfs/h1/ops/prod/packages}} HOMEgfs=${HOMEgfs:-$BASEDIR/gfs.${gfs_ver}} EXECgfs=${EXECgfs:-$HOMEgfs/exec} -FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} +FIXorog=${FIXorog:-$HOMEgfs/fix/orog} FIXam=${FIXam:-$HOMEgfs/fix/am} -FIXWGTS=${FIXWGTS:-$FIXfv3/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} +FIXWGTS=${FIXWGTS:-$FIXorog/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} DATA=${DATA:-$(pwd)} # Filenames. @@ -176,7 +176,7 @@ ih=${cyc} export OMP_NUM_THREADS=${OMP_NUM_THREADS_SFC:-1} # input interpolation weights -$NLN $FIXWGTS ./weights.nc +${NLN} "${FIXWGTS}" "./weights.nc" # input analysis tiles (with nst records) ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile1.nc" "./anal.tile1.nc" @@ -187,14 +187,14 @@ ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile5.nc" "./anal.til ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile6.nc" "./anal.tile6.nc" # input orography tiles -$NLN $FIXfv3/$CASE/${CASE}_oro_data.tile1.nc ./orog.tile1.nc -$NLN $FIXfv3/$CASE/${CASE}_oro_data.tile2.nc ./orog.tile2.nc -$NLN $FIXfv3/$CASE/${CASE}_oro_data.tile3.nc ./orog.tile3.nc -$NLN $FIXfv3/$CASE/${CASE}_oro_data.tile4.nc ./orog.tile4.nc -$NLN $FIXfv3/$CASE/${CASE}_oro_data.tile5.nc ./orog.tile5.nc -$NLN $FIXfv3/$CASE/${CASE}_oro_data.tile6.nc ./orog.tile6.nc - -$NLN $SIGLEVEL ./vcoord.txt +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile1.nc" "./orog.tile1.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile2.nc" "./orog.tile2.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile3.nc" "./orog.tile3.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile4.nc" "./orog.tile4.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile5.nc" "./orog.tile5.nc" +${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile6.nc" "./orog.tile6.nc" + +${NLN} "${SIGLEVEL}" "./vcoord.txt" # output gaussian global surface analysis files ${NLN} "${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc" "./sfc.gaussian.analysis.file" diff --git a/ush/getncdimlen b/ush/getncdimlen index 5d230f6cc3..fcf231947b 100755 --- a/ush/getncdimlen +++ b/ush/getncdimlen @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # getncdimlen # cory.r.martin@noaa.gov # 2019-10-17 diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index b782c707c9..5ed05f9beb 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -58,7 +58,7 @@ for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do # Make sure all files are available: ic=0 while (( ic < 1000 )); do - if [[ ! -f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.logf${hh3}.${logfm}" ]]; then + if [[ ! 
-f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atm.logf${hh3}.${logfm}" ]]; then sleep 10 ic=$((ic + 1)) else diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh index 579dd5ae25..99c5c68fa3 100755 --- a/ush/gfs_sndp.sh +++ b/ush/gfs_sndp.sh @@ -57,12 +57,10 @@ EOF rm $DATA/${m}/bufrout done - if [[ ${SENDCOM} == 'YES' ]]; then - if [[ ${SENDDBN} == 'YES' ]] ; then - cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" - "${DBNROOT}/bin/dbn_alert" NTC_LOW BUFR "${job}" \ - "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" - fi - cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_BUFR}/." + if [[ ${SENDDBN} == 'YES' ]] ; then + cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" + "${DBNROOT}/bin/dbn_alert" NTC_LOW BUFR "${job}" \ + "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" fi + cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_BUFR}/." diff --git a/ush/gsi_utils.py b/ush/gsi_utils.py index 97d66e8ace..8088cb7233 100644 --- a/ush/gsi_utils.py +++ b/ush/gsi_utils.py @@ -2,6 +2,7 @@ # a collection of functions, classes, etc. # used for the GSI global analysis + def isTrue(str_in): """ isTrue(str_in) - function to translate shell variables to python logical variables @@ -77,7 +78,7 @@ def get_ncdims(ncfile): try: import netCDF4 as nc except ImportError as err: - raise ImportError(f"Unable to import netCDF4 module\n{err}") + raise ImportError(f"Unable to import netCDF4 module") ncf = nc.Dataset(ncfile) ncdims = {} for d in ncf.dimensions.keys(): diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh deleted file mode 120000 index 7ae08ab214..0000000000 --- a/ush/hpssarch_gen.sh +++ /dev/null @@ -1 +0,0 @@ -hpssarch_gen_gsl.sh \ No newline at end of file diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh new file mode 100755 index 0000000000..0a027c7537 --- /dev/null +++ b/ush/hpssarch_gen.sh @@ -0,0 +1,752 @@ +#! /usr/bin/env bash + +################################################### +# Fanglin Yang, 20180318 +# --create bunches of files to be archived to HPSS +################################################### +source "${HOMEgfs}/ush/preamble.sh" + +type=${1:-gfs} ##gfs, gdas, enkfgdas or enkfggfs + +ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"YES"} +ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +# Set whether to archive downstream products +DO_DOWN=${DO_DOWN:-"NO"} +if [[ ${DO_BUFRSND} = "YES" ]]; then + export DO_DOWN="YES" +fi + +#----------------------------------------------------- +if [[ ${type} = "gfs" ]]; then +#----------------------------------------------------- + FHMIN_GFS=${FHMIN_GFS:-0} + FHMAX_GFS=${FHMAX_GFS:-384} + FHOUT_GFS=${FHOUT_GFS:-3} + FHMAX_HF_GFS=${FHMAX_HF_GFS:-120} + FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} + + rm -f "${DATA}/gfsa.txt" + rm -f "${DATA}/gfsb.txt" + rm -f "${DATA}/gfs_restarta.txt" + touch "${DATA}/gfsa.txt" + touch "${DATA}/gfsb.txt" + touch "${DATA}/gfs_restarta.txt" + + if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then + rm -f "${DATA}/gfs_pgrb2b.txt" + rm -f "${DATA}/gfs_netcdfb.txt" + rm -f "${DATA}/gfs_flux.txt" + touch "${DATA}/gfs_pgrb2b.txt" + touch "${DATA}/gfs_netcdfb.txt" + touch "${DATA}/gfs_flux.txt" + + if [[ ${MODE} = "cycled" ]]; then + rm -f "${DATA}/gfs_netcdfa.txt" + touch "${DATA}/gfs_netcdfa.txt" + fi + fi + + if [[ ${DO_DOWN} = "YES" ]]; then + rm -f "${DATA}/gfs_downstream.txt" + touch "${DATA}/gfs_downstream.txt" + fi + + head="gfs.t${cyc}z." 
+ + if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl.idx" + } >> "${DATA}/gfs_pgrb2b.txt" + + if [[ ${MODE} = "cycled" ]]; then + { + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*.nc" + gsida_files=("dtfanl.nc" + "loginc.txt") + for file in "${gsida_files[@]}"; do + [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" + done + } >> "${DATA}/gfs_netcdfa.txt" + fi + + fh=0 + while (( fh <= ARCH_GAUSSIAN_FHMAX )); do + fhr=$(printf %03i "${fh}") + { + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" + } >> "${DATA}/gfs_netcdfb.txt" + fh=$((fh+ARCH_GAUSSIAN_FHINC)) + done + fi + + #.................. + # Exclude the gfsarch.log file, which will change during the tar operation + # This uses the bash extended globbing option + { + echo "./logs/${PDY}${cyc}/gfs!(arch).log" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/input.nml" + + if [[ ${MODE} = "cycled" ]]; then + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}gsistat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" + fi + gsiob_files=("nsstbufr" + "prepbufr" + "prepbufr.acft_profiles") + for file in "${gsiob_files[@]}"; do + [[ -s ${COM_OBS}/${head}${file} ]] && echo "${COM_OBS/${ROTDIR}\//}/${head}${file}" + done + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmvar.yaml" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmvar.yaml" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmstat" + fi + fi + + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" + + #Only generated if there are cyclones to track + cyclone_files=("avno.t${cyc}z.cyclone.trackatcfunix" + "avnop.t${cyc}z.cyclone.trackatcfunix" + "trak.gfso.atcfunix.${PDY}${cyc}" + "trak.gfso.atcfunix.altg.${PDY}${cyc}") + + for file in "${cyclone_files[@]}"; do + [[ -s ${COM_ATMOS_TRACK}/${file} ]] && echo "${COM_ATMOS_TRACK/${ROTDIR}\//}/${file}" + done + + genesis_files=("storms.gfso.atcf_gen.${PDY}${cyc}" + "storms.gfso.atcf_gen.altg.${PDY}${cyc}") + for file in "${genesis_files[@]}"; do + [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" + done + + # GSI Monitor job output + + if [[ ${DO_VMINMON} = "YES" ]]; then + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" + fi + + } >> "${DATA}/gfsa.txt" + + { + if [[ ${DO_DOWN} = "YES" ]]; then + if [[ ${DO_BUFRSND} = "YES" ]]; then + echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.sfc" + echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.snd" + echo "${COM_ATMOS_WMO/${ROTDIR}\//}/gfs_collective*.postsnd_${cyc}" + echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/bufr.t${cyc}z" + echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/gfs.t${cyc}z.bufrsnd.tar.gz" + 
fi + fi + } >> "${DATA}/gfs_downstream.txt" + + { + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl" + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" + } >> "${DATA}/gfsb.txt" + + + fh=0 + while (( fh <= FHMAX_GFS )); do + fhr=$(printf %03i "${fh}") + if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then + { + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" + } >> "${DATA}/gfs_flux.txt" + + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}.idx" + if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2b.1p00.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}.idx" + fi + } >> "${DATA}/gfs_pgrb2b.txt" + fi + + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" + } >> "${DATA}/gfsa.txt" + + + { + if [[ -s "${COM_ATMOS_GRIB_0p50}/${head}pgrb2.0p50.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}" + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}.idx" + fi + if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2.1p00.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" + fi + } >> "${DATA}/gfsb.txt" + + inc=${FHOUT_GFS} + if (( FHMAX_HF_GFS > 0 && FHOUT_HF_GFS > 0 && fh < FHMAX_HF_GFS )); then + inc=${FHOUT_HF_GFS} + fi + + fh=$((fh+inc)) + done + + #.................. + { + if [[ ${MODE} = "cycled" ]]; then + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + elif [[ ${MODE} = "forecast-only" ]]; then + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_ctrl.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile1.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile2.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile3.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile4.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile5.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile6.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile1.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile2.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile3.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile4.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile5.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile6.nc" + fi + } >> "${DATA}/gfs_restarta.txt" + + + #.................. + if [[ ${DO_WAVE} = "YES" ]]; then + + rm -rf "${DATA}/gfswave.txt" + touch "${DATA}/gfswave.txt" + + head="gfswave.t${cyc}z." + + #........................... 
+ { + echo "${COM_WAVE_HISTORY/${ROTDIR}\//}/ww3_multi*" + echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + } >> "${DATA}/gfswave.txt" + fi + + if [[ ${DO_OCN} = "YES" ]]; then + + head="gfs.t${cyc}z." + + rm -f "${DATA}/gfs_flux_1p00.txt" + rm -f "${DATA}/ocn_ice_grib2_0p5.txt" + rm -f "${DATA}/ocn_ice_grib2_0p25.txt" + rm -f "${DATA}/ocn_2D.txt" + rm -f "${DATA}/ocn_3D.txt" + rm -f "${DATA}/ocn_xsect.txt" + rm -f "${DATA}/ocn_daily.txt" + touch "${DATA}/gfs_flux_1p00.txt" + touch "${DATA}/ocn_ice_grib2_0p5.txt" + touch "${DATA}/ocn_ice_grib2_0p25.txt" + touch "${DATA}/ocn_2D.txt" + touch "${DATA}/ocn_3D.txt" + touch "${DATA}/ocn_xsect.txt" + touch "${DATA}/ocn_daily.txt" + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> "${DATA}/ocn_2D.txt" + echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> "${DATA}/ocn_2D.txt" + echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> "${DATA}/ocn_3D.txt" + echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> "${DATA}/ocn_xsect.txt" + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> "${DATA}/ocn_daily.txt" + echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> "${DATA}/ocn_ice_grib2_0p5.txt" + echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> "${DATA}/ocn_ice_grib2_0p25.txt" + + # Also save fluxes from atmosphere + { + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx" + } >> "${DATA}/gfs_flux_1p00.txt" + fi + + if [[ ${DO_ICE} = "YES" ]]; then + head="gfs.t${cyc}z." + + rm -f "${DATA}/ice.txt" + touch "${DATA}/ice.txt" + { + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc" + } >> "${DATA}/ice.txt" + fi + + if [[ ${DO_AERO} = "YES" ]]; then + head="gocart" + + rm -f "${DATA}/chem.txt" + touch "${DATA}/chem.txt" + + echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> "${DATA}/chem.txt" + fi + +#----------------------------------------------------- +fi ##end of gfs +#----------------------------------------------------- + + + +#----------------------------------------------------- +if [[ ${type} == "gdas" ]]; then +#----------------------------------------------------- + + rm -f "${DATA}/gdas.txt" + rm -f "${DATA}/gdas_restarta.txt" + rm -f "${DATA}/gdas_restartb.txt" + touch "${DATA}/gdas.txt" + touch "${DATA}/gdas_restarta.txt" + touch "${DATA}/gdas_restartb.txt" + + head="gdas.t${cyc}z." + + #.................. 
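# The brace groups used below collect many echo statements and append them to a list
# file through a single redirection. A minimal sketch of the pattern (hypothetical
# file names; the function is defined but not called here):
_demo_batched_append() {
  {
    echo "first/relative/path"
    echo "second/relative/path"
  } >> "demo_list.txt"
}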
+ { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmvar.yaml" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmvar.yaml" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmstat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}gsistat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmanl.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma003.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma003.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma009.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma009.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}oznstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}oznstat" + fi + if [[ -s "${COM_CHEM_ANALYSIS}/${head}aerostat" ]]; then + echo "${COM_CHEM_ANALYSIS/${ROTDIR}\//}/${head}aerostat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" + fi + for fstep in prep anal fcst verfozn verfrad vminmon; do + if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then + echo "./logs/${PDY}${cyc}/gdas${fstep}.log" + fi + done + echo "./logs/${PDY}${cyc}/gdaspost*.log" + + fh=0 + while [[ ${fh} -le 9 ]]; do + fhr=$(printf %03i "${fh}") + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" + fh=$((fh+3)) + done + flist="001 002 004 005 007 008" + for fhr in ${flist}; do + file="${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + if [[ -s "${file}" ]]; then + echo "${file}" + echo "${file}.idx" + fi + done + + # GSI Monitor jobs output + + if [[ ${DO_VERFOZN} = "YES" ]]; then + for type in horiz time; do + if [[ ${type} = "horiz" ]]; then + suffix=".gz" + elif [[ ${type} = "time" ]]; then + suffix="" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_cnt.${PDY}${cyc}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_diag.${PDY}${cyc}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_pen.${PDY}${cyc}" + fi + subtyplist="gome_metop-b omi_aura ompslp_npp ompsnp_n20 ompsnp_npp ompstc8_n20 ompstc8_npp sbuv2_n19" + for subtype in ${subtyplist}; do + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.${PDY}${cyc}.ieee_d${suffix}" + echo 
"${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.ctl" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.${PDY}${cyc}.ieee_d${suffix}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.ctl" + done + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/stdout.${type}.tar.gz" + done + fi + + if [[ ${DO_VERFRAD} = "YES" ]]; then + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_diag.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_pen.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/low_count.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_angle.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcoef.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcor.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_time.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/warning.${PDY}${cyc}" + fi + + if [[ ${DO_VMINMON} = "YES" ]]; then + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" + fi + + } >> "${DATA}/gdas.txt" + + #.................. + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> "${DATA}/gdas_restarta.txt" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> "${DATA}/gdas_restarta.txt" + fi + + { + gsiob_files=("nsstbufr" + "prepbufr" + "prepbufr.acft_profiles") + for file in "${gsiob_files[@]}"; do + [[ -s ${COM_OBS}/${head}${file} ]] && echo "${COM_OBS/${ROTDIR}\//}/${head}${file}" + done + + gsida_files=("abias" + "abias_air" + "abias_int" + "abias_pc" + "dtfanl.nc" + "loginc.txt") + for file in "${gsida_files[@]}"; do + [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" + done + + ufsda_files=("amsua_n19.satbias.nc4" + "amsua_n19.satbias_cov.nc4" + "amsua_n19.tlapse.txt") + for file in "${ufsda_files[@]}"; do + [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" + done + + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*nc" + + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + } >> "${DATA}/gdas_restarta.txt" + + #.................. + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> "${DATA}/gdas_restartb.txt" + + #.................. + if [[ ${DO_WAVE} = "YES" ]]; then + + rm -rf "${DATA}/gdaswave.txt" + touch "${DATA}/gdaswave.txt" + rm -rf "${DATA}/gdaswave_restart.txt" + touch "${DATA}/gdaswave_restart.txt" + + head="gdaswave.t${cyc}z." + + #........................... + { + echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + } >> "${DATA}/gdaswave.txt" + + echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdaswave_restart.txt" + + fi + + #.................. 
+ if [[ ${DO_OCN} = "YES" ]]; then + + rm -rf "${DATA}/gdasocean.txt" + touch "${DATA}/gdasocean.txt" + rm -rf "${DATA}/gdasocean_restart.txt" + touch "${DATA}/gdasocean_restart.txt" + + head="gdas.t${cyc}z." + + #........................... + { + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}" + } >> "${DATA}/gdasocean.txt" + + { + echo "${COM_OCEAN_RESTART/${ROTDIR}\//}/*" + echo "${COM_MED_RESTART/${ROTDIR}\//}/*" + } >> "${DATA}/gdasocean_restart.txt" + + { + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" + } >> "${DATA}/gdasocean_analysis.txt" + + fi + + if [[ ${DO_ICE} = "YES" ]]; then + + rm -rf "${DATA}/gdasice.txt" + touch "${DATA}/gdasice.txt" + rm -rf "${DATA}/gdasice_restart.txt" + touch "${DATA}/gdasice_restart.txt" + + head="gdas.t${cyc}z." + + #........................... + { + echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + } >> "${DATA}/gdasice.txt" + + echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdasice_restart.txt" + + fi + + +#----------------------------------------------------- +fi ##end of gdas +#----------------------------------------------------- + + +#----------------------------------------------------- +if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then +#----------------------------------------------------- + + IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} + lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} + IFS=',' read -ra nfhrs <<< ${IAUFHRS_ENKF} + NMEM_ENS=${NMEM_ENS:-80} + NMEM_EARCGRP=${NMEM_EARCGRP:-10} ##number of ens members included in each tarball + NTARS=$((NMEM_ENS/NMEM_EARCGRP)) + [[ ${NTARS} -eq 0 ]] && NTARS=1 + [[ $((NTARS*NMEM_EARCGRP)) -lt ${NMEM_ENS} ]] && NTARS=$((NTARS+1)) + ##NTARS2=$((NTARS/2)) # number of earc groups to include analysis/increments + NTARS2=${NTARS} + + head="${RUN}.t${cyc}z." + + #..................
+ rm -f "${DATA}/${RUN}.txt" + touch "${DATA}/${RUN}.txt" + + { + gsida_files=("enkfstat" + "gsistat.ensmean" + "cnvstat.ensmean" + "oznstat.ensmean" + "radstat.ensmean") + for file in "${gsida_files[@]}"; do + [[ -s ${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}${file}" + done + + ufsda_files=("atmens.yaml" + "atmensstat") + for file in "${ufsda_files[@]}"; do + [[ -s ${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}${file}" + done + + for FHR in "${nfhrs[@]}"; do # loop over analysis times in window + if [[ ${FHR} -eq 6 ]]; then + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmanl.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmanl.ensmean.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atminc.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atminc.ensmean.nc" + fi + else + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atma00${FHR}.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atma00${FHR}.ensmean.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmi00${FHR}.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmi00${FHR}.ensmean.nc" + fi + fi + done # loop over FHR + for fstep in eobs ecen esfc eupd efcs epos ; do + echo "logs/${PDY}${cyc}/${RUN}${fstep}*.log" + done + + # eomg* are optional jobs + for log in "${ROTDIR}/logs/${PDY}${cyc}/${RUN}eomg"*".log"; do + if [[ -s "${log}" ]]; then + echo "logs/${PDY}${cyc}/${RUN}eomg*.log" + fi + break + done + + # Ensemble spread file only available with netcdf output + fh=3 + while [ $fh -le 9 ]; do + fhr=$(printf %03i $fh) + echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensmean.nc" + echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}sfcf${fhr}.ensmean.nc" + if [[ -s "${COM_ATMOS_HISTORY_ENSSTAT}/${head}atmf${fhr}.ensspread.nc" ]]; then + echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensspread.nc" + fi + fh=$((fh+3)) + done + } >> "${DATA}/${RUN}.txt" + + #........................... + n=1 + while (( n <= NTARS )); do + #........................... + + rm -f "${DATA}/${RUN}_grp${n}.txt" + rm -f "${DATA}/${RUN}_restarta_grp${n}.txt" + rm -f "${DATA}/${RUN}_restartb_grp${n}.txt" + touch "${DATA}/${RUN}_grp${n}.txt" + touch "${DATA}/${RUN}_restarta_grp${n}.txt" + touch "${DATA}/${RUN}_restartb_grp${n}.txt" + + m=1 + while (( m <= NMEM_EARCGRP )); do + nm=$(((n-1)*NMEM_EARCGRP+m)) + mem=$(printf %03i ${nm}) + head="${RUN}.t${cyc}z." 
+ + MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL \ + COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL + + #--- + for FHR in "${nfhrs[@]}"; do # loop over analysis times in window + if [ "${FHR}" -eq 6 ]; then + { + if (( n <= NTARS2 )); then + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atmanl.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atmanl.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" + fi + fi + } >> "${DATA}/${RUN}_grp${n}.txt" + + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" \ + >> "${DATA}/${RUN}_restarta_grp${n}.txt" + fi + + else + { + if (( n <= NTARS2 )); then + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atma00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atma00${FHR}.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" + fi + fi + } >> "${DATA}/${RUN}_grp${n}.txt" + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" \ + >> "${DATA}/${RUN}_restarta_grp${n}.txt" + fi + fi + { + echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}atmf00${FHR}.nc" + if (( FHR == 6 )); then + echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}sfcf00${FHR}.nc" + fi + } >> "${DATA}/${RUN}_grp${n}.txt" + done # loop over FHR + + if [[ ${lobsdiag_forenkf} == ".false." ]] ; then + { + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}gsistat" + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]] ; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" + fi + } >> "${DATA}/${RUN}_grp${n}.txt" + + { + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}radstat" ]]; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}radstat" + fi + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" + fi + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_air" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_int" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_pc" + } >> "${DATA}/${RUN}_restarta_grp${n}.txt" + fi + #--- + { + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + } >> "${DATA}/${RUN}_restarta_grp${n}.txt" + #--- + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${DATA}/${RUN}_restartb_grp${n}.txt" + + m=$((m+1)) + done + + + #........................... + n=$((n+1)) + done + #........................... 
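# How the tarball group count (NTARS) set earlier in this block rounds up when the
# ensemble size is not a multiple of the group size; a minimal sketch with hypothetical
# counts (defined but not called):
_demo_ntars() {
  local NMEM_ENS=85 NMEM_EARCGRP=10
  local NTARS=$(( NMEM_ENS / NMEM_EARCGRP ))                                    # 8
  [[ ${NTARS} -eq 0 ]] && NTARS=1
  [[ $(( NTARS * NMEM_EARCGRP )) -lt ${NMEM_ENS} ]] && NTARS=$(( NTARS + 1 ))   # 80 < 85 -> 9
  echo "${NTARS}"
}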
+ + +#----------------------------------------------------- +fi ##end of enkfgdas or enkfgfs +#----------------------------------------------------- + +exit 0 + diff --git a/ush/interp_atmos_master.sh b/ush/interp_atmos_master.sh new file mode 100755 index 0000000000..0abc6ad185 --- /dev/null +++ b/ush/interp_atmos_master.sh @@ -0,0 +1,59 @@ +#! /usr/bin/env bash + +# This script takes in a master grib file and creates products at various interpolated resolutions +# Generate 0.25 / 0.5 / 1 degree interpolated grib2 files for each input grib2 file +# trim's RH and tweaks sea-ice cover + +source "${HOMEgfs}/ush/preamble.sh" + +input_file=${1:-"pgb2file_in"} # Input pressure grib2 file +output_file_prefix=${2:-"pgb2file_out"} # Prefix for output grib2 file; the prefix is appended by resolution e.g. _0p25 +grid_string=${3:-"0p25"} # Target grids; e.g. "0p25" or "0p25:0p50"; If multiple, they need to be ":" seperated + +WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} + +# wgrib2 options for regridding +defaults="-set_grib_type same -set_bitmap 1 -set_grib_max_bits 16" +interp_winds="-new_grid_winds earth" +interp_bilinear="-new_grid_interpolation bilinear" +interp_neighbor="-if :(CSNOW|CRAIN|CFRZR|CICEP|ICSEV): -new_grid_interpolation neighbor -fi" +interp_budget="-if :(APCP|ACPCP|PRATE|CPRAT|DZDT): -new_grid_interpolation budget -fi" +increased_bits="-if :(APCP|ACPCP|PRATE|CPRAT): -set_grib_max_bits 25 -fi" + +# interpolated target grids +# shellcheck disable=SC2034 +grid0p25="latlon 0:1440:0.25 90:721:-0.25" +# shellcheck disable=SC2034 +grid0p50="latlon 0:720:0.5 90:361:-0.5" +# shellcheck disable=SC2034 +grid1p00="latlon 0:360:1.0 90:181:-1.0" + +# "Import" functions used in this script +source "${HOMEgfs}/ush/product_functions.sh" + +# Transform the input ${grid_string} into an array for processing +IFS=':' read -ra grids <<< "${grid_string}" + +output_grids="" +for grid in "${grids[@]}"; do + gridopt="grid${grid}" + output_grids="${output_grids} -new_grid ${!gridopt} ${output_file_prefix}_${grid}" +done + +#shellcheck disable=SC2086 +${WGRIB2} "${input_file}" ${defaults} \ + ${interp_winds} \ + ${interp_bilinear} \ + ${interp_neighbor} \ + ${interp_budget} \ + ${increased_bits} \ + ${output_grids} +export err=$?; err_chk + +# trim and mask for all grids +for grid in "${grids[@]}"; do + trim_rh "${output_file_prefix}_${grid}"; export err=$?; err_chk + mod_icec "${output_file_prefix}_${grid}"; export err=$?; err_chk +done + +exit 0 diff --git a/ush/interp_atmos_sflux.sh b/ush/interp_atmos_sflux.sh new file mode 100755 index 0000000000..516a2f5e4a --- /dev/null +++ b/ush/interp_atmos_sflux.sh @@ -0,0 +1,49 @@ +#! /usr/bin/env bash + +# This script takes in a master flux file and creates interpolated flux files at various interpolated resolutions +# Generate 0.25 / 0.5 / 1 degree interpolated grib2 flux files for each input sflux grib2 file + +source "${HOMEgfs}/ush/preamble.sh" + +input_file=${1:-"sfluxfile_in"} # Input sflux grib2 file +output_file_prefix=${2:-"sfluxfile_out"} # Prefix for output sflux grib2 file; the prefix is appended by resolution e.g. _0p25 +grid_string=${3:-"1p00"} # Target grids; e.g. 
"0p25" or "0p25:0p50"; If multiple, they need to be ":" seperated + +WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} + +# wgrib2 options for regridding +defaults="-set_grib_type same -set_bitmap 1 -set_grib_max_bits 16" +interp_winds="-new_grid_winds earth" +interp_bilinear="-new_grid_interpolation bilinear" +interp_neighbor="-if :(LAND|CSNOW|CRAIN|CFRZR|CICEP|ICSEV): -new_grid_interpolation neighbor -fi" +interp_budget="-if :(APCP|ACPCP|PRATE|CPRAT|DZDT): -new_grid_interpolation budget -fi" +increased_bits="-if :(APCP|ACPCP|PRATE|CPRAT): -set_grib_max_bits 25 -fi" + +# interpolated target grids +# shellcheck disable=SC2034 +grid0p25="latlon 0:1440:0.25 90:721:-0.25" +# shellcheck disable=SC2034 +grid0p50="latlon 0:720:0.5 90:361:-0.5" +# shellcheck disable=SC2034 +grid1p00="latlon 0:360:1.0 90:181:-1.0" + +# Transform the input ${grid_string} into an array for processing +IFS=':' read -ra grids <<< "${grid_string}" + +output_grids="" +for grid in "${grids[@]}"; do + gridopt="grid${grid}" + output_grids="${output_grids} -new_grid ${!gridopt} ${output_file_prefix}_${grid}" +done + +#shellcheck disable=SC2086 +${WGRIB2} "${input_file}" ${defaults} \ + ${interp_winds} \ + ${interp_bilinear} \ + ${interp_neighbor} \ + ${interp_budget} \ + ${increased_bits} \ + ${output_grids} +export err=$?; err_chk + +exit 0 \ No newline at end of file diff --git a/ush/link_crtm_fix.sh b/ush/link_crtm_fix.sh index 0d4d8dc55b..61ac3f7870 100755 --- a/ush/link_crtm_fix.sh +++ b/ush/link_crtm_fix.sh @@ -4,16 +4,16 @@ source "$HOMEgfs/ush/preamble.sh" # Get CRTM fix directory from (in this order): # 1. First argument to script, or -# 2. $FIXCRTM environment variable, or +# 2. $CRTM_FIX environment variable, or # 3. Nowhere. Give up. Whine. -FIXCRTM="${1:-${FIXCRTM:-MISSING}}" +CRTM_FIX="${1:-${CRTM_FIX:-MISSING}}" -if [[ "$FIXCRTM" == "MISSING" ]] ; then +if [[ "${CRTM_FIX}" == "MISSING" ]] ; then echo "Please specify CRTM fix location. Giving up." 1>&2 exit 19 fi -if [[ ! -d "$FIXCRTM" ]] ; then - echo "$FIXCRTM: \$FIXCRTM is not a directory. Giving up." 1>&2 +if [[ ! -d "${CRTM_FIX}" ]] ; then + echo "${CRTM_FIX}: \${CRTM_FIX} is not a directory. Giving up." 1>&2 exit 38 fi @@ -22,16 +22,16 @@ for what in "amsre_aqua" "imgr_g11" "imgr_g12" "imgr_g13" \ "ssmi_f13" "ssmi_f14" "ssmi_f15" "ssmis_f16" \ "ssmis_f17" "ssmis_f18" "ssmis_f19" "ssmis_f20" \ "tmi_trmm" "v.seviri_m10" "imgr_insat3d" "abi_gr" "ahi_himawari8" ; do - ln -s "$FIXCRTM/$what.TauCoeff.bin" . - ln -s "$FIXCRTM/$what.SpcCoeff.bin" . + ln -s "${CRTM_FIX}/${what}.TauCoeff.bin" . + ln -s "${CRTM_FIX}/${what}.SpcCoeff.bin" . done for what in 'Aerosol' 'Cloud' ; do - ln -s "$FIXCRTM/${what}Coeff.bin" . + ln -s "${CRTM_FIX}/${what}Coeff.bin" . done -for what in $FIXCRTM/*Emis* ; do - ln -s $what . +for what in ${CRTM_FIX}/*Emis* ; do + ln -s ${what} . 
done exit 0 diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index 2899e69514..b4f23fa331 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -12,6 +12,9 @@ ulimit_s=$( ulimit -S -s ) # Find module command and purge: source "${HOMEgfs}/modulefiles/module-setup.sh.inc" +# Source versions file for runtime +source "${HOMEgfs}/versions/run.ver" + # Load our modules: module use "${HOMEgfs}/modulefiles" @@ -25,8 +28,14 @@ elif [[ -d /scratch1 ]] ; then # We are on NOAA Hera module load module_base.hera elif [[ -d /work ]] ; then - # We are on MSU Orion - module load module_base.orion + # We are on MSU Orion or Hercules + if [[ -d /apps/other ]] ; then + # Hercules + module load module_base.hercules + else + # Orion + module load module_base.orion + fi elif [[ -d /glade ]] ; then # We are on NCAR Yellowstone module load module_base.cheyenne @@ -46,4 +55,8 @@ module list ulimit -S -s "${ulimit_s}" unset ulimit_s -set_trace +# If this function exists in the environment, run it; else do not +ftype=$(type -t set_trace || echo "") +if [[ "${ftype}" == "function" ]]; then + set_trace +fi diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh new file mode 100755 index 0000000000..da3ab61818 --- /dev/null +++ b/ush/load_ufswm_modules.sh @@ -0,0 +1,67 @@ +#! /usr/bin/env bash + +############################################################### +if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then + echo "Loading modules quietly..." + set +x +fi + +# Setup runtime environment by loading modules +ulimit_s=$( ulimit -S -s ) + +source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEgfs}/ush/module-setup.sh" +if [[ "${MACHINE_ID}" != "noaacloud" ]]; then + module use "${HOMEgfs}/sorc/ufs_model.fd/tests" + module load modules.ufs_model.lua + module load prod_util + if [[ "${MACHINE_ID}" = "wcoss2" ]]; then + module load cray-pals + module load cfp + module load libjpeg + module load craype-network-ucx + module load cray-mpich-ucx + else + export UTILROOT=${prod_util_ROOT} + fi + module load wgrib2 + export WGRIB2=wgrib2 +fi +if [[ "${MACHINE_ID}" == "noaacloud" ]]; then + if [[ "${PW_CSP:-}" = "aws" ]]; then + + # TODO: This can be cleaned-up; most of this is a hack for now. + module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core" + module load "stack-intel" + module load "stack-intel-oneapi-mpi" + module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/" + module load "py39_4.12.0" + module load "ufs-weather-model-env/1.0.0" + export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0" + # TODO: Are there plans for EPIC to maintain this package or should GW provide support? 
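Back in link_crtm_fix.sh above, the fix directory is resolved with a nested parameter default, "${1:-${CRTM_FIX:-MISSING}}": the first positional argument wins, then the environment variable, then a sentinel that triggers the error exit. The pattern in isolation (resolve_dir and SOME_DIR are illustrative names, not from the patch):

    # Precedence: positional argument > environment variable > sentinel.
    resolve_dir() {
      local dir="${1:-${SOME_DIR:-MISSING}}"
      if [[ "${dir}" == "MISSING" ]]; then
        echo "no directory supplied" 1>&2
        return 19
      fi
      echo "${dir}"
    }
    SOME_DIR=/tmp resolve_dir    # prints /tmp
    resolve_dir /data            # prints /data; the argument wins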
+ export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util" + export PATH="${PATH}:/contrib/global-workflow/bin" + ndate_path="$(command -v ndate)" + export NDATE="${ndate_path}" + fi +fi + +module list +unset MACHINE_ID + +############################################################### +# exglobal_forecast.py requires the following in PYTHONPATH +# This will be moved to a module load when ready +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${HOMEgfs}/ush/python/pygfs" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" +export PYTHONPATH + +# Restore stack soft limit: +ulimit -S -s "${ulimit_s}" +unset ulimit_s + +# If this function exists in the environment, run it; else do not +ftype=$(type -t set_trace || echo "") +if [[ "${ftype}" == "function" ]]; then + set_trace +fi diff --git a/ush/merge_fv3_aerosol_tile.py b/ush/merge_fv3_aerosol_tile.py index decf6e9cba..58afa2fc8b 100755 --- a/ush/merge_fv3_aerosol_tile.py +++ b/ush/merge_fv3_aerosol_tile.py @@ -88,14 +88,6 @@ def merge_tile(base_file_name: str, ctrl_file_name: str, core_file_name: str, re print("FATAL ERROR: Inconsistent size of B(k) arrays: src=", bk.size, ", dst=", bi.size) sys.exit(108) - if not np.array_equal(ak, ai): - print("FATAL ERROR: A(k) coefficients must be identical") - sys.exit(109) - - if not np.array_equal(bk, bi): - print("FATAL ERROR: B(k) coefficients must be identical") - sys.exit(110) - dp = np.zeros(delp.shape) for k in range(0, dp.shape[0]): dp[k, :, :] = ak[k + 1] - ak[k] + psfc * (bk[k + 1] - bk[k]) diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl index 1b5d490102..502032da80 100755 --- a/ush/minmon_xtrct_costs.pl +++ b/ush/minmon_xtrct_costs.pl @@ -208,7 +208,7 @@ #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl index ecd44232da..0125c58ac8 100755 --- a/ush/minmon_xtrct_gnorms.pl +++ b/ush/minmon_xtrct_gnorms.pl @@ -414,7 +414,7 @@ sub updateGnormData { #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl index f6037d3f32..1b8186b6ad 100755 --- a/ush/minmon_xtrct_reduct.pl +++ b/ush/minmon_xtrct_reduct.pl @@ -72,7 +72,7 @@ #---------------------------- # copy outfile to $M_TANKverf #---------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/module-setup.sh b/ush/module-setup.sh index 9c27ab4f7c..fd656966bf 100755 --- a/ush/module-setup.sh +++ b/ush/module-setup.sh @@ -15,15 +15,29 @@ elif [[ ${MACHINE_ID} = hera* ]] ; then source /apps/lmod/lmod/init/bash fi export LMOD_SYSTEM_DEFAULT_MODULES=contrib + set +u module reset + set -u + +elif [[ ${MACHINE_ID} = hercules* ]] ; then + # We are on Hercules + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /apps/other/lmod/lmod/init/bash + fi + export LMOD_SYSTEM_DEFAULT_MODULES=contrib + set +u + module reset + set -u elif [[ ${MACHINE_ID} = orion* ]] ; then # We are on Orion if ( ! 
eval module help > /dev/null 2>&1 ) ; then - source /apps/lmod/init/bash + source /apps/lmod/lmod/init/bash fi export LMOD_SYSTEM_DEFAULT_MODULES=contrib + set +u module reset + set -u elif [[ ${MACHINE_ID} = s4* ]] ; then # We are on SSEC Wisconsin S4 @@ -102,6 +116,22 @@ elif [[ ${MACHINE_ID} = discover* ]]; then export PATH=${PATH}:${SPACK_ROOT}/bin . "${SPACK_ROOT}"/share/spack/setup-env.sh +# TODO: This can likely be made more general once other cloud +# platforms come online. +elif [[ ${MACHINE_ID} = "noaacloud" ]]; then + + export SPACK_ROOT=/contrib/global-workflow/spack-stack/spack + export PATH=${PATH}:${SPACK_ROOT}/bin + . "${SPACK_ROOT}"/share/spack/setup-env.sh + else echo WARNING: UNKNOWN PLATFORM 1>&2 fi + +# If this function exists in the environment, run it; else do not +ftype=$(type -t set_strict || echo "") +if [[ "${ftype}" == "function" ]]; then + set_strict +else + set +u +fi diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh index 3f6b3fed19..57ff87be5f 100755 --- a/ush/ozn_xtrct.sh +++ b/ush/ozn_xtrct.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" #------------------------------------------------------------------ # ozn_xtrct.sh @@ -11,9 +11,9 @@ source "$HOMEgfs/ush/preamble.sh" # $TANKverf_ozn. # # Calling scripts must define: -# $TANKverf_ozn -# $HOMEoznmon -# $PDATE +# $TANKverf_ozn +# $PDY +# $cyc # # Return values are # 0 = normal @@ -28,7 +28,7 @@ source "$HOMEgfs/ush/preamble.sh" # gdas_oznmon_satype.txt to $avail_satype which is # determined by the contents of the oznstat file. # Report any missing diag files in a file named -# bad_diag.$PDATE +# bad_diag.$PDY$cyc # check_diag_files() { pdate=$1 @@ -40,10 +40,10 @@ check_diag_files() { echo ""; echo ""; echo "--> check_diag_files" for type in ${found_satype}; do - len_check=$(echo ${avail_satype} | grep ${type} | wc -c) + len_check=$(echo "${avail_satype}" | grep "${type}" | wc -c) if [[ ${len_check} -le 1 ]]; then - echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found " >> ./${out_file} + echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found" >> "./${out_file}" fi done @@ -58,13 +58,13 @@ nregion=${nregion:-6} DO_DATA_RPT=${DO_DATA_RPT:-0} netcdf_boolean=".false." -if [[ $OZNMON_NETCDF -eq 1 ]]; then +if [[ ${OZNMON_NETCDF} -eq 1 ]]; then netcdf_boolean=".true." fi OZNMON_NEW_HDR=${OZNMON_NEW_HDR:-0} new_hdr="F" -if [[ $OZNMON_NEW_HDR -eq 1 ]]; then +if [[ ${OZNMON_NEW_HDR} -eq 1 ]]; then new_hdr="T" fi @@ -72,19 +72,19 @@ fi # if VALIDATE_DATA then locate and untar base file # validate=".FALSE." -if [[ $VALIDATE_DATA -eq 1 ]]; then - if [[ ! -e $ozn_val_file && ! -h $ozn_val_file ]]; then - echo "WARNING: VALIDATE_DATA set to 1, but unable to locate $ozn_val_file" +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + if [[ ! -e ${ozn_val_file} && ! -h ${ozn_val_file} ]]; then + echo "WARNING: VALIDATE_DATA set to 1, but unable to locate ${ozn_val_file}" echo " Setting VALIDATE_DATA to 0/OFF" VALIDATE_DATA=0 else validate=".TRUE." 
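The set +u / set -u bracketing added around module reset in module-setup.sh above exists because Lmod's initialization dereferences variables that may be unset; under the preamble's nounset setting that would abort the script. Reduced to its essentials, the pattern is:

    set -u        # nounset is the workflow default
    set +u        # relax it only for the vendor call, which is not nounset-clean
    module reset
    set -u        # restore strictness immediately afterwards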
- val_file=$(basename ${ozn_val_file}) - ${NCP} $ozn_val_file $val_file - tar -xvf $val_file + val_file=$(basename "${ozn_val_file}") + ${NCP} "${ozn_val_file}" "${val_file}" + tar -xvf "${val_file}" fi fi -echo "VALIDATE_DATA, validate = $VALIDATE_DATA, $validate " +echo "VALIDATE_DATA, validate = ${VALIDATE_DATA}, ${validate} " @@ -106,8 +106,8 @@ avail_satype=$(ls -1 d*ges* | sed -e 's/_/ /g;s/\./ /' | gawk '{ print $2 "_" $3 if [[ ${DO_DATA_RPT} -eq 1 ]]; then if [[ -e ${SATYPE_FILE} ]]; then - satype=$(cat ${SATYPE_FILE}) - check_diag_files ${PDATE} "${satype}" "${avail_satype}" + satype=$(cat "${SATYPE_FILE}") + check_diag_files "${PDY}${cyc}" "${satype}" "${avail_satype}" else echo "WARNING: missing ${SATYPE_FILE}" fi @@ -119,7 +119,7 @@ if [[ ${len_satype} -le 1 ]]; then satype=${avail_satype} fi -echo ${satype} +echo "${satype}" len_satype=$(echo -n "${satype}" | wc -c) @@ -132,12 +132,12 @@ else #-------------------------------------------------------------------- # Copy extraction programs to working directory # - ${NCP} ${HOMEoznmon}/exec/oznmon_time.x ./oznmon_time.x + ${NCP} "${HOMEgfs}/exec/oznmon_time.x" ./oznmon_time.x if [[ ! -e oznmon_time.x ]]; then iret=2 exit ${iret} fi - ${NCP} ${HOMEoznmon}/exec/oznmon_horiz.x ./oznmon_horiz.x + ${NCP} "${HOMEgfs}/exec/oznmon_horiz.x" ./oznmon_horiz.x if [[ ! -e oznmon_horiz.x ]]; then iret=3 exit ${iret} @@ -149,15 +149,15 @@ else # for ptype in ${ozn_ptype}; do - iyy=$(echo ${PDATE} | cut -c1-4) - imm=$(echo ${PDATE} | cut -c5-6) - idd=$(echo ${PDATE} | cut -c7-8) - ihh=$(echo ${PDATE} | cut -c9-10) + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} for type in ${avail_satype}; do - if [[ -f "diag_${type}_${ptype}.${PDATE}.gz" ]]; then - mv diag_${type}_${ptype}.${PDATE}.gz ${type}.${ptype}.gz - gunzip ./${type}.${ptype}.gz + if [[ -f "diag_${type}_${ptype}.${PDY}${cyc}.gz" ]]; then + mv "diag_${type}_${ptype}.${PDY}${cyc}.gz" "${type}.${ptype}.gz" + gunzip "./${type}.${ptype}.gz" echo "processing ptype, type: ${ptype}, ${type}" rm -f input @@ -188,17 +188,17 @@ EOF echo "oznmon_time.x HAS STARTED ${type}" - ./oznmon_time.x < input > stdout.time.${type}.${ptype} + ./oznmon_time.x < input > "stdout.time.${type}.${ptype}" echo "oznmon_time.x HAS ENDED ${type}" if [[ ! -d ${TANKverf_ozn}/time ]]; then - mkdir -p ${TANKverf_ozn}/time + mkdir -p "${TANKverf_ozn}/time" fi - $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/time/ - $NCP ${type}.${ptype}.${PDATE}.ieee_d ${TANKverf_ozn}/time/ + ${NCP} "${type}.${ptype}.ctl" "${TANKverf_ozn}/time/" + ${NCP} "${type}.${ptype}.${PDY}${cyc}.ieee_d" "${TANKverf_ozn}/time/" - $NCP bad* ${TANKverf_ozn}/time/ + ${NCP} bad* "${TANKverf_ozn}/time/" rm -f input @@ -219,17 +219,17 @@ EOF echo "oznmon_horiz.x HAS STARTED ${type}" - ./oznmon_horiz.x < input > stdout.horiz.${type}.${ptype} + ./oznmon_horiz.x < input > "stdout.horiz.${type}.${ptype}" echo "oznmon_horiz.x HAS ENDED ${type}" if [[ ! 
-d ${TANKverf_ozn}/horiz ]]; then - mkdir -p ${TANKverf_ozn}/horiz + mkdir -p "${TANKverf_ozn}/horiz" fi - $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/horiz/ + ${NCP} "${type}.${ptype}.ctl" "${TANKverf_ozn}/horiz/" - $COMPRESS ${type}.${ptype}.${PDATE}.ieee_d - $NCP ${type}.${ptype}.${PDATE}.ieee_d.${Z} ${TANKverf_ozn}/horiz/ + ${COMPRESS} "${type}.${ptype}.${PDY}${cyc}.ieee_d" + ${NCP} "${type}.${ptype}.${PDY}${cyc}.ieee_d.${Z}" "${TANKverf_ozn}/horiz/" echo "finished processing ptype, type: ${ptype}, ${type}" @@ -244,18 +244,11 @@ EOF tar -cvf stdout.horiz.tar stdout.horiz* ${COMPRESS} stdout.horiz.tar - ${NCP} stdout.horiz.tar.${Z} ${TANKverf_ozn}/horiz/ + ${NCP} "stdout.horiz.tar.${Z}" "${TANKverf_ozn}/horiz/" tar -cvf stdout.time.tar stdout.time* ${COMPRESS} stdout.time.tar - ${NCP} stdout.time.tar.${Z} ${TANKverf_ozn}/time/ + ${NCP} "stdout.time.tar.${Z}" "${TANKverf_ozn}/time/" fi -#------------------------------------------------------- -# Conditionally remove data files older than 40 days -# -if [[ ${CLEAN_TANKDIR:-0} -eq 1 ]]; then - ${HOMEoznmon}/ush/clean_tankdir.sh glb 40 -fi - exit ${iret} diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index 91b82a0d76..e08a4a2b17 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -32,7 +32,7 @@ fhrot: ${IAU_FHROT:-0} dt_atmos: ${DELTIM} calendar: ${calendar:-'julian'} -restart_interval: ${restart_interval} +restart_interval: ${restart_interval} -1 output_1st_tstep_rst: .false. quilting: ${QUILTING} @@ -41,22 +41,25 @@ write_groups: ${WRITE_GROUP:-1} write_tasks_per_group: ${WRTTASK_PER_GROUP:-24} itasks: 1 output_history: ${OUTPUT_HISTORY:-".true."} +history_file_on_native_grid: .false. write_dopost: ${WRITE_DOPOST:-".false."} write_nsflip: ${WRITE_NSFLIP:-".false."} num_files: ${NUM_FILES:-2} filename_base: 'atm' 'sfc' output_grid: ${OUTPUT_GRID} output_file: '${OUTPUT_FILETYPE_ATM}' '${OUTPUT_FILETYPE_SFC}' +zstandard_level: 0 ichunk2d: ${ichunk2d:-0} jchunk2d: ${jchunk2d:-0} ichunk3d: ${ichunk3d:-0} jchunk3d: ${jchunk3d:-0} kchunk3d: ${kchunk3d:-0} ideflate: ${ideflate:-1} -nbits: ${nbits:-14} +quantize_mode: 'quantize_bitround' +quantize_nsd: ${QUANTIZE_NSD:-0} imo: ${LONB_IMO} jmo: ${LATB_JMO} -output_fh: ${OUTPUT_FH} +output_fh: ${FV3_OUTPUT_FH} iau_offset: ${IAU_OFFSET:-0} EOF diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh index 6fc8b143cc..6ef743ebc9 100755 --- a/ush/parsing_namelists_CICE.sh +++ b/ush/parsing_namelists_CICE.sh @@ -39,13 +39,18 @@ else fi local max_blocks=-1 +local sec stepsperhr npt +sec=$(to_seconds "${current_cycle:8:2}0000") +stepsperhr=$((3600/ICETIM)) +npt=$((FHMAX*stepsperhr)) # Need this in order for dump_last to work + cat > ice_in < diag_table << EOF -FV3 Forecast -${gPDY:0:4} ${gPDY:4:2} ${gPDY:6:2} ${gcyc} 0 0 -EOF -cat $DIAG_TABLE >> diag_table +# build the diag_table +{ +echo "UFS_Weather_Model_Forecast" +if [[ "${DOIAU}" = "YES" ]]; then + echo "${previous_cycle:0:4} ${previous_cycle:4:2} ${previous_cycle:6:2} ${previous_cycle:8:2} 0 0" else -cat > diag_table << EOF -FV3 Forecast -${sPDY:0:4} ${sPDY:4:2} ${sPDY:6:2} ${scyc} 0 0 -EOF -cat $DIAG_TABLE >> diag_table + echo "${current_cycle:0:4} ${current_cycle:4:2} ${current_cycle:6:2} ${current_cycle:8:2} 0 0" fi - -if [ ! 
-z "${AERO_DIAG_TABLE:-}" ]; then - cat ${AERO_DIAG_TABLE} >> diag_table +cat "${DIAG_TABLE}" +if [[ -n "${AERO_DIAG_TABLE:-}" ]]; then + cat "${AERO_DIAG_TABLE}" fi +cat "${DIAG_TABLE_APPEND}" +} >> diag_table -cat $DIAG_TABLE_APPEND >> diag_table # copy data table -$NCP $DATA_TABLE data_table +${NCP} "${DATA_TABLE}" data_table # build field_table -if [ ! -z "${AERO_FIELD_TABLE:-}" ]; then - nrec=$( cat ${FIELD_TABLE} | wc -l ) +if [[ -n "${AERO_FIELD_TABLE:-}" ]]; then + nrec=$(wc -l < "${FIELD_TABLE}") prec=${nrec} if (( dnats > 0 )); then - prec=$( grep -F -n TRACER ${FIELD_TABLE} 2> /dev/null | tail -n ${dnats} | head -1 | cut -d: -f1 ) + prec=$( grep -F -n TRACER "${FIELD_TABLE}" 2> /dev/null | tail -n "${dnats}" | head -1 | cut -d: -f1 ) prec=${prec:-0} prec=$(( prec > 0 ? prec - 1 : prec )) fi { \ - head -n ${prec} ${FIELD_TABLE} ; \ - cat ${AERO_FIELD_TABLE} ; \ - tail -n $(( nrec - prec )) ${FIELD_TABLE} ; \ + head -n "${prec}" "${FIELD_TABLE}" ; \ + cat "${AERO_FIELD_TABLE}" ; \ + tail -n $(( nrec - prec )) "${FIELD_TABLE}" ; \ } > field_table # add non-prognostic tracers from additional table dnats=$(( dnats + dnats_aero )) else - $NCP $FIELD_TABLE field_table + ${NCP} "${FIELD_TABLE}" field_table fi cat > input.nml < input.nml < input.nml <> input.nml << EOF - nord_tr = ${nord_tr:-"2"} + nord_tr = ${nord_tr:-"2"} EOF fi cat >> input.nml << EOF grid_type = -1 - make_nh = $make_nh + make_nh = ${make_nh} fv_debug = ${fv_debug:-".false."} range_warn = ${range_warn:-".true."} reset_eta = .false. @@ -132,16 +119,16 @@ cat >> input.nml << EOF kord_mt = ${kord_mt:-"9"} kord_wz = ${kord_wz:-"9"} kord_tr = ${kord_tr:-"9"} - hydrostatic = $hydrostatic - phys_hydrostatic = $phys_hydrostatic - use_hydro_pressure = $use_hydro_pressure + hydrostatic = ${hydrostatic} + phys_hydrostatic = ${phys_hydrostatic} + use_hydro_pressure = ${use_hydro_pressure} beta = 0. a_imp = 1. p_fac = 0.1 - k_split = $k_split - n_split = $n_split + k_split = ${k_split} + n_split = ${n_split} nwat = ${nwat:-2} - na_init = $na_init + na_init = ${na_init} d_ext = 0. dnats = ${dnats} fv_sg_adj = ${fv_sg_adj:-"450"} @@ -149,56 +136,56 @@ cat >> input.nml << EOF nord = ${nord:-3} dddmp = ${dddmp:-0.1} d4_bg = ${d4_bg:-0.15} - vtdm4 = $vtdm4 + vtdm4 = ${vtdm4} delt_max = ${delt_max:-"0.002"} ke_bg = 0. - do_vort_damp = $do_vort_damp - external_ic = $external_ic + do_vort_damp = ${do_vort_damp} + external_ic = ${external_ic} external_eta = ${external_eta:-.true.} gfs_phil = ${gfs_phil:-".false."} - nggps_ic = $nggps_ic - mountain = $mountain - ncep_ic = $ncep_ic - d_con = $d_con - hord_mt = $hord_mt - hord_vt = $hord_xx - hord_tm = $hord_xx - hord_dp = -$hord_xx + nggps_ic = ${nggps_ic} + mountain = ${mountain} + ncep_ic = ${ncep_ic} + d_con = ${d_con} + hord_mt = ${hord_mt} + hord_vt = ${hord_xx} + hord_tm = ${hord_xx} + hord_dp = -${hord_xx} hord_tr = ${hord_tr:-"8"} adjust_dry_mass = ${adjust_dry_mass:-".true."} dry_mass=${dry_mass:-98320.0} - consv_te = $consv_te + consv_te = ${consv_te} do_sat_adj = ${do_sat_adj:-".false."} fast_tau_w_sec = ${fast_tau_w_sec:-"0.2"} consv_am = .false. fill = .true. dwind_2d = .false. - print_freq = $print_freq - warm_start = $warm_start - no_dycore = $no_dycore + print_freq = ${print_freq} + warm_start = ${warm_start} + no_dycore = ${no_dycore} z_tracer = .true. 
agrid_vel_rst = ${agrid_vel_rst:-".true."} - read_increment = $read_increment - res_latlon_dynamics = $res_latlon_dynamics + read_increment = ${read_increment} + res_latlon_dynamics = ${res_latlon_dynamics} ${fv_core_nml-} / &external_ic_nml - filtered_terrain = $filtered_terrain - levp = $LEVS - gfs_dwinds = $gfs_dwinds + filtered_terrain = ${filtered_terrain} + levp = ${LEVS} + gfs_dwinds = ${gfs_dwinds} checker_tr = .false. nt_checker = 0 ${external_ic_nml-} / &gfs_physics_nml - fhzero = $FHZER + fhzero = ${FHZER} h2o_phys = ${h2o_phys:-".true."} ldiag3d = ${ldiag3d:-".false."} qdiag3d = ${qdiag3d:-".false."} print_diff_pgr = ${print_diff_pgr:-".false."} - fhcyc = $FHCYC + fhcyc = ${FHCYC} use_ufo = ${use_ufo:-".true."} pre_rad = ${pre_rad:-".false."} imp_physics = ${imp_physics:-"99"} @@ -289,12 +276,13 @@ EOF EOF ;; FV3_GFS_v17_p8_*mynn) + local default_dt_inner=$(( DELTIM/2 )) cat >> input.nml << EOF iovr = ${iovr:-"3"} ltaerosol = ${ltaerosol:-".false."} - lradar = ${lradar:-".false."} + lradar = ${lradar:-".true."} ttendlim = ${ttendlim:-"-999"} - dt_inner = ${dt_inner:-"$(echo "$DELTIM/2" |bc)"} + dt_inner = ${dt_inner:-"${default_dt_inner}"} sedi_semi = ${sedi_semi:-".true."} decfl = ${decfl:-"10"} oz_phys = ${oz_phys:-".false."} @@ -318,11 +306,11 @@ EOF do_ugwp = ${do_ugwp:-".false."} do_tofd = ${do_tofd:-".false."} gwd_opt = ${gwd_opt:-"2"} - do_ugwp_v0 = ${do_ugwp_v0:-".true."} - do_ugwp_v1 = ${do_ugwp_v1:-".false."} + do_ugwp_v0 = ${do_ugwp_v0:-".false."} + do_ugwp_v1 = ${do_ugwp_v1:-".true."} do_ugwp_v0_orog_only = ${do_ugwp_v0_orog_only:-".false."} do_ugwp_v0_nst_only = ${do_ugwp_v0_nst_only:-".false."} - do_gsl_drag_ls_bl = ${do_gsl_drag_ls_bl:-".false."} + do_gsl_drag_ls_bl = ${do_gsl_drag_ls_bl:-".true."} do_gsl_drag_ss = ${do_gsl_drag_ss:-".true."} do_gsl_drag_tofd = ${do_gsl_drag_tofd:-".true."} do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."} @@ -332,12 +320,13 @@ EOF EOF ;; FV3_GFS_v17*) + local default_dt_inner=$(( DELTIM/2 )) cat >> input.nml << EOF iovr = ${iovr:-"3"} ltaerosol = ${ltaerosol:-".false."} - lradar = ${lradar:-".false."} + lradar = ${lradar:-".true."} ttendlim = ${ttendlim:-"-999"} - dt_inner = ${dt_inner:-"$(echo "$DELTIM/2" |bc)"} + dt_inner = ${dt_inner:-"${default_dt_inner}"} sedi_semi = ${sedi_semi:-".true."} decfl = ${decfl:-"10"} oz_phys = ${oz_phys:-".false."} @@ -352,11 +341,11 @@ EOF do_ugwp = ${do_ugwp:-".false."} do_tofd = ${do_tofd:-".false."} gwd_opt = ${gwd_opt:-"2"} - do_ugwp_v0 = ${do_ugwp_v0:-".true."} - do_ugwp_v1 = ${do_ugwp_v1:-".false."} + do_ugwp_v0 = ${do_ugwp_v0:-".false."} + do_ugwp_v1 = ${do_ugwp_v1:-".true."} do_ugwp_v0_orog_only = ${do_ugwp_v0_orog_only:-".false."} do_ugwp_v0_nst_only = ${do_ugwp_v0_nst_only:-".false."} - do_gsl_drag_ls_bl = ${do_gsl_drag_ls_bl:-".false."} + do_gsl_drag_ls_bl = ${do_gsl_drag_ls_bl:-".true."} do_gsl_drag_ss = ${do_gsl_drag_ss:-".true."} do_gsl_drag_tofd = ${do_gsl_drag_tofd:-".true."} do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."} @@ -378,9 +367,9 @@ cat >> input.nml <> input.nml <> input.nml <> input.nml <> input.nml << EOF fscav_aero = ${fscav_aero:-'*:0.0'} EOF @@ -458,13 +451,13 @@ cat >> input.nml <> input.nml << EOF iaufhrs = ${IAUFHRS} iau_delthrs = ${IAU_DELTHRS} @@ -474,7 +467,7 @@ if [ $DOIAU = "YES" ]; then EOF fi -if [ ${DO_CA:-"NO"} = "YES" ]; then +if [[ ${DO_CA:-"NO"} = "YES" ]]; then cat >> input.nml << EOF do_ca = .true. 
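The dt_inner default in the physics hunks above previously shelled out to bc; the patch precomputes default_dt_inner with native arithmetic expansion instead. No external process, and integer semantics, which is what the namelist expects:

    DELTIM=300
    default_dt_inner=$(( DELTIM / 2 ))          # 150; no fork to bc
    dt_inner=${dt_inner:-${default_dt_inner}}   # still overridable from the environment
    echo "dt_inner = ${dt_inner}"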
ca_global = ${ca_global:-".false."} @@ -491,19 +484,19 @@ if [ ${DO_CA:-"NO"} = "YES" ]; then EOF fi -if [ ${DO_LAND_PERT:-"NO"} = "YES" ]; then +if [[ ${DO_LAND_PERT:-"NO"} = "YES" ]]; then cat >> input.nml << EOF lndp_type = ${lndp_type:-2} n_var_lndp = ${n_var_lndp:-0} EOF fi - cat >> input.nml << EOF - +# Close &gfs_physics_nml section +cat >> input.nml << EOF / EOF -if [ $knob_ugwp_version -eq 0 ]; then +if [[ ${knob_ugwp_version} -eq 0 ]]; then cat >> input.nml << EOF &cires_ugwp_nml knob_ugwp_solver = ${knob_ugwp_solver:-2} @@ -522,7 +515,7 @@ if [ $knob_ugwp_version -eq 0 ]; then EOF fi -if [ $knob_ugwp_version -eq 1 ]; then +if [[ ${knob_ugwp_version} -eq 1 ]]; then cat >> input.nml << EOF &cires_ugwp_nml knob_ugwp_solver = ${knob_ugwp_solver:-2} @@ -535,7 +528,8 @@ if [ $knob_ugwp_version -eq 1 ]; then knob_ugwp_doheat = ${knob_ugwp_doheat:-1} knob_ugwp_dokdis = ${knob_ugwp_dokdis:-2} knob_ugwp_ndx4lh = ${knob_ugwp_ndx4lh:-4} - knob_ugwp_palaunch = ${knob_ugwp_palaunch:-275.0e2} + knob_ugwp_version = ${knob_ugwp_version:-1} + knob_ugwp_palaunch = ${knob_ugwp_palaunch:-275.0e2} knob_ugwp_nslope = ${knob_ugwp_nslope:-1} knob_ugwp_lzmax = ${knob_ugwp_lzmax:-15.750e3} knob_ugwp_lzmin = ${knob_ugwp_lzmin:-0.75e3} @@ -548,8 +542,6 @@ if [ $knob_ugwp_version -eq 1 ]; then EOF fi - - echo "" >> input.nml cat >> input.nml <> input.nml <> input.nml #if [ $MEMBER -gt 0 ]; then -if [ $DO_SPPT = "YES" -o $DO_SHUM = "YES" -o $DO_SKEB = "YES" -o $DO_LAND_PERT = "YES" ]; then +if [[ "${DO_SPPT}" = "YES" || "${DO_SHUM}" = "YES" || "${DO_SKEB}" = "YES" || "${DO_LAND_PERT}" = "YES" ]]; then cat >> input.nml << EOF &nam_stochy EOF - if [ $DO_SKEB = "YES" ]; then + if [[ ${DO_SKEB} = "YES" ]]; then cat >> input.nml << EOF - skeb = $SKEB - iseed_skeb = ${ISEED_SKEB:-$ISEED} + skeb = ${SKEB} + iseed_skeb = ${ISEED_SKEB:-${ISEED}} skeb_tau = ${SKEB_TAU:-"-999."} skeb_lscale = ${SKEB_LSCALE:-"-999."} skebnorm = ${SKEBNORM:-"1"} @@ -681,19 +674,19 @@ EOF EOF fi - if [ $DO_SHUM = "YES" ]; then + if [[ ${DO_SHUM} = "YES" ]]; then cat >> input.nml << EOF - shum = $SHUM - iseed_shum = ${ISEED_SHUM:-$ISEED} + shum = ${SHUM} + iseed_shum = ${ISEED_SHUM:-${ISEED}} shum_tau = ${SHUM_TAU:-"-999."} shum_lscale = ${SHUM_LSCALE:-"-999."} EOF fi - if [ $DO_SPPT = "YES" ]; then + if [[ ${DO_SPPT} = "YES" ]]; then cat >> input.nml << EOF - sppt = $SPPT - iseed_sppt = ${ISEED_SPPT:-$ISEED} + sppt = ${SPPT} + iseed_sppt = ${ISEED_SPPT:-${ISEED}} sppt_tau = ${SPPT_TAU:-"-999."} sppt_lscale = ${SPPT_LSCALE:-"-999."} sppt_logit = ${SPPT_LOGIT:-".true."} @@ -707,13 +700,13 @@ EOF / EOF - if [ $DO_LAND_PERT = "YES" ]; then + if [[ ${DO_LAND_PERT} = "YES" ]]; then cat >> input.nml << EOF &nam_sfcperts lndp_type = ${lndp_type} LNDP_TAU = ${LNDP_TAU} LNDP_SCALE = ${LNDP_SCALE} - ISEED_LNDP = ${ISEED_LNDP:-$ISEED} + ISEED_LNDP = ${ISEED_LNDP:-${ISEED}} lndp_var_list = ${lndp_var_list} lndp_prt_list = ${lndp_prt_list} ${nam_sfcperts_nml:-} @@ -738,5 +731,9 @@ EOF fi -echo "$(cat input.nml)" +# Echo out formatted "input.nml" +echo "====================================" +echo "FV3_namelists(): 'input.nml'" +cat input.nml +echo "====================================" } diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh index add7090fe7..8059096363 100755 --- a/ush/parsing_namelists_MOM6.sh +++ b/ush/parsing_namelists_MOM6.sh @@ -62,7 +62,7 @@ echo "$(cat input.nml)" #Copy MOM_input and edit: -${NCP} -pf "${HOMEgfs}/parm/mom6/MOM_input_template_${OCNRES}" "${DATA}/INPUT/" +${NCP} -pf 
"${HOMEgfs}/parm/ufs/mom6/MOM_input_template_${OCNRES}" "${DATA}/INPUT/" sed -e "s/@\[DT_THERM_MOM6\]/${DT_THERM_MOM6}/g" \ -e "s/@\[DT_DYNAM_MOM6\]/${DT_DYNAM_MOM6}/g" \ -e "s/@\[MOM6_RIVER_RUNOFF\]/${MOM6_RIVER_RUNOFF}/g" \ @@ -75,12 +75,15 @@ sed -e "s/@\[DT_THERM_MOM6\]/${DT_THERM_MOM6}/g" \ -e "s/@\[CHLCLIM\]/${CHLCLIM}/g" \ -e "s/@\[DO_OCN_SPPT\]/${OCN_SPPT}/g" \ -e "s/@\[PERT_EPBL\]/${PERT_EPBL}/g" \ + -e "s/@\[MOM6_DIAG_COORD_DEF_Z_FILE\]/${MOM6_DIAG_COORD_DEF_Z_FILE}/g" \ + -e "s/@\[TOPOEDITS\]/${TOPOEDITS}/g" \ + -e "s/@\[MOM6_DIAG_MISVAL\]/${MOM6_DIAG_MISVAL}/g" \ -e "s/@\[ODA_INCUPD_NHOURS\]/${ODA_INCUPD_NHOURS}/g" \ -e "s/@\[ODA_INCUPD\]/${ODA_INCUPD}/g" "${DATA}/INPUT/MOM_input_template_${OCNRES}" > "${DATA}/INPUT/MOM_input" rm "${DATA}/INPUT/MOM_input_template_${OCNRES}" #data table for runoff: -DATA_TABLE=${DATA_TABLE:-${PARM_FV3DIAG}/data_table} +DATA_TABLE=${DATA_TABLE:-${HOMEgfs}/parm/ufs/fv3/data_table} ${NCP} "${DATA_TABLE}" "${DATA}/data_table_template" sed -e "s/@\[FRUNOFF\]/${FRUNOFF}/g" "${DATA}/data_table_template" > "${DATA}/data_table" rm "${DATA}/data_table_template" diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh index c53af9f18f..9b0a94695c 100755 --- a/ush/parsing_namelists_WW3.sh +++ b/ush/parsing_namelists_WW3.sh @@ -79,8 +79,8 @@ WW3_namelists(){ if [ $waveMULTIGRID = ".true." ]; then # ww3_multi template - if [ -f $PARMwave/ww3_multi.${NET}.inp.tmpl ]; then - cp $PARMwave/ww3_multi.${NET}.inp.tmpl ww3_multi.inp.tmpl + if [ -f $PARMwave/ww3_multi.inp.tmpl ]; then + cp $PARMwave/ww3_multi.inp.tmpl ww3_multi.inp.tmpl fi if [ ! -f ww3_multi.inp.tmpl ]; then echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 MULTI INPUT FILE" @@ -88,8 +88,8 @@ WW3_namelists(){ fi else # ww3_multi template - if [ -f $PARMwave/ww3_shel.${NET}.inp.tmpl ]; then - cp $PARMwave/ww3_shel.${NET}.inp.tmpl ww3_shel.inp.tmpl + if [ -f $PARMwave/ww3_shel.inp.tmpl ]; then + cp $PARMwave/ww3_shel.inp.tmpl ww3_shel.inp.tmpl fi if [ ! -f ww3_shel.inp.tmpl ]; then echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 SHEL INPUT FILE" diff --git a/ush/product_functions.sh b/ush/product_functions.sh new file mode 100755 index 0000000000..05b23fbee2 --- /dev/null +++ b/ush/product_functions.sh @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +function trim_rh() { + # trim RH values larger than 100. + local filename=$1 + ${WGRIB2} "${filename}" \ + -not_if ':RH:' -grib "${filename}.new" \ + -if ':RH:' -rpn "10:*:0.5:+:floor:1000:min:10:/" -set_grib_type same \ + -set_scaling -1 0 -grib_out "${filename}.new" + rc=$? + if (( rc == 0 )); then mv "${filename}.new" "${filename}"; fi + return "${rc}" +} + +function mod_icec() { + # modify icec based on land-sea mask + local filename=$1 + ${WGRIB2} "${filename}" \ + -if 'LAND' -rpn 'sto_1' -fi \ + -if 'ICEC' -rpn 'rcl_1:0:==:*' -fi \ + -set_grib_type same \ + -set_scaling same same \ + -grib_out "${filename}.new" + rc=$? + if (( rc == 0 )); then mv "${filename}.new" "${filename}"; fi + return "${rc}" +} + +function scale_dec() { + # change the scaling for temperature, precipitable water, and water-equivalent accumlated snow depth + local filename=$1 + ${WGRIB2} "${filename}" -not_if ':(TMP|PWAT|WEASD):' -grib "${filename}.new" \ + -if ':(TMP|PWAT):' -set_grib_type same \ + -set_scaling -1 0 -grib_out "${filename}.new" \ + -if ':(WEASD):' -set_grib_type same \ + -set_scaling 0 0 -grib_out "${filename}.new" + rc=$? 
+ if (( rc == 0 )); then mv "${filename}.new" "${filename}"; fi + return "${rc}" +} diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index e3c9ad50a2..0e515a0df4 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -7,15 +7,15 @@ from logging import getLogger from typing import Dict, List, Any -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta -from pygw.fsutils import rm_p, chdir -from pygw.timetools import to_fv3time -from pygw.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException +from wxflow import (AttrDict, + FileHandler, + add_to_datetime, to_fv3time, to_timedelta, + chdir, + to_fv3time, + YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml, + logit, + Executable, + WorkflowException) from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) @@ -30,7 +30,7 @@ def __init__(self, config): super().__init__(config) _res = int(self.config['CASE'][1:]) - _res_enkf = int(self.config['CASE_ENS'][1:]) + _res_anl = int(self.config['CASE_ANL'][1:]) _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") @@ -41,11 +41,12 @@ def __init__(self, config): 'npy_ges': _res + 1, 'npz_ges': self.config.LEVS - 1, 'npz': self.config.LEVS - 1, - 'npx_anl': _res_enkf + 1, - 'npy_anl': _res_enkf + 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, 'npz_anl': self.config['LEVS'] - 1, 'AERO_WINDOW_BEGIN': _window_begin, 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", + 'aero_bkg_fhr': map(int, self.config['aero_bkg_times'].split(',')), 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", @@ -72,15 +73,15 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aero_crtm_coeff.yaml') + crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_crtm_coeff.yaml') logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aero_jedi_fix.yaml') + jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aero_jedi_fix.yaml') logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) FileHandler(jedi_fix_list).sync() # stage berror files @@ -137,8 +138,6 @@ def finalize(self: Analysis) -> None: - applying the increments to the original RESTART files - moving the increment files to the ROTDIR - Please note that some of these steps are 
temporary and will be modified - once the model is able to read aerosol tracer increments. """ # ---- tar up diags # path of output tar statfile @@ -167,10 +166,12 @@ def finalize(self: Analysis) -> None: } FileHandler(yaml_copy).sync() - # ---- NOTE below is 'temporary', eventually we will not be using FMS RESTART formatted files - # ---- all of the rest of this method will need to be changed but requires model and JEDI changes # ---- copy RESTART fv_tracer files for future reference - template = '{}.fv_tracer.res.tile{}.nc'.format(to_fv3time(self.task_config.current_cycle), '{tilenum}') + if self.task_config.DOIAU: + bkgtime = self.task_config.AERO_WINDOW_BEGIN + else: + bkgtime = self.task_config.current_cycle + template = '{}.fv_tracer.res.tile{}.nc'.format(to_fv3time(bkgtime), '{tilenum}') bkglist = [] for itile in range(1, self.task_config.ntiles + 1): tracer = template.format(tilenum=itile) @@ -200,15 +201,18 @@ def clean(self): @logit(logger) def _add_fms_cube_sphere_increments(self: Analysis) -> None: """This method adds increments to RESTART files to get an analysis - NOTE this is only needed for now because the model cannot read aerosol increments. - This method will be assumed to be deprecated before this is implemented operationally """ + if self.task_config.DOIAU: + bkgtime = self.task_config.AERO_WINDOW_BEGIN + else: + bkgtime = self.task_config.current_cycle # only need the fv_tracer files - template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' - inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' + template) - bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, template) + restart_template = f'{to_fv3time(bkgtime)}.fv_tracer.res.tile{{tilenum}}.nc' + increment_template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' + inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ increment_template) + bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, restart_template) # get list of increment vars - incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aeroanl_inc_vars.yaml') + incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'gdas', 'aeroanl_inc_vars.yaml') incvars = YAMLFile(path=incvars_list_path)['incvars'] super().add_fv3_increments(inc_template, bkg_template, incvars) @@ -238,16 +242,24 @@ def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: # Start accumulating list of background files to copy bkglist = [] - # aerosol DA needs coupler - basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # aerosol DA only needs core/tracer - for ftype in ['core', 'tracer']: - template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + # if using IAU, we can use FGAT + bkgtimes = [] + begintime = task_config.previous_cycle + for fcsthr in task_config.aero_bkg_fhr: + bkgtimes.append(add_to_datetime(begintime, to_timedelta(f"{fcsthr}H"))) + + # now loop over background times + for bkgtime in bkgtimes: + # aerosol DA needs coupler + basename = f'{to_fv3time(bkgtime)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # aerosol DA only needs core/tracer + for ftype in ['core', 'tracer']: + template = f'{to_fv3time(bkgtime)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) bkg_dict = { 'mkdir': [run_dir], @@ -279,18 +291,21 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: b_datestr = to_fv3time(config.BERROR_DATE) berror_list = [] - for ftype in ['cor_rh', 'cor_rv', 'stddev']: + for ftype in ['stddev']: coupler = f'{b_datestr}.{ftype}.coupler.res' berror_list.append([ os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) ]) - template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' + template = f'{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' for itile in range(1, config.ntiles + 1): tracer = template.format(tilenum=itile) berror_list.append([ os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) ]) - + radius = 'cor_aero_universe_radius' + berror_list.append([ + os.path.join(b_dir, radius), os.path.join(config.DATA, 'berror', radius) + ]) nproc = config.ntiles * config.layout_x * config.layout_y for nn in range(1, nproc + 1): berror_list.append([ diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 7c24c9cbdb..cfd1fb2206 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -1,15 +1,15 @@ #!/usr/bin/env python3 import os +import glob +import tarfile from logging import getLogger from netCDF4 import Dataset -from typing import List, Dict, Any +from typing import List, Dict, Any, Union -from pygw.yaml_file import YAMLFile, parse_j2yaml, parse_yamltmpl -from pygw.file_utils import FileHandler -from pygw.template import Template, TemplateConstants -from pygw.logger import logit -from pygw.task import Task +from 
wxflow import (parse_j2yaml, FileHandler, rm_p, logit, + Task, Executable, WorkflowException, to_fv3time, to_YMD, + Template, TemplateConstants) logger = getLogger(__name__.split('.')[-1]) @@ -40,7 +40,7 @@ def initialize(self) -> None: self.link_jediexe() @logit(logger) - def get_obs_dict(self: Task) -> Dict[str, Any]: + def get_obs_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy This method uses the OBS_LIST configuration variable to generate a dictionary @@ -72,7 +72,7 @@ def get_obs_dict(self: Task) -> Dict[str, Any]: return obs_dict @logit(logger) - def get_bias_dict(self: Task) -> Dict[str, Any]: + def get_bias_dict(self) -> Dict[str, Any]: """Compile a dictionary of observation files to copy This method uses the OBS_LIST configuration variable to generate a dictionary @@ -176,7 +176,7 @@ def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: return berror_dict @logit(logger) - def link_jediexe(self: Task) -> None: + def link_jediexe(self) -> None: """Compile a dictionary of background error files to copy This method links a JEDI executable to the run directory @@ -192,10 +192,137 @@ def link_jediexe(self: Task) -> None: exe_src = self.task_config.JEDIEXE # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. - logger.debug(f"Link executable {exe_src} to DATA/") + logger.info(f"Link executable {exe_src} to DATA/") + logger.warn("Linking is not permitted per EE2.") exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) if os.path.exists(exe_dest): rm_p(exe_dest) os.symlink(exe_src, exe_dest) return + + @staticmethod + @logit(logger) + def get_fv3ens_dict(config: Dict[str, Any]) -> Dict[str, Any]: + """Compile a dictionary of ensemble member restarts to copy + + This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer) + that are needed for global atmens DA and returns said dictionary for use by the FileHandler class. 
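For orientation, the copy list that get_fv3ens_dict builds (its docstring continues below) is the Python replacement for a shell staging loop of roughly this shape; the restart directory layout and ${current_cycle} timestamp here are schematic stand-ins, not taken from the patch:

    # Schematic shell equivalent of the get_fv3ens_dict copy list (fh006 restarts).
    for imem in $(seq 1 "${NMEM_ENS}"); do
      memchar=$(printf 'mem%03d' "${imem}")
      rst_dir="${ROTDIR}/enkfgdas.${gPDY}/${gcyc}/${memchar}/atmos/RESTART"   # illustrative layout
      run_dir="${DATA}/ens/${memchar}"
      mkdir -p "${run_dir}"
      cp "${rst_dir}/${current_cycle}.coupler.res" "${run_dir}/"
      for ftype in fv_core.res fv_srf_wnd.res fv_tracer.res phy_data sfc_data; do
        for itile in 1 2 3 4 5 6; do
          cp "${rst_dir}/${current_cycle}.${ftype}.tile${itile}.nc" "${run_dir}/"
        done
      done
    done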
+ + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + ens_dict: Dict + a dictionary containing the list of ensemble member restart files to copy for FileHandler + """ + # NOTE for now this is FV3 restart files and just assumed to be fh006 + + # define template + template_res = config.COM_ATMOS_RESTART_TMPL + prev_cycle = config.previous_cycle + tmpl_res_dict = { + 'ROTDIR': config.ROTDIR, + 'RUN': config.RUN, + 'YMD': to_YMD(prev_cycle), + 'HH': prev_cycle.strftime('%H'), + 'MEMDIR': None + } + + # construct ensemble member file list + dirlist = [] + enslist = [] + for imem in range(1, config.NMEM_ENS + 1): + memchar = f"mem{imem:03d}" + + # create directory path for ensemble member restart + dirlist.append(os.path.join(config.DATA, config.dirname, f'mem{imem:03d}')) + + # get FV3 restart files, this will be a lot simpler when using history files + tmpl_res_dict['MEMDIR'] = memchar + rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get) + run_dir = os.path.join(config.DATA, config.dirname, memchar) + + # atmens DA needs coupler + basename = f'{to_fv3time(config.current_cycle)}.coupler.res' + enslist.append([os.path.join(rst_dir, basename), os.path.join(config.DATA, config.dirname, memchar, basename)]) + + # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']: + template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + basename = template.format(tilenum=itile) + enslist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + ens_dict = { + 'mkdir': dirlist, + 'copy': enslist, + } + return ens_dict + + @staticmethod + @logit(logger) + def execute_jediexe(workdir: Union[str, os.PathLike], aprun_cmd: str, jedi_exec: str, jedi_yaml: str) -> None: + """ + Run a JEDI executable + + Parameters + ---------- + workdir : str | os.PathLike + Working directory to run in, containing the necessary files and executable + aprun_cmd : str + Launcher command e.g. mpirun -np or srun, etc. + jedi_exec : str + Name of the JEDI executable e.g. fv3jedi_var.x + jedi_yaml : str | os.PathLike + Name of the yaml file to feed the JEDI executable e.g. fv3jedi_var.yaml + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + + os.chdir(workdir) + + exec_cmd = Executable(aprun_cmd) + exec_cmd.add_default_arg([os.path.join(workdir, jedi_exec), jedi_yaml]) + + logger.info(f"Executing {exec_cmd}") + try: + exec_cmd() + except OSError: + logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}") + raise OSError(f"{exec_cmd}") + except Exception: + logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}") + raise WorkflowException(f"{exec_cmd}") + + @staticmethod + @logit(logger) + def tgz_diags(statfile: str, diagdir: str) -> None: + """tar and gzip the diagnostic files resulting from a JEDI analysis. + + Parameters + ---------- + statfile : str | os.PathLike + Path to the output .tar.gz/.tgz file that will contain the diag*.nc4 files e.g.
atmstat.tgz + diagdir : str | os.PathLike + Directory containing JEDI diag files + """ + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(diagdir, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {statfile}") + + # Open tar.gz file for writing + with tarfile.open(statfile, "w:gz") as tgz: + # Add diag files to tarball + for diagfile in diags: + tgz.add(diagfile, arcname=os.path.basename(diagfile)) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 3ab0ae3240..da41574fc9 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -7,14 +7,14 @@ from logging import getLogger from typing import Dict, List, Any -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH -from pygw.fsutils import rm_p, chdir -from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException +from wxflow import (AttrDict, + FileHandler, + add_to_datetime, to_fv3time, to_timedelta, to_YMDH, + chdir, + parse_yamltmpl, parse_j2yaml, save_as_yaml, + logit, + Executable, + WorkflowException) from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) @@ -71,22 +71,33 @@ def initialize(self: Analysis) -> None: super().initialize() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) FileHandler(jedi_fix_list).sync() - # stage berror files - # copy static background error files, otherwise it will assume ID matrix + # stage static background error files, otherwise it will assume ID matrix logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") FileHandler(self.get_berror_dict(self.task_config)).sync() + # stage ensemble files for use in hybrid background error + if self.task_config.DOHYBVAR: + logger.debug(f"Stage ensemble files for DOHYBVAR {self.task_config.DOHYBVAR}") + localconf = AttrDict() + keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', + 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] + for key in keys: + localconf[key] = self.task_config[key] + localconf.RUN = 'enkf' + self.task_config.RUN + localconf.dirname = 'ens' + FileHandler(self.get_fv3ens_dict(localconf)).sync() + # stage backgrounds FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index c5c7e5b145..9cf84c07c7 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ 
b/ush/python/pygfs/task/atmens_analysis.py @@ -5,17 +5,17 @@ import gzip import tarfile from logging import getLogger -from typing import Dict, List, Any - -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD -from pygw.fsutils import rm_p, chdir -from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException -from pygw.template import Template, TemplateConstants +from typing import Dict, List + +from wxflow import (AttrDict, + FileHandler, + add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD, + chdir, + parse_yamltmpl, parse_j2yaml, save_as_yaml, + logit, + Executable, + WorkflowException, + Template, TemplateConstants) from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) @@ -96,19 +96,26 @@ def initialize(self: Analysis) -> None: FileHandler({'mkdir': dirlist}).sync() # stage CRTM fix files - crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_crtm_coeff.yaml') logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") - crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + crtm_fix_list = parse_j2yaml(crtm_fix_list_path, self.task_config) FileHandler(crtm_fix_list).sync() # stage fix files - jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'atm_jedi_fix.yaml') logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") - jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config) FileHandler(jedi_fix_list).sync() # stage backgrounds - FileHandler(self.get_bkg_dict()).sync() + logger.debug(f"Stage ensemble member background files") + localconf = AttrDict() + keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', + 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] + for key in keys: + localconf[key] = self.task_config[key] + localconf.dirname = 'bkg' + FileHandler(self.get_fv3ens_dict(localconf)).sync() # generate ensemble da YAML file logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") @@ -286,62 +293,3 @@ def jedi2fv3inc(self: Analysis) -> None: cmd.add_default_arg(atminc_fv3) logger.debug(f"Executing {cmd}") cmd(output='stdout', error='stderr') - - @logit(logger) - def get_bkg_dict(self: Analysis) -> Dict[str, List[str]]: - """Compile a dictionary of model background files to copy - - This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer) - that are needed for global atmens DA and returns said dictionary for use by the FileHandler class. 
- - Parameters - ---------- - None - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - # NOTE for now this is FV3 restart files and just assumed to be fh006 - # loop over ensemble members - rstlist = [] - bkglist = [] - - # get FV3 restart files, this will be a lot simpler when using history files - template_res = self.task_config.COM_ATMOS_RESTART_TMPL - tmpl_res_dict = { - 'ROTDIR': self.task_config.ROTDIR, - 'RUN': self.task_config.RUN, - 'YMD': to_YMD(self.task_config.previous_cycle), - 'HH': self.task_config.previous_cycle.strftime('%H'), - 'MEMDIR': None - } - - for imem in range(1, self.task_config.NMEM_ENS + 1): - memchar = f"mem{imem:03d}" - - # get FV3 restart files, this will be a lot simpler when using history files - tmpl_res_dict['MEMDIR'] = memchar - rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get) - rstlist.append(rst_dir) - - run_dir = os.path.join(self.task_config.DATA, 'bkg', memchar) - - # atmens DA needs coupler - basename = f'{to_fv3time(self.task_config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(self.task_config.DATA, 'bkg', memchar, basename)]) - - # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data - for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']: - template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, self.task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': rstlist, - 'copy': bkglist, - } - - return bkg_dict diff --git a/ush/python/pygfs/task/gfs_forecast.py b/ush/python/pygfs/task/gfs_forecast.py index 3527c623e0..1aa550fd62 100644 --- a/ush/python/pygfs/task/gfs_forecast.py +++ b/ush/python/pygfs/task/gfs_forecast.py @@ -2,8 +2,7 @@ import logging from typing import Dict, Any -from pygw.logger import logit -from pygw.task import Task +from wxflow import logit, Task from pygfs.ufswm.gfs import GFS logger = logging.getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/land_analysis.py index 0db6075d64..307e875183 100644 --- a/ush/python/pygfs/task/land_analysis.py +++ b/ush/python/pygfs/task/land_analysis.py @@ -4,16 +4,18 @@ from logging import getLogger from typing import Dict, List from pprint import pformat - -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import to_fv3time, to_YMD, to_YMDH -from pygw.fsutils import rm_p -from pygw.yaml_file import parse_j2yaml -from pygw.jinja import Jinja -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException +import numpy as np +from netCDF4 import Dataset + +from wxflow import (AttrDict, + FileHandler, + to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime, + rm_p, + parse_j2yaml, parse_yamltmpl, save_as_yaml, + Jinja, + logit, + Executable, + WorkflowException) from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) @@ -23,14 +25,29 @@ class LandAnalysis(Analysis): """ Class for global land analysis tasks """ + + NMEM_LANDENS = 2 # The size of the land ensemble is fixed at 2. Does this need to be a variable? 
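The execute_jediexe helper added to analysis.py earlier in this patch is, at bottom, the same invocation the ex-scripts would make in shell: launcher, executable, YAML, then a hard stop on failure. A rough shell rendering (APRUN stands in for whatever aprun_cmd the config supplies; it is an assumption here):

    # Shell view of execute_jediexe(DATA, APRUN, "fv3jedi_var.x", "fv3jedi_var.yaml").
    cd "${DATA}" || exit 1
    ${APRUN} "${DATA}/fv3jedi_var.x" "fv3jedi_var.yaml"
    rc=$?
    if (( rc != 0 )); then
      echo "FATAL ERROR: fv3jedi_var.x failed with status ${rc}" 1>&2
      exit "${rc}"
    fi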
+ @logit(logger, name="LandAnalysis") def __init__(self, config): super().__init__(config) + _res = int(self.config['CASE'][1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) + _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml") + # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'LAND_WINDOW_BEGIN': _window_begin, + 'LAND_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'jedi_yaml': _letkfoi_yaml } ) @@ -38,7 +55,84 @@ def __init__(self, config): self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) @logit(logger) - def prepare_IMS(self: Analysis) -> None: + def prepare_GTS(self) -> None: + """Prepare the GTS data for a global land analysis + + This method will prepare GTS data for a global land analysis using JEDI. + This includes: + - processing GTS bufr snow depth observation data to IODA format + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + + # create a temporary dict of all keys needed in this method + localconf = AttrDict() + keys = ['HOMEgfs', 'DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + 'OPREFIX', 'CASE', 'ntiles'] + for key in keys: + localconf[key] = self.task_config[key] + + # Read and render the GTS_OBS_LIST yaml + logger.info(f"Reading {self.task_config.GTS_OBS_LIST}") + prep_gts_config = parse_j2yaml(self.task_config.GTS_OBS_LIST, localconf) + logger.debug(f"{self.task_config.GTS_OBS_LIST}:\n{pformat(prep_gts_config)}") + + # copy the GTS obs files from COM_OBS to DATA/obs + logger.info("Copying GTS obs for bufr2ioda.x") + FileHandler(prep_gts_config.gtsbufr).sync() + + logger.info("Link BUFR2IODAX into DATA/") + exe_src = self.task_config.BUFR2IODAX + exe_dest = os.path.join(localconf.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # Create executable instance + exe = Executable(self.task_config.BUFR2IODAX) + + def _gtsbufr2iodax(exe, yaml_file): + if not os.path.isfile(yaml_file): + logger.exception(f"{yaml_file} not found") + raise FileNotFoundError(yaml_file) + + logger.info(f"Executing {exe}") + try: + exe(yaml_file) + except OSError: + raise OSError(f"Failed to execute {exe} {yaml_file}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exe} {yaml_file}") + + # Loop over entries in prep_gts_config.bufr2ioda keys + # 1. generate bufr2ioda YAML files + # 2.
execute bufr2ioda.x + for name in prep_gts_config.bufr2ioda.keys(): + gts_yaml = os.path.join(self.runtime_config.DATA, f"bufr_{name}_snow.yaml") + logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}") + temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf) + save_as_yaml(temp_yaml, gts_yaml) + logger.info(f"Wrote bufr2ioda YAML to: {gts_yaml}") + + # execute BUFR2IODAX to convert {name} bufr data into IODA format + _gtsbufr2iodax(exe, gts_yaml) + + # Ensure the IODA snow depth GTS file is produced by the IODA converter + # If so, copy to COM_OBS/ + try: + FileHandler(prep_gts_config.gtsioda).sync() + except OSError as err: + logger.exception(f"{self.task_config.BUFR2IODAX} failed to produce GTS ioda files") + raise OSError(err) + + @logit(logger) + def prepare_IMS(self) -> None: """Prepare the IMS data for a global land analysis This method will prepare IMS data for a global land analysis using JEDI. @@ -57,19 +151,19 @@ def prepare_IMS(self: Analysis) -> None: """ # create a temporary dict of all keys needed in this method - cfg = AttrDict() + localconf = AttrDict() keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', - 'OPREFIX', 'CASE', 'ntiles'] + 'OPREFIX', 'CASE', 'OCNRES', 'ntiles'] for key in keys: - cfg[key] = self.task_config[key] + localconf[key] = self.task_config[key] # stage backgrounds logger.info("Staging backgrounds") - FileHandler(self.get_bkg_dict(cfg)).sync() + FileHandler(self.get_bkg_dict(localconf)).sync() # Read and render the IMS_OBS_LIST yaml logger.info(f"Reading {self.task_config.IMS_OBS_LIST}") - prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, cfg) + prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, localconf) logger.debug(f"{self.task_config.IMS_OBS_LIST}:\n{pformat(prep_ims_config)}") # copy the IMS obs files from COM_OBS to DATA/obs @@ -78,25 +172,25 @@ def prepare_IMS(self: Analysis) -> None: logger.info("Create namelist for CALCFIMSEXE") nml_template = self.task_config.FIMS_NML_TMPL - nml_data = Jinja(nml_template, cfg).render + nml_data = Jinja(nml_template, localconf).render logger.debug(f"fims.nml:\n{nml_data}") - nml_file = os.path.join(self.task_config.DATA, "fims.nml") + nml_file = os.path.join(localconf.DATA, "fims.nml") with open(nml_file, "w") as fho: fho.write(nml_data) logger.info("Link CALCFIMSEXE into DATA/") exe_src = self.task_config.CALCFIMSEXE - exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + exe_dest = os.path.join(localconf.DATA, os.path.basename(exe_src)) if os.path.exists(exe_dest): rm_p(exe_dest) os.symlink(exe_src, exe_dest) # execute CALCFIMSEXE to calculate IMS snowdepth exe = Executable(self.task_config.APRUN_CALCFIMS) - exe.add_default_arg(os.path.join(self.task_config.DATA, os.path.basename(exe_src))) + exe.add_default_arg(os.path.join(localconf.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") try: - logger.debug(f"Executing {exe}") exe() except OSError: raise OSError(f"Failed to execute {exe}") @@ -104,21 +198,21 @@ def prepare_IMS(self: Analysis) -> None: raise WorkflowException(f"An error occured during execution of {exe}") # Ensure the snow depth IMS file is produced by the above executable - input_file = f"IMSscf.{to_YMD(self.task_config.PDY)}.{self.task_config.CASE}_oro_data.nc" - if not os.path.isfile(f"{os.path.join(self.task_config.DATA, input_file)}"): + input_file = f"IMSscf.{to_YMD(localconf.current_cycle)}.{localconf.CASE}.mx{localconf.OCNRES}_oro_data.nc" + if not 
os.path.isfile(f"{os.path.join(localconf.DATA, input_file)}"): logger.exception(f"{self.task_config.CALCFIMSEXE} failed to produce {input_file}") - raise FileNotFoundError(f"{os.path.join(self.task_config.DATA, input_file)}") + raise FileNotFoundError(f"{os.path.join(localconf.DATA, input_file)}") # Execute imspy to create the IMS obs data in IODA format logger.info("Create IMS obs data in IODA format") - output_file = f"ims_snow_{to_YMDH(self.task_config.current_cycle)}.nc4" - if os.path.isfile(f"{os.path.join(self.task_config.DATA, output_file)}"): + output_file = f"ims_snow_{to_YMDH(localconf.current_cycle)}.nc4" + if os.path.isfile(f"{os.path.join(localconf.DATA, output_file)}"): rm_p(output_file) exe = Executable(self.task_config.IMS2IODACONV) - exe.add_default_arg(["-i", f"{os.path.join(self.task_config.DATA, input_file)}"]) - exe.add_default_arg(["-o", f"{os.path.join(self.task_config.DATA, output_file)}"]) + exe.add_default_arg(["-i", f"{os.path.join(localconf.DATA, input_file)}"]) + exe.add_default_arg(["-o", f"{os.path.join(localconf.DATA, output_file)}"]) try: logger.debug(f"Executing {exe}") exe() @@ -129,15 +223,155 @@ def prepare_IMS(self: Analysis) -> None: # Ensure the IODA snow depth IMS file is produced by the IODA converter # If so, copy to COM_OBS/ - if not os.path.isfile(f"{os.path.join(self.task_config.DATA, output_file)}"): + if not os.path.isfile(f"{os.path.join(localconf.DATA, output_file)}"): logger.exception(f"{self.task_config.IMS2IODACONV} failed to produce {output_file}") - raise FileNotFoundError(f"{os.path.join(self.task_config.DATA, output_file)}") + raise FileNotFoundError(f"{os.path.join(localconf.DATA, output_file)}") else: logger.info(f"Copy {output_file} to {self.task_config.COM_OBS}") FileHandler(prep_ims_config.ims2ioda).sync() @logit(logger) - def get_bkg_dict(self, config: Dict) -> Dict[str, List[str]]: + def initialize(self) -> None: + """Initialize method for Land analysis + This method: + - creates artifacts in the DATA directory by copying fix files + - creates the JEDI LETKF yaml from the template + - stages backgrounds, observations and ensemble members + + Parameters + ---------- + self : Analysis + Instance of the LandAnalysis object + """ + + super().initialize() + + # create a temporary dict of all keys needed in this method + localconf = AttrDict() + keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + 'OPREFIX', 'CASE', 'ntiles'] + for key in keys: + localconf[key] = self.task_config[key] + + # Make member directories in DATA for background + dirlist = [] + for imem in range(1, LandAnalysis.NMEM_LANDENS + 1): + dirlist.append(os.path.join(localconf.DATA, 'bkg', f'mem{imem:03d}')) + FileHandler({'mkdir': dirlist}).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'land_jedi_fix.yaml') + logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage backgrounds + logger.info("Staging ensemble backgrounds") + FileHandler(self.get_ens_bkg_dict(localconf)).sync() + + # generate letkfoi YAML file + logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}") + letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config) + save_as_yaml(letkfoi_yaml, self.task_config.jedi_yaml) + logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}") + + # need output dir for diags and anl + logger.info("Create empty output 
[anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(localconf.DATA, "anl"), + os.path.join(localconf.DATA, "diags"), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self) -> None: + """Run a series of tasks to create Snow analysis + This method: + - creates a 2-member ensemble + - runs the JEDI LETKF executable to produce increments + - creates analysis from increments + + Parameters + ---------- + self : Analysis + Instance of the LandAnalysis object + """ + + # create a temporary dict of all keys needed in this method + localconf = AttrDict() + keys = ['HOMEgfs', 'DATA', 'current_cycle', + 'COM_ATMOS_RESTART_PREV', 'COM_LAND_ANALYSIS', 'APREFIX', + 'SNOWDEPTHVAR', 'BESTDDEV', 'CASE', 'ntiles', + 'APRUN_LANDANL', 'JEDIEXE', 'jedi_yaml', + 'APPLY_INCR_NML_TMPL', 'APPLY_INCR_EXE', 'APRUN_APPLY_INCR'] + for key in keys: + localconf[key] = self.task_config[key] + + logger.info("Creating ensemble") + self.create_ensemble(localconf.SNOWDEPTHVAR, + localconf.BESTDDEV, + AttrDict({key: localconf[key] for key in ['DATA', 'ntiles', 'current_cycle']})) + + logger.info("Running JEDI LETKF") + self.execute_jediexe(localconf.DATA, + localconf.APRUN_LANDANL, + os.path.basename(localconf.JEDIEXE), + localconf.jedi_yaml) + + logger.info("Creating analysis from backgrounds and increments") + self.add_increments(localconf) + + @logit(logger) + def finalize(self) -> None: + """Performs closing actions of the Land analysis task + This method: + - tar and gzip the output diag files and place in COM/ + - copy the generated YAML file from initialize to the COM/ + - copy the analysis files to the COM/ + - copy the increment files to the COM/ + + Parameters + ---------- + self : Analysis + Instance of the LandAnalysis object + """ + + logger.info("Create diagnostic tarball of diag*.nc4 files") + statfile = os.path.join(self.task_config.COM_LAND_ANALYSIS, f"{self.task_config.APREFIX}landstat.tgz") + self.tgz_diags(statfile, self.task_config.DATA) + + logger.info("Copy full YAML to COM") + src = os.path.join(self.task_config['DATA'], f"{self.task_config.APREFIX}letkfoi.yaml") + dest = os.path.join(self.task_config.COM_CONF, f"{self.task_config.APREFIX}letkfoi.yaml") + yaml_copy = { + 'mkdir': [self.task_config.COM_CONF], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + logger.info("Copy analysis to COM") + template = f'{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + anllist = [] + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename) + dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename) + anllist.append([src, dest]) + FileHandler({'copy': anllist}).sync() + + logger.info('Copy increments to COM') + template = f'landinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, self.task_config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', filename) + dest = os.path.join(self.task_config.COM_LAND_ANALYSIS, filename) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + + @staticmethod + @logit(logger) + def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: """Compile a dictionary of model background files to copy This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) @@ -145,10 +379,13 @@ def get_bkg_dict(self, config: Dict) -> Dict[str,
List[str]]: Parameters ---------- - self: Analysis - Instance of the current object class config: Dict Dictionary of key-value pairs needed in this method + Should contain the following keys: + COM_ATMOS_RESTART_PREV + DATA + current_cycle + ntiles Returns ---------- @@ -157,7 +394,7 @@ def get_bkg_dict(self, config: Dict) -> Dict[str, List[str]]: """ # NOTE for now this is FV3 RESTART files and just assumed to be fh006 - # get FV3 RESTART files, this will be a lot simpler when using history files + # get FV3 sfc_data RESTART files, this will be a lot simpler when using history files rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) # for now, option later? run_dir = os.path.join(config.DATA, 'bkg') @@ -180,3 +417,166 @@ def get_bkg_dict(self, config: Dict) -> Dict[str, List[str]]: 'copy': bkglist } return bkg_dict + + @staticmethod + @logit(logger) + def get_ens_bkg_dict(config: Dict) -> Dict: + """Compile a dictionary of model background files to copy for the ensemble + Note that a "Fake" 2-member ensemble background is being created by copying FV3 RESTART files (coupler, sfc_data) + from the deterministic background to DATA/bkg/mem001, 002. + + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + COM_ATMOS_RESTART_PREV + DATA + current_cycle + ntiles + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + + dirlist = [] + bkglist = [] + + # get FV3 sfc_data RESTART files; Note an ensemble is being created + rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) + + for imem in range(1, LandAnalysis.NMEM_LANDENS + 1): + memchar = f"mem{imem:03d}" + + run_dir = os.path.join(config.DATA, 'bkg', memchar, 'RESTART') + dirlist.append(run_dir) + + # Land DA needs coupler + basename = f'{to_fv3time(config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # Land DA only needs sfc_data + for ftype in ['sfc_data']: + template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': dirlist, + 'copy': bkglist + } + + return bkg_dict + + @staticmethod + @logit(logger) + def create_ensemble(vname: str, bestddev: float, config: Dict) -> None: + """Create a 2-member ensemble for Snow Depth analysis by perturbing snow depth with a prescribed standard deviation. + Additionally, remove glacier locations + + Parameters + ---------- + vname : str + snow depth variable to perturb: "snodl" + bestddev : float + Background error standard deviation used to perturb the background when creating the ensemble + config: Dict + Dictionary of key-value pairs needed in this method.
It must contain the following keys: + DATA + current_cycle + ntiles + """ + + # 2 ens members + offset = bestddev / np.sqrt(LandAnalysis.NMEM_LANDENS) + + logger.info(f"Creating ensemble for LETKFOI by offsetting with {offset}") + + workdir = os.path.join(config.DATA, 'bkg') + + sign = [1, -1] + ens_dirs = ['mem001', 'mem002'] + + for (memchar, value) in zip(ens_dirs, sign): + logger.debug(f"creating ensemble member {memchar} with sign {value}") + for tt in range(1, config.ntiles + 1): + logger.debug(f"perturbing tile {tt}") + # open file + out_netcdf = os.path.join(workdir, memchar, 'RESTART', f"{to_fv3time(config.current_cycle)}.sfc_data.tile{tt}.nc") + logger.debug(f"creating member {out_netcdf}") + with Dataset(out_netcdf, "r+") as ncOut: + slmsk_array = ncOut.variables['slmsk'][:] + vtype_array = ncOut.variables['vtype'][:] + slmsk_array[vtype_array == 15] = 0 # remove glacier locations + var_array = ncOut.variables[vname][:] + var_array[slmsk_array == 1] = var_array[slmsk_array == 1] + value * offset + ncOut.variables[vname][0, :, :] = var_array[:] + + @staticmethod + @logit(logger) + def add_increments(config: Dict) -> None: + """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds + + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + HOMEgfs + COM_ATMOS_RESTART_PREV + DATA + current_cycle + CASE + ntiles + APPLY_INCR_NML_TMPL + APPLY_INCR_EXE + APRUN_APPLY_INCR + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + + # need backgrounds to create analysis from increments after LETKF + logger.info("Copy backgrounds into anl/ directory for creating analysis from increments") + template = f'{to_fv3time(config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + anllist = [] + for itile in range(1, config.ntiles + 1): + filename = template.format(tilenum=itile) + src = os.path.join(config.COM_ATMOS_RESTART_PREV, filename) + dest = os.path.join(config.DATA, "anl", filename) + anllist.append([src, dest]) + FileHandler({'copy': anllist}).sync() + + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = config.APPLY_INCR_NML_TMPL + nml_data = Jinja(nml_template, config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = config.APPLY_INCR_EXE + exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exe}") diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py new file mode 100644 index 0000000000..7db50e1582 --- /dev/null +++ b/ush/python/pygfs/task/upp.py @@ -0,0 +1,263 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import Dict, Any, Union +from pprint import pformat + +from wxflow import (AttrDict, + parse_j2yaml, + FileHandler, + Jinja, + logit, + Task, + add_to_datetime, to_timedelta, + WorkflowException, +
Executable, which) + +logger = getLogger(__name__.split('.')[-1]) + + +class UPP(Task): + """Unified Post Processor Task + """ + + VALID_UPP_RUN = ['analysis', 'forecast', 'goes', 'wafs'] + + @logit(logger, name="UPP") + def __init__(self, config: Dict[str, Any]) -> None: + """Constructor for the UPP task + The constructor is responsible for resolving the "UPP_CONFIG" based on the run-type "upp_run" + Sections of upp.yaml outside of the `upp` block are all valid `upp_run` options + E.g. "upp_run" sections are: + analysis: process analysis output + forecast: process UFS-weather-model forecast output + goes: process UFS-weather-model forecast output for simulated satellite imagery + wafs: process UFS-weather-model forecast output for WAFS products + + Parameters + ---------- + config : Dict[str, Any] + Incoming configuration for the task from the environment + + Returns + ------- + None + """ + super().__init__(config) + + if self.config.UPP_RUN not in self.VALID_UPP_RUN: + raise NotImplementedError(f'{self.config.UPP_RUN} is not a valid UPP run type.\n' + + 'Valid UPP_RUN values are:\n' + + f'{", ".join(self.VALID_UPP_RUN)}') + + valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H")) + + localdict = AttrDict( + {'upp_run': self.config.UPP_RUN, + 'forecast_hour': self.config.FORECAST_HOUR, + 'valid_datetime': valid_datetime, + 'atmos_filename': f"atm_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc", + 'flux_filename': f"sfc_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc" + } + ) + self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict) + + # Read the upp.yaml file for common configuration + logger.info(f"Read the UPP configuration yaml file {self.config.UPP_CONFIG}") + self.task_config.upp_yaml = parse_j2yaml(self.config.UPP_CONFIG, self.task_config) + logger.debug(f"upp_yaml:\n{pformat(self.task_config.upp_yaml)}") + + @staticmethod + @logit(logger) + def initialize(upp_yaml: Dict) -> None: + """Initialize the work directory by copying all the common fix data + + Parameters + ---------- + upp_yaml: Dict + Fully resolved upp.yaml dictionary + """ + + # Copy static data to run directory + logger.info("Copy static data to run directory") + FileHandler(upp_yaml.upp.fix_data).sync() + + @staticmethod + @logit(logger) + def configure(upp_dict: Dict, upp_yaml: Dict) -> None: + """Configure the artifacts in the work directory. + Copy run specific data to run directory + Create namelist 'itag' from template + + Parameters + ---------- + upp_dict : Dict + Task specific keys e.g.
upp_run + upp_yaml : Dict + Fully resolved upp.yaml dictionary + """ + + # Copy "upp_run" specific data to run directory + logger.info(f"Copy '{upp_dict.upp_run}' data to run directory") + FileHandler(upp_yaml[upp_dict.upp_run].data_in).sync() + + # Make a localconf with the upp_run specific configuration + # First make a shallow copy for local use + localconf = upp_dict.copy() + # Update 'config' part of the 'run' + localconf.update(upp_yaml.upp.config) + localconf.update(upp_yaml[localconf.upp_run].config) + logger.debug(f"Updated localconf with upp_run='{localconf.upp_run}':\n{pformat(localconf)}") + + # Configure the namelist and write to file + logger.info("Create namelist for upp.x") + nml_template = os.path.join(localconf.DATA, "itag.jinja") + nml_data = Jinja(nml_template, localconf).render + logger.debug(f"itag:\n{nml_data}") + nml_file = os.path.join(localconf.DATA, "itag") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + @staticmethod + @logit(logger) + def execute(workdir: Union[str, os.PathLike], aprun_cmd: str, forecast_hour: int = 0) -> None: + """Run the UPP executable and index the output master and flux files + + Parameters + ---------- + workdir : str | os.PathLike + work directory with the staged data, parm files, namelists, etc. + aprun_cmd : str + launcher command for UPP.x + forecast_hour : int + default: 0 + forecast hour being processed + + Returns + ------- + None + """ + + # Run the UPP executable + UPP.run(workdir, aprun_cmd) + + # Index the output grib2 file + UPP.index(workdir, forecast_hour) + + @classmethod + @logit(logger) + def run(cls, workdir: Union[str, os.PathLike], aprun_cmd: str, exec_name: str = 'upp.x') -> None: + """ + Run the UPP executable + + Parameters + ---------- + workdir : str | os.PathLike + Working directory where to run containing the necessary files and executable + aprun_cmd : str + Launcher command e.g. mpirun -np or srun, etc. + exec_name : str + Name of the UPP executable e.g. 
upp.x + + Returns + ------- + None + """ + os.chdir(workdir) + + exec_cmd = Executable(aprun_cmd) + exec_cmd.add_default_arg(os.path.join(workdir, exec_name)) + + UPP._call_executable(exec_cmd) + + @classmethod + @logit(logger) + def index(cls, workdir: Union[str, os.PathLike], forecast_hour: int) -> None: + """ + Index the grib2 file + + Parameters + ---------- + workdir : str | os.PathLike + Working directory where to run containing the necessary files and executable + forecast_hour : int + forecast hour to index + + Environment Parameters + ---------------------- + GRB2INDEX : str (optional) + path to executable "grb2index" + Typically set in the modulefile + + Returns + ------- + None + """ + os.chdir(workdir) + logger.info("Generate index file") + + grb2index_cmd = os.environ.get("GRB2INDEX", None) + + template = f"GFS{{file_type}}.GrbF{forecast_hour:02d}" + + for ftype in ['PRS', 'FLX']: + grbfile = template.format(file_type=ftype) + grbfidx = f"{grbfile}.idx" + + if not os.path.exists(grbfile): + logger.info(f"No {grbfile} to process, skipping ...") + continue + + logger.info(f"Creating index file for {grbfile}") + exec_cmd = which("grb2index") if grb2index_cmd is None else Executable(grb2index_cmd) + exec_cmd.add_default_arg(os.path.join(workdir, grbfile)) + exec_cmd.add_default_arg(os.path.join(workdir, grbfidx)) + + UPP._call_executable(exec_cmd) + + @staticmethod + @logit(logger) + def _call_executable(exec_cmd: Executable) -> None: + """Internal method to call executable + + Parameters + ---------- + exec_cmd : Executable + Executable to run + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + + logger.info(f"Executing {exec_cmd}") + try: + exec_cmd() + except OSError: + logger.exception(f"FATAL ERROR: Failed to execute {exec_cmd}") + raise OSError(f"{exec_cmd}") + except Exception: + logger.exception(f"FATAL ERROR: Error occurred during execution of {exec_cmd}") + raise WorkflowException(f"{exec_cmd}") + + @staticmethod + @logit(logger) + def finalize(upp_run: str, upp_yaml: Dict) -> None: + """Perform closing actions of the task.
+ Copy data back from the DATA/ directory to COM/ + + Parameters + ---------- + upp_run: str + Run type of UPP + upp_yaml: Dict + Fully resolved upp.yaml dictionary + """ + + # Copy "upp_run" specific generated data to COM/ directory + logger.info(f"Copy '{upp_run}' processed data to COM/ directory") + FileHandler(upp_yaml[upp_run].data_out).sync() diff --git a/ush/python/pygfs/ufswm/gfs.py b/ush/python/pygfs/ufswm/gfs.py index f86164d706..2ed6cd0c08 100644 --- a/ush/python/pygfs/ufswm/gfs.py +++ b/ush/python/pygfs/ufswm/gfs.py @@ -1,7 +1,7 @@ import copy import logging -from pygw.logger import logit +from wxflow import logit from pygfs.ufswm.ufs import UFS logger = logging.getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/ufswm/ufs.py b/ush/python/pygfs/ufswm/ufs.py index a9118801b9..e9836e0b75 100644 --- a/ush/python/pygfs/ufswm/ufs.py +++ b/ush/python/pygfs/ufswm/ufs.py @@ -3,8 +3,7 @@ import logging from typing import Dict, Any -from pygw.template import Template, TemplateConstants -from pygw.logger import logit +from wxflow import logit, Template, TemplateConstants logger = logging.getLogger(__name__.split('.')[-1]) diff --git a/ush/radmon_diag_ck.sh b/ush/radmon_diag_ck.sh index 4045ddb2d5..8c4f54bba5 100755 --- a/ush/radmon_diag_ck.sh +++ b/ush/radmon_diag_ck.sh @@ -27,7 +27,7 @@ echo "--> radmon_diag_ck.sh" # Process input arguments # nargs=$# - if [[ $nargs -ne 6 ]]; then + if [[ ${nargs} -ne 6 ]]; then usage exit 1 fi @@ -35,9 +35,9 @@ echo "--> radmon_diag_ck.sh" while [[ $# -ge 1 ]] do key="$1" - echo $key + echo "${key}" - case $key in + case ${key} in -r|--rad) radstat_file="$2" shift # past argument @@ -52,7 +52,7 @@ echo "--> radmon_diag_ck.sh" ;; *) #unspecified key - echo " unsupported key = $key" + echo " unsupported key = ${key}" ;; esac @@ -71,7 +71,7 @@ echo "--> radmon_diag_ck.sh" #--------------------------------------------- # get list of diag files in the radstat file # - radstat_contents=`tar -tf ${radstat_file} | grep '_ges' | + radstat_contents=`tar -tf "${radstat_file}" | grep '_ges' | gawk -F"diag_" '{print $2}' | gawk -F"_ges" '{print $1}'` @@ -79,17 +79,17 @@ echo "--> radmon_diag_ck.sh" #--------------------------------------------- # load contents of satype_file into an array # - satype_contents=`cat ${satype_file}` + satype_contents=$(cat "${satype_file}") #------------------------------------------------- # compare $satype_contents and $radstat_contents # report anything missing # - for sat in $satype_contents; do - test=`echo $radstat_contents | grep $sat` - - if [[ ${#test} -le 0 ]]; then + for sat in ${satype_contents}; do + content_count=$(echo "${radstat_contents}" | grep -c "${sat}") + + if (( content_count <= 0 )); then missing_diag="${missing_diag} ${sat}" fi @@ -117,10 +117,10 @@ echo "--> radmon_diag_ck.sh" # TODO Rewrite these array parsing commands to avoid using Bash's sloppy word splitting # File sizes contain only digits and immediately precede the date # shellcheck disable=SC2207 - sizes=($(tar -vtf ${radstat_file} --wildcards '*_ges*' | grep -P -o '(\d)+(?= \d{4}-\d{2}-\d{2})')) + sizes=($(tar -vtf "${radstat_file}" --wildcards '*_ges*' | grep -P -o '(\d)+(?= \d{4}-\d{2}-\d{2})')) # Filenames are the last group of non-whitespace characters # shellcheck disable=SC2207 - filenames=($(tar -vtf ${radstat_file} --wildcards '*_ges*' | grep -P -o '\S+$')) + filenames=($(tar -vtf "${radstat_file}" --wildcards '*_ges*' | grep -P -o '\S+$')) # shellcheck disable= @@ -144,7 +144,7 @@ echo "--> radmon_diag_ck.sh" 
zero_len_diag="${zero_len_diag} ${sat}" fi - rm -f ${uz_file_name} + rm -f "${uz_file_name}" fi done @@ -159,13 +159,13 @@ echo "--> radmon_diag_ck.sh" # if [[ ${#zero_len_diag} -gt 0 ]]; then for zld in ${zero_len_diag}; do - echo " Zero Length diagnostic file: $zld" >> $output_file + echo " Zero Length diagnostic file: ${zld}" >> "${output_file}" done fi if [[ ${#missing_diag} -gt 0 ]]; then for md in ${missing_diag}; do - echo " Missing diagnostic file : $md" >> $output_file + echo " Missing diagnostic file : ${md}" >> "${output_file}" done fi diff --git a/ush/radmon_err_rpt.sh b/ush/radmon_err_rpt.sh index 8561563d48..6ae6505624 100755 --- a/ush/radmon_err_rpt.sh +++ b/ush/radmon_err_rpt.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -63,12 +63,12 @@ err=0 RADMON_SUFFIX=${RADMON_SUFFIX} have_diag_rpt=0 -if [[ -s $diag_rpt ]]; then +if [[ -s "${diag_rpt}" ]]; then have_diag_rpt=1 else err=1 fi -echo "have_diag_rpt = $have_diag_rpt" +echo "have_diag_rpt = ${have_diag_rpt}" #----------------------------------------------------------------------------- # read each line in the $file1 @@ -76,15 +76,15 @@ echo "have_diag_rpt = $have_diag_rpt" # if same combination is in both files, add the values to the output file # { while read myline; do - echo "myline = $myline" + echo "myline = ${myline}" bound="" - echo $myline - satname=$(echo $myline | gawk '{print $1}') - channel=$(echo $myline | gawk '{print $3}') - region=$(echo $myline | gawk '{print $5}') - value1=$(echo $myline | gawk '{print $7}') - bound=$(echo $myline | gawk '{print $9}') + echo "${myline}" + satname=$(echo "${myline}" | gawk '{print $1}') + channel=$(echo "${myline}" | gawk '{print $3}') + region=$(echo "${myline}" | gawk '{print $5}') + value1=$(echo "${myline}" | gawk '{print $7}') + bound=$(echo "${myline}" | gawk '{print $9}') # # Check findings against diag_report. 
If the satellite/instrument is on the @@ -96,40 +96,40 @@ echo "have_diag_rpt = $have_diag_rpt" diag_match="" diag_match_len=0 - if [[ $have_diag_rpt == 1 ]]; then - diag_match=$(gawk "/$satname/" $diag_rpt) + if [[ ${have_diag_rpt} == 1 ]]; then + diag_match=$(gawk "/${satname}/" "${diag_rpt}") diag_match_len=$(echo ${#diag_match}) fi - if [[ $diag_match_len == 0 ]]; then + if [[ ${diag_match_len} == 0 ]]; then - if [[ $type == "chan" ]]; then - echo "looking for match for $satname and $channel" + if [[ ${type} == "chan" ]]; then + echo "looking for match for ${satname} and ${channel}" { while read myline2; do - satname2=$(echo $myline2 | gawk '{print $1}') - channel2=$(echo $myline2 | gawk '{print $3}') + satname2=$(echo "${myline2}" | gawk '{print $1}') + channel2=$(echo "${myline2}" | gawk '{print $3}') - if [[ $satname == $satname2 && $channel == $channel2 ]]; then - match="$satname channel= $channel" - echo "match from gawk = $match" + if [[ ${satname} == ${satname2} && ${channel} == ${channel2} ]]; then + match="${satname} channel= ${channel}" + echo "match from gawk = ${match}" break; else match="" fi - done } < $file2 + done } < "${file2}" else - match=$(gawk "/$satname/ && /channel= $channel / && /region= $region /" $file2) - echo match = $match + match=$(gawk "/${satname}/ && /channel= ${channel} / && /region= ${region} /" "${file2}") + echo match = "${match}" match_len=$(echo ${#match}) - if [[ $match_len > 0 ]]; then - channel2=$(echo $match | gawk '{print $3}') + if (( match_len > 0 )); then + channel2=$(echo "${match}" | gawk '{print $3}') - if [[ $channel2 != $channel ]]; then + if [[ ${channel2} != ${channel} ]]; then match="" fi fi @@ -137,54 +137,54 @@ echo "have_diag_rpt = $have_diag_rpt" fi match_len=$(echo ${#match}) - if [[ $match_len > 0 ]]; then + if (( match_len > 0 )); then - value2=$(echo $match | gawk '{print $7}') - bound2=$(echo $match | gawk '{print $9}') + value2=$(echo "${match}" | gawk '{print $7}') + bound2=$(echo "${match}" | gawk '{print $9}') - if [[ $type == "chan" ]]; then - tmpa=" $satname channel= $channel" + if [[ ${type} == "chan" ]]; then + tmpa=" ${satname} channel= ${channel}" tmpb="" - elif [[ $type == "pen" ]]; then - tmpa="$satname channel= $channel region= $region" - tmpb="$cycle1 $value1 $bound" + elif [[ ${type} == "pen" ]]; then + tmpa="${satname} channel= ${channel} region= ${region}" + tmpb="${cycle1} ${value1} ${bound}" - elif [[ $type == "cnt" ]]; then - tmpa="$satname channel= $channel region= $region" - tmpb="$cycle1 $value1 $bound" + elif [[ ${type} == "cnt" ]]; then + tmpa="${satname} channel= ${channel} region= ${region}" + tmpb="${cycle1} ${value1} ${bound}" else - tmpa="$satname channel= $channel region= $region" - tmpb="$cycle1: $type= $value1" + tmpa="${satname} channel= ${channel} region= ${region}" + tmpb="${cycle1}: ${type}= ${value1}" fi - line1="$tmpa $tmpb" - echo "$line1" >> $outfile + line1="${tmpa} ${tmpb}" + echo "${line1}" >> "${outfile}" - if [[ $type != "chan" ]]; then - tmpc=$(echo $tmpa |sed 's/[a-z]/ /g' | sed 's/[0-9]/ /g' | sed 's/=/ /g' | sed 's/_/ /g' | sed 's/-/ /g') + if [[ ${type} != "chan" ]]; then + tmpc=$(echo "${tmpa}" |sed 's/[a-z]/ /g' | sed 's/[0-9]/ /g' | sed 's/=/ /g' | sed 's/_/ /g' | sed 's/-/ /g') - if [[ $type == "pen" || $type == "cnt" ]]; then - line2=" $tmpc $cycle2 $value2 $bound2" + if [[ ${type} == "pen" || ${type} == "cnt" ]]; then + line2=" ${tmpc} ${cycle2} ${value2} ${bound2}" else - line2=" $tmpc $cycle2: $type= $value2" + line2=" ${tmpc} ${cycle2}: ${type}= ${value2}" fi - 
echo "$line2" >> $outfile + echo "${line2}" >> "${outfile}" fi #----------------------------------------- # add hyperlink to warning entry # line3=" http://www.emc.ncep.noaa.gov/gmb/gdas/radiance/es_rad/${RADMON_SUFFIX}/index.html?sat=${satname}®ion=${region}&channel=${channel}&stat=${type}" - if [[ $channel -gt 0 ]]; then - echo "$line3" >> $outfile - echo "" >> $outfile + if [[ ${channel} -gt 0 ]]; then + echo "${line3}" >> "${outfile}" + echo "" >> "${outfile}" fi fi fi -done } < $file1 +done } < "${file1}" ################################################################################ diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh index b2dab0825a..f68d7c88cc 100755 --- a/ush/radmon_verf_angle.sh +++ b/ush/radmon_verf_angle.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -20,17 +20,17 @@ source "$HOMEgfs/ush/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_angle.sh PDATE +# Usage: radmon_verf_angle.sh # # Input script positional parameters: -# PDATE processing date +# PDYcyc processing date # yyyymmddcc format; required # # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECradmon executable directory -# defaults to current directory +# EXECgfs executable directory +# PARMmonitor parm directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository @@ -72,26 +72,19 @@ REGIONAL_RR=${REGIONAL_RR:-0} # rapid refresh model flag rgnHH=${rgnHH:-} rgnTM=${rgnTM:-} -export PDATE=${1:-${PDATE:?}} - -echo " REGIONAL_RR, rgnHH, rgnTM = $REGIONAL_RR, $rgnHH, $rgnTM" +echo " REGIONAL_RR, rgnHH, rgnTM = ${REGIONAL_RR}, ${rgnHH}, ${rgnTM}" netcdf_boolean=".false." -if [[ $RADMON_NETCDF -eq 1 ]]; then +if [[ ${RADMON_NETCDF} -eq 1 ]]; then netcdf_boolean=".true." fi -echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" which prep_step which startmsg -# Directories -FIXgdas=${FIXgdas:-$(pwd)} -EXECradmon=${EXECradmon:-$(pwd)} -TANKverf_rad=${TANKverf_rad:-$(pwd)} - # File names export pgmout=${pgmout:-${jlogfile}} -touch $pgmout +touch "${pgmout}" # Other variables SATYPE=${SATYPE:-} @@ -100,7 +93,7 @@ LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} USE_ANL=${USE_ANL:-0} -if [[ $USE_ANL -eq 1 ]]; then +if [[ ${USE_ANL} -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -108,14 +101,14 @@ fi err=0 angle_exec=radmon_angle.x -shared_scaninfo=${shared_scaninfo:-$FIXgdas/gdas_radmon_scaninfo.txt} +shared_scaninfo="${shared_scaninfo:-${PARMmonitor}/gdas_radmon_scaninfo.txt}" scaninfo=scaninfo.txt #-------------------------------------------------------------------- # Copy extraction program and supporting files to working directory -$NCP ${EXECradmon}/${angle_exec} ./ -$NCP $shared_scaninfo ./${scaninfo} +${NCP} "${EXECgfs}/${angle_exec}" ./ +${NCP} "${shared_scaninfo}" ./${scaninfo} if [[ ! -s ./${angle_exec} || ! 
-s ./${scaninfo} ]]; then err=2 @@ -125,10 +118,10 @@ else export pgm=${angle_exec} - iyy=$(echo $PDATE | cut -c1-4) - imm=$(echo $PDATE | cut -c5-6) - idd=$(echo $PDATE | cut -c7-8) - ihh=$(echo $PDATE | cut -c9-10) + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} ctr=0 fail=0 @@ -143,24 +136,24 @@ else for dtype in ${gesanl}; do - echo "pgm = $pgm" - echo "pgmout = $pgmout" + echo "pgm = ${pgm}" + echo "pgmout = ${pgmout}" prep_step - ctr=$(expr $ctr + 1) + ctr=$((ctr + 1)) - if [[ $dtype == "anl" ]]; then - data_file=${type}_anl.${PDATE}.ieee_d + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" ctl_file=${type}_anl.ctl angl_ctl=angle.${ctl_file} else - data_file=${type}.${PDATE}.ieee_d + data_file="${type}.${PDY}${cyc}.ieee_d" ctl_file=${type}.ctl angl_ctl=angle.${ctl_file} fi angl_file="" - if [[ $REGIONAL_RR -eq 1 ]]; then + if [[ ${REGIONAL_RR} -eq 1 ]]; then angl_file=${rgnHH}.${data_file}.${rgnTM} fi @@ -187,18 +180,18 @@ cat << EOF > input EOF startmsg - ./${angle_exec} < input >> ${pgmout} 2>>errfile + ./${angle_exec} < input >> "${pgmout}" 2>>errfile export err=$?; err_chk - if [[ $err -ne 0 ]]; then - fail=$(expr $fail + 1) + if [[ ${err} -ne 0 ]]; then + fail=$(( fail + 1 )) fi if [[ -s ${angl_file} ]]; then - ${COMPRESS} -f ${angl_file} + ${COMPRESS} -f "${angl_file}" fi if [[ -s ${angl_ctl} ]]; then - ${COMPRESS} -f ${angl_ctl} + ${COMPRESS} -f "${angl_ctl}" fi @@ -207,24 +200,24 @@ EOF done # for type in ${SATYPE} loop - ${USHradmon}/rstprod.sh + "${USHgfs}/rstprod.sh" tar_file=radmon_angle.tar if compgen -G "angle*.ieee_d*" > /dev/null || compgen -G "angle*.ctl*" > /dev/null; then - tar -cf $tar_file angle*.ieee_d* angle*.ctl* + tar -cf "${tar_file}" angle*.ieee_d* angle*.ctl* ${COMPRESS} ${tar_file} - mv $tar_file.${Z} ${TANKverf_rad}/. + mv "${tar_file}.${Z}" "${TANKverf_rad}/." - if [[ $RAD_AREA = "rgn" ]]; then + if [[ ${RAD_AREA} = "rgn" ]]; then cwd=$(pwd) - cd ${TANKverf_rad} - tar -xf ${tar_file}.${Z} - rm ${tar_file}.${Z} - cd ${cwd} + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" fi fi - if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then err=3 fi fi diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh index 374c8db7b2..ab1058711e 100755 --- a/ush/radmon_verf_bcoef.sh +++ b/ush/radmon_verf_bcoef.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -20,23 +20,19 @@ source "$HOMEgfs/ush/preamble.sh" # other supporting files into a temporary working directory. 
# # -# Usage: radmon_verf_bcoef.sh PDATE +# Usage: radmon_verf_bcoef.sh # # Input script positional parameters: -# PDATE processing date +# PDYcyc processing date # yyyymmddcc format; required # # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECradmon executable directory -# defaults to current directory -# FIXradmon fixed data directory -# defaults to current directory +# EXECgfs executable directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository -# defaults to current directory # SATYPE list of satellite/instrument sources # defaults to none # LITTLE_ENDIAN flag for LE machine @@ -65,23 +61,16 @@ source "$HOMEgfs/ush/preamble.sh" # >0 - some problem encountered # #################################################################### -# Command line arguments. -export PDATE=${1:-${PDATE:?}} netcdf_boolean=".false." -if [[ $RADMON_NETCDF -eq 1 ]]; then +if [[ ${RADMON_NETCDF} -eq 1 ]]; then netcdf_boolean=".true." fi -echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" - -# Directories -FIXgdas=${FIXgdas:-$(pwd)} -EXECradmon=${EXECradmon:-$(pwd)} -TANKverf_rad=${TANKverf_rad:-$(pwd)} +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, ${netcdf_boolean}" # File names pgmout=${pgmout:-${jlogfile}} -touch $pgmout +touch "${pgmout}" # Other variables RAD_AREA=${RAD_AREA:-glb} @@ -96,7 +85,7 @@ USE_ANL=${USE_ANL:-0} err=0 bcoef_exec=radmon_bcoef.x -if [[ $USE_ANL -eq 1 ]]; then +if [[ ${USE_ANL} -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -105,8 +94,8 @@ fi #-------------------------------------------------------------------- # Copy extraction program and supporting files to working directory -$NCP $EXECradmon/${bcoef_exec} ./${bcoef_exec} -$NCP ${biascr} ./biascr.txt +${NCP} "${EXECgfs}/${bcoef_exec}" ./${bcoef_exec} +${NCP} "${biascr}" ./biascr.txt if [[ ! -s ./${bcoef_exec} || ! 
-s ./biascr.txt ]]; then err=4 @@ -118,10 +107,10 @@ else export pgm=${bcoef_exec} - iyy=$(echo $PDATE | cut -c1-4) - imm=$(echo $PDATE | cut -c5-6) - idd=$(echo $PDATE | cut -c7-8) - ihh=$(echo $PDATE | cut -c9-10) + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} ctr=0 fail=0 @@ -140,19 +129,19 @@ else prep_step - ctr=$(expr $ctr + 1) + ctr=$(( ctr + 1 )) - if [[ $dtype == "anl" ]]; then - data_file=${type}_anl.${PDATE}.ieee_d + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" ctl_file=${type}_anl.ctl bcoef_ctl=bcoef.${ctl_file} else - data_file=${type}.${PDATE}.ieee_d + data_file="${type}.${PDY}${cyc}.ieee_d" ctl_file=${type}.ctl bcoef_ctl=bcoef.${ctl_file} fi - if [[ $REGIONAL_RR -eq 1 ]]; then + if [[ ${REGIONAL_RR} -eq 1 ]]; then bcoef_file=${rgnHH}.bcoef.${data_file}.${rgnTM} else bcoef_file=bcoef.${data_file} @@ -180,10 +169,10 @@ cat << EOF > input / EOF startmsg - ./${bcoef_exec} < input >>${pgmout} 2>>errfile + ./${bcoef_exec} < input >>"${pgmout}" 2>>errfile export err=$?; err_chk - if [[ $err -ne 0 ]]; then - fail=$(expr $fail + 1) + if [[ ${err} -ne 0 ]]; then + fail=$(( fail + 1 )) fi @@ -192,11 +181,11 @@ EOF # if [[ -s ${bcoef_file} ]]; then - ${COMPRESS} ${bcoef_file} + ${COMPRESS} "${bcoef_file}" fi if [[ -s ${bcoef_ctl} ]]; then - ${COMPRESS} ${bcoef_ctl} + ${COMPRESS} "${bcoef_ctl}" fi @@ -204,24 +193,24 @@ EOF done # type in $SATYPE loop - ${USHradmon}/rstprod.sh + "${USHgfs}/rstprod.sh" if compgen -G "bcoef*.ieee_d*" > /dev/null || compgen -G "bcoef*.ctl*" > /dev/null; then tar_file=radmon_bcoef.tar - tar -cf $tar_file bcoef*.ieee_d* bcoef*.ctl* + tar -cf ${tar_file} bcoef*.ieee_d* bcoef*.ctl* ${COMPRESS} ${tar_file} - mv $tar_file.${Z} ${TANKverf_rad} + mv "${tar_file}.${Z}" "${TANKverf_rad}" - if [[ $RAD_AREA = "rgn" ]]; then + if [[ ${RAD_AREA} = "rgn" ]]; then cwd=$(pwd) - cd ${TANKverf_rad} - tar -xf ${tar_file}.${Z} - rm ${tar_file}.${Z} - cd ${cwd} + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" fi fi - if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then err=5 fi fi diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh index 3e267f018c..f1f97c247e 100755 --- a/ush/radmon_verf_bcor.sh +++ b/ush/radmon_verf_bcor.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -20,16 +20,16 @@ source "$HOMEgfs/ush/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_bcor.sh PDATE +# Usage: radmon_verf_bcor.sh # # Input script positional parameters: -# PDATE processing date +# PDYcyc processing date # yyyymmddcc format; required # # Imported Shell Variables: # RADMON_SUFFIX data source suffix # defauls to opr -# EXECradmon executable directory +# EXECgfs executable directory # defaults to current directory # RAD_AREA global or regional flag # defaults to global @@ -64,16 +64,9 @@ source "$HOMEgfs/ush/preamble.sh" # #################################################################### -# Command line arguments. 
-export PDATE=${1:-${PDATE:?}} - -# Directories -EXECradmon=${EXECradmon:-$(pwd)} -TANKverf_rad=${TANKverf_rad:-$(pwd)} - # File names pgmout=${pgmout:-${jlogfile}} -touch $pgmout +touch "${pgmout}" # Other variables RAD_AREA=${RAD_AREA:-glb} @@ -85,11 +78,11 @@ bcor_exec=radmon_bcor.x err=0 netcdf_boolean=".false." -if [[ $RADMON_NETCDF -eq 1 ]]; then +if [[ ${RADMON_NETCDF} -eq 1 ]]; then netcdf_boolean=".true." fi -if [[ $USE_ANL -eq 1 ]]; then +if [[ ${USE_ANL} -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -99,7 +92,7 @@ fi #-------------------------------------------------------------------- # Copy extraction program to working directory -$NCP ${EXECradmon}/${bcor_exec} ./${bcor_exec} +${NCP} "${EXECgfs}/${bcor_exec}" ./${bcor_exec} if [[ ! -s ./${bcor_exec} ]]; then err=6 @@ -111,10 +104,10 @@ else export pgm=${bcor_exec} - iyy=$(echo $PDATE | cut -c1-4) - imm=$(echo $PDATE | cut -c5-6) - idd=$(echo $PDATE | cut -c7-8) - ihh=$(echo $PDATE | cut -c9-10) + iyy="${PDY:0:4}" + imm="${PDY:4:2}" + idd="${PDY:6:2}" + ihh=${cyc} ctr=0 fail=0 @@ -126,10 +119,10 @@ else prep_step - ctr=$(expr $ctr + 1) + ctr=$(( ctr + 1 )) - if [[ $dtype == "anl" ]]; then - data_file=${type}_anl.${PDATE}.ieee_d + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" bcor_file=bcor.${data_file} ctl_file=${type}_anl.ctl bcor_ctl=bcor.${ctl_file} @@ -137,7 +130,7 @@ else bcor_stdout=bcor.${stdout_file} input_file=${type}_anl else - data_file=${type}.${PDATE}.ieee_d + data_file="${type}.${PDY}${cyc}.ieee_d" bcor_file=bcor.${data_file} ctl_file=${type}.ctl bcor_ctl=bcor.${ctl_file} @@ -151,7 +144,7 @@ else # Check for 0 length input file here and avoid running # the executable if $input_file doesn't exist or is 0 bytes # - if [[ -s $input_file ]]; then + if [[ -s "${input_file}" ]]; then nchanl=-999 cat << EOF > input @@ -173,10 +166,10 @@ cat << EOF > input EOF startmsg - ./${bcor_exec} < input >> ${pgmout} 2>>errfile + ./${bcor_exec} < input >> "${pgmout}" 2>>errfile export err=$?; err_chk if [[ $? -ne 0 ]]; then - fail=$(expr $fail + 1) + fail=$(( fail + 1 )) fi @@ -185,11 +178,11 @@ EOF # if [[ -s ${bcor_file} ]]; then - ${COMPRESS} ${bcor_file} + ${COMPRESS} "${bcor_file}" fi if [[ -s ${bcor_ctl} ]]; then - ${COMPRESS} ${bcor_ctl} + ${COMPRESS} "${bcor_ctl}" fi fi @@ -197,24 +190,24 @@ EOF done # type in $SATYPE loop - ${USHradmon}/rstprod.sh + "${USHgfs}/rstprod.sh" tar_file=radmon_bcor.tar if compgen -G "bcor*.ieee_d*" > /dev/null || compgen -G "bcor*.ctl*" > /dev/null; then - tar -cf $tar_file bcor*.ieee_d* bcor*.ctl* + tar -cf "${tar_file}" bcor*.ieee_d* bcor*.ctl* ${COMPRESS} ${tar_file} - mv $tar_file.${Z} ${TANKverf_rad}/. + mv "${tar_file}.${Z}" "${TANKverf_rad}/." - if [[ $RAD_AREA = "rgn" ]]; then + if [[ ${RAD_AREA} = "rgn" ]]; then cwd=$(pwd) - cd ${TANKverf_rad} - tar -xf ${tar_file}.${Z} - rm ${tar_file}.${Z} - cd ${cwd} + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" fi fi - if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then err=7 fi fi diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh index 51743277c9..7f98407ec5 100755 --- a/ush/radmon_verf_time.sh +++ b/ush/radmon_verf_time.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -source "$HOMEgfs/ush/preamble.sh" +source "${HOMEgfs}/ush/preamble.sh" ################################################################################ #### UNIX Script Documentation Block @@ -22,10 +22,10 @@ source "$HOMEgfs/ush/preamble.sh" # other supporting files into a temporary working directory. # # -# Usage: radmon_verf_time.sh PDATE +# Usage: radmon_verf_time.sh ${PDY}${cyc} # # Input script positional parameters: -# PDATE processing date +# PDYcyc processing date # yyyymmddcc format; required # # Imported Shell Variables: @@ -33,14 +33,11 @@ source "$HOMEgfs/ush/preamble.sh" # defaults to 1 (on) # RADMON_SUFFIX data source suffix # defauls to opr -# EXECradmon executable directory -# defaults to current directory -# FIXgdas fixed data directory -# defaults to current directory +# EXECgfs executable directory +# PARMmonitor parm data directory # RAD_AREA global or regional flag # defaults to global # TANKverf_rad data repository -# defaults to current directory # SATYPE list of satellite/instrument sources # defaults to none # VERBOSE Verbose flag (YES or NO) @@ -77,20 +74,12 @@ source "$HOMEgfs/ush/preamble.sh" # #################################################################### -# Command line arguments. -export PDATE=${1:-${PDATE:?}} - -# Directories -FIXgdas=${FIXgdas:-$(pwd)} -EXECradmon=${EXECradmon:-$(pwd)} -TANKverf_rad=${TANKverf_rad:-$(pwd)} - # File names #pgmout=${pgmout:-${jlogfile}} #touch $pgmout -radmon_err_rpt=${radmon_err_rpt:-${USHradmon}/radmon_err_rpt.sh} -base_file=${base_file:-$FIXgdas/gdas_radmon_base.tar} +radmon_err_rpt=${radmon_err_rpt:-${USHgfs}/radmon_err_rpt.sh} +base_file=${base_file:-${PARMmonitor}/gdas_radmon_base.tar} report=report.txt disclaimer=disclaimer.txt @@ -109,7 +98,7 @@ count_hdr=count_hdr.txt count_err=count_err.txt netcdf_boolean=".false." -if [[ $RADMON_NETCDF -eq 1 ]]; then +if [[ ${RADMON_NETCDF} -eq 1 ]]; then netcdf_boolean=".true." fi @@ -127,7 +116,7 @@ time_exec=radmon_time.x USE_ANL=${USE_ANL:-0} err=0 -if [[ $USE_ANL -eq 1 ]]; then +if [[ ${USE_ANL} -eq 1 ]]; then gesanl="ges anl" else gesanl="ges" @@ -137,26 +126,24 @@ fi #-------------------------------------------------------------------- # Copy extraction program and base files to working directory #------------------------------------------------------------------- -$NCP ${EXECradmon}/${time_exec} ./ +${NCP} "${EXECgfs}/${time_exec}" ./ if [[ ! -s ./${time_exec} ]]; then err=8 fi -iyy=$(echo $PDATE | cut -c1-4) -imm=$(echo $PDATE | cut -c5-6) -idd=$(echo $PDATE | cut -c7-8) -ihh=$(echo $PDATE | cut -c9-10) -cyc=$ihh -CYCLE=$cyc +iyy="${PDY:0:4}" +imm="${PDY:4:2}" +idd="${PDY:6:2}" +ihh=${cyc} local_base="local_base" -if [[ $DO_DATA_RPT -eq 1 ]]; then +if [[ ${DO_DATA_RPT} -eq 1 ]]; then if [[ -e ${base_file}.${Z} ]]; then - $NCP ${base_file}.${Z} ./${local_base}.{Z} - ${UNCOMPRESS} ${local_base}.${Z} + ${NCP} "${base_file}.${Z}" "./${local_base}.${Z}" + ${UNCOMPRESS} "${local_base}.${Z}" else - $NCP ${base_file} ./${local_base} + ${NCP} "${base_file}" ./${local_base} fi if [[ ! 
-s ./${local_base} ]]; then @@ -168,7 +155,7 @@ if [[ $DO_DATA_RPT -eq 1 ]]; then fi fi -if [[ $err -eq 0 ]]; then +if [[ ${err} -eq 0 ]]; then ctr=0 fail=0 @@ -183,23 +170,23 @@ if [[ $err -eq 0 ]]; then continue fi - ctr=$(expr $ctr + 1) + ctr=$(( ctr + 1 )) for dtype in ${gesanl}; do if [[ -f input ]]; then rm input; fi - if [[ $dtype == "anl" ]]; then - data_file=${type}_anl.${PDATE}.ieee_d + if [[ ${dtype} == "anl" ]]; then + data_file="${type}_anl.${PDY}${cyc}.ieee_d" ctl_file=${type}_anl.ctl time_ctl=time.${ctl_file} else - data_file=${type}.${PDATE}.ieee_d + data_file="${type}.${PDY}${cyc}.ieee_d" ctl_file=${type}.ctl time_ctl=time.${ctl_file} fi - if [[ $REGIONAL_RR -eq 1 ]]; then + if [[ ${REGIONAL_RR} -eq 1 ]]; then time_file=${rgnHH}.time.${data_file}.${rgnTM} else time_file=time.${data_file} @@ -227,48 +214,48 @@ cat << EOF > input / EOF - ./${time_exec} < input >> stdout.${type} 2>>errfile + ./${time_exec} < input >> stdout."${type}" 2>>errfile - if [[ $err -ne 0 ]]; then - fail=$(expr $fail + 1) + if [[ ${err} -ne 0 ]]; then + fail=$(( fail + 1 )) fi #------------------------------------------------------------------- # move data, control, and stdout files to $TANKverf_rad and compress #------------------------------------------------------------------- - cat stdout.${type} >> stdout.time + cat "stdout.${type}" >> stdout.time if [[ -s ${time_file} ]]; then - ${COMPRESS} ${time_file} + ${COMPRESS} "${time_file}" fi if [[ -s ${time_ctl} ]]; then - ${COMPRESS} ${time_ctl} + ${COMPRESS} "${time_ctl}" fi done done - ${USHradmon}/rstprod.sh + "${USHgfs}/rstprod.sh" if compgen -G "time*.ieee_d*" > /dev/null || compgen -G "time*.ctl*" > /dev/null; then tar_file=radmon_time.tar - tar -cf $tar_file time*.ieee_d* time*.ctl* + tar -cf "${tar_file}" time*.ieee_d* time*.ctl* ${COMPRESS} ${tar_file} - mv $tar_file.${Z} ${TANKverf_rad}/. + mv "${tar_file}.${Z}" "${TANKverf_rad}/." 
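# The radmon_verf_time.sh hunk below replaces the NDATE utility with GNU date
# to step back one assimilation cycle (qdate) and derive the previous day
# (pday). A short sketch of the equivalent arithmetic, shown only to make the
# intended behaviour explicit; the PDY/cyc/assim_freq values are illustrative
# assumptions.
from datetime import datetime, timedelta

PDY, cyc, assim_freq = "20211220", "18", 6
qdate = (datetime.strptime(PDY + cyc, "%Y%m%d%H")
         - timedelta(hours=assim_freq)).strftime("%Y%m%d%H")
pday = qdate[:8]
print(qdate, pday)  # 2021122012 20211220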
- if [[ ${RAD_AREA} = "rgn" ]]; then + if [[ ${RAD_AREA} = "rgn" ]]; then cwd=$(pwd) - cd ${TANKverf_rad} - tar -xf ${tar_file}.${Z} - rm ${tar_file}.${Z} - cd ${cwd} + cd "${TANKverf_rad}" + tar -xf "${tar_file}.${Z}" + rm "${tar_file}.${Z}" + cd "${cwd}" fi fi - if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then - echo "fail, ctr = $fail, $ctr" + if [[ ${ctr} -gt 0 && ${fail} -eq ${ctr} || ${fail} -gt ${ctr} ]]; then + echo "fail, ctr = ${fail}, ${ctr}" err=10 fi @@ -282,7 +269,7 @@ fi #------------------------------------------------------------------- #################################################################### -if [[ $DO_DATA_RPT -eq 1 ]]; then +if [[ ${DO_DATA_RPT} -eq 1 ]]; then #--------------------------- # build report disclaimer @@ -301,8 +288,8 @@ EOF # Check for missing diag files # tmp_satype="./tmp_satype.txt" - echo ${SATYPE} > ${tmp_satype} - ${USHradmon}/radmon_diag_ck.sh --rad ${radstat} --sat ${tmp_satype} --out ${diag} + echo "${SATYPE}" > ${tmp_satype} + "${USHgfs}/radmon_diag_ck.sh" --rad "${radstat}" --sat "${tmp_satype}" --out "${diag}" if [[ -s ${diag} ]]; then cat << EOF > ${diag_hdr} @@ -328,11 +315,11 @@ EOF # if [[ -s ${diag} ]]; then lines=$(wc -l <${diag}) - echo "lines in diag = $lines" + echo "lines in diag = ${lines}" - if [[ $lines -gt 0 ]]; then + if [[ ${lines} -gt 0 ]]; then cat ${diag_report} - cp ${diag} ${TANKverf_rad}/bad_diag.${PDATE} + cp ${diag} "${TANKverf_rad}/bad_diag.${PDY}${cyc}" else rm ${diag_report} fi @@ -344,12 +331,12 @@ EOF # Identify bad_pen and bad_chan files for this cycle and # previous cycle - bad_pen=bad_pen.${PDATE} - bad_chan=bad_chan.${PDATE} - low_count=low_count.${PDATE} + bad_pen=bad_pen.${PDY}${cyc} + bad_chan=bad_chan.${PDY}${cyc} + low_count=low_count.${PDY}${cyc} - qdate=$($NDATE -${CYCLE_INTERVAL} $PDATE) - pday=$(echo $qdate | cut -c1-8) + qdate=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") + pday="${qdate:0:8}" prev_bad_pen=bad_pen.${qdate} prev_bad_chan=bad_chan.${qdate} @@ -359,35 +346,35 @@ EOF prev_bad_chan=${TANKverf_radM1}/${prev_bad_chan} prev_low_count=${TANKverf_radM1}/${prev_low_count} - if [[ -s $bad_pen ]]; then - echo "pad_pen = $bad_pen" + if [[ -s ${bad_pen} ]]; then + echo "bad_pen = ${bad_pen}" fi - if [[ -s $prev_bad_pen ]]; then - echo "prev_pad_pen = $prev_bad_pen" + if [[ -s ${prev_bad_pen} ]]; then + echo "prev_bad_pen = ${prev_bad_pen}" fi - if [[ -s $bad_chan ]]; then - echo "bad_chan = $bad_chan" + if [[ -s ${bad_chan} ]]; then + echo "bad_chan = ${bad_chan}" fi - if [[ -s $prev_bad_chan ]]; then - echo "prev_bad_chan = $prev_bad_chan" + if [[ -s ${prev_bad_chan} ]]; then + echo "prev_bad_chan = ${prev_bad_chan}" fi - if [[ -s $low_count ]]; then - echo "low_count = $low_count" + if [[ -s ${low_count} ]]; then + echo "low_count = ${low_count}" fi - if [[ -s $prev_low_count ]]; then - echo "prev_low_count = $prev_low_count" + if [[ -s ${prev_low_count} ]]; then + echo "prev_low_count = ${prev_low_count}" fi do_pen=0 do_chan=0 do_cnt=0 - if [[ -s $bad_pen && -s $prev_bad_pen ]]; then + if [[ -s ${bad_pen} && -s ${prev_bad_pen} ]]; then do_pen=1 fi - if [[ -s $low_count && -s $prev_low_count ]]; then + if [[ -s ${low_count} && -s ${prev_low_count} ]]; then do_cnt=1 fi @@ -395,7 +382,7 @@ EOF # avoid doing the bad_chan report for REGIONAL_RR sources -- because # they run hourly they often have 0 count channels for off-hour runs.
# - if [[ -s $bad_chan && -s $prev_bad_chan && REGIONAL_RR -eq 0 ]]; then + if [[ -s ${bad_chan} && -s ${prev_bad_chan} && ${REGIONAL_RR} -eq 0 ]]; then do_chan=1 fi @@ -403,37 +390,37 @@ EOF # Remove extra spaces in new bad_pen & low_count files # if [[ -s ${bad_pen} ]]; then - gawk '{$1=$1}1' $bad_pen > tmp.bad_pen - mv -f tmp.bad_pen $bad_pen + gawk '{$1=$1}1' "${bad_pen}" > tmp.bad_pen + mv -f tmp.bad_pen "${bad_pen}" fi if [[ -s ${low_count} ]]; then - gawk '{$1=$1}1' $low_count > tmp.low_count - mv -f tmp.low_count $low_count + gawk '{$1=$1}1' "${low_count}" > tmp.low_count + mv -f tmp.low_count "${low_count}" fi - echo " do_pen, do_chan, do_cnt = $do_pen, $do_chan, $do_cnt" - echo " diag_report = $diag_report " - if [[ $do_pen -eq 1 || $do_chan -eq 1 || $do_cnt -eq 1 || -s ${diag_report} ]]; then + echo " do_pen, do_chan, do_cnt = ${do_pen}, ${do_chan}, ${do_cnt}" + echo " diag_report = ${diag_report} " + if [[ ${do_pen} -eq 1 || ${do_chan} -eq 1 || ${do_cnt} -eq 1 || -s ${diag_report} ]]; then - if [[ $do_pen -eq 1 ]]; then + if [[ ${do_pen} -eq 1 ]]; then echo "calling radmon_err_rpt for pen" - ${radmon_err_rpt} ${prev_bad_pen} ${bad_pen} pen ${qdate} \ - ${PDATE} ${diag_report} ${pen_err} + ${radmon_err_rpt} "${prev_bad_pen}" "${bad_pen}" pen "${qdate}" \ + "${PDY}${cyc}" ${diag_report} ${pen_err} fi - if [[ $do_chan -eq 1 ]]; then + if [[ ${do_chan} -eq 1 ]]; then echo "calling radmon_err_rpt for chan" - ${radmon_err_rpt} ${prev_bad_chan} ${bad_chan} chan ${qdate} \ - ${PDATE} ${diag_report} ${chan_err} + ${radmon_err_rpt} "${prev_bad_chan}" "${bad_chan}" chan "${qdate}" \ + "${PDY}${cyc}" ${diag_report} ${chan_err} fi - if [[ $do_cnt -eq 1 ]]; then + if [[ ${do_cnt} -eq 1 ]]; then echo "calling radmon_err_rpt for cnt" - ${radmon_err_rpt} ${prev_low_count} ${low_count} cnt ${qdate} \ - ${PDATE} ${diag_report} ${count_err} + ${radmon_err_rpt} "${prev_low_count}" "${low_count}" cnt "${qdate}" \ + "${PDY}${cyc}" ${diag_report} ${count_err} fi #------------------------------------------------------------------- @@ -445,18 +432,18 @@ EOF echo DOING ERROR REPORTING - cat << EOF > $report + cat << EOF > ${report} Radiance Monitor warning report Net: ${RADMON_SUFFIX} Run: ${RUN} - Cycle: $PDATE + Cycle: ${PDY}${cyc} EOF if [[ -s ${diag_report} ]]; then echo OUTPUTING DIAG_REPORT - cat ${diag_report} >> $report + cat ${diag_report} >> ${report} fi if [[ -s ${chan_err} ]]; then @@ -472,8 +459,8 @@ EOF EOF - cat ${chan_hdr} >> $report - cat ${chan_err} >> $report + cat ${chan_hdr} >> ${report} + cat ${chan_err} >> ${report} fi @@ -490,8 +477,8 @@ Satellite/Instrument Obs Count Avg Count EOF - cat ${count_hdr} >> $report - cat ${count_err} >> $report + cat ${count_hdr} >> ${report} + cat ${count_err} >> ${report} fi @@ -507,15 +494,15 @@ EOF ============ ======= ====== Cycle Penalty Bound ----- ------- ----- EOF - cat ${pen_hdr} >> $report - cat ${pen_err} >> $report + cat ${pen_hdr} >> ${report} + cat ${pen_err} >> ${report} rm -f ${pen_hdr} rm -f ${pen_err} fi - echo >> $report - cat ${disclaimer} >> $report - echo >> $report + echo >> ${report} + cat ${disclaimer} >> ${report} + echo >> ${report} fi #------------------------------------------------------------------- @@ -523,10 +510,10 @@ EOF # if [[ -s ${report} ]]; then lines=$(wc -l <${report}) - if [[ $lines -gt 2 ]]; then + if [[ ${lines} -gt 2 ]]; then cat ${report} - $NCP ${report} ${TANKverf_rad}/warning.${PDATE} + ${NCP} ${report} "${TANKverf_rad}/warning.${PDY}${cyc}" fi fi @@ -537,22 +524,22 @@ EOF # copy new bad_pen, bad_chan,
and low_count files to $TANKverf_rad # if [[ -s ${bad_chan} ]]; then - mv ${bad_chan} ${TANKverf_rad}/. + mv "${bad_chan}" "${TANKverf_rad}/." fi if [[ -s ${bad_pen} ]]; then - mv ${bad_pen} ${TANKverf_rad}/. + mv "${bad_pen}" "${TANKverf_rad}/." fi if [[ -s ${low_count} ]]; then - mv ${low_count} ${TANKverf_rad}/. + mv "${low_count}" "${TANKverf_rad}/." fi fi for type in ${SATYPE}; do - rm -f stdout.${type} + rm -f "stdout.${type}" done ################################################################################ diff --git a/ush/run_mpmd.sh b/ush/run_mpmd.sh new file mode 100755 index 0000000000..24cb3f2656 --- /dev/null +++ b/ush/run_mpmd.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +cmdfile=${1:?"run_mpmd requires an input file containing commands to execute in MPMD mode"} + +# Determine the number of MPMD processes from incoming ${cmdfile} +nprocs=$(wc -l < "${cmdfile}") + +# Local MPMD file containing instructions to run in CFP +mpmd_cmdfile="${DATA:-}/mpmd_cmdfile" +if [[ -s "${mpmd_cmdfile}" ]]; then rm -f "${mpmd_cmdfile}"; fi + +if [[ "${launcher:-}" =~ ^srun.* ]]; then # srun-based system e.g. Hera, Orion, etc. + + # Slurm requires a counter in front of each line in the script + # Read the incoming cmdfile and create srun usable cmdfile + nm=0 + # shellcheck disable=SC2312 + while IFS= read -r line; do + echo "${nm} ${line}" >> "${mpmd_cmdfile}" + ((nm=nm+1)) + done < "${cmdfile}" + + set +e + # shellcheck disable=SC2086 + ${launcher:-} ${mpmd_opt:-} -n ${nprocs} "${mpmd_cmdfile}" + rc=$? + set_strict + if (( rc == 0 )); then + out_files=$(find . -name 'mpmd.*.*.out') + fi + +elif [[ "${launcher:-}" =~ ^mpiexec.* ]]; then # mpiexec + + # Redirect output from each process to its own stdout + # Read the incoming cmdfile and create mpiexec usable cmdfile + nm=0 + echo "#!/bin/bash" >> "${mpmd_cmdfile}" + # shellcheck disable=SC2312 + while IFS= read -r line; do + echo "${line} > mpmd.${nm}.out" >> "${mpmd_cmdfile}" + ((nm=nm+1)) + done < "${cmdfile}" + + chmod 755 "${mpmd_cmdfile}" + # shellcheck disable=SC2086 + ${launcher:-} -np ${nprocs} ${mpmd_opt:-} "${mpmd_cmdfile}" + rc=$? + if (( rc == 0 )); then + out_files=$(find . 
-name 'mpmd.*.out') + fi + +else + + echo "FATAL ERROR: CFP is not usable with launcher: '${launcher:-}'" + rc=1 + +fi + +# On success concatenate processor specific output into a single mpmd.out +if (( rc == 0 )); then + rm -f "${mpmd_cmdfile}" + for file in ${out_files}; do + cat "${file}" >> mpmd.out + rm -f "${file}" + done +fi + +exit "${rc}" diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh index 5b5b4ba34b..cda9030577 100755 --- a/ush/syndat_qctropcy.sh +++ b/ush/syndat_qctropcy.sh @@ -46,7 +46,7 @@ # subsequent program SYNDAT_SYNDATA) # PARMSYND - path to syndat parm field directory # EXECSYND - path to syndat executable directory -# FIXSYND - path to syndat fix field directory +# FIXam - path to syndat fix field directory # USHSYND - path to syndat ush directory # Imported variables that can be passed in: @@ -59,12 +59,10 @@ # data base # (Default: /dcom/us007003) # slmask - path to t126 32-bit gaussian land/sea mask file -# (Default: $FIXSYND/syndat_slmask.t126.gaussian) +# (Default: $FIXam/syndat_slmask.t126.gaussian) # copy_back - switch to copy updated files back to archive directory and # to tcvitals directory # (Default: YES) -# SENDCOM switch copy output files to $COMSP -# (Default: YES) # files_override - switch to override default "files" setting for given run # (Default: not set) # TIMEIT - optional time and resource reporting (Default: not set) @@ -76,14 +74,13 @@ HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep} HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep} TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/us007003} -FIXSYND=${FIXSYND:-$HOMEgfs/fix/am} +FIXam=${FIXam:-$HOMEgfs/fix/am} USHSYND=${USHSYND:-$HOMEgfs/ush} EXECSYND=${EXECSYND:-$HOMEgfs/exec} PARMSYND=${PARMSYND:-$HOMEgfs/parm/relo} -slmask=${slmask:-$FIXSYND/syndat_slmask.t126.gaussian} +slmask=${slmask:-$FIXam/syndat_slmask.t126.gaussian} copy_back=${copy_back:-YES} -SENDCOM=${SENDCOM:-YES} files_override=${files_override:-""} cd $DATA @@ -119,13 +116,11 @@ positional parameter 1" # to remote machine(s) # (Note: Only do so if files don't already exist) - if [ $SENDCOM = YES ]; then - if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then - cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" - fi - if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" ]]; then - cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" - fi + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" + fi + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" fi exit @@ -195,10 +190,10 @@ fi echo " &INPUT RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp cat $PARMSYND/syndat_qctropcy.${RUN}.parm >> vitchk.inp -# Copy the fixed fields from FIXSYND +# Copy the fixed fields from FIXam -cp $FIXSYND/syndat_fildef.vit fildef.vit -cp $FIXSYND/syndat_stmnames stmnames +cp $FIXam/syndat_fildef.vit fildef.vit +cp $FIXam/syndat_stmnames stmnames rm -f nhc fnoc lthistry @@ -296,13 +291,11 @@ if [ "$errqct" -gt '0' ];then # wasting time with multiple attempts to remote machine(s) # (Note: Only do so if files don't already exist) - if [ $SENDCOM = YES ]; then - if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then - cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" - fi - if [[ ! 
-s ${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark} ]]; then - cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" - fi + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" + fi + if [[ ! -s ${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark} ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" fi exit @@ -379,7 +372,7 @@ fi # This is the file that connects to the later RELOCATE and/or PREP scripts -[ $SENDCOM = YES ] && cp current "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" +cp current "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" # Create the DBNet alert if [ $SENDDBN = "YES" ] @@ -388,6 +381,6 @@ then fi # Write JTWC/FNOC Tcvitals to /com path since not saved anywhere else -[ $SENDCOM = YES ] && cp fnoc "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" +cp fnoc "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" exit diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh index 9b170ddfd0..01a21bd12c 100755 --- a/ush/tropcy_relocate.sh +++ b/ush/tropcy_relocate.sh @@ -147,8 +147,6 @@ # Default is "$EXECUTIL/gettrk" # BKGFREQ Frequency of background files for relocation # Default is "3" -# SENDCOM String when set to "YES" copies output files to $COMSP -# Default is "YES" # SENDDBN String when set to "YES" alerts output files to $COMSP # NDATE String indicating executable path for NDATE utility program # Default is "$EXECUTIL/ndate" @@ -210,7 +208,6 @@ source "$HOMEgfs/ush/preamble.sh" MACHINE=${MACHINE:-$(hostname -s | cut -c 1-3)} -SENDCOM=${SENDCOM:-YES} export OPSROOT=${OPSROOT:-/lfs/h1/ops/prod} GRIBVERSION=${GRIBVERSION:-"grib2"} @@ -687,21 +684,20 @@ else fi rm -f RELOCATE_GES cmd - if [ "$SENDCOM" = "YES" ]; then - cp "rel_inform1" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" - cp "tcvitals" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" - if [ "$SENDDBN" = "YES" ]; then - if test "$RUN" = "gdas1" - then - "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" - "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" - fi - if test "$RUN" = "gfs" - then - "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" - "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" - fi - fi + + cp "rel_inform1" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + cp "tcvitals" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" + if [ "$SENDDBN" = "YES" ]; then + if test "$RUN" = "gdas1" + then + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" + fi + if test "$RUN" = "gfs" + then + "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" + fi fi # -------------------------------------------------------------------------- diff --git a/ush/ufs_configure.sh b/ush/ufs_configure.sh new file mode 100755 index 0000000000..8898d11162 --- /dev/null +++ b/ush/ufs_configure.sh @@ -0,0 +1,107 @@ +#! 
/usr/bin/env bash + +##### +## This script writes ufs.configure file +## first, select a "*.IN" templates based on +## $confignamevarforufs and parse values based on +## $cpl** switches. +## +## This is a child script of modular +## forecast script. This script is definition only (Is it? There is nothing defined here being used outside this script.) +##### + +# Disable variable not used warnings +# shellcheck disable=SC2034 +writing_ufs_configure() { + +echo "SUB ${FUNCNAME[0]}: ufs.configure.sh begins" + +# Setup ufs.configure +local DumpFields=${NEMSDumpFields:-false} +local cap_dbug_flag=${cap_dbug_flag:-0} +# Determine "cmeps_run_type" based on the availability of the mediator restart file +# If it is a warm_start, we already copied the mediator restart to DATA, if it was present +# If the mediator restart was not present, despite being a "warm_start", we put out a WARNING +# in forecast_postdet.sh +if [[ -f "${DATA}/ufs.cpld.cpl.r.nc" ]]; then + local cmeps_run_type='continue' +else + local cmeps_run_type='startup' +fi + +local esmf_logkind=${esmf_logkind:-"ESMF_LOGKIND_MULTI"} #options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + +# Atm-related +local atm_model="fv3" +local atm_petlist_bounds="0 $(( ATMPETS-1 ))" +local atm_omp_num_threads="${ATMTHREADS}" + +local med_model="cmeps" +local med_petlist_bounds="0 $(( MEDPETS-1 ))" +local med_omp_num_threads="${MEDTHREADS}" + +if [[ "${cpl}" = ".true." ]]; then + local coupling_interval_slow_sec="${CPL_SLOW}" +fi + +if [[ "${cplflx}" = ".true." ]]; then + + local use_coldstart=${use_coldstart:-".false."} + local use_mommesh=${USE_MOMMESH:-"true"} + + local ocn_model="mom6" + local ocn_petlist_bounds="${ATMPETS} $(( ATMPETS+OCNPETS-1 ))" + local ocn_omp_num_threads="${OCNTHREADS}" + local RUNTYPE="${cmeps_run_type}" + local CPLMODE="${cplmode}" + local coupling_interval_fast_sec="${CPL_FAST}" + local RESTART_N="${restart_interval}" + local ocean_albedo_limit=0.06 + local ATMTILESIZE="${CASE:1}" + local ocean_albedo_limit=0.06 +fi + +if [[ "${cplice}" = ".true." ]]; then + + local ice_model="cice6" + local ice_petlist_bounds="$(( ATMPETS+OCNPETS )) $(( ATMPETS+OCNPETS+ICEPETS-1 ))" + local ice_omp_num_threads="${ICETHREADS}" + local MESH_OCN_ICE=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} + local FHMAX="${FHMAX_GFS}" # TODO: How did this get in here hard-wired to FHMAX_GFS? +fi + +if [[ "${cplwav}" = ".true." ]]; then + + local wav_model="ww3" + local wav_petlist_bounds="$(( ATMPETS+OCNPETS+ICEPETS )) $(( ATMPETS+OCNPETS+ICEPETS+WAVPETS-1 ))" + local wav_omp_num_threads="${WAVTHREADS}" + local MULTIGRID="${waveMULTIGRID}" + +fi + +if [[ "${cplchm}" = ".true." ]]; then + + local chm_model="gocart" + local chm_petlist_bounds="0 $(( CHMPETS-1 ))" + local chm_omp_num_threads="${CHMTHREADS}" + local coupling_interval_fast_sec="${CPL_FAST}" + +fi + +# Ensure the template exists +if [[ ! -r "${ufs_configure_template}" ]]; then + echo "FATAL ERROR: template '${ufs_configure_template}' does not exist, ABORT!" 
+ exit 1 +fi + +source "${HOMEgfs}/ush/atparse.bash" +rm -f "${DATA}/ufs.configure" +atparse < "${ufs_configure_template}" >> "${DATA}/ufs.configure" +echo "Rendered ufs.configure:" +cat ufs.configure + +${NCP} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/fd_ufs.yaml" fd_ufs.yaml + +echo "SUB ${FUNCNAME[0]}: ufs.configure.sh ends for ${ufs_configure_template}" + +} diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index 8511515abb..af28760269 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -83,8 +83,8 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then set_trace if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ - [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDCOM}" ]] || \ - [[ -z "${gribflags}" ]] || [[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \ + [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \ + [[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \ [[ -z "${SENDDBN}" ]]; then set +x echo ' ' diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index 7fa8d9d7f3..c11a75f89d 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -66,8 +66,8 @@ source "$HOMEgfs/ush/preamble.sh" set_trace if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ - [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDCOM}" ]] || \ - [[ -z "${SENDDBN}" ]] || [ -z "${waveGRD}" ] + [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \ + [ -z "${waveGRD}" ] then set +x echo ' ' @@ -75,7 +75,7 @@ source "$HOMEgfs/ush/preamble.sh" echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***' echo '***************************************************' echo ' ' - echo "${PDY}${cyc} ${cycle} ${EXECwave} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDCOM} ${SENDDBN} ${waveGRD}" + echo "${PDY}${cyc} ${cycle} ${EXECwave} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}" set_trace exit 1 fi @@ -172,12 +172,10 @@ source "$HOMEgfs/ush/preamble.sh" # 1.c Save in /com - if [ "$SENDCOM" = 'YES' ] - then - set +x - echo " Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" - set_trace - cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" + set +x + echo " Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" + set_trace + cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" # if [ "$SENDDBN" = 'YES' ] # then @@ -190,7 +188,6 @@ source "$HOMEgfs/ush/preamble.sh" # # fi - fi # --------------------------------------------------------------------------- # # 2. 
Clean up the directory diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh index a32a2b7e43..5ec1d7fc2e 100755 --- a/ush/wave_prnc_ice.sh +++ b/ush/wave_prnc_ice.sh @@ -56,8 +56,7 @@ source "$HOMEgfs/ush/preamble.sh" if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \ [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXwave}" ]] || [[ -z "${EXECwave}" ]] || \ - [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${SENDCOM}" ]] || \ - [[ -z "${COM_OBS}" ]]; then + [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COM_OBS}" ]]; then set +x echo ' ' diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh index 9264aac5f3..1a8d6d6cc5 100755 --- a/ush/wave_tar.sh +++ b/ush/wave_tar.sh @@ -77,7 +77,7 @@ source "$HOMEgfs/ush/preamble.sh" # The tested variables should be exported by the postprocessor script. if [[ -z "${cycle}" ]] || [[ -z "${COM_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \ - [[ -z "${SENDCOM}" ]] || [[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then + [[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then set +x echo ' ' echo '*****************************************************' diff --git a/versions/build.hera.ver b/versions/build.hera.ver new file mode 100644 index 0000000000..ff85b1a801 --- /dev/null +++ b/versions/build.hera.ver @@ -0,0 +1,3 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 +source "${HOMEgfs:-}/versions/build.spack.ver" diff --git a/versions/build.hercules.ver b/versions/build.hercules.ver new file mode 100644 index 0000000000..5513466631 --- /dev/null +++ b/versions/build.hercules.ver @@ -0,0 +1,3 @@ +export stack_intel_ver=2021.9.0 +export stack_impi_ver=2021.9.0 +source "${HOMEgfs:-}/versions/build.spack.ver" diff --git a/versions/build.jet.ver b/versions/build.jet.ver new file mode 100644 index 0000000000..ff85b1a801 --- /dev/null +++ b/versions/build.jet.ver @@ -0,0 +1,3 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 +source "${HOMEgfs:-}/versions/build.spack.ver" diff --git a/versions/build.orion.ver b/versions/build.orion.ver new file mode 100644 index 0000000000..ff85b1a801 --- /dev/null +++ b/versions/build.orion.ver @@ -0,0 +1,3 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 +source "${HOMEgfs:-}/versions/build.spack.ver" diff --git a/versions/build.s4.ver b/versions/build.s4.ver new file mode 100644 index 0000000000..a0aae51d87 --- /dev/null +++ b/versions/build.s4.ver @@ -0,0 +1,3 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.0 +source "${HOMEgfs:-}/versions/build.spack.ver" diff --git a/versions/build.spack.ver b/versions/build.spack.ver new file mode 100644 index 0000000000..28c3a10185 --- /dev/null +++ b/versions/build.spack.ver @@ -0,0 +1,36 @@ +export spack_stack_ver=1.5.1 +export spack_env=gsi-addon +export python_ver=3.10.8 + +export cmake_ver=3.23.1 + +export gempak_ver=7.4.2 +export jasper_ver=2.0.32 +export libpng_ver=1.6.37 +export zlib_ver=1.2.13 +export esmf_ver=8.5.0 +export fms_ver=2023.02.01 +export cdo_ver=2.0.5 +export nco_ver=5.0.6 +export ncl_ver=6.6.2 + +export hdf5_ver=1.14.0 +export netcdf_c_ver=4.9.2 +export netcdf_fortran_ver=4.6.0 + +export bacio_ver=2.4.1 +export nemsio_ver=2.5.4 +export sigio_ver=2.3.2 +export w3emc_ver=2.10.0 +export bufr_ver=11.7.0 +export g2_ver=3.4.5 +export sp_ver=2.3.3 +export ip_ver=4.3.0 +export gsi_ncdiag_ver=1.1.2 +export g2tmpl_ver=1.10.2 +export crtm_ver=2.4.0 +export wgrib2_ver=2.0.8 +export grib_util_ver=1.3.0 +export py_netcdf4_ver=1.5.8 +export py_pyyaml_ver=5.4.1 +export 
py_jinja2_ver=3.1.2 diff --git a/versions/build.wcoss2.ver b/versions/build.wcoss2.ver new file mode 100644 index 0000000000..bb7ee6ac99 --- /dev/null +++ b/versions/build.wcoss2.ver @@ -0,0 +1,36 @@ +export PrgEnv_intel_ver=8.1.0 +export intel_ver=19.1.3.304 +export craype_ver=2.7.13 +export cray_mpich_ver=8.1.9 + +export cmake_ver=3.20.2 + +export python_ver=3.8.6 +export gempak_ver=7.14.1 +export jasper_ver=2.0.25 +export libpng_ver=1.6.37 +export zlib_ver=1.2.11 +export esmf_ver=8.3.0b09 +export pio_ver=2.5.2 +export fms_ver=2022.04 + +export hdf5_ver=1.10.6 +export netcdf_ver=4.7.4 + +export bacio_ver=2.4.1 +export w3nco_ver=2.4.1 +export nemsio_ver=2.5.2 +export sigio_ver=2.3.2 +export w3emc_ver=2.9.2 +export bufr_ver=11.7.0 +export g2_ver=3.4.5 +export sp_ver=2.3.3 +export ip_ver=3.3.3 +export wrf_io_ver=1.2.0 +export ncio_ver=1.1.2 +export ncdiag_ver=1.0.0 +export g2tmpl_ver=1.10.2 +export crtm_ver=2.4.0 +export wgrib2_ver=2.0.8 + +export upp_ver=10.0.8 diff --git a/versions/fix.ver b/versions/fix.ver index 775155e70e..a01e9d4151 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -5,16 +5,16 @@ export aer_ver=20220805 export am_ver=20220805 export chem_ver=20220805 export cice_ver=20220805 -export cpl_ver=20220805 +export cpl_ver=20230526 export datm_ver=20220805 export gdas_crtm_ver=20220805 export gdas_fv3jedi_ver=20220805 export gdas_gsibec_ver=20221031 export glwu_ver=20220805 -export gsi_ver=20230112 +export gsi_ver=20230911 export lut_ver=20220805 export mom6_ver=20220805 -export orog_ver=20220805 +export orog_ver=20231027 export reg2grb2_ver=20220805 export sfc_climo_ver=20220805 export ugwd_ver=20220805 diff --git a/versions/run.hera.ver b/versions/run.hera.ver new file mode 100644 index 0000000000..43443ba715 --- /dev/null +++ b/versions/run.hera.ver @@ -0,0 +1,13 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 + +export hpss_ver=hpss +export ncl_ver=6.6.2 +export R_ver=3.5.0 +export gempak_ver=7.4.2 + +#For metplus jobs, not currently working with spack-stack +#export met_ver=9.1.3 +#export metplus_ver=3.1.1 + +source "${HOMEgfs:-}/versions/run.spack.ver" diff --git a/versions/run.hercules.ver b/versions/run.hercules.ver new file mode 100644 index 0000000000..4bedeb1e96 --- /dev/null +++ b/versions/run.hercules.ver @@ -0,0 +1,10 @@ +export stack_intel_ver=2021.9.0 +export stack_impi_ver=2021.9.0 + +export ncl_ver=6.6.2 + +source "${HOMEgfs:-}/versions/run.spack.ver" + +# wgrib2 and cdo are different on Hercules from all the other systems +export wgrib2_ver=3.1.1 +export cdo_ver=2.2.0 diff --git a/versions/run.jet.ver b/versions/run.jet.ver new file mode 100644 index 0000000000..18a82cab4f --- /dev/null +++ b/versions/run.jet.ver @@ -0,0 +1,9 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 + +export hpss_ver= +export ncl_ver=6.6.2 +export R_ver=4.0.2 +export gempak_ver=7.4.2 + +source "${HOMEgfs:-}/versions/run.spack.ver" diff --git a/versions/run.orion.ver b/versions/run.orion.ver new file mode 100644 index 0000000000..ee2f65523b --- /dev/null +++ b/versions/run.orion.ver @@ -0,0 +1,7 @@ +export stack_intel_ver=2022.0.2 +export stack_impi_ver=2021.5.1 + +export ncl_ver=6.6.2 +export gempak_ver=7.5.1 + +source "${HOMEgfs:-}/versions/run.spack.ver" diff --git a/versions/run.s4.ver b/versions/run.s4.ver new file mode 100644 index 0000000000..56817ef439 --- /dev/null +++ b/versions/run.s4.ver @@ -0,0 +1,6 @@ +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.0 + +export ncl_ver=6.4.0-precompiled + +source 
"${HOMEgfs:-}/versions/run.spack.ver" diff --git a/versions/run.spack.ver b/versions/run.spack.ver new file mode 100644 index 0000000000..7045f2ed01 --- /dev/null +++ b/versions/run.spack.ver @@ -0,0 +1,27 @@ +export spack_stack_ver=1.5.1 +export spack_env=gsi-addon +export python_ver=3.10.8 + +export jasper_ver=2.0.32 +export libpng_ver=1.6.37 +export cdo_ver=2.0.5 +export nco_ver=5.0.6 + +export hdf5_ver=1.14.0 +export netcdf_c_ver=4.9.2 +export netcdf_fortran_ver=4.6.0 + +export bufr_ver=11.7.0 +export gsi_ncdiag_ver=1.1.2 +export g2tmpl_ver=1.10.2 +export crtm_ver=2.4.0 +export wgrib2_ver=2.0.8 +export py_netcdf4_ver=1.5.8 +export py_pyyaml_ver=5.4.1 +export py_jinja2_ver=3.1.2 + +export obsproc_run_ver=1.1.2 +export prepobs_run_ver=1.0.1 + +export ens_tracker_ver=feature-GFSv17_com_reorg +export fit2obs_ver=1.0.0 diff --git a/versions/run.wcoss2.ver b/versions/run.wcoss2.ver new file mode 100644 index 0000000000..a188cdea74 --- /dev/null +++ b/versions/run.wcoss2.ver @@ -0,0 +1,51 @@ +export version=v16.3.7 +export gfs_ver=v16.3.7 +export ukmet_ver=v2.2 +export ecmwf_ver=v2.1 +export nam_ver=v4.2 +export rtofs_ver=v2.3 +export obsproc_ver=v1.1 + +export envvar_ver=1.0 +export prod_envir_ver=${prod_envir_ver:-2.0.4} # Allow override from ops ecflow +export prod_util_ver=${prod_util_ver:-2.0.9} # Allow override from ops ecflow + +export PrgEnv_intel_ver=8.1.0 +export intel_ver=19.1.3.304 +export craype_ver=2.7.13 +export cray_mpich_ver=8.1.9 +export cray_pals_ver=1.0.17 +export cfp_ver=2.0.4 + +export python_ver=3.8.6 +export gempak_ver=7.14.1 +export perl_ver=5.32.0 +export libjpeg_ver=9c +export udunits_ver=2.2.28 +export gsl_ver=2.7 +export jasper_ver=2.0.25 +export zlib_ver=1.2.11 +export libpng_ver=1.6.37 +export cdo_ver=1.9.8 + +export hdf5_ver=1.10.6 +export netcdf_ver=4.7.4 + +export nco_ver=4.7.9 +export grib_util_ver=1.2.3 +export bufr_dump_ver=1.0.0 +export util_shared_ver=1.4.0 +export g2tmpl_ver=1.10.2 +export ncdiag_ver=1.0.0 +export crtm_ver=2.4.0 +export wgrib2_ver=2.0.8 + +# Development-only below + +export obsproc_run_ver=1.1.2 +export prepobs_run_ver=1.0.1 + +export ens_tracker_ver=feature-GFSv17_com_reorg +export fit2obs_ver=1.0.0 +export mos_ver=5.4.3 +export mos_shared_ver=2.7.2 diff --git a/workflow/applications/__init__.py b/workflow/applications/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/workflow/applications/application_factory.py b/workflow/applications/application_factory.py new file mode 100644 index 0000000000..ff6b6992f4 --- /dev/null +++ b/workflow/applications/application_factory.py @@ -0,0 +1,10 @@ +from wxflow import Factory +from applications.gfs_cycled import GFSCycledAppConfig +from applications.gfs_forecast_only import GFSForecastOnlyAppConfig +from applications.gefs import GEFSAppConfig + + +app_config_factory = Factory('AppConfig') +app_config_factory.register('gfs_cycled', GFSCycledAppConfig) +app_config_factory.register('gfs_forecast-only', GFSForecastOnlyAppConfig) +app_config_factory.register('gefs_forecast-only', GEFSAppConfig) diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py new file mode 100644 index 0000000000..766d4aa508 --- /dev/null +++ b/workflow/applications/applications.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python3 + +from typing import Dict, List, Any +from datetime import timedelta +from hosts import Host +from wxflow import Configuration, to_timedelta +from abc import ABC, ABCMeta, abstractmethod + +__all__ = ['AppConfig'] + + +class AppConfigInit(ABCMeta): 
+ def __call__(cls, *args, **kwargs): + ''' + We want the child classes to be able to define additional settings + before we source the configs and complete the rest of the process, + so break init up into two methods, one to run first (both in the + base class and the child class) and one to finalize the initiali- + zation after both have completed. + ''' + obj = type.__call__(cls, *args, **kwargs) + obj._init_finalize(*args, **kwargs) + return obj + + +class AppConfig(ABC, metaclass=AppConfigInit): + + VALID_MODES = ['cycled', 'forecast-only'] + + def __init__(self, conf: Configuration) -> None: + + self.scheduler = Host().scheduler + + _base = conf.parse_config('config.base') + # Define here so the child __init__ functions can use it; will + # be overwritten later during _init_finalize(). + self._base = _base + + self.mode = _base['MODE'] + + if self.mode not in self.VALID_MODES: + raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(self.VALID_MODES)}') + + self.net = _base['NET'] + self.model_app = _base.get('APP', 'ATM') + self.do_atm = _base.get('DO_ATM', True) + self.do_wave = _base.get('DO_WAVE', False) + self.do_wave_bnd = _base.get('DOBNDPNT_WAVE', False) + self.do_ocean = _base.get('DO_OCN', False) + self.do_ice = _base.get('DO_ICE', False) + self.do_aero = _base.get('DO_AERO', False) + self.do_bufrsnd = _base.get('DO_BUFRSND', False) + self.do_gempak = _base.get('DO_GEMPAK', False) + self.do_awips = _base.get('DO_AWIPS', False) + self.do_verfozn = _base.get('DO_VERFOZN', True) + self.do_verfrad = _base.get('DO_VERFRAD', True) + self.do_vminmon = _base.get('DO_VMINMON', True) + self.do_tracker = _base.get('DO_TRACKER', True) + self.do_genesis = _base.get('DO_GENESIS', True) + self.do_genesis_fsu = _base.get('DO_GENESIS_FSU', False) + self.do_metp = _base.get('DO_METP', False) + self.do_upp = not _base.get('WRITE_DOPOST', True) + self.do_mos = _base.get('DO_MOS', False) + + self.do_hpssarch = _base.get('HPSSARCH', False) + + self.nens = _base.get('NMEM_ENS', 0) + + self.wave_cdumps = None + if self.do_wave: + wave_cdump = _base.get('WAVE_CDUMP', 'BOTH').lower() + if wave_cdump in ['both']: + self.wave_cdumps = ['gfs', 'gdas'] + elif wave_cdump in ['gfs', 'gdas']: + self.wave_cdumps = [wave_cdump] + + def _init_finalize(self, conf: Configuration): + print("Finalizing initialize") + + # Get a list of all possible config_files that would be part of the application + self.configs_names = self._get_app_configs() + + # Source the config_files for the jobs in the application + self.configs = self._source_configs(conf) + + # Update the base config dictionary base on application + self.configs['base'] = self._update_base(self.configs['base']) + + # Save base in the internal state since it is often needed + self._base = self.configs['base'] + + # Get more configuration options into the class attributes + self.gfs_cyc = self._base.get('gfs_cyc') + + # Finally get task names for the application + self.task_names = self.get_task_names() + + @abstractmethod + def _get_app_configs(self): + pass + + @staticmethod + @abstractmethod + def _update_base(base_in: Dict[str, Any]) -> Dict[str, Any]: + ''' + Make final updates to base and return an updated copy + + Parameters + ---------- + base_in: Dict + Beginning base settings + + Returns + ------- + Dict: A copy of base_in with possible modifications based on the + net and mode. 
+ + ''' + pass + + def _source_configs(self, conf: Configuration) -> Dict[str, Any]: + """ + Given the configuration object and jobs, + source the configurations for each config and return a dictionary. + Every config depends on "config.base". + """ + + configs = dict() + + # Return config.base as well + configs['base'] = conf.parse_config('config.base') + + # Source the list of all config_files involved in the application + for config in self.configs_names: + + # All must source config.base first + files = ['config.base'] + + if config in ['eobs', 'eomg']: + files += ['config.anal', 'config.eobs'] + elif config in ['eupd']: + files += ['config.anal', 'config.eupd'] + elif config in ['efcs']: + files += ['config.fcst', 'config.efcs'] + elif 'wave' in config: + files += ['config.wave', f'config.{config}'] + else: + files += [f'config.{config}'] + + print(f'sourcing config.{config}') + configs[config] = conf.parse_config(files) + + return configs + + @abstractmethod + def get_task_names(self) -> Dict[str, List[str]]: + ''' + Create a list of task names for each CDUMP valid for the configuration. + + Parameters + ---------- + None + + Returns + ------- + Dict[str, List[str]]: Lists of tasks for each CDUMP. + + ''' + pass + + @staticmethod + def get_gfs_interval(gfs_cyc: int) -> timedelta: + """ + return interval in hours based on gfs_cyc + """ + + gfs_interval_map = {'1': '24H', '2': '12H', '4': '6H'} + + try: + return to_timedelta(gfs_interval_map[str(gfs_cyc)]) + except KeyError: + raise KeyError(f'Invalid gfs_cyc = {gfs_cyc}') diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py new file mode 100644 index 0000000000..b2369e8dfc --- /dev/null +++ b/workflow/applications/gefs.py @@ -0,0 +1,48 @@ +from applications.applications import AppConfig +from wxflow import Configuration + + +class GEFSAppConfig(AppConfig): + ''' + Class to define GEFS configurations + ''' + + def __init__(self, conf: Configuration): + super().__init__(conf) + + def _get_app_configs(self): + """ + Returns the config_files that are involved in gefs + """ + configs = ['stage_ic', 'fcst'] + + if self.nens > 0: + configs += ['efcs'] + + if self.do_wave: + configs += ['waveinit'] + + return configs + + @staticmethod + def _update_base(base_in): + + base_out = base_in.copy() + base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc']) + base_out['CDUMP'] = 'gefs' + + return base_out + + def get_task_names(self): + + tasks = ['stage_ic'] + + if self.do_wave: + tasks += ['waveinit'] + + tasks += ['fcst'] + + if self.nens > 0: + tasks += ['efcs'] + + return {f"{self._base['CDUMP']}": tasks} diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py new file mode 100644 index 0000000000..29c6b18f43 --- /dev/null +++ b/workflow/applications/gfs_cycled.py @@ -0,0 +1,322 @@ +from typing import Dict, Any +from applications.applications import AppConfig +from wxflow import Configuration, to_timedelta +from datetime import timedelta + + +class GFSCycledAppConfig(AppConfig): + ''' + Class to define GFS cycled configurations + ''' + + def __init__(self, conf: Configuration): + super().__init__(conf) + self.do_hybvar = self._base.get('DOHYBVAR', False) + self.do_fit2obs = self._base.get('DO_FIT2OBS', True) + self.do_jediatmvar = self._base.get('DO_JEDIATMVAR', False) + self.do_jediatmens = self._base.get('DO_JEDIATMENS', False) + self.do_jediocnvar = self._base.get('DO_JEDIOCNVAR', False) + self.do_jedilandda = self._base.get('DO_JEDILANDDA', False) + self.do_mergensst =
self._base.get('DO_MERGENSST', False) + + self.lobsdiag_forenkf = False + self.eupd_cdumps = None + if self.do_hybvar: + self.lobsdiag_forenkf = self._base.get('lobsdiag_forenkf', False) + eupd_cdump = self._base.get('EUPD_CYC', 'gdas').lower() + if eupd_cdump in ['both']: + self.eupd_cdumps = ['gfs', 'gdas'] + elif eupd_cdump in ['gfs', 'gdas']: + self.eupd_cdumps = [eupd_cdump] + + def _get_app_configs(self): + """ + Returns the config_files that are involved in the cycled app + """ + + configs = ['prep'] + + if self.do_jediatmvar: + configs += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal'] + else: + configs += ['anal', 'analdiag'] + + if self.do_jediocnvar: + configs += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy'] + + if self.do_ocean: + configs += ['ocnpost'] + + configs += ['sfcanl', 'analcalc', 'fcst', 'upp', 'atmos_products', 'arch', 'cleanup'] + + if self.do_hybvar: + if self.do_jediatmens: + configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] + else: + configs += ['eobs', 'eomg', 'ediag', 'eupd'] + configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] + + if self.do_fit2obs: + configs += ['fit2obs'] + + if self.do_verfozn: + configs += ['verfozn'] + + if self.do_verfrad: + configs += ['verfrad'] + + if self.do_vminmon: + configs += ['vminmon'] + + if self.do_tracker: + configs += ['tracker'] + + if self.do_genesis: + configs += ['genesis'] + + if self.do_genesis_fsu: + configs += ['genesis_fsu'] + + if self.do_metp: + configs += ['metp'] + + if self.do_gempak: + configs += ['gempak', 'npoess'] + + if self.do_bufrsnd: + configs += ['postsnd'] + + if self.do_awips: + configs += ['awips'] + + if self.do_wave: + configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt'] + if self.do_wave_bnd: + configs += ['wavepostbndpnt', 'wavepostbndpntbll'] + if self.do_gempak: + configs += ['wavegempak'] + if self.do_awips: + configs += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_aero: + configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + + if self.do_jedilandda: + configs += ['preplandobs', 'landanl'] + + if self.do_mos: + configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + + return configs + + @staticmethod + def _update_base(base_in): + + return GFSCycledAppConfig.get_gfs_cyc_dates(base_in) + + def get_task_names(self): + """ + Get the task names for all the tasks in the cycled application. + Note that the order of the task names matters in the XML. + This is the place where that order is set. 
+ """ + + gdas_gfs_common_tasks_before_fcst = ['prep'] + gdas_gfs_common_cleanup_tasks = ['arch', 'cleanup'] + + if self.do_jediatmvar: + gdas_gfs_common_tasks_before_fcst += ['prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal'] + else: + gdas_gfs_common_tasks_before_fcst += ['anal'] + + if self.do_jediocnvar: + gdas_gfs_common_tasks_before_fcst += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', + 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy'] + + gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] + + if self.do_aero: + gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + + if self.do_jedilandda: + gdas_gfs_common_tasks_before_fcst += ['preplandobs', 'landanl'] + + wave_prep_tasks = ['waveinit', 'waveprep'] + wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] + wave_post_tasks = ['wavepostsbs', 'wavepostpnt'] + + hybrid_tasks = [] + hybrid_after_eupd_tasks = [] + if self.do_hybvar: + if self.do_jediatmens: + hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] + else: + hybrid_tasks += ['eobs', 'eupd', 'echgres'] + hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] + hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] + + # Collect all "gdas" cycle tasks + gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() + + if not self.do_jediatmvar: + gdas_tasks += ['analdiag'] + + if self.do_wave and 'gdas' in self.wave_cdumps: + gdas_tasks += wave_prep_tasks + + gdas_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] + + if self.do_upp: + gdas_tasks += ['atmupp'] + gdas_tasks += ['atmprod'] + + if self.do_wave and 'gdas' in self.wave_cdumps: + if self.do_wave_bnd: + gdas_tasks += wave_bndpnt_tasks + gdas_tasks += wave_post_tasks + + if self.do_fit2obs: + gdas_tasks += ['fit2obs'] + + if self.do_verfozn: + gdas_tasks += ['verfozn'] + + if self.do_verfrad: + gdas_tasks += ['verfrad'] + + if self.do_vminmon: + gdas_tasks += ['vminmon'] + + if self.do_gempak: + gdas_tasks += ['gempak', 'gempakmetancdc'] + + gdas_tasks += gdas_gfs_common_cleanup_tasks + + # Collect "gfs" cycle tasks + gfs_tasks = gdas_gfs_common_tasks_before_fcst.copy() + + if self.do_wave and 'gfs' in self.wave_cdumps: + gfs_tasks += wave_prep_tasks + + gfs_tasks += ['atmanlupp', 'atmanlprod', 'fcst'] + + if self.do_upp: + gfs_tasks += ['atmupp'] + gfs_tasks += ['atmprod'] + + if self.do_vminmon: + gfs_tasks += ['vminmon'] + + if self.do_tracker: + gfs_tasks += ['tracker'] + + if self.do_genesis: + gfs_tasks += ['genesis'] + + if self.do_genesis_fsu: + gfs_tasks += ['genesis_fsu'] + + if self.do_metp: + gfs_tasks += ['metp'] + + if self.do_wave and 'gfs' in self.wave_cdumps: + if self.do_wave_bnd: + gfs_tasks += wave_bndpnt_tasks + gfs_tasks += wave_post_tasks + if self.do_gempak: + gfs_tasks += ['wavegempak'] + if self.do_awips: + gfs_tasks += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_bufrsnd: + gfs_tasks += ['postsnd'] + + if self.do_gempak: + gfs_tasks += ['gempak'] + gfs_tasks += ['gempakmeta'] + gfs_tasks += ['gempakncdcupapgif'] + gfs_tasks += ['npoess_pgrb2_0p5deg'] + gfs_tasks += ['gempakpgrb2spec'] + + if self.do_awips: + gfs_tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] + + if self.do_mos: + gfs_tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + + gfs_tasks += 
gdas_gfs_common_cleanup_tasks + + tasks = dict() + tasks['gdas'] = gdas_tasks + + if self.do_hybvar and 'gdas' in self.eupd_cdumps: + enkfgdas_tasks = hybrid_tasks + hybrid_after_eupd_tasks + tasks['enkfgdas'] = enkfgdas_tasks + + # Add CDUMP=gfs tasks if running early cycle + if self.gfs_cyc > 0: + tasks['gfs'] = gfs_tasks + + if self.do_hybvar and 'gfs' in self.eupd_cdumps: + enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks + enkfgfs_tasks.remove("echgres") + tasks['enkfgfs'] = enkfgfs_tasks + + return tasks + + @staticmethod + def get_gfs_cyc_dates(base: Dict[str, Any]) -> Dict[str, Any]: + """ + Generate GFS dates from experiment dates and gfs_cyc choice + """ + + base_out = base.copy() + + sdate = base['SDATE'] + edate = base['EDATE'] + base_out['INTERVAL'] = to_timedelta(f"{base['assim_freq']}H") + + # Set GFS cycling dates + gfs_cyc = base['gfs_cyc'] + if gfs_cyc != 0: + interval_gfs = AppConfig.get_gfs_interval(gfs_cyc) + hrinc = 0 + hrdet = 0 + if gfs_cyc == 1: + hrinc = 24 - sdate.hour + hrdet = edate.hour + elif gfs_cyc == 2: + if sdate.hour in [0, 12]: + hrinc = 12 + elif sdate.hour in [6, 18]: + hrinc = 6 + if edate.hour in [6, 18]: + hrdet = 6 + elif gfs_cyc == 4: + hrinc = 6 + sdate_gfs = sdate + timedelta(hours=hrinc) + edate_gfs = edate - timedelta(hours=hrdet) + if sdate_gfs > edate: + print('W A R N I N G!') + print('Starting date for GFS cycles is after Ending date of experiment') + print(f'SDATE = {sdate.strftime("%Y%m%d%H")}, EDATE = {edate.strftime("%Y%m%d%H")}') + print(f'SDATE_GFS = {sdate_gfs.strftime("%Y%m%d%H")}, EDATE_GFS = {edate_gfs.strftime("%Y%m%d%H")}') + gfs_cyc = 0 + + base_out['gfs_cyc'] = gfs_cyc + base_out['SDATE_GFS'] = sdate_gfs + base_out['EDATE_GFS'] = edate_gfs + base_out['INTERVAL_GFS'] = interval_gfs + + fhmax_gfs = {} + for hh in ['00', '06', '12', '18']: + fhmax_gfs[hh] = base.get(f'FHMAX_GFS_{hh}', base.get('FHMAX_GFS_00', 120)) + base_out['FHMAX_GFS'] = fhmax_gfs + + return base_out diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py new file mode 100644 index 0000000000..564fd382b9 --- /dev/null +++ b/workflow/applications/gfs_forecast_only.py @@ -0,0 +1,146 @@ +from applications.applications import AppConfig +from wxflow import Configuration + + +class GFSForecastOnlyAppConfig(AppConfig): + ''' + Class to define GFS forecast-only configurations + ''' + + def __init__(self, conf: Configuration): + super().__init__(conf) + + def _get_app_configs(self): + """ + Returns the config_files that are involved in the forecast-only app + """ + + configs = ['stage_ic', 'fcst', 'arch', 'cleanup'] + + if self.do_atm: + + if self.do_upp: + configs += ['upp'] + + configs += ['atmos_products'] + + if self.do_aero: + configs += ['aerosol_init'] + + if self.do_tracker: + configs += ['tracker'] + + if self.do_genesis: + configs += ['genesis'] + + if self.do_genesis_fsu: + configs += ['genesis_fsu'] + + if self.do_metp: + configs += ['metp'] + + if self.do_bufrsnd: + configs += ['postsnd'] + + if self.do_gempak: + configs += ['gempak'] + + if self.do_awips: + configs += ['awips'] + + if self.do_ocean or self.do_ice: + configs += ['ocnpost'] + + if self.do_wave: + configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt'] + if self.do_wave_bnd: + configs += ['wavepostbndpnt', 'wavepostbndpntbll'] + if self.do_gempak: + configs += ['wavegempak'] + if self.do_awips: + configs += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_mos: + configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 
'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + + return configs + + @staticmethod + def _update_base(base_in): + + base_out = base_in.copy() + base_out['INTERVAL_GFS'] = AppConfig.get_gfs_interval(base_in['gfs_cyc']) + base_out['CDUMP'] = 'gfs' + + return base_out + + def get_task_names(self): + """ + Get the task names for all the tasks in the forecast-only application. + Note that the order of the task names matters in the XML. + This is the place where that order is set. + """ + + tasks = ['stage_ic'] + + if self.do_aero: + tasks += ['aerosol_init'] + + if self.do_wave: + tasks += ['waveinit'] + # tasks += ['waveprep'] # TODO - verify if waveprep is executed in forecast-only mode when APP=ATMW|S2SW + + tasks += ['fcst'] + + if self.do_atm: + + if self.do_upp: + tasks += ['atmupp'] + + tasks += ['atmprod'] + + if self.do_tracker: + tasks += ['tracker'] + + if self.do_genesis: + tasks += ['genesis'] + + if self.do_genesis_fsu: + tasks += ['genesis_fsu'] + + if self.do_metp: + tasks += ['metp'] + + if self.do_bufrsnd: + tasks += ['postsnd'] + + if self.do_gempak: + tasks += ['gempak', 'gempakmeta', 'gempakncdcupapgif', 'gempakpgrb2spec'] + + if self.do_awips: + tasks += ['awips_20km_1p0deg', 'awips_g2', 'fbwind'] + + if self.do_ocean or self.do_ice: + tasks += ['ocnpost'] + + if self.do_wave: + if self.do_wave_bnd: + tasks += ['wavepostbndpnt', 'wavepostbndpntbll'] + tasks += ['wavepostsbs', 'wavepostpnt'] + if self.do_gempak: + tasks += ['wavegempak'] + if self.do_awips: + tasks += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_mos: + tasks += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', + 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + + tasks += ['arch', 'cleanup'] # arch and cleanup **must** be the last tasks + + return {f"{self._base['CDUMP']}": tasks} diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py new file mode 100755 index 0000000000..7e0f350c0f --- /dev/null +++ b/workflow/create_experiment.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 + +""" +Basic python script to create an experiment directory on the fly from a given +yaml file containing the arguments to the two scripts below in ${HOMEgfs}/workflow, +where ${HOMEgfs} is determined from the location of this script. + + ${HOMEgfs}/workflow/setup_expt.py + ${HOMEgfs}/workflow/setup_xml.py + +The yaml file simply contains the arguments for these two scripts. +After this script runs, the experiment is ready for launch. + +Output +------ +Functionally, an experiment is set up by running the two scripts described above, +which return an error code of 0 upon success.
+""" + +import os +import sys + +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +from pathlib import Path + +from wxflow import AttrDict, parse_j2yaml, Logger, logit + +import setup_expt +import setup_xml + +from hosts import Host + +_here = os.path.dirname(__file__) +_top = os.path.abspath(os.path.join(os.path.abspath(_here), '..')) + +# Setup the logger +logger = Logger(logfile_path=os.environ.get("LOGFILE_PATH"), level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=False) + + +@logit(logger) +def input_args(): + """ + Description + ----------- + + Method to collect user arguments for `create_experiment.py` + + Parameters + ---------- + + None + + Returns + ------- + + argparse.Namespace: + argparse.Namespace with the value of the file path to a yaml file from the key yaml + """ + + description = """Create a global-workflow experiment""" + + parser = ArgumentParser(description=description, + formatter_class=ArgumentDefaultsHelpFormatter) + + parser.add_argument( + '--yaml', help='full path to yaml file describing the experiment configuration', type=Path, required=True) + + return parser.parse_args() + + +if __name__ == '__main__': + + user_inputs = input_args() + + # Create a dictionary to pass to parse_j2yaml for parsing the yaml file + data = AttrDict(HOMEgfs=_top) + data.update(os.environ) + testconf = parse_j2yaml(path=user_inputs.yaml, data=data) + + if 'skip_ci_on_hosts' in testconf: + host = Host() + if host.machine.lower() in [machine.lower() for machine in testconf.skip_ci_on_hosts]: + logger.info(f'Skipping creation of case: {testconf.arguments.pslot} on {host.machine.capitalize()}') + sys.exit(0) + + # Create a list of arguments to setup_expt.py + setup_expt_args = [testconf.experiment.system, testconf.experiment.mode] + for kk, vv in testconf.arguments.items(): + setup_expt_args.append(f"--{kk}") + setup_expt_args.append(str(vv)) + + logger.info(f"Call: setup_expt.main()") + logger.debug(f"setup_expt.py {' '.join(setup_expt_args)}") + setup_expt.main(setup_expt_args) + + # Create a list of arguments to setup_xml.py + experiment_dir = Path.absolute(Path.joinpath( + Path(testconf.arguments.expdir), Path(testconf.arguments.pslot))) + + setup_xml_args = [str(experiment_dir)] + + logger.info(f"Call: setup_xml.main()") + logger.debug(f"setup_xml.py {' '.join(setup_xml_args)}") + setup_xml.main(setup_xml_args) diff --git a/workflow/gw_setup.sh b/workflow/gw_setup.sh new file mode 100755 index 0000000000..930b1aab77 --- /dev/null +++ b/workflow/gw_setup.sh @@ -0,0 +1,15 @@ +#! /bin/bash + +# +# Resets the lmod environment and loads the modules necessary to run all the +# scripts necessary to prepare the workflow for use (checkout, experiment +# setup, etc.). +# +# This script should be SOURCED to properly setup the environment. +# + +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null 2>&1 && pwd )" +source "${HOMEgfs}/ush/detect_machine.sh" +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" diff --git a/workflow/hosts.py b/workflow/hosts.py index b97ac67d89..a17cd3f4a8 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -3,7 +3,7 @@ import os from pathlib import Path -from pygw.yaml_file import YAMLFile +from wxflow import YAMLFile __all__ = ['Host'] @@ -14,15 +14,16 @@ class Host: Gather Host specific information. 
""" - SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', - 'WCOSS2', 'S4', 'CONTAINER'] + SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', 'HERCULES', + 'WCOSS2', 'S4', 'CONTAINER', 'AWSPW'] def __init__(self, host=None): detected_host = self.detect() if host is not None and host != detected_host: - raise ValueError(f'detected host: "{detected_host}" does not match host: "{host}"') + raise ValueError( + f'detected host: "{detected_host}" does not match host: "{host}"') self.machine = detected_host self.info = self._get_info @@ -33,11 +34,15 @@ def detect(cls): machine = 'NOTFOUND' container = os.getenv('SINGULARITY_NAME', None) + pw_csp = os.getenv('PW_CSP', None) if os.path.exists('/scratch1/NCEPDEV'): machine = 'HERA' elif os.path.exists('/work/noaa'): - machine = 'ORION' + if os.path.exists('/apps/other'): + machine = 'HERCULES' + else: + machine = 'ORION' elif os.path.exists('/lfs4/HFIP'): machine = 'JET' elif os.path.exists('/lfs/f1'): @@ -46,6 +51,11 @@ def detect(cls): machine = 'S4' elif container is not None: machine = 'CONTAINER' + elif pw_csp is not None: + if pw_csp.lower() not in ['azure', 'aws', 'gcp']: + raise ValueError( + f'NOAA cloud service provider "{pw_csp}" is not supported.') + machine = f"{pw_csp.upper()}PW" if machine not in Host.SUPPORTED_HOSTS: raise NotImplementedError(f'This machine is not a supported host.\n' + @@ -57,7 +67,8 @@ def detect(cls): @property def _get_info(self) -> dict: - hostfile = Path(os.path.join(os.path.dirname(__file__), f'hosts/{self.machine.lower()}.yaml')) + hostfile = Path(os.path.join(os.path.dirname(__file__), + f'hosts/{self.machine.lower()}.yaml')) try: info = YAMLFile(path=hostfile) except FileNotFoundError: diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml new file mode 100644 index 0000000000..becb38e236 --- /dev/null +++ b/workflow/hosts/awspw.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' #TODO: This does not yet exist. +DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' # TODO: This does not yet exist. +PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' #TODO: This does not yet exist. +COMROOT: '/scratch1/NCEPDEV/global/glopara/com' #TODO: This does not yet exist. +COMINsyn: '${COMROOT}/gfs/prod/syndat' #TODO: This does not yet exist. +HOMEDIR: '/contrib/${USER}' +STMP: '/lustre/${USER}/stmp2/' +PTMP: '/lustre/${USER}/stmp4/' +NOSCRUB: ${HOMEDIR} +ACCOUNT: hwufscpldcld +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: compute +PARTITION_SERVICE: compute +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. +HPSSARCH: 'YES' +HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below. +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' # TODO: This will not yet work from AWS. +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions. 
diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml deleted file mode 120000 index 45a868d636..0000000000 --- a/workflow/hosts/hera.yaml +++ /dev/null @@ -1 +0,0 @@ -hera_gsl.yaml \ No newline at end of file diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml new file mode 100644 index 0000000000..9afde57de9 --- /dev/null +++ b/workflow/hosts/hera.yaml @@ -0,0 +1,25 @@ +BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' +DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' +BASE_CPLIC: '/scratch1/NCEPDEV/global/glopara/data/ICSDIR/prototype_ICs' +PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' +COMROOT: '/scratch1/NCEPDEV/global/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}' +STMP: '/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/emc_gw/FV3GFSrun/' +PTMP: '/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/emc_gw/FV3GFSrun/' +NOSCRUB: $HOMEDIR +ACCOUNT: gsd-fv3 +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: hera +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +HPSS_PROJECT: fim +LOCALARCH: 'NO' +ATARDIR: '/BMC/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/hercules.yaml b/workflow/hosts/hercules.yaml new file mode 100644 index 0000000000..e977091ba6 --- /dev/null +++ b/workflow/hosts/hercules.yaml @@ -0,0 +1,25 @@ +BASE_GIT: '/work/noaa/global/glopara/git' +DMPDIR: '/work/noaa/rstprod/dump' +BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs' +PACKAGEROOT: '/work/noaa/global/glopara/nwpara' +COMROOT: '/work/noaa/global/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/work/noaa/global/${USER}' +STMP: '/work/noaa/stmp/${USER}' +PTMP: '/work/noaa/stmp/${USER}' +NOSCRUB: $HOMEDIR +SCHEDULER: slurm +ACCOUNT: fv3-cpu +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: hercules +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml deleted file mode 120000 index ce0110730f..0000000000 --- a/workflow/hosts/jet.yaml +++ /dev/null @@ -1 +0,0 @@ -jet_gsl.yaml \ No newline at end of file diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml new file mode 100644 index 0000000000..313ce38dc2 --- /dev/null +++ b/workflow/hosts/jet.yaml @@ -0,0 +1,25 @@ +BASE_GIT: '/lfs4/HFIP/hfv3gfs/glopara/git' +DMPDIR: '/lfs4/HFIP/hfv3gfs/glopara/dump' +BASE_CPLIC: '/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs' +PACKAGEROOT: '/lfs4/HFIP/hfv3gfs/glopara/nwpara' +COMROOT: '/lfs4/HFIP/hfv3gfs/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/lfs4/HFIP/hfv3gfs/${USER}' +STMP: '/lfs4/HFIP/hfv3gfs/${USER}/stmp' +PTMP: '/lfs4/HFIP/hfv3gfs/${USER}/ptmp' +NOSCRUB: $HOMEDIR +ACCOUNT: hfv3gfs +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: kjet +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C384', 
'C192', 'C96', 'C48'] diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml index 095f126898..459aee7cf6 100644 --- a/workflow/hosts/orion.yaml +++ b/workflow/hosts/orion.yaml @@ -1,5 +1,6 @@ BASE_GIT: '/work/noaa/global/glopara/git' DMPDIR: '/work/noaa/rstprod/dump' +BASE_CPLIC: '/work/noaa/global/glopara/data/ICSDIR/prototype_ICs' PACKAGEROOT: '/work/noaa/global/glopara/nwpara' COMROOT: '/work/noaa/global/glopara/com' COMINsyn: '${COMROOT}/gfs/prod/syndat' @@ -21,4 +22,4 @@ LOCALARCH: 'NO' ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}' MAKE_NSSTBUFR: 'NO' MAKE_ACFTBUFR: 'NO' -SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] +SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml index 9c9866db21..01b28c1e6b 100644 --- a/workflow/hosts/s4.yaml +++ b/workflow/hosts/s4.yaml @@ -1,5 +1,6 @@ BASE_GIT: '/data/prod/glopara/git' DMPDIR: '/data/prod/glopara/dump' +BASE_CPLIC: '/data/prod/glopara/coupled_ICs' PACKAGEROOT: '/data/prod/glopara/nwpara' COMROOT: '/data/prod/glopara/com' COMINsyn: '${COMROOT}/gfs/prod/syndat' diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml index 0f1086801e..04a5949b2e 100644 --- a/workflow/hosts/wcoss2.yaml +++ b/workflow/hosts/wcoss2.yaml @@ -1,8 +1,9 @@ BASE_GIT: '/lfs/h2/emc/global/save/emc.global/git' -DMPDIR: '/lfs/h2/emc/global/noscrub/emc.global/dump' +DMPDIR: '/lfs/h2/emc/dump/noscrub/dump' +BASE_CPLIC: '/lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/prototype_ICs' PACKAGEROOT: '${PACKAGEROOT:-"/lfs/h1/ops/prod/packages"}' COMROOT: '${COMROOT:-"/lfs/h1/ops/prod/com"}' -COMINsyn: '${COMROOT}/gfs/${gfs_ver:-"v16.2"}/syndat' +COMINsyn: '${COMROOT}/gfs/v16.3/syndat' HOMEDIR: '/lfs/h2/emc/global/noscrub/${USER}' STMP: '/lfs/h2/emc/stmp/${USER}' PTMP: '/lfs/h2/emc/ptmp/${USER}' @@ -21,4 +22,4 @@ LOCALARCH: 'NO' ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' MAKE_NSSTBUFR: 'NO' MAKE_ACFTBUFR: 'NO' -SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] +SUPPORTED_RESOLUTIONS: ['C1152', 'C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/p8_ugwpv1.sh b/workflow/p8_ugwpv1.sh new file mode 100755 index 0000000000..6ec129797d --- /dev/null +++ b/workflow/p8_ugwpv1.sh @@ -0,0 +1,17 @@ +USER=Judy.K.Henderson +GITDIR=/scratch1/BMC/gsd-fv3-dev/jhender/test/gsl_ufs_dev/ ## where your git checkout is located +COMROT=$GITDIR/FV3GFSrun ## default COMROT directory +EXPDIR=$GITDIR/FV3GFSwfm ## default EXPDIR directory +ICSDIR=/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127 + +PSLOT=p8 +IDATE=2023112800 +EDATE=2023112800 +RESDET=768 ## 96 192 384 768 + +### gfs_cyc 1 00Z only; gfs_cyc 2 00Z and 12Z + +./setup_expt.py gfs forecast-only --pslot $PSLOT --gfs_cyc 1 \ + --idate $IDATE --edate $EDATE --resdet $RESDET \ + --comrot $COMROT --expdir $EXPDIR + diff --git a/workflow/prod.yml b/workflow/prod.yml index 98755d2f86..64783dd611 100644 --- a/workflow/prod.yml +++ b/workflow/prod.yml @@ -88,41 +88,6 @@ suites: FHR: 'f( )' HR: '( )' post_processing: - tasks: - jgfs_atmos_wafs_gcip: - triggers: - - task: jgfs_atmos_post_f003 - grib_wafs: - tasks: - jgfs_atmos_wafs_f000: - template: jgfs_atmos_wafs_master - triggers: - - task: jgfs_atmos_post_f000 - - task: jgfs_atmos_post_f120 - - task: jgfs_atmos_wafs_grib2 - edits: - FCSTHR: '000' - jgfs_atmos_wafs_f( 6,20,6 ): - template: jgfs_atmos_wafs_master - triggers: - - task: jgfs_atmos_post_f( ) - - task: jgfs_atmos_wafs_f( 0,,6 ) - edits: - FCSTHR: ( ) - grib2_wafs: - tasks: - 
jgfs_atmos_wafs_grib2: - triggers: - - task: jgfs_atmos_post_f000 - jgfs_atmos_wafs_grib2_0p25: - triggers: - - task: jgfs_atmos_post_f036 - jgfs_atmos_wafs_blending: - triggers: - - task: jgfs_atmos_wafs_grib2 - jgfs_atmos_wafs_blending_0p25: - triggers: - - task: jgfs_atmos_wafs_grib2_0p25 bufr_sounding: tasks: jgfs_atmos_postsnd: diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py new file mode 100644 index 0000000000..680c7d8686 --- /dev/null +++ b/workflow/rocoto/gefs_tasks.py @@ -0,0 +1,108 @@ +from applications.applications import AppConfig +from rocoto.tasks import Tasks, create_wf_task +import rocoto.rocoto as rocoto + + +class GEFSTasks(Tasks): + + def __init__(self, app_config: AppConfig, cdump: str) -> None: + super().__init__(app_config, cdump) + + def stage_ic(self): + + cpl_ic = self._configs['stage_ic'] + + deps = [] + + # Atm ICs + if self.app_config.do_atm: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/mem000/atmos" + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ocean ICs + if self.app_config.do_ocean: + ocn_res = f"{self._base.get('OCNRES', '025'):03d}" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/mem000/ocean" + data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + if ocn_res in ['025']: + # 0.25 degree ocean model also has these additional restarts + for res in [f'res_{res_index}' for res_index in range(1, 4)]: + data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ice ICs + if self.app_config.do_ice: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/mem000/ice" + data = f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Wave ICs + if self.app_config.do_wave: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/mem000/wave" + for wave_grid in self._configs['waveinit']['waveGRD'].split(): + data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('stage_ic') + task = create_wf_task('stage_ic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveinit(self): + + resources = self.get_resource('waveinit') + task = create_wf_task('waveinit', resources, cdump=self.cdump, envar=self.envars, dependency=None) + + return task + + def fcst(self): + # TODO: Add real dependencies + dependencies = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave: + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def efcs(self): + dependencies = [] + dep_dict = {'type': 
'task', 'name': f'{self.cdump}stage_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave: + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + efcsenvars = self.envars.copy() + efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP']) + + resources = self.get_resource('efcs') + task = create_wf_task('efcs', resources, cdump=self.cdump, envar=efcsenvars, dependency=dependencies, + metatask='efmn', varname='grp', varval=groups, cycledef='gefs') + + return task diff --git a/workflow/rocoto/gefs_xml.py b/workflow/rocoto/gefs_xml.py new file mode 100644 index 0000000000..b25a73fa6c --- /dev/null +++ b/workflow/rocoto/gefs_xml.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 + +from rocoto.workflow_xml import RocotoXML +from applications.applications import AppConfig +from wxflow import to_timedelta, timedelta_to_HMS +from typing import Dict + + +# Copy of GFSForecastOnlyRocotoXML for now, other than changing cycledef names from 'gfs' to 'gefs' +# If it remains this way, we can consolidate into a single forecast-only class +class GEFSRocotoXML(RocotoXML): + + def __init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: + super().__init__(app_config, rocoto_config) + + def get_cycledefs(self): + sdate = self._base['SDATE'] + edate = self._base['EDATE'] + interval = self._base.get('INTERVAL_GFS', to_timedelta('24H')) + sdate_str = sdate.strftime("%Y%m%d%H%M") + edate_str = edate.strftime("%Y%m%d%H%M") + interval_str = timedelta_to_HMS(interval) + strings = [] + strings.append(f'\t<cycledef group="gefs">{sdate_str} {edate_str} {interval_str}</cycledef>') + + sdate = sdate + interval + if sdate <= edate: + sdate_str = sdate.strftime("%Y%m%d%H%M") + strings.append(f'\t<cycledef group="gefs_seq">{sdate_str} {edate_str} {interval_str}</cycledef>') + + strings.append('') + strings.append('') + + return '\n'.join(strings)
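A quick illustration of what get_cycledefs() produces (hypothetical dates, not part of this change): with SDATE=2023112800, EDATE=2023113000, and the default 24-hour interval, the returned lines would look roughly like
	<cycledef group="gefs">202311280000 202311300000 24:00:00</cycledef>
	<cycledef group="gefs_seq">202311290000 202311300000 24:00:00</cycledef>
The second definition, offset by one interval, covers every cycle after the first, so tasks that must skip the initial cycle can bind to the "_seq" group (as aerosol_init does with 'gfs_seq' in gfs_tasks.py below).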
diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py new file mode 100644 index 0000000000..afd663c337 --- /dev/null +++ b/workflow/rocoto/gfs_cycled_xml.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 + +from rocoto.workflow_xml import RocotoXML +from applications.applications import AppConfig +from wxflow import to_timedelta, timedelta_to_HMS +from typing import Dict + + +class GFSCycledRocotoXML(RocotoXML): + + def __init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: + super().__init__(app_config, rocoto_config) + + def get_cycledefs(self): + sdate = self._base['SDATE'] + edate = self._base['EDATE'] + interval = to_timedelta(f"{self._base['assim_freq']}H") + sdate_str = sdate.strftime("%Y%m%d%H%M") + edate_str = edate.strftime("%Y%m%d%H%M") + interval_str = timedelta_to_HMS(interval) + strings = [] + strings.append(f'\t<cycledef group="gdas_half">{sdate_str} {sdate_str} {interval_str}</cycledef>') + sdate = sdate + interval + sdate_str = sdate.strftime("%Y%m%d%H%M") + strings.append(f'\t<cycledef group="gdas">{sdate_str} {edate_str} {interval_str}</cycledef>') + + if self._app_config.gfs_cyc != 0: + sdate_gfs = self._base['SDATE_GFS'] + edate_gfs = self._base['EDATE_GFS'] + interval_gfs = self._base['INTERVAL_GFS'] + sdate_gfs_str = sdate_gfs.strftime("%Y%m%d%H%M") + edate_gfs_str = edate_gfs.strftime("%Y%m%d%H%M") + interval_gfs_str = timedelta_to_HMS(interval_gfs) + strings.append(f'\t<cycledef group="gfs">{sdate_gfs_str} {edate_gfs_str} {interval_gfs_str}</cycledef>') + + sdate_gfs = sdate_gfs + interval_gfs + sdate_gfs_str = sdate_gfs.strftime("%Y%m%d%H%M") + if sdate_gfs <= edate_gfs: + strings.append(f'\t<cycledef group="gfs_seq">{sdate_gfs_str} {edate_gfs_str} {interval_gfs_str}</cycledef>') + + strings.append('') + strings.append('') + + return '\n'.join(strings) diff --git a/workflow/rocoto/gfs_forecast_only_xml.py b/workflow/rocoto/gfs_forecast_only_xml.py new file mode 100644 index 0000000000..cf53e685e9 --- /dev/null +++ b/workflow/rocoto/gfs_forecast_only_xml.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 + +from rocoto.workflow_xml import RocotoXML +from applications.applications import AppConfig +from wxflow import to_timedelta, timedelta_to_HMS +from typing import Dict + + +class GFSForecastOnlyRocotoXML(RocotoXML): + + def __init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: + super().__init__(app_config, rocoto_config) + + def get_cycledefs(self): + sdate = self._base['SDATE'] + edate = self._base['EDATE'] + interval = self._base.get('INTERVAL_GFS', to_timedelta('24H')) + strings = [] + strings.append(f'\t<cycledef group="gfs">{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {timedelta_to_HMS(interval)}</cycledef>') + + sdate = sdate + interval + if sdate <= edate: + strings.append(f'\t<cycledef group="gfs_seq">{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {timedelta_to_HMS(interval)}</cycledef>') + + strings.append('') + strings.append('') + + return '\n'.join(strings) diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py new file mode 100644 index 0000000000..18208983b8 --- /dev/null +++ b/workflow/rocoto/gfs_tasks.py @@ -0,0 +1,1642 @@ +from applications.applications import AppConfig +from rocoto.tasks import Tasks, create_wf_task +from wxflow import timedelta_to_HMS +import rocoto.rocoto as rocoto +import numpy as np + + +class GFSTasks(Tasks): + + def __init__(self, app_config: AppConfig, cdump: str) -> None: + super().__init__(app_config, cdump) + + @staticmethod + def _is_this_a_gdas_task(cdump, task_name): + if cdump != 'enkfgdas': + raise TypeError(f'{task_name} must be part of the "enkfgdas" cycle and not {cdump}') + + # Specific Tasks begin here + def stage_ic(self): + + cpl_ic = self._configs['stage_ic'] + + deps = [] + + # Atm ICs + if self.app_config.do_atm: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/atmos" + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + else: # data-atmosphere + # TODO - need more information about how these forcings are stored + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_DATM']}/@Y@m@d@H" + data = f"{prefix}/gefs.@Y@m.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ocean ICs + if self.app_config.do_ocean: + ocn_res = f"{self._base.get('OCNRES', '025'):03d}" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocean" + data = f"{prefix}/@Y@m@d.@H0000.MOM.res.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + if ocn_res in ['025']: + # 0.25 degree ocean model also has these additional restarts + for res in [f'res_{res_index}' for res_index in range(1, 4)]: + data = f"{prefix}/@Y@m@d.@H0000.MOM.{res}.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ice ICs + if self.app_config.do_ice: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/ice" + data =
f"{prefix}/@Y@m@d.@H0000.cice_model.res.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Wave ICs + if self.app_config.do_wave: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wave" + for wave_grid in self._configs['waveinit']['waveGRD'].split(): + data = f"{prefix}/@Y@m@d.@H0000.restart.{wave_grid}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('stage_ic') + task = create_wf_task('stage_ic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def prep(self): + + dump_suffix = self._base["DUMP_SUFFIX"] + gfs_cyc = self._base["gfs_cyc"] + dmpdir = self._base["DMPDIR"] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'RUN': 'gdas'}) + dump_path = self._template_to_rocoto_cycstring(self._base["COM_OBSDMP_TMPL"], + {'DMPDIR': dmpdir, 'DUMP_SUFFIX': dump_suffix}) + + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdasatmprod', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{dump_path}/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('prep') + task = create_wf_task('prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def waveinit(self): + + resources = self.get_resource('waveinit') + dependencies = None + cycledef = None + if self.app_config.mode in ['cycled']: + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.cdump in ['gdas']: + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + task = create_wf_task('waveinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) + + return task + + def waveprep(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + resources = self.get_resource('waveprep') + task = create_wf_task('waveprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, cycledef=cycledef) + + return task + + def aerosol_init(self): + + input_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_INPUT_TMPL']) + restart_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_RESTART_TMPL']) + + deps = [] + # Files from current cycle + 
files = ['gfs_ctrl.nc'] + [f'gfs_data.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + for file in files: + data = f'{input_path}/{file}' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Calculate offset based on CDUMP = gfs | gdas + interval = None + if self.cdump in ['gfs']: + interval = self._base['INTERVAL_GFS'] + elif self.cdump in ['gdas']: + interval = self._base['INTERVAL'] + offset = timedelta_to_HMS(-interval) + + # Files from previous cycle + files = [f'@Y@m@d.@H0000.fv_core.res.nc'] + \ + [f'@Y@m@d.@H0000.fv_core.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + \ + [f'@Y@m@d.@H0000.fv_tracer.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + + for file in files: + data = [f'{restart_path}/', file] + dep_dict = {'type': 'data', 'data': data, 'offset': [offset, None]} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gfs_seq' + resources = self.get_resource('aerosol_init') + task = create_wf_task('aerosol_init', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def anal(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('anal') + task = create_wf_task('anal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def sfcanl(self): + + deps = [] + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanl'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('sfcanl') + task = create_wf_task('sfcanl', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def analcalc(self): + + deps = [] + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar and self.cdump in ['gdas']: + dep_dict = {'type': 'task', 'name': 'enkfgdasechgres', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analcalc') + task = create_wf_task('analcalc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def analdiag(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = 
rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analdiag') + task = create_wf_task('analdiag', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def prepatmiodaobs(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('prepatmiodaobs') + task = create_wf_task('prepatmiodaobs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prepatmiodaobs'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + gfs_cyc = self._base["gfs_cyc"] + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('atmanlinit') + task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmanlrun') + task = create_wf_task('atmanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmanlfinal') + task = create_wf_task('atmanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('aeroanlinit') + task = create_wf_task('aeroanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def aeroanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlrun') + task = create_wf_task('aeroanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('aeroanlfinal') + task = create_wf_task('aeroanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def preplandobs(self): + + deps = [] + dep_dict = 
{'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('preplandobs') + task = create_wf_task('preplandobs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def landanl(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}preplandobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('landanl') + task = create_wf_task('landanl', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def ocnanalprep(self): + + ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) + + deps = [] + data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalprep') + task = create_wf_task('ocnanalprep', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalbmat(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalprep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalbmat') + task = create_wf_task('ocnanalbmat', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalbmat'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalrun') + task = create_wf_task('ocnanalrun', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalchkpt(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalrun'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_mergensst: + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.sfcanl.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalchkpt') + task = create_wf_task('ocnanalchkpt', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalchkpt'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalpost') + task = create_wf_task('ocnanalpost', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalvrfy(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalvrfy') + task = create_wf_task('ocnanalvrfy', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def fcst(self): + + fcst_map = 
{'forecast-only': self._fcst_forecast_only, + 'cycled': self._fcst_cycled} + + try: + task = fcst_map[self.app_config.mode]() + except KeyError: + raise NotImplementedError(f'{self.app_config.mode} is not a valid type.\n' + + 'Currently supported forecast types are:\n' + + f'{" | ".join(fcst_map.keys())}') + + return task + + def _fcst_forecast_only(self): + dependencies = [] + + dep_dict = {'type': 'task', 'name': f'{self.cdump}stage_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: + wave_job = 'waveprep' if self.app_config.model_app in ['ATMW'] else 'waveinit' + dep_dict = {'type': 'task', 'name': f'{self.cdump}{wave_job}'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_aero: + # Calculate offset based on CDUMP = gfs | gdas + interval = None + if self.cdump in ['gfs']: + interval = self._base['INTERVAL_GFS'] + elif self.cdump in ['gdas']: + interval = self._base['INTERVAL'] + offset = timedelta_to_HMS(-interval) + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aerosol_init'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': offset} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies.append(rocoto.create_dependency(dep_condition='or', dep=deps)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def _fcst_cycled(self): + + dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + dep = rocoto.add_dependency(dep_dict) + dependencies = rocoto.create_dependency(dep=dep) + + if self.app_config.do_jediocnvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_aero: + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanl'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + if self.cdump in ['gdas']: + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + + if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveprep'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmanlupp(self): + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': 'f000', + 'UPP_RUN': 'analysis'} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + atm_anl_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_ANALYSIS_TMPL"]) + deps = [] + data = 
f'{atm_anl_path}/{self.cdump}.t@Hz.atmanl.nc' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_anl_path}/{self.cdump}.t@Hz.sfcanl.nc' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_anl_path}/{self.cdump}.t@Hz.loganl.txt' + dep_dict = {'type': 'data', 'data': data, 'age': 60} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps, dep_condition='and') + resources = self.get_resource('upp') + task = create_wf_task('atmanlupp', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + cycledef=self.cdump, command='&JOBS_DIR;/upp.sh') + + return task + + def atmanlprod(self): + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': '-f001'} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"]) + deps = [] + data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2anl' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + resources = self.get_resource('atmos_products') + task = create_wf_task('atmanlprod', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + cycledef=self.cdump, command='&JOBS_DIR;/atmos_products.sh') + + return task + + @staticmethod + def _get_ufs_postproc_grps(cdump, config): + + fhmin = config['FHMIN'] + fhmax = config['FHMAX'] + fhout = config['FHOUT'] + + # Get a list of all forecast hours + fhrs = [] + if cdump in ['gdas']: + fhrs = range(fhmin, fhmax + fhout, fhout) + elif cdump in ['gfs']: + fhmax = np.max( + [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']]) + fhout = config['FHOUT_GFS'] + fhmax_hf = config['FHMAX_HF_GFS'] + fhout_hf = config['FHOUT_HF_GFS'] + fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf) + fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) + + nfhrs_per_grp = config.get('NFHRS_PER_GROUP', 1) + ngrps = len(fhrs) // nfhrs_per_grp if len(fhrs) % nfhrs_per_grp == 0 else len(fhrs) // nfhrs_per_grp + 1 + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join(f'_{fhr[0]}-{fhr[-1]}' if len(fhr) > 1 else f'_{fhr[0]}' for fhr in fhrs) + dep = ' '.join([fhr[-1] for fhr in fhrs]) + lst = ' '.join(['_'.join(fhr) for fhr in fhrs]) + + return grp, dep, lst + + def atmupp(self): + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['upp']) + vardict = {varname2: varval2, varname3: varval3} + + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': '#lst#', + 'UPP_RUN': 'forecast'} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) + deps = [] + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm#dep#.nc' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_hist_path}/{self.cdump}.t@Hz.sfc#dep#.nc' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + data = 
f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt' + dep_dict = {'type': 'data', 'data': data, 'age': 60} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps, dep_condition='and') + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + resources = self.get_resource('upp') + task = create_wf_task('atmupp', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + metatask='atmupp', varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef, + command='&JOBS_DIR;/upp.sh') + + return task + + def atmprod(self): + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['atmos_products']) + vardict = {varname2: varval2, varname3: varval3} + + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': '#lst#'} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + atm_master_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_MASTER_TMPL"]) + deps = [] + data = f'{atm_master_path}/{self.cdump}.t@Hz.master.grb2#dep#' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + resources = self.get_resource('atmos_products') + task = create_wf_task('atmprod', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + metatask='atmprod', varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef, + command='&JOBS_DIR;/atmos_products.sh') + + return task + + def ocnpost(self): + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = self._get_ufs_postproc_grps(self.cdump, self._configs['ocnpost']) + vardict = {varname2: varval2, varname3: varval3} + + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + deps = [] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + resources = self.get_resource('ocnpost') + task = create_wf_task('ocnpost', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + metatask='ocnpost', varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef) + + return task + + def wavepostsbs(self): + deps = [] + for wave_grid in self._configs['wavepostsbs']['waveGRD'].split(): + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('wavepostsbs') + task = create_wf_task('wavepostsbs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def 
wavepostbndpnt(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavepostbndpnt') + task = create_wf_task('wavepostbndpnt', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavepostbndpntbll(self): + deps = [] + atmos_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) + data = f'{atmos_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavepostbndpntbll') + task = create_wf_task('wavepostbndpntbll', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def wavepostpnt(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave_bnd: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpntbll'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('wavepostpnt') + task = create_wf_task('wavepostpnt', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavegempak(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavegempak') + task = create_wf_task('wavegempak', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveawipsbulls(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('waveawipsbulls') + task = create_wf_task('waveawipsbulls', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveawipsgridded(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('waveawipsgridded') + task = create_wf_task('waveawipsgridded', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def postsnd(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('postsnd') + task = create_wf_task('postsnd', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def fbwind(self): + + atmos_prod_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_GRIB_GRID_TMPL"], {'RUN': self.cdump, 'GRID': '0p25'}) + deps = [] + data = f'{atmos_prod_path}/{self.cdump}.t@Hz.pgrb2.0p25.f006' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atmos_prod_path}/{self.cdump}.t@Hz.pgrb2.0p25.f012' + dep_dict = {'type': 'data', 
'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atmos_prod_path}/{self.cdump}.t@Hz.pgrb2.0p25.f024' + dep_dict = {'type': 'data', 'data': data, 'age': 120} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps, dep_condition='and') + + resources = self.get_resource('awips') + + # TODO: It would be better to use task dependencies on the + # individual post jobs rather than data dependencies to avoid + # prematurely starting with partial files. Unfortunately, the + # ability to "group" post would make this more convoluted than + # it should be and not worth the complexity. + task = create_wf_task('fbwind', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + @staticmethod + def _get_awipsgroups(cdump, config): + + fhmin = config['FHMIN'] + fhmax = config['FHMAX'] + fhout = config['FHOUT'] + + # Get a list of all forecast hours + fhrs = [] + if cdump in ['gdas']: + fhrs = range(fhmin, fhmax + fhout, fhout) + elif cdump in ['gfs']: + fhmax = np.max( + [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']]) + fhout = config['FHOUT_GFS'] + fhmax_hf = config['FHMAX_HF_GFS'] + fhout_hf = config['FHOUT_HF_GFS'] + if fhmax > 240: + fhmax = 240 + if fhmax_hf > 240: + fhmax_hf = 240 + fhrs_hf = list(range(fhmin, fhmax_hf + fhout_hf, fhout_hf)) + fhrs = fhrs_hf + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) + + nawipsgrp = config['NAWIPSGRP'] + ngrps = nawipsgrp if len(fhrs) > nawipsgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join([f'_{fhr[0]}-{fhr[-1]}' for fhr in fhrs]) + dep = ' '.join([fhr[-1] for fhr in fhrs]) + lst = ' '.join(['_'.join(fhr) for fhr in fhrs]) + + return grp, dep, lst + + def awips_20km_1p0deg(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + awipsenvars = self.envars.copy() + awipsenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in awipsenvar_dict.items(): + awipsenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = self._get_awipsgroups(self.cdump, self._configs['awips']) + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('awips') + task = create_wf_task('awips_20km_1p0deg', resources, cdump=self.cdump, envar=awipsenvars, dependency=dependencies, + metatask='awips_20km_1p0deg', varname=varname1, varval=varval1, vardict=vardict) + + return task + + def awips_g2(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + awipsenvars = self.envars.copy() + awipsenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in awipsenvar_dict.items(): + awipsenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = self._get_awipsgroups(self.cdump, self._configs['awips']) + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('awips') + task = create_wf_task('awips_g2', resources, 
cdump=self.cdump, envar=awipsenvars, dependency=dependencies, + metatask='awips_g2', varname=varname1, varval=varval1, vardict=vardict) + + return task + + def gempak(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempak', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def gempakmeta(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempakmeta', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def gempakmetancdc(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempakmetancdc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def gempakncdcupapgif(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempakncdcupapgif', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def gempakpgrb2spec(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}npoess_pgrb2_0p5deg'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempakpgrb2spec', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def npoess_pgrb2_0p5deg(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('npoess') + task = create_wf_task('npoess_pgrb2_0p5deg', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def verfozn(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('verfozn') + task = create_wf_task('verfozn', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def verfrad(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('verfrad') + task = create_wf_task('verfrad', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def vminmon(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('vminmon') + task = create_wf_task('vminmon', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def tracker(self): + deps = [] + dep_dict = {'type':
'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('tracker') + task = create_wf_task('tracker', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def genesis(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('genesis') + task = create_wf_task('genesis', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def genesis_fsu(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('genesis_fsu') + task = create_wf_task('genesis_fsu', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def fit2obs(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('fit2obs') + task = create_wf_task('fit2obs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def metp(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + metpenvars = self.envars.copy() + if self.app_config.mode in ['cycled']: + metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE_GFS').strftime("%Y%m%d%H")} + elif self.app_config.mode in ['forecast-only']: + metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE').strftime("%Y%m%d%H")} + metpenvar_dict['METPCASE'] = '#metpcase#' + for key, value in metpenvar_dict.items(): + metpenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1 = 'metpcase' + varval1 = 'g2g1 g2o1 pcp1' + + resources = self.get_resource('metp') + task = create_wf_task('metp', resources, cdump=self.cdump, envar=metpenvars, dependency=dependencies, + metatask='metp', varname=varname1, varval=varval1) + + return task + + def mos_stn_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_stn_prep') + task = create_wf_task('mos_stn_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_grd_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_grd_prep') + task = create_wf_task('mos_grd_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_stn_prep(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_ext_stn_prep') + task = create_wf_task('mos_ext_stn_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_grd_prep(self): 
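# As with the three MOS prep methods above, the only trigger is the {cdump}atmprod metatask; ordering among the MOS fcst and prdgen stages is enforced by their own task dependencies below.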
+ deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_ext_grd_prep') + task = create_wf_task('mos_ext_grd_prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_stn_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_stn_fcst') + task = create_wf_task('mos_stn_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_grd_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_grd_fcst') + task = create_wf_task('mos_grd_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_stn_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_stn_fcst') + task = create_wf_task('mos_ext_stn_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_grd_fcst(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_grd_fcst') + task = create_wf_task('mos_ext_grd_fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_stn_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_stn_prdgen') + task = create_wf_task('mos_stn_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_grd_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_grd_prdgen') + task = create_wf_task('mos_grd_prdgen', 
resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_stn_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_stn_prdgen') + task = create_wf_task('mos_ext_stn_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_ext_grd_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_stn_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_ext_grd_prdgen') + task = create_wf_task('mos_ext_grd_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_wx_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_grd_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('mos_wx_prdgen') + task = create_wf_task('mos_wx_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def mos_wx_ext_prdgen(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_ext_grd_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_wx_prdgen'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('mos_wx_ext_prdgen') + task = create_wf_task('mos_wx_ext_prdgen', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def arch(self): + deps = [] + dependencies = [] + if self.app_config.mode in ['cycled']: + if self.cdump in ['gfs']: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlprod'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_vminmon: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'} + deps.append(rocoto.add_dependency(dep_dict)) + elif self.cdump in ['gdas']: # Block for handling half cycle dependencies + deps2 = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlprod'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_fit2obs: + dep_dict = {'type': 'task', 'name': f'{self.cdump}fit2obs'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_verfozn: + dep_dict = {'type': 'task', 'name': f'{self.cdump}verfozn'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_verfrad: + dep_dict = {'type': 'task', 'name': f'{self.cdump}verfrad'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_vminmon: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'} + 
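# deps2 gathers the gdas-cycle verification tasks; they are AND'ed together below, then OR'ed with a "cycleexist not" check so the half cycle (which runs no verification) can still archive.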
deps2.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps2) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + if self.cdump in ['gfs'] and self.app_config.do_tracker: + dep_dict = {'type': 'task', 'name': f'{self.cdump}tracker'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.cdump in ['gfs'] and self.app_config.do_genesis: + dep_dict = {'type': 'task', 'name': f'{self.cdump}genesis'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.cdump in ['gfs'] and self.app_config.do_genesis_fsu: + dep_dict = {'type': 'task', 'name': f'{self.cdump}genesis_fsu'} + deps.append(rocoto.add_dependency(dep_dict)) + # Post job dependencies + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmprod'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave_bnd: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_ocean: + if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost to run in cycled mode + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'} + deps.append(rocoto.add_dependency(dep_dict)) + # MOS job dependencies + if self.cdump in ['gfs'] and self.app_config.do_mos: + mos_jobs = ["stn_prep", "grd_prep", "ext_stn_prep", "ext_grd_prep", + "stn_fcst", "grd_fcst", "ext_stn_fcst", "ext_grd_fcst", + "stn_prdgen", "grd_prdgen", "ext_stn_prdgen", "ext_grd_prdgen", + "wx_prdgen", "wx_ext_prdgen"] + for job in mos_jobs: + dep_dict = {'type': 'task', 'name': f'{self.cdump}mos_{job}'} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps + dependencies) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('arch') + task = create_wf_task('arch', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + # Cleanup + def cleanup(self): + deps = [] + if 'enkf' in self.cdump: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eamn'} + deps.append(rocoto.add_dependency(dep_dict)) + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}arch'} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('cleanup') + task = create_wf_task('cleanup', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + # Start of ensemble tasks + def eobs(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('eobs') + task = create_wf_task('eobs', resources, cdump=self.cdump, envar=self.envars, 
dependency=dependencies) + + return task + + def eomg(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + eomgenvars = self.envars.copy() + eomgenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['eobs']['NMEM_EOMGGRP']) + + resources = self.get_resource('eomg') + task = create_wf_task('eomg', resources, cdump=self.cdump, envar=eomgenvars, dependency=dependencies, + metatask='eomn', varname='grp', varval=groups) + + return task + + def ediag(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ediag') + task = create_wf_task('ediag', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def eupd(self): + deps = [] + if self.app_config.lobsdiag_forenkf: + dep_dict = {'type': 'task', 'name': f'{self.cdump}ediag'} + else: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eomn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('eupd') + task = create_wf_task('eupd', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanlinit(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prepatmiodaobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = "gdas" + resources = self.get_resource('atmensanlinit') + task = create_wf_task('atmensanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmensanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanlrun') + task = create_wf_task('atmensanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmensanlfinal') + task = create_wf_task('atmensanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def ecen(self): + + def _get_ecengroups(): + + if self._base.get('DOIAU_ENKF', False): + fhrs = list(self._base.get('IAUFHRS', '6').split(',')) + + necengrp = self._configs['ecen']['NECENGRP'] + ngrps = necengrp if len(fhrs) > necengrp else len(fhrs) + + fhrs = [f'{int(fhr):03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) + dep = ' '.join([f[-1] for f in fhrs]) + 
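+ # Worked example: with IAUFHRS='3,6,9' and NECENGRP=1, ngrps is 1, so
+ # grp -> '000', dep -> '009', and lst (built next) -> '003_006_009',
+ # i.e. one metatask member whose FHRLST covers all three IAU forecast hours.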
lst = ' '.join(['_'.join(f) for f in fhrs]) + + else: + grp = '000' + dep = 'f006' + lst = 'f006' + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + ecenenvars = self.envars.copy() + ecenenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#'} + for key, value in ecenenvar_dict.items(): + ecenenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_ecengroups() + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('ecen') + task = create_wf_task('ecen', resources, cdump=self.cdump, envar=ecenenvars, dependency=dependencies, + metatask='ecmn', varname=varname1, varval=varval1, vardict=vardict) + return task + + def esfc(self): + + # eupd_cdump = 'gdas' if 'gdas' in self.app_config.eupd_cdumps else 'gfs' + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('esfc') + task = create_wf_task('esfc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def efcs(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ecmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}esfc'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + + efcsenvars = self.envars.copy() + efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP']) + + if self.cdump == "enkfgfs": + groups = self._get_hybgroups(self._base['NMEM_ENS_GFS'], self._configs['efcs']['NMEM_EFCSGRP_GFS']) + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + resources = self.get_resource('efcs') + task = create_wf_task('efcs', resources, cdump=self.cdump, envar=efcsenvars, dependency=dependencies, + metatask='efmn', varname='grp', varval=groups, cycledef=cycledef) + + return task + + def echgres(self): + + self._is_this_a_gdas_task(self.cdump, 'echgres') + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}efcs01'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else 
self.cdump + + resources = self.get_resource('echgres') + task = create_wf_task('echgres', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def epos(self): + + def _get_eposgroups(epos): + fhmin = epos['FHMIN_ENKF'] + fhmax = epos['FHMAX_ENKF'] + fhout = epos['FHOUT_ENKF'] + if self.cdump == "enkfgfs": + fhmax = epos['FHMAX_ENKF_GFS'] + fhout = epos['FHOUT_ENKF_GFS'] + fhrs = range(fhmin, fhmax + fhout, fhout) + + neposgrp = epos['NEPOSGRP'] + ngrps = neposgrp if len(fhrs) > neposgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [f.tolist() for f in fhrs] + + grp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) + dep = ' '.join([f[-1] for f in fhrs]) + lst = ' '.join(['_'.join(f) for f in fhrs]) + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}efmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + eposenvars = self.envars.copy() + eposenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#'} + for key, value in eposenvar_dict.items(): + eposenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_eposgroups(self._configs['epos']) + vardict = {varname2: varval2, varname3: varval3} + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + + resources = self.get_resource('epos') + task = create_wf_task('epos', resources, cdump=self.cdump, envar=eposenvars, dependency=dependencies, + metatask='epmn', varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef) + + return task + + def earc(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}epmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + earcenvars = self.envars.copy() + earcenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['earc']['NMEM_EARCGRP'], start_index=0) + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + + resources = self.get_resource('earc') + task = create_wf_task('earc', resources, cdump=self.cdump, envar=earcenvars, dependency=dependencies, + metatask='eamn', varname='grp', varval=groups, cycledef=cycledef) + + return task diff --git a/workflow/rocoto/rocoto.py b/workflow/rocoto/rocoto.py index b3f88f01d3..c4240622d4 100644 --- a/workflow/rocoto/rocoto.py +++ b/workflow/rocoto/rocoto.py @@ -115,8 +115,6 @@ def create_task(task_dict: Dict[str, Any]) -> List[str]: strings.append(f'\t\t{d}\n') strings.append('\t</dependency>\n') strings.append('\n') - elif taskname != "gfswaveinit": - print("WARNING: No dependencies for task " + taskname) strings.append('</task>\n') @@ -193,6 +191,7 @@ def _add_data_tag(dep_dict: Dict[str, Any]) -> str: dep_type = dep_dict.get('type', None) dep_data = dep_dict.get('data', None) dep_offset = dep_dict.get('offset', None) + dep_age = dep_dict.get('age', None) if dep_data is None: msg = f'a data value is necessary for {dep_type} dependency' @@ -206,7 +205,10 @@ assert len(dep_data) == len(dep_offset) - strings = ['<datadep>'] + if dep_age is None: + strings = ['<datadep>'] + else: + strings = [f'<datadep age="{dep_age}">'] + for data, offset in zip(dep_data, dep_offset): + if '@' in data: + offset_str = '' if offset in [None, ''] else f' 
offset="{offset}"' diff --git a/workflow/rocoto/rocoto_xml_factory.py b/workflow/rocoto/rocoto_xml_factory.py new file mode 100644 index 0000000000..cb2d4c276c --- /dev/null +++ b/workflow/rocoto/rocoto_xml_factory.py @@ -0,0 +1,10 @@ +from wxflow import Factory +from rocoto.gfs_cycled_xml import GFSCycledRocotoXML +from rocoto.gfs_forecast_only_xml import GFSForecastOnlyRocotoXML +from rocoto.gefs_xml import GEFSRocotoXML + + +rocoto_xml_factory = Factory('RocotoXML') +rocoto_xml_factory.register('gfs_cycled', GFSCycledRocotoXML) +rocoto_xml_factory.register('gfs_forecast-only', GFSForecastOnlyRocotoXML) +rocoto_xml_factory.register('gefs_forecast-only', GEFSRocotoXML) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py new file mode 100644 index 0000000000..1dfcd07c91 --- /dev/null +++ b/workflow/rocoto/tasks.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python3 + +import numpy as np +from applications.applications import AppConfig +import rocoto.rocoto as rocoto +from wxflow import Template, TemplateConstants, to_timedelta + +__all__ = ['Tasks', 'create_wf_task'] + + +class Tasks: + SERVICE_TASKS = ['arch', 'earc'] + VALID_TASKS = ['aerosol_init', 'stage_ic', + 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup", + 'prepatmiodaobs', 'atmanlinit', 'atmanlrun', 'atmanlfinal', + 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', + 'earc', 'ecen', 'echgres', 'ediag', 'efcs', + 'eobs', 'eomg', 'epos', 'esfc', 'eupd', + 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', + 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', + 'preplandobs', 'landanl', + 'fcst', + 'atmanlupp', 'atmanlprod', 'atmupp', 'atmprod', + 'ocnpost', + 'verfozn', 'verfrad', 'vminmon', + 'metp', + 'tracker', 'genesis', 'genesis_fsu', + 'postsnd', 'awips_g2', 'awips_20km_1p0deg', 'fbwind', + 'gempak', 'gempakmeta', 'gempakmetancdc', 'gempakncdcupapgif', 'gempakpgrb2spec', 'npoess_pgrb2_0p5deg', + 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', + 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep', + 'npoess', + 'mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', + 'mos_stn_fcst', 'mos_grd_fcst', 'mos_ext_stn_fcst', 'mos_ext_grd_fcst', + 'mos_stn_prdgen', 'mos_grd_prdgen', 'mos_ext_stn_prdgen', 'mos_ext_grd_prdgen', 'mos_wx_prdgen', 'mos_wx_ext_prdgen'] + + def __init__(self, app_config: AppConfig, cdump: str) -> None: + + self.app_config = app_config + self.cdump = cdump + + # Save dict_configs and base in the internal state (never know where it may be needed) + self._configs = self.app_config.configs + self._base = self._configs['base'] + self._base['cycle_interval'] = to_timedelta(f'{self._base["assim_freq"]}H') + + self.n_tiles = 6 # TODO - this needs to be elsewhere + + envar_dict = {'RUN_ENVIR': self._base.get('RUN_ENVIR', 'emc'), + 'HOMEgfs': self._base.get('HOMEgfs'), + 'EXPDIR': self._base.get('EXPDIR'), + 'ROTDIR': self._base.get('ROTDIR'), + 'NET': self._base.get('NET'), + 'CDUMP': self.cdump, + 'RUN': self.cdump, + 'CDATE': '@Y@m@d@H', + 'PDY': '@Y@m@d', + 'cyc': '@H', + 'COMROOT': self._base.get('COMROOT'), + 'DATAROOT': self._base.get('DATAROOT')} + self.envars = self._set_envars(envar_dict) + + @staticmethod + def _set_envars(envar_dict) -> list: + + envars = [] + for key, value in envar_dict.items(): + envars.append(rocoto.create_envar(name=key, value=str(value))) + + return envars + + @staticmethod + def _get_hybgroups(nens: int, nmem_per_group: int, start_index: int = 1): + ngrps = nens / 
nmem_per_group + groups = ' '.join([f'{x:02d}' for x in range(start_index, int(ngrps) + 1)]) + return groups + + def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> str: + ''' + Takes a string templated with ${ } and converts it into a string suitable + for use in a rocoto <cyclestr>. Some common substitutions are defined by + default. Any additional variables in the template and overrides of the + defaults can be passed in by an optional dict. + + Variables substituted by default: + ${ROTDIR} -> '&ROTDIR;' + ${RUN} -> self.cdump + ${DUMP} -> self.cdump + ${MEMDIR} -> '' + ${YMD} -> '@Y@m@d' + ${HH} -> '@H' + + Parameters + ---------- + template: str + Template string with variables to be replaced + subs_dict: dict, optional + Dictionary containing substitutions + + Returns + ------- + str + Updated string with variables substituted + + ''' + + # Defaults + rocoto_conversion_dict = { + 'ROTDIR': '&ROTDIR;', + 'RUN': self.cdump, + 'DUMP': self.cdump, + 'MEMDIR': '', + 'YMD': '@Y@m@d', + 'HH': '@H' + } + + rocoto_conversion_dict.update(subs_dict) + + return Template.substitute_structure(template, + TemplateConstants.DOLLAR_CURLY_BRACE, + rocoto_conversion_dict.get) + + def get_resource(self, task_name): + """ + Given a task name (task_name) and its configuration (task_config), + return a dictionary of resources (task_resource) used by the task. + Task resource dictionary includes: + account, walltime, cores, nodes, ppn, threads, memory, queue, partition, native + """ + + scheduler = self.app_config.scheduler + + task_config = self._configs[task_name] + + account = task_config['ACCOUNT'] + + walltime = task_config[f'wtime_{task_name}'] + if self.cdump in ['gfs'] and f'wtime_{task_name}_gfs' in task_config.keys(): + walltime = task_config[f'wtime_{task_name}_gfs'] + + cores = task_config[f'npe_{task_name}'] + if self.cdump in ['gfs'] and f'npe_{task_name}_gfs' in task_config.keys(): + cores = task_config[f'npe_{task_name}_gfs'] + + ppn = task_config[f'npe_node_{task_name}'] + if self.cdump in ['gfs'] and f'npe_node_{task_name}_gfs' in task_config.keys(): + ppn = task_config[f'npe_node_{task_name}_gfs'] + + nodes = int(np.ceil(float(cores) / float(ppn))) + + threads = task_config[f'nth_{task_name}'] + if self.cdump in ['gfs'] and f'nth_{task_name}_gfs' in task_config.keys(): + threads = task_config[f'nth_{task_name}_gfs'] + + memory = task_config.get(f'memory_{task_name}', None) + if scheduler in ['pbspro']: + # Guard against no memory spec: only append when a memory request exists + if memory is not None and task_config.get('prepost', False): + memory += ':prepost=true' + + native = None + if scheduler in ['pbspro']: + native = '-l debug=true,place=vscatter' + # Set either exclusive or shared - default on WCOSS2 is exclusive when not set + if task_config.get('is_exclusive', False): + native += ':exclhost' + else: + native += ':shared' + elif scheduler in ['slurm']: + native = '&NATIVE_STR;' + + queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] + + partition = None + if scheduler in ['slurm']: + partition = task_config['PARTITION_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config[ 'PARTITION_BATCH'] + + task_resource = {'account': account, + 'walltime': walltime, + 'nodes': nodes, + 'cores': cores, + 'ppn': ppn, + 'threads': threads, + 'memory': memory, + 'native': native, + 'queue': queue, + 'partition': partition} + + return task_resource + + def get_task(self, task_name, *args, **kwargs): + """ + Given a task_name, call the method for that task + """ + try: + return getattr(self, task_name)(*args, **kwargs) 
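+ # Dispatch-by-name: e.g. get_task('arch') resolves to self.arch() and
+ # returns its assembled XML string; a name with no matching method falls
+ # through to the AttributeError handler below.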
+ except AttributeError: + raise AttributeError(f'"{task_name}" is not a valid task.\n' + + 'Valid tasks are:\n' + + f'{", ".join(Tasks.VALID_TASKS)}') + + +def create_wf_task(task_name, resources, + cdump='gdas', cycledef=None, envar=None, dependency=None, + metatask=None, varname=None, varval=None, vardict=None, + final=False, command=None): + tasknamestr = f'{cdump}{task_name}' + metatask_dict = None + if metatask is not None: + tasknamestr = f'{tasknamestr}#{varname}#' + metatask_dict = {'metataskname': f'{cdump}{metatask}', + 'varname': f'{varname}', + 'varval': f'{varval}', + 'vardict': vardict} + + cycledefstr = cdump.replace('enkf', '') if cycledef is None else cycledef + + task_dict = {'taskname': f'{tasknamestr}', + 'cycledef': f'{cycledefstr}', + 'maxtries': '&MAXTRIES;', + 'command': f'&JOBS_DIR;/{task_name}.sh' if command is None else command, + 'jobname': f'&PSLOT;_{tasknamestr}_@H', + 'resources': resources, + 'log': f'&ROTDIR;/logs/@Y@m@d@H/{tasknamestr}.log', + 'envars': envar, + 'dependency': dependency, + 'final': final} + + task = rocoto.create_task(task_dict) if metatask is None else rocoto.create_metatask(task_dict, metatask_dict) + + return ''.join(task) diff --git a/workflow/rocoto/tasks_factory.py b/workflow/rocoto/tasks_factory.py new file mode 100644 index 0000000000..38cf0d0bd1 --- /dev/null +++ b/workflow/rocoto/tasks_factory.py @@ -0,0 +1,8 @@ +from wxflow import Factory +from rocoto.gfs_tasks import GFSTasks +from rocoto.gefs_tasks import GEFSTasks + + +tasks_factory = Factory('Tasks') +tasks_factory.register('gfs', GFSTasks) +tasks_factory.register('gefs', GEFSTasks) diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py deleted file mode 120000 index 75800b2bf4..0000000000 --- a/workflow/rocoto/workflow_tasks.py +++ /dev/null @@ -1 +0,0 @@ -workflow_tasks_gsl.py \ No newline at end of file diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py new file mode 100644 index 0000000000..84af898d36 --- /dev/null +++ b/workflow/rocoto/workflow_tasks.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 + +from typing import List +from applications.applications import AppConfig +from rocoto.tasks_factory import tasks_factory + + +__all__ = ['get_wf_tasks'] + + +def get_wf_tasks(app_config: AppConfig) -> List: + """ + Take application configuration to return a list of all tasks for that application + """ + + tasks = [] + # Loop over all keys of cycles (CDUMP) + for cdump, cdump_tasks in app_config.task_names.items(): + task_obj = tasks_factory.create(app_config.net, app_config, cdump) # create Task object based on cdump + for task_name in cdump_tasks: + tasks.append(task_obj.get_task(task_name)) + + return tasks diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py deleted file mode 120000 index a6e327179c..0000000000 --- a/workflow/rocoto/workflow_xml.py +++ /dev/null @@ -1 +0,0 @@ -workflow_xml_gsl.py \ No newline at end of file diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py new file mode 100644 index 0000000000..c81b52111c --- /dev/null +++ b/workflow/rocoto/workflow_xml.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 + +import os +from distutils.spawn import find_executable +from datetime import datetime +from collections import OrderedDict +from typing import Dict +from applications.applications import AppConfig +from rocoto.workflow_tasks import get_wf_tasks +import rocoto.rocoto as rocoto +from abc import ABC, abstractmethod + + +class RocotoXML(ABC): + + def 
__init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: + + self._app_config = app_config + self.rocoto_config = rocoto_config + + self._base = self._app_config.configs['base'] + + self.preamble = self._get_preamble() + self.definitions = self._get_definitions() + self.header = self._get_workflow_header() + self.cycledefs = self.get_cycledefs() + task_list = get_wf_tasks(app_config) + self.tasks = '\n'.join(task_list) + self.footer = self._get_workflow_footer() + + self.xml = self._assemble_xml() + + @staticmethod + def _get_preamble(): + """ + Generate preamble for XML + """ + + strings = ['<?xml version="1.0"?>', + '<!DOCTYPE workflow', + '['] + + return '\n'.join(strings) + + def _get_definitions(self) -> str: + """ + Create entities related to the experiment + """ + + entity = OrderedDict() + + entity['PSLOT'] = self._base['PSLOT'] + + entity['ROTDIR'] = self._base['ROTDIR'] + entity['JOBS_DIR'] = self._base['BASE_JOB'] + + entity['HOMEgfs'] = self._base['HOMEgfs'] + entity['EXPDIR'] = "&HOMEgfs;/FV3GFSwfm/&PSLOT;" + entity['ICSDIR'] = "/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127" + + entity['CASE'] = 'C768' + entity['COMPONENT'] = 'atmos' + entity['NATIVE_STR'] = '--export=NONE' + entity['MAXTRIES'] = self.rocoto_config['maxtries'] + + # Put them all in an XML key-value syntax + strings = [] + for key, value in entity.items(): + strings.append('\t' + rocoto.create_entity(key, value)) + + return '\n'.join(strings) + + def _get_workflow_header(self): + """ + Create the workflow header block + """ + + scheduler = self._app_config.scheduler + cyclethrottle = self.rocoto_config['cyclethrottle'] + taskthrottle = self.rocoto_config['taskthrottle'] + verbosity = self.rocoto_config['verbosity'] + + expdir = self._base['EXPDIR'] + + strings = ['', + ']>', + '', + f'<workflow realtime="F" scheduler="{scheduler}" cyclethrottle="{cyclethrottle}" taskthrottle="{taskthrottle}">', + '', + f'\t<log verbosity="{verbosity}"><cyclestr>{expdir}/logs/@Y@m@d@H.log</cyclestr></log>', + '', + '\t<!-- Define the cycles -->', + ''] + + return '\n'.join(strings) + + @abstractmethod + def get_cycledefs(self): + pass + + @staticmethod + def _get_workflow_footer(): + """ + Generate workflow footer + """ + + return '\n</workflow>\n' + + def _assemble_xml(self) -> str: + + strings = [self.preamble, + self.definitions, + self.header, + self.cycledefs, + self.tasks, + self.footer] + + return ''.join(strings) + + def write(self, xml_file: str = None, crontab_file: str = None): + self._write_xml(xml_file=xml_file) + self._write_crontab(crontab_file=crontab_file) + + def _write_xml(self, xml_file: str = None) -> None: + + expdir = self._base['EXPDIR'] + pslot = self._base['PSLOT'] + + if xml_file is None: + xml_file = f"{expdir}/{pslot}.xml" + + with open(xml_file, 'w') as fh: + fh.write(self.xml) + + def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None: + """ + Create crontab to execute rocotorun every cronint (5) minutes + """ + + # No point creating a crontab if rocotorun is not available. 
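+ # When rocotorun is found, the entry written below looks roughly like
+ # (default cronint=5):
+ #   */5 * * * * /path/to/rocotorun -d <EXPDIR>/<PSLOT>.db -w <EXPDIR>/<PSLOT>.xml
+ # so cron advances the workflow every cronint minutes.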
+ rocotoruncmd = find_executable('rocotorun') + if rocotoruncmd is None: + print('Failed to find rocotorun, crontab will not be created') + return + + expdir = self._base['EXPDIR'] + pslot = self._base['PSLOT'] + + rocotorunstr = f'{rocotoruncmd} -d {expdir}/{pslot}.db -w {expdir}/{pslot}.xml' + cronintstr = f'*/{cronint} * * * *' + + try: + replyto = os.environ['REPLYTO'] + except KeyError: + replyto = '' + + strings = ['', + f'#################### {pslot} ####################', + f'MAILTO="{replyto}"', + f'{cronintstr} {rocotorunstr}', + '#################################################################', + ''] + + if crontab_file is None: + crontab_file = f"{expdir}/{pslot}.crontab" + + with open(crontab_file, 'w') as fh: + fh.write('\n'.join(strings)) + + return diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index 9e91c860da..b1fa439052 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -12,9 +12,9 @@ from hosts import Host -from pygw.yaml_file import parse_j2yaml -from pygw.attrdict import AttrDict -from pygw.timetools import to_datetime, to_timedelta, datetime_to_YMDH +from wxflow import parse_j2yaml +from wxflow import AttrDict +from wxflow import to_datetime, to_timedelta, datetime_to_YMDH _here = os.path.dirname(__file__) @@ -61,7 +61,7 @@ def fill_COMROT_cycled(host, inputs): do_ocean = do_ice = do_med = False - if inputs.app in ['S2S', 'S2SW']: + if 'S2S' in inputs.app: do_ocean = do_ice = do_med = True if inputs.icsdir is None: @@ -71,7 +71,15 @@ def fill_COMROT_cycled(host, inputs): rdatestr = datetime_to_YMDH(inputs.idate - to_timedelta('T06H')) idatestr = datetime_to_YMDH(inputs.idate) - if os.path.isdir(os.path.join(inputs.icsdir, f'{inputs.cdump}.{rdatestr[:8]}', rdatestr[8:], 'model_data', 'atmos')): + # Test if we are using the new COM structure or the old flat one for ICs + if inputs.start in ['warm']: + pathstr = os.path.join(inputs.icsdir, f'{inputs.cdump}.{rdatestr[:8]}', + rdatestr[8:], 'model_data', 'atmos') + else: + pathstr = os.path.join(inputs.icsdir, f'{inputs.cdump}.{idatestr[:8]}', + idatestr[8:], 'model_data', 'atmos') + + if os.path.isdir(pathstr): flat_structure = False else: flat_structure = True @@ -87,7 +95,7 @@ def fill_COMROT_cycled(host, inputs): do_med = False dst_ocn_rst_dir = os.path.join('model_data', 'ocean', 'restart') dst_ocn_anl_dir = os.path.join('analysis', 'ocean') - dst_ice_dir = os.path.join('model_data', 'ice', 'restart') + dst_ice_rst_dir = os.path.join('model_data', 'ice', 'restart') dst_atm_anl_dir = os.path.join('analysis', 'atmos') if flat_structure: @@ -102,14 +110,14 @@ def fill_COMROT_cycled(host, inputs): # ocean and ice have the same filenames for warm and cold src_ocn_rst_dir = os.path.join('ocean', 'RESTART') src_ocn_anl_dir = 'ocean' - src_ice_dir = os.path.join('ice', 'RESTART') + src_ice_rst_dir = os.path.join('ice', 'RESTART') src_atm_anl_dir = 'atmos' else: src_atm_dir = dst_atm_dir src_med_dir = dst_med_dir src_ocn_rst_dir = dst_ocn_rst_dir src_ocn_anl_dir = dst_ocn_anl_dir - src_ice_dir = dst_ice_dir + src_ice_rst_dir = dst_ice_rst_dir src_atm_anl_dir = dst_atm_anl_dir def link_files_from_src_to_dst(src_dir, dst_dir): @@ -121,69 +129,95 @@ def link_files_from_src_to_dst(src_dir, dst_dir): # Link ensemble member initial conditions if inputs.nens > 0: - if inputs.start in ['warm']: - enkfdir = f'enkf{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - elif inputs.start in ['cold']: - enkfdir = f'enkf{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' + previous_cycle_dir = 
f'enkf{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + current_cycle_dir = f'enkf{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' for ii in range(1, inputs.nens + 1): memdir = f'mem{ii:03d}' # Link atmospheric files - dst_dir = os.path.join(comrot, enkfdir, memdir, dst_atm_dir) - src_dir = os.path.join(inputs.icsdir, enkfdir, memdir, src_atm_dir) + if inputs.start in ['warm']: + dst_dir = os.path.join(comrot, previous_cycle_dir, memdir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_atm_dir) + elif inputs.start in ['cold']: + dst_dir = os.path.join(comrot, current_cycle_dir, memdir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_atm_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) - # ocean, ice, etc. TBD ... + + # Link ocean files + if do_ocean: + dst_dir = os.path.join(comrot, previous_cycle_dir, memdir, dst_ocn_rst_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_ocn_rst_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + + # First 1/2 cycle needs a MOM6 increment + incfile = f'enkf{inputs.cdump}.t{idatestr[8:]}z.ocninc.nc' + src_file = os.path.join(inputs.icsdir, current_cycle_dir, memdir, src_ocn_anl_dir, incfile) + dst_file = os.path.join(comrot, current_cycle_dir, memdir, dst_ocn_anl_dir, incfile) + makedirs_if_missing(os.path.join(comrot, current_cycle_dir, memdir, dst_ocn_anl_dir)) + os.symlink(src_file, dst_file) + + # Link ice files + if do_ice: + dst_dir = os.path.join(comrot, previous_cycle_dir, memdir, dst_ice_rst_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_ice_rst_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + + # Link mediator files + if do_med: + dst_dir = os.path.join(comrot, previous_cycle_dir, memdir, dst_med_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, memdir, src_med_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) # Link deterministic initial conditions + previous_cycle_dir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + current_cycle_dir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' # Link atmospheric files if inputs.start in ['warm']: - detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + dst_dir = os.path.join(comrot, previous_cycle_dir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_atm_dir) elif inputs.start in ['cold']: - detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' + dst_dir = os.path.join(comrot, current_cycle_dir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_atm_dir) - dst_dir = os.path.join(comrot, detdir, dst_atm_dir) - src_dir = os.path.join(inputs.icsdir, detdir, src_atm_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # Link ocean files if do_ocean: - detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, dst_ocn_rst_dir) - src_dir = os.path.join(inputs.icsdir, detdir, src_ocn_rst_dir) + dst_dir = os.path.join(comrot, previous_cycle_dir, dst_ocn_rst_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_ocn_rst_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # First 1/2 cycle needs a MOM6 increment - incdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' incfile = f'{inputs.cdump}.t{idatestr[8:]}z.ocninc.nc' - src_file = os.path.join(inputs.icsdir, incdir, 
src_ocn_anl_dir, incfile) - dst_file = os.path.join(comrot, incdir, dst_ocn_anl_dir, incfile) - makedirs_if_missing(os.path.join(comrot, incdir, dst_ocn_anl_dir)) + src_file = os.path.join(inputs.icsdir, current_cycle_dir, src_ocn_anl_dir, incfile) + dst_file = os.path.join(comrot, current_cycle_dir, dst_ocn_anl_dir, incfile) + makedirs_if_missing(os.path.join(comrot, current_cycle_dir, dst_ocn_anl_dir)) os.symlink(src_file, dst_file) # Link ice files if do_ice: - detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, dst_ice_dir) - src_dir = os.path.join(inputs.icsdir, detdir, src_ice_dir) + dst_dir = os.path.join(comrot, previous_cycle_dir, dst_ice_rst_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_ice_rst_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # Link mediator files if do_med: - detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' - dst_dir = os.path.join(comrot, detdir, dst_med_dir) - src_dir = os.path.join(inputs.icsdir, detdir, src_med_dir) + dst_dir = os.path.join(comrot, previous_cycle_dir, dst_med_dir) + src_dir = os.path.join(inputs.icsdir, previous_cycle_dir, src_med_dir) makedirs_if_missing(dst_dir) link_files_from_src_to_dst(src_dir, dst_dir) # Link bias correction and radiance diagnostics files - detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' - src_dir = os.path.join(inputs.icsdir, detdir, src_atm_anl_dir) - dst_dir = os.path.join(comrot, detdir, dst_atm_anl_dir) + src_dir = os.path.join(inputs.icsdir, current_cycle_dir, src_atm_anl_dir) + dst_dir = os.path.join(comrot, current_cycle_dir, dst_atm_anl_dir) makedirs_if_missing(dst_dir) for ftype in ['abias', 'abias_pc', 'abias_air', 'radstat']: fname = f'{inputs.cdump}.t{idatestr[8:]}z.{ftype}' @@ -199,7 +233,6 @@ def fill_COMROT_forecasts(host, inputs): Implementation of 'fill_COMROT' for forecast-only mode """ print('forecast-only mode treats ICs differently and cannot be staged here') - return def fill_EXPDIR(inputs): @@ -266,6 +299,11 @@ def edit_baseconfig(host, inputs, yaml_dict): extend_dict = get_template_dict(host.info) tmpl_dict = dict(tmpl_dict, **extend_dict) + if inputs.start in ["warm"]: + is_warm_start = ".true." + elif inputs.start in ["cold"]: + is_warm_start = ".false." 
+ extend_dict = dict() extend_dict = { "@PSLOT@": inputs.pslot, @@ -274,7 +312,7 @@ def edit_baseconfig(host, inputs, yaml_dict): "@CASECTL@": f'C{inputs.resdet}', "@EXPDIR@": inputs.expdir, "@ROTDIR@": inputs.comrot, - "@EXP_WARM_START@": inputs.warm_start, + "@EXP_WARM_START@": is_warm_start, "@MODE@": inputs.mode, "@gfs_cyc@": inputs.gfs_cyc, "@APP@": inputs.app @@ -296,10 +334,6 @@ def edit_baseconfig(host, inputs, yaml_dict): } tmpl_dict = dict(tmpl_dict, **extend_dict) - # All apps and modes now use the same physics and CCPP suite by default - extend_dict = {"@CCPP_SUITE@": "FV3_GFS_v17_p8", "@IMP_PHYSICS@": 8} - tmpl_dict = dict(tmpl_dict, **extend_dict) - try: tmpl_dict = dict(tmpl_dict, **get_template_dict(yaml_dict['base'])) except KeyError: @@ -343,11 +377,71 @@ def get_template_dict(input_dict): return output_dict -def input_args(): +def input_args(*argv): """ Method to collect user arguments for `setup_expt.py` """ + ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SA', 'S2SW'] + + def _common_args(parser): + parser.add_argument('--pslot', help='parallel experiment name', + type=str, required=False, default='test') + parser.add_argument('--resdet', help='resolution of the deterministic model forecast', + type=int, required=False, default=384) + parser.add_argument('--comrot', help='full path to COMROT', + type=str, required=False, default=os.getenv('HOME')) + parser.add_argument('--expdir', help='full path to EXPDIR', + type=str, required=False, default=os.getenv('HOME')) + parser.add_argument('--idate', help='starting date of experiment, initial conditions must exist!', + required=True, type=lambda dd: to_datetime(dd)) + parser.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: to_datetime(dd)) + return parser + + def _gfs_args(parser): + parser.add_argument('--start', help='restart mode: warm or cold', type=str, + choices=['warm', 'cold'], required=False, default='cold') + parser.add_argument('--cdump', help='CDUMP to start the experiment', + type=str, required=False, default='gdas') + # --configdir is hidden from help + parser.add_argument('--configdir', help=SUPPRESS, type=str, required=False, default=os.path.join(_top, 'parm/config/gfs')) + parser.add_argument('--yaml', help='Defaults to substitute from', type=str, + required=False, default=os.path.join(_top, 'parm/config/gfs/yaml/defaults.yaml')) + return parser + + def _gfs_cycled_args(parser): + parser.add_argument('--icsdir', help='full path to initial condition directory', type=str, required=False, default=None) + parser.add_argument('--app', help='UFS application', type=str, + choices=ufs_apps, required=False, default='ATM') + parser.add_argument('--gfs_cyc', help='cycles to run forecast', type=int, + choices=[0, 1, 2, 4], default=1, required=False) + return parser + + def _gfs_or_gefs_ensemble_args(parser): + parser.add_argument('--resens', help='resolution of the ensemble model forecast', + type=int, required=False, default=192) + parser.add_argument('--nens', help='number of ensemble members', + type=int, required=False, default=20) + return parser + + def _gfs_or_gefs_forecast_args(parser): + parser.add_argument('--app', help='UFS application', type=str, + choices=ufs_apps + ['S2SWA'], required=False, default='ATM') + parser.add_argument('--gfs_cyc', help='Number of forecasts per day', type=int, + choices=[1, 2, 4], default=1, required=False) + return parser + + def _gefs_args(parser): + parser.add_argument('--start', help='restart mode: warm or cold', type=str, + choices=['warm', 
'cold'], required=False, default='cold') + parser.add_argument('--configdir', help=SUPPRESS, type=str, required=False, + default=os.path.join(_top, 'parm/config/gefs')) + parser.add_argument('--yaml', help='Defaults to substitute from', type=str, required=False, + default=os.path.join(_top, 'parm/config/gefs/yaml/defaults.yaml')) + parser.add_argument('--icsdir', help='full path to initial condition directory [temporary hack in place for testing]', + type=str, required=False, default=None) + return parser + description = """ Setup files and directories to start a GFS parallel.\n Create EXPDIR, copy config files.\n @@ -362,78 +456,38 @@ def input_args(): gfs = sysparser.add_parser('gfs', help='arguments for GFS') gefs = sysparser.add_parser('gefs', help='arguments for GEFS') - modeparser = gfs.add_subparsers(dest='mode') - cycled = modeparser.add_parser('cycled', help='arguments for cycled mode') - forecasts = modeparser.add_parser('forecast-only', help='arguments for forecast-only mode') + gfsmodeparser = gfs.add_subparsers(dest='mode') + gfscycled = gfsmodeparser.add_parser('cycled', help='arguments for cycled mode') + gfsforecasts = gfsmodeparser.add_parser('forecast-only', help='arguments for forecast-only mode') - # Common arguments across all modes - for subp in [cycled, forecasts, gefs]: - subp.add_argument('--pslot', help='parallel experiment name', - type=str, required=False, default='test') - subp.add_argument('--resdet', help='resolution of the deterministic model forecast', - type=int, required=False, default=384) - subp.add_argument('--comrot', help='full path to COMROT', - type=str, required=False, default=os.getenv('HOME')) - subp.add_argument('--expdir', help='full path to EXPDIR', - type=str, required=False, default=os.getenv('HOME')) - subp.add_argument('--idate', help='starting date of experiment, initial conditions must exist!', - required=True, type=lambda dd: to_datetime(dd)) - subp.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: to_datetime(dd)) + gefsmodeparser = gefs.add_subparsers(dest='mode') + gefsforecasts = gefsmodeparser.add_parser('forecast-only', help='arguments for forecast-only mode') - ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SA', 'S2SW'] + # Common arguments across all modes + for subp in [gfscycled, gfsforecasts, gefsforecasts]: + subp = _common_args(subp) # GFS-only arguments - for subp in [cycled, forecasts]: - subp.add_argument('--start', help='restart mode: warm or cold', type=str, - choices=['warm', 'cold'], required=False, default='cold') - subp.add_argument('--cdump', help='CDUMP to start the experiment', - type=str, required=False, default='gdas') - # --configdir is hidden from help - subp.add_argument('--configdir', help=SUPPRESS, type=str, required=False, default=os.path.join(_top, 'parm/config/gfs')) - subp.add_argument('--yaml', help='Defaults to substitute from', type=str, - required=False, default=os.path.join(_top, 'parm/config/gfs/yaml/defaults.yaml')) + for subp in [gfscycled, gfsforecasts]: + subp = _gfs_args(subp) # ensemble-only arguments - for subp in [cycled, gefs]: - subp.add_argument('--resens', help='resolution of the ensemble model forecast', - type=int, required=False, default=192) - subp.add_argument('--nens', help='number of ensemble members', - type=int, required=False, default=20) + for subp in [gfscycled, gefsforecasts]: + subp = _gfs_or_gefs_ensemble_args(subp) # GFS/GEFS forecast-only additional arguments - for subp in [forecasts, gefs]: - subp.add_argument('--app', help='UFS 
application', type=str, - choices=ufs_apps + ['S2SWA'], required=False, default='ATM') - subp.add_argument('--gfs_cyc', help='Number of forecasts per day', type=int, - choices=[1, 2, 4], default=1, required=False) + for subp in [gfsforecasts, gefsforecasts]: + subp = _gfs_or_gefs_forecast_args(subp) # cycled mode additional arguments - cycled.add_argument('--icsdir', help='full path to initial condition directory', type=str, required=False, default=None) - cycled.add_argument('--app', help='UFS application', type=str, - choices=ufs_apps, required=False, default='ATM') - cycled.add_argument('--gfs_cyc', help='cycles to run forecast', type=int, - choices=[0, 1, 2, 4], default=1, required=False) - - # GEFS-only arguments - # Create hidden mode argument since there is real option for GEFS - gefs.add_argument('--mode', help=SUPPRESS, type=str, required=False, default='forecast-only') - # Create hidden start argument since GEFS is always cold start - gefs.add_argument('--start', help=SUPPRESS, type=str, required=False, default='cold') - # Create hidden arguments for configdir and yaml - gefs.add_argument('--configdir', help=SUPPRESS, type=str, required=False, - default=os.path.join(_top, 'parm/config/gefs')) - gefs.add_argument('--yaml', help='Defaults to substitute from', type=str, required=False, - default=os.path.join(_top, 'parm/config/gefs/yaml/defaults.yaml')) - - args = parser.parse_args() - - # Add an entry for warm_start = .true. or .false. - if args.start in ['warm']: - args.warm_start = ".true." - elif args.start in ['cold']: - args.warm_start = ".false." - - return args + for subp in [gfscycled]: + subp = _gfs_cycled_args(subp) + + # GEFS forecast-only arguments + for subp in [gefsforecasts]: + subp = _gefs_args(subp) + + return parser.parse_args(list(*argv) if len(argv) else None) def query_and_clean(dirname): @@ -467,9 +521,9 @@ def validate_user_request(host, inputs): raise NotImplementedError(f"Supported resolutions on {machine} are:\n{', '.join(supp_res)}") -if __name__ == '__main__': +def main(*argv): - user_inputs = input_args() + user_inputs = input_args(*argv) host = Host() validate_user_request(host, user_inputs) @@ -488,3 +542,8 @@ def validate_user_request(host, inputs): makedirs_if_missing(expdir) fill_EXPDIR(user_inputs) update_configs(host, user_inputs) + + +if __name__ == '__main__': + + main() diff --git a/workflow/setup_xml.py b/workflow/setup_xml.py index d43efe21e1..a66e4922a3 100755 --- a/workflow/setup_xml.py +++ b/workflow/setup_xml.py @@ -6,12 +6,12 @@ import os from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -from applications import AppConfig -from rocoto.workflow_xml import RocotoXML -from pygw.configuration import Configuration +from applications.application_factory import app_config_factory +from rocoto.rocoto_xml_factory import rocoto_xml_factory +from wxflow import Configuration -def input_args(): +def input_args(*argv): """ Method to collect user arguments for `setup_xml.py` """ @@ -37,9 +37,7 @@ def input_args(): parser.add_argument('--verbosity', help='verbosity level of Rocoto', type=int, default=10, required=False) - args = parser.parse_args() - - return args + return parser.parse_args(argv[0][0] if len(argv[0]) else None) def check_expdir(cmd_expdir, cfg_expdir): @@ -51,9 +49,9 @@ def check_expdir(cmd_expdir, cfg_expdir): raise ValueError('Abort!') -if __name__ == '__main__': +def main(*argv): - user_inputs = input_args() + user_inputs = input_args(argv) rocoto_param_dict = {'maxtries': user_inputs.maxtries, 'cyclethrottle': 
user_inputs.cyclethrottle, 'taskthrottle': user_inputs.taskthrottle, @@ -61,11 +59,21 @@ def check_expdir(cmd_expdir, cfg_expdir): cfg = Configuration(user_inputs.expdir) - check_expdir(user_inputs.expdir, cfg.parse_config('config.base')['EXPDIR']) + base = cfg.parse_config('config.base') + + check_expdir(user_inputs.expdir, base['EXPDIR']) + + net = base['NET'] + mode = base['MODE'] # Configure the application - app_config = AppConfig(cfg) + app_config = app_config_factory.create(f'{net}_{mode}', cfg) # Create Rocoto Tasks and Assemble them into an XML - xml = RocotoXML(app_config, rocoto_param_dict) + xml = rocoto_xml_factory.create(f'{net}_{mode}', app_config, rocoto_param_dict) xml.write() + + +if __name__ == '__main__': + + main() diff --git a/workflow/test_configuration.py b/workflow/test_configuration.py index 5c59fd35bf..32e40c67af 100644 --- a/workflow/test_configuration.py +++ b/workflow/test_configuration.py @@ -1,5 +1,5 @@ import sys -from pygw.configuration import Configuration +from wxflow import Configuration expdir = sys.argv[1]
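
For orientation, here is a minimal, self-contained sketch of the register/create pattern that tasks_factory.py, rocoto_xml_factory.py, and setup_xml.py rely on above. MinimalFactory and GFSTasksStub are illustrative stand-ins, not the wxflow implementation; only the call shapes (register(key, cls) and create(key, *args)) mirror the factory modules, with create keyed by NET for tasks and by f'{net}_{mode}' for the XML builders.

# Illustrative stand-in for wxflow.Factory; not the real API.
class MinimalFactory:
    def __init__(self, name: str):
        self.name = name
        self._registry = {}

    def register(self, key: str, builder) -> None:
        # Map a string key (e.g. NET, or f'{net}_{mode}') to a class
        self._registry[key] = builder

    def create(self, key: str, *args, **kwargs):
        # Instantiate the class registered under `key`
        try:
            return self._registry[key](*args, **kwargs)
        except KeyError:
            raise KeyError(f'{key} is not a registered {self.name}') from None


class GFSTasksStub:
    # Stand-in for rocoto.gfs_tasks.GFSTasks
    def __init__(self, app_config, cdump):
        self.app_config = app_config
        self.cdump = cdump


tasks_factory = MinimalFactory('Tasks')
tasks_factory.register('gfs', GFSTasksStub)

# workflow_tasks.py does: tasks_factory.create(app_config.net, app_config, cdump)
task_obj = tasks_factory.create('gfs', {'base': {}}, 'gdas')
assert isinstance(task_obj, GFSTasksStub)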