Merge pull request #17 from natalie-perlin/feature/enable_test_cases
ulmononian authored Oct 30, 2024
2 parents a07a2c9 + 4d0e25a commit 5889335
Showing 8 changed files with 55 additions and 45 deletions.
20 changes: 12 additions & 8 deletions modulefiles/ufs_noaacloud.intel.lua
@@ -4,23 +4,27 @@ loads UFS Model prerequisites for NOAA Parallelworks/Intel

prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")
prepend_path("MODULEPATH", "/apps/modules/modulefiles")
prepend_path("PATH", "/contrib/EPIC/bin")
load("gnu")
load("stack-intel")
load("stack-intel-oneapi-mpi")

stack_intel_ver=os.getenv("stack_intel_ver") or "2021.10.0"
gnu_ver=os.getenv("gnu_ver") or ""
load(pathJoin("gnu", gnu_ver))

stack_intel_ver=os.getenv("stack_intel_ver") or ""
load(pathJoin("stack-intel", stack_intel_ver))

stack_impi_ver=os.getenv("stack_impi_ver") or "2021.10.0"
load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver))
unload("gnu")
stack_intel_oneapi_mpi_ver=os.getenv("stack_intel_oneapi_mpi_ver") or ""
load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver))

gnu_ver=os.getenv("gnu_ver") or ""
unload(pathJoin("gnu", gnu_ver))

cmake_ver=os.getenv("cmake_ver") or "3.23.1"
load(pathJoin("cmake", cmake_ver))

load("ufs_common")

nccmp_ver=os.getenv("nccmp_ver") or "1.9.0.1"
load(pathJoin("nccmp", nccmp_ver))

setenv("CC", "mpiicc")
setenv("CXX", "mpiicpc")
setenv("FC", "mpiifort")
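
The updated modulefile resolves each stack component version from the environment first and only then falls back to the value written in the file (an empty string leaves the choice to Lmod's default). A loose Python analogue of that precedence, shown only to make the override order explicit; the modulefile itself is Lua/Lmod and this snippet is not part of the commit:

import os

# Environment value wins; otherwise use the fallback written in the modulefile.
# An empty fallback means "load whatever default version Lmod resolves".
stack_intel_ver = os.environ.get("stack_intel_ver") or ""
cmake_ver = os.environ.get("cmake_ver") or "3.23.1"
print("would load:", f"stack-intel/{stack_intel_ver}".rstrip("/"), f"cmake/{cmake_ver}")
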
12 changes: 10 additions & 2 deletions tests-dev/baseline_setup.yaml
@@ -11,6 +11,7 @@ hera:
INPUTDATA_ROOT: /scratch2/NAGAPE/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /scratch2/NAGAPE/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /scratch2/NAGAPE/epic/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /scratch1/NCEPDEV/nems/role.epic/HSD_INPUT_DATA
orion:
QUEUE: batch
COMPILE_QUEUE: batch
@@ -24,6 +25,7 @@ orion:
INPUTDATA_ROOT: /work/noaa/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /work/noaa/epic/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /work/noaa/epic/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /work/noaa/epic/role-epic/contrib/HSD_INPUT_DATA
hercules:
QUEUE: batch
COMPILE_QUEUE: batch
@@ -37,6 +39,7 @@ hercules:
INPUTDATA_ROOT: /work/noaa/epic/hercules/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /work/noaa/epic/hercules/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /work/noaa/epic/hercules/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /work/noaa/epic/role-epic/contrib/HSD_INPUT_DATA
gaea:
QUEUE: normal
COMPILE_QUEUE: normal
@@ -50,6 +53,7 @@ gaea:
INPUTDATA_ROOT: /gpfs/f5/epic/world-shared/UFS-WM_RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /gpfs/f5/epic/world-shared/UFS-WM_RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /gpfs/f5/epic/world-shared/UFS-WM_RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /glade/work/epicufsrt/contrib/HSD_INPUT_DATA
jet:
QUEUE: batch
COMPILE_QUEUE: batch
@@ -63,6 +67,7 @@ jet:
INPUTDATA_ROOT: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /mnt/lfs5/HFIP/hfv3gfs/role.epic/HSD_INPUT_DATA
derecho:
QUEUE: main
COMPILE_QUEUE: main
@@ -76,19 +81,21 @@ derecho:
INPUTDATA_ROOT: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /glade/derecho/scratch/epicufsrt/ufs-weather-model/RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /glade/work/epicufsrt/contrib/HSD_INPUT_DATA
noaacloud:
QUEUE: batch
COMPILE_QUEUE: batch
PARTITION:
dprefix: /lustre
DISKNM: /contrib/ufs-weather-model/RT
STMP: /lustre/stmp4
PTMP: /lustre/stmp4
STMP: /lustre/stmp
PTMP: /lustre/stmp
RUNDIR_ROOT:
SCHEDULER: slurm
INPUTDATA_ROOT: /contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /contrib/ufs-weather-model/RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA: /contrib/EPIC/HSD_INPUT_DATA
s4:
QUEUE: s4
COMPILE_QUEUE: s4
@@ -102,3 +109,4 @@ s4:
INPUTDATA_ROOT: /data/prod/emc.nemspara/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /data/prod/emc.nemspara/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /data/prod/emc.nemspara/RT/NEMSfv3gfs/BM_IC-20220207
HSD_INPUT_DATA:
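
Every machine block now carries an HSD_INPUT_DATA path next to the existing input-data roots; s4 leaves it empty because the data set is not staged there. A minimal sketch of how a driver might read the new key, assuming PyYAML and a run from inside tests-dev/ (the real lookup happens in create_xml.py below):

import yaml

# Load the per-machine baseline configuration (path assumed relative to tests-dev/).
with open("baseline_setup.yaml", "r") as f:
    config = yaml.safe_load(f)

machine = "hera"  # hypothetical machine id, for illustration only
base = config[machine]

# Fall back to an empty string for machines (e.g. s4) that leave the key unset.
hsd_input_data = base.get("HSD_INPUT_DATA") or ""
print(f"HSD input data on {machine}: {hsd_input_data}")
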
7 changes: 5 additions & 2 deletions tests-dev/create_log.py
@@ -145,8 +145,11 @@ def finish_log():

TEST_START_TIME, TEST_END_TIME = get_timestamps('./logs/log_'+MACHINE_ID+'/')

start_time = datetime.strptime(TEST_START_TIME, "%Y-%m-%d %H:%M:%S")
end_time = datetime.strptime(TEST_END_TIME, "%Y-%m-%d %H:%M:%S")
clean_START_TIME= TEST_START_TIME.split('.')[0]
start_time = datetime.strptime(clean_START_TIME, "%Y-%m-%d %H:%M:%S")
clean_END_TIME= TEST_END_TIME.split('.')[0]
end_time = datetime.strptime(clean_END_TIME, "%Y-%m-%d %H:%M:%S")

hours, remainder= divmod((end_time - start_time).total_seconds(), 3600)
minutes, seconds= divmod(remainder, 60)
hours = int(hours); minutes=int(minutes); seconds =int(seconds)
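
get_timestamps() can now return strings with fractional seconds, so finish_log() truncates them at the decimal point before parsing. A self-contained sketch of that cleanup and the elapsed-time arithmetic, using made-up timestamps in place of the real log values:

from datetime import datetime

# Stand-ins for the values returned by get_timestamps().
TEST_START_TIME = "2024-10-30 12:00:00.123456"
TEST_END_TIME = "2024-10-30 14:35:42.987654"

# Drop everything after the decimal point so the format string matches.
start_time = datetime.strptime(TEST_START_TIME.split('.')[0], "%Y-%m-%d %H:%M:%S")
end_time = datetime.strptime(TEST_END_TIME.split('.')[0], "%Y-%m-%d %H:%M:%S")

# Same duration arithmetic as finish_log(): whole hours, minutes, seconds.
hours, remainder = divmod((end_time - start_time).total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)
print(f"elapsed: {int(hours):02d}:{int(minutes):02d}:{int(seconds):02d}")
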
10 changes: 8 additions & 2 deletions tests-dev/create_xml.py
@@ -4,7 +4,7 @@
import yaml
from ufs_test_utils import get_testcase, write_logfile, rrmdir, machine_check_off

def rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INPUTDATA_ROOT_BMIC,RUNDIR_ROOT,NEW_BASELINE,ROCOTO_XML):
def rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INPUTDATA_ROOT_BMIC,HSD_INPUT_DATA,RUNDIR_ROOT,NEW_BASELINE,ROCOTO_XML):
"""Generate header information for Rocoto xml file
Args:
@@ -13,6 +13,7 @@ def rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INP
INPUTDATA_ROOT (str): Input data directory
INPUTDATA_ROOT_WW3 (str): WW3 input data directory
INPUTDATA_ROOT_BMIC (str): BMIC input data directory
HSD_INPUT_DATA (str): HSD tests input data directory
RUNDIR_ROOT (str): Test run directory
NEW_BASELINE (str): Directory for newly generated baselines
ROCOTO_XML (str): Rocoto .xml filename to write to
@@ -31,6 +32,7 @@ def rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INP
<!ENTITY INPUTDATA_ROOT "{INPUTDATA_ROOT}">
<!ENTITY INPUTDATA_ROOT_WW3 "{INPUTDATA_ROOT_WW3}">
<!ENTITY INPUTDATA_ROOT_BMIC "{INPUTDATA_ROOT_BMIC}">
<!ENTITY HSD_INPUT_DATA "{HSD_INPUT_DATA}">
<!ENTITY RUNDIR_ROOT "{RUNDIR_ROOT}">
<!ENTITY NEW_BASELINE "{NEW_BASELINE}">
]>
@@ -166,6 +168,7 @@ def write_runtest_env():
INPUTDATA_ROOT = str(os.getenv('INPUTDATA_ROOT'))
INPUTDATA_ROOT_WW3 = str(os.getenv('INPUTDATA_ROOT_WW3'))
INPUTDATA_ROOT_BMIC= str(os.getenv('INPUTDATA_ROOT_BMIC'))
HSD_INPUT_DATA= str(os.getenv('HSD_INPUT_DATA'))
PATHRT = str(os.getenv('PATHRT'))
PATHTR, tail = os.path.split(PATHRT)
NEW_BASELINE = str(os.getenv('NEW_BASELINE'))
@@ -194,6 +197,7 @@ def write_runtest_env():
export INPUTDATA_ROOT={INPUTDATA_ROOT}
export INPUTDATA_ROOT_WW3={INPUTDATA_ROOT_WW3}
export INPUTDATA_ROOT_BMIC={INPUTDATA_ROOT_BMIC}
export HSD_INPUT_DATA={HSD_INPUT_DATA}
export PATHRT={PATHRT}
export PATHTR={PATHTR}
export NEW_BASELINE={NEW_BASELINE}
@@ -343,6 +347,7 @@ def xml_loop():
INPUTDATA_ROOT= str(base['INPUTDATA_ROOT'])
INPUTDATA_ROOT_WW3 = str(base['INPUTDATA_ROOT_WW3'])
INPUTDATA_ROOT_BMIC= str(base['INPUTDATA_ROOT_BMIC'])
HSD_INPUT_DATA= str(base['HSD_INPUT_DATA'])

path = STMP+'/'+USER
os.makedirs(path, exist_ok=True)
@@ -387,6 +392,7 @@ def xml_loop():
os.environ["INPUTDATA_ROOT"] = INPUTDATA_ROOT
os.environ["INPUTDATA_ROOT_WW3"] = INPUTDATA_ROOT_WW3
os.environ["INPUTDATA_ROOT_BMIC"]= INPUTDATA_ROOT_BMIC
os.environ["HSD_INPUT_DATA"]= HSD_INPUT_DATA
os.environ["PARTITION"] = PARTITION
os.environ["SCHEDULER"] = SCHEDULER
os.environ["RTPWD"] = RTPWD
@@ -395,7 +401,7 @@ def xml_loop():
JOB_NR = 0
ROCOTO = True
ROCOTO_XML = os.getenv('ROCOTO_XML')
rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INPUTDATA_ROOT_BMIC,RUNDIR_ROOT,NEW_BASELINE,ROCOTO_XML)
rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INPUTDATA_ROOT_BMIC,HSD_INPUT_DATA,RUNDIR_ROOT,NEW_BASELINE,ROCOTO_XML)
UFS_TEST_YAML = str(os.getenv('UFS_TEST_YAML'))
with open(UFS_TEST_YAML, 'r') as f:
rt_yaml = yaml.load(f, Loader=yaml.FullLoader)
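
rocoto_create_entries() now takes HSD_INPUT_DATA and writes it as one more XML entity alongside the other data roots, and write_runtest_env()/xml_loop() thread the value through the environment. A hedged sketch of the entity block the generated header ends up containing, with the noaacloud paths from baseline_setup.yaml used as placeholders:

# Illustrative values; the real script pulls these from baseline_setup.yaml and
# the environment before calling rocoto_create_entries().
INPUTDATA_ROOT = "/contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501"
INPUTDATA_ROOT_BMIC = "/contrib/ufs-weather-model/RT/NEMSfv3gfs/BM_IC-20220207"
HSD_INPUT_DATA = "/contrib/EPIC/HSD_INPUT_DATA"

# The new entity sits next to the existing input-data entities in the DOCTYPE block.
entities = f"""<!ENTITY INPUTDATA_ROOT "{INPUTDATA_ROOT}">
<!ENTITY INPUTDATA_ROOT_BMIC "{INPUTDATA_ROOT_BMIC}">
<!ENTITY HSD_INPUT_DATA "{HSD_INPUT_DATA}">"""
print(entities)
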
14 changes: 1 addition & 13 deletions tests-dev/test_cases/exp_conf/2020_CAPE.IN
@@ -24,19 +24,7 @@ if [ $WARM_START = .false. ]; then
elif [ "$V2_SFC_FILE" = "true" ]; then
cp -r @[INPUTDATA_ROOT]/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
elif [ "$HSD_CASE" = "true" ]; then
if [ ${MACHINE_ID} = hera ]; then
cp -r /scratch1/NCEPDEV/nems/role.epic/HSD_INPUT_DATA/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
elif [ ${MACHINE_ID} = orion ]; then
cp -r /work/noaa/epic/role-epic/contrib/HSD_INPUT_DATA/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
elif [ ${MACHINE_ID} = hercules ]; then
cp -r /work/noaa/epic/role-epic/contrib/HSD_INPUT_DATA/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
elif [ ${MACHINE_ID} = jet ]; then
cp -r /mnt/lfs5/HFIP/hfv3gfs/role.epic/HSD_INPUT_DATA/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
elif [ ${MACHINE_ID} = gaea ]; then
cp -r /gpfs/f5/epic/world-shared/HSD_INPUT_DATA/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
elif [ ${MACHINE_ID} = derecho ]; then
cp -r /glade/work/epicufsrt/contrib/HSD_INPUT_DATA/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
fi
cp -r @[HSD_INPUT_DATA]/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/.
else
cp -r @[INPUTDATA_ROOT]/${inputdir}/INPUT_L127_gfsv17/* ./INPUT/.
fi
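
With the per-machine if/elif ladder removed, the run script copies from @[HSD_INPUT_DATA] and lets the framework fill the token in when it expands the .IN template. A rough Python illustration of that kind of @[NAME] substitution, under the assumption that tokens are resolved from exported variables such as those written by write_runtest_env(); the framework uses its own template parser:

import os
import re

# Hypothetical environment, mirroring what write_runtest_env() exports (hera path).
os.environ.setdefault("HSD_INPUT_DATA", "/scratch1/NCEPDEV/nems/role.epic/HSD_INPUT_DATA")

template = "cp -r @[HSD_INPUT_DATA]/${inputdir}/INPUT_L127_v2_sfc/* ./INPUT/."

# Replace each @[NAME] token with the matching environment variable.
expanded = re.sub(r"@\[(\w+)\]", lambda m: os.environ.get(m.group(1), ""), template)
print(expanded)
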
16 changes: 3 additions & 13 deletions tests-dev/test_cases/exp_conf/baroclinic_wave.IN
@@ -12,19 +12,9 @@ elif [ $NPX = 769 ]; then
fi

echo "inputdir=$inputdir,NPX=$NPX,NESTED=$NESTED"

if [ ${MACHINE_ID} = hera ]; then
cp /scratch1/NCEPDEV/nems/role.epic/HSD_INPUT_DATA/global_hyblev.l128.txt .
elif [ ${MACHINE_ID} = orion ]; then
cp /work/noaa/epic/role-epic/contrib/HSD_INPUT_DATA/global_hyblev.l128.txt .
elif [ ${MACHINE_ID} = hercules ]; then
cp /work/noaa/epic/role-epic/contrib/HSD_INPUT_DATA/global_hyblev.l128.txt .
elif [ ${MACHINE_ID} = jet ]; then
cp /mnt/lfs5/HFIP/hfv3gfs/role.epic/HSD_INPUT_DATA/global_hyblev.l128.txt .
elif [ ${MACHINE_ID} = gaea ]; then
cp /gpfs/f5/epic/world-shared/HSD_INPUT_DATA/global_hyblev.l128.txt .
elif [ ${MACHINE_ID} = derecho ]; then
cp /glade/work/epicufsrt/contrib/HSD_INPUT_DATA/global_hyblev.l128.txt.
HSD_CASE=${HSD_CASE:-false}
if [ "$HSD_CASE" = "true" ]; then
cp @[HSD_INPUT_DATA]/global_hyblev.l128.txt .
fi

OPNREQ_TEST=${OPNREQ_TEST:-false}
8 changes: 8 additions & 0 deletions tests-dev/ufs_test.yaml
@@ -439,6 +439,14 @@ test_cases_intel:
- 2020_CAPE: {'project':['test_case']}
- 2020_CAPE_C768: {'project':['test_case']}
- baroclinic_wave: {'project':['test_case']}
test_cases_gnu:
build:
compiler: 'gnu'
option: '-DAPP=ATM -DCCPP_SUITES=FV3_GFS_v16,FV3_GFS_v16_flake,FV3_GFS_v17_p8,FV3_GFS_v17_p8_rrtmgp,FV3_GFS_v15_thompson_mynn_lam3km,FV3_WoFS_v0,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_ugwpv1 -D32BIT=ON'
tests:
- 2020_CAPE: {'project':['test_case']}
- 2020_CAPE_C768: {'project':['test_case']}
- baroclinic_wave: {'project':['test_case']}
atm_gnu:
build:
compiler: 'gnu'
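
The new test_cases_gnu block mirrors test_cases_intel: the same three test cases and CCPP suite list, with only the compiler switched to gnu. A minimal sketch of walking the test list from this YAML, assuming the structure shown above and a run from inside tests-dev/; the framework's own iteration lives in ufs_test_utils.get_testcase():

import yaml

with open("ufs_test.yaml", "r") as f:
    rt_yaml = yaml.load(f, Loader=yaml.FullLoader)

# Each top-level key maps to a build block plus an optional list of test entries.
for app, job in rt_yaml.items():
    compiler = job["build"]["compiler"]
    for test in job.get("tests", []):
        for name, opts in test.items():
            print(app, compiler, name, opts.get("project"))
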
13 changes: 8 additions & 5 deletions tests/fv3_conf/fv3_slurm.IN_noaacloud
@@ -12,11 +12,14 @@ set -eux
echo -n " $( date +%s )," > job_timestamp.txt

set +x
MACHINE_ID=noaacloud
module use $( pwd -P )
module use /contrib/EPIC/spack-stack/spack-stack-1.3.0/envs/unified-dev/install/modulefiles/Core
module load stack-intel/2021.3.0 stack-intel-oneapi-mpi/2021.3.0
module load ufs-weather-model-env/unified-dev
export MACHINE_ID=noaacloud
source ./module-setup.sh
if [[ ! -d modulefiles ]]; then
mkdir modulefiles
mv *.lua modulefiles
fi
module use modulefiles
module load modules.fv3
module list

set -x
