Skip to content

Commit

Permalink
Merge branch 'ufs-community:develop' into feature/ursa_port
Browse files Browse the repository at this point in the history
  • Loading branch information
ulmononian authored Nov 15, 2024
2 parents 7d42680 + 6b0f516 commit 2c4ff66
Show file tree
Hide file tree
Showing 90 changed files with 5,249 additions and 4,230 deletions.
2 changes: 1 addition & 1 deletion FV3
2 changes: 1 addition & 1 deletion modulefiles/ufs_gaea.intel.lua
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
help([[
This module loads libraries required for building and running UFS Weather Model
This module loads libraries required for building and running UFS Weather Model
on the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0.
]])

Expand Down
11 changes: 5 additions & 6 deletions modulefiles/ufs_gaea.intelllvm.lua
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
help([[
This module loads libraries required for building and running UFS Weather Model
This module loads libraries required for building and running UFS Weather Model
on the NOAA RDHPC machine Gaea C5 using Intel-2023.1.0.
]])

whatis([===[Loads libraries needed for building the UFS Weather Model on Gaea ]===])

prepend_path("MODULEPATH", "/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
prepend_path("MODULEPATH", "/ncrc/proj/epic/spack-stack/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")

stack_intel_ver=os.getenv("stack_intel_ver") or "2023.1.0"
stack_intel_ver=os.getenv("stack_intel_ver") or "2023.2.0"
load(pathJoin("stack-intel", stack_intel_ver))

stack_cray_mpich_ver=os.getenv("stack_cray_mpich_ver") or "8.1.25"
stack_cray_mpich_ver=os.getenv("stack_cray_mpich_ver") or "8.1.28"
load(pathJoin("stack-cray-mpich", stack_cray_mpich_ver))

stack_python_ver=os.getenv("stack_python_ver") or "3.10.13"
Expand All @@ -27,8 +27,7 @@ load(pathJoin("nccmp", nccmp_ver))
unload("darshan-runtime")
unload("cray-libsci")

unload("intel-classic/2023.1.0")
load("intel-oneapi/2023.1.0")
load("intel-classic/2023.2.0")

setenv("I_MPI_CC", "icx")
setenv("I_MPI_CXX", "icpx")
Expand Down
2 changes: 1 addition & 1 deletion modulefiles/ufs_hera.intelllvm.lua
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ help([[
loads UFS Model prerequisites for Hera/IntelLLVM
]])

prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")

stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0"
load(pathJoin("stack-intel", stack_intel_ver))
Expand Down
2 changes: 1 addition & 1 deletion modulefiles/ufs_hercules.intelllvm.lua
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ help([[
loads UFS Model prerequisites for Hercules/IntelLLVM
]])

prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/hercules/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")

stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0"
load(pathJoin("stack-intel", stack_intel_ver))
Expand Down
20 changes: 12 additions & 8 deletions modulefiles/ufs_noaacloud.intel.lua
Original file line number Diff line number Diff line change
Expand Up @@ -4,23 +4,27 @@ loads UFS Model prerequisites for NOAA Parallelworks/Intel

prepend_path("MODULEPATH", "/contrib/spack-stack-rocky8/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")
prepend_path("MODULEPATH", "/apps/modules/modulefiles")
prepend_path("PATH", "/contrib/EPIC/bin")
load("gnu")
load("stack-intel")
load("stack-intel-oneapi-mpi")

stack_intel_ver=os.getenv("stack_intel_ver") or "2021.10.0"
gnu_ver=os.getenv("gnu_ver") or ""
load(pathJoin("gnu", gnu_ver))

stack_intel_ver=os.getenv("stack_intel_ver") or ""
load(pathJoin("stack-intel", stack_intel_ver))

stack_impi_ver=os.getenv("stack_impi_ver") or "2021.10.0"
load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver))
unload("gnu")
stack_intel_oneapi_mpi_ver=os.getenv("stack_intel_oneapi_mpi_ver") or ""
load(pathJoin("stack-intel-oneapi-mpi", stack_intel_oneapi_mpi_ver))

gnu_ver=os.getenv("gnu_ver") or ""
unload(pathJoin("gnu", gnu_ver))

cmake_ver=os.getenv("cmake_ver") or "3.23.1"
load(pathJoin("cmake", cmake_ver))

load("ufs_common")

nccmp_ver=os.getenv("nccmp_ver") or "1.9.0.1"
load(pathJoin("nccmp", nccmp_ver))

setenv("CC", "mpiicc")
setenv("CXX", "mpiicpc")
setenv("FC", "mpiifort")
Expand Down
2 changes: 1 addition & 1 deletion modulefiles/ufs_orion.intelllvm.lua
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ help([[
loads UFS Model prerequisites for OrionLLVM/Intel
]])

prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/upp-addon-env/install/modulefiles/Core")
prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/fms-2024.01/install/modulefiles/Core")

stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0"
load(pathJoin("stack-intel", stack_intel_ver))
Expand Down
18 changes: 9 additions & 9 deletions tests-dev/baseline_setup.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -54,15 +54,15 @@ jet:
QUEUE: batch
COMPILE_QUEUE: batch
PARTITION: xjet
dprefix: /mnt/lfs4/HFIP/hfv3gfs/${USER}
DISKNM: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT
STMP: /mnt/lfs4/HFIP/hfv3gfs/${USER}/RT_BASELINE
PTMP: /mnt/lfs4/HFIP/hfv3gfs/${USER}/RT_RUNDIRS
dprefix: /lfs5/HFIP/hfv3gfs/${USER}
DISKNM: /lfs5/HFIP/hfv3gfs/role.epic/RT
STMP: /lfs5/HFIP/hfv3gfs/${USER}/RT_BASELINE
PTMP: /lfs5/HFIP/hfv3gfs/${USER}/RT_RUNDIRS
RUNDIR_ROOT:
SCHEDULER: slurm
INPUTDATA_ROOT: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /mnt/lfs4/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/BM_IC-20220207
INPUTDATA_ROOT: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501
INPUTDATA_ROOT_WW3: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/input-data-20240501/WW3_input_data_20240214
INPUTDATA_ROOT_BMIC: /lfs5/HFIP/hfv3gfs/role.epic/RT/NEMSfv3gfs/BM_IC-20220207
derecho:
QUEUE: main
COMPILE_QUEUE: main
Expand All @@ -82,8 +82,8 @@ noaacloud:
PARTITION:
dprefix: /lustre
DISKNM: /contrib/ufs-weather-model/RT
STMP: /lustre/stmp4
PTMP: /lustre/stmp2
STMP: /lustre/stmp
PTMP: /lustre/stmp
RUNDIR_ROOT:
SCHEDULER: slurm
INPUTDATA_ROOT: /contrib/ufs-weather-model/RT/NEMSfv3gfs/input-data-20240501
Expand Down
153 changes: 89 additions & 64 deletions tests-dev/create_log.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,27 @@
import subprocess
import yaml
from datetime import datetime
#import datetime
from ufs_test_utils import get_testcase, write_logfile, delete_files, machine_check_off

def get_timestamps(path):
    """Obtain experiment starting and ending time marks through file timestamps.

    Scans the entries directly inside *path* and uses their modification
    times as a proxy for when the experiment started (oldest entry) and
    finished (newest entry).

    Args:
        path (str): experiment log directory; a trailing separator is
            accepted but no longer required (paths are joined with
            os.path.join instead of raw string concatenation).
    Returns:
        tuple[str, str]: stringified datetimes of the earliest and latest
        modification time found in the directory.
    Raises:
        IndexError: if the directory is empty (no timestamps to report).
        FileNotFoundError: if *path* does not exist.
    """
    # Sort all mtimes once; the extremes give the start/end marks.
    stamps = sorted(
        datetime.fromtimestamp(os.path.getmtime(os.path.join(path, entry)))
        for entry in os.listdir(path)
    )
    return str(stamps[0]), str(stamps[-1])

def finish_log():
"""Collects regression test results and generates log file.
"""Collect regression test results and generate log file.
"""
UFS_TEST_YAML = str(os.getenv('UFS_TEST_YAML'))
PATHRT = os.getenv('PATHRT')
Expand Down Expand Up @@ -40,40 +57,46 @@ def finish_log():
COMPILE_ID = apps
COMPILE_LOG = 'compile_'+COMPILE_ID+'.log'
COMPILE_LOG_TIME ='compile_'+COMPILE_ID+'_timestamp.txt'
with open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG) as f:
if "[100%] Linking Fortran executable" in f.read():
COMPILE_PASS += 1
f.seek(0)
for line in f:
if 'export RUNDIR_ROOT=' in line:
RUNDIR_ROOT=line.split("=")[1]
break
compile_err = RUNDIR_ROOT.strip('\n')+'/compile_'+COMPILE_ID+'/err'
with open(compile_err) as ferr:
contents = ferr.read()
count_warning = contents.count(": warning #")
count_remarks = contents.count(": remark #")
ferr.close()
warning_log = ""
if count_warning > 0:
warning_log = "("+str(count_warning)+" warnings"
if count_remarks > 0:
warning_log+= ","+str(count_remarks)+" remarks)"
flog = open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG_TIME)
timing_data = flog.read()
first_line = timing_data.split('\n', 1)[0]
etime = int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip())
btime = int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip())
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
btime_min, btime_sec = divmod(int(btime), 60)
btime_min = f"{btime_min:02}"; btime_sec = f"{btime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+btime_min+':'+btime_sec+"]"
flog.close()
compile_log = "PASS -- COMPILE "+COMPILE_ID+time_log+warning_log+"\n"
else:
compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
f.close()
COMPILE_CHECK1 ='Compile '+COMPILE_ID+' Completed'
COMPILE_CHECK2 ='[100%] Linking Fortran executable'
try:
with open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG) as f:
if COMPILE_CHECK1 in f.read() or COMPILE_CHECK2 in f.read():
COMPILE_PASS += 1
f.seek(0)
for line in f:
if 'export RUNDIR_ROOT=' in line:
RUNDIR_ROOT=line.split("=")[1]
break
compile_err = RUNDIR_ROOT.strip('\n')+'/compile_'+COMPILE_ID+'/err'
with open(compile_err) as ferr:
contents = ferr.read()
count_warning = contents.count(": warning #")
count_remarks = contents.count(": remark #")
ferr.close()
warning_log = ""
if count_warning > 0:
warning_log = "("+str(count_warning)+" warnings"
if count_remarks > 0:
warning_log+= ","+str(count_remarks)+" remarks)"
flog = open('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG_TIME)
timing_data = flog.read()
first_line = timing_data.split('\n', 1)[0]
etime = int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip())
btime = int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip())
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
btime_min, btime_sec = divmod(int(btime), 60)
btime_min = f"{btime_min:02}"; btime_sec = f"{btime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+btime_min+':'+btime_sec+"]"
flog.close()
compile_log = "PASS -- COMPILE "+COMPILE_ID+time_log+warning_log+"\n"
else:
compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
f.close()
except FileNotFoundError:
compile_log = "FAIL -- COMPILE "+COMPILE_ID+"\n"
print('./logs/log_'+MACHINE_ID+'/'+COMPILE_LOG+': does not exist')
run_logs += compile_log
else:
PASS_TESTS = True
Expand All @@ -94,47 +117,49 @@ def finish_log():
PASS_CHECK = 'Test '+TEST_ID+' PASS'
MAXS_CHECK = 'The maximum resident set size (KB)'
pass_flag = False
create_dep_flag = False
if (CREATE_BASELINE == 'true' and not DEP_RUN == ""):
create_dep_flag = True
if not create_dep_flag:
try:
with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
if PASS_CHECK in f.read():
pass_flag = True
f.close()
if pass_flag:
f = open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG_TIME)
timing_data = f.read()
first_line = timing_data.split('\n', 1)[0]
etime = str(int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip()))
rtime = str(int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip()))
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
rtime_min, rtime_sec = divmod(int(rtime), 60)
rtime_min = f"{rtime_min:02}"; rtime_sec = f"{rtime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+rtime_min+':'+rtime_sec+"]"
f.close()
with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
if pass_flag :
f.close()
except FileNotFoundError:
print('./logs/log_'+MACHINE_ID+'/'+TEST_LOG+': does not exist')
if pass_flag:
f = open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG_TIME)
timing_data = f.read()
first_line = timing_data.split('\n', 1)[0]
etime = str(int(first_line.split(",")[4].strip()) - int(first_line.split(",")[1].strip()))
rtime = str(int(first_line.split(",")[3].strip()) - int(first_line.split(",")[2].strip()))
etime_min, etime_sec = divmod(int(etime), 60)
etime_min = f"{etime_min:02}"; etime_sec = f"{etime_sec:02}"
rtime_min, rtime_sec = divmod(int(rtime), 60)
rtime_min = f"{rtime_min:02}"; rtime_sec = f"{rtime_sec:02}"
time_log = " ["+etime_min+':'+etime_sec+', '+rtime_min+':'+rtime_sec+"]"
f.close()
if pass_flag :
with open('./logs/log_'+MACHINE_ID+'/'+TEST_LOG) as f:
rtlog_file = f.readlines()
for line in rtlog_file:
if MAXS_CHECK in line:
memsize= line.split('=')[1].strip()
test_log = 'PASS -- TEST '+TEST_ID+time_log+' ('+memsize+' MB)\n'
PASS_NR += 1
else:
test_log = 'FAIL -- TEST '+TEST_ID+'\n'
failed_list.append(TEST_NAME+' '+RT_COMPILER)
FAIL_NR += 1
run_logs += test_log
f.close()
f.close()
else:
test_log = 'FAIL -- TEST '+TEST_ID+'\n'
failed_list.append(TEST_NAME+' '+RT_COMPILER)
FAIL_NR += 1
run_logs += test_log
run_logs += '\n'
write_logfile(filename, "a", output=run_logs)

TEST_START_TIME = os.getenv('TEST_START_TIME')
TEST_END_TIME = os.getenv('TEST_END_TIME')
start_time = datetime.strptime(TEST_START_TIME, "%Y%m%d %H:%M:%S")
end_time = datetime.strptime(TEST_END_TIME, "%Y%m%d %H:%M:%S")
TEST_START_TIME, TEST_END_TIME = get_timestamps('./logs/log_'+MACHINE_ID+'/')

clean_START_TIME= TEST_START_TIME.split('.')[0]
start_time = datetime.strptime(clean_START_TIME, "%Y-%m-%d %H:%M:%S")
clean_END_TIME= TEST_END_TIME.split('.')[0]
end_time = datetime.strptime(clean_END_TIME, "%Y-%m-%d %H:%M:%S")

hours, remainder= divmod((end_time - start_time).total_seconds(), 3600)
minutes, seconds= divmod(remainder, 60)
hours = int(hours); minutes=int(minutes); seconds =int(seconds)
Expand Down
6 changes: 4 additions & 2 deletions tests-dev/create_xml.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ def rocoto_create_entries(RTPWD,MACHINE_ID,INPUTDATA_ROOT,INPUTDATA_ROOT_WW3,INP
<!ENTITY INPUTDATA_ROOT "{INPUTDATA_ROOT}">
<!ENTITY INPUTDATA_ROOT_WW3 "{INPUTDATA_ROOT_WW3}">
<!ENTITY INPUTDATA_ROOT_BMIC "{INPUTDATA_ROOT_BMIC}">
<!ENTITY INPUTDATA_LM4 "{INPUTDATA_ROOT}/LM4_input_data">
<!ENTITY RUNDIR_ROOT "{RUNDIR_ROOT}">
<!ENTITY NEW_BASELINE "{NEW_BASELINE}">
]>
Expand Down Expand Up @@ -195,6 +196,7 @@ def write_runtest_env():
export INPUTDATA_ROOT={INPUTDATA_ROOT}
export INPUTDATA_ROOT_WW3={INPUTDATA_ROOT_WW3}
export INPUTDATA_ROOT_BMIC={INPUTDATA_ROOT_BMIC}
export INPUTDATA_LM4={INPUTDATA_ROOT}/LM4_input_data
export PATHRT={PATHRT}
export PATHTR={PATHTR}
export NEW_BASELINE={NEW_BASELINE}
Expand All @@ -216,8 +218,8 @@ def write_runtest_env():
export RTVERBOSE=false
"""
if ( MACHINE_ID == 'jet' ):
runtest_envs+="export PATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/bin:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/bin:$PATH"
runtest_envs+="export PYTHONPATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/lib/python3.8/site-packages:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/lib/python3.8/site-packages"
runtest_envs += f"export PATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/bin:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/bin:$PATH\n"
runtest_envs += f"export PYTHONPATH=/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/envs/ufs-weather-model/lib/python3.8/site-packages:/lfs4/HFIP/hfv3gfs/software/miniconda3/4.8.3/lib/python3.8/site-packages\n"

with open(filename,"w+") as f:
f.writelines(runtest_envs)
Expand Down
Loading

0 comments on commit 2c4ff66

Please sign in to comment.