diff --git a/jobs/rocoto/anal.sh b/jobs/rocoto/anal.sh
index cb07749b9e..d99152ef19 100755
--- a/jobs/rocoto/anal.sh
+++ b/jobs/rocoto/anal.sh
@@ -4,43 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source FV3GFS workflow modules
-#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-#Source appropriate modulefiles based on machine
-
-source "${HOMEgfs}/ush/detect_machine.sh"
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-    # Source FV3GFS workflow modules
-    . "${HOMEgfs}"/ush/load_fv3gfs_modules.sh
-    status=$?
-    [[ ${status} -ne 0 ]] && exit "${status}"
-else
-    # Append compiler (only on machines that have multiple compilers)
-    COMPILER=${COMPILER:-"intel"}
-    if [[ "${MACHINE_ID}" = "hera" ]] || [[ "${MACHINE_ID}" = "cheyenne" ]]; then
-        MACHINE_ID=${MACHINE_ID}.${COMPILER}
-    fi
-
-    # Source machine specific GSI-EnKF modules
-    set +x
-    source "${HOMEgfs}/ush/module-setup.sh"
-    module use "${HOMEgfs}/sorc/gsi_enkf.fd/modulefiles"
-    module load gsi_"${MACHINE_ID}"
-
-    if [[ "${MACHINE_ID}" = "orion" ]]; then
-        module load miniconda3/4.12.0
-        set +u
-        conda activate regional_workflow_cmaq
-        set_strict
-    fi
-
-    module list
-    unset MACHINE_ID
-    set_trace
-fi
+. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 export job="anal"
 export jobid="${job}.$$"
diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh
index 46a25ac759..b202015149 100755
--- a/jobs/rocoto/efcs.sh
+++ b/jobs/rocoto/efcs.sh
@@ -4,24 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source FV3GFS workflow modules
-#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-source "${HOMEgfs}/ush/detect_machine.sh"
-set +x
-source "${HOMEgfs}/ush/module-setup.sh"
-module use "${HOMEgfs}/sorc/ufs_model.fd/tests"
-module load modules.ufs_model.lua
-# Workflow needs utilities from prod_util (setPDY.sh, ndate, etc.)
-module load prod_util
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-    module load cray-pals
-fi
-module list
-unset MACHINE_ID
-set_trace
+. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 export job="efcs"
 export jobid="${job}.$$"
diff --git a/jobs/rocoto/eobs.sh b/jobs/rocoto/eobs.sh
index 4f813a8d84..95fa42cb08 100755
--- a/jobs/rocoto/eobs.sh
+++ b/jobs/rocoto/eobs.sh
@@ -4,43 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source FV3GFS workflow modules
-#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-#Source appropriate modulefiles based on machine
-
-source "${HOMEgfs}/ush/detect_machine.sh"
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-    # Source FV3GFS workflow modules
-    . "${HOMEgfs}"/ush/load_fv3gfs_modules.sh
-    status=$?
-    [[ ${status} -ne 0 ]] && exit "${status}"
-else
-    # Append compiler (only on machines that have multiple compilers)
-    COMPILER=${COMPILER:-"intel"}
-    if [[ "${MACHINE_ID}" = "hera" ]] || [[ "${MACHINE_ID}" = "cheyenne" ]]; then
-        MACHINE_ID=${MACHINE_ID}.${COMPILER}
-    fi
-
-    # Source machine specific GSI-EnKF modules
-    set +x
-    source "${HOMEgfs}/ush/module-setup.sh"
-    module use "${HOMEgfs}/sorc/gsi_enkf.fd/modulefiles"
-    module load gsi_"${MACHINE_ID}"
-
-    if [[ "${MACHINE_ID}" = "orion" ]]; then
-        module load miniconda3/4.12.0
-        set +u
-        conda activate regional_workflow_cmaq
-        set_strict
-    fi
-
-    module list
-    unset MACHINE_ID
-    set_trace
-fi
+. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 export job="eobs"
 export jobid="${job}.$$"
diff --git a/jobs/rocoto/eupd.sh b/jobs/rocoto/eupd.sh
index 9ddde92d36..3ed028f87a 100755
--- a/jobs/rocoto/eupd.sh
+++ b/jobs/rocoto/eupd.sh
@@ -4,43 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source FV3GFS workflow modules
-#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-#Source appropriate modulefiles based on machine
-
-source "${HOMEgfs}/ush/detect_machine.sh"
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-    # Source FV3GFS workflow modules
-    . "${HOMEgfs}"/ush/load_fv3gfs_modules.sh
-    status=$?
-    [[ ${status} -ne 0 ]] && exit "${status}"
-else
-    # Append compiler (only on machines that have multiple compilers)
-    COMPILER=${COMPILER:-"intel"}
-    if [[ "${MACHINE_ID}" = "hera" ]] || [[ "${MACHINE_ID}" = "cheyenne" ]]; then
-        MACHINE_ID=${MACHINE_ID}.${COMPILER}
-    fi
-
-    # Source machine specific GSI-EnKF modules
-    set +x
-    source "${HOMEgfs}/ush/module-setup.sh"
-    module use "${HOMEgfs}/sorc/gsi_enkf.fd/modulefiles"
-    module load gsi_"${MACHINE_ID}"
-
-    if [[ "${MACHINE_ID}" = "orion" ]]; then
-        module load miniconda3/4.12.0
-        set +u
-        conda activate regional_workflow_cmaq
-        set_strict
-    fi
-
-    module list
-    unset MACHINE_ID
-    set_trace
-fi
+. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 export job="eupd"
 export jobid="${job}.$$"
diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh
index 9d59f70bd8..daefd6c449 100755
--- a/jobs/rocoto/fcst.sh
+++ b/jobs/rocoto/fcst.sh
@@ -4,57 +4,9 @@ source "${HOMEgfs}/ush/preamble.sh"
 
 ###############################################################
 # Source FV3GFS workflow modules
-#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
-#status=$?
-#[[ ${status} -ne 0 ]] && exit ${status}
-
-# TODO: clean this up
-source "${HOMEgfs}/ush/detect_machine.sh"
-set +x
-source "${HOMEgfs}/ush/module-setup.sh"
-if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
-    module use "${HOMEgfs}/sorc/ufs_model.fd/tests"
-    module load modules.ufs_model.lua
-    module load prod_util
-fi
-
-if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-    module load cray-pals
-fi
-if [[ "${MACHINE_ID}" = "hera" ]]; then
-    module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core"
-    module load "miniconda3/4.6.14"
-    module load "gfs_workflow/1.0.0"
-# TODO: orion and wcoss2 will be uncommented when they are ready. This comment block will be removed in the next PR
-#elif [[ "${MACHINE_ID}" = "orion" ]]; then
-#    module use "/home/rmahajan/opt/global-workflow/modulefiles/core"
-#    module load "python/3.7.5"
-#    module load "gfs_workflow/1.0.0"
-#elif [[ "${MACHINE_ID}" = "wcoss2" ]]; then
-#    module load "python/3.7.5"
-fi
-if [[ "${MACHINE_ID}" == "noaacloud" ]]; then
-    if [[ "${PW_CSP:-}" = "aws" ]]; then
-
-        # TODO: This can be cleaned-up; most of this is a hack for now.
-        module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core"
-        module load "stack-intel"
-        module load "stack-intel-oneapi-mpi"
-        module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/"
-        module load "py39_4.12.0"
-        module load "ufs-weather-model-env/1.0.0"
-        export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0"
-        # TODO: Are there plans for EPIC to maintain this package or should GW provide support?
-        export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util"
-        export PATH="${PATH}:/contrib/global-workflow/bin"
-        ndate_path="$(command -v ndate)"
-        export NDATE="${ndate_path}"
-    fi
-fi
-
-module list
-unset MACHINE_ID
-set_trace
+. ${HOMEgfs}/ush/load_fv3gfs_modules.sh
+status=$?
+[[ ${status} -ne 0 ]] && exit ${status}
 
 ###############################################################
 # exglobal_forecast.py requires the following in PYTHONPATH
diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh
index f90be861d9..dfb541abb6 100755
--- a/jobs/rocoto/prep.sh
+++ b/jobs/rocoto/prep.sh
@@ -94,7 +94,6 @@ if [[ ${MAKE_PREPBUFR} = "YES" ]]; then
     fi
 
     export job="j${CDUMP}_prep_${cyc}"
-    export DATAROOT="${RUNDIR}/${CDATE}/${CDUMP}/prepbufr"
     export COMIN=${COM_OBS}
     export COMOUT=${COM_OBS}
     RUN="gdas" YMD=${PDY} HH=${cyc} generate_com -rx COMINgdas:COM_ATMOS_HISTORY_TMPL
diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua
index 7464ac7bec..92cbce9ba0 100644
--- a/modulefiles/module_base.hera.lua
+++ b/modulefiles/module_base.hera.lua
@@ -2,46 +2,49 @@ help([[
 Load environment to run GFS on Hera
 ]])
 
-prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/Alexander.Richert/spack-stack-1.4.1-gw/envs/gw/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles")
 
-load(pathJoin("hpc", os.getenv("hpc_ver")))
-load(pathJoin("hpc-intel", os.getenv("hpc_intel_ver")))
-load(pathJoin("hpc-impi", os.getenv("hpc_impi_ver")))
+load(pathJoin("stack-intel", os.getenv("stack_intel_ver")))
+load(pathJoin("stack-intel-oneapi-mpi", os.getenv("stack_impi_ver")))
+load(pathJoin("stack-python", os.getenv("stack_python_ver")))
+load(pathJoin("miniconda", os.getenv("miniconda_ver")))
 
 load(pathJoin("hpss", os.getenv("hpss_ver")))
 load(pathJoin("gempak", os.getenv("gempak_ver")))
 load(pathJoin("ncl", os.getenv("ncl_ver")))
 load(pathJoin("jasper", os.getenv("jasper_ver")))
-load(pathJoin("png", os.getenv("libpng_ver")))
+load(pathJoin("libpng", os.getenv("libpng_ver")))
 load(pathJoin("cdo", os.getenv("cdo_ver")))
 load(pathJoin("R", os.getenv("R_ver")))
 
 load(pathJoin("hdf5", os.getenv("hdf5_ver")))
-load(pathJoin("netcdf", os.getenv("netcdf_ver")))
+load(pathJoin("netcdf-c", os.getenv("netcdf_c_ver")))
+load(pathJoin("netcdf-fortran", os.getenv("netcdf_fortran_ver")))
 
 load(pathJoin("nco", os.getenv("nco_ver")))
-load(pathJoin("prod_util", os.getenv("prod_util_ver")))
-load(pathJoin("grib_util", os.getenv("grib_util_ver")))
+load(pathJoin("prod-util", os.getenv("prod_util_ver")))
os.getenv("prod_util_ver"))) +load(pathJoin("grib-util", os.getenv("grib_util_ver"))) load(pathJoin("g2tmpl", os.getenv("g2tmpl_ver"))) -load(pathJoin("ncdiag", os.getenv("ncdiag_ver"))) +load(pathJoin("gsi-ncdiag", os.getenv("gsi_ncdiag_ver"))) load(pathJoin("crtm", os.getenv("crtm_ver"))) +load(pathJoin("bufr", os.getenv("bufr_ver"))) load(pathJoin("wgrib2", os.getenv("wgrib2_ver"))) +load(pathJoin("py-netcdf4", os.getenv("py_netcdf4_ver"))) +load(pathJoin("py-pyyaml", os.getenv("py_pyyaml_ver"))) +load(pathJoin("py-jinja2", os.getenv("py_jinja2_ver"))) + +load(pathJoin("met", os.getenv("met_ver"))) +load(pathJoin("metplus", os.getenv("metplus_ver"))) + setenv("WGRIB2","wgrib2") +setenv("UTILROOT",os.getenv("prod_util_ROOT")) --prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. os.getenv("prepobs_run_ver"), "modulefiles")) prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) load(pathJoin("prepobs", os.getenv("prepobs_run_ver"))) -prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. os.getenv("fit2obs_ver"), "modulefiles")) +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles")) load(pathJoin("fit2obs", os.getenv("fit2obs_ver"))) --- Temporary until official hpc-stack is updated -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-intel", "18.0.5.274")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("gfs_workflow", "1.0.0")) -load(pathJoin("met", "9.1")) -load(pathJoin("metplus", "3.1")) - whatis("Description: GFS run environment") diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua index f4b62a5fd2..cce41f34da 100644 --- a/modulefiles/module_gwci.hera.lua +++ b/modulefiles/module_gwci.hera.lua @@ -2,14 +2,15 @@ help([[ Load environment to run GFS workflow setup scripts on Hera ]]) -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/nems/Alexander.Richert/spack-stack-1.4.1-gw/envs/gw/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") -load(pathJoin("hpc", "1.1.0")) -load(pathJoin("hpc-intel", "18.0.5.274")) -load(pathJoin("hpc-impi", "2018.0.4")) +load(pathJoin("stack-intel", os.getenv("2021.5.0"))) +load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1"))) -load(pathJoin("netcdf","4.7.4")) -load(pathJoin("nccmp","1.8.7.0")) +load(pathJoin("netcdf-c", os.getenv("4.9.2"))) +load(pathJoin("netcdf-fortran", os.getenv("4.6.0"))) +load(pathJoin("nccmp","1.9.0.1")) load(pathJoin("wgrib2", "2.0.8")) whatis("Description: GFS run setup CI environment") diff --git a/modulefiles/module_gwsetup.hera.lua b/modulefiles/module_gwsetup.hera.lua index a07b32b6a6..0858353669 100644 --- a/modulefiles/module_gwsetup.hera.lua +++ b/modulefiles/module_gwsetup.hera.lua @@ -4,10 +4,16 @@ Load environment to run GFS workflow setup scripts on Hera load(pathJoin("rocoto")) --- Temporary until official hpc-stack is updated -prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") -load(pathJoin("hpc", "1.2.0")) -load(pathJoin("hpc-miniconda3", "4.6.14")) -load(pathJoin("gfs_workflow", "1.0.0")) +prepend_path("MODULEPATH", 
"/scratch1/NCEPDEV/nems/Alexander.Richert/spack-stack-1.4.1-gw/envs/gw/install/modulefiles/Core") +prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulefiles") + +local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.5.0" +local stack_python_ver=os.getenv("stack_python_ver") or "3.9.12" + +load(pathJoin("stack-intel", stack_intel_ver)) +load(pathJoin("stack-python", stack_python_ver)) +load("py-jinja2") +load("py-pyyaml") +load("py-numpy") whatis("Description: GFS run setup environment") diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn index 7140ef477b..b6cf086fbb 100644 --- a/parm/config/gefs/config.base.emc.dyn +++ b/parm/config/gefs/config.base.emc.dyn @@ -82,7 +82,7 @@ export VERBOSE="YES" export KEEPDATA="NO" export CHGRP_RSTPROD="@CHGRP_RSTPROD@" export CHGRP_CMD="@CHGRP_CMD@" -export NCDUMP="${NETCDF}/bin/ncdump" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" export NCLEN="${HOMEgfs}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts @@ -126,7 +126,7 @@ export SENDCOM=${SENDCOM:-"YES"} export SENDSDM=${SENDSDM:-"NO"} export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} export SENDDBN=${SENDDBN:-"NO"} -export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +export DBNROOT=${DBNROOT:-${UTILROOT:-}/fakedbn} # APP settings export APP=@APP@ diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn index db4cd22e7f..2c38fd20db 100644 --- a/parm/config/gfs/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -86,7 +86,7 @@ export VERBOSE="YES" export KEEPDATA="NO" export CHGRP_RSTPROD="@CHGRP_RSTPROD@" export CHGRP_CMD="@CHGRP_CMD@" -export NCDUMP="${NETCDF:-}/bin/ncdump" +export NCDUMP="${NETCDF:-${netcdf_c_ROOT:-}}/bin/ncdump" export NCLEN="${HOMEgfs}/ush/getncdimlen" # Machine environment, jobs, and other utility scripts diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh index 3aa1093fad..2322f8c93f 100755 --- a/scripts/exglobal_diag.sh +++ b/scripts/exglobal_diag.sh @@ -35,7 +35,7 @@ export NMV=${NMV:-"/bin/mv"} export NLN=${NLN:-"/bin/ln -sf"} export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} -export CATEXEC=${CATEXEC:-$ncdiag_ROOT/bin/ncdiag_cat_serial.x} +export CATEXEC=${CATEXEC:-${ncdiag_ROOT:-${gsi_ncdiag_ROOT}}/bin/ncdiag_cat_serial.x} COMPRESS=${COMPRESS:-gzip} UNCOMPRESS=${UNCOMPRESS:-gunzip} APRUNCFP=${APRUNCFP:-""} diff --git a/sorc/checkout.sh b/sorc/checkout.sh old mode 100755 new mode 100644 index 9c9addad1d..eb195a2b1a --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -150,7 +150,7 @@ source "${topdir}/../workflow/gw_setup.sh" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" ; errs=$((errs + $?)) -checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "a283262" ; errs=$((errs + $?)) +checkout "gfs_utils.fd" "https://github.com/davidhuber-noaa/gfs-utils" "feature/spack-stack" ; errs=$((errs + $?)) checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" ; errs=$((errs + $?)) checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-4d05445}" ; errs=$((errs + $?)) checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) @@ -164,8 +164,8 @@ if [[ ${checkout_gdas} == "YES" ]]; then fi if [[ ${checkout_gsi} == "YES" || 
${checkout_gdas} == "YES" ]]; then - checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) - checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?)) + checkout "gsi_utils.fd" "https://github.com/davidhuber-noaa/GSI-Utils.git" "feature/spack-stack"; errs=$((errs + $?)) + checkout "gsi_monitor.fd" "https://github.com/davidhuber-noaa/GSI-Monitor.git" "spack-stack"; errs=$((errs + $?)) fi if (( errs > 0 )); then diff --git a/ush/getncdimlen b/ush/getncdimlen index 5d230f6cc3..fcf231947b 100755 --- a/ush/getncdimlen +++ b/ush/getncdimlen @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # getncdimlen # cory.r.martin@noaa.gov # 2019-10-17 diff --git a/ush/gsi_utils.py b/ush/gsi_utils.py index 94a2ae1348..8088cb7233 100644 --- a/ush/gsi_utils.py +++ b/ush/gsi_utils.py @@ -78,7 +78,7 @@ def get_ncdims(ncfile): try: import netCDF4 as nc except ImportError as err: - raise ImportError(f"Unable to import netCDF4 module\n{err}") + raise ImportError(f"Unable to import netCDF4 module") ncf = nc.Dataset(ncfile) ncdims = {} for d in ncf.dimensions.keys(): diff --git a/versions/build.hera.ver b/versions/build.hera.ver index dcc30aee76..0f7f7bb053 100644 --- a/versions/build.hera.ver +++ b/versions/build.hera.ver @@ -1,19 +1,19 @@ -export hpc_ver=1.1.0 -export hpc_intel_ver=18.0.5.274 -export hpc_impi_ver=2018.0.4 +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 -export cmake_ver=3.20.1 +export cmake_ver=3.23.1 export gempak_ver=7.4.2 -export jasper_ver=2.0.25 -export libpng_ver=1.6.35 +export jasper_ver=2.0.32 +export libpng_ver=1.6.37 export zlib_ver=1.2.11 export esmf_ver=8.3.0b09 export pio_ver=2.5.2 export fms_ver=2022.04 -export hdf5_ver=1.10.6 -export netcdf_ver=4.7.4 +export hdf5_ver=1.14.0 +export netcdf_c_ver=4.9.2 +export netcdf_fortran_ver=4.6.0 export bacio_ver=2.4.1 export w3nco_ver=2.4.1 @@ -24,11 +24,11 @@ export bufr_ver=11.7.0 export g2_ver=3.4.5 export sp_ver=2.3.3 export ip_ver=3.3.3 -export wrf_io_ver=1.2.0 -export ncio_ver=1.1.2 -export ncdiag_ver=1.0.0 -export g2tmpl_ver=1.10.0 +export gsi_ncdiag_ver=1.1.2 +export g2tmpl_ver=1.10.2 export crtm_ver=2.4.0 export wgrib2_ver=2.0.8 - -export upp_ver=10.0.8 +export py_netcdf4_ver=1.5.3 +export py_pyyaml_ver=6.0 +export py_jinja2_ver=3.1.2 +export fit2obs_ver=1.0.0 diff --git a/versions/run.hera.ver b/versions/run.hera.ver index 471e019dcd..5b730013af 100644 --- a/versions/run.hera.ver +++ b/versions/run.hera.ver @@ -1,24 +1,27 @@ -export hpc_ver=1.1.0 -export hpc_intel_ver=18.0.5.274 -export hpc_impi_ver=2018.0.4 +export stack_intel_ver=2021.5.0 +export stack_impi_ver=2021.5.1 +export stack_python_ver=3.9.12 +export miniconda_ver=3.9.12 export hpss_ver=hpss export gempak_ver=7.4.2 export ncl_ver=6.6.2 -export jasper_ver=2.0.25 -export libpng_ver=1.6.35 -export cdo_ver=1.9.5 +export jasper_ver=2.0.32 +export libpng_ver=1.6.37 +export cdo_ver=2.0.5 export R_ver=3.5.0 -export hdf5_ver=1.10.6 -export netcdf_ver=4.7.4 +export hdf5_ver=1.14.0 +export netcdf_c_ver=4.9.2 +export netcdf_fortran_ver=4.6.0 -export nco_ver=4.9.1 +export nco_ver=5.0.6 export prod_util_ver=1.2.2 -export grib_util_ver=1.2.2 -export g2tmpl_ver=1.10.0 -export ncdiag_ver=1.0.0 +export grib_util_ver=1.2.3 +export g2tmpl_ver=1.10.2 +export gsi_ncdiag_ver=1.1.2 export crtm_ver=2.4.0 +export bufr_ver=11.7.0 export wgrib2_ver=2.0.8 export obsproc_run_ver=1.1.2