From 235d597113fe2af04cf58c3aa7ac8a6892e2fc64 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Thu, 6 Oct 2022 17:21:56 -0400 Subject: [PATCH 1/9] Remove trace from link script (#1046) Removes the trace from the link script. Since this makes the script silent (including some errors), some messages are added to give feedback about the scripts success or failure. UFS Utils' [link_fixdirs.sh](https://github.com/ufs-community/UFS_UTILS/blob/develop/fix/link_fixdirs.sh) script turns the trace on when it runs, so its STDERR is thrown away. Also corrects a bug where the case of `$RUN_ENVIR` set did not match the case it was compared against. Fixes #1044 --- sorc/link_workflow.sh | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 348d23595d..1d529d3ca2 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -2,6 +2,8 @@ #--make symbolic links for EMC installation and hardcopies for NCO delivery +trap 'echo "${BASH_SOURCE[0]} encounted an error at line ${LINENO} (rc=$?)"' ERR + function usage() { cat << EOF Builds all of the global-workflow components by calling the individual build @@ -15,9 +17,10 @@ Usage: ${BASH_SOURCE[0]} [-h][-o] EOF exit 1 } -set -eux -RUN_ENVIR="EMC" +set -eu + +RUN_ENVIR="emc" # Reset option counter in case this script is sourced OPTIND=1 @@ -26,7 +29,7 @@ while getopts ":ho" option; do h) usage ;; o) echo "-o option received, configuring for NCO" - RUN_ENVIR="NCO";; + RUN_ENVIR="nco";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" usage @@ -105,7 +108,7 @@ done if [ -d "${script_dir}/ufs_utils.fd" ]; then cd "${script_dir}/ufs_utils.fd/fix" || exit 1 - ./link_fixdirs.sh "${RUN_ENVIR}" "${machine}" + ./link_fixdirs.sh "${RUN_ENVIR}" "${machine}" 2> /dev/null fi @@ -420,6 +423,7 @@ if [[ "${RUN_ENVIR}" == "nco" ]] ; then fi #------------------------------ +echo "${BASH_SOURCE[0]} completed successfully" exit 0 From 
9b3fa14ec9df6697b9de76f8a536ceff73358935 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Thu, 6 Oct 2022 17:22:45 -0400 Subject: [PATCH 2/9] Fix group number being treated as octal in gdas arch (#1053) The group number was being treated as an octal in gdas archive job, resulting in errors for being out-of-range when more than 7 groups were used. Fixes #1032 --- jobs/rocoto/earc.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/jobs/rocoto/earc.sh b/jobs/rocoto/earc.sh index a3814a550d..77f517fde7 100755 --- a/jobs/rocoto/earc.sh +++ b/jobs/rocoto/earc.sh @@ -32,7 +32,7 @@ done export COMPONENT=${COMPONENT:-atmos} -n=$((ENSGRP)) +n=$((10#${ENSGRP})) # ICS are restarts and always lag INC by $assim_freq hours. EARCINC_CYC=$ARCH_CYC @@ -63,7 +63,7 @@ source "${HOMEgfs}/ush/file_utils.sh" ################################################################### # ENSGRP > 0 archives a group of ensemble members firstday=$($NDATE +24 $SDATE) -if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then +if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then #--set the archiving command and create local directories, if necessary TARCMD="htar" From 8cb27c6e48437f28ffb965e0043dc00e90a42a31 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Thu, 6 Oct 2022 17:23:22 -0400 Subject: [PATCH 3/9] Add GDAS to the partial build list (#1050) When the GDAS app was added to the workflow, the corresponding build setting was not added to partial_build and the build configuration file. This means that after `build_all.sh` was updated to correct syntax issues, the build would fail because `$Build_gdas` was undefined. Note: the GDAS app still does not build currently due to unrelated problems within the gdas repo. 
Refs #1043 --- sorc/gfs_build.cfg | 1 + sorc/partial_build.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg index 1a436842a1..c56db1f71f 100644 --- a/sorc/gfs_build.cfg +++ b/sorc/gfs_build.cfg @@ -6,6 +6,7 @@ Building gsi_enkf (gsi_enkf) .......................... yes Building gsi_utils (gsi_utils) ........................ yes Building gsi_monitor (gsi_monitor) .................... yes + Building gdas (gdas) .................................. yes Building gldas (gldas) ................................ yes Building UPP (upp) .................................... yes Building ufs_utils (ufs_utils) ........................ yes diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index aee38c3a11..bc94674c5f 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -9,6 +9,7 @@ declare -a Build_prg=("Build_ufs_model" \ "Build_gsi_monitor" \ "Build_ww3_prepost" \ "Build_reg2grb2" \ + "Build_gdas" \ "Build_gldas" \ "Build_upp" \ "Build_ufs_utils" \ From b26a8ac85b2b981356417ad7ced3d1d420cede68 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Fri, 7 Oct 2022 10:51:25 -0400 Subject: [PATCH 4/9] Update to obsproc.v1.0.2 and prepobs.v1.0.1 (#1049) * Update HOMEobsproc paths in config.base * Update primary obsproc JJOB call in prep.sh * Add prepobs module load to R&D module_base modulefiles * Add cdate10 setting to config.prep * Add launcher_PREP to HERA and ORION env files * Add needed COMINtcvital path to config.prep * Retire config.prepbufr and prepbufr step from config.resources Refs #1033 --- env/HERA.env | 1 + env/ORION.env | 1 + jobs/rocoto/prep.sh | 4 ++-- modulefiles/module_base.hera.lua | 3 +++ modulefiles/module_base.orion.lua | 3 +++ parm/config/config.base.emc.dyn | 4 +--- parm/config/config.base.nco.static | 4 +--- parm/config/config.prep | 2 ++ parm/config/config.prepbufr | 19 ------------------- parm/config/config.resources | 12 ++++++------ 10 files changed, 20 insertions(+), 33 deletions(-) delete 
mode 100755 parm/config/config.prepbufr diff --git a/env/HERA.env b/env/HERA.env index 781cb9d415..954f25a322 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -43,6 +43,7 @@ if [ $step = "prep" -o $step = "prepbufr" ]; then export POE="NO" export BACK="NO" export sys_tp="HERA" + export launcher_PREP="srun" elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then diff --git a/env/ORION.env b/env/ORION.env index 63d978d2e5..04bae19e0e 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -42,6 +42,7 @@ if [ $step = "prep" -o $step = "prepbufr" ]; then export POE="NO" export BACK=${BACK:-"YES"} export sys_tp="ORION" + export launcher_PREP="srun" elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostpnt" ]; then diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh index 7d22adc7aa..f33a8bc9f6 100755 --- a/jobs/rocoto/prep.sh +++ b/jobs/rocoto/prep.sh @@ -10,7 +10,7 @@ status=$? ############################################################### # Source relevant configs -configs="base prep prepbufr" +configs="base prep" for config in $configs; do . $EXPDIR/config.${config} status=$? @@ -109,7 +109,7 @@ if [ $DO_MAKEPREPBUFR = "YES" ]; then export MAKE_NSSTBUFR="NO" fi - $HOMEobsproc_network/jobs/JGLOBAL_PREP + $HOMEobsproc/jobs/JOBSPROC_GLOBAL_PREP status=$? 
[[ $status -ne 0 ]] && exit $status diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua index 36cb672eb0..14c523c518 100644 --- a/modulefiles/module_base.hera.lua +++ b/modulefiles/module_base.hera.lua @@ -46,6 +46,9 @@ load(pathJoin("cdo", "1.9.5")) load(pathJoin("R", "3.5.0")) +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v1.0.1/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + -- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") load(pathJoin("hpc", "1.2.0")) diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua index 22a4764419..bdb85d7430 100644 --- a/modulefiles/module_base.orion.lua +++ b/modulefiles/module_base.orion.lua @@ -47,6 +47,9 @@ load(pathJoin("slurm", "19.05.3-2")) load(pathJoin("cdo", "1.9.5")) +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v1.0.1/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + -- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") load(pathJoin("hpc", "1.2.0")) diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index e615c652a6..b36c726a34 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -77,9 +77,7 @@ export MODE="@MODE@" # cycled/forecast-only export FIXgsi="$HOMEgfs/fix/gsi" export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" export HOMEpost="$HOMEgfs" -export HOMEobsproc_prep="$BASE_GIT/obsproc/obsproc_prep.v5.5.0_hpc-stack" -export HOMEobsproc_network="$BASE_GIT/obsproc/obsproc_global.v3.4.2_hpc-stack" -export HOMEobsproc_global=$HOMEobsproc_network +export HOMEobsproc="$BASE_GIT/obsproc/v1.0.2" # CONVENIENT utility scripts and other environment parameters export NCP="/bin/cp -p" diff --git a/parm/config/config.base.nco.static 
b/parm/config/config.base.nco.static index 521eec6503..ee0a384db5 100755 --- a/parm/config/config.base.nco.static +++ b/parm/config/config.base.nco.static @@ -65,9 +65,7 @@ export REALTIME="YES" export FIXgsi="$HOMEgfs/fix/gsi" export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" export HOMEpost="$HOMEgfs" -export HOMEobsproc_prep="$NWPROD/obsproc_prep.v5.5.0" -export HOMEobsproc_network="$NWPROD/obsproc_global.v3.4.2" -export HOMEobsproc_global=$HOMEobsproc_network +export HOMEobsproc="/lfs/h1/ops/prod/packages/obsproc.v1.0.2" # CONVENIENT utility scripts and other environment parameters export NCP="/bin/cp -p" diff --git a/parm/config/config.prep b/parm/config/config.prep index 9c50bd6bd9..a300d544c8 100755 --- a/parm/config/config.prep +++ b/parm/config/config.prep @@ -9,6 +9,7 @@ echo "BEGIN: config.prep" . $EXPDIR/config.resources prep export DO_MAKEPREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} # Relocation and syndata QC export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} @@ -17,6 +18,7 @@ export DO_RELOCATE="NO" export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" export SENDCOM=YES +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} export HOMERELO=$HOMEgfs diff --git a/parm/config/config.prepbufr b/parm/config/config.prepbufr deleted file mode 100755 index 2d6ececc5b..0000000000 --- a/parm/config/config.prepbufr +++ /dev/null @@ -1,19 +0,0 @@ -#! /usr/bin/env bash - -########## config.prepbufr ########## -# PREPBUFR specific configuration - -echo "BEGIN: config.prepbufr" - -# Get task specific resources -. 
$EXPDIR/config.resources prepbufr - -# Set variables - -if [ $machine = "HERA" ]; then - export GESROOT=/scratch1/NCEPDEV/rstprod -elif [ $machine = "ORION" ]; then - export GESROOT=/dev/null -fi - -echo "END: config.prepbufr" diff --git a/parm/config/config.resources b/parm/config/config.resources index b723b57916..0162379ba6 100755 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -41,12 +41,12 @@ elif [[ "$machine" = "ORION" ]]; then export npe_node_max=40 fi -if [ $step = "prep" -o $step = "prepbufr" ]; then - eval "export wtime_$step='00:45:00'" - eval "export npe_$step=4" - eval "export npe_node_$step=2" - eval "export nth_$step=1" - eval "export memory_$step=40G" +if [ $step = "prep" ]; then + export wtime_prep='00:45:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + export memory_prep="40G" elif [ $step = "aerosol_init" ]; then export wtime_aerosol_init="00:05:00" From e09989b8285f71b44a0958fd1c60e7ca49d73661 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Fri, 7 Oct 2022 20:12:04 -0400 Subject: [PATCH 5/9] Port to S4 (#1023) Ports the global workflow to the S4 cluster. Note that this does not include support for S2S experiments. Additionally, S4 does not support C768 experiments. A couple of special notes: - S4 does not have access to rstprod data. Among other things, this means that the nsstbufr and prepbufr.acft_profiles files must be created on the fly. The way I accomplished this was by moving the `MAKE_NSSTBUFR` to and creating `MAKE_ACFTBUFR` in config.base.emc.dyn and setting them via setup_workflow.xml. This seems like a bit of a kludge and I am open to suggestions on how else to address this. Both options need to be set for the prep and analysis jobs. - S4 _can_ run S2S+ experiments, but this requires significant, and convoluted, modifications to the configuration files. Support for these are thus not enabled by default. 
Instead, I have placed a set of configuration files in S4:/data/users/dhuber/save/s2s_configs. Users interested in performing these experiments should contact me to set it up. Closes #138 --- Externals.cfg | 2 +- README.md | 7 +- env/S4.env | 295 ++++++++++++++++++++++++ jobs/JGDAS_ENKF_DIAG | 17 +- jobs/JGDAS_ENKF_SELECT_OBS | 17 +- jobs/JGLOBAL_ATMOS_ANALYSIS | 18 +- jobs/rocoto/prep.sh | 13 +- modulefiles/module-setup.csh.inc | 7 + modulefiles/module-setup.sh.inc | 6 + modulefiles/module_base.s4.lua | 51 +++++ modulefiles/modulefile.ww3.s4.lua | 21 ++ modulefiles/workflow_utils.s4.lua | 35 +++ parm/config/config.aero | 3 + parm/config/config.base.emc.dyn | 7 + parm/config/config.coupled_ic | 8 +- parm/config/config.fv3 | 28 ++- parm/config/config.prep | 3 +- parm/config/config.resources | 364 +++++++++++++++++------------- parm/config/config.vrfy | 8 +- sorc/checkout.sh | 2 +- sorc/link_workflow.sh | 1 + ush/load_fv3gfs_modules.sh | 3 + workflow/hosts.py | 4 +- workflow/hosts/hera.yaml | 5 +- workflow/hosts/orion.yaml | 5 +- workflow/hosts/s4.yaml | 23 ++ workflow/rocoto/workflow_tasks.py | 6 +- workflow/setup_expt.py | 3 + 28 files changed, 756 insertions(+), 206 deletions(-) create mode 100755 env/S4.env create mode 100644 modulefiles/module_base.s4.lua create mode 100644 modulefiles/modulefile.ww3.s4.lua create mode 100644 modulefiles/workflow_utils.s4.lua create mode 100644 workflow/hosts/s4.yaml diff --git a/Externals.cfg b/Externals.cfg index 9b87855275..e8851e2a6b 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,7 +8,7 @@ protocol = git required = True [gfs-utils] -hash = 3a609ea +hash = 7bf599f local_path = sorc/gfs_utils.fd repo_url = https://github.com/NOAA-EMC/gfs-utils protocol = git diff --git a/README.md b/README.md index c89aa7275b..750666e517 100644 --- a/README.md +++ b/README.md @@ -7,13 +7,14 @@ The global-workflow depends on the following prerequisities to be available on t * workflow manager - ROCOTO 
(https://github.com/christopherwharrop/rocoto) * modules - NCEPLIBS (various), esmf v8.0.0bs48, hdf5, intel/ips v18, impi v18, wgrib2, netcdf v4.7.0, hpss, gempak (see module files under /modulefiles for additional details) -The global-workflow current supports the following machines: +The global-workflow current supports the following tier-1 machines: -* WCOSS-Dell -* WCOSS-Cray * Hera * Orion +Additionally, the following tier-2 machine is supported: +* S4 (Note that S2S+ experiments are not fully supported) + Quick-start instructions are below. Full instructions are available in the [wiki](https://github.com/NOAA-EMC/global-workflow/wiki/Run-Global-Workflow) ## Build global-workflow: diff --git a/env/S4.env b/env/S4.env new file mode 100755 index 0000000000..a3faabaecf --- /dev/null +++ b/env/S4.env @@ -0,0 +1,295 @@ +#!/bin/bash -x + +if [ $# -ne 1 ]; then + + echo "Must specify an input argument to set runtime environment variables!" + echo "argument can be any one of the following:" + echo "atmanalrun atmensanalrun" + echo "anal sfcanl fcst post vrfy metp" + echo "eobs eupd ecen efcs epos" + echo "postsnd awips gempak" + exit 1 + +fi + +step=$1 +PARTITION_BATCH=${PARTITION_BATCH:-"s4"} + +if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 +elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 +fi +export launcher="srun -l --export=ALL" + +# Configure MPI environment +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + +export job=${PBS_JOBNAME:-${step}} +export jobid=${job}.${PBS_JOBID:-$$} + + +if [[ ${step} = "prep" || ${step} = "prepbufr" ]]; then + + npe_node_prep=${npe_node_prep:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_prep)) + + export POE="NO" + export BACK="NO" + export sys_tp="S4" + export launcher_PREP="srun" + +elif [[ ${step} = "waveinit" || ${step} = "waveprep" || ${step} = "wavepostsbs" || ${step} = "wavepostbndpnt" || ${step} = "wavepostbndpntbll" || 
${step} = "wavepostpnt" ]]; then + + export mpmd="--multi-prog" + export CFP_MP="YES" + if [[ ${step} = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd} + +elif [[ ${step} = "atmanalrun" ]]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + npe_node_atmanalrun=${npe_node_atmanalrun:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_atmanalrun)) + + export NTHREADS_ATMANAL=${nth_atmanalrun:-${nth_max}} + [[ ${NTHREADS_ATMANAL} -gt ${nth_max} ]] && export NTHREADS_ATMANAL=${nth_max} + export APRUN_ATMANAL="${launcher} -n ${npe_atmanalrun:-0}" + +elif [[ ${step} = "atmensanalrun" ]]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + npe_node_atmensanalrun=${npe_node_atmensanalrun:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_atmensanalrun)) + + export NTHREADS_ATMENSANAL=${nth_atmensanalrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANAL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANAL=${nth_max} + export APRUN_ATMENSANAL="${launcher} -n ${npe_atmensanalrun:-0}" + +elif [[ ${step} = "aeroanlrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + npe_node_aeroanlrun=${npe_node_aeroanlrun:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_aeroanlrun)) + + export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} + [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun:-0}" + +elif [[ ${step} = "anal" || ${step} = "analcalc" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + npe_node_anal=${npe_node_anal:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_anal)) + + export NTHREADS_GSI=${nth_anal:-${nth_max}} + 
[[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI=${launcher} + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC=${launcher} + + export NTHREADS_CYCLE=${nth_cycle:-12} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_cycle=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_cycle:-0}" + + + export NTHREADS_GAUSFCANL=1 + npe_gausfcanl=${npe_gausfcanl:-1} + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl:-0}" + +elif [[ ${step} = "sfcanl" ]]; then + npe_node_sfcanl=${npe_node_sfcanl:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_sfcanl)) + + export NTHREADS_CYCLE=${nth_sfcanl:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_sfcanl=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl:-0}" + +elif [[ ${step} = "gldas" ]]; then + + npe_node_gldas=${npe_node_gldas:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_gldas)) + + export NTHREADS_GLDAS=${nth_gldas:-${nth_max}} + [[ ${NTHREADS_GLDAS} -gt ${nth_max} ]] && export NTHREADS_GLDAS=${nth_max} + export APRUN_GLDAS="${launcher} -n ${npe_gldas:-0}" + + export NTHREADS_GAUSSIAN=${nth_gaussian:-1} + [[ ${NTHREADS_GAUSSIAN} -gt ${nth_max} ]] && export NTHREADS_GAUSSIAN=${nth_max} + export APRUN_GAUSSIAN="${launcher} -n ${npe_gaussian:-0}" + +# Must run data processing with exactly the number of tasks as time +# periods being processed. 
+ + gldas_spinup_hours=${gldas_spinup_hours:-0} + npe_gldas_data_proc=$((gldas_spinup_hours + 12)) + export APRUN_GLDAS_DATA_PROC="${launcher} -n ${npe_gldas_data_proc} --multi-prog" + +elif [[ ${step} = "eobs" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + npe_node_eobs=${npe_node_eobs:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_eobs)) + + export NTHREADS_GSI=${nth_eobs:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI=${launcher} + +elif [[ ${step} = "eupd" ]]; then + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + npe_node_eupd=${npe_node_eupd:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_eupd)) + + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} + [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} + export APRUN_ENKF=${launcher} + +elif [[ ${step} = "fcst" ]]; then + + #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs + if [[ ${CDUMP:-gdas} == "gfs" ]]; then + npe_fcst=${npe_fcst_gfs:-0} + npe_node_fcst=${npe_node_fcst_gfs:-${npe_node_max}} + nth_fv3=${nth_fv3_gfs:-1} + fi + + npe_node_fcst=${npe_node_fcst:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_fcst)) + + export NTHREADS_FV3=${nth_fv3:-${nth_max}} + [[ ${NTHREADS_FV3} -gt ${nth_max} ]] && export NTHREADS_FV3=${nth_max} + export cores_per_node=${npe_node_max} + export APRUN_FV3="${launcher} -n ${npe_fcst:-0}" + + export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} + [[ ${NTHREADS_REGRID_NEMSIO} -gt ${nth_max} ]] && export NTHREADS_REGRID_NEMSIO=${nth_max} + export APRUN_REGRID_NEMSIO=${launcher} + + export NTHREADS_REMAP=${nth_remap:-2} + [[ ${NTHREADS_REMAP} -gt ${nth_max} ]] && export NTHREADS_REMAP=${nth_max} + export APRUN_REMAP=${launcher} + export 
I_MPI_DAPL_UD="enable" + +elif [[ ${step} = "efcs" ]]; then + + npe_node_efcs=${npe_node_efcs:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_efcs)) + + export NTHREADS_FV3=${nth_efcs:-${nth_max}} + [[ ${NTHREADS_FV3} -gt ${nth_max} ]] && export NTHREADS_FV3=${nth_max} + export cores_per_node=${npe_node_max} + export APRUN_FV3="${launcher} -n ${npe_efcs:-0}" + + export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} + [[ ${NTHREADS_REGRID_NEMSIO} -gt ${nth_max} ]] && export NTHREADS_REGRID_NEMSIO=${nth_max} + export APRUN_REGRID_NEMSIO="${launcher} ${LEVS:-128}" + +elif [[ ${step} = "post" ]]; then + + npe_node_post=${npe_node_post:-npe_node_max} + nth_max=$((npe_node_max / npe_node_post)) + + export NTHREADS_NP=${nth_np:-1} + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} + export APRUN_NP=${launcher} + + export NTHREADS_DWN=${nth_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN=${launcher} + +elif [[ ${step} = "ecen" ]]; then + + npe_node_ecen=${npe_node_ecen:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_ecen)) + + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN=${launcher} + + export NTHREADS_CHGRES=${nth_chgres:-12} + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} + export APRUN_CHGRES="time" + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC=${launcher} + +elif [[ ${step} = "esfc" ]]; then + + npe_node_esfc=${npe_node_esfc:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_esfc)) + + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc:-0}" + + export NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && 
export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc:-0}" + +elif [[ ${step} = "epos" ]]; then + + npe_node_epos=${npe_node_epos:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_epos)) + + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS=${launcher} + +elif [[ ${step} = "init" ]]; then + + export APRUN=${launcher} + +elif [[ ${step} = "postsnd" ]]; then + + npe_node_postsnd=${npe_node_postsnd:-${npe_node_max}} + nth_max=$((npe_node_max / npe_node_postsnd)) + + export NTHREADS_POSTSND=${nth_postsnd:-1} + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND=${launcher} + + export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP=${launcher} + +elif [[ ${step} = "awips" ]]; then + + echo "WARNING: ${step} is not enabled on S4!" + +elif [[ ${step} = "gempak" ]]; then + + echo "WARNING: ${step} is not enabled on S4!" +fi diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG index 3864f8d008..83e3e6b366 100755 --- a/jobs/JGDAS_ENKF_DIAG +++ b/jobs/JGDAS_ENKF_DIAG @@ -56,6 +56,8 @@ export pgmerr=errfile export CDATE=${CDATE:-${PDY}${cyc}} export CDUMP=${CDUMP:-${RUN:-"gdas"}} export COMPONENT=${COMPONENT:-atmos} +export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} +export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} ############################################## @@ -104,9 +106,20 @@ export PREPQC="$COMIN_OBS/${OPREFIX}prepbufr" if [ ! 
-f $PREPQC ]; then echo "WARNING: Global PREPBUFR FILE $PREPQC MISSING" fi -export PREPQCPF="$COMIN_OBS/${OPREFIX}prepbufr.acft_profiles" export TCVITL="$COMIN_ANL/${OPREFIX}syndata.tcvitals.tm00" -[[ $DONST = "YES" ]] && export NSSTBF="$COMIN_OBS/${OPREFIX}nsstbufr" +if [[ $DONST = "YES" ]]; then + if [[ $MAKE_NSSTBUFR == "YES" ]]; then + export NSSTBF="${COMOUT}/${OPREFIX}nsstbufr" + else + export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + fi +fi + +if [[ $MAKE_ACFTBUFR == "YES" ]]; then + export PREPQCPF="${COMOUT}/${OPREFIX}prepbufr.acft_profiles" +else + export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +fi # Guess Bias correction coefficients related to control export GBIAS=${COMIN_GES_CTL}/${GPREFIX}abias diff --git a/jobs/JGDAS_ENKF_SELECT_OBS b/jobs/JGDAS_ENKF_SELECT_OBS index 4d67379d66..0bb1bdc3bc 100755 --- a/jobs/JGDAS_ENKF_SELECT_OBS +++ b/jobs/JGDAS_ENKF_SELECT_OBS @@ -56,6 +56,8 @@ export pgmerr=errfile export CDATE=${CDATE:-${PDY}${cyc}} export CDUMP=${CDUMP:-${RUN:-"gdas"}} export COMPONENT=${COMPONENT:-atmos} +export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} +export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} ############################################## @@ -107,9 +109,20 @@ export PREPQC="$COMIN_OBS/${OPREFIX}prepbufr" if [ ! 
-f $PREPQC ]; then echo "WARNING: Global PREPBUFR FILE $PREPQC MISSING" fi -export PREPQCPF="$COMIN_OBS/${OPREFIX}prepbufr.acft_profiles" export TCVITL="$COMIN_ANL/${OPREFIX}syndata.tcvitals.tm00" -[[ $DONST = "YES" ]] && export NSSTBF="$COMIN_OBS/${OPREFIX}nsstbufr" +if [[ $DONST = "YES" ]]; then + if [[ $MAKE_NSSTBUFR == "YES" ]]; then + export NSSTBF="${COMOUT}/${OPREFIX}nsstbufr" + else + export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + fi +fi + +if [[ $MAKE_ACFTBUFR == "YES" ]]; then + export PREPQCPF="${COMOUT}/${OPREFIX}prepbufr.acft_profiles" +else + export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +fi # Guess Bias correction coefficients related to control export GBIAS=${COMIN_GES_CTL}/${GPREFIX}abias diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS index db56beb582..3d4e26c84e 100755 --- a/jobs/JGLOBAL_ATMOS_ANALYSIS +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS @@ -58,6 +58,8 @@ export CDATE=${CDATE:-${PDY}${cyc}} export CDUMP=${CDUMP:-${RUN:-"gfs"}} export COMPONENT=${COMPONENT:-atmos} export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} +export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} +export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} ############################################## @@ -124,10 +126,20 @@ export PREPQC="${COMIN_OBS}/${OPREFIX}prepbufr" if [ ! 
-f $PREPQC ]; then echo "WARNING: Global PREPBUFR FILE $PREPQC MISSING" fi -export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" export TCVITL="${COMOUT}/${OPREFIX}syndata.tcvitals.tm00" -[[ $DONST = "YES" ]] && export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" +if [[ $DONST = "YES" ]]; then + if [[ $MAKE_NSSTBUFR == "YES" ]]; then + export NSSTBF="${COMOUT}/${OPREFIX}nsstbufr" + else + export NSSTBF="${COMIN_OBS}/${OPREFIX}nsstbufr" + fi +fi +if [[ $MAKE_ACFTBUFR == "YES" ]]; then + export PREPQCPF="${COMOUT}/${OPREFIX}prepbufr.acft_profiles" +else + export PREPQCPF="${COMIN_OBS}/${OPREFIX}prepbufr.acft_profiles" +fi # Copy fix file for obsproc if [ $RUN = "gfs" ]; then @@ -169,7 +181,7 @@ fi # Remove the Temporary working directory ########################################## cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} exit 0 diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh index f33a8bc9f6..b9c3b72235 100755 --- a/jobs/rocoto/prep.sh +++ b/jobs/rocoto/prep.sh @@ -28,6 +28,7 @@ status=$? export COMPONENT=${COMPONENT:-atmos} export OPREFIX="${CDUMP}.t${cyc}z." export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" +export MAKE_PREPBUFR=${MAKE_PREPBUFR:-"YES"} [[ ! 
-d $COMOUT ]] && mkdir -p $COMOUT ############################################################### @@ -84,11 +85,11 @@ fi ############################################################### # Generate prepbufr files from dumps or copy from OPS -if [ $DO_MAKEPREPBUFR = "YES" ]; then +if [ $MAKE_PREPBUFR = "YES" ]; then if [ $ROTDIR_DUMP = "YES" ]; then - rm $COMOUT/${OPREFIX}prepbufr - rm $COMOUT/${OPREFIX}prepbufr.acft_profiles - rm $COMOUT/${OPREFIX}nsstbufr + rm -f $COMOUT/${OPREFIX}prepbufr + rm -f $COMOUT/${OPREFIX}prepbufr.acft_profiles + rm -f $COMOUT/${OPREFIX}nsstbufr fi export job="j${CDUMP}_prep_${cyc}" @@ -105,7 +106,7 @@ if [ $DO_MAKEPREPBUFR = "YES" ]; then fi # Disable creating NSSTBUFR if desired, copy from DMPDIR instead - if [[ ${DO_MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then + if [[ ${MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then export MAKE_NSSTBUFR="NO" fi @@ -114,7 +115,7 @@ if [ $DO_MAKEPREPBUFR = "YES" ]; then [[ $status -ne 0 ]] && exit $status # If creating NSSTBUFR was disabled, copy from DMPDIR if appropriate. - if [[ ${DO_MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then + if [[ ${MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then [[ $DONST = "YES" ]] && $NCP $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${COMPONENT}/${OPREFIX}nsstbufr $COMOUT/${OPREFIX}nsstbufr fi diff --git a/modulefiles/module-setup.csh.inc b/modulefiles/module-setup.csh.inc index b342cb9655..e8219424f6 100644 --- a/modulefiles/module-setup.csh.inc +++ b/modulefiles/module-setup.csh.inc @@ -25,6 +25,13 @@ else if ( { test -d /work } ) then source /apps/lmod/init/$__ms_shell endif module purge +else if ( { test -d /data/prod } ) then + # We are on SSEC S4 + if ( ! { module help >& /dev/null } ) then + source /usr/share/lmod/lmod/init/$__ms_shell + endif + source /etc/profile + module purge else if ( { test -d /glade } ) then # We are on NCAR Yellowstone if ( ! 
{ module help >& /dev/null } ) then diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc index 419b308985..201daa7b8d 100644 --- a/modulefiles/module-setup.sh.inc +++ b/modulefiles/module-setup.sh.inc @@ -86,6 +86,12 @@ elif [[ -d /lustre && -d /ncrc ]] ; then source /etc/profile unset __ms_source_etc_profile fi +elif [[ -d /data/prod ]] ; then + # We are on SSEC's S4 + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /usr/share/lmod/lmod/init/$__ms_shell + fi + module purge else echo WARNING: UNKNOWN PLATFORM 1>&2 fi diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua new file mode 100644 index 0000000000..93d95de6b9 --- /dev/null +++ b/modulefiles/module_base.s4.lua @@ -0,0 +1,51 @@ +help([[ +Load environment to run GFS on S4 +]]) + +load("license_intel") +prepend_path("MODULEPATH", "/data/prod/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "18.0.4")) +load(pathJoin("hpc-impi", "18.0.4")) + +load(pathJoin("nco", "4.9.3")) +load(pathJoin("ncl", "6.4.0-precompiled")) + +load(pathJoin("prod_util", "1.2.2")) +load(pathJoin("grib_util", "1.2.2")) + +load(pathJoin("crtm", "2.3.0")) +setenv("CRTM_FIX","/data/prod/hpc-stack/fix/crtm/2.3.0") + +load(pathJoin("jasper", "2.0.25")) +load(pathJoin("zlib", "1.2.11")) +load(pathJoin("png", "1.6.35")) + +load(pathJoin("hdf5", "1.10.6")) +load(pathJoin("netcdf", "4.7.4")) +load(pathJoin("pio", "2.5.2")) +load(pathJoin("esmf", "8.2.1b04")) +load(pathJoin("fms", "2021.03")) + +load(pathJoin("bacio", "2.4.1")) +load(pathJoin("g2", "3.4.2")) +load(pathJoin("g2tmpl", "1.10.0")) +load(pathJoin("ip", "3.3.3")) +load(pathJoin("nemsio", "2.5.2")) +load(pathJoin("sp", "2.3.3")) +load(pathJoin("w3emc", "2.7.3")) +load(pathJoin("w3nco", "2.4.1")) +load(pathJoin("ncdiag", "1.0.0")) + +load(pathJoin("wgrib2", "2.0.8")) +setenv("WGRIB2","wgrib2") + +load(pathJoin("miniconda", "3.8-s4")) + +load(pathJoin("cdo", "1.9.8")) + 
+prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/v1.0.1/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + +whatis("Description: GFS run environment") diff --git a/modulefiles/modulefile.ww3.s4.lua b/modulefiles/modulefile.ww3.s4.lua new file mode 100644 index 0000000000..06aaf6c3fb --- /dev/null +++ b/modulefiles/modulefile.ww3.s4.lua @@ -0,0 +1,21 @@ +help([[ +Build environment for WW3 on S4 +]]) + +load("license_intel") +prepend_path("MODULEPATH", "/data/prod/hpc-stack/modulefiles/stack") +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "18.0.4")) +load(pathJoin("hpc-impi", "18.0.4")) + +load(pathJoin("jasper", "2.0.25")) +load(pathJoin("zlib", "1.2.11")) +load(pathJoin("png", "1.6.35")) + +load(pathJoin("bacio", "2.4.1")) +load(pathJoin("g2", "3.4.1")) + +load(pathJoin("hdf5", "1.10.6")) +load(pathJoin("netcdf", "4.7.4")) + +load(pathJoin("w3nco", "2.4.1")) diff --git a/modulefiles/workflow_utils.s4.lua b/modulefiles/workflow_utils.s4.lua new file mode 100644 index 0000000000..5770ff3cba --- /dev/null +++ b/modulefiles/workflow_utils.s4.lua @@ -0,0 +1,35 @@ +help([[ +Build environment for workflow utilities on Hera +]]) + +load("license_intel") +prepend_path("MODULEPATH", "/data/prod/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "18.0.4")) +load(pathJoin("hpc-impi", "18.0.4")) + +load(pathJoin("jasper", "2.0.25")) +load(pathJoin("zlib", "1.2.11")) +load(pathJoin("png", "1.6.35")) + +load(pathJoin("hdf5", "1.10.6")) +load(pathJoin("netcdf", "4.7.4")) + +load(pathJoin("bacio", "2.4.1")) +load(pathJoin("g2", "3.4.1")) +load(pathJoin("ip", "3.3.3")) +load(pathJoin("nemsio", "2.5.2")) +load(pathJoin("sp", "2.3.3")) +load(pathJoin("w3emc", "2.7.3")) +load(pathJoin("w3nco", "2.4.1")) +load(pathJoin("nemsiogfs", "2.5.3")) +load(pathJoin("ncio", "1.1.2")) +load(pathJoin("landsfcutil", "2.4.1")) +load(pathJoin("sigio", "2.3.2")) +load(pathJoin("bufr", "11.4.0")) + +load(pathJoin("wgrib2", 
"2.0.8")) +setenv("WGRIB2","wgrib2") + +setenv("FFLAGS","-march=ivybridge") diff --git a/parm/config/config.aero b/parm/config/config.aero index 3aeb33790e..85f8583796 100644 --- a/parm/config/config.aero +++ b/parm/config/config.aero @@ -13,6 +13,9 @@ case $machine in "ORION") AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; *) echo "FATAL ERROR: Machine $machine unsupported for aerosols" exit 2 diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index b36c726a34..0633db18f5 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -16,6 +16,7 @@ export ACCOUNT="@ACCOUNT@" export QUEUE="@QUEUE@" export QUEUE_SERVICE="@QUEUE_SERVICE@" export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" # Project to use in mass store: HPSS_PROJECT=emc-global @@ -354,6 +355,12 @@ if [ $DONST = "YES" ]; then export FNTSFA=" "; fi # The switch to apply SST elevation correction or not export nst_anl=.true. 
+# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + # Analysis increments to zero in CALCINCEXEC export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" diff --git a/parm/config/config.coupled_ic b/parm/config/config.coupled_ic index 0df82591d9..7ae44b5e8f 100755 --- a/parm/config/config.coupled_ic +++ b/parm/config/config.coupled_ic @@ -5,12 +5,14 @@ echo "BEGIN: config.coupled_ic" # Get task specific resources -source $EXPDIR/config.resources coupled_ic +source ${EXPDIR}/config.resources coupled_ic -if [[ "$machine" == "HERA" ]]; then +if [[ "${machine}" == "HERA" ]]; then export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" -elif [[ "$machine" == "ORION" ]]; then +elif [[ "${machine}" == "ORION" ]]; then export BASE_CPLIC="/work/noaa/global/wkolczyn/noscrub/global-workflow/IC" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" fi export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c diff --git a/parm/config/config.fv3 b/parm/config/config.fv3 index c8c959362e..faa806c91a 100755 --- a/parm/config/config.fv3 +++ b/parm/config/config.fv3 @@ -30,6 +30,12 @@ if [[ "$machine" = "JET" ]]; then fi elif [[ "$machine" = "HERA" ]]; then export npe_node_max=40 +elif [[ "$machine" = "S4" ]]; then + if [[ "$PARTITION_BATCH" = "s4" ]]; then + export npe_node_max=32 + elif [[ "$PARTITION_BATCH" = "ivy" ]]; then + export npe_node_max=20 + fi elif [[ "$machine" = "ORION" ]]; then export npe_node_max=40 fi @@ -42,8 +48,8 @@ case $case_in in export layout_y=2 export layout_x_gfs=3 export layout_y_gfs=2 - export nth_fv3=1 - export nth_fv3_gfs=1 + export nth_fv3=2 + export nth_fv3_gfs=2 export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=1 export WRTTASK_PER_GROUP=$npe_node_max @@ -107,9 +113,9 @@ case $case_in in export nth_fv3_gfs=4 
export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=2 - export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRTTASK_PER_GROUP=$(echo "2*${npe_node_max}" |bc) export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTTASK_PER_GROUP_GFS=$(echo "2*${npe_node_max}" |bc) export WRTIOBUF="32M" ;; "C1152") @@ -122,9 +128,9 @@ case $case_in in export nth_fv3_gfs=4 export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=4 - export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRTTASK_PER_GROUP=$(echo "2*${npe_node_max}" |bc) export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTTASK_PER_GROUP_GFS=$(echo "2*${npe_node_max}" |bc) export WRTIOBUF="48M" ;; "C3072") @@ -137,19 +143,19 @@ case $case_in in export nth_fv3_gfs=4 export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling export WRITE_GROUP=4 - export WRTTASK_PER_GROUP=$(echo "3*$npe_node_max" |bc) + export WRTTASK_PER_GROUP=$(echo "3*${npe_node_max}" |bc) export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=$(echo "3*$npe_node_max" |bc) + export WRTTASK_PER_GROUP_GFS=$(echo "3*${npe_node_max}" |bc) export WRTIOBUF="64M" ;; *) - echo "grid $case_in not supported, ABORT!" + echo "grid ${case_in} not supported, ABORT!" 
exit 1 ;; esac # Calculate chunksize based on resolution -export RESTILE=$(echo $case_in |cut -c2-) +export RESTILE=$(echo ${case_in} |cut -c2-) export ichunk2d=$((4*RESTILE)) export jchunk2d=$((2*RESTILE)) export ichunk3d=$((4*RESTILE)) @@ -157,7 +163,7 @@ export jchunk3d=$((2*RESTILE)) export kchunk3d=1 # Determine whether to use parallel NetCDF based on resolution -case $case_in in +case ${case_in} in "C48" | "C96" | "C192") export OUTPUT_FILETYPE_ATM="netcdf" export OUTPUT_FILETYPE_SFC="netcdf" diff --git a/parm/config/config.prep b/parm/config/config.prep index a300d544c8..3e1cf8c32f 100755 --- a/parm/config/config.prep +++ b/parm/config/config.prep @@ -8,7 +8,7 @@ echo "BEGIN: config.prep" # Get task specific resources . $EXPDIR/config.resources prep -export DO_MAKEPREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump export cdate10=${PDY}${cyc} # Relocation and syndata QC @@ -65,6 +65,5 @@ if [[ "$CDATE" -ge "2020102200" ]]; then else export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' fi -export DO_MAKE_NSSTBUFR="NO" # if NO, will copy nsstbufr from globaldump echo "END: config.prep" diff --git a/parm/config/config.resources b/parm/config/config.resources index 0162379ba6..909f0f6a2b 100755 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -27,17 +27,23 @@ step=$1 echo "BEGIN: config.resources" -if [[ "$machine" = "JET" ]]; then - if [[ "$PARTITION_BATCH" = "xjet" ]]; then +if [[ ${machine} = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then export npe_node_max=24 - elif [[ "$PARTITION_BATCH" = "vjet" || "$PARTITION_BATCH" = "sjet" ]]; then + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then export npe_node_max=16 - elif [[ "$PARTITION_BATCH" = "kjet" ]]; then + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then export npe_node_max=40 fi -elif [[ "$machine" = "HERA" ]]; then +elif [[ ${machine} = "HERA" ]]; then export 
npe_node_max=40 -elif [[ "$machine" = "ORION" ]]; then +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then export npe_node_max=40 fi @@ -48,126 +54,126 @@ if [ $step = "prep" ]; then export nth_prep=1 export memory_prep="40G" -elif [ $step = "aerosol_init" ]; then +elif [ ${step} = "aerosol_init" ]; then export wtime_aerosol_init="00:05:00" export npe_aerosol_init=1 export nth_aerosol_init=1 - export npe_node_aerosol_init=$(echo "$npe_node_max / $nth_aerosol_init" | bc) + export npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) export NTASKS=${npe_aerosol_init} export memory_aerosol_init="6G" -elif [ $step = "waveinit" ]; then +elif [ ${step} = "waveinit" ]; then export wtime_waveinit="00:10:00" export npe_waveinit=12 export nth_waveinit=1 - export npe_node_waveinit=$(echo "$npe_node_max / $nth_waveinit" | bc) + export npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) export NTASKS=${npe_waveinit} -elif [ $step = "waveprep" ]; then +elif [ ${step} = "waveprep" ]; then export wtime_waveprep="00:30:00" export npe_waveprep=65 export nth_waveprep=1 - export npe_node_waveprep=$(echo "$npe_node_max / $nth_waveprep" | bc) + export npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) export NTASKS=${npe_waveprep} -elif [ $step = "wavepostsbs" ]; then +elif [ ${step} = "wavepostsbs" ]; then export wtime_wavepostsbs="06:00:00" export npe_wavepostsbs=10 export nth_wavepostsbs=1 - export npe_node_wavepostsbs=$(echo "$npe_node_max / $nth_wavepostsbs" | bc) + export npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) export NTASKS=${npe_wavepostsbs} -elif [ $step = "wavepostbndpnt" ]; then +elif [ ${step} = "wavepostbndpnt" ]; then export wtime_wavepostbndpnt="02:00:00" export npe_wavepostbndpnt=280 export nth_wavepostbndpnt=1 - export 
npe_node_wavepostbndpnt=$(echo "$npe_node_max / $nth_wavepostbndpnt" | bc) + export npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) export NTASKS=${npe_wavepostbndpnt} -elif [ $step = "wavepostbndpntbll" ]; then +elif [ ${step} = "wavepostbndpntbll" ]; then export wtime_wavepostbndpntbll="01:00:00" export npe_wavepostbndpntbll=280 export nth_wavepostbndpntbll=1 - export npe_node_wavepostbndpntbll=$(echo "$npe_node_max / $nth_wavepostbndpntbll" | bc) + export npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) export NTASKS=${npe_wavepostbndpntbll} -elif [ $step = "wavepostpnt" ]; then +elif [ ${step} = "wavepostpnt" ]; then export wtime_wavepostpnt="02:00:00" export npe_wavepostpnt=280 export nth_wavepostpnt=1 - export npe_node_wavepostpnt=$(echo "$npe_node_max / $nth_wavepostpnt" | bc) + export npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) export NTASKS=${npe_wavepostpnt} -elif [ $step = "wavegempak" ]; then +elif [ ${step} = "wavegempak" ]; then export wtime_wavegempak="01:00:00" - export npe_wavegempak=$npe_node_max + export npe_wavegempak=${npe_node_max} export nth_wavegempak=1 - export npe_node_wavegempak=$(echo "$npe_node_max / $nth_wavegempak" | bc) + export npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) export NTASKS=${npe_wavegempak} -elif [ $step = "waveawipsbulls" ]; then +elif [ ${step} = "waveawipsbulls" ]; then export wtime_waveawipsbulls="00:30:00" - export npe_waveawipsbulls=$npe_node_max + export npe_waveawipsbulls=${npe_node_max} export nth_waveawipsbulls=1 - export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) export NTASKS=${npe_waveawipsbulls} -elif [ $step = "waveawipsgridded" ]; then +elif [ ${step} = "waveawipsgridded" ]; then export wtime_waveawipsgridded="00:30:00" - export npe_waveawipsgridded=$npe_node_max + 
export npe_waveawipsgridded=${npe_node_max} export nth_waveawipsgridded=1 - export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) export NTASKS=${npe_waveawipsgridded} -elif [ $step = "atmanalprep" ]; then +elif [ ${step} = "atmanalprep" ]; then export wtime_atmanalprep="00:10:00" export npe_atmanalprep=1 export nth_atmanalprep=1 - export npe_node_atmanalprep=$(echo "$npe_node_max / $nth_atmanalprep" | bc) + export npe_node_atmanalprep=$(echo "${npe_node_max} / ${nth_atmanalprep}" | bc) export memory_atmanalprep="3072M" -elif [ $step = "atmanalrun" ]; then +elif [ ${step} = "atmanalrun" ]; then # make below case dependent later export layout_x=1 export layout_y=1 export wtime_atmanalrun="00:30:00" - export npe_atmanalrun=$(echo "$layout_x * $layout_y * 6" | bc) - export npe_atmanalrun_gfs=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_atmanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) export nth_atmanalrun=1 - export nth_atmanalrun_gfs=$nth_atmanalrun + export nth_atmanalrun_gfs=${nth_atmanalrun} export native_atmanalrun="--exclusive" - export npe_node_atmanalrun=$(echo "$npe_node_max / $nth_atmanalrun" | bc) + export npe_node_atmanalrun=$(echo "${npe_node_max} / ${nth_atmanalrun}" | bc) -elif [ $step = "atmanalpost" ]; then +elif [ ${step} = "atmanalpost" ]; then export wtime_atmanalpost="00:30:00" - export npe_atmanalpost=$npe_node_max + export npe_atmanalpost=${npe_node_max} export nth_atmanalpost=1 - export npe_node_atmanalpost=$(echo "$npe_node_max / $nth_atmanalpost" | bc) + export npe_node_atmanalpost=$(echo "${npe_node_max} / ${nth_atmanalpost}" | bc) -elif [ $step = "aeroanlinit" ]; then +elif [ ${step} = "aeroanlinit" ]; then export wtime_aeroanlinit="00:10:00" export npe_aeroanlinit=1 export nth_aeroanlinit=1 - export npe_node_aeroanlinit=$(echo 
"$npe_node_max / $nth_aeroanlinit" | bc) + export npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) export memory_aeroanlinit="3072M" -elif [ $step = "aeroanlrun" ]; then +elif [ ${step} = "aeroanlrun" ]; then - case $CASE in + case ${CASE} in C768) layout_x=6 layout_y=6 @@ -183,79 +189,104 @@ elif [ $step = "aeroanlrun" ]; then esac export wtime_aeroanlrun="00:30:00" - export npe_aeroanlrun=$(echo "$layout_x * $layout_y * 6" | bc) - export npe_aeroanlrun_gfs=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) export nth_aeroanlrun=1 export nth_aeroanlrun_gfs=1 export native_aeroanlrun="--exclusive" - export npe_node_aeroanlrun=$(echo "$npe_node_max / $nth_aeroanlrun" | bc) + export npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) -elif [ $step = "aeroanlfinal" ]; then +elif [ ${step} = "aeroanlfinal" ]; then export wtime_aeroanlfinal="00:10:00" export npe_aeroanlfinal=1 export nth_aeroanlfinal=1 - export npe_node_aeroanlfinal=$(echo "$npe_node_max / $nth_aeroanlfinal" | bc) + export npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) export memory_aeroanlfinal="3072M" -elif [ $step = "anal" ]; then +elif [ ${step} = "anal" ]; then export wtime_anal="01:00:00" export npe_anal=1000 export nth_anal=5 export npe_anal_gfs=1000 - if [ $CASE = "C384" ]; then + if [ ${CASE} = "C384" ]; then export npe_anal=400 export npe_anal_gfs=400 + if [[ ${machine} = "S4" ]]; then + #For the analysis jobs, the number of tasks and cores used must be equal + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi fi - if [ 
$CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_anal=84 export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi fi - export nth_anal_gfs=$nth_anal - export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) - export nth_cycle=$nth_anal + export nth_anal_gfs=${nth_anal} + export npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export nth_cycle=${nth_anal} -elif [ $step = "analcalc" ]; then +elif [ ${step} = "analcalc" ]; then export wtime_analcalc="00:10:00" export npe_analcalc=127 - export ntasks=$npe_analcalc + export ntasks=${npe_analcalc} export nth_analcalc=1 export nth_echgres=4 export nth_echgres_gfs=12 - export npe_node_analcalc=$npe_node_max + export npe_node_analcalc=${npe_node_max} -elif [ $step = "analdiag" ]; then +elif [ ${step} = "analdiag" ]; then export wtime_analdiag="00:10:00" export npe_analdiag=112 export nth_analdiag=1 - export npe_node_analdiag=$npe_node_max + export npe_node_analdiag=${npe_node_max} -elif [ $step = "sfcanl" ]; then +elif [ ${step} = "sfcanl" ]; then export wtime_sfcanl="00:10:00" export npe_sfcanl=6 export nth_sfcanl=1 - export npe_node_sfcanl=$(echo "$npe_node_max / $nth_sfcanl" | bc) + export npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) -elif [ $step = "gldas" ]; then +elif [ ${step} = "gldas" ]; then export wtime_gldas="00:10:00" export npe_gldas=96 export nth_gldas=1 - export npe_node_gldas=$npe_node_max + export npe_node_gldas=${npe_node_max} export npe_gaussian=96 export nth_gaussian=1 export npe_node_gaussian=24 -elif [ $step = "fcst" ]; then +elif [ ${step} = "fcst" ]; then export wtime_fcst="00:30:00" - if [ $CASE = "C768" ]; then + 
if [ ${CASE} = "C768" ]; then export wtime_fcst_gfs="06:00:00" - elif [ $CASE = "C384" ]; then + elif [ ${CASE} = "C384" ]; then export wtime_fcst_gfs="06:00:00" else export wtime_fcst_gfs="03:00:00" @@ -263,12 +294,12 @@ elif [ $step = "fcst" ]; then # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined CDUMP_LIST=${CDUMP:-"gdas gfs"} - for CDUMP in $CDUMP_LIST; do - if [[ "$CDUMP" == "gfs" ]]; then - export layout_x=$layout_x_gfs - export layout_y=$layout_y_gfs - export WRITE_GROUP=$WRITE_GROUP_GFS - export WRTTASK_PER_GROUP=$WRTTASK_PER_GROUP_GFS + for CDUMP in ${CDUMP_LIST}; do + if [[ ${CDUMP} == "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP=${WRTTASK_PER_GROUP_GFS} fi (( ATMPETS = layout_x * layout_y * 6 )) @@ -276,75 +307,75 @@ elif [ $step = "fcst" ]; then # Mediator only uses the atm model PETS or less export MEDPETS=${MEDPETS:-ATMPETS} - if [[ $DO_AERO == "YES" ]]; then + if [[ ${DO_AERO} == "YES" ]]; then # Aerosol model only uses the atm model PETS - export CHMPETS=$ATMPETS + export CHMPETS=${ATMPETS} # Aerosol model runs on same PETs as ATM, so don't add to $NTASKS_TOT fi # If using in-line post, add the write tasks to the ATMPETS - if [[ $QUILTING == ".true." ]]; then + if [[ ${QUILTING} == ".true." 
]]; then (( ATMPETS = ATMPETS + WRITE_GROUP * WRTTASK_PER_GROUP )) fi export ATMPETS - NTASKS_TOT=$ATMPETS + NTASKS_TOT=${ATMPETS} export nth_fcst=${nth_fv3:-2} export nth_fcst_gfs=${nth_fv3_gfs:-2} - export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc) - export npe_node_fcst_gfs=$(echo "$npe_node_max / $nth_fcst_gfs" | bc) + export npe_node_fcst=$(echo "${npe_node_max} / ${nth_fcst}" | bc) + export npe_node_fcst_gfs=$(echo "${npe_node_max} / ${nth_fcst_gfs}" | bc) - if [[ $DO_WAVE == "YES" ]]; then - case $waveGRD in + if [[ ${DO_WAVE} == "YES" ]]; then + case ${waveGRD} in 'gnh_10m aoc_9km gsh_15m') export WAVPETS=140 ;; 'gwes_30m') export WAVPETS=100 ;; 'mx050') export WAVPETS=240 ;; 'mx025') export WAVPETS=80 ;; *) - echo "FATAL: Number of PEs not defined for wave grid '$waveGRD'" + echo "FATAL: Number of PEs not defined for wave grid '${waveGRD}'" echo " Please add an entry to config.resources within fcst for this grid" exit 3 esac (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) fi - if [[ $DO_OCN == "YES" ]]; then - case $OCNRES in + if [[ ${DO_OCN} == "YES" ]]; then + case ${OCNRES} in # Except for 025, these are guesses for now 100) export OCNPETS=20 ;; 050) export OCNPETS=60 ;; 025) export OCNPETS=220 ;; *) - echo "FATAL: Number of PEs not defined for ocean resolution '$OCNRES'" + echo "FATAL: Number of PEs not defined for ocean resolution ${OCNRES}" echo " Please add an entry to config.resources within fcst for this resolution" exit 3 esac (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) fi - if [[ $DO_ICE == "YES" ]]; then - case $ICERES in + if [[ ${DO_ICE} == "YES" ]]; then + case ${ICERES} in # Except for 025, these are guesses for now 100) export ICEPETS=10 ;; 050) export ICEPETS=30 ;; 025) export ICEPETS=120 ;; *) - echo "FATAL: Number of PEs not defined for ice resolution '$ICERES'" + echo "FATAL: Number of PEs not defined for ice resolution ${ICERES}" echo " Please add an entry to config.resources within fcst for this resolution" exit 3 esac (( NTASKS_TOT 
= NTASKS_TOT + ICEPETS )) fi - if [[ $CDUMP == "gfs" ]]; then - export npe_fcst_gfs=$NTASKS_TOT + if [[ ${CDUMP} == "gfs" ]]; then + export npe_fcst_gfs=${NTASKS_TOT} else - export npe_fcst=$NTASKS_TOT + export npe_fcst=${NTASKS_TOT} fi done -elif [ $step = "ocnpost" ]; then +elif [ ${step} = "ocnpost" ]; then export wtime_ocnpost="00:30:00" export npe_ocnpost=1 @@ -352,58 +383,58 @@ elif [ $step = "ocnpost" ]; then export nth_ocnpost=1 export memory_ocnpost="96G" -elif [ $step = "post" ]; then +elif [ ${step} = "post" ]; then export wtime_post="02:00:00" export wtime_post_gfs="06:00:00" export npe_post=112 export nth_post=1 export npe_node_post=12 - export npe_node_dwn=$npe_node_max + export npe_node_dwn=${npe_node_max} -elif [ $step = "wafs" ]; then +elif [ ${step} = "wafs" ]; then export wtime_wafs="00:30:00" export npe_wafs=1 export npe_node_wafs=1 export nth_wafs=1 -elif [ $step = "wafsgcip" ]; then +elif [ ${step} = "wafsgcip" ]; then export wtime_wafsgcip="00:30:00" export npe_wafsgcip=2 export npe_node_wafsgcip=1 export nth_wafsgcip=1 -elif [ $step = "wafsgrib2" ]; then +elif [ ${step} = "wafsgrib2" ]; then export wtime_wafsgrib2="00:30:00" export npe_wafsgrib2=1 export npe_node_wafsgrib2=1 export nth_wafsgrib2=1 -elif [ $step = "wafsblending" ]; then +elif [ ${step} = "wafsblending" ]; then export wtime_wafsblending="00:30:00" export npe_wafsblending=1 export npe_node_wafsblending=1 export nth_wafsblending=1 -elif [ $step = "wafsgrib20p25" ]; then +elif [ ${step} = "wafsgrib20p25" ]; then export wtime_wafsgrib20p25="00:30:00" export npe_wafsgrib20p25=1 export npe_node_wafsgrib20p25=1 export nth_wafsgrib20p25=1 -elif [ $step = "wafsblending0p25" ]; then +elif [ ${step} = "wafsblending0p25" ]; then export wtime_wafsblending0p25="00:30:00" export npe_wafsblending0p25=1 export npe_node_wafsblending0p25=1 export nth_wafsblending0p25=1 -elif [ $step = "vrfy" ]; then +elif [ ${step} = "vrfy" ]; then export wtime_vrfy="03:00:00" export wtime_vrfy_gfs="06:00:00" @@ 
-412,11 +443,11 @@ elif [ $step = "vrfy" ]; then export npe_node_vrfy=1 export npe_vrfy_gfs=1 export npe_node_vrfy_gfs=1 - if [[ "$machine" == "HERA" ]]; then + if [[ ${machine} == "HERA" ]]; then export memory_vrfy="16384M" fi -elif [ $step = "metp" ]; then +elif [ ${step} = "metp" ]; then export nth_metp=1 export wtime_metp="03:00:00" @@ -426,14 +457,14 @@ elif [ $step = "metp" ]; then export npe_metp_gfs=4 export npe_node_metp_gfs=4 -elif [ $step = "echgres" ]; then +elif [ ${step} = "echgres" ]; then export wtime_echgres="00:10:00" export npe_echgres=3 - export nth_echgres=$npe_node_max + export nth_echgres=${npe_node_max} export npe_node_echgres=1 -elif [ $step = "init" ]; then +elif [ ${step} = "init" ]; then export wtime_init="00:30:00" export npe_init=24 @@ -441,150 +472,159 @@ elif [ $step = "init" ]; then export npe_node_init=6 export memory_init="70G" -elif [ $step = "init_chem" ]; then +elif [ ${step} = "init_chem" ]; then export wtime_init_chem="00:30:00" export npe_init_chem=1 export npe_node_init_chem=1 -elif [ $step = "mom6ic" ]; then +elif [ ${step} = "mom6ic" ]; then export wtime_mom6ic="00:30:00" export npe_mom6ic=24 export npe_node_mom6ic=24 -elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then - eval "export wtime_$step='06:00:00'" - eval "export npe_$step=1" - eval "export npe_node_$step=1" - eval "export nth_$step=1" - eval "export memory_$step=2048M" + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=2048M" -elif [ $step = "coupled_ic" ]; then +elif [ ${step} = "coupled_ic" ]; then export wtime_coupled_ic="00:15:00" export npe_coupled_ic=1 export npe_node_coupled_ic=1 export nth_coupled_ic=1 -elif [ $step = "atmensanalprep" ]; then +elif [ ${step} = "atmensanalprep" ]; then export wtime_atmensanalprep="00:10:00" export npe_atmensanalprep=1 
export nth_atmensanalprep=1 - export npe_node_atmensanalprep=$(echo "$npe_node_max / $nth_atmensanalprep" | bc) + export npe_node_atmensanalprep=$(echo "${npe_node_max} / ${nth_atmensanalprep}" | bc) -elif [ $step = "atmensanalrun" ]; then +elif [ ${step} = "atmensanalrun" ]; then # make below case dependent later export layout_x=2 export layout_y=3 export wtime_atmensanalrun="00:30:00" - export npe_atmensanalrun=$(echo "$layout_x * $layout_y * 6" | bc) - export npe_atmensanalrun_gfs=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_atmensanalrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanalrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) export nth_atmensanalrun=1 - export nth_atmensanalrun_gfs=$nth_atmensanalrun + export nth_atmensanalrun_gfs=${nth_atmensanalrun} export native_atmensanalrun="--exclusive" - export npe_node_atmensanalrun=$(echo "$npe_node_max / $nth_atmensanalrun" | bc) + export npe_node_atmensanalrun=$(echo "${npe_node_max} / ${nth_atmensanalrun}" | bc) -elif [ $step = "atmensanalpost" ]; then +elif [ ${step} = "atmensanalpost" ]; then export wtime_atmensanalpost="00:30:00" - export npe_atmensanalpost=$npe_node_max + export npe_atmensanalpost=${npe_node_max} export nth_atmensanalpost=1 - export npe_node_atmensanalpost=$(echo "$npe_node_max / $nth_atmensanalpost" | bc) + export npe_node_atmensanalpost=$(echo "${npe_node_max} / ${nth_atmensanalpost}" | bc) -elif [ $step = "eobs" -o $step = "eomg" ]; then +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then export wtime_eobs="00:45:00" export wtime_eomg="01:00:00" - if [ $CASE = "C768" ]; then + if [ ${CASE} = "C768" ]; then export npe_eobs=200 - elif [ $CASE = "C384" ]; then + elif [ ${CASE} = "C384" ]; then export npe_eobs=100 - elif [ $CASE = "C192" ]; then + elif [ ${CASE} = "C192" ]; then export npe_eobs=40 - elif [ $CASE = "C96" -o $CASE = "C48" ]; then + elif [[ ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_eobs=20 fi - export npe_eomg=$npe_eobs + 
export npe_eomg=${npe_eobs} export nth_eobs=2 - export nth_eomg=$nth_eobs - export npe_node_eobs=$(echo "$npe_node_max / $nth_eobs" | bc) - export npe_node_eomg=$npe_node_eobs + export nth_eomg=${nth_eobs} + export npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eomg=${npe_node_eobs} + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi -elif [ $step = "ediag" ]; then +elif [ ${step} = "ediag" ]; then export wtime_ediag="00:06:00" export npe_ediag=56 export nth_ediag=1 - export npe_node_ediag=$npe_node_max + export npe_node_ediag=${npe_node_max} -elif [ $step = "eupd" ]; then +elif [ ${step} = "eupd" ]; then export wtime_eupd="00:30:00" - if [ $CASE = "C768" ]; then + if [ ${CASE} = "C768" ]; then export npe_eupd=480 export nth_eupd=6 - if [[ "$machine" = "HERA" ]]; then + if [[ ${machine} = "HERA" ]]; then export npe_eupd=150 export nth_eupd=40 fi - elif [ $CASE = "C384" ]; then + elif [ ${CASE} = "C384" ]; then export npe_eupd=270 export nth_eupd=2 - if [[ "$machine" = "HERA" ]]; then + if [[ ${machine} = "HERA" ]]; then export npe_eupd=100 export nth_eupd=40 fi - elif [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then + if [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export npe_eupd=42 export nth_eupd=2 - if [[ "$machine" = "HERA" ]]; then + if [[ ${machine} = "HERA" ]]; then export npe_eupd=40 export nth_eupd=40 fi fi - export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + export npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) -elif [ $step = "ecen" ]; then +elif [ ${step} = "ecen" ]; then export wtime_ecen="00:10:00" export npe_ecen=80 export nth_ecen=6 - if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi - export 
npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) - export nth_cycle=$nth_ecen + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + export npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export nth_cycle=${nth_ecen} -elif [ $step = "esfc" ]; then +elif [ ${step} = "esfc" ]; then export wtime_esfc="00:06:00" export npe_esfc=80 - export npe_node_esfc=$npe_node_max + export npe_node_esfc=${npe_node_max} export nth_esfc=1 - export nth_cycle=$nth_esfc + export nth_cycle=${nth_esfc} -elif [ $step = "efcs" ]; then +elif [ ${step} = "efcs" ]; then - if [ $CASE = "C768" ]; then + if [ ${CASE} = "C768" ]; then export wtime_efcs="01:00:00" else export wtime_efcs="00:40:00" fi - export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_efcs=$(echo "${layout_x} * ${layout_y} * 6" | bc) export nth_efcs=${nth_fv3:-2} - export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + export npe_node_efcs=$(echo "${npe_node_max} / ${nth_efcs}" | bc) -elif [ $step = "epos" ]; then +elif [ ${step} = "epos" ]; then export wtime_epos="00:15:00" export npe_epos=80 export nth_epos=6 - export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + export npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) -elif [ $step = "postsnd" ]; then +elif [ ${step} = "postsnd" ]; then export wtime_postsnd="02:00:00" export npe_postsnd=40 @@ -592,20 +632,20 @@ elif [ $step = "postsnd" ]; then export npe_node_postsnd=5 export npe_postsndcfp=9 export npe_node_postsndcfp=3 - if [ $OUTPUT_FILE == "nemsio" ]; then + if [ ${OUTPUT_FILE} == "nemsio" ]; then export npe_postsnd=13 export npe_node_postsnd=4 fi - if [[ "$machine" = "HERA" ]]; then export npe_node_postsnd=2; fi + if [[ ${machine} = "HERA" ]]; then export npe_node_postsnd=2; fi -elif [ $step = "awips" ]; then +elif [ ${step} = "awips" ]; then export wtime_awips="03:30:00" export npe_awips=4 export npe_node_awips=4 export nth_awips=2 -elif [ $step = 
"gempak" ]; then +elif [ ${step} = "gempak" ]; then export wtime_gempak="02:00:00" export npe_gempak=28 @@ -614,7 +654,7 @@ elif [ $step = "gempak" ]; then else - echo "Invalid step = $step, ABORT!" + echo "Invalid step = ${step}, ABORT!" exit 2 fi diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy index 15f902ca13..22ec61deff 100755 --- a/parm/config/config.vrfy +++ b/parm/config/config.vrfy @@ -43,6 +43,8 @@ if [ $VRFYFITS = "YES" ]; then if [ $machine = "HERA" ]; then export PREPQFITSH="$fitdir/subfits_hera_slurm" + elif [ $machine = "S4" ]; then + export PREPQFITSH="$fitdir/subfits_s4_slurm" elif [ $machine = "ORION" ]; then export PREPQFITSH="$fitdir/subfits_orion_netcdf" else @@ -129,10 +131,10 @@ fi if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then - if [ $machine = "HERA" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.hera" + if [ "$machine" = "HERA" ] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" else - echo "WARNING: MOS package is not enabled on $machine!" + echo "WARNING: MOS package is not enabled on ${machine}!" 
export RUNMOS="NO" export RUNGFSMOSSH="" fi diff --git a/sorc/checkout.sh b/sorc/checkout.sh index e0d1857edd..441cd78124 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -152,7 +152,7 @@ mkdir -p "${logdir}" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 -checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "3a609ea" ; errs=$((errs + $?)) +checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "7bf599f" ; errs=$((errs + $?)) checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash}"; errs=$((errs + $?)) checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "8b990c0" ; errs=$((errs + $?)) checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 1d529d3ca2..15c09168d9 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -59,6 +59,7 @@ case "${machine}" in "hera") FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix" ;; "orion") FIX_DIR="/work/noaa/global/glopara/fix" ;; "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;; + "s4") FIX_DIR="/data/prod/glopara/fix" ;; *) echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR" exit 1 diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index 1ca3bcaa35..11d7d88466 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -33,6 +33,9 @@ elif [[ -d /glade ]] ; then elif [[ -d /lustre && -d /ncrc ]] ; then # We are on GAEA. 
module load module_base.gaea +elif [[ -d /data/prod ]] ; then + # We are on SSEC S4 + module load module_base.s4 else echo WARNING: UNKNOWN PLATFORM fi diff --git a/workflow/hosts.py b/workflow/hosts.py index 57bce4e5e4..968b3bc1b4 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -24,7 +24,7 @@ class Host: """ SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', - 'WCOSS2'] + 'WCOSS2', 'S4'] def __init__(self, host=None): @@ -50,6 +50,8 @@ def detect(cls): machine = 'JET' elif os.path.exists('/lfs/f1'): machine = 'WCOSS2' + elif os.path.exists('/data/prod'): + machine = 'S4' if machine not in Host.SUPPORTED_HOSTS: raise NotImplementedError(f'This machine is not a supported host.\n' + diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml index 5f7951ff58..19cd1157e7 100644 --- a/workflow/hosts/hera.yaml +++ b/workflow/hosts/hera.yaml @@ -10,11 +10,14 @@ noscrub: $HOMEDIR account: fv3-cpu scheduler: slurm queue: batch -queue_service: service +queue_service: batch partition_batch: hera +partition_service: service chgrp_rstprod: 'YES' chgrp_cmd: 'chgrp rstprod' hpssarch: 'YES' localarch: 'NO' atardir: '/NCEPDEV/$HPSS_PROJECT/1year/$USER/$machine/scratch/$PSLOT' +make_nsstbufr: 'NO' +make_acftbufr: 'NO' supported_resolutions: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml index ce7d93eaba..49c905538b 100644 --- a/workflow/hosts/orion.yaml +++ b/workflow/hosts/orion.yaml @@ -10,11 +10,14 @@ noscrub: $HOMEDIR scheduler: slurm account: fv3-cpu queue: batch -queue_service: service +queue_service: batch partition_batch: orion +partition_service: service chgrp_rstprod: 'YES' chgrp_cmd: 'chgrp rstprod' hpssarch: 'NO' localarch: 'NO' atardir: '$NOSCRUB/archive_rotdir/$PSLOT' +make_nsstbufr: 'NO' +make_acftbufr: 'NO' supported_resolutions: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml new file mode 100644 index 0000000000..38a434b353 --- /dev/null +++ 
b/workflow/hosts/s4.yaml @@ -0,0 +1,23 @@ +base_git: '/data/prod/glopara/git' +dmpdir: '/data/prod/glopara/dump' +packageroot: '/data/prod/glopara/nwpara' +comroot: '/data/prod/glopara/com' +cominsyn: '${COMROOT}/gfs/prod/syndat' +homedir: '/data/users/$USER' +stmp: '/scratch/users/$USER' +ptmp: '/scratch/users/$USER' +noscrub: $HOMEDIR +account: star +scheduler: slurm +queue: s4 +queue_service: serial +partition_batch: s4 +partition_service: serial +chgrp_rstprod: 'NO' +chgrp_cmd: 'ls' +hpssarch: 'NO' +localarch: 'NO' +atardir: '$NOSCRUB/archive_rotdir/$PSLOT' +make_nsstbufr: 'YES' +make_acftbufr: 'YES' +supported_resolutions: ['C384', 'C192', 'C96', 'C48'] diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py index 53321f14fd..5ec1dbb39c 100644 --- a/workflow/rocoto/workflow_tasks.py +++ b/workflow/rocoto/workflow_tasks.py @@ -106,13 +106,11 @@ def get_resource(self, task_name): elif scheduler in ['slurm']: native = '--export=NONE' - queue = task_config['QUEUE'] - if task_name in Tasks.SERVICE_TASKS and scheduler not in ['slurm']: - queue = task_config['QUEUE_SERVICE'] + queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] partition = None if scheduler in ['slurm']: - partition = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config[ + partition = task_config['PARTITION_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config[ 'PARTITION_BATCH'] task_resource = {'account': account, diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index bc2eff7f43..ac9f1a8dc7 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -136,6 +136,7 @@ def edit_baseconfig(host, inputs): "@QUEUE@": host.info["queue"], "@QUEUE_SERVICE@": host.info["queue_service"], "@PARTITION_BATCH@": host.info["partition_batch"], + "@PARTITION_SERVICE@": host.info["partition_service"], "@EXP_WARM_START@": inputs.warm_start, "@MODE@": inputs.mode, "@CHGRP_RSTPROD@": 
host.info["chgrp_rstprod"], @@ -143,6 +144,8 @@ def edit_baseconfig(host, inputs): "@HPSSARCH@": host.info["hpssarch"], "@LOCALARCH@": host.info["localarch"], "@ATARDIR@": host.info["atardir"], + "@MAKE_NSSTBUFR@": host.info["make_nsstbufr"], + "@MAKE_ACFTBUFR@": host.info["make_acftbufr"], "@gfs_cyc@": inputs.gfs_cyc, "@APP@": inputs.app, } From e915eb64095a3ccf3d723892ffa1a2092c8e9a3f Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Fri, 7 Oct 2022 20:31:29 -0400 Subject: [PATCH 6/9] Fix eupd trace (#1057) When the preamble was implemented, a `set +x` was inadvertently left in the ensemble update script while the subsequent `set -x` was. This led to much of the trace not appearing in the output. Also removed a jlog print that is only encountered when ICs are missing. It complicated the real IC missing error with an additional unbound variable error. --- scripts/exgdas_enkf_update.sh | 1 - ush/forecast_postdet.sh | 2 -- 2 files changed, 3 deletions(-) diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh index 422b2e54e2..253a64443f 100755 --- a/scripts/exgdas_enkf_update.sh +++ b/scripts/exgdas_enkf_update.sh @@ -153,7 +153,6 @@ $NLN $COMOUT_ANL_ENS/$GBIASe satbias_in if [ $USE_CFP = "YES" ]; then [[ -f $DATA/untar.sh ]] && rm $DATA/untar.sh [[ -f $DATA/mp_untar.sh ]] && rm $DATA/mp_untar.sh - set +x cat > $DATA/untar.sh << EOFuntar #!/bin/sh memchar=\$1 diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 1e0e1631e7..7a2ba0e7ad 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -150,8 +150,6 @@ EOF nfiles=$(ls -1 $DATA/INPUT/* | wc -l) if [ $nfiles -le 0 ]; then echo SUB ${FUNCNAME[0]}: Initial conditions must exist in $DATA/INPUT, ABORT! - msg="SUB ${FUNCNAME[0]}: Initial conditions must exist in $DATA/INPUT, ABORT!" 
- postmsg "$jlogfile" "$msg" exit 1 fi fi From 9553ef690b12709fd3024f07ad81257d02453ac6 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 11 Oct 2022 11:22:50 -0400 Subject: [PATCH 7/9] Limit number of CPU for post (#1061) Limits the number of MPI tasks for post to the resolution of the forecast. UPP seems to fail if it is given more ranks than the resolution. Fixes #1060 --- parm/config/config.resources | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/parm/config/config.resources b/parm/config/config.resources index 909f0f6a2b..21ec6d2b5b 100755 --- a/parm/config/config.resources +++ b/parm/config/config.resources @@ -387,7 +387,11 @@ elif [ ${step} = "post" ]; then export wtime_post="02:00:00" export wtime_post_gfs="06:00:00" + res=$(echo ${CASE} | cut -c2-) export npe_post=112 + if (( npe_post > res )); then + export npe_post=${res} + fi export nth_post=1 export npe_node_post=12 export npe_node_dwn=${npe_node_max} From e8ef5fc6cc2781f5c3c47e7cf2762a6f7de2d123 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 11 Oct 2022 14:25:02 -0400 Subject: [PATCH 8/9] Add trailing slash for gldas topo path (#1064) GLDAS requires the namelist definition for the topo directory to have the trailing slash. 
Fixes #1063 --- scripts/exgdas_atmos_gldas.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/exgdas_atmos_gldas.sh b/scripts/exgdas_atmos_gldas.sh index 0497dab574..c06989b947 100755 --- a/scripts/exgdas_atmos_gldas.sh +++ b/scripts/exgdas_atmos_gldas.sh @@ -180,7 +180,7 @@ cat >> fort.141 << EOF data_dir_input_grid="${input1}" sfc_files_input_grid="${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile1.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile2.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile3.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile4.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile5.nc","${gldas_symd}.${gldas_scyc}0000.sfcanl_data.tile6.nc" mosaic_file_input_grid="${CASE}_mosaic.nc" - orog_dir_input_grid="${topodir}" + orog_dir_input_grid="${topodir}/" orog_files_input_grid="${CASE}_oro_data.tile1.nc","${CASE}_oro_data.tile2.nc","${CASE}_oro_data.tile3.nc","${CASE}_oro_data.tile4.nc","${CASE}_oro_data.tile5.nc","${CASE}_oro_data.tile6.nc" i_target=${nlon} j_target=${nlat} @@ -244,7 +244,7 @@ cat >> fort.241 << EOF data_dir_input_grid="${input2}" sfc_files_input_grid="${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile1.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile2.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile3.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile4.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile5.nc","${iau_eymd}.${iau_ecyc}0000.sfcanl_data.tile6.nc" mosaic_file_input_grid="${CASE}_mosaic.nc" - orog_dir_input_grid="${topodir}" + orog_dir_input_grid="${topodir}/" orog_files_input_grid="${CASE}_oro_data.tile1.nc","${CASE}_oro_data.tile2.nc","${CASE}_oro_data.tile3.nc","${CASE}_oro_data.tile4.nc","${CASE}_oro_data.tile5.nc","${CASE}_oro_data.tile6.nc" i_target=${nlon} j_target=${nlat} @@ -290,7 +290,7 @@ mv fort.22 "${sfcanl}.gldas" cat >> fort.42 << EOF &config - orog_dir_gdas_grid="${topodir}" + orog_dir_gdas_grid="${topodir}/" mosaic_file_gdas_grid="${CASE}_mosaic.nc" / EOF From 
8172530245972c7f569a2bf950b1929282b937e4 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 11 Oct 2022 15:58:37 -0400 Subject: [PATCH 9/9] Fix companion ocean resolution for C48 (#1066) The ocean resolution for atmosphere C48 should be 5 deg, not 4 deg. Fixes #1054 --- parm/config/config.base.emc.dyn | 2 +- parm/config/config.efcs | 2 +- parm/config/config.ocn | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn index 0633db18f5..8e465c908c 100755 --- a/parm/config/config.base.emc.dyn +++ b/parm/config/config.base.emc.dyn @@ -218,7 +218,7 @@ export LEVS=128 export CASE="@CASECTL@" export CASE_ENKF="@CASEENS@" case "$CASE" in - "C48") export OCNRES=400;; + "C48") export OCNRES=500;; "C96") export OCNRES=100;; "C192") export OCNRES=050;; "C384") export OCNRES=025;; diff --git a/parm/config/config.efcs b/parm/config/config.efcs index bfffe3ced8..4dedc38d3a 100755 --- a/parm/config/config.efcs +++ b/parm/config/config.efcs @@ -67,7 +67,7 @@ export cplwav=.false. # ocean model resolution case "$CASE_ENKF" in - "C48") export OCNRES=400;; + "C48") export OCNRES=500;; "C96") export OCNRES=100;; "C192") export OCNRES=050;; "C384") export OCNRES=025;; diff --git a/parm/config/config.ocn b/parm/config/config.ocn index 1675713e7c..4d24c0d87f 100644 --- a/parm/config/config.ocn +++ b/parm/config/config.ocn @@ -2,7 +2,7 @@ # OCNRES is currently being set in config.base # case "$CASE" in -# "C48") export OCNRES=400;; +# "C48") export OCNRES=500;; # "C96") export OCNRES=100;; # "C192") export OCNRES=050;; # "C384") export OCNRES=025;;